[WIP] Fix audio latency issues: move audio to a dedicated MediaStream

For more details please see: https://groups.google.com/g/discuss-webrtc/c/ZvAHvkHsb0E
Alex P 2025-09-09 00:23:15 +00:00
parent aa21b4b459
commit 845eadec18
5 changed files with 32 additions and 8 deletions
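Giving audio its own MediaStream means the browser no longer groups it with video for lip-sync, which is presumably the source of the latency discussed in the linked thread. The client therefore merges the two incoming streams itself. Below is a minimal sketch of that merge, using stand-in accessors for the RTC store calls (useRTCStore.getState().mediaStream / setMediaStream) that appear in the diffs further down; it illustrates the approach, not the exact frontend code.

```typescript
// Sketch of the client-side merge (stand-in names, not the exact frontend code).
// getStoredStream/setMediaStream represent the RTC store accessors used in the diffs below.
function mergeIncomingTrack(
  event: RTCTrackEvent,
  getStoredStream: () => MediaStream | null,
  setMediaStream: (stream: MediaStream) => void,
) {
  // With dedicated stream IDs ("kvm-video" / "kvm-audio"), each track arrives in its
  // own MediaStream, so tracks are accumulated into one combined stream instead of
  // assuming event.streams[0] carries both audio and video.
  const combined = getStoredStream() ?? new MediaStream();
  combined.addTrack(event.track);
  setMediaStream(combined);
}

// Usage inside the peer connection setup, e.g.:
// pc.ontrack = e => mergeIncomingTrack(e, () => store.mediaStream, store.setMediaStream);
```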


@@ -294,7 +294,7 @@ func (abm *AdaptiveBufferManager) ActivateGracefulDegradation(level int) {
atomic.StoreInt64(&abm.currentInputBufferSize, minSize)
atomic.StoreInt64(&abm.currentOutputBufferSize, minSize)
- abm.logger.Error().
+ abm.logger.Warn().
Int("level", level).
Int64("buffer_size", minSize).
Msg("Activated severe graceful degradation - emergency mode")


@@ -530,7 +530,7 @@ func DefaultAudioConfig() *AudioConfigConstants {
// Graceful Degradation Configuration
CongestionMildReductionFactor: 0.75, // Buffer reduction factor for mild congestion (0.75)
CongestionModerateReductionFactor: 0.5, // Buffer reduction factor for moderate congestion (0.5)
- CongestionThresholdMultiplier: 10.0, // Multiplier for congestion threshold calculations (increased to reduce false emergency mode triggers)
+ CongestionThresholdMultiplier: 36.0, // Multiplier for congestion threshold calculations (increased to reduce false emergency mode triggers)
CongestionRecoveryTimeout: 5 * time.Second, // Timeout for congestion recovery (5 seconds)
// Buffer Pool Cache Configuration


@@ -345,8 +345,13 @@ export default function WebRTCVideo({ microphone }: WebRTCVideoProps) {
peerConnection.addEventListener(
"track",
- (e: RTCTrackEvent) => {
- addStreamToVideoElm(e.streams[0]);
+ (_e: RTCTrackEvent) => {
+ // The combined MediaStream is now managed in the main component
+ // We'll use the mediaStream from the store instead of individual track streams
+ const { mediaStream } = useRTCStore.getState();
+ if (mediaStream) {
+ addStreamToVideoElm(mediaStream);
+ }
},
{ signal },
);


@@ -475,8 +475,27 @@ export default function KvmIdRoute() {
}
};
- pc.ontrack = function (event) {
- setMediaStream(event.streams[0]);
+ pc.ontrack = function (event: RTCTrackEvent) {
+ // Handle separate MediaStreams for audio and video tracks
+ const track = event.track;
+ const streams = event.streams;
+ if (streams && streams.length > 0) {
+ // Get existing MediaStream or create a new one
+ const existingStream = useRTCStore.getState().mediaStream;
+ let combinedStream: MediaStream;
+ if (existingStream) {
+ combinedStream = existingStream;
+ // Add the new track to the existing stream
+ combinedStream.addTrack(track);
+ } else {
+ // Create a new MediaStream with the track
+ combinedStream = new MediaStream([track]);
+ }
+ setMediaStream(combinedStream);
+ }
};
setTransceiver(pc.addTransceiver("video", { direction: "recvonly" }));
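Because each track now arrives with its own stream, the old one-liner setMediaStream(event.streams[0]) would keep only whichever stream's track fired last, which is why the handler above accumulates tracks into one store-managed stream. If renegotiation ever re-delivers a track that is already in that stream, a small guard avoids adding it twice; this is an optional hardening sketch, not part of this commit:

```typescript
// Optional guard (assumption, not in this commit): skip tracks that are already
// part of the combined stream, e.g. if ontrack fires again after renegotiation.
function addTrackOnce(stream: MediaStream, track: MediaStreamTrack) {
  const alreadyPresent = stream.getTracks().some(t => t.id === track.id);
  if (!alreadyPresent) {
    stream.addTrack(track);
  }
}
```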


@@ -233,13 +233,13 @@ func newSession(config SessionConfig) (*Session, error) {
}
})
- session.VideoTrack, err = webrtc.NewTrackLocalStaticSample(webrtc.RTPCodecCapability{MimeType: webrtc.MimeTypeH264}, "video", "kvm")
+ session.VideoTrack, err = webrtc.NewTrackLocalStaticSample(webrtc.RTPCodecCapability{MimeType: webrtc.MimeTypeH264}, "video", "kvm-video")
if err != nil {
scopedLogger.Warn().Err(err).Msg("Failed to create VideoTrack")
return nil, err
}
- session.AudioTrack, err = webrtc.NewTrackLocalStaticSample(webrtc.RTPCodecCapability{MimeType: webrtc.MimeTypeOpus}, "audio", "kvm")
+ session.AudioTrack, err = webrtc.NewTrackLocalStaticSample(webrtc.RTPCodecCapability{MimeType: webrtc.MimeTypeOpus}, "audio", "kvm-audio")
if err != nil {
scopedLogger.Warn().Err(err).Msg("Failed to add VideoTrack to PeerConnection")
return nil, err
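On the browser side, these stream IDs surface as the id of the MediaStream delivered with each track (assuming pion's NewTrackLocalStaticSample uses its third argument as the stream ID, as the rename from "kvm" to "kvm-video"/"kvm-audio" suggests). A quick way to confirm the split is in effect is to log what the track events report:

```typescript
// Debug sketch: with dedicated streams, two track events should arrive,
// reporting stream IDs "kvm-video" and "kvm-audio".
declare const pc: RTCPeerConnection; // the existing peer connection

pc.addEventListener("track", (event: RTCTrackEvent) => {
  const streamId = event.streams[0]?.id ?? "(no stream)";
  console.debug(`track kind=${event.track.kind} stream=${streamId}`);
});
```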