Separation of Concerns: Move the audio-related code into the audio internal package

This commit is contained in:
Alex P 2025-08-05 02:04:37 +03:00
parent a208715cc6
commit 3c1f96d49c
5 changed files with 111 additions and 45 deletions

View File

@ -1,4 +1,4 @@
package kvm package audio
import ( import (
"context" "context"
@ -7,7 +7,7 @@ import (
"github.com/coder/websocket" "github.com/coder/websocket"
"github.com/coder/websocket/wsjson" "github.com/coder/websocket/wsjson"
"github.com/jetkvm/kvm/internal/audio" "github.com/jetkvm/kvm/internal/logging"
"github.com/rs/zerolog" "github.com/rs/zerolog"
) )
@ -80,7 +80,7 @@ var (
// InitializeAudioEventBroadcaster initializes the global audio event broadcaster // InitializeAudioEventBroadcaster initializes the global audio event broadcaster
func InitializeAudioEventBroadcaster() { func InitializeAudioEventBroadcaster() {
audioEventOnce.Do(func() { audioEventOnce.Do(func() {
l := logger.With().Str("component", "audio-events").Logger() l := logging.GetDefaultLogger().With().Str("component", "audio-events").Logger()
audioEventBroadcaster = &AudioEventBroadcaster{ audioEventBroadcaster = &AudioEventBroadcaster{
subscribers: make(map[string]*AudioEventSubscriber), subscribers: make(map[string]*AudioEventSubscriber),
logger: &l, logger: &l,
@ -94,7 +94,7 @@ func InitializeAudioEventBroadcaster() {
// GetAudioEventBroadcaster returns the singleton audio event broadcaster // GetAudioEventBroadcaster returns the singleton audio event broadcaster
func GetAudioEventBroadcaster() *AudioEventBroadcaster { func GetAudioEventBroadcaster() *AudioEventBroadcaster {
audioEventOnce.Do(func() { audioEventOnce.Do(func() {
l := logger.With().Str("component", "audio-events").Logger() l := logging.GetDefaultLogger().With().Str("component", "audio-events").Logger()
audioEventBroadcaster = &AudioEventBroadcaster{ audioEventBroadcaster = &AudioEventBroadcaster{
subscribers: make(map[string]*AudioEventSubscriber), subscribers: make(map[string]*AudioEventSubscriber),
logger: &l, logger: &l,
@ -166,15 +166,18 @@ func (aeb *AudioEventBroadcaster) sendInitialState(connectionID string) {
// Send current audio mute state // Send current audio mute state
muteEvent := AudioEvent{ muteEvent := AudioEvent{
Type: AudioEventMuteChanged, Type: AudioEventMuteChanged,
Data: AudioMuteData{Muted: audio.IsAudioMuted()}, Data: AudioMuteData{Muted: IsAudioMuted()},
} }
aeb.sendToSubscriber(subscriber, muteEvent) aeb.sendToSubscriber(subscriber, muteEvent)
// Send current microphone state // Send current microphone state using session provider
sessionActive := currentSession != nil sessionProvider := GetSessionProvider()
sessionActive := sessionProvider.IsSessionActive()
var running bool var running bool
if sessionActive && currentSession.AudioInputManager != nil { if sessionActive {
running = currentSession.AudioInputManager.IsRunning() if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil {
running = inputManager.IsRunning()
}
} }
micStateEvent := AudioEvent{ micStateEvent := AudioEvent{
@ -193,7 +196,7 @@ func (aeb *AudioEventBroadcaster) sendInitialState(connectionID string) {
// sendCurrentMetrics sends current audio and microphone metrics to a subscriber // sendCurrentMetrics sends current audio and microphone metrics to a subscriber
func (aeb *AudioEventBroadcaster) sendCurrentMetrics(subscriber *AudioEventSubscriber) { func (aeb *AudioEventBroadcaster) sendCurrentMetrics(subscriber *AudioEventSubscriber) {
// Send audio metrics // Send audio metrics
audioMetrics := audio.GetAudioMetrics() audioMetrics := GetAudioMetrics()
audioMetricsEvent := AudioEvent{ audioMetricsEvent := AudioEvent{
Type: AudioEventMetricsUpdate, Type: AudioEventMetricsUpdate,
Data: AudioMetricsData{ Data: AudioMetricsData{
@ -207,21 +210,24 @@ func (aeb *AudioEventBroadcaster) sendCurrentMetrics(subscriber *AudioEventSubsc
} }
aeb.sendToSubscriber(subscriber, audioMetricsEvent) aeb.sendToSubscriber(subscriber, audioMetricsEvent)
// Send microphone metrics // Send microphone metrics using session provider
if currentSession != nil && currentSession.AudioInputManager != nil { sessionProvider := GetSessionProvider()
micMetrics := currentSession.AudioInputManager.GetMetrics() if sessionProvider.IsSessionActive() {
micMetricsEvent := AudioEvent{ if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil {
Type: AudioEventMicrophoneMetrics, micMetrics := inputManager.GetMetrics()
Data: MicrophoneMetricsData{ micMetricsEvent := AudioEvent{
FramesSent: micMetrics.FramesSent, Type: AudioEventMicrophoneMetrics,
FramesDropped: micMetrics.FramesDropped, Data: MicrophoneMetricsData{
BytesProcessed: micMetrics.BytesProcessed, FramesSent: micMetrics.FramesSent,
LastFrameTime: micMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"), FramesDropped: micMetrics.FramesDropped,
ConnectionDrops: micMetrics.ConnectionDrops, BytesProcessed: micMetrics.BytesProcessed,
AverageLatency: micMetrics.AverageLatency.String(), LastFrameTime: micMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
}, ConnectionDrops: micMetrics.ConnectionDrops,
AverageLatency: micMetrics.AverageLatency.String(),
},
}
aeb.sendToSubscriber(subscriber, micMetricsEvent)
} }
aeb.sendToSubscriber(subscriber, micMetricsEvent)
} }
} }
@ -241,7 +247,7 @@ func (aeb *AudioEventBroadcaster) startMetricsBroadcasting() {
} }
// Broadcast audio metrics // Broadcast audio metrics
audioMetrics := audio.GetAudioMetrics() audioMetrics := GetAudioMetrics()
audioMetricsEvent := AudioEvent{ audioMetricsEvent := AudioEvent{
Type: AudioEventMetricsUpdate, Type: AudioEventMetricsUpdate,
Data: AudioMetricsData{ Data: AudioMetricsData{
@ -255,21 +261,24 @@ func (aeb *AudioEventBroadcaster) startMetricsBroadcasting() {
} }
aeb.broadcast(audioMetricsEvent) aeb.broadcast(audioMetricsEvent)
// Broadcast microphone metrics if available // Broadcast microphone metrics if available using session provider
if currentSession != nil && currentSession.AudioInputManager != nil { sessionProvider := GetSessionProvider()
micMetrics := currentSession.AudioInputManager.GetMetrics() if sessionProvider.IsSessionActive() {
micMetricsEvent := AudioEvent{ if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil {
Type: AudioEventMicrophoneMetrics, micMetrics := inputManager.GetMetrics()
Data: MicrophoneMetricsData{ micMetricsEvent := AudioEvent{
FramesSent: micMetrics.FramesSent, Type: AudioEventMicrophoneMetrics,
FramesDropped: micMetrics.FramesDropped, Data: MicrophoneMetricsData{
BytesProcessed: micMetrics.BytesProcessed, FramesSent: micMetrics.FramesSent,
LastFrameTime: micMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"), FramesDropped: micMetrics.FramesDropped,
ConnectionDrops: micMetrics.ConnectionDrops, BytesProcessed: micMetrics.BytesProcessed,
AverageLatency: micMetrics.AverageLatency.String(), LastFrameTime: micMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
}, ConnectionDrops: micMetrics.ConnectionDrops,
AverageLatency: micMetrics.AverageLatency.String(),
},
}
aeb.broadcast(micMetricsEvent)
} }
aeb.broadcast(micMetricsEvent)
} }
} }
} }

30
internal/audio/session.go Normal file
View File

@ -0,0 +1,30 @@
package audio
// SessionProvider abstracts session management for audio events, letting the
// audio package query microphone session state without depending on the main
// kvm package (the main package injects its implementation via
// SetSessionProvider).
type SessionProvider interface {
	// IsSessionActive reports whether a session is currently established.
	IsSessionActive() bool
	// GetAudioInputManager returns the active session's audio input
	// manager, or nil when no session (or no manager) is available.
	GetAudioInputManager() *AudioInputManager
}
// DefaultSessionProvider is the no-op fallback used until the main package
// injects a real SessionProvider; it always reports that no session exists.
type DefaultSessionProvider struct{}

// IsSessionActive always reports false: the default provider has no session.
func (*DefaultSessionProvider) IsSessionActive() bool {
	return false
}

// GetAudioInputManager always returns nil: there is no session to query.
func (*DefaultSessionProvider) GetAudioInputManager() *AudioInputManager {
	return nil
}
// sessionProvider is the package-level provider, starting as the no-op
// DefaultSessionProvider until the main package injects a real one.
// NOTE(review): access is unsynchronized — this assumes SetSessionProvider
// is called once during startup, before any goroutine calls
// GetSessionProvider; confirm against the callers in the main package.
var sessionProvider SessionProvider = &DefaultSessionProvider{}

// SetSessionProvider allows the main package to inject session management.
func SetSessionProvider(p SessionProvider) {
	sessionProvider = p
}

// GetSessionProvider returns the current session provider.
func GetSessionProvider() SessionProvider {
	return sessionProvider
}

View File

@ -106,8 +106,11 @@ func Main() {
logger.Warn().Err(err).Msg("failed to start non-blocking audio streaming") logger.Warn().Err(err).Msg("failed to start non-blocking audio streaming")
} }
// Initialize session provider for audio events
initializeAudioSessionProvider()
// Initialize audio event broadcaster for WebSocket-based real-time updates // Initialize audio event broadcaster for WebSocket-based real-time updates
InitializeAudioEventBroadcaster() audio.InitializeAudioEventBroadcaster()
logger.Info().Msg("audio event broadcaster initialized") logger.Info().Msg("audio event broadcaster initialized")
if err := setInitialVirtualMediaState(); err != nil { if err := setInitialVirtualMediaState(); err != nil {

24
session_provider.go Normal file
View File

@ -0,0 +1,24 @@
package kvm
import "github.com/jetkvm/kvm/internal/audio"
// KVMSessionProvider bridges the main kvm package to the audio package by
// implementing the audio.SessionProvider interface on top of the package's
// currentSession state.
type KVMSessionProvider struct{}

// IsSessionActive reports whether a session currently exists.
func (*KVMSessionProvider) IsSessionActive() bool {
	return currentSession != nil
}
// GetAudioInputManager returns the audio input manager attached to the
// current session, or nil when no session is established.
func (*KVMSessionProvider) GetAudioInputManager() *audio.AudioInputManager {
	if s := currentSession; s != nil {
		return s.AudioInputManager
	}
	return nil
}
// initializeAudioSessionProvider sets up the session provider for the audio package
func initializeAudioSessionProvider() {
audio.SetSessionProvider(&KVMSessionProvider{})
}

10
web.go
View File

@ -175,7 +175,7 @@ func setupRouter() *gin.Engine {
audio.SetAudioMuted(req.Muted) audio.SetAudioMuted(req.Muted)
// Broadcast audio mute state change via WebSocket // Broadcast audio mute state change via WebSocket
broadcaster := GetAudioEventBroadcaster() broadcaster := audio.GetAudioEventBroadcaster()
broadcaster.BroadcastAudioMuteChanged(req.Muted) broadcaster.BroadcastAudioMuteChanged(req.Muted)
c.JSON(200, gin.H{"muted": req.Muted}) c.JSON(200, gin.H{"muted": req.Muted})
@ -312,7 +312,7 @@ func setupRouter() *gin.Engine {
} }
// Broadcast microphone state change via WebSocket // Broadcast microphone state change via WebSocket
broadcaster := GetAudioEventBroadcaster() broadcaster := audio.GetAudioEventBroadcaster()
broadcaster.BroadcastMicrophoneStateChanged(true, true) broadcaster.BroadcastMicrophoneStateChanged(true, true)
c.JSON(200, gin.H{ c.JSON(200, gin.H{
@ -347,7 +347,7 @@ func setupRouter() *gin.Engine {
audio.StopNonBlockingAudioInput() audio.StopNonBlockingAudioInput()
// Broadcast microphone state change via WebSocket // Broadcast microphone state change via WebSocket
broadcaster := GetAudioEventBroadcaster() broadcaster := audio.GetAudioEventBroadcaster()
broadcaster.BroadcastMicrophoneStateChanged(false, true) broadcaster.BroadcastMicrophoneStateChanged(false, true)
c.JSON(200, gin.H{ c.JSON(200, gin.H{
@ -547,7 +547,7 @@ func handleWebRTCSignalWsMessages(
setCloudConnectionState(CloudConnectionStateDisconnected) setCloudConnectionState(CloudConnectionStateDisconnected)
} }
// Clean up audio event subscription // Clean up audio event subscription
broadcaster := GetAudioEventBroadcaster() broadcaster := audio.GetAudioEventBroadcaster()
broadcaster.Unsubscribe(connectionID) broadcaster.Unsubscribe(connectionID)
cancelRun() cancelRun()
}() }()
@ -708,7 +708,7 @@ func handleWebRTCSignalWsMessages(
} }
} else if message.Type == "subscribe-audio-events" { } else if message.Type == "subscribe-audio-events" {
l.Info().Msg("client subscribing to audio events") l.Info().Msg("client subscribing to audio events")
broadcaster := GetAudioEventBroadcaster() broadcaster := audio.GetAudioEventBroadcaster()
broadcaster.Subscribe(connectionID, wsCon, runCtx, &l) broadcaster.Subscribe(connectionID, wsCon, runCtx, &l)
} }
} }