Separation of Concerns: Move the audio-related code into the audio internal package

Alex P committed on 2025-08-05 02:04:37 +03:00
parent a208715cc6
commit 3c1f96d49c
5 changed files with 111 additions and 45 deletions


@@ -1,4 +1,4 @@
-package kvm
+package audio
 import (
 	"context"
@@ -7,7 +7,7 @@ import (
 	"github.com/coder/websocket"
 	"github.com/coder/websocket/wsjson"
-	"github.com/jetkvm/kvm/internal/audio"
+	"github.com/jetkvm/kvm/internal/logging"
 	"github.com/rs/zerolog"
 )
@@ -80,7 +80,7 @@ var (
 // InitializeAudioEventBroadcaster initializes the global audio event broadcaster
 func InitializeAudioEventBroadcaster() {
 	audioEventOnce.Do(func() {
-		l := logger.With().Str("component", "audio-events").Logger()
+		l := logging.GetDefaultLogger().With().Str("component", "audio-events").Logger()
 		audioEventBroadcaster = &AudioEventBroadcaster{
 			subscribers: make(map[string]*AudioEventSubscriber),
 			logger:      &l,
@@ -94,7 +94,7 @@ func InitializeAudioEventBroadcaster() {
 // GetAudioEventBroadcaster returns the singleton audio event broadcaster
 func GetAudioEventBroadcaster() *AudioEventBroadcaster {
 	audioEventOnce.Do(func() {
-		l := logger.With().Str("component", "audio-events").Logger()
+		l := logging.GetDefaultLogger().With().Str("component", "audio-events").Logger()
 		audioEventBroadcaster = &AudioEventBroadcaster{
 			subscribers: make(map[string]*AudioEventSubscriber),
 			logger:      &l,
@@ -166,15 +166,18 @@ func (aeb *AudioEventBroadcaster) sendInitialState(connectionID string) {
 	// Send current audio mute state
 	muteEvent := AudioEvent{
 		Type: AudioEventMuteChanged,
-		Data: AudioMuteData{Muted: audio.IsAudioMuted()},
+		Data: AudioMuteData{Muted: IsAudioMuted()},
 	}
 	aeb.sendToSubscriber(subscriber, muteEvent)
-	// Send current microphone state
-	sessionActive := currentSession != nil
+	// Send current microphone state using session provider
+	sessionProvider := GetSessionProvider()
+	sessionActive := sessionProvider.IsSessionActive()
 	var running bool
-	if sessionActive && currentSession.AudioInputManager != nil {
-		running = currentSession.AudioInputManager.IsRunning()
+	if sessionActive {
+		if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil {
+			running = inputManager.IsRunning()
+		}
 	}
 	micStateEvent := AudioEvent{
@@ -193,7 +196,7 @@ func (aeb *AudioEventBroadcaster) sendInitialState(connectionID string) {
 // sendCurrentMetrics sends current audio and microphone metrics to a subscriber
 func (aeb *AudioEventBroadcaster) sendCurrentMetrics(subscriber *AudioEventSubscriber) {
 	// Send audio metrics
-	audioMetrics := audio.GetAudioMetrics()
+	audioMetrics := GetAudioMetrics()
 	audioMetricsEvent := AudioEvent{
 		Type: AudioEventMetricsUpdate,
 		Data: AudioMetricsData{
@@ -207,21 +210,24 @@ }
 	}
 	aeb.sendToSubscriber(subscriber, audioMetricsEvent)
-	// Send microphone metrics
-	if currentSession != nil && currentSession.AudioInputManager != nil {
-		micMetrics := currentSession.AudioInputManager.GetMetrics()
-		micMetricsEvent := AudioEvent{
-			Type: AudioEventMicrophoneMetrics,
-			Data: MicrophoneMetricsData{
-				FramesSent:      micMetrics.FramesSent,
-				FramesDropped:   micMetrics.FramesDropped,
-				BytesProcessed:  micMetrics.BytesProcessed,
-				LastFrameTime:   micMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
-				ConnectionDrops: micMetrics.ConnectionDrops,
-				AverageLatency:  micMetrics.AverageLatency.String(),
-			},
+	// Send microphone metrics using session provider
+	sessionProvider := GetSessionProvider()
+	if sessionProvider.IsSessionActive() {
+		if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil {
+			micMetrics := inputManager.GetMetrics()
+			micMetricsEvent := AudioEvent{
+				Type: AudioEventMicrophoneMetrics,
+				Data: MicrophoneMetricsData{
+					FramesSent:      micMetrics.FramesSent,
+					FramesDropped:   micMetrics.FramesDropped,
+					BytesProcessed:  micMetrics.BytesProcessed,
+					LastFrameTime:   micMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
+					ConnectionDrops: micMetrics.ConnectionDrops,
+					AverageLatency:  micMetrics.AverageLatency.String(),
+				},
+			}
+			aeb.sendToSubscriber(subscriber, micMetricsEvent)
 		}
-		aeb.sendToSubscriber(subscriber, micMetricsEvent)
 	}
 }
@@ -241,7 +247,7 @@ func (aeb *AudioEventBroadcaster) startMetricsBroadcasting() {
 		}
 		// Broadcast audio metrics
-		audioMetrics := audio.GetAudioMetrics()
+		audioMetrics := GetAudioMetrics()
 		audioMetricsEvent := AudioEvent{
 			Type: AudioEventMetricsUpdate,
 			Data: AudioMetricsData{
@@ -255,21 +261,24 @@ }
 		}
 		aeb.broadcast(audioMetricsEvent)
-		// Broadcast microphone metrics if available
-		if currentSession != nil && currentSession.AudioInputManager != nil {
-			micMetrics := currentSession.AudioInputManager.GetMetrics()
-			micMetricsEvent := AudioEvent{
-				Type: AudioEventMicrophoneMetrics,
-				Data: MicrophoneMetricsData{
-					FramesSent:      micMetrics.FramesSent,
-					FramesDropped:   micMetrics.FramesDropped,
-					BytesProcessed:  micMetrics.BytesProcessed,
-					LastFrameTime:   micMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
-					ConnectionDrops: micMetrics.ConnectionDrops,
-					AverageLatency:  micMetrics.AverageLatency.String(),
-				},
+		// Broadcast microphone metrics if available using session provider
+		sessionProvider := GetSessionProvider()
+		if sessionProvider.IsSessionActive() {
+			if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil {
+				micMetrics := inputManager.GetMetrics()
+				micMetricsEvent := AudioEvent{
+					Type: AudioEventMicrophoneMetrics,
+					Data: MicrophoneMetricsData{
+						FramesSent:      micMetrics.FramesSent,
+						FramesDropped:   micMetrics.FramesDropped,
+						BytesProcessed:  micMetrics.BytesProcessed,
+						LastFrameTime:   micMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
+						ConnectionDrops: micMetrics.ConnectionDrops,
+						AverageLatency:  micMetrics.AverageLatency.String(),
+					},
+				}
+				aeb.broadcast(micMetricsEvent)
 			}
-			aeb.broadcast(micMetricsEvent)
 		}
 	}
 }

internal/audio/session.go (new file, +30 lines)

@@ -0,0 +1,30 @@
+package audio
+
+// SessionProvider interface abstracts session management for audio events
+type SessionProvider interface {
+	IsSessionActive() bool
+	GetAudioInputManager() *AudioInputManager
+}
+
+// DefaultSessionProvider is a no-op implementation
+type DefaultSessionProvider struct{}
+
+func (d *DefaultSessionProvider) IsSessionActive() bool {
+	return false
+}
+
+func (d *DefaultSessionProvider) GetAudioInputManager() *AudioInputManager {
+	return nil
+}
+
+var sessionProvider SessionProvider = &DefaultSessionProvider{}
+
+// SetSessionProvider allows the main package to inject session management
+func SetSessionProvider(provider SessionProvider) {
+	sessionProvider = provider
+}
+
+// GetSessionProvider returns the current session provider
+func GetSessionProvider() SessionProvider {
+	return sessionProvider
+}
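
The interface above is what lets the event code run without a real KVM session: the package falls back to the no-op DefaultSessionProvider until something injects a real one. A minimal sketch (not part of this commit; mockSessionProvider and the test are hypothetical) of injecting a fake provider from an external test package:

```go
package audio_test

import (
	"testing"

	"github.com/jetkvm/kvm/internal/audio"
)

// mockSessionProvider is a hypothetical stand-in that reports an active
// session but exposes no audio input manager.
type mockSessionProvider struct{ active bool }

func (m *mockSessionProvider) IsSessionActive() bool { return m.active }

func (m *mockSessionProvider) GetAudioInputManager() *audio.AudioInputManager { return nil }

func TestSessionProviderInjection(t *testing.T) {
	// Swap in the fake provider; the broadcaster code will now consult it
	// instead of the no-op default.
	audio.SetSessionProvider(&mockSessionProvider{active: true})

	if !audio.GetSessionProvider().IsSessionActive() {
		t.Fatal("expected the injected provider to report an active session")
	}
}
```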


@@ -106,8 +106,11 @@ func Main() {
 		logger.Warn().Err(err).Msg("failed to start non-blocking audio streaming")
 	}
+	// Initialize session provider for audio events
+	initializeAudioSessionProvider()
+
 	// Initialize audio event broadcaster for WebSocket-based real-time updates
-	InitializeAudioEventBroadcaster()
+	audio.InitializeAudioEventBroadcaster()
 	logger.Info().Msg("audio event broadcaster initialized")
 	if err := setInitialVirtualMediaState(); err != nil {

session_provider.go (new file, +24 lines)

@@ -0,0 +1,24 @@
+package kvm
+
+import "github.com/jetkvm/kvm/internal/audio"
+
+// KVMSessionProvider implements the audio.SessionProvider interface
+type KVMSessionProvider struct{}
+
+// IsSessionActive returns whether there's an active session
+func (k *KVMSessionProvider) IsSessionActive() bool {
+	return currentSession != nil
+}
+
+// GetAudioInputManager returns the current session's audio input manager
+func (k *KVMSessionProvider) GetAudioInputManager() *audio.AudioInputManager {
+	if currentSession == nil {
+		return nil
+	}
+	return currentSession.AudioInputManager
+}
+
+// initializeAudioSessionProvider sets up the session provider for the audio package
+func initializeAudioSessionProvider() {
+	audio.SetSessionProvider(&KVMSessionProvider{})
+}
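
The new wiring relies on KVMSessionProvider continuing to satisfy audio.SessionProvider, since the dependency now points from the kvm package into internal/audio rather than the other way around. A small sketch (not in this commit) of a compile-time assertion that would catch signature drift early if placed next to the code above:

```go
package kvm

import "github.com/jetkvm/kvm/internal/audio"

// Compile-time check that *KVMSessionProvider implements audio.SessionProvider;
// the blank identifier discards the value, so this adds no runtime cost.
var _ audio.SessionProvider = (*KVMSessionProvider)(nil)
```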

web.go (10 changed lines)

@@ -175,7 +175,7 @@ func setupRouter() *gin.Engine {
 		audio.SetAudioMuted(req.Muted)
 		// Broadcast audio mute state change via WebSocket
-		broadcaster := GetAudioEventBroadcaster()
+		broadcaster := audio.GetAudioEventBroadcaster()
 		broadcaster.BroadcastAudioMuteChanged(req.Muted)
 		c.JSON(200, gin.H{"muted": req.Muted})
@@ -312,7 +312,7 @@ func setupRouter() *gin.Engine {
 		}
 		// Broadcast microphone state change via WebSocket
-		broadcaster := GetAudioEventBroadcaster()
+		broadcaster := audio.GetAudioEventBroadcaster()
 		broadcaster.BroadcastMicrophoneStateChanged(true, true)
 		c.JSON(200, gin.H{
@@ -347,7 +347,7 @@ func setupRouter() *gin.Engine {
 		audio.StopNonBlockingAudioInput()
 		// Broadcast microphone state change via WebSocket
-		broadcaster := GetAudioEventBroadcaster()
+		broadcaster := audio.GetAudioEventBroadcaster()
 		broadcaster.BroadcastMicrophoneStateChanged(false, true)
 		c.JSON(200, gin.H{
@@ -547,7 +547,7 @@ func handleWebRTCSignalWsMessages(
 			setCloudConnectionState(CloudConnectionStateDisconnected)
 		}
 		// Clean up audio event subscription
-		broadcaster := GetAudioEventBroadcaster()
+		broadcaster := audio.GetAudioEventBroadcaster()
 		broadcaster.Unsubscribe(connectionID)
 		cancelRun()
 	}()
@@ -708,7 +708,7 @@ func handleWebRTCSignalWsMessages(
 			}
 		} else if message.Type == "subscribe-audio-events" {
 			l.Info().Msg("client subscribing to audio events")
-			broadcaster := GetAudioEventBroadcaster()
+			broadcaster := audio.GetAudioEventBroadcaster()
 			broadcaster.Subscribe(connectionID, wsCon, runCtx, &l)
 		}
 	}
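
For context on the "subscribe-audio-events" message handled above, a minimal client sketch using the same coder/websocket packages the server imports; the signaling URL and the plain map payload are illustrative assumptions, not taken from this commit:

```go
package main

import (
	"context"
	"log"

	"github.com/coder/websocket"
	"github.com/coder/websocket/wsjson"
)

func main() {
	ctx := context.Background()

	// The path below is illustrative; use the device's actual signaling endpoint.
	conn, _, err := websocket.Dial(ctx, "ws://jetkvm.local/webrtc/signaling", nil)
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close(websocket.StatusNormalClosure, "done")

	// Ask the server to start pushing audio events (mute changes, metrics)
	// over this connection, matching the handler's "subscribe-audio-events" type.
	if err := wsjson.Write(ctx, conn, map[string]any{"type": "subscribe-audio-events"}); err != nil {
		log.Fatal(err)
	}
}
```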