feat(audio): add microphone stop endpoint and improve mute handling

- Implement new POST /microphone/stop endpoint
- Refactor mute handling to properly start/stop audio processes
- Add callback mechanism for audio relay to reconnect to current session
- Simplify UI microphone controls by combining mute/start-stop functionality
Alex P 2025-09-06 20:17:29 +00:00
parent 5a0dce9984
commit b267348084
5 changed files with 290 additions and 90 deletions
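For reference, the endpoints this commit adds or reworks can be exercised as below. This is a hypothetical client sketch: the base URL and the omitted auth are placeholders, and only the routes themselves come from the web.go hunk at the bottom of this diff.

// Hypothetical client sketch. The routes are registered in a protected
// group (see web.go below), so a real call would also need the session
// auth; the jetkvm.local host is a placeholder.
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func post(path, body string) {
	resp, err := http.Post("http://jetkvm.local"+path, "application/json", bytes.NewBufferString(body))
	if err != nil {
		fmt.Println(path, "->", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println(path, "->", resp.Status)
}

func main() {
	post("/microphone/start", "")          // start microphone subprocess, add WebRTC tracks
	post("/microphone/stop", "")           // new in this commit: stop subprocess, remove tracks
	post("/audio/mute", `{"muted":true}`)  // stops the audio output subprocess and relay
	post("/audio/mute", `{"muted":false}`) // restarts both and reattaches the session track
}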


@@ -7,6 +7,7 @@ import (
"github.com/coder/websocket"
"github.com/gin-gonic/gin"
"github.com/jetkvm/kvm/internal/audio"
"github.com/pion/webrtc/v4"
"github.com/rs/zerolog"
)
@@ -16,9 +17,81 @@ func initAudioControlService() {
if audioControlService == nil {
sessionProvider := &SessionProviderImpl{}
audioControlService = audio.NewAudioControlService(sessionProvider, logger)
// Set up callback for audio relay to get current session's audio track
audio.SetCurrentSessionCallback(func() audio.AudioTrackWriter {
return GetCurrentSessionAudioTrack()
})
}
}
// --- Global Convenience Functions for Audio Control ---
// StopAudioOutputAndRemoveTracks is a global helper to stop audio output subprocess and remove WebRTC tracks
func StopAudioOutputAndRemoveTracks() error {
initAudioControlService()
return audioControlService.MuteAudio(true)
}
// StartAudioOutputAndAddTracks is a global helper to start audio output subprocess and add WebRTC tracks
func StartAudioOutputAndAddTracks() error {
initAudioControlService()
return audioControlService.MuteAudio(false)
}
// StopMicrophoneAndRemoveTracks is a global helper to stop microphone subprocess and remove WebRTC tracks
func StopMicrophoneAndRemoveTracks() error {
initAudioControlService()
return audioControlService.MuteMicrophone(true)
}
// StartMicrophoneAndAddTracks is a global helper to start microphone subprocess and add WebRTC tracks
func StartMicrophoneAndAddTracks() error {
initAudioControlService()
return audioControlService.MuteMicrophone(false)
}
// IsAudioOutputActive is a global helper to check if audio output subprocess is running
func IsAudioOutputActive() bool {
initAudioControlService()
return audioControlService.IsAudioOutputActive()
}
// IsMicrophoneActive is a global helper to check if microphone subprocess is running
func IsMicrophoneActive() bool {
initAudioControlService()
return audioControlService.IsMicrophoneActive()
}
// ResetMicrophone is a global helper to reset the microphone
func ResetMicrophone() error {
initAudioControlService()
return audioControlService.ResetMicrophone()
}
// GetCurrentSessionAudioTrack returns the current session's audio track for audio relay
func GetCurrentSessionAudioTrack() *webrtc.TrackLocalStaticSample {
if currentSession != nil {
return currentSession.AudioTrack
}
return nil
}
// ConnectRelayToCurrentSession connects the audio relay to the current WebRTC session
func ConnectRelayToCurrentSession() error {
if currentTrack := GetCurrentSessionAudioTrack(); currentTrack != nil {
err := audio.UpdateAudioRelayTrack(currentTrack)
if err != nil {
logger.Error().Err(err).Msg("failed to connect current session's audio track to relay")
return err
}
logger.Info().Msg("connected current session's audio track to relay")
return nil
}
logger.Warn().Msg("no current session audio track found")
return nil
}
// handleAudioMute handles POST /audio/mute requests
func handleAudioMute(c *gin.Context) {
type muteReq struct {
@@ -29,9 +102,14 @@ func handleAudioMute(c *gin.Context) {
c.JSON(400, gin.H{"error": "invalid request"})
return
}
initAudioControlService()
err := audioControlService.MuteAudio(req.Muted)
var err error
if req.Muted {
err = StopAudioOutputAndRemoveTracks()
} else {
err = StartAudioOutputAndAddTracks()
}
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
@@ -45,9 +123,18 @@ func handleAudioMute(c *gin.Context) {
// handleMicrophoneStart handles POST /microphone/start requests
func handleMicrophoneStart(c *gin.Context) {
initAudioControlService()
err := StartMicrophoneAndAddTracks()
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
err := audioControlService.StartMicrophone()
c.JSON(http.StatusOK, gin.H{"success": true})
}
// handleMicrophoneStop handles POST /microphone/stop requests
func handleMicrophoneStop(c *gin.Context) {
err := StopMicrophoneAndRemoveTracks()
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
@@ -67,9 +154,13 @@ func handleMicrophoneMute(c *gin.Context) {
return
}
initAudioControlService()
var err error
if req.Muted {
err = StopMicrophoneAndRemoveTracks()
} else {
err = StartMicrophoneAndAddTracks()
}
err := audioControlService.MuteMicrophone(req.Muted)
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
@@ -80,9 +171,7 @@ func handleMicrophoneMute(c *gin.Context) {
// handleMicrophoneReset handles POST /microphone/reset requests
func handleMicrophoneReset(c *gin.Context) {
initAudioControlService()
err := audioControlService.ResetMicrophone()
err := ResetMicrophone()
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
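
The SetCurrentSessionCallback wiring near the top of this file exists so the audio package can fetch the current session's track without importing the package that owns the session, which would create an import cycle. A stripped-down sketch of the pattern, with illustrative (not the repo's exact) types:

// Sketch of the callback-injection pattern. The session owner registers a
// getter at startup; the audio package calls it whenever the relay restarts
// and needs to rebind to the live track. Names here are illustrative.
package audio

// AudioTrackWriter stands in for the real interface, which wraps pion's
// *webrtc.TrackLocalStaticSample.
type AudioTrackWriter interface {
	WriteSample(data []byte) error
}

var currentSessionCallback func() AudioTrackWriter

// SetCurrentSessionCallback is called once from the main package during init.
func SetCurrentSessionCallback(cb func() AudioTrackWriter) {
	currentSessionCallback = cb
}

// currentSessionTrack is what relay-restart code uses to find the live track.
func currentSessionTrack() AudioTrackWriter {
	if currentSessionCallback == nil {
		return nil
	}
	return currentSessionCallback()
}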


@@ -22,10 +22,49 @@ func NewAudioControlService(sessionProvider SessionProvider, logger *zerolog.Log
}
}
// MuteAudio sets the audio mute state
// MuteAudio sets the audio mute state by controlling the audio output subprocess
func (s *AudioControlService) MuteAudio(muted bool) error {
SetAudioMuted(muted)
SetAudioRelayMuted(muted)
if muted {
// Mute: Stop audio output subprocess and relay
supervisor := GetAudioOutputSupervisor()
if supervisor != nil {
supervisor.Stop()
s.logger.Info().Msg("audio output supervisor stopped")
}
StopAudioRelay()
SetAudioMuted(true)
s.logger.Info().Msg("audio output muted (subprocess and relay stopped)")
} else {
// Unmute: Start audio output subprocess and relay
if !s.sessionProvider.IsSessionActive() {
return errors.New("no active session for audio unmute")
}
supervisor := GetAudioOutputSupervisor()
if supervisor != nil {
err := supervisor.Start()
if err != nil {
s.logger.Error().Err(err).Msg("failed to start audio output supervisor during unmute")
return err
}
s.logger.Info().Msg("audio output supervisor started")
}
// Start audio relay
err := StartAudioRelay(nil)
if err != nil {
s.logger.Error().Err(err).Msg("failed to start audio relay during unmute")
return err
}
// Connect the relay to the current WebRTC session's audio track
// This is needed because UpdateAudioRelayTrack is normally only called during session creation
if err := connectRelayToCurrentSession(); err != nil {
s.logger.Warn().Err(err).Msg("failed to connect relay to current session, audio may not work")
}
SetAudioMuted(false)
s.logger.Info().Msg("audio output unmuted (subprocess and relay started)")
}
// Broadcast audio mute state change via WebSocket
broadcaster := GetAudioEventBroadcaster()
@@ -59,16 +98,51 @@ func (s *AudioControlService) StartMicrophone() error {
return nil
}
// MuteMicrophone sets the microphone mute state
// StopMicrophone stops the microphone input
func (s *AudioControlService) StopMicrophone() error {
if !s.sessionProvider.IsSessionActive() {
return errors.New("no active session")
}
audioInputManager := s.sessionProvider.GetAudioInputManager()
if audioInputManager == nil {
return errors.New("audio input manager not available")
}
if !audioInputManager.IsRunning() {
s.logger.Info().Msg("microphone already stopped")
return nil
}
audioInputManager.Stop()
s.logger.Info().Msg("microphone stopped successfully")
return nil
}
// MuteMicrophone sets the microphone mute state by controlling the microphone process
func (s *AudioControlService) MuteMicrophone(muted bool) error {
// Set microphone mute state using the audio relay
SetAudioRelayMuted(muted)
if muted {
// Mute: Stop microphone process
err := s.StopMicrophone()
if err != nil {
s.logger.Error().Err(err).Msg("failed to stop microphone during mute")
return err
}
s.logger.Info().Msg("microphone muted (process stopped)")
} else {
// Unmute: Start microphone process
err := s.StartMicrophone()
if err != nil {
s.logger.Error().Err(err).Msg("failed to start microphone during unmute")
return err
}
s.logger.Info().Msg("microphone unmuted (process started)")
}
// Broadcast microphone mute state change via WebSocket
broadcaster := GetAudioEventBroadcaster()
broadcaster.BroadcastAudioDeviceChanged(!muted, "microphone_mute_changed")
s.logger.Info().Bool("muted", muted).Msg("microphone mute state updated")
return nil
}
@@ -174,3 +248,22 @@ func (s *AudioControlService) UnsubscribeFromAudioEvents(connectionID string, lo
broadcaster := GetAudioEventBroadcaster()
broadcaster.Unsubscribe(connectionID)
}
// IsAudioOutputActive returns whether the audio output subprocess is running
func (s *AudioControlService) IsAudioOutputActive() bool {
return !IsAudioMuted() && IsAudioRelayRunning()
}
// IsMicrophoneActive returns whether the microphone subprocess is running
func (s *AudioControlService) IsMicrophoneActive() bool {
if !s.sessionProvider.IsSessionActive() {
return false
}
audioInputManager := s.sessionProvider.GetAudioInputManager()
if audioInputManager == nil {
return false
}
return audioInputManager.IsRunning()
}
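
Note the strict ordering in the unmute branch of MuteAudio above: subprocess first, relay second, track rebinding third, mute flag last, since the relay needs a running subprocess to read from and a live track to write to. A condensed sketch of that ordering, with stubs standing in for the real package-level calls:

// Condensed view of the MuteAudio(false) path. The stub functions stand in
// for the real calls named in their comments; only the ordering is the point.
package main

import (
	"errors"
	"fmt"
)

var sessionActive = true // stand-in for sessionProvider.IsSessionActive()

func startSupervisor() error { return nil } // real: GetAudioOutputSupervisor().Start()
func startRelay() error      { return nil } // real: StartAudioRelay(nil)
func rebindTrack() error     { return nil } // real: connectRelayToCurrentSession()
func setMuted(muted bool)    {}             // real: SetAudioMuted(muted)

func unmute() error {
	if !sessionActive {
		return errors.New("no active session for audio unmute")
	}
	// 1. Start the output subprocess so there are frames to forward.
	if err := startSupervisor(); err != nil {
		return err
	}
	// 2. Start the relay that forwards frames to the WebRTC track.
	if err := startRelay(); err != nil {
		return err
	}
	// 3. Rebind the relay to the current session's track, since
	// UpdateAudioRelayTrack normally runs only at session creation.
	// The real code logs a warning here instead of aborting.
	if err := rebindTrack(); err != nil {
		fmt.Println("warning:", err)
	}
	// 4. Flip the mute flag only once the pipeline is actually up.
	setMuted(false)
	return nil
}

func main() { _ = unmute() }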


@@ -1,6 +1,7 @@
package audio
import (
"errors"
"sync"
)
@@ -107,3 +108,37 @@ func UpdateAudioRelayTrack(audioTrack AudioTrackWriter) error {
globalRelay.UpdateTrack(audioTrack)
return nil
}
// CurrentSessionCallback is a function type for getting the current session's audio track
type CurrentSessionCallback func() AudioTrackWriter
// currentSessionCallback holds the callback function to get the current session's audio track
var currentSessionCallback CurrentSessionCallback
// SetCurrentSessionCallback sets the callback function to get the current session's audio track
func SetCurrentSessionCallback(callback CurrentSessionCallback) {
currentSessionCallback = callback
}
// connectRelayToCurrentSession connects the audio relay to the current WebRTC session's audio track
// This is used when restarting the relay during unmute operations
func connectRelayToCurrentSession() error {
if currentSessionCallback == nil {
return errors.New("no current session callback set")
}
track := currentSessionCallback()
if track == nil {
return errors.New("no current session audio track available")
}
relayMutex.Lock()
defer relayMutex.Unlock()
if globalRelay != nil {
globalRelay.UpdateTrack(track)
return nil
}
return errors.New("no global relay running")
}
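
One thing to watch in connectRelayToCurrentSession: currentSessionCallback is a plain package-level variable read without synchronization, which is safe only if SetCurrentSessionCallback runs exactly once at startup, before any relay restart. If it can ever race with an unmute, an atomic holder is the cheap fix. A hypothetical variant, not part of this commit:

// Hypothetical race-safe callback holder (atomic.Pointer needs Go 1.19+).
// This commit stores the callback in a plain variable, which is fine only
// under the set-once-at-startup assumption.
package audio

import "sync/atomic"

// AudioTrackWriter repeats the interface defined elsewhere in this package
// so the sketch stands alone.
type AudioTrackWriter interface {
	WriteSample(data []byte) error
}

type trackGetter func() AudioTrackWriter

var sessionCallback atomic.Pointer[trackGetter]

func SetCurrentSessionCallback(cb trackGetter) {
	sessionCallback.Store(&cb)
}

func currentSessionTrack() AudioTrackWriter {
	if p := sessionCallback.Load(); p != nil {
		return (*p)()
	}
	return nil
}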


@@ -69,10 +69,8 @@ export default function AudioControlPopover({ microphone }: AudioControlPopoverP
// Microphone state from props
const {
isMicrophoneActive,
isMicrophoneMuted,
startMicrophone,
stopMicrophone,
toggleMicrophoneMute,
syncMicrophoneState,
// Loading states
isStarting,
@@ -138,15 +136,35 @@ export default function AudioControlPopover({ microphone }: AudioControlPopoverP
};
const handleToggleMute = async () => {
const now = Date.now();
// Prevent rapid clicking
if (isLoading || (now - lastClickTime < CLICK_COOLDOWN)) {
return;
}
setLastClickTime(now);
setIsLoading(true);
try {
const resp = await api.POST("/audio/mute", { muted: !isMuted });
if (!resp.ok) {
// Failed to toggle mute
if (isMuted) {
// Unmute: Start audio output process and notify backend
const resp = await api.POST("/audio/mute", { muted: false });
if (!resp.ok) {
throw new Error(`Failed to unmute audio: ${resp.status}`);
}
// WebSocket will handle the state update automatically
} else {
// Mute: Stop audio output process and notify backend
const resp = await api.POST("/audio/mute", { muted: true });
if (!resp.ok) {
throw new Error(`Failed to mute audio: ${resp.status}`);
}
// WebSocket will handle the state update automatically
}
// WebSocket will handle the state update automatically
} catch {
// Failed to toggle mute
} catch (error) {
const errorMessage = error instanceof Error ? error.message : "Failed to toggle audio mute";
notifications.error(errorMessage);
} finally {
setIsLoading(false);
}
@@ -179,27 +197,6 @@ export default function AudioControlPopover({ microphone }: AudioControlPopoverP
}
};
const handleToggleMicrophone = async () => {
const now = Date.now();
// Prevent rapid clicking - if any operation is in progress or within cooldown, ignore the click
if (isStarting || isStopping || isToggling || (now - lastClickTime < CLICK_COOLDOWN)) {
return;
}
setLastClickTime(now);
try {
const result = isMicrophoneActive ? await stopMicrophone() : await startMicrophone(selectedInputDevice);
if (!result.success && result.error) {
notifications.error(result.error.message);
}
} catch {
// Failed to toggle microphone
notifications.error("An unexpected error occurred");
}
};
const handleToggleMicrophoneMute = async () => {
const now = Date.now();
@@ -211,13 +208,22 @@ export default function AudioControlPopover({ microphone }: AudioControlPopoverP
setLastClickTime(now);
try {
const result = await toggleMicrophoneMute();
if (!result.success && result.error) {
notifications.error(result.error.message);
if (isMicrophoneActive) {
// Microphone is active: stop the microphone process and WebRTC tracks
const result = await stopMicrophone();
if (!result.success && result.error) {
notifications.error(result.error.message);
}
} else {
// Microphone is inactive: start the microphone process and WebRTC tracks
const result = await startMicrophone(selectedInputDevice);
if (!result.success && result.error) {
notifications.error(result.error.message);
}
}
} catch {
// Failed to toggle microphone mute
notifications.error("Failed to toggle microphone mute");
} catch (error) {
const errorMessage = error instanceof Error ? error.message : "Failed to toggle microphone";
notifications.error(errorMessage);
}
};
@@ -225,7 +231,7 @@ export default function AudioControlPopover({ microphone }: AudioControlPopoverP
const handleMicrophoneDeviceChange = async (deviceId: string) => {
setSelectedInputDevice(deviceId);
// If microphone is currently active, restart it with the new device
// If microphone is currently active (unmuted), restart it with the new device
if (isMicrophoneActive) {
try {
// Stop current microphone
@@ -312,50 +318,26 @@ export default function AudioControlPopover({ microphone }: AudioControlPopoverP
<div className="flex items-center justify-between rounded-lg bg-slate-50 p-3 dark:bg-slate-700">
<div className="flex items-center gap-3">
{isMicrophoneActive ? (
isMicrophoneMuted ? (
<MdMicOff className="h-5 w-5 text-yellow-500" />
) : (
<MdMic className="h-5 w-5 text-green-500" />
)
<MdMic className="h-5 w-5 text-green-500" />
) : (
<MdMicOff className="h-5 w-5 text-red-500" />
)}
<span className="font-medium text-slate-900 dark:text-slate-100">
{!isMicrophoneActive
? "Inactive"
: isMicrophoneMuted
? "Muted"
: "Active"
}
{isMicrophoneActive ? "Unmuted" : "Muted"}
</span>
</div>
<div className="flex gap-2">
<Button
size="SM"
theme={isMicrophoneActive ? "danger" : "primary"}
text={
isStarting ? "Starting..." :
isStopping ? "Stopping..." :
isMicrophoneActive ? "Stop" : "Start"
}
onClick={handleToggleMicrophone}
disabled={isStarting || isStopping || isToggling}
loading={isStarting || isStopping}
/>
{isMicrophoneActive && (
<Button
size="SM"
theme={isMicrophoneMuted ? "danger" : "light"}
text={
isToggling ? (isMicrophoneMuted ? "Unmuting..." : "Muting...") :
isMicrophoneMuted ? "Unmute" : "Mute"
}
onClick={handleToggleMicrophoneMute}
disabled={isStarting || isStopping || isToggling}
loading={isToggling}
/>
)}
</div>
<Button
size="SM"
theme={isMicrophoneActive ? "danger" : "primary"}
text={
isStarting ? "Unmuting..." :
isStopping ? "Muting..." :
isMicrophoneActive ? "Mute" : "Unmute"
}
onClick={handleToggleMicrophoneMute}
disabled={isStarting || isStopping || isToggling}
loading={isStarting || isStopping}
/>
</div>

web.go

@@ -163,6 +163,7 @@ func setupRouter() *gin.Engine {
protected.GET("/microphone/quality", handleMicrophoneQuality)
protected.POST("/microphone/quality", handleSetMicrophoneQuality)
protected.POST("/microphone/start", handleMicrophoneStart)
protected.POST("/microphone/stop", handleMicrophoneStop)
protected.POST("/microphone/mute", handleMicrophoneMute)
protected.POST("/microphone/reset", handleMicrophoneReset)
}
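
A hypothetical routing test for the new endpoint; the handler is stubbed because the real handleMicrophoneStop needs the audio control service, so only the route shape is being checked:

// Hypothetical test sketch: confirms a POST /microphone/stop route wired the
// same way as in setupRouter responds. The inline handler is a stub, not the
// real handleMicrophoneStop.
package main

import (
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/gin-gonic/gin"
)

func TestMicrophoneStopRoute(t *testing.T) {
	gin.SetMode(gin.TestMode)
	r := gin.New()
	r.POST("/microphone/stop", func(c *gin.Context) {
		c.JSON(http.StatusOK, gin.H{"success": true})
	})

	req := httptest.NewRequest(http.MethodPost, "/microphone/stop", nil)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)

	if w.Code != http.StatusOK {
		t.Fatalf("expected 200, got %d", w.Code)
	}
}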