mirror of https://github.com/jetkvm/kvm.git
Merge branch 'feat/dual-audio-mode' into feat/audio-support
commit 1bca60ae6b

audio.go (193 changed lines)
@@ -1,6 +1,7 @@
 package kvm
 
 import (
+    "fmt"
     "io"
     "sync"
     "sync/atomic"
@@ -20,8 +21,8 @@ var (
     audioMutex       sync.Mutex
     outputSupervisor *audio.Supervisor
     inputSupervisor  *audio.Supervisor
-    outputClient     *audio.IPCClient
-    inputClient      *audio.IPCClient
+    outputSource     audio.AudioSource
+    inputSource      audio.AudioSource
     outputRelay      *audio.OutputRelay
     inputRelay       *audio.InputRelay
     audioInitialized bool
@@ -66,61 +67,103 @@ func startAudioSubprocesses() error {
         return nil
     }
 
-    // Start output subprocess if not running and enabled
-    if outputSupervisor == nil && audioOutputEnabled.Load() {
+    // Start output audio if not running and enabled
+    if outputSource == nil && audioOutputEnabled.Load() {
         alsaDevice := "hw:0,0" // HDMI
         if useUSBForAudioOutput {
             alsaDevice = "hw:1,0" // USB
         }
 
-        outputSupervisor = audio.NewSupervisor(
-            "audio-output",
-            audio.GetAudioOutputBinaryPath(),
-            socketPathOutput,
-            []string{
-                "ALSA_CAPTURE_DEVICE=" + alsaDevice,
-                "OPUS_BITRATE=128000",
-                "OPUS_COMPLEXITY=5",
-            },
-        )
-
-        if err := outputSupervisor.Start(); err != nil {
-            audioLogger.Error().Err(err).Msg("Failed to start audio output supervisor")
-            outputSupervisor = nil
-            return err
+        ensureConfigLoaded()
+        audioMode := config.AudioMode
+        if audioMode == "" {
+            audioMode = "subprocess" // Default to subprocess
         }
 
-        outputClient = audio.NewIPCClient("audio-output", socketPathOutput, 0x4A4B4F55)
+        if audioMode == "in-process" {
+            // In-process CGO mode
+            outputSource = audio.NewCgoOutputSource(alsaDevice)
+            audioLogger.Debug().
+                Str("mode", "in-process").
+                Str("device", alsaDevice).
+                Msg("Audio output configured for in-process mode")
+        } else {
+            // Subprocess mode (default)
+            outputSupervisor = audio.NewSupervisor(
+                "audio-output",
+                audio.GetAudioOutputBinaryPath(),
+                socketPathOutput,
+                []string{
+                    "ALSA_CAPTURE_DEVICE=" + alsaDevice,
+                    "OPUS_BITRATE=128000",
+                    "OPUS_COMPLEXITY=5",
+                },
+            )
+
+            if err := outputSupervisor.Start(); err != nil {
+                audioLogger.Error().Err(err).Msg("Failed to start audio output supervisor")
+                outputSupervisor = nil
+                return err
+            }
+
+            outputSource = audio.NewIPCSource("audio-output", socketPathOutput, 0x4A4B4F55)
+            audioLogger.Debug().
+                Str("mode", "subprocess").
+                Str("device", alsaDevice).
+                Msg("Audio output configured for subprocess mode")
+        }
 
         if currentAudioTrack != nil {
-            outputRelay = audio.NewOutputRelay(outputClient, currentAudioTrack)
+            outputRelay = audio.NewOutputRelay(outputSource, currentAudioTrack)
             if err := outputRelay.Start(); err != nil {
                 audioLogger.Error().Err(err).Msg("Failed to start audio output relay")
             }
         }
     }
 
-    // Start input subprocess if not running, USB audio enabled, and input enabled
+    // Start input audio if not running, USB audio enabled, and input enabled
     ensureConfigLoaded()
-    if inputSupervisor == nil && audioInputEnabled.Load() && config.UsbDevices != nil && config.UsbDevices.Audio {
-        inputSupervisor = audio.NewSupervisor(
-            "audio-input",
-            audio.GetAudioInputBinaryPath(),
-            socketPathInput,
-            []string{
-                "ALSA_PLAYBACK_DEVICE=hw:1,0",
-                "OPUS_BITRATE=128000",
-            },
-        )
+    if inputSource == nil && audioInputEnabled.Load() && config.UsbDevices != nil && config.UsbDevices.Audio {
+        alsaPlaybackDevice := "hw:1,0" // USB speakers
 
-        if err := inputSupervisor.Start(); err != nil {
-            audioLogger.Error().Err(err).Msg("Failed to start input supervisor")
-            inputSupervisor = nil
-            return err
+        audioMode := config.AudioMode
+        if audioMode == "" {
+            audioMode = "subprocess" // Default to subprocess
         }
 
-        inputClient = audio.NewIPCClient("audio-input", socketPathInput, 0x4A4B4D49)
-        inputRelay = audio.NewInputRelay(inputClient)
+        if audioMode == "in-process" {
+            // In-process CGO mode
+            inputSource = audio.NewCgoInputSource(alsaPlaybackDevice)
+            audioLogger.Debug().
+                Str("mode", "in-process").
+                Str("device", alsaPlaybackDevice).
+                Msg("Audio input configured for in-process mode")
+        } else {
+            // Subprocess mode (default)
+            inputSupervisor = audio.NewSupervisor(
+                "audio-input",
+                audio.GetAudioInputBinaryPath(),
+                socketPathInput,
+                []string{
+                    "ALSA_PLAYBACK_DEVICE=hw:1,0",
+                    "OPUS_BITRATE=128000",
+                },
+            )
+
+            if err := inputSupervisor.Start(); err != nil {
+                audioLogger.Error().Err(err).Msg("Failed to start input supervisor")
+                inputSupervisor = nil
+                return err
+            }
+
+            inputSource = audio.NewIPCSource("audio-input", socketPathInput, 0x4A4B4D49)
+            audioLogger.Debug().
+                Str("mode", "subprocess").
+                Str("device", alsaPlaybackDevice).
+                Msg("Audio input configured for subprocess mode")
+        }
+
+        inputRelay = audio.NewInputRelay(inputSource)
         if err := inputRelay.Start(); err != nil {
             audioLogger.Error().Err(err).Msg("Failed to start input relay")
         }
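The output and input branches above follow the same shape: read config.AudioMode (empty means "subprocess"), then either build a CGO-backed source in this process or start a supervised subprocess and attach an IPC-backed source, with both ending up behind the audio.AudioSource interface. A minimal sketch of that dispatch for the output path; the helper name newOutputSource is illustrative only and assumes the kvm package context (socketPathOutput, audio.*), it is not part of this commit:

// Sketch: the mode dispatch used above, isolated for the output path.
func newOutputSource(mode, alsaDevice string) (audio.AudioSource, *audio.Supervisor, error) {
    if mode == "in-process" {
        // CGO mode: audio is captured and Opus-encoded inside this process.
        return audio.NewCgoOutputSource(alsaDevice), nil, nil
    }
    // Subprocess mode (default): a supervised child owns ALSA/Opus and the
    // main process reads encoded frames over a Unix socket.
    sup := audio.NewSupervisor(
        "audio-output",
        audio.GetAudioOutputBinaryPath(),
        socketPathOutput,
        []string{"ALSA_CAPTURE_DEVICE=" + alsaDevice, "OPUS_BITRATE=128000", "OPUS_COMPLEXITY=5"},
    )
    if err := sup.Start(); err != nil {
        return nil, nil, err
    }
    return audio.NewIPCSource("audio-output", socketPathOutput, 0x4A4B4F55), sup, nil
}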
@@ -135,9 +178,9 @@ func stopOutputSubprocessLocked() {
         outputRelay.Stop()
         outputRelay = nil
     }
-    if outputClient != nil {
-        outputClient.Disconnect()
-        outputClient = nil
+    if outputSource != nil {
+        outputSource.Disconnect()
+        outputSource = nil
     }
     if outputSupervisor != nil {
         outputSupervisor.Stop()
@@ -151,9 +194,9 @@ func stopInputSubprocessLocked() {
         inputRelay.Stop()
         inputRelay = nil
     }
-    if inputClient != nil {
-        inputClient.Disconnect()
-        inputClient = nil
+    if inputSource != nil {
+        inputSource.Disconnect()
+        inputSource = nil
     }
     if inputSupervisor != nil {
         inputSupervisor.Stop()
@@ -202,8 +245,8 @@ func setAudioTrack(audioTrack *webrtc.TrackLocalStaticSample) {
         outputRelay = nil
     }
 
-    if outputClient != nil {
-        outputRelay = audio.NewOutputRelay(outputClient, audioTrack)
+    if outputSource != nil {
+        outputRelay = audio.NewOutputRelay(outputSource, audioTrack)
         if err := outputRelay.Start(); err != nil {
             audioLogger.Error().Err(err).Msg("Failed to start output relay")
         }
@@ -248,6 +291,50 @@ func SetAudioOutputSource(useUSB bool) error {
     return nil
 }
 
+// SetAudioMode switches between subprocess and in-process audio modes
+func SetAudioMode(mode string) error {
+    if mode != "subprocess" && mode != "in-process" {
+        return fmt.Errorf("invalid audio mode: %s (must be 'subprocess' or 'in-process')", mode)
+    }
+
+    audioMutex.Lock()
+    defer audioMutex.Unlock()
+
+    ensureConfigLoaded()
+    if config.AudioMode == mode {
+        return nil // Already in desired mode
+    }
+
+    audioLogger.Info().
+        Str("old_mode", config.AudioMode).
+        Str("new_mode", mode).
+        Msg("Switching audio mode")
+
+    // Save new mode to config
+    config.AudioMode = mode
+    if err := SaveConfig(); err != nil {
+        audioLogger.Error().Err(err).Msg("Failed to save config")
+        return err
+    }
+
+    // Stop all audio (both output and input)
+    stopAudioSubprocessesLocked()
+
+    // Restart if there are active connections
+    if activeConnections.Load() > 0 {
+        audioMutex.Unlock()
+        err := startAudioSubprocesses()
+        audioMutex.Lock()
+        if err != nil {
+            audioLogger.Error().Err(err).Msg("Failed to restart audio with new mode")
+            return err
+        }
+    }
+
+    audioLogger.Info().Str("mode", mode).Msg("Audio mode switch completed")
+    return nil
+}
+
 func setPendingInputTrack(track *webrtc.TrackRemote) {
     audioMutex.Lock()
     defer audioMutex.Unlock()
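SetAudioMode above briefly releases audioMutex around the restart. A compressed sketch of that lock hand-off, assuming startAudioSubprocesses acquires audioMutex itself while the *Locked helpers expect the caller to already hold it (the wrapper name restartAudioForMode is hypothetical, not part of this commit):

// Sketch: why the mutex is unlocked and re-locked around the restart.
func restartAudioForMode() error {
    audioMutex.Lock()
    defer audioMutex.Unlock()

    stopAudioSubprocessesLocked() // we hold the lock, so use the *Locked variant

    if activeConnections.Load() > 0 {
        audioMutex.Unlock()             // hand the lock to startAudioSubprocesses
        err := startAudioSubprocesses() // rebuilds sources in the newly saved mode
        audioMutex.Lock()               // re-acquire so the deferred Unlock stays balanced
        if err != nil {
            return err
        }
    }
    return nil
}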
@@ -329,23 +416,23 @@ func handleInputTrackForSession(track *webrtc.TrackRemote) {
             continue // Drop frame but keep reading
         }
 
-        // Get client in single mutex operation (hot path optimization)
+        // Get source in single mutex operation (hot path optimization)
         audioMutex.Lock()
-        client := inputClient
+        source := inputSource
         audioMutex.Unlock()
 
-        if client == nil {
+        if source == nil {
             continue // No relay, drop frame but keep reading
         }
 
-        if !client.IsConnected() {
-            if err := client.Connect(); err != nil {
+        if !source.IsConnected() {
+            if err := source.Connect(); err != nil {
                 continue
             }
         }
 
-        if err := client.WriteMessage(0, opusData); err != nil {
-            client.Disconnect()
+        if err := source.WriteMessage(0, opusData); err != nil {
+            source.Disconnect()
         }
     }
 }
@@ -105,6 +105,7 @@ type Config struct {
     NetworkConfig     *network.NetworkConfig `json:"network_config"`
     DefaultLogLevel   string                 `json:"default_log_level"`
     AudioOutputSource string                 `json:"audio_output_source"` // "hdmi" or "usb"
+    AudioMode         string                 `json:"audio_mode"`          // "subprocess" or "in-process"
 }
 
 func (c *Config) GetDisplayRotation() uint16 {
@@ -165,6 +166,7 @@ var defaultConfig = &Config{
     NetworkConfig:     &network.NetworkConfig{},
     DefaultLogLevel:   "INFO",
     AudioOutputSource: "usb",
+    AudioMode:         "subprocess", // Default to subprocess mode for stability
 }
 
 var (
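For reference, a small runnable sketch of what the new field looks like once persisted, using only the audio-related fields and the JSON tags from the struct above (the real Config has many more fields):

// Sketch: the JSON produced by the audio-related struct tags.
package main

import (
    "encoding/json"
    "fmt"
)

type audioConfigFragment struct {
    AudioOutputSource string `json:"audio_output_source"` // "hdmi" or "usb"
    AudioMode         string `json:"audio_mode"`          // "subprocess" or "in-process"
}

func main() {
    b, _ := json.MarshalIndent(audioConfigFragment{
        AudioOutputSource: "usb",
        AudioMode:         "subprocess",
    }, "", "  ")
    fmt.Println(string(b))
    // Output:
    // {
    //   "audio_output_source": "usb",
    //   "audio_mode": "subprocess"
    // }
}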
@@ -0,0 +1,214 @@
+//go:build linux && (arm || arm64)
+
+package audio
+
+/*
+#cgo CFLAGS: -O3 -ffast-math -I/opt/jetkvm-audio-libs/alsa-lib-1.2.14/include -I/opt/jetkvm-audio-libs/opus-1.5.2/include
+#cgo LDFLAGS: /opt/jetkvm-audio-libs/alsa-lib-1.2.14/src/.libs/libasound.a /opt/jetkvm-audio-libs/opus-1.5.2/.libs/libopus.a -lm -ldl -lpthread
+
+#include <stdlib.h>
+#include "c/audio.c"
+*/
+import "C"
+import (
+    "fmt"
+    "os"
+    "sync"
+    "unsafe"
+
+    "github.com/jetkvm/kvm/internal/logging"
+    "github.com/rs/zerolog"
+)
+
+// CgoSource implements AudioSource via direct CGO calls to C audio functions (in-process)
+type CgoSource struct {
+    direction   string // "output" or "input"
+    alsaDevice  string
+    initialized bool
+    connected   bool
+    mu          sync.Mutex
+    logger      zerolog.Logger
+    opusBuf     []byte // Reusable buffer for Opus packets
+}
+
+// NewCgoOutputSource creates a new CGO audio source for output (HDMI/USB → browser)
+func NewCgoOutputSource(alsaDevice string) *CgoSource {
+    logger := logging.GetDefaultLogger().With().Str("component", "audio-output-cgo").Logger()
+
+    return &CgoSource{
+        direction:  "output",
+        alsaDevice: alsaDevice,
+        logger:     logger,
+        opusBuf:    make([]byte, ipcMaxFrameSize),
+    }
+}
+
+// NewCgoInputSource creates a new CGO audio source for input (browser → USB speakers)
+func NewCgoInputSource(alsaDevice string) *CgoSource {
+    logger := logging.GetDefaultLogger().With().Str("component", "audio-input-cgo").Logger()
+
+    return &CgoSource{
+        direction:  "input",
+        alsaDevice: alsaDevice,
+        logger:     logger,
+        opusBuf:    make([]byte, ipcMaxFrameSize),
+    }
+}
+
+// Connect initializes the C audio subsystem
+func (c *CgoSource) Connect() error {
+    c.mu.Lock()
+    defer c.mu.Unlock()
+
+    if c.connected {
+        return nil
+    }
+
+    // Set ALSA device via environment for C code to read via init_alsa_devices_from_env()
+    if c.direction == "output" {
+        // Set capture device for output path via environment variable
+        os.Setenv("ALSA_CAPTURE_DEVICE", c.alsaDevice)
+
+        // Initialize constants
+        C.update_audio_constants(
+            C.uint(128000), // bitrate
+            C.uchar(5),     // complexity
+            C.uint(48000),  // sample_rate
+            C.uchar(2),     // channels
+            C.ushort(960),  // frame_size
+            C.ushort(1500), // max_packet_size
+            C.uint(1000),   // sleep_us
+            C.uchar(5),     // max_attempts
+            C.uint(500000), // max_backoff_us
+        )
+
+        // Initialize capture (HDMI/USB → browser)
+        rc := C.jetkvm_audio_capture_init()
+        if rc != 0 {
+            c.logger.Error().Int("rc", int(rc)).Msg("Failed to initialize audio capture")
+            return fmt.Errorf("jetkvm_audio_capture_init failed: %d", rc)
+        }
+
+        c.logger.Debug().Str("device", c.alsaDevice).Msg("Audio capture initialized")
+    } else {
+        // Set playback device for input path via environment variable
+        os.Setenv("ALSA_PLAYBACK_DEVICE", c.alsaDevice)
+
+        // Initialize decoder constants
+        C.update_audio_decoder_constants(
+            C.uint(48000),  // sample_rate
+            C.uchar(2),     // channels
+            C.ushort(960),  // frame_size
+            C.ushort(1500), // max_packet_size
+            C.uint(1000),   // sleep_us
+            C.uchar(5),     // max_attempts
+            C.uint(500000), // max_backoff_us
+        )
+
+        // Initialize playback (browser → USB speakers)
+        rc := C.jetkvm_audio_playback_init()
+        if rc != 0 {
+            c.logger.Error().Int("rc", int(rc)).Msg("Failed to initialize audio playback")
+            return fmt.Errorf("jetkvm_audio_playback_init failed: %d", rc)
+        }
+
+        c.logger.Debug().Str("device", c.alsaDevice).Msg("Audio playback initialized")
+    }
+
+    c.connected = true
+    c.initialized = true
+    return nil
+}
+
+// Disconnect closes the C audio subsystem
+func (c *CgoSource) Disconnect() {
+    c.mu.Lock()
+    defer c.mu.Unlock()
+
+    if !c.connected {
+        return
+    }
+
+    if c.direction == "output" {
+        C.jetkvm_audio_capture_close()
+        c.logger.Debug().Msg("Audio capture closed")
+    } else {
+        C.jetkvm_audio_playback_close()
+        c.logger.Debug().Msg("Audio playback closed")
+    }
+
+    c.connected = false
+}
+
+// IsConnected returns true if currently connected
+func (c *CgoSource) IsConnected() bool {
+    c.mu.Lock()
+    defer c.mu.Unlock()
+    return c.connected
+}
+
+// ReadMessage reads the next audio frame from C audio subsystem
+// For output path: reads HDMI/USB audio and encodes to Opus
+// For input path: not used (input uses WriteMessage instead)
+// Returns message type (0 = Opus), payload data, and error
+func (c *CgoSource) ReadMessage() (uint8, []byte, error) {
+    c.mu.Lock()
+    defer c.mu.Unlock()
+
+    if !c.connected {
+        return 0, nil, fmt.Errorf("not connected")
+    }
+
+    if c.direction != "output" {
+        return 0, nil, fmt.Errorf("ReadMessage only supported for output direction")
+    }
+
+    // Call C function to read HDMI/USB audio and encode to Opus
+    // Returns Opus packet size (>0) or error (<0)
+    opusSize := C.jetkvm_audio_read_encode(unsafe.Pointer(&c.opusBuf[0]))
+
+    if opusSize < 0 {
+        return 0, nil, fmt.Errorf("jetkvm_audio_read_encode failed: %d", opusSize)
+    }
+
+    if opusSize == 0 {
+        // No data available (silence/DTX)
+        return 0, nil, nil
+    }
+
+    // Return slice of opusBuf - caller must use immediately
+    return ipcMsgTypeOpus, c.opusBuf[:opusSize], nil
+}
+
+// WriteMessage writes an Opus packet to the C audio subsystem for playback
+// Only used for input path (browser → USB speakers)
+func (c *CgoSource) WriteMessage(msgType uint8, payload []byte) error {
+    c.mu.Lock()
+    defer c.mu.Unlock()
+
+    if !c.connected {
+        return fmt.Errorf("not connected")
+    }
+
+    if c.direction != "input" {
+        return fmt.Errorf("WriteMessage only supported for input direction")
+    }
+
+    if msgType != ipcMsgTypeOpus {
+        // Ignore non-Opus messages
+        return nil
+    }
+
+    if len(payload) == 0 {
+        return nil
+    }
+
+    // Call C function to decode Opus and write to USB speakers
+    rc := C.jetkvm_audio_decode_write(unsafe.Pointer(&payload[0]), C.int(len(payload)))
+
+    if rc < 0 {
+        return fmt.Errorf("jetkvm_audio_decode_write failed: %d", rc)
+    }
+
+    return nil
+}
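Like IPCSource.ReadMessage, CgoSource.ReadMessage returns a slice into a reusable internal buffer (opusBuf), so the payload is only valid until the next call. A short sketch of a consumer honoring that contract; drainOneFrame is illustrative only, not part of this commit:

// Sketch: copy the payload if it must outlive the current iteration.
func drainOneFrame(src audio.AudioSource, keep func([]byte)) error {
    msgType, payload, err := src.ReadMessage()
    if err != nil {
        return err
    }
    if msgType != 0 || len(payload) == 0 { // 0 = Opus frame; empty means silence/DTX
        return nil
    }
    frame := make([]byte, len(payload))
    copy(frame, payload) // detach from the source's reusable buffer
    keep(frame)
    return nil
}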
@@ -33,8 +33,8 @@ const (
     readTimeout = 2 * time.Second
 )
 
-// IPCClient manages Unix socket communication with audio subprocess
-type IPCClient struct {
+// IPCSource implements AudioSource via Unix socket communication with audio subprocess
+type IPCSource struct {
     socketPath  string
     magicNumber uint32
     conn        net.Conn
@@ -43,13 +43,13 @@ type IPCClient struct {
     readBuf []byte // Reusable buffer for reads (single reader per client)
 }
 
-// NewIPCClient creates a new IPC client
+// NewIPCSource creates a new IPC audio source
 // For output: socketPath="/var/run/audio_output.sock", magic=ipcMagicOutput
 // For input: socketPath="/var/run/audio_input.sock", magic=ipcMagicInput
-func NewIPCClient(name, socketPath string, magicNumber uint32) *IPCClient {
+func NewIPCSource(name, socketPath string, magicNumber uint32) *IPCSource {
     logger := logging.GetDefaultLogger().With().Str("component", name+"-ipc").Logger()
 
-    return &IPCClient{
+    return &IPCSource{
         socketPath:  socketPath,
         magicNumber: magicNumber,
         logger:      logger,
@@ -58,7 +58,7 @@ func NewIPCClient(name, socketPath string, magicNumber uint32) *IPCClient {
 }
 
 // Connect establishes connection to the subprocess
-func (c *IPCClient) Connect() error {
+func (c *IPCSource) Connect() error {
     c.mu.Lock()
     defer c.mu.Unlock()
@@ -78,7 +78,7 @@ func (c *IPCClient) Connect() error {
 }
 
 // Disconnect closes the connection
-func (c *IPCClient) Disconnect() {
+func (c *IPCSource) Disconnect() {
     c.mu.Lock()
     defer c.mu.Unlock()
@@ -90,7 +90,7 @@ func (c *IPCClient) Disconnect() {
 }
 
 // IsConnected returns true if currently connected
-func (c *IPCClient) IsConnected() bool {
+func (c *IPCSource) IsConnected() bool {
     c.mu.Lock()
     defer c.mu.Unlock()
     return c.conn != nil
@@ -100,7 +100,7 @@ func (c *IPCClient) IsConnected() bool {
 // Returns message type, payload data, and error
 // IMPORTANT: The returned payload slice is only valid until the next ReadMessage call.
 // Callers must use the data immediately or copy if retention is needed.
-func (c *IPCClient) ReadMessage() (uint8, []byte, error) {
+func (c *IPCSource) ReadMessage() (uint8, []byte, error) {
     c.mu.Lock()
     defer c.mu.Unlock()
@@ -150,7 +150,7 @@ func (c *IPCClient) ReadMessage() (uint8, []byte, error) {
 }
 
 // WriteMessage writes a complete IPC message
-func (c *IPCClient) WriteMessage(msgType uint8, payload []byte) error {
+func (c *IPCSource) WriteMessage(msgType uint8, payload []byte) error {
     c.mu.Lock()
     defer c.mu.Unlock()
@@ -12,9 +12,9 @@ import (
     "github.com/rs/zerolog"
 )
 
-// OutputRelay forwards audio from subprocess (HDMI) to WebRTC (browser)
+// OutputRelay forwards audio from any AudioSource (CGO or IPC) to WebRTC (browser)
 type OutputRelay struct {
-    client     *IPCClient
+    source     AudioSource
     audioTrack *webrtc.TrackLocalStaticSample
     ctx        context.Context
     cancel     context.CancelFunc
@@ -28,12 +28,12 @@ type OutputRelay struct {
 }
 
 // NewOutputRelay creates a relay for output audio (device → browser)
-func NewOutputRelay(client *IPCClient, audioTrack *webrtc.TrackLocalStaticSample) *OutputRelay {
+func NewOutputRelay(source AudioSource, audioTrack *webrtc.TrackLocalStaticSample) *OutputRelay {
     ctx, cancel := context.WithCancel(context.Background())
     logger := logging.GetDefaultLogger().With().Str("component", "audio-output-relay").Logger()
 
     return &OutputRelay{
-        client:     client,
+        source:     source,
         audioTrack: audioTrack,
         ctx:        ctx,
         cancel:     cancel,
@@ -68,27 +68,27 @@ func (r *OutputRelay) Stop() {
         Msg("output relay stopped")
 }
 
-// relayLoop continuously reads from IPC and writes to WebRTC
+// relayLoop continuously reads from audio source and writes to WebRTC
 func (r *OutputRelay) relayLoop() {
     const reconnectDelay = 1 * time.Second
 
     for r.running.Load() {
         // Ensure connected
-        if !r.client.IsConnected() {
-            if err := r.client.Connect(); err != nil {
+        if !r.source.IsConnected() {
+            if err := r.source.Connect(); err != nil {
                 r.logger.Debug().Err(err).Msg("failed to connect, will retry")
                 time.Sleep(reconnectDelay)
                 continue
             }
         }
 
-        // Read message from subprocess
-        msgType, payload, err := r.client.ReadMessage()
+        // Read message from audio source
+        msgType, payload, err := r.source.ReadMessage()
         if err != nil {
             // Connection error - reconnect
             if r.running.Load() {
                 r.logger.Warn().Err(err).Msg("read error, reconnecting")
-                r.client.Disconnect()
+                r.source.Disconnect()
                 time.Sleep(reconnectDelay)
             }
             continue
@@ -111,7 +111,7 @@ func (r *OutputRelay) relayLoop() {
 
 // InputRelay forwards audio from WebRTC (browser microphone) to subprocess (USB audio)
 type InputRelay struct {
-    client *IPCClient
+    source AudioSource
     ctx    context.Context
     cancel context.CancelFunc
     logger zerolog.Logger
@@ -119,12 +119,12 @@ type InputRelay struct {
 }
 
 // NewInputRelay creates a relay for input audio (browser → device)
-func NewInputRelay(client *IPCClient) *InputRelay {
+func NewInputRelay(source AudioSource) *InputRelay {
     ctx, cancel := context.WithCancel(context.Background())
     logger := logging.GetDefaultLogger().With().Str("component", "audio-input-relay").Logger()
 
     return &InputRelay{
-        client: client,
+        source: source,
         ctx:    ctx,
         cancel: cancel,
         logger: logger,
@@ -0,0 +1,26 @@
+package audio
+
+// AudioSource provides audio frames from either CGO (in-process) or IPC (subprocess)
+// This interface allows the relay goroutine to work with both modes transparently
+type AudioSource interface {
+    // ReadMessage reads the next audio message
+    // Returns message type, payload data, and error
+    // Blocks until data is available or error occurs
+    // Used for output path (device → browser)
+    ReadMessage() (msgType uint8, payload []byte, err error)
+
+    // WriteMessage writes an audio message
+    // Used for input path (browser → device)
+    WriteMessage(msgType uint8, payload []byte) error
+
+    // IsConnected returns true if the source is connected and ready
+    IsConnected() bool
+
+    // Connect establishes connection to the audio source
+    // For CGO: initializes C audio subsystem
+    // For IPC: connects to Unix socket
+    Connect() error
+
+    // Disconnect closes the connection and releases resources
+    Disconnect()
+}
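A sketch of the consumption pattern this interface enables: the same loop can drive either a CgoSource or an IPCSource, reconnecting on failure much as OutputRelay.relayLoop does. The function pump is illustrative only, not part of this commit, and assumes the time package is imported:

// Sketch: a mode-agnostic read loop over any AudioSource.
func pump(src audio.AudioSource, deliver func(opus []byte) error) {
    for {
        if !src.IsConnected() {
            if err := src.Connect(); err != nil {
                time.Sleep(time.Second) // back off and retry, as the relay does
                continue
            }
        }
        msgType, payload, err := src.ReadMessage()
        if err != nil {
            src.Disconnect() // force a reconnect on the next iteration
            continue
        }
        if msgType != 0 || len(payload) == 0 {
            continue // only non-empty Opus frames (type 0) are forwarded
        }
        if err := deliver(payload); err != nil {
            return
        }
    }
}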
jsonrpc.go (14 changed lines)
@@ -1021,6 +1021,18 @@ func rpcSetAudioInputEnabled(enabled bool) error {
     return SetAudioInputEnabled(enabled)
 }
 
+func rpcGetAudioMode() (string, error) {
+    ensureConfigLoaded()
+    if config.AudioMode == "" {
+        return "subprocess", nil // Default
+    }
+    return config.AudioMode, nil
+}
+
+func rpcSetAudioMode(mode string) error {
+    return SetAudioMode(mode)
+}
+
 func rpcSetCloudUrl(apiUrl string, appUrl string) error {
     currentCloudURL := config.CloudURL
     config.CloudURL = apiUrl
@@ -1343,6 +1355,8 @@ var rpcHandlers = map[string]RPCHandler{
     "setAudioOutputEnabled": {Func: rpcSetAudioOutputEnabled, Params: []string{"enabled"}},
     "getAudioInputEnabled":  {Func: rpcGetAudioInputEnabled},
     "setAudioInputEnabled":  {Func: rpcSetAudioInputEnabled, Params: []string{"enabled"}},
+    "getAudioMode":          {Func: rpcGetAudioMode},
+    "setAudioMode":          {Func: rpcSetAudioMode, Params: []string{"mode"}},
     "setCloudUrl":           {Func: rpcSetCloudUrl, Params: []string{"apiUrl", "appUrl"}},
     "getKeyboardLayout":     {Func: rpcGetKeyboardLayout},
     "setKeyboardLayout":     {Func: rpcSetKeyboardLayout, Params: []string{"layout"}},
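For orientation, the wire shape of the two new methods, assuming the standard JSON-RPC 2.0 envelope with named params that the UI's send() helper produces for the other handlers (the request struct below is illustrative only):

// Sketch: the requests the web UI sends for the new audio-mode methods.
package main

import (
    "encoding/json"
    "fmt"
)

type rpcRequest struct {
    JSONRPC string         `json:"jsonrpc"`
    Method  string         `json:"method"`
    Params  map[string]any `json:"params,omitempty"`
    ID      int            `json:"id"`
}

func main() {
    get, _ := json.Marshal(rpcRequest{JSONRPC: "2.0", Method: "getAudioMode", ID: 1})
    set, _ := json.Marshal(rpcRequest{
        JSONRPC: "2.0",
        Method:  "setAudioMode",
        Params:  map[string]any{"mode": "in-process"}, // must be "subprocess" or "in-process"
        ID:      2,
    })
    fmt.Println(string(get))
    fmt.Println(string(set))
}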
@@ -351,6 +351,12 @@ export interface SettingsState {
   setVideoBrightness: (value: number) => void;
   videoContrast: number;
   setVideoContrast: (value: number) => void;
+
+  // Audio settings
+  audioOutputSource: string;
+  audioMode: string;
+  audioOutputEnabled: boolean;
+  audioInputEnabled: boolean;
 }
 
 export const useSettingsStore = create(
@@ -396,6 +402,12 @@ export const useSettingsStore = create(
       setVideoBrightness: (value: number) => set({ videoBrightness: value }),
       videoContrast: 1.0,
       setVideoContrast: (value: number) => set({ videoContrast: value }),
+
+      // Audio settings with defaults
+      audioOutputSource: "usb",
+      audioMode: "subprocess",
+      audioOutputEnabled: true,
+      audioInputEnabled: true,
     }),
     {
       name: "settings",
@@ -40,6 +40,7 @@ const SettingsKeyboardRoute = lazy(() => import("@routes/devices.$id.settings.ke
 const SettingsAdvancedRoute = lazy(() => import("@routes/devices.$id.settings.advanced"));
 const SettingsHardwareRoute = lazy(() => import("@routes/devices.$id.settings.hardware"));
 const SettingsVideoRoute = lazy(() => import("@routes/devices.$id.settings.video"));
+const SettingsAudioRoute = lazy(() => import("@routes/devices.$id.settings.audio"));
 const SettingsAppearanceRoute = lazy(() => import("@routes/devices.$id.settings.appearance"));
 const SettingsGeneralIndexRoute = lazy(() => import("@routes/devices.$id.settings.general._index"));
 const SettingsGeneralRebootRoute = lazy(() => import("@routes/devices.$id.settings.general.reboot"));
@@ -190,6 +191,10 @@ if (isOnDevice) {
           path: "video",
           element: <SettingsVideoRoute />,
         },
+        {
+          path: "audio",
+          element: <SettingsAudioRoute />,
+        },
         {
           path: "appearance",
           element: <SettingsAppearanceRoute />,
@@ -323,6 +328,10 @@ if (isOnDevice) {
           path: "video",
           element: <SettingsVideoRoute />,
         },
+        {
+          path: "audio",
+          element: <SettingsAudioRoute />,
+        },
         {
           path: "appearance",
           element: <SettingsAppearanceRoute />,
@@ -0,0 +1,174 @@
+import { useEffect } from "react";
+
+import { SettingsItem } from "@components/SettingsItem";
+import { SettingsPageHeader } from "@components/SettingsPageheader";
+import { useSettingsStore } from "@/hooks/stores";
+import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc";
+import { SelectMenuBasic } from "@components/SelectMenuBasic";
+import Checkbox from "@components/Checkbox";
+
+import notifications from "../notifications";
+
+export default function SettingsAudioRoute() {
+  const { send } = useJsonRpc();
+  const settings = useSettingsStore();
+
+  // Fetch current audio settings on mount
+  useEffect(() => {
+    send("getAudioOutputSource", {}, (resp: JsonRpcResponse) => {
+      if ("error" in resp) {
+        return;
+      }
+      const source = resp.result as string;
+      settings.audioOutputSource = source;
+    });
+
+    send("getAudioMode", {}, (resp: JsonRpcResponse) => {
+      if ("error" in resp) {
+        return;
+      }
+      const mode = resp.result as string;
+      settings.audioMode = mode;
+    });
+
+    send("getAudioOutputEnabled", {}, (resp: JsonRpcResponse) => {
+      if ("error" in resp) {
+        return;
+      }
+      settings.audioOutputEnabled = resp.result as boolean;
+    });
+
+    send("getAudioInputEnabled", {}, (resp: JsonRpcResponse) => {
+      if ("error" in resp) {
+        return;
+      }
+      settings.audioInputEnabled = resp.result as boolean;
+    });
+  }, [send]);
+
+  const handleAudioOutputSourceChange = (source: string) => {
+    send("setAudioOutputSource", { source }, (resp: JsonRpcResponse) => {
+      if ("error" in resp) {
+        notifications.error(
+          `Failed to set audio output source: ${resp.error.data || "Unknown error"}`,
+        );
+        return;
+      }
+      settings.audioOutputSource = source;
+      notifications.success("Audio output source updated successfully");
+    });
+  };
+
+  const handleAudioModeChange = (mode: string) => {
+    send("setAudioMode", { mode }, (resp: JsonRpcResponse) => {
+      if ("error" in resp) {
+        notifications.error(
+          `Failed to set audio mode: ${resp.error.data || "Unknown error"}`,
+        );
+        return;
+      }
+      settings.audioMode = mode;
+      notifications.success("Audio mode updated successfully. Changes will take effect on next connection.");
+    });
+  };
+
+  const handleAudioOutputEnabledChange = (enabled: boolean) => {
+    send("setAudioOutputEnabled", { enabled }, (resp: JsonRpcResponse) => {
+      if ("error" in resp) {
+        notifications.error(
+          `Failed to ${enabled ? "enable" : "disable"} audio output: ${resp.error.data || "Unknown error"}`,
+        );
+        return;
+      }
+      settings.audioOutputEnabled = enabled;
+      notifications.success(`Audio output ${enabled ? "enabled" : "disabled"} successfully`);
+    });
+  };
+
+  const handleAudioInputEnabledChange = (enabled: boolean) => {
+    send("setAudioInputEnabled", { enabled }, (resp: JsonRpcResponse) => {
+      if ("error" in resp) {
+        notifications.error(
+          `Failed to ${enabled ? "enable" : "disable"} audio input: ${resp.error.data || "Unknown error"}`,
+        );
+        return;
+      }
+      settings.audioInputEnabled = enabled;
+      notifications.success(`Audio input ${enabled ? "enabled" : "disabled"} successfully`);
+    });
+  };
+
+  return (
+    <div className="space-y-4">
+      <SettingsPageHeader
+        title="Audio"
+        description="Configure audio input and output settings for your JetKVM device"
+      />
+      <div className="space-y-4">
+        <SettingsItem
+          title="Audio Output"
+          description="Enable or disable audio from the remote computer"
+        >
+          <Checkbox
+            checked={settings.audioOutputEnabled || false}
+            onChange={(e) => handleAudioOutputEnabledChange(e.target.checked)}
+          />
+        </SettingsItem>
+
+        {settings.audioOutputEnabled && (
+          <SettingsItem
+            title="Audio Output Source"
+            description="Select the audio capture device (HDMI or USB)"
+          >
+            <SelectMenuBasic
+              size="SM"
+              label=""
+              value={settings.audioOutputSource || "usb"}
+              options={[
+                { value: "hdmi", label: "HDMI" },
+                { value: "usb", label: "USB" },
+              ]}
+              onChange={e => {
+                handleAudioOutputSourceChange(e.target.value);
+              }}
+            />
+          </SettingsItem>
+        )}
+
+        <SettingsItem
+          title="Audio Input"
+          description="Enable or disable microphone audio to the remote computer"
+        >
+          <Checkbox
+            checked={settings.audioInputEnabled || false}
+            onChange={(e) => handleAudioInputEnabledChange(e.target.checked)}
+          />
+        </SettingsItem>
+
+        <div className="border-t border-slate-200 pt-4 dark:border-slate-700">
+          <h3 className="mb-2 text-sm font-medium">Advanced</h3>
+          <SettingsItem
+            title="Audio Processing Mode"
+            description="In-process mode uses less CPU but subprocess mode provides better isolation"
+          >
+            <SelectMenuBasic
+              size="SM"
+              label=""
+              value={settings.audioMode || "subprocess"}
+              options={[
+                { value: "subprocess", label: "Subprocess (Recommended)" },
+                { value: "in-process", label: "In-Process" },
+              ]}
+              onChange={e => {
+                handleAudioModeChange(e.target.value);
+              }}
+            />
+          </SettingsItem>
+          <p className="mt-2 text-xs text-slate-600 dark:text-slate-400">
+            Changing the audio mode will take effect when the next WebRTC connection is established.
+          </p>
+        </div>
+      </div>
+    </div>
+  );
+}
@@ -5,6 +5,7 @@ import {
   LuMouse,
   LuKeyboard,
   LuVideo,
+  LuVolume2,
   LuCpu,
   LuShieldCheck,
   LuWrench,
@@ -168,6 +169,17 @@ export default function SettingsRoute() {
             </div>
           </NavLink>
         </div>
+        <div className="shrink-0">
+          <NavLink
+            to="audio"
+            className={({ isActive }) => (isActive ? "active" : "")}
+          >
+            <div className="flex items-center gap-x-2 rounded-md px-2.5 py-2.5 text-sm transition-colors hover:bg-slate-100 dark:hover:bg-slate-700 in-[.active]:bg-blue-50 in-[.active]:text-blue-700! md:in-[.active]:bg-transparent dark:in-[.active]:bg-blue-900 dark:in-[.active]:text-blue-200! dark:md:in-[.active]:bg-transparent">
+              <LuVolume2 className="h-4 w-4 shrink-0" />
+              <h1>Audio</h1>
+            </div>
+          </NavLink>
+        </div>
         <div className="shrink-0">
           <NavLink
             to="hardware"