refactor(audio): centralize config and remove debug logs

- Move hardcoded constants to centralized config system
- Remove verbose debug logging statements
- Clean up unused code and improve error handling
Alex P 2025-08-25 16:49:48 +00:00
parent d1c192bf8b
commit 7ec583ed6a
17 changed files with 265 additions and 165 deletions
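The recurring pattern in this commit replaces package-level constants with lookups on a shared AudioConfigConstants instance via GetConfig(). A minimal sketch of the new access pattern, using names from the diffs below; the caller itself is hypothetical:

// Hypothetical caller inside the audio package, illustrating the migration.
// Before: buf := make([]byte, MaxAudioFrameSize)  // fixed at compile time
// After: the size is read from the centralized config at call time.
func allocFrameBuffer() []byte {
	return make([]byte, GetConfig().MaxAudioFrameSize)
}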

View File

@@ -10,7 +10,10 @@ var (
ErrAudioAlreadyRunning = errors.New("audio already running")
)
const MaxAudioFrameSize = 1500
// MaxAudioFrameSize is now retrieved from centralized config
func GetMaxAudioFrameSize() int {
return GetConfig().MaxAudioFrameSize
}
// AudioQuality represents different audio quality presets
type AudioQuality int
@@ -45,14 +48,14 @@ var (
currentConfig = AudioConfig{
Quality: AudioQualityMedium,
Bitrate: 64,
SampleRate: 48000,
Channels: 2,
SampleRate: GetConfig().SampleRate,
Channels: GetConfig().Channels,
FrameSize: 20 * time.Millisecond,
}
currentMicrophoneConfig = AudioConfig{
Quality: AudioQualityMedium,
Bitrate: 32,
SampleRate: 48000,
SampleRate: GetConfig().SampleRate,
Channels: 1,
FrameSize: 20 * time.Millisecond,
}
@@ -77,12 +80,12 @@ var qualityPresets = map[AudioQuality]struct {
},
AudioQualityHigh: {
outputBitrate: 128, inputBitrate: 64,
sampleRate: 48000, channels: 2,
sampleRate: GetConfig().SampleRate, channels: GetConfig().Channels,
frameSize: 20 * time.Millisecond,
},
AudioQualityUltra: {
outputBitrate: 192, inputBitrate: 96,
sampleRate: 48000, channels: 2,
sampleRate: GetConfig().SampleRate, channels: GetConfig().Channels,
frameSize: 10 * time.Millisecond,
},
}

View File

@@ -2,28 +2,14 @@ package audio
import "time"
// MonitoringConfig contains configuration constants for audio monitoring
type MonitoringConfig struct {
// MetricsUpdateInterval defines how often metrics are collected and broadcast
MetricsUpdateInterval time.Duration
}
// DefaultMonitoringConfig returns the default monitoring configuration
func DefaultMonitoringConfig() MonitoringConfig {
return MonitoringConfig{
MetricsUpdateInterval: 1000 * time.Millisecond, // 1 second interval
}
}
// Global monitoring configuration instance
var monitoringConfig = DefaultMonitoringConfig()
// GetMetricsUpdateInterval returns the current metrics update interval
// GetMetricsUpdateInterval returns the current metrics update interval from centralized config
func GetMetricsUpdateInterval() time.Duration {
return monitoringConfig.MetricsUpdateInterval
return GetConfig().MetricsUpdateInterval
}
// SetMetricsUpdateInterval sets the metrics update interval
// SetMetricsUpdateInterval sets the metrics update interval in centralized config
func SetMetricsUpdateInterval(interval time.Duration) {
monitoringConfig.MetricsUpdateInterval = interval
config := GetConfig()
config.MetricsUpdateInterval = interval
UpdateConfig(config)
}
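A short usage sketch for the rewritten pair: the setter now reads the shared config, updates the interval, and republishes it through UpdateConfig rather than mutating a package-local struct. Hypothetical caller, assumed to sit in the same audio package:

// Slow metrics collection to a 5s cadence at runtime (illustrative value).
func exampleAdjustMetricsInterval() {
	SetMetricsUpdateInterval(5 * time.Second)
	_ = GetMetricsUpdateInterval() // now returns 5s
}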

View File

@@ -0,0 +1,150 @@
package audio
import "time"
// AudioConfigConstants centralizes all hardcoded values used across audio components
type AudioConfigConstants struct {
// Audio Quality Presets
MaxAudioFrameSize int
// Opus Encoding Parameters
OpusBitrate int
OpusComplexity int
OpusVBR int
OpusVBRConstraint int
OpusDTX int
// Audio Parameters
SampleRate int
Channels int
FrameSize int
MaxPacketSize int
// Process Management
MaxRestartAttempts int
RestartWindow time.Duration
RestartDelay time.Duration
MaxRestartDelay time.Duration
// Buffer Management
PreallocSize int
MaxPoolSize int
MessagePoolSize int
OptimalSocketBuffer int
MaxSocketBuffer int
MinSocketBuffer int
// IPC Configuration
MagicNumber uint32
MaxFrameSize int
WriteTimeout time.Duration
MaxDroppedFrames int
HeaderSize int
// Monitoring and Metrics
MetricsUpdateInterval time.Duration
EMAAlpha float64
WarmupSamples int
LogThrottleInterval time.Duration
MetricsChannelBuffer int
// Performance Tuning
CPUFactor float64
MemoryFactor float64
LatencyFactor float64
InputSizeThreshold int
OutputSizeThreshold int
TargetLevel float64
// Priority Scheduling
AudioHighPriority int
AudioMediumPriority int
AudioLowPriority int
NormalPriority int
NiceValue int
// Error Handling
MaxConsecutiveErrors int
MaxRetryAttempts int
}
// DefaultAudioConfig returns the default configuration constants
func DefaultAudioConfig() *AudioConfigConstants {
return &AudioConfigConstants{
// Audio Quality Presets
MaxAudioFrameSize: 4096,
// Opus Encoding Parameters
OpusBitrate: 128000,
OpusComplexity: 10,
OpusVBR: 1,
OpusVBRConstraint: 0,
OpusDTX: 0,
// Audio Parameters
SampleRate: 48000,
Channels: 2,
FrameSize: 960,
MaxPacketSize: 4000,
// Process Management
MaxRestartAttempts: 5,
RestartWindow: 5 * time.Minute,
RestartDelay: 2 * time.Second,
MaxRestartDelay: 30 * time.Second,
// Buffer Management
PreallocSize: 1024 * 1024, // 1MB
MaxPoolSize: 100,
MessagePoolSize: 100,
OptimalSocketBuffer: 262144, // 256KB
MaxSocketBuffer: 1048576, // 1MB
MinSocketBuffer: 8192, // 8KB
// IPC Configuration
MagicNumber: 0xDEADBEEF,
MaxFrameSize: 4096,
WriteTimeout: 5 * time.Second,
MaxDroppedFrames: 10,
HeaderSize: 8,
// Monitoring and Metrics
MetricsUpdateInterval: 1000 * time.Millisecond,
EMAAlpha: 0.1,
WarmupSamples: 10,
LogThrottleInterval: 5 * time.Second,
MetricsChannelBuffer: 100,
// Performance Tuning
CPUFactor: 0.7,
MemoryFactor: 0.8,
LatencyFactor: 0.9,
InputSizeThreshold: 1024,
OutputSizeThreshold: 2048,
TargetLevel: 0.5,
// Priority Scheduling
AudioHighPriority: -10,
AudioMediumPriority: -5,
AudioLowPriority: 0,
NormalPriority: 0,
NiceValue: -10,
// Error Handling
MaxConsecutiveErrors: 5,
MaxRetryAttempts: 3,
}
}
// Global configuration instance
var audioConfigInstance = DefaultAudioConfig()
// UpdateConfig allows runtime configuration updates
func UpdateConfig(newConfig *AudioConfigConstants) {
audioConfigInstance = newConfig
}
// GetConfig returns the current configuration
func GetConfig() *AudioConfigConstants {
return audioConfigInstance
}
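The read-modify-republish pattern used by SetMetricsUpdateInterval generalizes to any field here. A hedged sketch of a runtime override, same package; the profile name and values are illustrative, not tuned defaults. Note that GetConfig returns the live pointer, so readers observe the new values as soon as the fields are written:

// Hypothetical low-latency profile built on the entry points above.
func applyLowLatencyProfile() {
	cfg := GetConfig()
	cfg.FrameSize = 480 // 10ms of audio at the default 48kHz sample rate
	cfg.MetricsUpdateInterval = 500 * time.Millisecond
	UpdateConfig(cfg) // republish, mirroring the setter pattern
}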

View File

@@ -220,18 +220,6 @@ func (aeb *AudioEventBroadcaster) sendInitialState(connectionID string) {
aeb.sendCurrentMetrics(subscriber)
}
// convertAudioMetricsToEventData converts internal audio metrics to AudioMetricsData for events
func convertAudioMetricsToEventData(metrics AudioMetrics) AudioMetricsData {
return AudioMetricsData{
FramesReceived: metrics.FramesReceived,
FramesDropped: metrics.FramesDropped,
BytesProcessed: metrics.BytesProcessed,
LastFrameTime: metrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
ConnectionDrops: metrics.ConnectionDrops,
AverageLatency: metrics.AverageLatency.String(),
}
}
// convertAudioMetricsToEventDataWithLatencyMs converts internal audio metrics to AudioMetricsData with millisecond latency formatting
func convertAudioMetricsToEventDataWithLatencyMs(metrics AudioMetrics) AudioMetricsData {
return AudioMetricsData{
@@ -244,18 +232,6 @@ func convertAudioMetricsToEventDataWithLatencyMs(metrics AudioMetrics) AudioMetr
}
}
// convertAudioInputMetricsToEventData converts internal audio input metrics to MicrophoneMetricsData for events
func convertAudioInputMetricsToEventData(metrics AudioInputMetrics) MicrophoneMetricsData {
return MicrophoneMetricsData{
FramesSent: metrics.FramesSent,
FramesDropped: metrics.FramesDropped,
BytesProcessed: metrics.BytesProcessed,
LastFrameTime: metrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
ConnectionDrops: metrics.ConnectionDrops,
AverageLatency: metrics.AverageLatency.String(),
}
}
// convertAudioInputMetricsToEventDataWithLatencyMs converts internal audio input metrics to MicrophoneMetricsData with millisecond latency formatting
func convertAudioInputMetricsToEventDataWithLatencyMs(metrics AudioInputMetrics) MicrophoneMetricsData {
return MicrophoneMetricsData{
@@ -358,7 +334,7 @@ func (aeb *AudioEventBroadcaster) getMicrophoneProcessMetrics() ProcessMetricsDa
func (aeb *AudioEventBroadcaster) sendCurrentMetrics(subscriber *AudioEventSubscriber) {
// Send audio metrics
audioMetrics := GetAudioMetrics()
audioMetricsEvent := createAudioEvent(AudioEventMetricsUpdate, convertAudioMetricsToEventData(audioMetrics))
audioMetricsEvent := createAudioEvent(AudioEventMetricsUpdate, convertAudioMetricsToEventDataWithLatencyMs(audioMetrics))
aeb.sendToSubscriber(subscriber, audioMetricsEvent)
// Send audio process metrics
@@ -374,7 +350,7 @@ func (aeb *AudioEventBroadcaster) sendCurrentMetrics(subscriber *AudioEventSubsc
if sessionProvider.IsSessionActive() {
if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil {
micMetrics := inputManager.GetMetrics()
micMetricsEvent := createAudioEvent(AudioEventMicrophoneMetrics, convertAudioInputMetricsToEventData(micMetrics))
micMetricsEvent := createAudioEvent(AudioEventMicrophoneMetrics, convertAudioInputMetricsToEventDataWithLatencyMs(micMetrics))
aeb.sendToSubscriber(subscriber, micMetricsEvent)
}
}
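Both call sites now route through the ...WithLatencyMs converters, whose bodies are only partially shown above. A hedged sketch of the presumable latency formatting, assuming AverageLatency is a time.Duration on the metrics side and a string on the event side (as in the deleted converters); the helper name and output format are assumptions:

// Assumed helper: report latency as a fixed millisecond figure instead of
// Duration.String()'s adaptive units (µs/ms/s).
func formatLatencyMs(d time.Duration) string {
	return fmt.Sprintf("%.1fms", float64(d)/float64(time.Millisecond))
}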

View File

@@ -58,7 +58,7 @@ func NewOutputStreamer() (*OutputStreamer, error) {
ctx, cancel := context.WithCancel(context.Background())
return &OutputStreamer{
client: client,
bufferPool: NewAudioBufferPool(MaxAudioFrameSize), // Use existing buffer pool
bufferPool: NewAudioBufferPool(GetMaxAudioFrameSize()), // Use existing buffer pool
ctx: ctx,
cancel: cancel,
batchSize: initialBatchSize, // Use adaptive batch size
@@ -319,7 +319,7 @@ func StartAudioOutputStreaming(send func([]byte)) error {
}()
getOutputStreamingLogger().Info().Msg("Audio output streaming started")
buffer := make([]byte, MaxAudioFrameSize)
buffer := make([]byte, GetMaxAudioFrameSize())
for {
select {

View File

@@ -17,16 +17,22 @@ import (
"github.com/rs/zerolog"
)
const (
// Maximum number of restart attempts within the restart window
maxRestartAttempts = 5
// Time window for counting restart attempts
restartWindow = 5 * time.Minute
// Delay between restart attempts
restartDelay = 2 * time.Second
// Maximum restart delay (exponential backoff)
maxRestartDelay = 30 * time.Second
)
// Restart configuration is now retrieved from centralized config
func getMaxRestartAttempts() int {
return GetConfig().MaxRestartAttempts
}
func getRestartWindow() time.Duration {
return GetConfig().RestartWindow
}
func getRestartDelay() time.Duration {
return GetConfig().RestartDelay
}
func getMaxRestartDelay() time.Duration {
return GetConfig().MaxRestartDelay
}
// AudioServerSupervisor manages the audio server subprocess lifecycle
type AudioServerSupervisor struct {
@@ -395,13 +401,13 @@ func (s *AudioServerSupervisor) shouldRestart() bool {
now := time.Now()
var recentAttempts []time.Time
for _, attempt := range s.restartAttempts {
if now.Sub(attempt) < restartWindow {
if now.Sub(attempt) < getRestartWindow() {
recentAttempts = append(recentAttempts, attempt)
}
}
s.restartAttempts = recentAttempts
return len(s.restartAttempts) < maxRestartAttempts
return len(s.restartAttempts) < getMaxRestartAttempts()
}
// recordRestartAttempt records a restart attempt
@@ -420,17 +426,17 @@ func (s *AudioServerSupervisor) calculateRestartDelay() time.Duration {
// Exponential backoff based on recent restart attempts
attempts := len(s.restartAttempts)
if attempts == 0 {
return restartDelay
return getRestartDelay()
}
// Calculate exponential backoff: 2^attempts * base delay
delay := restartDelay
for i := 0; i < attempts && delay < maxRestartDelay; i++ {
delay := getRestartDelay()
for i := 0; i < attempts && delay < getMaxRestartDelay(); i++ {
delay *= 2
}
if delay > maxRestartDelay {
delay = maxRestartDelay
if delay > getMaxRestartDelay() {
delay = getMaxRestartDelay()
}
return delay
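With the defaults from the new config.go (RestartDelay 2s, MaxRestartDelay 30s), this yields 2s, 4s, 8s, 16s, then a 30s cap for 0, 1, 2, 3, and 4+ recent attempts. A standalone sketch of the same arithmetic:

// Reproduces calculateRestartDelay's backoff using the default constants.
func backoffDelay(attempts int) time.Duration {
	base, ceiling := 2*time.Second, 30*time.Second
	if attempts == 0 {
		return base
	}
	delay := base
	for i := 0; i < attempts && delay < ceiling; i++ {
		delay *= 2
	}
	if delay > ceiling {
		delay = ceiling // attempts >= 4 saturate at the 30s cap
	}
	return delay
}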

View File

@@ -262,7 +262,7 @@ type ZeroCopyFramePoolStats struct {
}
var (
globalZeroCopyPool = NewZeroCopyFramePool(MaxAudioFrameSize)
globalZeroCopyPool = NewZeroCopyFramePool(GetMaxAudioFrameSize())
)
// GetZeroCopyFrame gets a frame from the global pool
@@ -284,16 +284,17 @@ func PutZeroCopyFrame(frame *ZeroCopyAudioFrame) {
func ZeroCopyAudioReadEncode() (*ZeroCopyAudioFrame, error) {
frame := GetZeroCopyFrame()
maxFrameSize := GetMaxAudioFrameSize()
// Ensure frame has enough capacity
if frame.Capacity() < MaxAudioFrameSize {
if frame.Capacity() < maxFrameSize {
// Reallocate if needed
frame.data = make([]byte, MaxAudioFrameSize)
frame.capacity = MaxAudioFrameSize
frame.data = make([]byte, maxFrameSize)
frame.capacity = maxFrameSize
frame.pooled = false
}
// Use unsafe pointer for direct CGO call
n, err := CGOAudioReadEncode(frame.data[:MaxAudioFrameSize])
n, err := CGOAudioReadEncode(frame.data[:maxFrameSize])
if err != nil {
PutZeroCopyFrame(frame)
return nil, err
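A sketch of the intended call pattern around the global zero-copy pool, using names from this file; the Data() accessor and the send callback are assumptions for illustration:

// Hypothetical consumer of the zero-copy read/encode path.
func streamOneFrame(send func([]byte)) error {
	frame, err := ZeroCopyAudioReadEncode()
	if err != nil {
		return err // on failure the callee already returned the frame to the pool
	}
	defer PutZeroCopyFrame(frame) // recycle the frame once the payload is consumed
	send(frame.Data()) // assumed accessor over the frame's encoded bytes
	return nil
}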

View File

@@ -122,8 +122,8 @@ export default function AudioMetricsDashboard() {
const response = await api.GET('/system/memory');
const data = await response.json();
setSystemMemoryMB(data.total_memory_mb);
} catch (error) {
console.warn('Failed to fetch system memory, using default:', error);
} catch {
// Failed to fetch system memory, using default
}
};
fetchSystemMemory();
@@ -260,8 +260,8 @@ export default function AudioMetricsDashboard() {
const micConfigData = await micConfigResp.json();
setMicrophoneConfig(micConfigData.current);
}
} catch (micConfigError) {
console.debug("Microphone config not available:", micConfigError);
} catch {
// Microphone config not available
}
} catch (error) {
console.error("Failed to load audio config:", error);
@@ -321,8 +321,8 @@ export default function AudioMetricsDashboard() {
});
}
}
} catch (audioProcessError) {
console.debug("Audio process metrics not available:", audioProcessError);
} catch {
// Audio process metrics not available
}
// Load microphone metrics
@@ -332,9 +332,9 @@ export default function AudioMetricsDashboard() {
const micData = await micResp.json();
setFallbackMicrophoneMetrics(micData);
}
} catch (micError) {
} catch {
// Microphone metrics might not be available, that's okay
console.debug("Microphone metrics not available:", micError);
// Microphone metrics not available
}
// Load microphone process metrics
@@ -374,8 +374,8 @@ export default function AudioMetricsDashboard() {
return newMap;
});
}
} catch (micProcessError) {
console.debug("Microphone process metrics not available:", micProcessError);
} catch {
// Microphone process metrics not available
}
} catch (error) {
console.error("Failed to load audio data:", error);

View File

@@ -32,9 +32,8 @@ export default function InfoBar() {
useEffect(() => {
if (!rpcDataChannel) return;
rpcDataChannel.onclose = () => console.log("rpcDataChannel has closed");
rpcDataChannel.onerror = e =>
console.log(`Error on DataChannel '${rpcDataChannel.label}': ${e}`);
rpcDataChannel.onclose = () => { /* RPC data channel closed */ };
rpcDataChannel.onerror = () => { /* Error on RPC data channel */ };
}, [rpcDataChannel]);
const keyboardLedState = useHidStore(state => state.keyboardLedState);

View File

@@ -155,8 +155,8 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
}
setConfigsLoaded(true);
} catch (error) {
console.error("Failed to load audio configurations:", error);
} catch {
// Failed to load audio configurations
}
};
@@ -165,11 +165,11 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
try {
const resp = await api.POST("/audio/mute", { muted: !isMuted });
if (!resp.ok) {
console.error("Failed to toggle mute:", resp.statusText);
// Failed to toggle mute
}
// WebSocket will handle the state update automatically
} catch (error) {
console.error("Failed to toggle mute:", error);
} catch {
// Failed to toggle mute
} finally {
setIsLoading(false);
}
@@ -183,8 +183,8 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
const data = await resp.json();
setCurrentConfig(data.config);
}
} catch (error) {
console.error("Failed to change audio quality:", error);
} catch {
// Failed to change audio quality
} finally {
setIsLoading(false);
}
@@ -197,8 +197,8 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
const data = await resp.json();
setCurrentMicrophoneConfig(data.config);
}
} catch (error) {
console.error("Failed to change microphone quality:", error);
} catch {
// Failed to change microphone quality
}
};
@@ -217,8 +217,8 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
if (!result.success && result.error) {
notifications.error(result.error.message);
}
} catch (error) {
console.error("Failed to toggle microphone:", error);
} catch {
// Failed to toggle microphone
notifications.error("An unexpected error occurred");
}
};
@@ -238,8 +238,8 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
if (!result.success && result.error) {
notifications.error(result.error.message);
}
} catch (error) {
console.error("Failed to toggle microphone mute:", error);
} catch {
// Failed to toggle microphone mute
notifications.error("Failed to toggle microphone mute");
}
};
@@ -258,8 +258,8 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
if (!result.success && result.error) {
notifications.error(result.error.message);
}
} catch (error) {
console.error("Failed to change microphone device:", error);
} catch {
// Failed to change microphone device
notifications.error("Failed to change microphone device");
}
}
@@ -273,11 +273,11 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
if (videoElement && 'setSinkId' in videoElement) {
try {
await (videoElement as HTMLVideoElement & { setSinkId: (deviceId: string) => Promise<void> }).setSinkId(deviceId);
} catch (error: unknown) {
console.error('Failed to change audio output device:', error);
} catch {
// Failed to change audio output device
}
} else {
console.warn('setSinkId not supported or video element not found');
// setSinkId not supported or video element not found
}
};

View File

@@ -845,7 +845,7 @@ export const useMacrosStore = create<MacrosState>((set, get) => ({
const { sendFn } = get();
if (!sendFn) {
console.warn("JSON-RPC send function not available.");
// console.warn("JSON-RPC send function not available.");
return;
}
@@ -855,7 +855,7 @@ export const useMacrosStore = create<MacrosState>((set, get) => ({
await new Promise<void>((resolve, reject) => {
sendFn("getKeyboardMacros", {}, (response: JsonRpcResponse) => {
if (response.error) {
console.error("Error loading macros:", response.error);
// console.error("Error loading macros:", response.error);
reject(new Error(response.error.message));
return;
}
@@ -879,8 +879,8 @@ export const useMacrosStore = create<MacrosState>((set, get) => ({
resolve();
});
});
} catch (error) {
console.error("Failed to load macros:", error);
} catch {
// console.error("Failed to load macros:", _error);
} finally {
set({ loading: false });
}
@@ -889,20 +889,20 @@ export const useMacrosStore = create<MacrosState>((set, get) => ({
saveMacros: async (macros: KeySequence[]) => {
const { sendFn } = get();
if (!sendFn) {
console.warn("JSON-RPC send function not available.");
// console.warn("JSON-RPC send function not available.");
throw new Error("JSON-RPC send function not available");
}
if (macros.length > MAX_TOTAL_MACROS) {
console.error(`Cannot save: exceeded maximum of ${MAX_TOTAL_MACROS} macros`);
// console.error(`Cannot save: exceeded maximum of ${MAX_TOTAL_MACROS} macros`);
throw new Error(`Cannot save: exceeded maximum of ${MAX_TOTAL_MACROS} macros`);
}
for (const macro of macros) {
if (macro.steps.length > MAX_STEPS_PER_MACRO) {
console.error(
`Cannot save: macro "${macro.name}" exceeds maximum of ${MAX_STEPS_PER_MACRO} steps`,
);
// console.error(
// `Cannot save: macro "${macro.name}" exceeds maximum of ${MAX_STEPS_PER_MACRO} steps`,
// );
throw new Error(
`Cannot save: macro "${macro.name}" exceeds maximum of ${MAX_STEPS_PER_MACRO} steps`,
);
@@ -911,9 +911,9 @@ export const useMacrosStore = create<MacrosState>((set, get) => ({
for (let i = 0; i < macro.steps.length; i++) {
const step = macro.steps[i];
if (step.keys && step.keys.length > MAX_KEYS_PER_STEP) {
console.error(
`Cannot save: macro "${macro.name}" step ${i + 1} exceeds maximum of ${MAX_KEYS_PER_STEP} keys`,
);
// console.error(
// `Cannot save: macro "${macro.name}" step ${i + 1} exceeds maximum of ${MAX_KEYS_PER_STEP} keys`,
// );
throw new Error(
`Cannot save: macro "${macro.name}" step ${i + 1} exceeds maximum of ${MAX_KEYS_PER_STEP} keys`,
);
@@ -940,7 +940,7 @@ export const useMacrosStore = create<MacrosState>((set, get) => ({
});
if (response.error) {
console.error("Error saving macros:", response.error);
// console.error("Error saving macros:", response.error);
const errorMessage =
typeof response.error.data === "string"
? response.error.data
@@ -950,9 +950,6 @@ export const useMacrosStore = create<MacrosState>((set, get) => ({
// Only update the store if the request was successful
set({ macros: macrosWithSortOrder });
} catch (error) {
console.error("Failed to save macros:", error);
throw error;
} finally {
set({ loading: false });
}

View File

@@ -63,10 +63,7 @@ export function useAudioDevices(): UseAudioDevicesReturn {
setAudioInputDevices(inputDevices);
setAudioOutputDevices(outputDevices);
console.log('Audio devices enumerated:', {
inputs: inputDevices.length,
outputs: outputDevices.length
});
// Audio devices enumerated
} catch (err) {
console.error('Failed to enumerate audio devices:', err);
@@ -79,7 +76,7 @@ export function useAudioDevices(): UseAudioDevicesReturn {
// Listen for device changes
useEffect(() => {
const handleDeviceChange = () => {
console.log('Audio devices changed, refreshing...');
// Audio devices changed, refreshing
refreshDevices();
};

View File

@@ -124,13 +124,13 @@ export function useAudioEvents(onAudioDeviceChanged?: (data: AudioDeviceChangedD
reconnectInterval: 3000,
share: true, // Share the WebSocket connection across multiple hooks
onOpen: () => {
console.log('[AudioEvents] WebSocket connected');
// WebSocket connected
// Reset global state on new connection
globalSubscriptionState.isSubscribed = false;
globalSubscriptionState.connectionId = Math.random().toString(36);
},
onClose: () => {
console.log('[AudioEvents] WebSocket disconnected');
// WebSocket disconnected
// Reset global state on disconnect
globalSubscriptionState.isSubscribed = false;
globalSubscriptionState.subscriberCount = 0;
@@ -160,7 +160,7 @@ export function useAudioEvents(onAudioDeviceChanged?: (data: AudioDeviceChangedD
sendMessage(JSON.stringify(subscribeMessage));
globalSubscriptionState.isSubscribed = true;
console.log('[AudioEvents] Subscribed to audio events');
// Subscribed to audio events
}
}, 100); // 100ms delay to debounce subscription attempts
}
@@ -197,11 +197,11 @@ export function useAudioEvents(onAudioDeviceChanged?: (data: AudioDeviceChangedD
sendMessage(JSON.stringify(unsubscribeMessage));
globalSubscriptionState.isSubscribed = false;
globalSubscriptionState.subscriberCount = 0;
console.log('[AudioEvents] Sent unsubscribe message to backend');
// Sent unsubscribe message to backend
}
}
console.log('[AudioEvents] Component unsubscribed from audio events');
// Component unsubscribed from audio events
}, [readyState, isLocallySubscribed, sendMessage]);
// Handle incoming messages
@@ -218,7 +218,7 @@ export function useAudioEvents(onAudioDeviceChanged?: (data: AudioDeviceChangedD
case 'audio-mute-changed': {
const muteData = audioEvent.data as AudioMuteData;
setAudioMuted(muteData.muted);
console.log('[AudioEvents] Audio mute changed:', muteData.muted);
// Audio mute changed
break;
}
@@ -231,7 +231,7 @@ export function useAudioEvents(onAudioDeviceChanged?: (data: AudioDeviceChangedD
case 'microphone-state-changed': {
const micStateData = audioEvent.data as MicrophoneStateData;
setMicrophoneState(micStateData);
console.log('[AudioEvents] Microphone state changed:', micStateData);
// Microphone state changed
break;
}
@@ -255,7 +255,7 @@ export function useAudioEvents(onAudioDeviceChanged?: (data: AudioDeviceChangedD
case 'audio-device-changed': {
const deviceChangedData = audioEvent.data as AudioDeviceChangedData;
console.log('[AudioEvents] Audio device changed:', deviceChangedData);
// Audio device changed
if (onAudioDeviceChanged) {
onAudioDeviceChanged(deviceChangedData);
}

View File

@@ -104,8 +104,8 @@ export const useAudioLevel = (
// Use setInterval instead of requestAnimationFrame for more predictable timing
intervalRef.current = window.setInterval(updateLevel, updateInterval);
} catch (error) {
console.error('Failed to create audio level analyzer:', error);
} catch {
// Audio level analyzer creation failed - silently handle
setIsAnalyzing(false);
setAudioLevel(0);
}

View File

@@ -57,19 +57,14 @@ export function useMicrophone() {
// Cleanup function to stop microphone stream
const stopMicrophoneStream = useCallback(async () => {
console.log("stopMicrophoneStream called - cleaning up stream");
console.trace("stopMicrophoneStream call stack");
// Cleaning up microphone stream
if (microphoneStreamRef.current) {
console.log("Stopping microphone stream:", microphoneStreamRef.current.id);
microphoneStreamRef.current.getTracks().forEach(track => {
track.stop();
});
microphoneStreamRef.current = null;
setMicrophoneStream(null);
console.log("Microphone stream cleared from ref and store");
} else {
console.log("No microphone stream to stop");
}
if (microphoneSender && peerConnection) {
@@ -217,17 +212,7 @@ export function useMicrophone() {
audio: audioConstraints
});
console.log("Microphone stream created successfully:", {
streamId: stream.id,
audioTracks: stream.getAudioTracks().length,
videoTracks: stream.getVideoTracks().length,
audioTrackDetails: stream.getAudioTracks().map(track => ({
id: track.id,
label: track.label,
enabled: track.enabled,
readyState: track.readyState
}))
});
// Microphone stream created successfully
// Store the stream in both ref and store
microphoneStreamRef.current = stream;
@@ -471,7 +456,7 @@ export function useMicrophone() {
setIsStarting(false);
return { success: true };
} catch (error) {
console.error("Failed to start microphone:", error);
// Failed to start microphone
let micError: MicrophoneError;
if (error instanceof Error) {

View File

@@ -38,7 +38,7 @@ export function useUsbDeviceConfig() {
// Listen for audio device changes to update USB config in real-time
const handleAudioDeviceChanged = useCallback(() => {
console.log('[useUsbDeviceConfig] Audio device changed, refetching USB config');
// Audio device changed, refetching USB config
fetchUsbDeviceConfig();
}, [fetchUsbDeviceConfig]);