import { useEffect, useState } from "react";
import { MdGraphicEq, MdSignalWifi4Bar, MdError, MdMic } from "react-icons/md";
import { LuActivity, LuClock, LuHardDrive, LuSettings, LuCpu, LuMemoryStick } from "react-icons/lu";

import { AudioLevelMeter } from "@components/AudioLevelMeter";
import StatChart from "@components/StatChart";
import { cx } from "@/cva.config";
import { useMicrophone } from "@/hooks/useMicrophone";
import { useAudioLevel } from "@/hooks/useAudioLevel";
import { useAudioEvents } from "@/hooks/useAudioEvents";
import api from "@/api";

interface AudioMetrics {
  frames_received: number;
  frames_dropped: number;
  bytes_processed: number;
  last_frame_time: string;
  connection_drops: number;
  average_latency: string;
}

interface MicrophoneMetrics {
  frames_sent: number;
  frames_dropped: number;
  bytes_processed: number;
  last_frame_time: string;
  connection_drops: number;
  average_latency: string;
}

interface ProcessMetrics {
  cpu_percent: number;
  memory_percent: number;
  memory_rss: number;
  memory_vms: number;
  running: boolean;
}

interface AudioConfig {
  Quality: number;
  Bitrate: number;
  SampleRate: number;
  Channels: number;
  FrameSize: string;
}

const qualityLabels = {
  0: "Low",
  1: "Medium",
  2: "High",
  3: "Ultra",
};

// Format percentage values to 2 decimal places
function formatPercentage(value: number | null | undefined): string {
  if (value === null || value === undefined || isNaN(value)) {
    return "0.00%";
  }
  return `${value.toFixed(2)}%`;
}

// Format an RSS value in bytes as megabytes with 2 decimal places
function formatMemoryMB(rssBytes: number | null | undefined): string {
  if (rssBytes === null || rssBytes === undefined || isNaN(rssBytes)) {
    return "0.00 MB";
  }
  const mb = rssBytes / (1024 * 1024);
  return `${mb.toFixed(2)} MB`;
}

// Default system memory estimate in MB (will be replaced by the actual value from the backend)
const DEFAULT_SYSTEM_MEMORY_MB = 4096; // 4GB default

// Create chart array similar to connectionStats.tsx
function createChartArray<T, K extends keyof T>(
  stream: Map<number, T>,
  metric: K,
): { date: number; stat: T[K] | null }[] {
  const stat = Array.from(stream).map(([key, stats]) => {
    return { date: key, stat: stats[metric] };
  });

  // Sort the dates to ensure they are in chronological order
  const sortedStat = stat.map(x => x.date).sort((a, b) => a - b);

  // Determine the earliest statistic date
  const earliestStat = sortedStat[0];

  // Current time in seconds since the Unix epoch
  const now = Math.floor(Date.now() / 1000);

  // Determine the starting point for the chart data
  const firstChartDate = earliestStat ? Math.min(earliestStat, now - 120) : now - 120;

  // Generate the chart array for the range between 'firstChartDate' and 'now'
  return Array.from({ length: now - firstChartDate }, (_, i) => {
    const currentDate = firstChartDate + i;
    return {
      date: currentDate,
      // Use the statistic recorded for 'currentDate', or null if no sample exists for that second
      stat: stat.find(x => x.date === currentDate)?.stat ?? null,
    };
  });
}
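// Illustrative example (not executed; the sample values are made up): given two samples in a Map
// such as
//   new Map([[100, { cpu_percent: 12.5 }], [102, { cpu_percent: 14.0 }]])
// createChartArray(map, "cpu_percent") yields one point per second from
// min(earliest sample, now - 120) up to the current second, e.g.
//   [..., { date: 100, stat: 12.5 }, { date: 101, stat: null }, { date: 102, stat: 14.0 }, ...]
// Seconds without a sample are filled with null so StatChart can render the gaps explicitly.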
export default function AudioMetricsDashboard() {
  // System memory estimate in MB; refined once the backend reports the real total
  const [systemMemoryMB, setSystemMemoryMB] = useState<number>(DEFAULT_SYSTEM_MEMORY_MB);

  // Fallback state for when the WebSocket is not connected
  const [fallbackMetrics, setFallbackMetrics] = useState<AudioMetrics | null>(null);
  const [fallbackMicrophoneMetrics, setFallbackMicrophoneMetrics] =
    useState<MicrophoneMetrics | null>(null);
  const [fallbackConnected, setFallbackConnected] = useState(false);

  // Process metrics state (fallback for when the WebSocket is not connected)
  const [fallbackAudioProcessMetrics, setFallbackAudioProcessMetrics] =
    useState<ProcessMetrics | null>(null);
  const [fallbackMicrophoneProcessMetrics, setFallbackMicrophoneProcessMetrics] =
    useState<ProcessMetrics | null>(null);

  // Historical data for charts, stored in Maps keyed by Unix time (seconds) for better memory management
  const [audioCpuStats, setAudioCpuStats] = useState<Map<number, { cpu_percent: number | null }>>(
    new Map(),
  );
  const [audioMemoryStats, setAudioMemoryStats] = useState<
    Map<number, { memory_rss: number | null }>
  >(new Map());
  const [micCpuStats, setMicCpuStats] = useState<Map<number, { cpu_percent: number | null }>>(
    new Map(),
  );
  const [micMemoryStats, setMicMemoryStats] = useState<
    Map<number, { memory_rss: number | null }>
  >(new Map());

  // Configuration state (these don't change frequently, so we can load them once)
  const [config, setConfig] = useState<AudioConfig | null>(null);
  const [microphoneConfig, setMicrophoneConfig] = useState<AudioConfig | null>(null);

  const [lastUpdate, setLastUpdate] = useState(new Date());

  // Use WebSocket-based audio events for real-time updates
  const {
    audioMetrics,
    microphoneMetrics: wsMicrophoneMetrics,
    audioProcessMetrics: wsAudioProcessMetrics,
    microphoneProcessMetrics: wsMicrophoneProcessMetrics,
    isConnected: wsConnected,
  } = useAudioEvents();

  // Use WebSocket data when available, fall back to polling data otherwise
  const metrics = wsConnected && audioMetrics !== null ? audioMetrics : fallbackMetrics;
  const microphoneMetrics =
    wsConnected && wsMicrophoneMetrics !== null ? wsMicrophoneMetrics : fallbackMicrophoneMetrics;
  const audioProcessMetrics =
    wsConnected && wsAudioProcessMetrics !== null
      ? wsAudioProcessMetrics
      : fallbackAudioProcessMetrics;
  const microphoneProcessMetrics =
    wsConnected && wsMicrophoneProcessMetrics !== null
      ? wsMicrophoneProcessMetrics
      : fallbackMicrophoneProcessMetrics;
  const isConnected = wsConnected || fallbackConnected;

  // Microphone state for audio level monitoring
  const { isMicrophoneActive, isMicrophoneMuted, microphoneStream } = useMicrophone();
  const { audioLevel, isAnalyzing } = useAudioLevel(isMicrophoneActive ? microphoneStream : null, {
    enabled: isMicrophoneActive,
    updateInterval: 120,
  });

  // Fetch system memory information on component mount
  useEffect(() => {
    const fetchSystemMemory = async () => {
      try {
        const response = await api.GET("/system/memory");
        const data = await response.json();
        setSystemMemoryMB(data.total_memory_mb);
      } catch {
        // Failed to fetch system memory; keep using the default estimate
      }
    };
    fetchSystemMemory();
  }, []);

  // Update historical chart data when WebSocket audio process metrics are received
  useEffect(() => {
    if (wsConnected && wsAudioProcessMetrics && wsAudioProcessMetrics.running) {
      const now = Math.floor(Date.now() / 1000); // seconds, to match StatChart's time axis
      if (isNaN(now)) return;

      const cpuStat = isNaN(wsAudioProcessMetrics.cpu_percent)
        ? null
        : wsAudioProcessMetrics.cpu_percent;
      setAudioCpuStats(prev => {
        const newMap = new Map(prev);
        newMap.set(now, { cpu_percent: cpuStat });
        // Keep only the last 120 seconds of data for memory management
        const cutoff = now - 120;
        for (const [key] of newMap) {
          if (key < cutoff) newMap.delete(key);
        }
        return newMap;
      });

      setAudioMemoryStats(prev => {
        const newMap = new Map(prev);
        const memoryRss = isNaN(wsAudioProcessMetrics.memory_rss)
          ? null
          : wsAudioProcessMetrics.memory_rss;
        newMap.set(now, { memory_rss: memoryRss });
        // Keep only the last 120 seconds of data for memory management
        const cutoff = now - 120;
        for (const [key] of newMap) {
          if (key < cutoff) newMap.delete(key);
        }
        return newMap;
      });
    }
  }, [wsConnected, wsAudioProcessMetrics]);

  // Update historical chart data when WebSocket microphone process metrics are received
  useEffect(() => {
    if (wsConnected && wsMicrophoneProcessMetrics) {
      const now = Math.floor(Date.now() / 1000); // seconds, to match StatChart's time axis
      if (isNaN(now)) return;

      const cpuStat = isNaN(wsMicrophoneProcessMetrics.cpu_percent)
        ? null
        : wsMicrophoneProcessMetrics.cpu_percent;
      setMicCpuStats(prev => {
        const newMap = new Map(prev);
        newMap.set(now, { cpu_percent: cpuStat });
        // Keep only the last 120 seconds of data for memory management
        const cutoff = now - 120;
        for (const [key] of newMap) {
          if (key < cutoff) newMap.delete(key);
        }
        return newMap;
      });

      setMicMemoryStats(prev => {
        const newMap = new Map(prev);
        const memoryRss = isNaN(wsMicrophoneProcessMetrics.memory_rss)
          ? null
          : wsMicrophoneProcessMetrics.memory_rss;
        newMap.set(now, { memory_rss: memoryRss });
        // Keep only the last 120 seconds of data for memory management
        const cutoff = now - 120;
        for (const [key] of newMap) {
          if (key < cutoff) newMap.delete(key);
        }
        return newMap;
      });
    }
  }, [wsConnected, wsMicrophoneProcessMetrics]);
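  // Data flow note: while the audio-events WebSocket is connected, the effects above feed the
  // chart Maps from pushed process metrics. When it is not connected, the effect below polls the
  // HTTP endpoints once per second instead. For illustration only, a successful GET /audio/metrics
  // response is expected to carry the fields of the AudioMetrics interface; the concrete values in
  // this sketch are made up:
  //
  //   {
  //     "frames_received": 48000,
  //     "frames_dropped": 12,
  //     "bytes_processed": 1843200,
  //     "last_frame_time": "2024-01-01T12:00:00Z",
  //     "connection_drops": 0,
  //     "average_latency": "12.5ms"
  //   }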
  useEffect(() => {
    // Load initial configuration (only once)
    loadAudioConfig();

    // Set up fallback polling only when the WebSocket is not connected
    if (!wsConnected) {
      loadAudioData();
      const interval = setInterval(loadAudioData, 1000);
      return () => clearInterval(interval);
    }
  }, [wsConnected]);

  const loadAudioConfig = async () => {
    try {
      // Load audio output config
      const configResp = await api.GET("/audio/quality");
      if (configResp.ok) {
        const configData = await configResp.json();
        setConfig(configData.current);
      }

      // Load microphone config
      try {
        const micConfigResp = await api.GET("/microphone/quality");
        if (micConfigResp.ok) {
          const micConfigData = await micConfigResp.json();
          setMicrophoneConfig(micConfigData.current);
        }
      } catch {
        // Microphone config not available
      }
    } catch (error) {
      console.error("Failed to load audio config:", error);
    }
  };

  const loadAudioData = async () => {
    try {
      // Load audio output metrics
      const metricsResp = await api.GET("/audio/metrics");
      if (metricsResp.ok) {
        const metricsData = await metricsResp.json();
        setFallbackMetrics(metricsData);
        // Consider connected if the API call succeeds, regardless of frame count
        setFallbackConnected(true);
        setLastUpdate(new Date());
      } else {
        setFallbackConnected(false);
      }

      // Load audio process metrics
      try {
        const audioProcessResp = await api.GET("/audio/process-metrics");
        if (audioProcessResp.ok) {
          const audioProcessData = await audioProcessResp.json();
          setFallbackAudioProcessMetrics(audioProcessData);

          // Update historical data for the charts (keep the last 120 seconds)
          if (audioProcessData.running) {
            const now = Math.floor(Date.now() / 1000); // seconds, to match StatChart's time axis
            if (isNaN(now)) return;

            const cpuStat = isNaN(audioProcessData.cpu_percent)
              ? null
              : audioProcessData.cpu_percent;
            const memoryRss = isNaN(audioProcessData.memory_rss)
              ? null
              : audioProcessData.memory_rss;

            setAudioCpuStats(prev => {
              const newMap = new Map(prev);
              newMap.set(now, { cpu_percent: cpuStat });
              const cutoff = now - 120;
              for (const [key] of newMap) {
                if (key < cutoff) newMap.delete(key);
              }
              return newMap;
            });

            setAudioMemoryStats(prev => {
              const newMap = new Map(prev);
              newMap.set(now, { memory_rss: memoryRss });
              const cutoff = now - 120;
              for (const [key] of newMap) {
                if (key < cutoff) newMap.delete(key);
              }
              return newMap;
            });
          }
        }
      } catch {
        // Audio process metrics not available
      }

      // Load microphone metrics
      try {
        const micResp = await api.GET("/microphone/metrics");
        if (micResp.ok) {
          const micData = await micResp.json();
          setFallbackMicrophoneMetrics(micData);
        }
      } catch {
        // Microphone metrics might not be available; that's okay
      }

      // Load microphone process metrics
      try {
        const micProcessResp = await api.GET("/microphone/process-metrics");
        if (micProcessResp.ok) {
          const micProcessData = await micProcessResp.json();
          setFallbackMicrophoneProcessMetrics(micProcessData);

          // Update historical data for the charts (keep the last 120 seconds)
          const now = Math.floor(Date.now() / 1000); // seconds, to match StatChart's time axis
          if (isNaN(now)) return;

          const cpuStat = isNaN(micProcessData.cpu_percent) ? null : micProcessData.cpu_percent;
          const memoryRss = isNaN(micProcessData.memory_rss) ? null : micProcessData.memory_rss;

          setMicCpuStats(prev => {
            const newMap = new Map(prev);
            newMap.set(now, { cpu_percent: cpuStat });
            const cutoff = now - 120;
            for (const [key] of newMap) {
              if (key < cutoff) newMap.delete(key);
            }
            return newMap;
          });

          setMicMemoryStats(prev => {
            const newMap = new Map(prev);
            newMap.set(now, { memory_rss: memoryRss });
            const cutoff = now - 120;
            for (const [key] of newMap) {
              if (key < cutoff) newMap.delete(key);
            }
            return newMap;
          });
        }
      } catch {
        // Microphone process metrics not available
      }
    } catch (error) {
      console.error("Failed to load audio data:", error);
      setFallbackConnected(false);
    }
  };

  const formatBytes = (bytes: number) => {
    if (bytes === 0) return "0 B";
    const k = 1024;
    const sizes = ["B", "KB", "MB", "GB"];
    const i = Math.floor(Math.log(bytes) / Math.log(k));
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + " " + sizes[i];
  };

  const formatNumber = (num: number) => {
    return new Intl.NumberFormat().format(num);
  };

  const getDropRate = () => {
    if (!metrics || metrics.frames_received === 0) return 0;
    return (metrics.frames_dropped / metrics.frames_received) * 100;
  };

  const getQualityColor = (quality: number) => {
    switch (quality) {
      case 0:
        return "text-yellow-600 dark:text-yellow-400";
      case 1:
        return "text-blue-600 dark:text-blue-400";
      case 2:
        return "text-green-600 dark:text-green-400";
      case 3:
        return "text-purple-600 dark:text-purple-400";
      default:
        return "text-slate-600 dark:text-slate-400";
    }
  };

  return (
    <div className="space-y-4">
      {/* Header */}
      <div className="flex items-center justify-between">
        <div className="flex items-center gap-2">
          <MdGraphicEq className="h-5 w-5 text-blue-600 dark:text-blue-400" />
          <span className="text-lg font-semibold text-slate-900 dark:text-slate-100">Audio Metrics</span>
        </div>
        <div className="flex items-center gap-1 text-xs text-slate-500 dark:text-slate-400">
          {isConnected ? (
            <MdSignalWifi4Bar className="h-4 w-4 text-green-500" />
          ) : (
            <MdError className="h-4 w-4 text-red-500" />
          )}
          <span>{isConnected ? "Active" : "Inactive"}</span>
        </div>
      </div>
      {/* Current Configuration */}
      <div className="grid grid-cols-1 gap-3 md:grid-cols-2">
        {config && (
          <div className="rounded-lg border border-slate-200 p-3 dark:border-slate-700">
            <div className="mb-2 flex items-center gap-2">
              <LuSettings className="h-4 w-4 text-slate-600 dark:text-slate-400" />
              <span className="text-sm font-medium text-slate-900 dark:text-slate-100">Audio Output Config</span>
            </div>
            <div className="space-y-1 text-xs text-slate-600 dark:text-slate-400">
              <div>
                Quality:{" "}
                <span className={getQualityColor(config.Quality)}>
                  {qualityLabels[config.Quality as keyof typeof qualityLabels]}
                </span>
              </div>
              <div>Bitrate: {config.Bitrate}kbps</div>
              <div>Sample Rate: {config.SampleRate}Hz</div>
              <div>Channels: {config.Channels}</div>
            </div>
          </div>
        )}
        {microphoneConfig && (
          <div className="rounded-lg border border-slate-200 p-3 dark:border-slate-700">
            <div className="mb-2 flex items-center gap-2">
              <MdMic className="h-4 w-4 text-slate-600 dark:text-slate-400" />
              <span className="text-sm font-medium text-slate-900 dark:text-slate-100">Microphone Input Config</span>
            </div>
            <div className="space-y-1 text-xs text-slate-600 dark:text-slate-400">
              <div>
                Quality:{" "}
                <span className={getQualityColor(microphoneConfig.Quality)}>
                  {qualityLabels[microphoneConfig.Quality as keyof typeof qualityLabels]}
                </span>
              </div>
              <div>Bitrate: {microphoneConfig.Bitrate}kbps</div>
              <div>Sample Rate: {microphoneConfig.SampleRate}Hz</div>
              <div>Channels: {microphoneConfig.Channels}</div>
            </div>
          </div>
        )}
      </div>
      {/* Subprocess Resource Usage - Histogram View */}
      <div className="space-y-3">
        {/* Audio Output Subprocess */}
        {audioProcessMetrics && (
          <div className="rounded-lg border border-slate-200 p-3 dark:border-slate-700">
            <div className="mb-2 flex items-center gap-2">
              <LuActivity className="h-4 w-4 text-blue-600 dark:text-blue-400" />
              <span className="text-sm font-medium text-slate-900 dark:text-slate-100">Audio Output Process</span>
            </div>
            <div className="grid grid-cols-1 gap-3 md:grid-cols-2">
              <div>
                <div className="mb-1 flex items-center gap-1 text-xs text-slate-500 dark:text-slate-400">
                  <LuCpu className="h-3 w-3" />
                  CPU Usage
                </div>
                <div className="h-24">
                  <StatChart data={createChartArray(audioCpuStats, "cpu_percent")} unit="%" domain={[0, 100]} />
                </div>
              </div>
              <div>
                <div className="mb-1 flex items-center gap-1 text-xs text-slate-500 dark:text-slate-400">
                  <LuMemoryStick className="h-3 w-3" />
                  Memory Usage
                </div>
                <div className="h-24">
                  <StatChart
                    data={createChartArray(audioMemoryStats, "memory_rss").map(item => ({
                      date: item.date,
                      stat: item.stat ? item.stat / (1024 * 1024) : null, // Convert bytes to MB
                    }))}
                    unit="MB"
                    domain={[0, systemMemoryMB]}
                  />
                </div>
              </div>
            </div>
            <div className="mt-2 grid grid-cols-2 gap-3 text-center">
              <div>
                <div className="text-sm font-semibold text-slate-900 dark:text-slate-100">
                  {formatPercentage(audioProcessMetrics.cpu_percent)}
                </div>
                <div className="text-xs text-slate-500 dark:text-slate-400">CPU</div>
              </div>
              <div>
                <div className="text-sm font-semibold text-slate-900 dark:text-slate-100">
                  {formatMemoryMB(audioProcessMetrics.memory_rss)}
                </div>
                <div className="text-xs text-slate-500 dark:text-slate-400">Memory</div>
              </div>
            </div>
          </div>
        )}

        {/* Microphone Input Subprocess */}
        {microphoneProcessMetrics && (
          <div className="rounded-lg border border-slate-200 p-3 dark:border-slate-700">
            <div className="mb-2 flex items-center gap-2">
              <MdMic className="h-4 w-4 text-green-600 dark:text-green-400" />
              <span className="text-sm font-medium text-slate-900 dark:text-slate-100">Microphone Input Process</span>
            </div>
            <div className="grid grid-cols-1 gap-3 md:grid-cols-2">
              <div>
                <div className="mb-1 flex items-center gap-1 text-xs text-slate-500 dark:text-slate-400">
                  <LuCpu className="h-3 w-3" />
                  CPU Usage
                </div>
                <div className="h-24">
                  <StatChart data={createChartArray(micCpuStats, "cpu_percent")} unit="%" domain={[0, 100]} />
                </div>
              </div>
              <div>
                <div className="mb-1 flex items-center gap-1 text-xs text-slate-500 dark:text-slate-400">
                  <LuMemoryStick className="h-3 w-3" />
                  Memory Usage
                </div>
                <div className="h-24">
                  <StatChart
                    data={createChartArray(micMemoryStats, "memory_rss").map(item => ({
                      date: item.date,
                      stat: item.stat ? item.stat / (1024 * 1024) : null, // Convert bytes to MB
                    }))}
                    unit="MB"
                    domain={[0, systemMemoryMB]}
                  />
                </div>
              </div>
            </div>
            <div className="mt-2 grid grid-cols-2 gap-3 text-center">
              <div>
                <div className="text-sm font-semibold text-slate-900 dark:text-slate-100">
                  {formatPercentage(microphoneProcessMetrics.cpu_percent)}
                </div>
                <div className="text-xs text-slate-500 dark:text-slate-400">CPU</div>
              </div>
              <div>
                <div className="text-sm font-semibold text-slate-900 dark:text-slate-100">
                  {formatMemoryMB(microphoneProcessMetrics.memory_rss)}
                </div>
                <div className="text-xs text-slate-500 dark:text-slate-400">Memory</div>
              </div>
            </div>
          </div>
        )}
      </div>
      {/* Performance Metrics */}
      {metrics && (
        <div className="space-y-3">
          {/* Audio Output Frames */}
          <div className="rounded-lg border border-slate-200 p-3 dark:border-slate-700">
            <div className="mb-2 flex items-center gap-2">
              <LuActivity className="h-4 w-4 text-blue-600 dark:text-blue-400" />
              <span className="text-sm font-medium text-slate-900 dark:text-slate-100">Audio Output</span>
            </div>
            <div className="grid grid-cols-2 gap-3 text-center">
              <div>
                <div className="text-lg font-semibold text-slate-900 dark:text-slate-100">
                  {formatNumber(metrics.frames_received)}
                </div>
                <div className="text-xs text-slate-500 dark:text-slate-400">Frames Received</div>
              </div>
              <div>
                <div
                  className={cx(
                    "text-lg font-semibold",
                    metrics.frames_dropped > 0
                      ? "text-red-600 dark:text-red-400"
                      : "text-green-600 dark:text-green-400",
                  )}
                >
                  {formatNumber(metrics.frames_dropped)}
                </div>
                <div className="text-xs text-slate-500 dark:text-slate-400">Frames Dropped</div>
              </div>
            </div>

            {/* Drop Rate */}
            <div className="mt-3">
              <div className="flex items-center justify-between text-xs">
                <span className="text-slate-500 dark:text-slate-400">Drop Rate</span>
                <span
                  className={cx(
                    "font-medium",
                    getDropRate() > 5
                      ? "text-red-600 dark:text-red-400"
                      : getDropRate() > 1
                        ? "text-yellow-600 dark:text-yellow-400"
                        : "text-green-600 dark:text-green-400",
                  )}
                >
                  {getDropRate().toFixed(2)}%
                </span>
              </div>
              <div className="mt-1 h-2 w-full rounded-full bg-slate-200 dark:bg-slate-700">
                <div
                  className={cx(
                    "h-2 rounded-full transition-all",
                    getDropRate() > 5
                      ? "bg-red-500"
                      : getDropRate() > 1
                        ? "bg-yellow-500"
                        : "bg-green-500",
                  )}
                  style={{ width: `${Math.min(getDropRate(), 100)}%` }}
                />
              </div>
            </div>
          </div>
          {/* Microphone Input Metrics */}
          {microphoneMetrics && (
            <div className="rounded-lg border border-slate-200 p-3 dark:border-slate-700">
              <div className="mb-2 flex items-center gap-2">
                <MdMic className="h-4 w-4 text-green-600 dark:text-green-400" />
                <span className="text-sm font-medium text-slate-900 dark:text-slate-100">Microphone Input</span>
              </div>
              <div className="grid grid-cols-2 gap-3 text-center">
                <div>
                  <div className="text-lg font-semibold text-slate-900 dark:text-slate-100">
                    {formatNumber(microphoneMetrics.frames_sent)}
                  </div>
                  <div className="text-xs text-slate-500 dark:text-slate-400">Frames Sent</div>
                </div>
                <div>
                  <div
                    className={cx(
                      "text-lg font-semibold",
                      microphoneMetrics.frames_dropped > 0
                        ? "text-red-600 dark:text-red-400"
                        : "text-green-600 dark:text-green-400",
                    )}
                  >
                    {formatNumber(microphoneMetrics.frames_dropped)}
                  </div>
                  <div className="text-xs text-slate-500 dark:text-slate-400">Frames Dropped</div>
                </div>
              </div>

              {/* Microphone Drop Rate */}
              <div className="mt-3">
                <div className="flex items-center justify-between text-xs">
                  <span className="text-slate-500 dark:text-slate-400">Drop Rate</span>
                  <span
                    className={cx(
                      "font-medium",
                      (microphoneMetrics.frames_sent > 0
                        ? (microphoneMetrics.frames_dropped / microphoneMetrics.frames_sent) * 100
                        : 0) > 5
                        ? "text-red-600 dark:text-red-400"
                        : (microphoneMetrics.frames_sent > 0
                              ? (microphoneMetrics.frames_dropped / microphoneMetrics.frames_sent) * 100
                              : 0) > 1
                          ? "text-yellow-600 dark:text-yellow-400"
                          : "text-green-600 dark:text-green-400",
                    )}
                  >
                    {microphoneMetrics.frames_sent > 0
                      ? ((microphoneMetrics.frames_dropped / microphoneMetrics.frames_sent) * 100).toFixed(2)
                      : "0.00"}%
                  </span>
                </div>
                <div className="mt-1 h-2 w-full rounded-full bg-slate-200 dark:bg-slate-700">
                  <div
                    className={cx(
                      "h-2 rounded-full transition-all",
                      (microphoneMetrics.frames_sent > 0
                        ? (microphoneMetrics.frames_dropped / microphoneMetrics.frames_sent) * 100
                        : 0) > 5
                        ? "bg-red-500"
                        : (microphoneMetrics.frames_sent > 0
                              ? (microphoneMetrics.frames_dropped / microphoneMetrics.frames_sent) * 100
                              : 0) > 1
                          ? "bg-yellow-500"
                          : "bg-green-500",
                    )}
                    style={{
                      width: `${Math.min(
                        microphoneMetrics.frames_sent > 0
                          ? (microphoneMetrics.frames_dropped / microphoneMetrics.frames_sent) * 100
                          : 0,
                        100,
                      )}%`,
                    }}
                  />
                </div>
              </div>
              {/* Microphone Audio Level */}
              {isMicrophoneActive && (
                <div className="mt-3">
                  <AudioLevelMeter level={audioLevel} isActive={isAnalyzing && !isMicrophoneMuted} />
                </div>
              )}

              {/* Microphone Connection Health */}
              <div className="mt-3 space-y-1 text-xs">
                <div className="font-medium text-slate-900 dark:text-slate-100">Connection Health</div>
                <div className="flex items-center justify-between">
                  <span className="text-slate-500 dark:text-slate-400">Connection Drops:</span>
                  <span
                    className={cx(
                      "font-medium",
                      microphoneMetrics.connection_drops > 0
                        ? "text-red-600 dark:text-red-400"
                        : "text-green-600 dark:text-green-400",
                    )}
                  >
                    {formatNumber(microphoneMetrics.connection_drops)}
                  </span>
                </div>
                {microphoneMetrics.average_latency && (
                  <div className="flex items-center justify-between">
                    <span className="text-slate-500 dark:text-slate-400">Avg Latency:</span>
                    <span className="text-slate-900 dark:text-slate-100">{microphoneMetrics.average_latency}</span>
                  </div>
                )}
              </div>
            </div>
          )}

          {/* Data Transfer */}
          <div className="rounded-lg border border-slate-200 p-3 dark:border-slate-700">
            <div className="mb-2 flex items-center gap-2">
              <LuHardDrive className="h-4 w-4 text-purple-600 dark:text-purple-400" />
              <span className="text-sm font-medium text-slate-900 dark:text-slate-100">Data Transfer</span>
            </div>
            <div className="text-center">
              <div className="text-lg font-semibold text-slate-900 dark:text-slate-100">
                {formatBytes(metrics.bytes_processed)}
              </div>
              <div className="text-xs text-slate-500 dark:text-slate-400">Total Processed</div>
            </div>
          </div>

          {/* Connection Health */}
          <div className="rounded-lg border border-slate-200 p-3 dark:border-slate-700">
            <div className="mb-2 flex items-center gap-2">
              <MdSignalWifi4Bar className="h-4 w-4 text-green-600 dark:text-green-400" />
              <span className="text-sm font-medium text-slate-900 dark:text-slate-100">Connection Health</span>
            </div>
            <div className="space-y-1 text-xs">
              <div className="flex items-center justify-between">
                <span className="text-slate-500 dark:text-slate-400">Connection Drops:</span>
                <span
                  className={cx(
                    "font-medium",
                    metrics.connection_drops > 0
                      ? "text-red-600 dark:text-red-400"
                      : "text-green-600 dark:text-green-400",
                  )}
                >
                  {formatNumber(metrics.connection_drops)}
                </span>
              </div>
              {metrics.average_latency && (
                <div className="flex items-center justify-between">
                  <span className="text-slate-500 dark:text-slate-400">Avg Latency:</span>
                  <span className="text-slate-900 dark:text-slate-100">{metrics.average_latency}</span>
                </div>
              )}
            </div>
          </div>
        </div>
      )}

      {/* Last Update */}
      <div className="flex items-center justify-center gap-1 text-xs text-slate-400 dark:text-slate-500">
        <LuClock className="h-3 w-3" />
        <span>Last updated: {lastUpdate.toLocaleTimeString()}</span>
      </div>

      {/* No Data State */}
      {!metrics && (
        <div className="flex flex-col items-center justify-center py-8 text-center">
          <MdError className="mb-2 h-8 w-8 text-slate-400" />
          <div className="text-sm font-medium text-slate-900 dark:text-slate-100">No Audio Data</div>
          <div className="mt-1 text-xs text-slate-500 dark:text-slate-400">
            Audio metrics will appear when audio streaming is active.
          </div>
        </div>
      )}
    </div>
  );
}
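// Example usage (illustrative; the import path is an assumption based on the "@components" alias
// used above):
//
//   import AudioMetricsDashboard from "@components/AudioMetricsDashboard";
//
//   // The dashboard takes no props; it subscribes to audio events itself and falls back to
//   // HTTP polling when the WebSocket is unavailable, so it can be dropped into any
//   // settings or sidebar panel:
//   <AudioMetricsDashboard />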