import { useEffect, useState } from "react";
import { MdGraphicEq, MdSignalWifi4Bar, MdError, MdMic } from "react-icons/md";
import { LuActivity, LuClock, LuHardDrive, LuSettings, LuCpu, LuMemoryStick } from "react-icons/lu";

import { AudioLevelMeter } from "@components/AudioLevelMeter";
import { cx } from "@/cva.config";
import { useMicrophone } from "@/hooks/useMicrophone";
import { useAudioLevel } from "@/hooks/useAudioLevel";
import { useAudioEvents } from "@/hooks/useAudioEvents";
import api from "@/api";

interface AudioMetrics {
  frames_received: number;
  frames_dropped: number;
  bytes_processed: number;
  last_frame_time: string;
  connection_drops: number;
  average_latency: string;
}

interface MicrophoneMetrics {
  frames_sent: number;
  frames_dropped: number;
  bytes_processed: number;
  last_frame_time: string;
  connection_drops: number;
  average_latency: string;
}

interface ProcessMetrics {
  cpu_percent: number;
  memory_percent: number;
  memory_rss: number;
  memory_vms: number;
  running: boolean;
}

interface AudioConfig {
  Quality: number;
  Bitrate: number;
  SampleRate: number;
  Channels: number;
  FrameSize: string;
}

const qualityLabels = {
  0: "Low",
  1: "Medium",
  2: "High",
  3: "Ultra",
};

export default function AudioMetricsDashboard() {
  // Use WebSocket-based audio events for real-time updates
  const {
    audioMetrics,
    microphoneMetrics: wsMicrophoneMetrics,
    isConnected: wsConnected,
  } = useAudioEvents();

  // Fallback state for when WebSocket is not connected
  const [fallbackMetrics, setFallbackMetrics] = useState<AudioMetrics | null>(null);
  const [fallbackMicrophoneMetrics, setFallbackMicrophoneMetrics] =
    useState<MicrophoneMetrics | null>(null);
  const [fallbackConnected, setFallbackConnected] = useState(false);

  // Process metrics state
  const [audioProcessMetrics, setAudioProcessMetrics] = useState<ProcessMetrics | null>(null);
  const [microphoneProcessMetrics, setMicrophoneProcessMetrics] =
    useState<ProcessMetrics | null>(null);

  // Historical data for histograms (last 60 data points, ~1 minute at 1s intervals)
  const [audioCpuHistory, setAudioCpuHistory] = useState<number[]>([]);
  const [audioMemoryHistory, setAudioMemoryHistory] = useState<number[]>([]);
  const [micCpuHistory, setMicCpuHistory] = useState<number[]>([]);
  const [micMemoryHistory, setMicMemoryHistory] = useState<number[]>([]);

  // Configuration state (these don't change frequently, so we can load them once)
  const [config, setConfig] = useState<AudioConfig | null>(null);
  const [microphoneConfig, setMicrophoneConfig] = useState<AudioConfig | null>(null);
  const [lastUpdate, setLastUpdate] = useState(new Date());

  // Use WebSocket data when available, fallback to polling data otherwise
  const metrics = wsConnected && audioMetrics !== null ? audioMetrics : fallbackMetrics;
  const microphoneMetrics =
    wsConnected && wsMicrophoneMetrics !== null ? wsMicrophoneMetrics : fallbackMicrophoneMetrics;
  const isConnected = wsConnected ? wsConnected : fallbackConnected;

  // Microphone state for audio level monitoring
  const { isMicrophoneActive, isMicrophoneMuted, microphoneStream } = useMicrophone();
  const { audioLevel, isAnalyzing } = useAudioLevel(
    isMicrophoneActive ? microphoneStream : null,
    { enabled: isMicrophoneActive, updateInterval: 120 },
  );
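  // Data flow: while the audio-events WebSocket is connected, metrics arrive as
  // pushed events from useAudioEvents(); while it is down, the effect below falls
  // back to polling the REST endpoints once per second. CPU/memory histories are
  // capped at the most recent 60 samples either way.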
  useEffect(() => {
    // Load initial configuration (only once)
    loadAudioConfig();

    // Set up fallback polling only when WebSocket is not connected
    if (!wsConnected) {
      loadAudioData();
      const interval = setInterval(loadAudioData, 1000);
      return () => clearInterval(interval);
    }
  }, [wsConnected]);

  const loadAudioConfig = async () => {
    try {
      // Load config
      const configResp = await api.GET("/audio/quality");
      if (configResp.ok) {
        const configData = await configResp.json();
        setConfig(configData.current);
      }

      // Load microphone config
      try {
        const micConfigResp = await api.GET("/microphone/quality");
        if (micConfigResp.ok) {
          const micConfigData = await micConfigResp.json();
          setMicrophoneConfig(micConfigData.current);
        }
      } catch (micConfigError) {
        console.debug("Microphone config not available:", micConfigError);
      }
    } catch (error) {
      console.error("Failed to load audio config:", error);
    }
  };

  const loadAudioData = async () => {
    try {
      // Load metrics
      const metricsResp = await api.GET("/audio/metrics");
      if (metricsResp.ok) {
        const metricsData = await metricsResp.json();
        setFallbackMetrics(metricsData);
        // Consider connected if API call succeeds, regardless of frame count
        setFallbackConnected(true);
        setLastUpdate(new Date());
      } else {
        setFallbackConnected(false);
      }

      // Load audio process metrics
      try {
        const audioProcessResp = await api.GET("/audio/process-metrics");
        if (audioProcessResp.ok) {
          const audioProcessData = await audioProcessResp.json();
          setAudioProcessMetrics(audioProcessData);

          // Update historical data for histograms (keep last 60 points)
          if (audioProcessData.running) {
            setAudioCpuHistory(prev => {
              const newHistory = [...prev, audioProcessData.cpu_percent];
              return newHistory.slice(-60); // Keep last 60 data points
            });
            setAudioMemoryHistory(prev => {
              const newHistory = [...prev, audioProcessData.memory_percent];
              return newHistory.slice(-60);
            });
          }
        }
      } catch (audioProcessError) {
        console.debug("Audio process metrics not available:", audioProcessError);
      }

      // Load microphone metrics
      try {
        const micResp = await api.GET("/microphone/metrics");
        if (micResp.ok) {
          const micData = await micResp.json();
          setFallbackMicrophoneMetrics(micData);
        }
      } catch (micError) {
        // Microphone metrics might not be available, that's okay
        console.debug("Microphone metrics not available:", micError);
      }

      // Load microphone process metrics
      try {
        const micProcessResp = await api.GET("/microphone/process-metrics");
        if (micProcessResp.ok) {
          const micProcessData = await micProcessResp.json();
          setMicrophoneProcessMetrics(micProcessData);

          // Update historical data for histograms (keep last 60 points)
          if (micProcessData.running) {
            setMicCpuHistory(prev => {
              const newHistory = [...prev, micProcessData.cpu_percent];
              return newHistory.slice(-60); // Keep last 60 data points
            });
            setMicMemoryHistory(prev => {
              const newHistory = [...prev, micProcessData.memory_percent];
              return newHistory.slice(-60);
            });
          }
        }
      } catch (micProcessError) {
        console.debug("Microphone process metrics not available:", micProcessError);
      }
    } catch (error) {
      console.error("Failed to load audio data:", error);
      setFallbackConnected(false);
    }
  };

  const formatBytes = (bytes: number) => {
    if (bytes === 0) return "0 B";
    const k = 1024;
    const sizes = ["B", "KB", "MB", "GB"];
    const i = Math.floor(Math.log(bytes) / Math.log(k));
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + " " + sizes[i];
  };

  const formatNumber = (num: number) => {
    return new Intl.NumberFormat().format(num);
  };
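  // Illustrative only: the fallback poller above assumes /audio/metrics returns a
  // JSON body matching the AudioMetrics interface, e.g. (example values, not real
  // output):
  // {
  //   "frames_received": 120034,
  //   "frames_dropped": 12,
  //   "bytes_processed": 20482048,
  //   "last_frame_time": "2024-01-01T12:00:00Z",
  //   "connection_drops": 0,
  //   "average_latency": "12ms"
  // }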
  const getDropRate = () => {
    if (!metrics || metrics.frames_received === 0) return 0;
    return (metrics.frames_dropped / metrics.frames_received) * 100;
  };

  const formatMemory = (bytes: number) => {
    if (bytes === 0) return "0 MB";
    const mb = bytes / (1024 * 1024);
    if (mb < 1024) {
      return `${mb.toFixed(1)} MB`;
    }
    const gb = mb / 1024;
    return `${gb.toFixed(2)} GB`;
  };

  const getQualityColor = (quality: number) => {
    switch (quality) {
      case 0:
        return "text-yellow-600 dark:text-yellow-400";
      case 1:
        return "text-blue-600 dark:text-blue-400";
      case 2:
        return "text-green-600 dark:text-green-400";
      case 3:
        return "text-purple-600 dark:text-purple-400";
      default:
        return "text-slate-600 dark:text-slate-400";
    }
  };

  // Histogram component for displaying historical data
  const Histogram = ({
    data,
    title,
    unit,
    color,
  }: {
    data: number[];
    title: string;
    unit: string;
    color: string;
  }) => {
    if (data.length === 0) return null;

    const maxValue = Math.max(...data, 1); // Avoid division by zero
    const minValue = Math.min(...data);
    const range = maxValue - minValue;

    return (
{title} {data.length > 0 ? `${data[data.length - 1].toFixed(1)}${unit}` : `0${unit}`}
{data.slice(-30).map((value, index) => { // Show last 30 points const height = range > 0 ? ((value - minValue) / range) * 100 : 0; return (
); })}
{minValue.toFixed(1)}{unit} {maxValue.toFixed(1)}{unit}
); }; return (
{/* Header */}

Audio Metrics

{isConnected ? "Active" : "Inactive"}
{/* Current Configuration */}
{config && (
Audio Output Config
Quality: {qualityLabels[config.Quality as keyof typeof qualityLabels]}
Bitrate: {config.Bitrate}kbps
Sample Rate: {config.SampleRate}Hz
Channels: {config.Channels}
)} {microphoneConfig && (
Microphone Input Config
Quality: {qualityLabels[microphoneConfig.Quality as keyof typeof qualityLabels]}
Bitrate: {microphoneConfig.Bitrate}kbps
Sample Rate: {microphoneConfig.SampleRate}Hz
Channels: {microphoneConfig.Channels}
)}
{/* Subprocess Resource Usage - Histogram View */}
{/* Audio Output Subprocess */} {audioProcessMetrics && (
Audio Output Process
{formatMemory(audioProcessMetrics.memory_rss)}
RSS
{formatMemory(audioProcessMetrics.memory_vms)}
VMS
)} {/* Microphone Input Subprocess */} {microphoneProcessMetrics && (
Microphone Input Process
{formatMemory(microphoneProcessMetrics.memory_rss)}
RSS
{formatMemory(microphoneProcessMetrics.memory_vms)}
VMS
)}
{/* Performance Metrics */} {metrics && (
{/* Audio Output Frames */}
Audio Output
{formatNumber(metrics.frames_received)}
Frames Received
0 ? "text-red-600 dark:text-red-400" : "text-green-600 dark:text-green-400" )}> {formatNumber(metrics.frames_dropped)}
Frames Dropped
{/* Drop Rate */}
Drop Rate 5 ? "text-red-600 dark:text-red-400" : getDropRate() > 1 ? "text-yellow-600 dark:text-yellow-400" : "text-green-600 dark:text-green-400" )}> {getDropRate().toFixed(2)}%
5 ? "bg-red-500" : getDropRate() > 1 ? "bg-yellow-500" : "bg-green-500" )} style={{ width: `${Math.min(getDropRate(), 100)}%` }} />
{/* Microphone Input Metrics */} {microphoneMetrics && (
Microphone Input
{formatNumber(microphoneMetrics.frames_sent)}
Frames Sent
0 ? "text-red-600 dark:text-red-400" : "text-green-600 dark:text-green-400" )}> {formatNumber(microphoneMetrics.frames_dropped)}
Frames Dropped
{/* Microphone Drop Rate */}
Drop Rate 0 ? (microphoneMetrics.frames_dropped / microphoneMetrics.frames_sent) * 100 : 0) > 5 ? "text-red-600 dark:text-red-400" : (microphoneMetrics.frames_sent > 0 ? (microphoneMetrics.frames_dropped / microphoneMetrics.frames_sent) * 100 : 0) > 1 ? "text-yellow-600 dark:text-yellow-400" : "text-green-600 dark:text-green-400" )}> {microphoneMetrics.frames_sent > 0 ? ((microphoneMetrics.frames_dropped / microphoneMetrics.frames_sent) * 100).toFixed(2) : "0.00"}%
0 ? (microphoneMetrics.frames_dropped / microphoneMetrics.frames_sent) * 100 : 0) > 5 ? "bg-red-500" : (microphoneMetrics.frames_sent > 0 ? (microphoneMetrics.frames_dropped / microphoneMetrics.frames_sent) * 100 : 0) > 1 ? "bg-yellow-500" : "bg-green-500" )} style={{ width: `${Math.min(microphoneMetrics.frames_sent > 0 ? (microphoneMetrics.frames_dropped / microphoneMetrics.frames_sent) * 100 : 0, 100)}%` }} />
{/* Microphone Audio Level */} {isMicrophoneActive && (
)} {/* Microphone Connection Health */}
Connection Health
Connection Drops: 0 ? "text-red-600 dark:text-red-400" : "text-green-600 dark:text-green-400" )}> {formatNumber(microphoneMetrics.connection_drops)}
{microphoneMetrics.average_latency && (
Avg Latency: {microphoneMetrics.average_latency}
)}
)} {/* Data Transfer */}
Data Transfer
{formatBytes(metrics.bytes_processed)}
Total Processed
{/* Connection Health */}
Connection Health
Connection Drops: 0 ? "text-red-600 dark:text-red-400" : "text-green-600 dark:text-green-400" )}> {formatNumber(metrics.connection_drops)}
{metrics.average_latency && (
Avg Latency: {metrics.average_latency}
)}
)} {/* Last Update */}
Last updated: {lastUpdate.toLocaleTimeString()}
{/* No Data State */} {!metrics && (

No Audio Data

Audio metrics will appear when audio streaming is active.

)}
); }
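// Usage sketch (hypothetical wiring; the actual parent view/route is not shown here):
//   import AudioMetricsDashboard from "@components/AudioMetricsDashboard";
//   ...
//   <AudioMetricsDashboard />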