Compare commits

..

1 Commits

Author SHA1 Message Date
Alex c945d8bf23
Merge 7e83015932 into bb87fb5a1a 2025-08-22 11:16:51 +01:00
78 changed files with 1630 additions and 7865 deletions

View File

@ -27,64 +27,11 @@ jobs:
uses: actions/setup-go@fa96338abe5531f6e34c5cc0bbe28c1a533d5505 # v4.2.1 uses: actions/setup-go@fa96338abe5531f6e34c5cc0bbe28c1a533d5505 # v4.2.1
with: with:
go-version: 1.24.4 go-version: 1.24.4
- name: Setup build environment variables
id: build-env
run: |
# Extract versions from Makefile
ALSA_VERSION=$(grep '^ALSA_VERSION' Makefile | cut -d'=' -f2 | tr -d ' ')
OPUS_VERSION=$(grep '^OPUS_VERSION' Makefile | cut -d'=' -f2 | tr -d ' ')
# Get rv1106-system latest commit
RV1106_COMMIT=$(git ls-remote https://github.com/jetkvm/rv1106-system.git HEAD | cut -f1)
# Set environment variables
echo "ALSA_VERSION=$ALSA_VERSION" >> $GITHUB_ENV
echo "OPUS_VERSION=$OPUS_VERSION" >> $GITHUB_ENV
echo "RV1106_COMMIT=$RV1106_COMMIT" >> $GITHUB_ENV
# Set outputs for use in other steps
echo "alsa_version=$ALSA_VERSION" >> $GITHUB_OUTPUT
echo "opus_version=$OPUS_VERSION" >> $GITHUB_OUTPUT
echo "rv1106_commit=$RV1106_COMMIT" >> $GITHUB_OUTPUT
# Set resolved cache path
CACHE_PATH="$HOME/.jetkvm/audio-libs"
echo "CACHE_PATH=$CACHE_PATH" >> $GITHUB_ENV
echo "cache_path=$CACHE_PATH" >> $GITHUB_OUTPUT
echo "Extracted ALSA version: $ALSA_VERSION"
echo "Extracted Opus version: $OPUS_VERSION"
echo "Latest rv1106-system commit: $RV1106_COMMIT"
echo "Cache path: $CACHE_PATH"
- name: Restore audio dependencies cache
id: cache-audio-deps
uses: actions/cache/restore@v4
with:
path: ${{ steps.build-env.outputs.cache_path }}
key: audio-deps-${{ runner.os }}-alsa-${{ steps.build-env.outputs.alsa_version }}-opus-${{ steps.build-env.outputs.opus_version }}-rv1106-${{ steps.build-env.outputs.rv1106_commit }}
- name: Setup development environment
if: steps.cache-audio-deps.outputs.cache-hit != 'true'
run: make dev_env
env:
ALSA_VERSION: ${{ env.ALSA_VERSION }}
OPUS_VERSION: ${{ env.OPUS_VERSION }}
- name: Create empty resource directory - name: Create empty resource directory
run: | run: |
mkdir -p static && touch static/.gitkeep mkdir -p static && touch static/.gitkeep
- name: Save audio dependencies cache
if: always() && steps.cache-audio-deps.outputs.cache-hit != 'true'
uses: actions/cache/save@v4
with:
path: ${{ steps.build-env.outputs.cache_path }}
key: ${{ steps.cache-audio-deps.outputs.cache-primary-key }}
- name: Lint - name: Lint
uses: golangci/golangci-lint-action@1481404843c368bc19ca9406f87d6e0fc97bdcfd # v7.0.0 uses: golangci/golangci-lint-action@1481404843c368bc19ca9406f87d6e0fc97bdcfd # v7.0.0
with: with:
args: --verbose args: --verbose
version: v2.0.2 version: v2.0.2
env:
CGO_ENABLED: 1
ALSA_VERSION: ${{ env.ALSA_VERSION }}
OPUS_VERSION: ${{ env.OPUS_VERSION }}
CGO_CFLAGS: "-I${{ steps.build-env.outputs.cache_path }}/alsa-lib-${{ steps.build-env.outputs.alsa_version }}/include -I${{ steps.build-env.outputs.cache_path }}/opus-${{ steps.build-env.outputs.opus_version }}/include -I${{ steps.build-env.outputs.cache_path }}/opus-${{ steps.build-env.outputs.opus_version }}/celt"
CGO_LDFLAGS: "-L${{ steps.build-env.outputs.cache_path }}/alsa-lib-${{ steps.build-env.outputs.alsa_version }}/src/.libs -lasound -L${{ steps.build-env.outputs.cache_path }}/opus-${{ steps.build-env.outputs.opus_version }}/.libs -lopus -lm -ldl -static"

View File

@ -26,7 +26,7 @@ Welcome to JetKVM development! This guide will help you get started quickly, whe
- **[Git](https://git-scm.com/downloads)** for version control - **[Git](https://git-scm.com/downloads)** for version control
- **[SSH access](https://jetkvm.com/docs/advanced-usage/developing#developer-mode)** to your JetKVM device - **[SSH access](https://jetkvm.com/docs/advanced-usage/developing#developer-mode)** to your JetKVM device
- **Audio build dependencies:** - **Audio build dependencies:**
- **New:** The audio system uses a dual-subprocess architecture with CGO, ALSA, and Opus integration. You must run the provided scripts in `tools/` to set up the cross-compiler and build static ALSA/Opus libraries for ARM. See below. - **New in this release:** The audio pipeline is now fully in-process using CGO, ALSA, and Opus. You must run the provided scripts in `tools/` to set up the cross-compiler and build static ALSA/Opus libraries for ARM. See below.
### Development Environment ### Development Environment
@ -71,7 +71,7 @@ This ensures compatibility with shell scripts and build tools used in the projec
# This will run tools/setup_rv1106_toolchain.sh and tools/build_audio_deps.sh # This will run tools/setup_rv1106_toolchain.sh and tools/build_audio_deps.sh
# It will clone the cross-compiler and build ALSA/Opus static libs in $HOME/.jetkvm # It will clone the cross-compiler and build ALSA/Opus static libs in $HOME/.jetkvm
# #
# **Note:** This is required for the audio subprocess architecture. If you skip this step, builds will not succeed. # **Note:** This is required for the new in-process audio pipeline. If you skip this step, audio will not work.
``` ```
4. **Find your JetKVM IP address** (check your router or device screen) 4. **Find your JetKVM IP address** (check your router or device screen)
@ -83,7 +83,7 @@ This ensures compatibility with shell scripts and build tools used in the projec
6. **Open in browser:** `http://192.168.1.100` 6. **Open in browser:** `http://192.168.1.100`
That's it! You're now running your own development version of JetKVM, **with bidirectional audio streaming using the dual-subprocess architecture.** That's it! You're now running your own development version of JetKVM, **with in-process audio streaming for the first time.**
--- ---
@ -135,14 +135,14 @@ tail -f /var/log/jetkvm.log
│ ├── src/routes/ # Pages (login, settings, etc.) │ ├── src/routes/ # Pages (login, settings, etc.)
│ └── src/components/ # UI components │ └── src/components/ # UI components
├── internal/ # Internal Go packages ├── internal/ # Internal Go packages
│ └── audio/ # Dual-subprocess audio architecture (CGO, ALSA, Opus) [NEW] │ └── audio/ # In-process audio pipeline (CGO, ALSA, Opus) [NEW]
├── tools/ # Toolchain and audio dependency setup scripts ├── tools/ # Toolchain and audio dependency setup scripts
└── Makefile # Build and dev automation (see audio targets) └── Makefile # Build and dev automation (see audio targets)
``` ```
**Key files for beginners:** **Key files for beginners:**
- `internal/audio/` - [NEW] Dual-subprocess audio architecture (CGO, ALSA, Opus) - `internal/audio/` - [NEW] In-process audio pipeline (CGO, ALSA, Opus)
- `web.go` - Add new API endpoints here - `web.go` - Add new API endpoints here
- `config.go` - Add new settings here - `config.go` - Add new settings here
- `ui/src/routes/` - Add new pages here - `ui/src/routes/` - Add new pages here
@ -174,7 +174,7 @@ npm install
### Quick Backend Changes ### Quick Backend Changes
*Best for: API, backend, or audio logic changes (including audio subprocess architecture)* *Best for: API, backend, or audio logic changes (including audio pipeline)*
```bash ```bash
# Skip frontend build for faster deployment # Skip frontend build for faster deployment
@ -231,103 +231,6 @@ systemctl restart jetkvm
cd ui && npm run lint cd ui && npm run lint
``` ```
### Essential Makefile Targets
The project includes several essential Makefile targets for development environment setup, building, and code quality:
#### Development Environment Setup
```bash
# Set up complete development environment (recommended first step)
make dev_env
# This runs setup_toolchain + build_audio_deps + installs Go tools
# - Clones rv1106-system toolchain to $HOME/.jetkvm/rv1106-system
# - Builds ALSA and Opus static libraries for ARM
# - Installs goimports and other Go development tools
# Set up only the cross-compiler toolchain
make setup_toolchain
# Build only the audio dependencies (requires setup_toolchain)
make build_audio_deps
```
#### Building
```bash
# Build development version with debug symbols
make build_dev
# Builds jetkvm_app with version like 0.4.7-dev20241222
# Requires: make dev_env (for toolchain and audio dependencies)
# Build release version (production)
make build_release
# Builds optimized release version
# Requires: make dev_env and frontend build
# Build test binaries for device testing
make build_dev_test
# Creates device-tests.tar.gz with all test binaries
```
#### Code Quality and Linting
```bash
# Run both Go and UI linting
make lint
# Run both Go and UI linting with auto-fix
make lint-fix
# Run only Go linting
make lint-go
# Run only Go linting with auto-fix
make lint-go-fix
# Run only UI linting
make lint-ui
# Run only UI linting with auto-fix
make lint-ui-fix
```
**Note:** The Go linting targets (`lint-go`, `lint-go-fix`, and the combined `lint`/`lint-fix` targets) require audio dependencies. Run `make dev_env` first if you haven't already.
### Development Deployment Script
The `dev_deploy.sh` script is the primary tool for deploying your development changes to a JetKVM device:
```bash
# Basic deployment (builds and deploys everything)
./dev_deploy.sh -r 192.168.1.100
# Skip UI build for faster backend-only deployment
./dev_deploy.sh -r 192.168.1.100 --skip-ui-build
# Run Go tests on the device after deployment
./dev_deploy.sh -r 192.168.1.100 --run-go-tests
# Deploy with release build and install
./dev_deploy.sh -r 192.168.1.100 -i
# View all available options
./dev_deploy.sh --help
```
**Key features:**
- Automatically builds the Go backend with proper cross-compilation
- Optionally builds the React frontend (unless `--skip-ui-build`)
- Deploys binaries to the device via SSH/SCP
- Restarts the JetKVM service
- Can run tests on the device
- Supports custom SSH user and various deployment options
**Requirements:**
- SSH access to your JetKVM device
- `make dev_env` must be run first (for toolchain and audio dependencies)
- Device IP address or hostname
### API Testing ### API Testing
```bash ```bash
@ -353,7 +256,7 @@ go clean -modcache
go mod tidy go mod tidy
make build_dev make build_dev
# If you see errors about missing ALSA/Opus or toolchain, run: # If you see errors about missing ALSA/Opus or toolchain, run:
make dev_env # Required for audio subprocess architecture make dev_env # Required for new audio support
``` ```
### "Can't connect to device" ### "Can't connect to device"

View File

@ -1,5 +1,5 @@
# --- JetKVM Audio/Toolchain Dev Environment Setup --- # --- JetKVM Audio/Toolchain Dev Environment Setup ---
.PHONY: setup_toolchain build_audio_deps dev_env lint lint-go lint-ui lint-fix lint-go-fix lint-ui-fix ui-lint .PHONY: setup_toolchain build_audio_deps dev_env
# Clone the rv1106-system toolchain to $HOME/.jetkvm/rv1106-system # Clone the rv1106-system toolchain to $HOME/.jetkvm/rv1106-system
setup_toolchain: setup_toolchain:
@ -9,10 +9,8 @@ setup_toolchain:
build_audio_deps: setup_toolchain build_audio_deps: setup_toolchain
bash tools/build_audio_deps.sh $(ALSA_VERSION) $(OPUS_VERSION) bash tools/build_audio_deps.sh $(ALSA_VERSION) $(OPUS_VERSION)
# Prepare everything needed for local development (toolchain + audio deps + Go tools) # Prepare everything needed for local development (toolchain + audio deps)
dev_env: build_audio_deps dev_env: build_audio_deps
@echo "Installing Go development tools..."
go install golang.org/x/tools/cmd/goimports@latest
@echo "Development environment ready." @echo "Development environment ready."
JETKVM_HOME ?= $(HOME)/.jetkvm JETKVM_HOME ?= $(HOME)/.jetkvm
TOOLCHAIN_DIR ?= $(JETKVM_HOME)/rv1106-system TOOLCHAIN_DIR ?= $(JETKVM_HOME)/rv1106-system
@ -128,44 +126,3 @@ release:
@shasum -a 256 bin/jetkvm_app | cut -d ' ' -f 1 > bin/jetkvm_app.sha256 @shasum -a 256 bin/jetkvm_app | cut -d ' ' -f 1 > bin/jetkvm_app.sha256
rclone copyto bin/jetkvm_app r2://jetkvm-update/app/$(VERSION)/jetkvm_app rclone copyto bin/jetkvm_app r2://jetkvm-update/app/$(VERSION)/jetkvm_app
rclone copyto bin/jetkvm_app.sha256 r2://jetkvm-update/app/$(VERSION)/jetkvm_app.sha256 rclone copyto bin/jetkvm_app.sha256 r2://jetkvm-update/app/$(VERSION)/jetkvm_app.sha256
# Run both Go and UI linting
lint: lint-go lint-ui
@echo "All linting completed successfully!"
# Run golangci-lint locally with the same configuration as CI
lint-go: build_audio_deps
@echo "Running golangci-lint..."
@mkdir -p static && touch static/.gitkeep
CGO_ENABLED=1 \
CGO_CFLAGS="-I$(AUDIO_LIBS_DIR)/alsa-lib-$(ALSA_VERSION)/include -I$(AUDIO_LIBS_DIR)/opus-$(OPUS_VERSION)/include -I$(AUDIO_LIBS_DIR)/opus-$(OPUS_VERSION)/celt" \
CGO_LDFLAGS="-L$(AUDIO_LIBS_DIR)/alsa-lib-$(ALSA_VERSION)/src/.libs -lasound -L$(AUDIO_LIBS_DIR)/opus-$(OPUS_VERSION)/.libs -lopus -lm -ldl -static" \
golangci-lint run --verbose
# Run both Go and UI linting with auto-fix
lint-fix: lint-go-fix lint-ui-fix
@echo "All linting with auto-fix completed successfully!"
# Run golangci-lint with auto-fix
lint-go-fix: build_audio_deps
@echo "Running golangci-lint with auto-fix..."
@mkdir -p static && touch static/.gitkeep
CGO_ENABLED=1 \
CGO_CFLAGS="-I$(AUDIO_LIBS_DIR)/alsa-lib-$(ALSA_VERSION)/include -I$(AUDIO_LIBS_DIR)/opus-$(OPUS_VERSION)/include -I$(AUDIO_LIBS_DIR)/opus-$(OPUS_VERSION)/celt" \
CGO_LDFLAGS="-L$(AUDIO_LIBS_DIR)/alsa-lib-$(ALSA_VERSION)/src/.libs -lasound -L$(AUDIO_LIBS_DIR)/opus-$(OPUS_VERSION)/.libs -lopus -lm -ldl -static" \
golangci-lint run --fix --verbose
# Run UI linting locally (mirrors GitHub workflow ui-lint.yml)
lint-ui:
@echo "Running UI lint..."
@cd ui && npm ci
@cd ui && npm run lint
# Run UI linting with auto-fix
lint-ui-fix:
@echo "Running UI lint with auto-fix..."
@cd ui && npm ci
@cd ui && npm run lint:fix
# Legacy alias for UI linting (for backward compatibility)
ui-lint: lint-ui

View File

@ -22,7 +22,7 @@ JetKVM is a high-performance, open-source KVM over IP (Keyboard, Video, Mouse, *
## Features ## Features
- **Ultra-low Latency** - 1080p@60FPS video with 30-60ms latency using H.264 encoding. Smooth mouse, keyboard, and audio for responsive remote control. - **Ultra-low Latency** - 1080p@60FPS video with 30-60ms latency using H.264 encoding. Smooth mouse, keyboard, and audio for responsive remote control.
- **First-Class Audio Support** - JetKVM now supports bidirectional, low-latency audio streaming using a dual-subprocess architecture with ALSA and Opus integration via CGO. Features both audio output (PC→Browser) and audio input (Browser→PC) with dedicated subprocesses for optimal performance and isolation. - **First-Class Audio Support** - JetKVM now supports in-process, low-latency audio streaming using ALSA and Opus, fully integrated via CGO. No external audio binaries or IPC required—audio is delivered directly from the device to your browser.
- **Free & Optional Remote Access** - Remote management via JetKVM Cloud using WebRTC. - **Free & Optional Remote Access** - Remote management via JetKVM Cloud using WebRTC.
- **Open-source software** - Written in Golang (with CGO for audio) on Linux. Easily customizable through SSH access to the JetKVM device. - **Open-source software** - Written in Golang (with CGO for audio) on Linux. Easily customizable through SSH access to the JetKVM device.
@ -42,7 +42,7 @@ If you've found an issue and want to report it, please check our [Issues](https:
# Development # Development
JetKVM is written in Go & TypeScript, with some C for low-level integration. **Audio support uses a sophisticated dual-subprocess architecture with CGO, ALSA, and Opus integration for bidirectional streaming with complete process isolation.** JetKVM is written in Go & TypeScript, with some C for low-level integration. **Audio support is now fully in-process using CGO, ALSA, and Opus—no external audio binaries required.**
The project contains two main parts: the backend software (Go, CGO) that runs on the KVM device, and the frontend software (React/TypeScript) that is served by the KVM device and the cloud. The project contains two main parts: the backend software (Go, CGO) that runs on the KVM device, and the frontend software (React/TypeScript) that is served by the KVM device and the cloud.
@ -53,7 +53,7 @@ For quick device development, use the `./dev_deploy.sh` script. It will build th
## Backend ## Backend
The backend is written in Go and is responsible for KVM device management, audio/video streaming, the cloud API, and the cloud web. **Audio uses dedicated subprocesses for both output and input streams, with CGO-based ALSA and Opus processing, IPC communication via Unix sockets, and comprehensive process supervision for reliability.** The backend is written in Go and is responsible for KVM device management, audio/video streaming, the cloud API, and the cloud web. **Audio is now captured and encoded in-process using ALSA and Opus via CGO, with no external processes or IPC.**
## Frontend ## Frontend

View File

@ -11,8 +11,6 @@ import (
func main() { func main() {
versionPtr := flag.Bool("version", false, "print version and exit") versionPtr := flag.Bool("version", false, "print version and exit")
versionJsonPtr := flag.Bool("version-json", false, "print version as json and exit") versionJsonPtr := flag.Bool("version-json", false, "print version as json and exit")
audioServerPtr := flag.Bool("audio-output-server", false, "Run as audio server subprocess")
audioInputServerPtr := flag.Bool("audio-input-server", false, "Run as audio input server subprocess")
flag.Parse() flag.Parse()
if *versionPtr || *versionJsonPtr { if *versionPtr || *versionJsonPtr {
@ -25,5 +23,5 @@ func main() {
return return
} }
kvm.Main(*audioServerPtr, *audioInputServerPtr) kvm.Main()
} }

View File

@ -159,8 +159,8 @@ else
msg_info "▶ Building development binary" msg_info "▶ Building development binary"
make build_dev make build_dev
# Kill any existing instances of the application (specific cleanup) # Kill any existing instances of the application
ssh "${REMOTE_USER}@${REMOTE_HOST}" "killall jetkvm_app || true; killall jetkvm_native || true; killall jetkvm_app_debug || true; sleep 2" ssh "${REMOTE_USER}@${REMOTE_HOST}" "killall jetkvm_app_debug || true"
# Copy the binary to the remote host # Copy the binary to the remote host
ssh "${REMOTE_USER}@${REMOTE_HOST}" "cat > ${REMOTE_PATH}/jetkvm_app_debug" < bin/jetkvm_app ssh "${REMOTE_USER}@${REMOTE_HOST}" "cat > ${REMOTE_PATH}/jetkvm_app_debug" < bin/jetkvm_app
@ -180,26 +180,48 @@ set -e
# Set the library path to include the directory where librockit.so is located # Set the library path to include the directory where librockit.so is located
export LD_LIBRARY_PATH=/oem/usr/lib:\$LD_LIBRARY_PATH export LD_LIBRARY_PATH=/oem/usr/lib:\$LD_LIBRARY_PATH
# Kill any existing instances of the application (specific cleanup) # Check if production jetkvm_app is running and save its state
killall jetkvm_app || true PROD_APP_RUNNING=false
killall jetkvm_native || true if pgrep -f "/userdata/jetkvm/bin/jetkvm_app" > /dev/null; then
killall jetkvm_app_debug || true PROD_APP_RUNNING=true
sleep 2 echo "Production jetkvm_app is running, will restore after development session"
else
# Verify no processes are using port 80 echo "No production jetkvm_app detected"
if netstat -tlnp | grep :80 > /dev/null 2>&1; then
echo "Warning: Port 80 still in use, attempting to free it..."
fuser -k 80/tcp || true
sleep 1
fi fi
# Kill any existing instances of the application
pkill -f "/userdata/jetkvm/bin/jetkvm_app" || true
killall jetkvm_app_debug || true
# Navigate to the directory where the binary will be stored # Navigate to the directory where the binary will be stored
cd "${REMOTE_PATH}" cd "${REMOTE_PATH}"
# Make the new binary executable # Make the new binary executable
chmod +x jetkvm_app_debug chmod +x jetkvm_app_debug
# Run the application in the background # Create a cleanup script that will restore the production app
cat > /tmp/restore_jetkvm.sh << RESTORE_EOF
#!/bin/ash
set -e
export LD_LIBRARY_PATH=/oem/usr/lib:\$LD_LIBRARY_PATH
cd ${REMOTE_PATH}
if [ "$PROD_APP_RUNNING" = "true" ]; then
echo "Restoring production jetkvm_app..."
killall jetkvm_app_debug || true
nohup /userdata/jetkvm/bin/jetkvm_app > /tmp/jetkvm_app.log 2>&1 &
echo "Production jetkvm_app restored"
else
echo "No production app was running before, not restoring"
fi
RESTORE_EOF
chmod +x /tmp/restore_jetkvm.sh
# Set up signal handler to restore production app on exit
trap '/tmp/restore_jetkvm.sh' EXIT INT TERM
# Run the application in the foreground
echo "Starting development jetkvm_app_debug..."
PION_LOG_TRACE=${LOG_TRACE_SCOPES} ./jetkvm_app_debug | tee -a /tmp/jetkvm_app_debug.log PION_LOG_TRACE=${LOG_TRACE_SCOPES} ./jetkvm_app_debug | tee -a /tmp/jetkvm_app_debug.log
EOF EOF
fi fi

View File

@ -14,7 +14,7 @@ const (
// Input RPC Direct Handlers // Input RPC Direct Handlers
// This module provides optimized direct handlers for high-frequency input events, // This module provides optimized direct handlers for high-frequency input events,
// bypassing the reflection-based RPC system for improved performance. // bypassing the reflection-based RPC system for improved performance.
// //
// Performance benefits: // Performance benefits:
// - Eliminates reflection overhead (~2-3ms per call) // - Eliminates reflection overhead (~2-3ms per call)
// - Reduces memory allocations // - Reduces memory allocations
@ -214,4 +214,4 @@ func isInputMethod(method string) bool {
default: default:
return false return false
} }
} }

View File

@ -1,338 +0,0 @@
package audio
import (
"context"
"math"
"sync"
"sync/atomic"
"time"
"github.com/jetkvm/kvm/internal/logging"
"github.com/rs/zerolog"
)
// AdaptiveBufferConfig holds the tuning knobs for adaptive buffer sizing:
// hard frame-count limits, the CPU/memory/latency thresholds that trigger
// growth or shrinkage, and how often/aggressively adaptation is applied.
type AdaptiveBufferConfig struct {
	// Buffer size limits (in frames). Sizes are clamped to
	// [MinBufferSize, MaxBufferSize] and start at DefaultBufferSize.
	MinBufferSize     int
	MaxBufferSize     int
	DefaultBufferSize int
	// System load thresholds (percentages). Between the low and high bound
	// the adaptation factor is interpolated linearly.
	LowCPUThreshold     float64 // Below this, increase buffer size
	HighCPUThreshold    float64 // Above this, decrease buffer size
	LowMemoryThreshold  float64 // Below this, increase buffer size
	HighMemoryThreshold float64 // Above this, decrease buffer size
	// Latency thresholds (time.Duration values, not raw milliseconds).
	TargetLatency time.Duration // Below this, buffers may grow
	MaxLatency    time.Duration // Above this, buffers shrink
	// Adaptation parameters.
	AdaptationInterval time.Duration // How often buffer sizes are re-evaluated
	SmoothingFactor    float64       // 0.0-1.0, higher = more responsive
}
// DefaultAdaptiveBufferConfig returns the tuned defaults for JetKVM hardware
// (single-core ARM Cortex-A7, 256MB RAM): conservative frame counts, load
// thresholds that yield resources to the KVM process early, and a 500ms
// adaptation cadence with moderate smoothing.
func DefaultAdaptiveBufferConfig() AdaptiveBufferConfig {
	var cfg AdaptiveBufferConfig

	// Frame-count limits sized for the 256MB RAM budget.
	cfg.MinBufferSize = 3     // floor of 3 frames for stability
	cfg.MaxBufferSize = 20    // ceiling of 20 frames for high-load scenarios
	cfg.DefaultBufferSize = 6 // starting point balancing latency and stability

	// CPU thresholds for the single-core ARM Cortex-A7 under load.
	cfg.LowCPUThreshold = 20.0  // below 20% CPU
	cfg.HighCPUThreshold = 60.0 // above 60% CPU (kept low to react early)

	// Memory thresholds for 256MB total RAM.
	cfg.LowMemoryThreshold = 35.0  // below 35% memory usage
	cfg.HighMemoryThreshold = 75.0 // above 75% memory usage (early response)

	// Latency targets.
	cfg.TargetLatency = 20 * time.Millisecond // aim for 20ms
	cfg.MaxLatency = 50 * time.Millisecond    // 50ms is the acceptable maximum

	// Adaptation cadence and responsiveness.
	cfg.AdaptationInterval = 500 * time.Millisecond // re-evaluate every 500ms
	cfg.SmoothingFactor = 0.3                       // moderate responsiveness

	return cfg
}
// AdaptiveBufferManager dynamically resizes the audio input/output buffers
// based on observed CPU load, memory pressure, and latency. Buffer sizes and
// metrics are stored atomically so readers never block the adaptation loop.
type AdaptiveBufferManager struct {
	// Atomic fields MUST be first for ARM32 alignment (int64 atomics need
	// 8-byte alignment, which Go only guarantees at the start of a struct
	// on 32-bit platforms).
	currentInputBufferSize  int64 // Current input buffer size in frames (atomic)
	currentOutputBufferSize int64 // Current output buffer size in frames (atomic)
	averageLatency          int64 // EMA of latency in nanoseconds (atomic)
	systemCPUPercent        int64 // System CPU percentage * 100 (atomic)
	systemMemoryPercent     int64 // System memory percentage * 100 (atomic)
	adaptationCount         int64 // Number of adaptations performed (atomic)

	config         AdaptiveBufferConfig
	logger         zerolog.Logger
	processMonitor *ProcessMonitor // source of per-process CPU/memory metrics

	// Lifecycle control for the background adaptation goroutine.
	ctx    context.Context
	cancel context.CancelFunc
	wg     sync.WaitGroup

	// lastAdaptation records when buffer sizes last changed; guarded by mutex.
	lastAdaptation time.Time
	mutex          sync.RWMutex
}
// NewAdaptiveBufferManager constructs a manager seeded with the config's
// default buffer size for both directions. Call Start to begin adaptation
// and Stop to shut it down.
func NewAdaptiveBufferManager(config AdaptiveBufferConfig) *AdaptiveBufferManager {
	ctx, cancel := context.WithCancel(context.Background())

	abm := &AdaptiveBufferManager{
		config:         config,
		logger:         logging.GetDefaultLogger().With().Str("component", "adaptive-buffer").Logger(),
		processMonitor: GetProcessMonitor(),
		ctx:            ctx,
		cancel:         cancel,
		lastAdaptation: time.Now(),
	}
	// Both directions start at the configured default size.
	abm.currentInputBufferSize = int64(config.DefaultBufferSize)
	abm.currentOutputBufferSize = int64(config.DefaultBufferSize)
	return abm
}
// Start begins the adaptive buffer management by launching the background
// adaptation goroutine. Pair with Stop to shut it down cleanly.
func (abm *AdaptiveBufferManager) Start() {
	abm.wg.Add(1)
	go abm.adaptationLoop()
	abm.logger.Info().Msg("Adaptive buffer manager started")
}
// Stop cancels the manager's context and blocks until the adaptation
// goroutine has exited. Intended to be called once, after Start.
func (abm *AdaptiveBufferManager) Stop() {
	abm.cancel()
	abm.wg.Wait()
	abm.logger.Info().Msg("Adaptive buffer manager stopped")
}
// GetInputBufferSize returns the current recommended input buffer size in
// frames, as last set by the adaptation loop.
func (abm *AdaptiveBufferManager) GetInputBufferSize() int {
	frames := atomic.LoadInt64(&abm.currentInputBufferSize)
	return int(frames)
}
// GetOutputBufferSize returns the current recommended output buffer size in
// frames, as last set by the adaptation loop.
func (abm *AdaptiveBufferManager) GetOutputBufferSize() int {
	frames := atomic.LoadInt64(&abm.currentOutputBufferSize)
	return int(frames)
}
// UpdateLatency folds a new latency sample into the running average using an
// exponential moving average. The first sample seeds the average directly;
// later samples are blended with weight SmoothingFactor for the new sample
// and (1 - SmoothingFactor) for history, matching the smoothing used by
// applyAdaptation (with the default config this is the same 70/30 split the
// previous hard-coded constants produced). A compare-and-swap loop replaces
// the old load-then-store sequence so concurrent callers cannot silently
// overwrite each other's updates.
func (abm *AdaptiveBufferManager) UpdateLatency(latency time.Duration) {
	sample := latency.Nanoseconds()
	for {
		current := atomic.LoadInt64(&abm.averageLatency)
		var next int64
		if current == 0 {
			// First measurement: seed the average directly.
			next = sample
		} else {
			// EMA: (1-s)*historical + s*current.
			s := abm.config.SmoothingFactor
			next = int64(float64(current)*(1-s) + float64(sample)*s)
		}
		// Retry if another goroutine updated the average concurrently.
		if atomic.CompareAndSwapInt64(&abm.averageLatency, current, next) {
			return
		}
	}
}
// adaptationLoop drives periodic buffer-size re-evaluation until Stop
// cancels the manager's context. It signals completion via the WaitGroup.
func (abm *AdaptiveBufferManager) adaptationLoop() {
	defer abm.wg.Done()

	tick := time.NewTicker(abm.config.AdaptationInterval)
	defer tick.Stop()

	for {
		select {
		case <-abm.ctx.Done():
			return
		case <-tick.C:
			abm.adaptBufferSizes()
		}
	}
}
// adaptBufferSizes samples system load and latency, combines them into a
// single adaptation factor, and nudges the input/output buffer sizes toward
// a new target. Invoked once per AdaptationInterval by adaptationLoop.
func (abm *AdaptiveBufferManager) adaptBufferSizes() {
	// Collect current per-process metrics; bail out when none are available.
	metrics := abm.processMonitor.GetCurrentMetrics()
	if len(metrics) == 0 {
		return // No metrics available
	}
	// Aggregate CPU and memory across all monitored processes.
	totalCPU := 0.0
	totalMemory := 0.0
	processCount := 0
	for _, metric := range metrics {
		totalCPU += metric.CPUPercent
		totalMemory += metric.MemoryPercent
		processCount++
	}
	if processCount == 0 {
		return
	}
	// Publish system metrics atomically, scaled by 100 to fit the int64 fields.
	// NOTE(review): CPU is summed across processes while memory is averaged —
	// presumably intentional (total CPU pressure vs. typical memory share);
	// confirm against the metrics consumers.
	systemCPU := totalCPU                               // Total CPU across all monitored processes
	systemMemory := totalMemory / float64(processCount) // Average memory usage
	atomic.StoreInt64(&abm.systemCPUPercent, int64(systemCPU*100))
	atomic.StoreInt64(&abm.systemMemoryPercent, int64(systemMemory*100))
	// Read the latency EMA maintained by UpdateLatency.
	currentLatencyNs := atomic.LoadInt64(&abm.averageLatency)
	currentLatency := time.Duration(currentLatencyNs)
	// Each factor is in [-1, 1]: negative shrinks buffers, positive grows them.
	cpuFactor := abm.calculateCPUFactor(systemCPU)
	memoryFactor := abm.calculateMemoryFactor(systemMemory)
	latencyFactor := abm.calculateLatencyFactor(currentLatency)
	// Weighted blend: CPU dominates so audio yields to the KVM process first.
	combinedFactor := 0.5*cpuFactor + 0.3*memoryFactor + 0.2*latencyFactor
	// Current sizes as floats for the smoothed adaptation math.
	currentInput := float64(atomic.LoadInt64(&abm.currentInputBufferSize))
	currentOutput := float64(atomic.LoadInt64(&abm.currentOutputBufferSize))
	// Compute smoothed, clamped targets for both directions.
	newInputSize := abm.applyAdaptation(currentInput, combinedFactor)
	newOutputSize := abm.applyAdaptation(currentOutput, combinedFactor)
	// Only commit when either size moved by at least half a frame, to avoid
	// churning on rounding noise.
	adjustmentMade := false
	if math.Abs(newInputSize-currentInput) >= 0.5 || math.Abs(newOutputSize-currentOutput) >= 0.5 {
		atomic.StoreInt64(&abm.currentInputBufferSize, int64(math.Round(newInputSize)))
		atomic.StoreInt64(&abm.currentOutputBufferSize, int64(math.Round(newOutputSize)))
		atomic.AddInt64(&abm.adaptationCount, 1)
		abm.mutex.Lock()
		abm.lastAdaptation = time.Now()
		abm.mutex.Unlock()
		adjustmentMade = true
		abm.logger.Debug().
			Float64("cpu_percent", systemCPU).
			Float64("memory_percent", systemMemory).
			Dur("latency", currentLatency).
			Float64("combined_factor", combinedFactor).
			Int("new_input_size", int(newInputSize)).
			Int("new_output_size", int(newOutputSize)).
			Msg("Adapted buffer sizes")
	}
	// Export the (possibly unchanged) state to the shared metrics sink.
	currentInputSize := int(atomic.LoadInt64(&abm.currentInputBufferSize))
	currentOutputSize := int(atomic.LoadInt64(&abm.currentOutputBufferSize))
	UpdateAdaptiveBufferMetrics(currentInputSize, currentOutputSize, systemCPU, systemMemory, adjustmentMade)
}
// calculateCPUFactor maps CPU usage to an adaptation factor in [-1.0, 1.0]:
// -1.0 shrinks buffers (high CPU, yield to KVM), +1.0 grows them (idle CPU),
// with linear interpolation between the two configured thresholds.
func (abm *AdaptiveBufferManager) calculateCPUFactor(cpuPercent float64) float64 {
	switch {
	case cpuPercent > abm.config.HighCPUThreshold:
		// Saturated: shrink buffers to cut latency and free CPU for KVM.
		return -1.0
	case cpuPercent < abm.config.LowCPUThreshold:
		// Idle: grow buffers for better quality.
		return 1.0
	default:
		// In between: interpolate linearly around the midpoint, so the factor
		// crosses zero halfway between the low and high thresholds.
		mid := (abm.config.HighCPUThreshold + abm.config.LowCPUThreshold) / 2
		return (mid - cpuPercent) / (mid - abm.config.LowCPUThreshold)
	}
}
// calculateMemoryFactor maps memory usage to an adaptation factor in
// [-1.0, 1.0], analogous to calculateCPUFactor: high usage shrinks buffers,
// low usage grows them, with linear interpolation in between.
func (abm *AdaptiveBufferManager) calculateMemoryFactor(memoryPercent float64) float64 {
	switch {
	case memoryPercent > abm.config.HighMemoryThreshold:
		// Memory pressure: shrink buffers to release frames.
		return -1.0
	case memoryPercent < abm.config.LowMemoryThreshold:
		// Plenty of headroom: grow buffers for better performance.
		return 1.0
	default:
		// Linear interpolation around the midpoint of the two thresholds.
		mid := (abm.config.HighMemoryThreshold + abm.config.LowMemoryThreshold) / 2
		return (mid - memoryPercent) / (mid - abm.config.LowMemoryThreshold)
	}
}
// calculateLatencyFactor maps the measured latency to an adaptation factor in
// [-1.0, 1.0]: latency above MaxLatency shrinks buffers, latency below
// TargetLatency allows growth, and values in between interpolate linearly.
func (abm *AdaptiveBufferManager) calculateLatencyFactor(latency time.Duration) float64 {
	switch {
	case latency > abm.config.MaxLatency:
		// Over budget: shrink buffers to claw back latency.
		return -1.0
	case latency < abm.config.TargetLatency:
		// Under target: room to grow buffers.
		return 1.0
	default:
		// Linear interpolation around the midpoint of target and max.
		mid := (abm.config.MaxLatency + abm.config.TargetLatency) / 2
		return float64(mid-latency) / float64(mid-abm.config.TargetLatency)
	}
}
// applyAdaptation moves currentSize toward the buffer limit indicated by
// factor (max for positive, min for non-positive), proportionally to the
// remaining headroom in that direction, then smooths the step with
// SmoothingFactor and clamps the result into [MinBufferSize, MaxBufferSize].
func (abm *AdaptiveBufferManager) applyAdaptation(currentSize, factor float64) float64 {
	minSize := float64(abm.config.MinBufferSize)
	maxSize := float64(abm.config.MaxBufferSize)

	// Pick a target between the current size and the relevant limit.
	target := currentSize
	if factor > 0 {
		target += factor * (maxSize - currentSize)
	} else {
		target += factor * (currentSize - minSize)
	}

	// Take only a SmoothingFactor-sized step toward the target, then clamp.
	next := currentSize + abm.config.SmoothingFactor*(target-currentSize)
	return math.Max(minSize, math.Min(maxSize, next))
}
// GetStats returns a snapshot of the manager's current state: buffer sizes,
// smoothed latency in milliseconds, last-sampled system load, adaptation
// count, and the time of the last adaptation.
func (abm *AdaptiveBufferManager) GetStats() map[string]interface{} {
	abm.mutex.RLock()
	lastAdaptation := abm.lastAdaptation
	abm.mutex.RUnlock()

	stats := make(map[string]interface{}, 7)
	stats["input_buffer_size"] = abm.GetInputBufferSize()
	stats["output_buffer_size"] = abm.GetOutputBufferSize()
	stats["average_latency_ms"] = float64(atomic.LoadInt64(&abm.averageLatency)) / 1e6
	stats["system_cpu_percent"] = float64(atomic.LoadInt64(&abm.systemCPUPercent)) / 100
	stats["system_memory_percent"] = float64(atomic.LoadInt64(&abm.systemMemoryPercent)) / 100
	stats["adaptation_count"] = atomic.LoadInt64(&abm.adaptationCount)
	stats["last_adaptation"] = lastAdaptation
	return stats
}
// Global adaptive buffer manager instance, created lazily by
// GetAdaptiveBufferManager.
var globalAdaptiveBufferManager *AdaptiveBufferManager

// adaptiveBufferOnce guards one-time initialization of the global instance.
var adaptiveBufferOnce sync.Once
// GetAdaptiveBufferManager lazily creates and returns the process-wide
// adaptive buffer manager, initialized with the default configuration.
func GetAdaptiveBufferManager() *AdaptiveBufferManager {
	adaptiveBufferOnce.Do(func() {
		cfg := DefaultAdaptiveBufferConfig()
		globalAdaptiveBufferManager = NewAdaptiveBufferManager(cfg)
	})
	return globalAdaptiveBufferManager
}
// StartAdaptiveBuffering starts the global adaptive buffer manager,
// creating it on first use.
func StartAdaptiveBuffering() {
	mgr := GetAdaptiveBufferManager()
	mgr.Start()
}
// StopAdaptiveBuffering stops the global adaptive buffer manager, if one
// was ever created.
func StopAdaptiveBuffering() {
	// NOTE(review): this reads globalAdaptiveBufferManager without the
	// sync.Once barrier that writes it; callers are assumed to call Stop
	// only after Start — confirm that ordering holds.
	mgr := globalAdaptiveBufferManager
	if mgr != nil {
		mgr.Stop()
	}
}

View File

@ -1,198 +0,0 @@
package audio
import (
"context"
"sync"
"sync/atomic"
"time"
"github.com/rs/zerolog"
)
// AdaptiveOptimizer automatically adjusts audio parameters based on latency
// metrics reported by a LatencyMonitor.
type AdaptiveOptimizer struct {
	// Atomic fields MUST be first for ARM32 alignment (int64 fields need 8-byte alignment)
	optimizationCount int64 // Number of optimizations performed (atomic)
	lastOptimization  int64 // Timestamp of last optimization, UnixNano (atomic)
	optimizationLevel int64 // Current optimization level (0-10) (atomic)

	latencyMonitor *LatencyMonitor        // supplies latency metrics and optimization callbacks
	bufferManager  *AdaptiveBufferManager // associated buffer manager (stored by the constructor)
	logger         zerolog.Logger

	// Control channels
	ctx    context.Context    // cancelled by Stop to terminate the optimization loop
	cancel context.CancelFunc // cancels ctx
	wg     sync.WaitGroup     // waits for the optimization loop goroutine

	// Configuration
	config OptimizerConfig
}
// OptimizerConfig holds configuration for the adaptive optimizer.
type OptimizerConfig struct {
	MaxOptimizationLevel int           // Maximum optimization level (0-10)
	CooldownPeriod       time.Duration // Minimum time between optimizations
	Aggressiveness       float64       // How aggressively to optimize (0.0-1.0)
	RollbackThreshold    time.Duration // Latency threshold to rollback optimizations
	StabilityPeriod      time.Duration // Time to wait for stability after optimization
}
// DefaultOptimizerConfig returns a sensible default configuration for the
// adaptive optimizer.
func DefaultOptimizerConfig() OptimizerConfig {
	var cfg OptimizerConfig
	cfg.MaxOptimizationLevel = 8
	cfg.CooldownPeriod = 30 * time.Second
	cfg.Aggressiveness = 0.7
	cfg.RollbackThreshold = 300 * time.Millisecond
	cfg.StabilityPeriod = 10 * time.Second
	return cfg
}
// NewAdaptiveOptimizer creates a new adaptive optimizer and registers it as
// an optimization callback on the supplied latency monitor.
func NewAdaptiveOptimizer(latencyMonitor *LatencyMonitor, bufferManager *AdaptiveBufferManager, config OptimizerConfig, logger zerolog.Logger) *AdaptiveOptimizer {
	ctx, cancel := context.WithCancel(context.Background())

	ao := &AdaptiveOptimizer{
		latencyMonitor: latencyMonitor,
		bufferManager:  bufferManager,
		config:         config,
		logger:         logger.With().Str("component", "adaptive-optimizer").Logger(),
		ctx:            ctx,
		cancel:         cancel,
	}

	// The monitor will invoke handleLatencyOptimization whenever latency
	// optimization is needed.
	latencyMonitor.AddOptimizationCallback(ao.handleLatencyOptimization)
	return ao
}
// Start begins the adaptive optimization process by launching the
// background optimization loop goroutine.
func (ao *AdaptiveOptimizer) Start() {
	ao.wg.Add(1)
	go ao.optimizationLoop()
	ao.logger.Info().Msg("Adaptive optimizer started")
}
// Stop cancels the optimizer's context and blocks until the optimization
// loop goroutine has exited.
func (ao *AdaptiveOptimizer) Stop() {
	ao.cancel()
	ao.wg.Wait()
	ao.logger.Info().Msg("Adaptive optimizer stopped")
}
// handleLatencyOptimization is invoked by the latency monitor when latency
// optimization is needed. It recomputes the desired optimization level and
// raises or lowers the current level accordingly, honoring the cooldown
// period between adjustments.
func (ao *AdaptiveOptimizer) handleLatencyOptimization(metrics LatencyMetrics) error {
	level := atomic.LoadInt64(&ao.optimizationLevel)
	lastNanos := atomic.LoadInt64(&ao.lastOptimization)

	// Too soon since the last adjustment: do nothing.
	if time.Since(time.Unix(0, lastNanos)) < ao.config.CooldownPeriod {
		return nil
	}

	target := ao.calculateTargetOptimizationLevel(metrics)
	switch {
	case target > level:
		return ao.increaseOptimization(int(target))
	case target < level:
		return ao.decreaseOptimization(int(target))
	default:
		return nil
	}
}
// calculateTargetOptimizationLevel determines the appropriate optimization
// level (clamped to [0, MaxOptimizationLevel]) for the given latency metrics.
func (ao *AdaptiveOptimizer) calculateTargetOptimizationLevel(metrics LatencyMetrics) int64 {
	// Ratio of observed latency to the 50ms latency target.
	ratio := float64(metrics.Current) / float64(50*time.Millisecond)

	// Bias the ratio by the latency trend.
	switch metrics.Trend {
	case LatencyTrendIncreasing:
		ratio *= 1.2 // push harder while latency is climbing
	case LatencyTrendDecreasing:
		ratio *= 0.8 // back off while latency is improving
	case LatencyTrendVolatile:
		ratio *= 1.1 // slightly more aggressive under jitter
	}

	// Scale by configured aggressiveness, then map onto the level range.
	ratio *= ao.config.Aggressiveness
	target := int64(ratio * 2)

	// Clamp to [0, MaxOptimizationLevel].
	if limit := int64(ao.config.MaxOptimizationLevel); target > limit {
		target = limit
	}
	if target < 0 {
		target = 0
	}
	return target
}
// increaseOptimization applies optimization strategies up to the target level.
// Currently only the bookkeeping is implemented: the new level, the change
// timestamp, and the running optimization count are stored atomically; no
// actual strategies are applied yet.
func (ao *AdaptiveOptimizer) increaseOptimization(targetLevel int) error {
	atomic.StoreInt64(&ao.optimizationLevel, int64(targetLevel))
	atomic.StoreInt64(&ao.lastOptimization, time.Now().UnixNano())
	atomic.AddInt64(&ao.optimizationCount, 1)
	return nil
}
// decreaseOptimization rolls back optimization strategies to the target level.
// Like increaseOptimization this is bookkeeping only; note that, unlike
// increaseOptimization, optimizationCount is not incremented here.
func (ao *AdaptiveOptimizer) decreaseOptimization(targetLevel int) error {
	atomic.StoreInt64(&ao.optimizationLevel, int64(targetLevel))
	atomic.StoreInt64(&ao.lastOptimization, time.Now().UnixNano())
	return nil
}
// optimizationLoop periodically checks system stability until the
// optimizer's context is cancelled by Stop.
func (ao *AdaptiveOptimizer) optimizationLoop() {
	defer ao.wg.Done()

	stabilityTicker := time.NewTicker(ao.config.StabilityPeriod)
	defer stabilityTicker.Stop()

	for {
		select {
		case <-ao.ctx.Done():
			return
		case <-stabilityTicker.C:
			ao.checkStability()
		}
	}
}
// checkStability rolls the optimization level back one step when the
// current latency exceeds the configured rollback threshold.
func (ao *AdaptiveOptimizer) checkStability() {
	m := ao.latencyMonitor.GetMetrics()

	// Latency within bounds: nothing to roll back.
	if m.Current <= ao.config.RollbackThreshold {
		return
	}

	level := int(atomic.LoadInt64(&ao.optimizationLevel))
	if level <= 0 {
		return
	}

	ao.logger.Warn().
		Dur("current_latency", m.Current).
		Dur("threshold", ao.config.RollbackThreshold).
		Msg("Rolling back optimizations due to excessive latency")
	if err := ao.decreaseOptimization(level - 1); err != nil {
		ao.logger.Error().Err(err).Msg("Failed to decrease optimization level")
	}
}
// GetOptimizationStats returns current optimization statistics: the level,
// the number of optimizations performed, and the time of the last change.
func (ao *AdaptiveOptimizer) GetOptimizationStats() map[string]interface{} {
	stats := make(map[string]interface{}, 3)
	stats["optimization_level"] = atomic.LoadInt64(&ao.optimizationLevel)
	stats["optimization_count"] = atomic.LoadInt64(&ao.optimizationCount)
	stats["last_optimization"] = time.Unix(0, atomic.LoadInt64(&ao.lastOptimization))
	return stats
}
// Strategy implementation methods (stubs for now)

View File

@ -1,72 +1,13 @@
package audio package audio
import ( // StartAudioStreaming launches the in-process audio stream and delivers Opus frames to the provided callback.
"os" // This is now a wrapper around the non-blocking audio implementation for backward compatibility.
"strings"
"sync/atomic"
"unsafe"
)
var (
// Global audio output supervisor instance
globalOutputSupervisor unsafe.Pointer // *AudioServerSupervisor
)
// isAudioServerProcess detects if we're running as the audio server subprocess
func isAudioServerProcess() bool {
for _, arg := range os.Args {
if strings.Contains(arg, "--audio-output-server") {
return true
}
}
return false
}
// StartAudioStreaming launches the audio stream.
// In audio server subprocess: uses CGO-based audio streaming
// In main process: this should not be called (use StartAudioRelay instead)
func StartAudioStreaming(send func([]byte)) error { func StartAudioStreaming(send func([]byte)) error {
if isAudioServerProcess() { return StartNonBlockingAudioStreaming(send)
// Audio server subprocess: use CGO audio processing
return StartAudioOutputStreaming(send)
} else {
// Main process: should use relay system instead
// This is kept for backward compatibility but not recommended
return StartAudioOutputStreaming(send)
}
} }
// StopAudioStreaming stops the audio stream. // StopAudioStreaming stops the in-process audio stream.
// This is now a wrapper around the non-blocking audio implementation for backward compatibility.
func StopAudioStreaming() { func StopAudioStreaming() {
if isAudioServerProcess() { StopNonBlockingAudioStreaming()
// Audio server subprocess: stop CGO audio processing
StopAudioOutputStreaming()
} else {
// Main process: stop relay if running
StopAudioRelay()
}
}
// StartNonBlockingAudioStreaming is an alias for backward compatibility
func StartNonBlockingAudioStreaming(send func([]byte)) error {
return StartAudioOutputStreaming(send)
}
// StopNonBlockingAudioStreaming is an alias for backward compatibility
func StopNonBlockingAudioStreaming() {
StopAudioOutputStreaming()
}
// SetAudioOutputSupervisor sets the global audio output supervisor
func SetAudioOutputSupervisor(supervisor *AudioServerSupervisor) {
atomic.StorePointer(&globalOutputSupervisor, unsafe.Pointer(supervisor))
}
// GetAudioOutputSupervisor returns the global audio output supervisor
func GetAudioOutputSupervisor() *AudioServerSupervisor {
ptr := atomic.LoadPointer(&globalOutputSupervisor)
if ptr == nil {
return nil
}
return (*AudioServerSupervisor)(ptr)
} }

View File

@ -4,6 +4,7 @@ import (
"errors" "errors"
"sync/atomic" "sync/atomic"
"time" "time"
// Explicit import for CGO audio stream glue
) )
var ( var (
@ -32,6 +33,7 @@ type AudioConfig struct {
} }
// AudioMetrics tracks audio performance metrics // AudioMetrics tracks audio performance metrics
// Note: 64-bit fields must be first for proper alignment on 32-bit ARM
type AudioMetrics struct { type AudioMetrics struct {
FramesReceived int64 FramesReceived int64
FramesDropped int64 FramesDropped int64
@ -59,67 +61,72 @@ var (
metrics AudioMetrics metrics AudioMetrics
) )
// qualityPresets defines the base quality configurations // GetAudioQualityPresets returns predefined quality configurations
var qualityPresets = map[AudioQuality]struct {
outputBitrate, inputBitrate int
sampleRate, channels int
frameSize time.Duration
}{
AudioQualityLow: {
outputBitrate: 32, inputBitrate: 16,
sampleRate: 22050, channels: 1,
frameSize: 40 * time.Millisecond,
},
AudioQualityMedium: {
outputBitrate: 64, inputBitrate: 32,
sampleRate: 44100, channels: 2,
frameSize: 20 * time.Millisecond,
},
AudioQualityHigh: {
outputBitrate: 128, inputBitrate: 64,
sampleRate: 48000, channels: 2,
frameSize: 20 * time.Millisecond,
},
AudioQualityUltra: {
outputBitrate: 192, inputBitrate: 96,
sampleRate: 48000, channels: 2,
frameSize: 10 * time.Millisecond,
},
}
// GetAudioQualityPresets returns predefined quality configurations for audio output
func GetAudioQualityPresets() map[AudioQuality]AudioConfig { func GetAudioQualityPresets() map[AudioQuality]AudioConfig {
result := make(map[AudioQuality]AudioConfig) return map[AudioQuality]AudioConfig{
for quality, preset := range qualityPresets { AudioQualityLow: {
result[quality] = AudioConfig{ Quality: AudioQualityLow,
Quality: quality, Bitrate: 32,
Bitrate: preset.outputBitrate, SampleRate: 22050,
SampleRate: preset.sampleRate, Channels: 1,
Channels: preset.channels, FrameSize: 40 * time.Millisecond,
FrameSize: preset.frameSize, },
} AudioQualityMedium: {
Quality: AudioQualityMedium,
Bitrate: 64,
SampleRate: 44100,
Channels: 2,
FrameSize: 20 * time.Millisecond,
},
AudioQualityHigh: {
Quality: AudioQualityHigh,
Bitrate: 128,
SampleRate: 48000,
Channels: 2,
FrameSize: 20 * time.Millisecond,
},
AudioQualityUltra: {
Quality: AudioQualityUltra,
Bitrate: 192,
SampleRate: 48000,
Channels: 2,
FrameSize: 10 * time.Millisecond,
},
} }
return result
} }
// GetMicrophoneQualityPresets returns predefined quality configurations for microphone input // GetMicrophoneQualityPresets returns predefined quality configurations for microphone input
func GetMicrophoneQualityPresets() map[AudioQuality]AudioConfig { func GetMicrophoneQualityPresets() map[AudioQuality]AudioConfig {
result := make(map[AudioQuality]AudioConfig) return map[AudioQuality]AudioConfig{
for quality, preset := range qualityPresets { AudioQualityLow: {
result[quality] = AudioConfig{ Quality: AudioQualityLow,
Quality: quality, Bitrate: 16,
Bitrate: preset.inputBitrate, SampleRate: 16000,
SampleRate: func() int { Channels: 1,
if quality == AudioQualityLow { FrameSize: 40 * time.Millisecond,
return 16000 },
} AudioQualityMedium: {
return preset.sampleRate Quality: AudioQualityMedium,
}(), Bitrate: 32,
Channels: 1, // Microphone is always mono SampleRate: 22050,
FrameSize: preset.frameSize, Channels: 1,
} FrameSize: 20 * time.Millisecond,
},
AudioQualityHigh: {
Quality: AudioQualityHigh,
Bitrate: 64,
SampleRate: 44100,
Channels: 1,
FrameSize: 20 * time.Millisecond,
},
AudioQualityUltra: {
Quality: AudioQualityUltra,
Bitrate: 96,
SampleRate: 48000,
Channels: 1,
FrameSize: 10 * time.Millisecond,
},
} }
return result
} }
// SetAudioQuality updates the current audio quality configuration // SetAudioQuality updates the current audio quality configuration
@ -150,20 +157,9 @@ func GetMicrophoneConfig() AudioConfig {
// GetAudioMetrics returns current audio metrics // GetAudioMetrics returns current audio metrics
func GetAudioMetrics() AudioMetrics { func GetAudioMetrics() AudioMetrics {
// Get base metrics
framesReceived := atomic.LoadInt64(&metrics.FramesReceived)
framesDropped := atomic.LoadInt64(&metrics.FramesDropped)
// If audio relay is running, use relay stats instead
if IsAudioRelayRunning() {
relayReceived, relayDropped := GetAudioRelayStats()
framesReceived = relayReceived
framesDropped = relayDropped
}
return AudioMetrics{ return AudioMetrics{
FramesReceived: framesReceived, FramesReceived: atomic.LoadInt64(&metrics.FramesReceived),
FramesDropped: framesDropped, FramesDropped: atomic.LoadInt64(&metrics.FramesDropped),
BytesProcessed: atomic.LoadInt64(&metrics.BytesProcessed), BytesProcessed: atomic.LoadInt64(&metrics.BytesProcessed),
LastFrameTime: metrics.LastFrameTime, LastFrameTime: metrics.LastFrameTime,
ConnectionDrops: atomic.LoadInt64(&metrics.ConnectionDrops), ConnectionDrops: atomic.LoadInt64(&metrics.ConnectionDrops),

View File

@ -2,6 +2,8 @@ package audio
import ( import (
"sync" "sync"
"github.com/jetkvm/kvm/internal/logging"
) )
var audioMuteState struct { var audioMuteState struct {
@ -11,7 +13,9 @@ var audioMuteState struct {
func SetAudioMuted(muted bool) { func SetAudioMuted(muted bool) {
audioMuteState.mu.Lock() audioMuteState.mu.Lock()
prev := audioMuteState.muted
audioMuteState.muted = muted audioMuteState.muted = muted
logging.GetDefaultLogger().Info().Str("component", "audio").Msgf("SetAudioMuted: prev=%v, new=%v", prev, muted)
audioMuteState.mu.Unlock() audioMuteState.mu.Unlock()
} }

View File

@ -28,23 +28,27 @@ type BatchAudioProcessor struct {
// Batch queues and state (atomic for lock-free access) // Batch queues and state (atomic for lock-free access)
readQueue chan batchReadRequest readQueue chan batchReadRequest
writeQueue chan batchWriteRequest
initialized int32 initialized int32
running int32 running int32
threadPinned int32 threadPinned int32
// Buffers (pre-allocated to avoid allocation overhead) // Buffers (pre-allocated to avoid allocation overhead)
readBufPool *sync.Pool readBufPool *sync.Pool
writeBufPool *sync.Pool
} }
type BatchAudioStats struct { type BatchAudioStats struct {
// int64 fields MUST be first for ARM32 alignment // int64 fields MUST be first for ARM32 alignment
BatchedReads int64 BatchedReads int64
SingleReads int64 BatchedWrites int64
BatchedFrames int64 SingleReads int64
SingleFrames int64 SingleWrites int64
CGOCallsReduced int64 BatchedFrames int64
OSThreadPinTime time.Duration // time.Duration is int64 internally SingleFrames int64
LastBatchTime time.Time CGOCallsReduced int64
OSThreadPinTime time.Duration // time.Duration is int64 internally
LastBatchTime time.Time
} }
type batchReadRequest struct { type batchReadRequest struct {
@ -53,11 +57,22 @@ type batchReadRequest struct {
timestamp time.Time timestamp time.Time
} }
// batchWriteRequest is a single decode+write operation queued for batching.
type batchWriteRequest struct {
	buffer     []byte                // encoded frame to decode and write
	resultChan chan batchWriteResult // receives the outcome of the operation
	timestamp  time.Time             // time the request was created/enqueued
}
type batchReadResult struct { type batchReadResult struct {
length int length int
err error err error
} }
// batchWriteResult carries the outcome of a batched decode+write operation.
type batchWriteResult struct {
	written int   // value returned by CGOAudioDecodeWrite
	err     error // error from the write, if any
}
// NewBatchAudioProcessor creates a new batch audio processor // NewBatchAudioProcessor creates a new batch audio processor
func NewBatchAudioProcessor(batchSize int, batchDuration time.Duration) *BatchAudioProcessor { func NewBatchAudioProcessor(batchSize int, batchDuration time.Duration) *BatchAudioProcessor {
ctx, cancel := context.WithCancel(context.Background()) ctx, cancel := context.WithCancel(context.Background())
@ -70,11 +85,17 @@ func NewBatchAudioProcessor(batchSize int, batchDuration time.Duration) *BatchAu
batchSize: batchSize, batchSize: batchSize,
batchDuration: batchDuration, batchDuration: batchDuration,
readQueue: make(chan batchReadRequest, batchSize*2), readQueue: make(chan batchReadRequest, batchSize*2),
writeQueue: make(chan batchWriteRequest, batchSize*2),
readBufPool: &sync.Pool{ readBufPool: &sync.Pool{
New: func() interface{} { New: func() interface{} {
return make([]byte, 1500) // Max audio frame size return make([]byte, 1500) // Max audio frame size
}, },
}, },
writeBufPool: &sync.Pool{
New: func() interface{} {
return make([]byte, 4096) // Max write buffer size
},
},
} }
return processor return processor
@ -93,6 +114,7 @@ func (bap *BatchAudioProcessor) Start() error {
// Start batch processing goroutines // Start batch processing goroutines
go bap.batchReadProcessor() go bap.batchReadProcessor()
go bap.batchWriteProcessor()
bap.logger.Info().Int("batch_size", bap.batchSize). bap.logger.Info().Int("batch_size", bap.batchSize).
Dur("batch_duration", bap.batchDuration). Dur("batch_duration", bap.batchDuration).
@ -153,6 +175,44 @@ func (bap *BatchAudioProcessor) BatchReadEncode(buffer []byte) (int, error) {
} }
} }
// BatchDecodeWrite performs batched audio decode and write operations.
// The buffer is queued for the batch write processor and the caller waits
// for the result; it falls back to a direct CGOAudioDecodeWrite call when
// the processor is not running, the queue is saturated (5ms enqueue
// timeout), or the result does not arrive within 50ms.
func (bap *BatchAudioProcessor) BatchDecodeWrite(buffer []byte) (int, error) {
	if atomic.LoadInt32(&bap.running) == 0 {
		// Fallback to single operation if batch processor is not running
		atomic.AddInt64(&bap.stats.SingleWrites, 1)
		atomic.AddInt64(&bap.stats.SingleFrames, 1)
		return CGOAudioDecodeWrite(buffer)
	}

	// Buffered (cap 1) so the processor's result send never blocks.
	resultChan := make(chan batchWriteResult, 1)
	request := batchWriteRequest{
		buffer:     buffer,
		resultChan: resultChan,
		timestamp:  time.Now(),
	}

	select {
	case bap.writeQueue <- request:
		// Successfully queued
	case <-time.After(5 * time.Millisecond):
		// Queue is full or blocked, fallback to single operation
		atomic.AddInt64(&bap.stats.SingleWrites, 1)
		atomic.AddInt64(&bap.stats.SingleFrames, 1)
		return CGOAudioDecodeWrite(buffer)
	}

	// Wait for result
	select {
	case result := <-resultChan:
		return result.written, result.err
	case <-time.After(50 * time.Millisecond):
		// Timeout, fallback to single operation.
		// NOTE(review): the already-queued request is NOT withdrawn here, so
		// the batch processor may still decode/write this buffer later — the
		// same frame can be written twice. Confirm whether that is acceptable
		// or whether the request needs cancellation.
		atomic.AddInt64(&bap.stats.SingleWrites, 1)
		atomic.AddInt64(&bap.stats.SingleFrames, 1)
		return CGOAudioDecodeWrite(buffer)
	}
}
// batchReadProcessor processes batched read operations // batchReadProcessor processes batched read operations
func (bap *BatchAudioProcessor) batchReadProcessor() { func (bap *BatchAudioProcessor) batchReadProcessor() {
defer bap.logger.Debug().Msg("batch read processor stopped") defer bap.logger.Debug().Msg("batch read processor stopped")
@ -189,6 +249,42 @@ func (bap *BatchAudioProcessor) batchReadProcessor() {
} }
} }
// batchWriteProcessor drains the write queue in the background, flushing a
// batch either when it reaches batchSize or when the batchDuration ticker
// fires, whichever comes first. It runs until running is cleared or the
// context is cancelled, then flushes any remaining requests.
func (bap *BatchAudioProcessor) batchWriteProcessor() {
	defer bap.logger.Debug().Msg("batch write processor stopped")
	ticker := time.NewTicker(bap.batchDuration)
	defer ticker.Stop()

	// Reused across iterations; cleared with batch[:0] to keep capacity.
	var batch []batchWriteRequest
	batch = make([]batchWriteRequest, 0, bap.batchSize)

	for atomic.LoadInt32(&bap.running) == 1 {
		select {
		case <-bap.ctx.Done():
			// NOTE(review): returning here skips the final flush below, so
			// queued-but-unflushed requests are dropped on context cancel;
			// their callers will hit the 50ms result timeout. Confirm intended.
			return
		case req := <-bap.writeQueue:
			batch = append(batch, req)
			if len(batch) >= bap.batchSize {
				bap.processBatchWrite(batch)
				batch = batch[:0] // Clear slice but keep capacity
			}
		case <-ticker.C:
			// Periodic flush so partially-filled batches don't stall.
			if len(batch) > 0 {
				bap.processBatchWrite(batch)
				batch = batch[:0] // Clear slice but keep capacity
			}
		}
	}

	// Process any remaining requests after running is cleared.
	if len(batch) > 0 {
		bap.processBatchWrite(batch)
	}
}
// processBatchRead processes a batch of read requests efficiently // processBatchRead processes a batch of read requests efficiently
func (bap *BatchAudioProcessor) processBatchRead(batch []batchReadRequest) { func (bap *BatchAudioProcessor) processBatchRead(batch []batchReadRequest) {
if len(batch) == 0 { if len(batch) == 0 {
@ -199,16 +295,7 @@ func (bap *BatchAudioProcessor) processBatchRead(batch []batchReadRequest) {
start := time.Now() start := time.Now()
if atomic.CompareAndSwapInt32(&bap.threadPinned, 0, 1) { if atomic.CompareAndSwapInt32(&bap.threadPinned, 0, 1) {
runtime.LockOSThread() runtime.LockOSThread()
// Set high priority for batch audio processing
if err := SetAudioThreadPriority(); err != nil {
bap.logger.Warn().Err(err).Msg("Failed to set batch audio processing priority")
}
defer func() { defer func() {
if err := ResetThreadPriority(); err != nil {
bap.logger.Warn().Err(err).Msg("Failed to reset thread priority")
}
runtime.UnlockOSThread() runtime.UnlockOSThread()
atomic.StoreInt32(&bap.threadPinned, 0) atomic.StoreInt32(&bap.threadPinned, 0)
bap.stats.OSThreadPinTime += time.Since(start) bap.stats.OSThreadPinTime += time.Since(start)
@ -241,11 +328,56 @@ func (bap *BatchAudioProcessor) processBatchRead(batch []batchReadRequest) {
bap.stats.LastBatchTime = time.Now() bap.stats.LastBatchTime = time.Now()
} }
// processBatchWrite executes a batch of write requests back-to-back,
// pinning the goroutine to an OS thread for the duration of the batch to
// reduce thread-switching overhead around the CGO calls, and updates the
// batching statistics.
func (bap *BatchAudioProcessor) processBatchWrite(batch []batchWriteRequest) {
	if len(batch) == 0 {
		return
	}

	// Pin to OS thread for the entire batch to minimize thread switching
	// overhead. The threadPinned CAS means only one processor (read or
	// write) holds the pin at a time; if the flag is already taken the
	// batch proceeds unpinned.
	start := time.Now()
	if atomic.CompareAndSwapInt32(&bap.threadPinned, 0, 1) {
		runtime.LockOSThread()
		defer func() {
			runtime.UnlockOSThread()
			atomic.StoreInt32(&bap.threadPinned, 0)
			// NOTE(review): OSThreadPinTime/LastBatchTime are written without
			// atomics here while GetStats reads them — confirm acceptable.
			bap.stats.OSThreadPinTime += time.Since(start)
		}()
	}

	batchSize := len(batch)
	atomic.AddInt64(&bap.stats.BatchedWrites, 1)
	atomic.AddInt64(&bap.stats.BatchedFrames, int64(batchSize))
	if batchSize > 1 {
		// Each batched frame beyond the first saves one CGO call's overhead.
		atomic.AddInt64(&bap.stats.CGOCallsReduced, int64(batchSize-1))
	}

	// Process each request in the batch.
	for _, req := range batch {
		written, err := CGOAudioDecodeWrite(req.buffer)
		result := batchWriteResult{
			written: written,
			err:     err,
		}
		// Send result back (non-blocking)
		select {
		case req.resultChan <- result:
		default:
			// Requestor timed out, drop result
		}
	}

	bap.stats.LastBatchTime = time.Now()
}
// GetStats returns current batch processor statistics // GetStats returns current batch processor statistics
func (bap *BatchAudioProcessor) GetStats() BatchAudioStats { func (bap *BatchAudioProcessor) GetStats() BatchAudioStats {
return BatchAudioStats{ return BatchAudioStats{
BatchedReads: atomic.LoadInt64(&bap.stats.BatchedReads), BatchedReads: atomic.LoadInt64(&bap.stats.BatchedReads),
BatchedWrites: atomic.LoadInt64(&bap.stats.BatchedWrites),
SingleReads: atomic.LoadInt64(&bap.stats.SingleReads), SingleReads: atomic.LoadInt64(&bap.stats.SingleReads),
SingleWrites: atomic.LoadInt64(&bap.stats.SingleWrites),
BatchedFrames: atomic.LoadInt64(&bap.stats.BatchedFrames), BatchedFrames: atomic.LoadInt64(&bap.stats.BatchedFrames),
SingleFrames: atomic.LoadInt64(&bap.stats.SingleFrames), SingleFrames: atomic.LoadInt64(&bap.stats.SingleFrames),
CGOCallsReduced: atomic.LoadInt64(&bap.stats.CGOCallsReduced), CGOCallsReduced: atomic.LoadInt64(&bap.stats.CGOCallsReduced),
@ -261,7 +393,7 @@ func (bap *BatchAudioProcessor) IsRunning() bool {
// Global batch processor instance // Global batch processor instance
var ( var (
globalBatchProcessor unsafe.Pointer // *BatchAudioProcessor globalBatchProcessor unsafe.Pointer // *BatchAudioProcessor
batchProcessorInitialized int32 batchProcessorInitialized int32
) )
@ -312,3 +444,12 @@ func BatchCGOAudioReadEncode(buffer []byte) (int, error) {
} }
return CGOAudioReadEncode(buffer) return CGOAudioReadEncode(buffer)
} }
// BatchCGOAudioDecodeWrite is a batched version of CGOAudioDecodeWrite.
// It routes the frame through the global batch processor when one is
// running and falls back to the direct CGO call otherwise.
func BatchCGOAudioDecodeWrite(buffer []byte) (int, error) {
	if p := GetBatchAudioProcessor(); p != nil && p.IsRunning() {
		return p.BatchDecodeWrite(buffer)
	}
	return CGOAudioDecodeWrite(buffer)
}

View File

@ -2,211 +2,63 @@ package audio
import ( import (
"sync" "sync"
"sync/atomic"
) )
// AudioBufferPool manages reusable audio buffers to reduce allocations
type AudioBufferPool struct { type AudioBufferPool struct {
// Atomic fields MUST be first for ARM32 alignment (int64 fields need 8-byte alignment) pool sync.Pool
currentSize int64 // Current pool size (atomic)
hitCount int64 // Pool hit counter (atomic)
missCount int64 // Pool miss counter (atomic)
// Other fields
pool sync.Pool
bufferSize int
maxPoolSize int
mutex sync.RWMutex
// Memory optimization fields
preallocated []*[]byte // Pre-allocated buffers for immediate use
preallocSize int // Number of pre-allocated buffers
} }
// NewAudioBufferPool creates a new buffer pool for audio frames
func NewAudioBufferPool(bufferSize int) *AudioBufferPool { func NewAudioBufferPool(bufferSize int) *AudioBufferPool {
// Pre-allocate 20% of max pool size for immediate availability
preallocSize := 20
preallocated := make([]*[]byte, 0, preallocSize)
// Pre-allocate buffers to reduce initial allocation overhead
for i := 0; i < preallocSize; i++ {
buf := make([]byte, 0, bufferSize)
preallocated = append(preallocated, &buf)
}
return &AudioBufferPool{ return &AudioBufferPool{
bufferSize: bufferSize,
maxPoolSize: 100, // Limit pool size to prevent excessive memory usage
preallocated: preallocated,
preallocSize: preallocSize,
pool: sync.Pool{ pool: sync.Pool{
New: func() interface{} { New: func() interface{} {
return make([]byte, 0, bufferSize) // Pre-allocate buffer with specified size
return make([]byte, bufferSize)
}, },
}, },
} }
} }
// Get retrieves a buffer from the pool
func (p *AudioBufferPool) Get() []byte { func (p *AudioBufferPool) Get() []byte {
// First try pre-allocated buffers for fastest access return p.pool.Get().([]byte)
p.mutex.Lock()
if len(p.preallocated) > 0 {
buf := p.preallocated[len(p.preallocated)-1]
p.preallocated = p.preallocated[:len(p.preallocated)-1]
p.mutex.Unlock()
atomic.AddInt64(&p.hitCount, 1)
return (*buf)[:0] // Reset length but keep capacity
}
p.mutex.Unlock()
// Try sync.Pool next
if buf := p.pool.Get(); buf != nil {
bufPtr := buf.(*[]byte)
// Update pool size counter when retrieving from pool
p.mutex.Lock()
if p.currentSize > 0 {
p.currentSize--
}
p.mutex.Unlock()
atomic.AddInt64(&p.hitCount, 1)
return (*bufPtr)[:0] // Reset length but keep capacity
}
// Last resort: allocate new buffer
atomic.AddInt64(&p.missCount, 1)
return make([]byte, 0, p.bufferSize)
} }
// Put returns a buffer to the pool
func (p *AudioBufferPool) Put(buf []byte) { func (p *AudioBufferPool) Put(buf []byte) {
if cap(buf) < p.bufferSize { // Reset length but keep capacity for reuse
return // Buffer too small, don't pool it if cap(buf) >= 1500 { // Only pool buffers of reasonable size
p.pool.Put(buf[:0])
} }
// Reset buffer for reuse
resetBuf := buf[:0]
// First try to return to pre-allocated pool for fastest reuse
p.mutex.Lock()
if len(p.preallocated) < p.preallocSize {
p.preallocated = append(p.preallocated, &resetBuf)
p.mutex.Unlock()
return
}
p.mutex.Unlock()
// Check sync.Pool size limit to prevent excessive memory usage
p.mutex.RLock()
currentSize := p.currentSize
p.mutex.RUnlock()
if currentSize >= int64(p.maxPoolSize) {
return // Pool is full, let GC handle this buffer
}
// Return to sync.Pool
p.pool.Put(&resetBuf)
// Update pool size counter
p.mutex.Lock()
p.currentSize++
p.mutex.Unlock()
} }
// Global buffer pools for different audio operations
var ( var (
audioFramePool = NewAudioBufferPool(1500) // Pool for 1500-byte audio frame buffers (Opus max frame size)
audioFramePool = NewAudioBufferPool(1500)
// Pool for smaller control buffers
audioControlPool = NewAudioBufferPool(64) audioControlPool = NewAudioBufferPool(64)
) )
// GetAudioFrameBuffer gets a reusable buffer for audio frames
func GetAudioFrameBuffer() []byte { func GetAudioFrameBuffer() []byte {
return audioFramePool.Get() return audioFramePool.Get()
} }
// PutAudioFrameBuffer returns a buffer to the frame pool
func PutAudioFrameBuffer(buf []byte) { func PutAudioFrameBuffer(buf []byte) {
audioFramePool.Put(buf) audioFramePool.Put(buf)
} }
// GetAudioControlBuffer gets a reusable buffer for control data
func GetAudioControlBuffer() []byte { func GetAudioControlBuffer() []byte {
return audioControlPool.Get() return audioControlPool.Get()
} }
// PutAudioControlBuffer returns a buffer to the control pool
func PutAudioControlBuffer(buf []byte) { func PutAudioControlBuffer(buf []byte) {
audioControlPool.Put(buf) audioControlPool.Put(buf)
} }
// GetPoolStats returns detailed statistics about this buffer pool
func (p *AudioBufferPool) GetPoolStats() AudioBufferPoolDetailedStats {
p.mutex.RLock()
preallocatedCount := len(p.preallocated)
currentSize := p.currentSize
p.mutex.RUnlock()
hitCount := atomic.LoadInt64(&p.hitCount)
missCount := atomic.LoadInt64(&p.missCount)
totalRequests := hitCount + missCount
var hitRate float64
if totalRequests > 0 {
hitRate = float64(hitCount) / float64(totalRequests) * 100
}
return AudioBufferPoolDetailedStats{
BufferSize: p.bufferSize,
MaxPoolSize: p.maxPoolSize,
CurrentPoolSize: currentSize,
PreallocatedCount: int64(preallocatedCount),
PreallocatedMax: int64(p.preallocSize),
HitCount: hitCount,
MissCount: missCount,
HitRate: hitRate,
}
}
// AudioBufferPoolDetailedStats provides detailed pool statistics
type AudioBufferPoolDetailedStats struct {
BufferSize int
MaxPoolSize int
CurrentPoolSize int64
PreallocatedCount int64
PreallocatedMax int64
HitCount int64
MissCount int64
HitRate float64 // Percentage
}
// GetAudioBufferPoolStats returns statistics about the audio buffer pools
type AudioBufferPoolStats struct {
FramePoolSize int64
FramePoolMax int
ControlPoolSize int64
ControlPoolMax int
// Enhanced statistics
FramePoolHitRate float64
ControlPoolHitRate float64
FramePoolDetails AudioBufferPoolDetailedStats
ControlPoolDetails AudioBufferPoolDetailedStats
}
func GetAudioBufferPoolStats() AudioBufferPoolStats {
audioFramePool.mutex.RLock()
frameSize := audioFramePool.currentSize
frameMax := audioFramePool.maxPoolSize
audioFramePool.mutex.RUnlock()
audioControlPool.mutex.RLock()
controlSize := audioControlPool.currentSize
controlMax := audioControlPool.maxPoolSize
audioControlPool.mutex.RUnlock()
// Get detailed statistics
frameDetails := audioFramePool.GetPoolStats()
controlDetails := audioControlPool.GetPoolStats()
return AudioBufferPoolStats{
FramePoolSize: frameSize,
FramePoolMax: frameMax,
ControlPoolSize: controlSize,
ControlPoolMax: controlMax,
FramePoolHitRate: frameDetails.HitRate,
ControlPoolHitRate: controlDetails.HitRate,
FramePoolDetails: frameDetails,
ControlPoolDetails: controlDetails,
}
}

View File

@ -8,7 +8,7 @@ import (
) )
/* /*
#cgo CFLAGS: -I$HOME/.jetkvm/audio-libs/alsa-lib-$ALSA_VERSION/include -I$HOME/.jetkvm/audio-libs/opus-$OPUS_VERSION/include -I$HOME/.jetkvm/audio-libs/opus-$OPUS_VERSION/celt #cgo CFLAGS: -I${SRCDIR}/../../tools/alsa-opus-includes
#cgo LDFLAGS: -L$HOME/.jetkvm/audio-libs/alsa-lib-$ALSA_VERSION/src/.libs -lasound -L$HOME/.jetkvm/audio-libs/opus-$OPUS_VERSION/.libs -lopus -lm -ldl -static #cgo LDFLAGS: -L$HOME/.jetkvm/audio-libs/alsa-lib-$ALSA_VERSION/src/.libs -lasound -L$HOME/.jetkvm/audio-libs/opus-$OPUS_VERSION/.libs -lopus -lm -ldl -static
#include <alsa/asoundlib.h> #include <alsa/asoundlib.h>
#include <opus.h> #include <opus.h>
@ -22,14 +22,8 @@ static snd_pcm_t *pcm_handle = NULL;
static snd_pcm_t *pcm_playback_handle = NULL; static snd_pcm_t *pcm_playback_handle = NULL;
static OpusEncoder *encoder = NULL; static OpusEncoder *encoder = NULL;
static OpusDecoder *decoder = NULL; static OpusDecoder *decoder = NULL;
// Optimized Opus encoder settings for ARM Cortex-A7 static int opus_bitrate = 64000;
static int opus_bitrate = 96000; // Increased for better quality static int opus_complexity = 5;
static int opus_complexity = 3; // Reduced for ARM performance
static int opus_vbr = 1; // Variable bitrate enabled
static int opus_vbr_constraint = 1; // Constrained VBR for consistent latency
static int opus_signal_type = OPUS_SIGNAL_MUSIC; // Optimized for general audio
static int opus_bandwidth = OPUS_BANDWIDTH_FULLBAND; // Full bandwidth
static int opus_dtx = 0; // Disable DTX for real-time audio
static int sample_rate = 48000; static int sample_rate = 48000;
static int channels = 2; static int channels = 2;
static int frame_size = 960; // 20ms for 48kHz static int frame_size = 960; // 20ms for 48kHz
@ -45,7 +39,7 @@ static volatile int playback_initialized = 0;
static int safe_alsa_open(snd_pcm_t **handle, const char *device, snd_pcm_stream_t stream) { static int safe_alsa_open(snd_pcm_t **handle, const char *device, snd_pcm_stream_t stream) {
int attempts = 3; int attempts = 3;
int err; int err;
while (attempts-- > 0) { while (attempts-- > 0) {
err = snd_pcm_open(handle, device, stream, SND_PCM_NONBLOCK); err = snd_pcm_open(handle, device, stream, SND_PCM_NONBLOCK);
if (err >= 0) { if (err >= 0) {
@ -53,7 +47,7 @@ static int safe_alsa_open(snd_pcm_t **handle, const char *device, snd_pcm_stream
snd_pcm_nonblock(*handle, 0); snd_pcm_nonblock(*handle, 0);
return 0; return 0;
} }
if (err == -EBUSY && attempts > 0) { if (err == -EBUSY && attempts > 0) {
// Device busy, wait and retry // Device busy, wait and retry
usleep(50000); // 50ms usleep(50000); // 50ms
@ -69,26 +63,26 @@ static int configure_alsa_device(snd_pcm_t *handle, const char *device_name) {
snd_pcm_hw_params_t *params; snd_pcm_hw_params_t *params;
snd_pcm_sw_params_t *sw_params; snd_pcm_sw_params_t *sw_params;
int err; int err;
if (!handle) return -1; if (!handle) return -1;
// Use stack allocation for better performance // Use stack allocation for better performance
snd_pcm_hw_params_alloca(&params); snd_pcm_hw_params_alloca(&params);
snd_pcm_sw_params_alloca(&sw_params); snd_pcm_sw_params_alloca(&sw_params);
// Hardware parameters // Hardware parameters
err = snd_pcm_hw_params_any(handle, params); err = snd_pcm_hw_params_any(handle, params);
if (err < 0) return err; if (err < 0) return err;
err = snd_pcm_hw_params_set_access(handle, params, SND_PCM_ACCESS_RW_INTERLEAVED); err = snd_pcm_hw_params_set_access(handle, params, SND_PCM_ACCESS_RW_INTERLEAVED);
if (err < 0) return err; if (err < 0) return err;
err = snd_pcm_hw_params_set_format(handle, params, SND_PCM_FORMAT_S16_LE); err = snd_pcm_hw_params_set_format(handle, params, SND_PCM_FORMAT_S16_LE);
if (err < 0) return err; if (err < 0) return err;
err = snd_pcm_hw_params_set_channels(handle, params, channels); err = snd_pcm_hw_params_set_channels(handle, params, channels);
if (err < 0) return err; if (err < 0) return err;
// Set exact rate for better performance // Set exact rate for better performance
err = snd_pcm_hw_params_set_rate(handle, params, sample_rate, 0); err = snd_pcm_hw_params_set_rate(handle, params, sample_rate, 0);
if (err < 0) { if (err < 0) {
@ -97,70 +91,70 @@ static int configure_alsa_device(snd_pcm_t *handle, const char *device_name) {
err = snd_pcm_hw_params_set_rate_near(handle, params, &rate, 0); err = snd_pcm_hw_params_set_rate_near(handle, params, &rate, 0);
if (err < 0) return err; if (err < 0) return err;
} }
// Optimize buffer sizes for low latency // Optimize buffer sizes for low latency
snd_pcm_uframes_t period_size = frame_size; snd_pcm_uframes_t period_size = frame_size;
err = snd_pcm_hw_params_set_period_size_near(handle, params, &period_size, 0); err = snd_pcm_hw_params_set_period_size_near(handle, params, &period_size, 0);
if (err < 0) return err; if (err < 0) return err;
// Set buffer size to 4 periods for good latency/stability balance // Set buffer size to 4 periods for good latency/stability balance
snd_pcm_uframes_t buffer_size = period_size * 4; snd_pcm_uframes_t buffer_size = period_size * 4;
err = snd_pcm_hw_params_set_buffer_size_near(handle, params, &buffer_size); err = snd_pcm_hw_params_set_buffer_size_near(handle, params, &buffer_size);
if (err < 0) return err; if (err < 0) return err;
err = snd_pcm_hw_params(handle, params); err = snd_pcm_hw_params(handle, params);
if (err < 0) return err; if (err < 0) return err;
// Software parameters for optimal performance // Software parameters for optimal performance
err = snd_pcm_sw_params_current(handle, sw_params); err = snd_pcm_sw_params_current(handle, sw_params);
if (err < 0) return err; if (err < 0) return err;
// Start playback/capture when buffer is period_size frames // Start playback/capture when buffer is period_size frames
err = snd_pcm_sw_params_set_start_threshold(handle, sw_params, period_size); err = snd_pcm_sw_params_set_start_threshold(handle, sw_params, period_size);
if (err < 0) return err; if (err < 0) return err;
// Allow transfers when at least period_size frames are available // Allow transfers when at least period_size frames are available
err = snd_pcm_sw_params_set_avail_min(handle, sw_params, period_size); err = snd_pcm_sw_params_set_avail_min(handle, sw_params, period_size);
if (err < 0) return err; if (err < 0) return err;
err = snd_pcm_sw_params(handle, sw_params); err = snd_pcm_sw_params(handle, sw_params);
if (err < 0) return err; if (err < 0) return err;
return snd_pcm_prepare(handle); return snd_pcm_prepare(handle);
} }
// Initialize ALSA and Opus encoder with improved safety // Initialize ALSA and Opus encoder with improved safety
int jetkvm_audio_init() { int jetkvm_audio_init() {
int err; int err;
// Prevent concurrent initialization // Prevent concurrent initialization
if (__sync_bool_compare_and_swap(&capture_initializing, 0, 1) == 0) { if (__sync_bool_compare_and_swap(&capture_initializing, 0, 1) == 0) {
return -EBUSY; // Already initializing return -EBUSY; // Already initializing
} }
// Check if already initialized // Check if already initialized
if (capture_initialized) { if (capture_initialized) {
capture_initializing = 0; capture_initializing = 0;
return 0; return 0;
} }
// Clean up any existing resources first // Clean up any existing resources first
if (encoder) { if (encoder) {
opus_encoder_destroy(encoder); opus_encoder_destroy(encoder);
encoder = NULL; encoder = NULL;
} }
if (pcm_handle) { if (pcm_handle) {
snd_pcm_close(pcm_handle); snd_pcm_close(pcm_handle);
pcm_handle = NULL; pcm_handle = NULL;
} }
// Try to open ALSA capture device // Try to open ALSA capture device
err = safe_alsa_open(&pcm_handle, "hw:1,0", SND_PCM_STREAM_CAPTURE); err = safe_alsa_open(&pcm_handle, "hw:1,0", SND_PCM_STREAM_CAPTURE);
if (err < 0) { if (err < 0) {
capture_initializing = 0; capture_initializing = 0;
return -1; return -1;
} }
// Configure the device // Configure the device
err = configure_alsa_device(pcm_handle, "capture"); err = configure_alsa_device(pcm_handle, "capture");
if (err < 0) { if (err < 0) {
@ -169,8 +163,8 @@ int jetkvm_audio_init() {
capture_initializing = 0; capture_initializing = 0;
return -1; return -1;
} }
// Initialize Opus encoder with optimized settings // Initialize Opus encoder
int opus_err = 0; int opus_err = 0;
encoder = opus_encoder_create(sample_rate, channels, OPUS_APPLICATION_AUDIO, &opus_err); encoder = opus_encoder_create(sample_rate, channels, OPUS_APPLICATION_AUDIO, &opus_err);
if (!encoder || opus_err != OPUS_OK) { if (!encoder || opus_err != OPUS_OK) {
@ -178,20 +172,10 @@ int jetkvm_audio_init() {
capture_initializing = 0; capture_initializing = 0;
return -2; return -2;
} }
// Apply optimized Opus encoder settings
opus_encoder_ctl(encoder, OPUS_SET_BITRATE(opus_bitrate)); opus_encoder_ctl(encoder, OPUS_SET_BITRATE(opus_bitrate));
opus_encoder_ctl(encoder, OPUS_SET_COMPLEXITY(opus_complexity)); opus_encoder_ctl(encoder, OPUS_SET_COMPLEXITY(opus_complexity));
opus_encoder_ctl(encoder, OPUS_SET_VBR(opus_vbr));
opus_encoder_ctl(encoder, OPUS_SET_VBR_CONSTRAINT(opus_vbr_constraint));
opus_encoder_ctl(encoder, OPUS_SET_SIGNAL(opus_signal_type));
opus_encoder_ctl(encoder, OPUS_SET_BANDWIDTH(opus_bandwidth));
opus_encoder_ctl(encoder, OPUS_SET_DTX(opus_dtx));
// Enable packet loss concealment for better resilience
opus_encoder_ctl(encoder, OPUS_SET_PACKET_LOSS_PERC(5));
// Set prediction disabled for lower latency
opus_encoder_ctl(encoder, OPUS_SET_PREDICTION_DISABLED(1));
capture_initialized = 1; capture_initialized = 1;
capture_initializing = 0; capture_initializing = 0;
return 0; return 0;
@ -202,21 +186,21 @@ int jetkvm_audio_read_encode(void *opus_buf) {
short pcm_buffer[1920]; // max 2ch*960 short pcm_buffer[1920]; // max 2ch*960
unsigned char *out = (unsigned char*)opus_buf; unsigned char *out = (unsigned char*)opus_buf;
int err = 0; int err = 0;
// Safety checks // Safety checks
if (!capture_initialized || !pcm_handle || !encoder || !opus_buf) { if (!capture_initialized || !pcm_handle || !encoder || !opus_buf) {
return -1; return -1;
} }
int pcm_rc = snd_pcm_readi(pcm_handle, pcm_buffer, frame_size); int pcm_rc = snd_pcm_readi(pcm_handle, pcm_buffer, frame_size);
// Handle ALSA errors with enhanced recovery // Handle ALSA errors with enhanced recovery
if (pcm_rc < 0) { if (pcm_rc < 0) {
if (pcm_rc == -EPIPE) { if (pcm_rc == -EPIPE) {
// Buffer underrun - try to recover // Buffer underrun - try to recover
err = snd_pcm_prepare(pcm_handle); err = snd_pcm_prepare(pcm_handle);
if (err < 0) return -1; if (err < 0) return -1;
pcm_rc = snd_pcm_readi(pcm_handle, pcm_buffer, frame_size); pcm_rc = snd_pcm_readi(pcm_handle, pcm_buffer, frame_size);
if (pcm_rc < 0) return -1; if (pcm_rc < 0) return -1;
} else if (pcm_rc == -EAGAIN) { } else if (pcm_rc == -EAGAIN) {
@ -237,12 +221,12 @@ int jetkvm_audio_read_encode(void *opus_buf) {
return -1; return -1;
} }
} }
// If we got fewer frames than expected, pad with silence // If we got fewer frames than expected, pad with silence
if (pcm_rc < frame_size) { if (pcm_rc < frame_size) {
memset(&pcm_buffer[pcm_rc * channels], 0, (frame_size - pcm_rc) * channels * sizeof(short)); memset(&pcm_buffer[pcm_rc * channels], 0, (frame_size - pcm_rc) * channels * sizeof(short));
} }
int nb_bytes = opus_encode(encoder, pcm_buffer, frame_size, out, max_packet_size); int nb_bytes = opus_encode(encoder, pcm_buffer, frame_size, out, max_packet_size);
return nb_bytes; return nb_bytes;
} }
@ -250,28 +234,28 @@ int jetkvm_audio_read_encode(void *opus_buf) {
// Initialize ALSA playback with improved safety // Initialize ALSA playback with improved safety
int jetkvm_audio_playback_init() { int jetkvm_audio_playback_init() {
int err; int err;
// Prevent concurrent initialization // Prevent concurrent initialization
if (__sync_bool_compare_and_swap(&playback_initializing, 0, 1) == 0) { if (__sync_bool_compare_and_swap(&playback_initializing, 0, 1) == 0) {
return -EBUSY; // Already initializing return -EBUSY; // Already initializing
} }
// Check if already initialized // Check if already initialized
if (playback_initialized) { if (playback_initialized) {
playback_initializing = 0; playback_initializing = 0;
return 0; return 0;
} }
// Clean up any existing resources first // Clean up any existing resources first
if (decoder) { if (decoder) {
opus_decoder_destroy(decoder); opus_decoder_destroy(decoder);
decoder = NULL; decoder = NULL;
} }
if (pcm_playback_handle) { if (pcm_playback_handle) {
snd_pcm_close(pcm_playback_handle); snd_pcm_close(pcm_playback_handle);
pcm_playback_handle = NULL; pcm_playback_handle = NULL;
} }
// Try to open the USB gadget audio device for playback // Try to open the USB gadget audio device for playback
err = safe_alsa_open(&pcm_playback_handle, "hw:1,0", SND_PCM_STREAM_PLAYBACK); err = safe_alsa_open(&pcm_playback_handle, "hw:1,0", SND_PCM_STREAM_PLAYBACK);
if (err < 0) { if (err < 0) {
@ -282,7 +266,7 @@ int jetkvm_audio_playback_init() {
return -1; return -1;
} }
} }
// Configure the device // Configure the device
err = configure_alsa_device(pcm_playback_handle, "playback"); err = configure_alsa_device(pcm_playback_handle, "playback");
if (err < 0) { if (err < 0) {
@ -291,7 +275,7 @@ int jetkvm_audio_playback_init() {
playback_initializing = 0; playback_initializing = 0;
return -1; return -1;
} }
// Initialize Opus decoder // Initialize Opus decoder
int opus_err = 0; int opus_err = 0;
decoder = opus_decoder_create(sample_rate, channels, &opus_err); decoder = opus_decoder_create(sample_rate, channels, &opus_err);
@ -301,7 +285,7 @@ int jetkvm_audio_playback_init() {
playback_initializing = 0; playback_initializing = 0;
return -2; return -2;
} }
playback_initialized = 1; playback_initialized = 1;
playback_initializing = 0; playback_initializing = 0;
return 0; return 0;
@ -312,21 +296,21 @@ int jetkvm_audio_decode_write(void *opus_buf, int opus_size) {
short pcm_buffer[1920]; // max 2ch*960 short pcm_buffer[1920]; // max 2ch*960
unsigned char *in = (unsigned char*)opus_buf; unsigned char *in = (unsigned char*)opus_buf;
int err = 0; int err = 0;
// Safety checks // Safety checks
if (!playback_initialized || !pcm_playback_handle || !decoder || !opus_buf || opus_size <= 0) { if (!playback_initialized || !pcm_playback_handle || !decoder || !opus_buf || opus_size <= 0) {
return -1; return -1;
} }
// Additional bounds checking // Additional bounds checking
if (opus_size > max_packet_size) { if (opus_size > max_packet_size) {
return -1; return -1;
} }
// Decode Opus to PCM // Decode Opus to PCM
int pcm_frames = opus_decode(decoder, in, opus_size, pcm_buffer, frame_size, 0); int pcm_frames = opus_decode(decoder, in, opus_size, pcm_buffer, frame_size, 0);
if (pcm_frames < 0) return -1; if (pcm_frames < 0) return -1;
// Write PCM to playback device with enhanced recovery // Write PCM to playback device with enhanced recovery
int pcm_rc = snd_pcm_writei(pcm_playback_handle, pcm_buffer, pcm_frames); int pcm_rc = snd_pcm_writei(pcm_playback_handle, pcm_buffer, pcm_frames);
if (pcm_rc < 0) { if (pcm_rc < 0) {
@ -334,7 +318,7 @@ int jetkvm_audio_decode_write(void *opus_buf, int opus_size) {
// Buffer underrun - try to recover // Buffer underrun - try to recover
err = snd_pcm_prepare(pcm_playback_handle); err = snd_pcm_prepare(pcm_playback_handle);
if (err < 0) return -2; if (err < 0) return -2;
pcm_rc = snd_pcm_writei(pcm_playback_handle, pcm_buffer, pcm_frames); pcm_rc = snd_pcm_writei(pcm_playback_handle, pcm_buffer, pcm_frames);
} else if (pcm_rc == -ESTRPIPE) { } else if (pcm_rc == -ESTRPIPE) {
// Device suspended, try to resume // Device suspended, try to resume
@ -349,7 +333,7 @@ int jetkvm_audio_decode_write(void *opus_buf, int opus_size) {
} }
if (pcm_rc < 0) return -2; if (pcm_rc < 0) return -2;
} }
return pcm_frames; return pcm_frames;
} }
@ -359,20 +343,20 @@ void jetkvm_audio_playback_close() {
while (playback_initializing) { while (playback_initializing) {
usleep(1000); // 1ms usleep(1000); // 1ms
} }
// Atomic check and set to prevent double cleanup // Atomic check and set to prevent double cleanup
if (__sync_bool_compare_and_swap(&playback_initialized, 1, 0) == 0) { if (__sync_bool_compare_and_swap(&playback_initialized, 1, 0) == 0) {
return; // Already cleaned up return; // Already cleaned up
} }
if (decoder) { if (decoder) {
opus_decoder_destroy(decoder); opus_decoder_destroy(decoder);
decoder = NULL; decoder = NULL;
} }
if (pcm_playback_handle) { if (pcm_playback_handle) {
snd_pcm_drain(pcm_playback_handle); snd_pcm_drain(pcm_playback_handle);
snd_pcm_close(pcm_playback_handle); snd_pcm_close(pcm_playback_handle);
pcm_playback_handle = NULL; pcm_playback_handle = NULL;
} }
} }
@ -382,19 +366,19 @@ void jetkvm_audio_close() {
while (capture_initializing) { while (capture_initializing) {
usleep(1000); // 1ms usleep(1000); // 1ms
} }
capture_initialized = 0; capture_initialized = 0;
if (encoder) { if (encoder) {
opus_encoder_destroy(encoder); opus_encoder_destroy(encoder);
encoder = NULL; encoder = NULL;
} }
if (pcm_handle) { if (pcm_handle) {
snd_pcm_drop(pcm_handle); // Drop pending samples snd_pcm_drop(pcm_handle); // Drop pending samples
snd_pcm_close(pcm_handle); snd_pcm_close(pcm_handle);
pcm_handle = NULL; pcm_handle = NULL;
} }
// Also clean up playback // Also clean up playback
jetkvm_audio_playback_close(); jetkvm_audio_playback_close();
} }
@ -403,15 +387,15 @@ import "C"
// Optimized Go wrappers with reduced overhead // Optimized Go wrappers with reduced overhead
var ( var (
errAudioInitFailed = errors.New("failed to init ALSA/Opus") errAudioInitFailed = errors.New("failed to init ALSA/Opus")
errBufferTooSmall = errors.New("buffer too small") errBufferTooSmall = errors.New("buffer too small")
errAudioReadEncode = errors.New("audio read/encode error") errAudioReadEncode = errors.New("audio read/encode error")
errAudioDecodeWrite = errors.New("audio decode/write error") errAudioDecodeWrite = errors.New("audio decode/write error")
errAudioPlaybackInit = errors.New("failed to init ALSA playback/Opus decoder") errAudioPlaybackInit = errors.New("failed to init ALSA playback/Opus decoder")
errEmptyBuffer = errors.New("empty buffer") errEmptyBuffer = errors.New("empty buffer")
errNilBuffer = errors.New("nil buffer") errNilBuffer = errors.New("nil buffer")
errBufferTooLarge = errors.New("buffer too large") errBufferTooLarge = errors.New("buffer too large")
errInvalidBufferPtr = errors.New("invalid buffer pointer") errInvalidBufferPtr = errors.New("invalid buffer pointer")
) )
func cgoAudioInit() error { func cgoAudioInit() error {
@ -426,11 +410,13 @@ func cgoAudioClose() {
C.jetkvm_audio_close() C.jetkvm_audio_close()
} }
// Optimized read and encode with pre-allocated error objects and reduced checks
func cgoAudioReadEncode(buf []byte) (int, error) { func cgoAudioReadEncode(buf []byte) (int, error) {
// Fast path: check minimum buffer size (reduced from 1500 to 1276 for 10ms frames)
if len(buf) < 1276 { if len(buf) < 1276 {
return 0, errBufferTooSmall return 0, errBufferTooSmall
} }
n := C.jetkvm_audio_read_encode(unsafe.Pointer(&buf[0])) n := C.jetkvm_audio_read_encode(unsafe.Pointer(&buf[0]))
if n < 0 { if n < 0 {
return 0, errAudioReadEncode return 0, errAudioReadEncode
@ -441,11 +427,11 @@ func cgoAudioReadEncode(buf []byte) (int, error) {
return int(n), nil return int(n), nil
} }
// Audio playback functions // Go wrappers for audio playback (microphone input)
func cgoAudioPlaybackInit() error { func cgoAudioPlaybackInit() error {
ret := C.jetkvm_audio_playback_init() ret := C.jetkvm_audio_playback_init()
if ret != 0 { if ret != 0 {
return errAudioPlaybackInit return errors.New("failed to init ALSA playback/Opus decoder")
} }
return nil return nil
} }
@ -454,41 +440,48 @@ func cgoAudioPlaybackClose() {
C.jetkvm_audio_playback_close() C.jetkvm_audio_playback_close()
} }
// Decodes Opus frame and writes to playback device
func cgoAudioDecodeWrite(buf []byte) (int, error) { func cgoAudioDecodeWrite(buf []byte) (int, error) {
if len(buf) == 0 { if len(buf) == 0 {
return 0, errEmptyBuffer return 0, errors.New("empty buffer")
} }
// Additional safety check to prevent segfault
if buf == nil { if buf == nil {
return 0, errNilBuffer return 0, errors.New("nil buffer")
} }
if len(buf) > 4096 {
return 0, errBufferTooLarge // Validate buffer size to prevent potential overruns
if len(buf) > 4096 { // Maximum reasonable Opus frame size
return 0, errors.New("buffer too large")
} }
// Ensure buffer is not deallocated by keeping a reference
bufPtr := unsafe.Pointer(&buf[0]) bufPtr := unsafe.Pointer(&buf[0])
if bufPtr == nil { if bufPtr == nil {
return 0, errInvalidBufferPtr return 0, errors.New("invalid buffer pointer")
} }
// Add recovery mechanism for C function crashes
defer func() { defer func() {
if r := recover(); r != nil { if r := recover(); r != nil {
_ = r // Log the panic but don't crash the entire program
// This should not happen with proper validation, but provides safety
} }
}() }()
n := C.jetkvm_audio_decode_write(bufPtr, C.int(len(buf))) n := C.jetkvm_audio_decode_write(bufPtr, C.int(len(buf)))
if n < 0 { if n < 0 {
return 0, errAudioDecodeWrite return 0, errors.New("audio decode/write error")
} }
return int(n), nil return int(n), nil
} }
// CGO function aliases // Wrapper functions for non-blocking audio manager
var ( var (
CGOAudioInit = cgoAudioInit CGOAudioInit = cgoAudioInit
CGOAudioClose = cgoAudioClose CGOAudioClose = cgoAudioClose
CGOAudioReadEncode = cgoAudioReadEncode CGOAudioReadEncode = cgoAudioReadEncode
CGOAudioPlaybackInit = cgoAudioPlaybackInit CGOAudioPlaybackInit = cgoAudioPlaybackInit
CGOAudioPlaybackClose = cgoAudioPlaybackClose CGOAudioPlaybackClose = cgoAudioPlaybackClose
CGOAudioDecodeWrite = cgoAudioDecodeWrite CGOAudioDecodeWrite = cgoAudioDecodeWrite
) )

View File

@ -1,29 +0,0 @@
package audio
import "time"
// MonitoringConfig contains configuration constants for audio monitoring
type MonitoringConfig struct {
// MetricsUpdateInterval defines how often metrics are collected and broadcast
MetricsUpdateInterval time.Duration
}
// DefaultMonitoringConfig returns the default monitoring configuration
func DefaultMonitoringConfig() MonitoringConfig {
return MonitoringConfig{
MetricsUpdateInterval: 1000 * time.Millisecond, // 1 second interval
}
}
// Global monitoring configuration instance
var monitoringConfig = DefaultMonitoringConfig()
// GetMetricsUpdateInterval returns the current metrics update interval
func GetMetricsUpdateInterval() time.Duration {
return monitoringConfig.MetricsUpdateInterval
}
// SetMetricsUpdateInterval sets the metrics update interval
func SetMetricsUpdateInterval(interval time.Duration) {
monitoringConfig.MetricsUpdateInterval = interval
}

View File

@ -2,7 +2,6 @@ package audio
import ( import (
"context" "context"
"fmt"
"strings" "strings"
"sync" "sync"
"time" "time"
@ -21,8 +20,6 @@ const (
AudioEventMetricsUpdate AudioEventType = "audio-metrics-update" AudioEventMetricsUpdate AudioEventType = "audio-metrics-update"
AudioEventMicrophoneState AudioEventType = "microphone-state-changed" AudioEventMicrophoneState AudioEventType = "microphone-state-changed"
AudioEventMicrophoneMetrics AudioEventType = "microphone-metrics-update" AudioEventMicrophoneMetrics AudioEventType = "microphone-metrics-update"
AudioEventProcessMetrics AudioEventType = "audio-process-metrics"
AudioEventMicProcessMetrics AudioEventType = "microphone-process-metrics"
) )
// AudioEvent represents a WebSocket audio event // AudioEvent represents a WebSocket audio event
@ -62,17 +59,6 @@ type MicrophoneMetricsData struct {
AverageLatency string `json:"average_latency"` AverageLatency string `json:"average_latency"`
} }
// ProcessMetricsData represents process metrics data for WebSocket events
type ProcessMetricsData struct {
PID int `json:"pid"`
CPUPercent float64 `json:"cpu_percent"`
MemoryRSS int64 `json:"memory_rss"`
MemoryVMS int64 `json:"memory_vms"`
MemoryPercent float64 `json:"memory_percent"`
Running bool `json:"running"`
ProcessName string `json:"process_name"`
}
// AudioEventSubscriber represents a WebSocket connection subscribed to audio events // AudioEventSubscriber represents a WebSocket connection subscribed to audio events
type AudioEventSubscriber struct { type AudioEventSubscriber struct {
conn *websocket.Conn conn *websocket.Conn
@ -92,26 +78,32 @@ var (
audioEventOnce sync.Once audioEventOnce sync.Once
) )
// initializeBroadcaster creates and initializes the audio event broadcaster
func initializeBroadcaster() {
l := logging.GetDefaultLogger().With().Str("component", "audio-events").Logger()
audioEventBroadcaster = &AudioEventBroadcaster{
subscribers: make(map[string]*AudioEventSubscriber),
logger: &l,
}
// Start metrics broadcasting goroutine
go audioEventBroadcaster.startMetricsBroadcasting()
}
// InitializeAudioEventBroadcaster initializes the global audio event broadcaster // InitializeAudioEventBroadcaster initializes the global audio event broadcaster
func InitializeAudioEventBroadcaster() { func InitializeAudioEventBroadcaster() {
audioEventOnce.Do(initializeBroadcaster) audioEventOnce.Do(func() {
l := logging.GetDefaultLogger().With().Str("component", "audio-events").Logger()
audioEventBroadcaster = &AudioEventBroadcaster{
subscribers: make(map[string]*AudioEventSubscriber),
logger: &l,
}
// Start metrics broadcasting goroutine
go audioEventBroadcaster.startMetricsBroadcasting()
})
} }
// GetAudioEventBroadcaster returns the singleton audio event broadcaster // GetAudioEventBroadcaster returns the singleton audio event broadcaster
func GetAudioEventBroadcaster() *AudioEventBroadcaster { func GetAudioEventBroadcaster() *AudioEventBroadcaster {
audioEventOnce.Do(initializeBroadcaster) audioEventOnce.Do(func() {
l := logging.GetDefaultLogger().With().Str("component", "audio-events").Logger()
audioEventBroadcaster = &AudioEventBroadcaster{
subscribers: make(map[string]*AudioEventSubscriber),
logger: &l,
}
// Start metrics broadcasting goroutine
go audioEventBroadcaster.startMetricsBroadcasting()
})
return audioEventBroadcaster return audioEventBroadcaster
} }
@ -151,16 +143,22 @@ func (aeb *AudioEventBroadcaster) Unsubscribe(connectionID string) {
// BroadcastAudioMuteChanged broadcasts audio mute state changes // BroadcastAudioMuteChanged broadcasts audio mute state changes
func (aeb *AudioEventBroadcaster) BroadcastAudioMuteChanged(muted bool) { func (aeb *AudioEventBroadcaster) BroadcastAudioMuteChanged(muted bool) {
event := createAudioEvent(AudioEventMuteChanged, AudioMuteData{Muted: muted}) event := AudioEvent{
Type: AudioEventMuteChanged,
Data: AudioMuteData{Muted: muted},
}
aeb.broadcast(event) aeb.broadcast(event)
} }
// BroadcastMicrophoneStateChanged broadcasts microphone state changes // BroadcastMicrophoneStateChanged broadcasts microphone state changes
func (aeb *AudioEventBroadcaster) BroadcastMicrophoneStateChanged(running, sessionActive bool) { func (aeb *AudioEventBroadcaster) BroadcastMicrophoneStateChanged(running, sessionActive bool) {
event := createAudioEvent(AudioEventMicrophoneState, MicrophoneStateData{ event := AudioEvent{
Running: running, Type: AudioEventMicrophoneState,
SessionActive: sessionActive, Data: MicrophoneStateData{
}) Running: running,
SessionActive: sessionActive,
},
}
aeb.broadcast(event) aeb.broadcast(event)
} }
@ -204,186 +202,60 @@ func (aeb *AudioEventBroadcaster) sendInitialState(connectionID string) {
aeb.sendCurrentMetrics(subscriber) aeb.sendCurrentMetrics(subscriber)
} }
// convertAudioMetricsToEventData converts internal audio metrics to AudioMetricsData for events
func convertAudioMetricsToEventData(metrics AudioMetrics) AudioMetricsData {
return AudioMetricsData{
FramesReceived: metrics.FramesReceived,
FramesDropped: metrics.FramesDropped,
BytesProcessed: metrics.BytesProcessed,
LastFrameTime: metrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
ConnectionDrops: metrics.ConnectionDrops,
AverageLatency: metrics.AverageLatency.String(),
}
}
// convertAudioMetricsToEventDataWithLatencyMs converts internal audio metrics to AudioMetricsData with millisecond latency formatting
func convertAudioMetricsToEventDataWithLatencyMs(metrics AudioMetrics) AudioMetricsData {
return AudioMetricsData{
FramesReceived: metrics.FramesReceived,
FramesDropped: metrics.FramesDropped,
BytesProcessed: metrics.BytesProcessed,
LastFrameTime: metrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
ConnectionDrops: metrics.ConnectionDrops,
AverageLatency: fmt.Sprintf("%.1fms", float64(metrics.AverageLatency.Nanoseconds())/1e6),
}
}
// convertAudioInputMetricsToEventData converts internal audio input metrics to MicrophoneMetricsData for events
func convertAudioInputMetricsToEventData(metrics AudioInputMetrics) MicrophoneMetricsData {
return MicrophoneMetricsData{
FramesSent: metrics.FramesSent,
FramesDropped: metrics.FramesDropped,
BytesProcessed: metrics.BytesProcessed,
LastFrameTime: metrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
ConnectionDrops: metrics.ConnectionDrops,
AverageLatency: metrics.AverageLatency.String(),
}
}
// convertAudioInputMetricsToEventDataWithLatencyMs converts internal audio input metrics to MicrophoneMetricsData with millisecond latency formatting
func convertAudioInputMetricsToEventDataWithLatencyMs(metrics AudioInputMetrics) MicrophoneMetricsData {
return MicrophoneMetricsData{
FramesSent: metrics.FramesSent,
FramesDropped: metrics.FramesDropped,
BytesProcessed: metrics.BytesProcessed,
LastFrameTime: metrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
ConnectionDrops: metrics.ConnectionDrops,
AverageLatency: fmt.Sprintf("%.1fms", float64(metrics.AverageLatency.Nanoseconds())/1e6),
}
}
// convertProcessMetricsToEventData converts internal process metrics to ProcessMetricsData for events
func convertProcessMetricsToEventData(metrics ProcessMetrics, running bool) ProcessMetricsData {
return ProcessMetricsData{
PID: metrics.PID,
CPUPercent: metrics.CPUPercent,
MemoryRSS: metrics.MemoryRSS,
MemoryVMS: metrics.MemoryVMS,
MemoryPercent: metrics.MemoryPercent,
Running: running,
ProcessName: metrics.ProcessName,
}
}
// createProcessMetricsData creates ProcessMetricsData from ProcessMetrics with running status
func createProcessMetricsData(metrics *ProcessMetrics, running bool, processName string) ProcessMetricsData {
if metrics == nil {
return ProcessMetricsData{
PID: 0,
CPUPercent: 0.0,
MemoryRSS: 0,
MemoryVMS: 0,
MemoryPercent: 0.0,
Running: false,
ProcessName: processName,
}
}
return ProcessMetricsData{
PID: metrics.PID,
CPUPercent: metrics.CPUPercent,
MemoryRSS: metrics.MemoryRSS,
MemoryVMS: metrics.MemoryVMS,
MemoryPercent: metrics.MemoryPercent,
Running: running,
ProcessName: metrics.ProcessName,
}
}
// getInactiveProcessMetrics returns ProcessMetricsData for an inactive audio input process
func getInactiveProcessMetrics() ProcessMetricsData {
return createProcessMetricsData(nil, false, "audio-input-server")
}
// getActiveAudioInputSupervisor safely retrieves the audio input supervisor if session is active
func getActiveAudioInputSupervisor() *AudioInputSupervisor {
sessionProvider := GetSessionProvider()
if !sessionProvider.IsSessionActive() {
return nil
}
inputManager := sessionProvider.GetAudioInputManager()
if inputManager == nil {
return nil
}
return inputManager.GetSupervisor()
}
// createAudioEvent creates an AudioEvent
func createAudioEvent(eventType AudioEventType, data interface{}) AudioEvent {
return AudioEvent{
Type: eventType,
Data: data,
}
}
func (aeb *AudioEventBroadcaster) getMicrophoneProcessMetrics() ProcessMetricsData {
inputSupervisor := getActiveAudioInputSupervisor()
if inputSupervisor == nil {
return getInactiveProcessMetrics()
}
processMetrics := inputSupervisor.GetProcessMetrics()
if processMetrics == nil {
return getInactiveProcessMetrics()
}
// If process is running but CPU is 0%, it means we're waiting for the second sample
// to calculate CPU percentage. Return metrics with correct running status.
if inputSupervisor.IsRunning() && processMetrics.CPUPercent == 0.0 {
return createProcessMetricsData(processMetrics, true, processMetrics.ProcessName)
}
// Subprocess is running, return actual metrics
return createProcessMetricsData(processMetrics, inputSupervisor.IsRunning(), processMetrics.ProcessName)
}
// sendCurrentMetrics sends current audio and microphone metrics to a subscriber // sendCurrentMetrics sends current audio and microphone metrics to a subscriber
func (aeb *AudioEventBroadcaster) sendCurrentMetrics(subscriber *AudioEventSubscriber) { func (aeb *AudioEventBroadcaster) sendCurrentMetrics(subscriber *AudioEventSubscriber) {
// Send audio metrics // Send audio metrics
audioMetrics := GetAudioMetrics() audioMetrics := GetAudioMetrics()
audioMetricsEvent := createAudioEvent(AudioEventMetricsUpdate, convertAudioMetricsToEventData(audioMetrics)) audioMetricsEvent := AudioEvent{
aeb.sendToSubscriber(subscriber, audioMetricsEvent) Type: AudioEventMetricsUpdate,
Data: AudioMetricsData{
// Send audio process metrics FramesReceived: audioMetrics.FramesReceived,
if outputSupervisor := GetAudioOutputSupervisor(); outputSupervisor != nil { FramesDropped: audioMetrics.FramesDropped,
if processMetrics := outputSupervisor.GetProcessMetrics(); processMetrics != nil { BytesProcessed: audioMetrics.BytesProcessed,
audioProcessEvent := createAudioEvent(AudioEventProcessMetrics, convertProcessMetricsToEventData(*processMetrics, outputSupervisor.IsRunning())) LastFrameTime: audioMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
aeb.sendToSubscriber(subscriber, audioProcessEvent) ConnectionDrops: audioMetrics.ConnectionDrops,
} AverageLatency: audioMetrics.AverageLatency.String(),
},
} }
aeb.sendToSubscriber(subscriber, audioMetricsEvent)
// Send microphone metrics using session provider // Send microphone metrics using session provider
sessionProvider := GetSessionProvider() sessionProvider := GetSessionProvider()
if sessionProvider.IsSessionActive() { if sessionProvider.IsSessionActive() {
if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil { if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil {
micMetrics := inputManager.GetMetrics() micMetrics := inputManager.GetMetrics()
micMetricsEvent := createAudioEvent(AudioEventMicrophoneMetrics, convertAudioInputMetricsToEventData(micMetrics)) micMetricsEvent := AudioEvent{
Type: AudioEventMicrophoneMetrics,
Data: MicrophoneMetricsData{
FramesSent: micMetrics.FramesSent,
FramesDropped: micMetrics.FramesDropped,
BytesProcessed: micMetrics.BytesProcessed,
LastFrameTime: micMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
ConnectionDrops: micMetrics.ConnectionDrops,
AverageLatency: micMetrics.AverageLatency.String(),
},
}
aeb.sendToSubscriber(subscriber, micMetricsEvent) aeb.sendToSubscriber(subscriber, micMetricsEvent)
} }
} }
// Send microphone process metrics (always send, even when subprocess is not running)
micProcessEvent := createAudioEvent(AudioEventMicProcessMetrics, aeb.getMicrophoneProcessMetrics())
aeb.sendToSubscriber(subscriber, micProcessEvent)
} }
// startMetricsBroadcasting starts a goroutine that periodically broadcasts metrics // startMetricsBroadcasting starts a goroutine that periodically broadcasts metrics
func (aeb *AudioEventBroadcaster) startMetricsBroadcasting() { func (aeb *AudioEventBroadcaster) startMetricsBroadcasting() {
// Use centralized interval to match process monitor frequency for synchronized metrics // Use 5-second interval instead of 2 seconds for constrained environments
ticker := time.NewTicker(GetMetricsUpdateInterval()) ticker := time.NewTicker(5 * time.Second)
defer ticker.Stop() defer ticker.Stop()
for range ticker.C { for range ticker.C {
aeb.mutex.RLock() aeb.mutex.RLock()
subscriberCount := len(aeb.subscribers) subscriberCount := len(aeb.subscribers)
// Early exit if no subscribers to save CPU // Early exit if no subscribers to save CPU
if subscriberCount == 0 { if subscriberCount == 0 {
aeb.mutex.RUnlock() aeb.mutex.RUnlock()
continue continue
} }
// Create a copy for safe iteration // Create a copy for safe iteration
subscribersCopy := make([]*AudioEventSubscriber, 0, subscriberCount) subscribersCopy := make([]*AudioEventSubscriber, 0, subscriberCount)
for _, sub := range aeb.subscribers { for _, sub := range aeb.subscribers {
@ -398,7 +270,7 @@ func (aeb *AudioEventBroadcaster) startMetricsBroadcasting() {
activeSubscribers++ activeSubscribers++
} }
} }
// Skip metrics gathering if no active subscribers // Skip metrics gathering if no active subscribers
if activeSubscribers == 0 { if activeSubscribers == 0 {
continue continue
@ -406,7 +278,17 @@ func (aeb *AudioEventBroadcaster) startMetricsBroadcasting() {
// Broadcast audio metrics // Broadcast audio metrics
audioMetrics := GetAudioMetrics() audioMetrics := GetAudioMetrics()
audioMetricsEvent := createAudioEvent(AudioEventMetricsUpdate, convertAudioMetricsToEventDataWithLatencyMs(audioMetrics)) audioMetricsEvent := AudioEvent{
Type: AudioEventMetricsUpdate,
Data: AudioMetricsData{
FramesReceived: audioMetrics.FramesReceived,
FramesDropped: audioMetrics.FramesDropped,
BytesProcessed: audioMetrics.BytesProcessed,
LastFrameTime: audioMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
ConnectionDrops: audioMetrics.ConnectionDrops,
AverageLatency: audioMetrics.AverageLatency.String(),
},
}
aeb.broadcast(audioMetricsEvent) aeb.broadcast(audioMetricsEvent)
// Broadcast microphone metrics if available using session provider // Broadcast microphone metrics if available using session provider
@ -414,22 +296,20 @@ func (aeb *AudioEventBroadcaster) startMetricsBroadcasting() {
if sessionProvider.IsSessionActive() { if sessionProvider.IsSessionActive() {
if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil { if inputManager := sessionProvider.GetAudioInputManager(); inputManager != nil {
micMetrics := inputManager.GetMetrics() micMetrics := inputManager.GetMetrics()
micMetricsEvent := createAudioEvent(AudioEventMicrophoneMetrics, convertAudioInputMetricsToEventDataWithLatencyMs(micMetrics)) micMetricsEvent := AudioEvent{
Type: AudioEventMicrophoneMetrics,
Data: MicrophoneMetricsData{
FramesSent: micMetrics.FramesSent,
FramesDropped: micMetrics.FramesDropped,
BytesProcessed: micMetrics.BytesProcessed,
LastFrameTime: micMetrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
ConnectionDrops: micMetrics.ConnectionDrops,
AverageLatency: micMetrics.AverageLatency.String(),
},
}
aeb.broadcast(micMetricsEvent) aeb.broadcast(micMetricsEvent)
} }
} }
// Broadcast audio process metrics
if outputSupervisor := GetAudioOutputSupervisor(); outputSupervisor != nil {
if processMetrics := outputSupervisor.GetProcessMetrics(); processMetrics != nil {
audioProcessEvent := createAudioEvent(AudioEventProcessMetrics, convertProcessMetricsToEventData(*processMetrics, outputSupervisor.IsRunning()))
aeb.broadcast(audioProcessEvent)
}
}
// Broadcast microphone process metrics (always broadcast, even when subprocess is not running)
micProcessEvent := createAudioEvent(AudioEventMicProcessMetrics, aeb.getMicrophoneProcessMetrics())
aeb.broadcast(micProcessEvent)
} }
} }
@ -477,9 +357,9 @@ func (aeb *AudioEventBroadcaster) sendToSubscriber(subscriber *AudioEventSubscri
err := wsjson.Write(ctx, subscriber.conn, event) err := wsjson.Write(ctx, subscriber.conn, event)
if err != nil { if err != nil {
// Don't log network errors for closed connections as warnings, they're expected // Don't log network errors for closed connections as warnings, they're expected
if strings.Contains(err.Error(), "use of closed network connection") || if strings.Contains(err.Error(), "use of closed network connection") ||
strings.Contains(err.Error(), "connection reset by peer") || strings.Contains(err.Error(), "connection reset by peer") ||
strings.Contains(err.Error(), "context canceled") { strings.Contains(err.Error(), "context canceled") {
subscriber.logger.Debug().Err(err).Msg("websocket connection closed during audio event send") subscriber.logger.Debug().Err(err).Msg("websocket connection closed during audio event send")
} else { } else {
subscriber.logger.Warn().Err(err).Msg("failed to send audio event to subscriber") subscriber.logger.Warn().Err(err).Msg("failed to send audio event to subscriber")

View File

@ -9,8 +9,9 @@ import (
) )
// AudioInputMetrics holds metrics for microphone input // AudioInputMetrics holds metrics for microphone input
// Note: int64 fields must be 64-bit aligned for atomic operations on ARM
type AudioInputMetrics struct { type AudioInputMetrics struct {
FramesSent int64 FramesSent int64 // Must be first for alignment
FramesDropped int64 FramesDropped int64
BytesProcessed int64 BytesProcessed int64
ConnectionDrops int64 ConnectionDrops int64
@ -18,20 +19,21 @@ type AudioInputMetrics struct {
LastFrameTime time.Time LastFrameTime time.Time
} }
// AudioInputManager manages microphone input stream using IPC mode only // AudioInputManager manages microphone input stream from WebRTC to USB gadget
type AudioInputManager struct { type AudioInputManager struct {
// metrics MUST be first for ARM32 alignment (contains int64 fields)
metrics AudioInputMetrics metrics AudioInputMetrics
ipcManager *AudioInputIPCManager inputBuffer chan []byte
logger zerolog.Logger logger zerolog.Logger
running int32 running int32
} }
// NewAudioInputManager creates a new audio input manager (IPC mode only) // NewAudioInputManager creates a new audio input manager
func NewAudioInputManager() *AudioInputManager { func NewAudioInputManager() *AudioInputManager {
return &AudioInputManager{ return &AudioInputManager{
ipcManager: NewAudioInputIPCManager(), inputBuffer: make(chan []byte, 100), // Buffer up to 100 frames
logger: logging.GetDefaultLogger().With().Str("component", "audio-input").Logger(), logger: logging.GetDefaultLogger().With().Str("component", "audio-input").Logger(),
} }
} }
@ -43,10 +45,9 @@ func (aim *AudioInputManager) Start() error {
aim.logger.Info().Msg("Starting audio input manager") aim.logger.Info().Msg("Starting audio input manager")
// Start the IPC-based audio input // Start the non-blocking audio input stream
err := aim.ipcManager.Start() err := StartNonBlockingAudioInput(aim.inputBuffer)
if err != nil { if err != nil {
aim.logger.Error().Err(err).Msg("Failed to start IPC audio input")
atomic.StoreInt32(&aim.running, 0) atomic.StoreInt32(&aim.running, 0)
return err return err
} }
@ -62,150 +63,57 @@ func (aim *AudioInputManager) Stop() {
aim.logger.Info().Msg("Stopping audio input manager") aim.logger.Info().Msg("Stopping audio input manager")
// Stop the IPC-based audio input // Stop the non-blocking audio input stream
aim.ipcManager.Stop() StopNonBlockingAudioInput()
// Drain the input buffer
go func() {
for {
select {
case <-aim.inputBuffer:
// Drain
case <-time.After(100 * time.Millisecond):
return
}
}
}()
aim.logger.Info().Msg("Audio input manager stopped") aim.logger.Info().Msg("Audio input manager stopped")
} }
// WriteOpusFrame writes an Opus frame to the audio input system with latency tracking // WriteOpusFrame writes an Opus frame to the input buffer
func (aim *AudioInputManager) WriteOpusFrame(frame []byte) error { func (aim *AudioInputManager) WriteOpusFrame(frame []byte) error {
if !aim.IsRunning() { if atomic.LoadInt32(&aim.running) == 0 {
return nil // Not running, silently drop return nil // Not running, ignore
} }
// Track end-to-end latency from WebRTC to IPC select {
startTime := time.Now() case aim.inputBuffer <- frame:
err := aim.ipcManager.WriteOpusFrame(frame) atomic.AddInt64(&aim.metrics.FramesSent, 1)
processingTime := time.Since(startTime) atomic.AddInt64(&aim.metrics.BytesProcessed, int64(len(frame)))
aim.metrics.LastFrameTime = time.Now()
// Log high latency warnings return nil
if processingTime > 10*time.Millisecond { default:
aim.logger.Warn(). // Buffer full, drop frame
Dur("latency_ms", processingTime).
Msg("High audio processing latency detected")
}
if err != nil {
atomic.AddInt64(&aim.metrics.FramesDropped, 1)
return err
}
// Update metrics
atomic.AddInt64(&aim.metrics.FramesSent, 1)
atomic.AddInt64(&aim.metrics.BytesProcessed, int64(len(frame)))
aim.metrics.LastFrameTime = time.Now()
aim.metrics.AverageLatency = processingTime
return nil
}
// WriteOpusFrameZeroCopy writes an Opus frame using zero-copy optimization
func (aim *AudioInputManager) WriteOpusFrameZeroCopy(frame *ZeroCopyAudioFrame) error {
if !aim.IsRunning() {
return nil // Not running, silently drop
}
if frame == nil {
atomic.AddInt64(&aim.metrics.FramesDropped, 1) atomic.AddInt64(&aim.metrics.FramesDropped, 1)
aim.logger.Warn().Msg("Audio input buffer full, dropping frame")
return nil return nil
} }
// Track end-to-end latency from WebRTC to IPC
startTime := time.Now()
err := aim.ipcManager.WriteOpusFrameZeroCopy(frame)
processingTime := time.Since(startTime)
// Log high latency warnings
if processingTime > 10*time.Millisecond {
aim.logger.Warn().
Dur("latency_ms", processingTime).
Msg("High audio processing latency detected")
}
if err != nil {
atomic.AddInt64(&aim.metrics.FramesDropped, 1)
return err
}
// Update metrics
atomic.AddInt64(&aim.metrics.FramesSent, 1)
atomic.AddInt64(&aim.metrics.BytesProcessed, int64(frame.Length()))
aim.metrics.LastFrameTime = time.Now()
aim.metrics.AverageLatency = processingTime
return nil
} }
// GetMetrics returns current audio input metrics // GetMetrics returns current microphone input metrics
func (aim *AudioInputManager) GetMetrics() AudioInputMetrics { func (aim *AudioInputManager) GetMetrics() AudioInputMetrics {
return AudioInputMetrics{ return AudioInputMetrics{
FramesSent: atomic.LoadInt64(&aim.metrics.FramesSent), FramesSent: atomic.LoadInt64(&aim.metrics.FramesSent),
FramesDropped: atomic.LoadInt64(&aim.metrics.FramesDropped), FramesDropped: atomic.LoadInt64(&aim.metrics.FramesDropped),
BytesProcessed: atomic.LoadInt64(&aim.metrics.BytesProcessed), BytesProcessed: atomic.LoadInt64(&aim.metrics.BytesProcessed),
AverageLatency: aim.metrics.AverageLatency, LastFrameTime: aim.metrics.LastFrameTime,
LastFrameTime: aim.metrics.LastFrameTime, ConnectionDrops: atomic.LoadInt64(&aim.metrics.ConnectionDrops),
AverageLatency: aim.metrics.AverageLatency,
} }
} }
// GetComprehensiveMetrics returns detailed performance metrics across all components
func (aim *AudioInputManager) GetComprehensiveMetrics() map[string]interface{} {
// Get base metrics
baseMetrics := aim.GetMetrics()
// Get detailed IPC metrics
ipcMetrics, detailedStats := aim.ipcManager.GetDetailedMetrics()
comprehensiveMetrics := map[string]interface{}{
"manager": map[string]interface{}{
"frames_sent": baseMetrics.FramesSent,
"frames_dropped": baseMetrics.FramesDropped,
"bytes_processed": baseMetrics.BytesProcessed,
"average_latency_ms": float64(baseMetrics.AverageLatency.Nanoseconds()) / 1e6,
"last_frame_time": baseMetrics.LastFrameTime,
"running": aim.IsRunning(),
},
"ipc": map[string]interface{}{
"frames_sent": ipcMetrics.FramesSent,
"frames_dropped": ipcMetrics.FramesDropped,
"bytes_processed": ipcMetrics.BytesProcessed,
"average_latency_ms": float64(ipcMetrics.AverageLatency.Nanoseconds()) / 1e6,
"last_frame_time": ipcMetrics.LastFrameTime,
},
"detailed": detailedStats,
}
return comprehensiveMetrics
}
// LogPerformanceStats logs current performance statistics
func (aim *AudioInputManager) LogPerformanceStats() {
metrics := aim.GetComprehensiveMetrics()
managerStats := metrics["manager"].(map[string]interface{})
ipcStats := metrics["ipc"].(map[string]interface{})
detailedStats := metrics["detailed"].(map[string]interface{})
aim.logger.Info().
Int64("manager_frames_sent", managerStats["frames_sent"].(int64)).
Int64("manager_frames_dropped", managerStats["frames_dropped"].(int64)).
Float64("manager_latency_ms", managerStats["average_latency_ms"].(float64)).
Int64("ipc_frames_sent", ipcStats["frames_sent"].(int64)).
Int64("ipc_frames_dropped", ipcStats["frames_dropped"].(int64)).
Float64("ipc_latency_ms", ipcStats["average_latency_ms"].(float64)).
Float64("client_drop_rate", detailedStats["client_drop_rate"].(float64)).
Float64("frames_per_second", detailedStats["frames_per_second"].(float64)).
Msg("Audio input performance metrics")
}
// IsRunning returns whether the audio input manager is running // IsRunning returns whether the audio input manager is running
func (aim *AudioInputManager) IsRunning() bool { func (aim *AudioInputManager) IsRunning() bool {
return atomic.LoadInt32(&aim.running) == 1 return atomic.LoadInt32(&aim.running) == 1
} }
// IsReady returns whether the audio input manager is ready to receive frames
// This checks both that it's running and that the IPC connection is established
func (aim *AudioInputManager) IsReady() bool {
if !aim.IsRunning() {
return false
}
return aim.ipcManager.IsReady()
}

View File

@ -1,94 +0,0 @@
package audio
import (
"sync/atomic"
"unsafe"
)
var (
	// globalInputManager holds the process-wide *AudioInputManager.
	// Stored as unsafe.Pointer so it can be installed with an atomic CAS
	// (see getAudioInputManager); nil until first use.
	globalInputManager unsafe.Pointer // *AudioInputManager
)
// AudioInputInterface defines the common interface for audio input managers.
type AudioInputInterface interface {
	// Start begins accepting audio input; returns an error on failure.
	Start() error
	// Stop halts input processing.
	Stop()
	// WriteOpusFrame submits one encoded Opus frame.
	WriteOpusFrame(frame []byte) error
	// IsRunning reports whether input processing is active.
	IsRunning() bool
	// GetMetrics returns a snapshot of the input metrics.
	GetMetrics() AudioInputMetrics
}
// GetSupervisor returns the audio input supervisor for advanced management.
// Receiver renamed from m to aim for consistency with the other
// AudioInputManager methods, which all use aim.
func (aim *AudioInputManager) GetSupervisor() *AudioInputSupervisor {
	return aim.ipcManager.GetSupervisor()
}
// getAudioInputManager lazily creates and returns the process-wide audio
// input manager, using an atomic compare-and-swap so concurrent callers
// converge on a single instance.
func getAudioInputManager() AudioInputInterface {
	if ptr := atomic.LoadPointer(&globalInputManager); ptr != nil {
		return (*AudioInputManager)(ptr)
	}
	candidate := NewAudioInputManager()
	if atomic.CompareAndSwapPointer(&globalInputManager, nil, unsafe.Pointer(candidate)) {
		return candidate
	}
	// Lost the race: another goroutine installed its manager first; use that.
	return (*AudioInputManager)(atomic.LoadPointer(&globalInputManager))
}
// StartAudioInput starts the audio input system using the global manager.
func StartAudioInput() error {
	return getAudioInputManager().Start()
}
// StopAudioInput stops the audio input system.
func StopAudioInput() {
	getAudioInputManager().Stop()
}
// WriteAudioInputFrame writes an Opus frame to the audio input system.
func WriteAudioInputFrame(frame []byte) error {
	return getAudioInputManager().WriteOpusFrame(frame)
}
// IsAudioInputRunning reports whether the audio input system is running.
func IsAudioInputRunning() bool {
	return getAudioInputManager().IsRunning()
}
// GetAudioInputMetrics returns a snapshot of the current audio input metrics.
func GetAudioInputMetrics() AudioInputMetrics {
	return getAudioInputManager().GetMetrics()
}
// GetAudioInputIPCSupervisor returns the IPC supervisor of the global input
// manager, or nil when no manager has been created yet.
func GetAudioInputIPCSupervisor() *AudioInputSupervisor {
	ptr := atomic.LoadPointer(&globalInputManager)
	if ptr == nil {
		return nil
	}
	return (*AudioInputManager)(ptr).GetSupervisor()
}
// Helper functions
// ResetAudioInputManagers stops and clears the global manager (test helper).
func ResetAudioInputManagers() {
	ptr := atomic.LoadPointer(&globalInputManager)
	if ptr != nil {
		// Shut the current manager down before discarding it.
		(*AudioInputManager)(ptr).Stop()
	}
	// Clear the pointer so the next access creates a fresh manager.
	atomic.StorePointer(&globalInputManager, nil)
}

View File

@ -1,961 +0,0 @@
package audio
import (
"context"
"encoding/binary"
"fmt"
"io"
"net"
"os"
"path/filepath"
"runtime"
"sync"
"sync/atomic"
"time"
"github.com/jetkvm/kvm/internal/logging"
)
// Wire-protocol and tuning constants for the audio input IPC socket.
const (
	inputMagicNumber uint32 = 0x4A4B4D49 // "JKMI" (JetKVM Microphone Input)
	inputSocketName = "audio_input.sock" // Unix socket file name (see getInputSocketPath)
	maxFrameSize = 4096 // Maximum Opus frame size in bytes
	writeTimeout = 15 * time.Millisecond // Non-blocking write timeout (increased for high load)
	maxDroppedFrames = 100 // Maximum consecutive dropped frames before reconnect
	headerSize = 17 // Fixed header size: magic(4) + type(1) + length(4) + timestamp(8) bytes
	messagePoolSize = 256 // Pre-allocated message pool size
)
// InputMessageType represents the type of IPC message.
type InputMessageType uint8

// Message types understood by the audio input IPC protocol.
const (
	InputMessageTypeOpusFrame InputMessageType = iota // Encoded Opus audio frame payload
	InputMessageTypeConfig // Configuration update (see InputIPCConfig)
	InputMessageTypeStop // Client asks the server to stop
	InputMessageTypeHeartbeat // Keep-alive probe; server answers with an ack
	InputMessageTypeAck // Acknowledgment sent by the server
)
// InputIPCMessage represents a message sent over IPC.
// Wire format: a headerSize-byte little-endian header (Magic, Type, Length,
// Timestamp) followed by Length bytes of Data.
type InputIPCMessage struct {
	Magic uint32 // Must equal inputMagicNumber
	Type InputMessageType
	Length uint32 // Byte length of Data
	Timestamp int64 // Sender's time.Now().UnixNano()
	Data []byte
}
// OptimizedIPCMessage represents an optimized message with pre-allocated
// buffers so header parsing and payload reads avoid per-message allocations.
type OptimizedIPCMessage struct {
	header [headerSize]byte // Pre-allocated header buffer
	data []byte // Reusable payload buffer (capacity retained across pool cycles)
	msg InputIPCMessage // Embedded message, reset on Put
}
// MessagePool manages a pool of reusable messages to reduce allocations.
// Messages are served first from a mutex-guarded pre-allocated slice, then
// from a buffered channel; when both are exhausted Get allocates fresh.
type MessagePool struct {
	// Atomic fields MUST be first for ARM32 alignment (int64 fields need 8-byte alignment)
	hitCount int64 // Pool hit counter (atomic)
	missCount int64 // Pool miss counter (atomic)
	// Other fields
	pool chan *OptimizedIPCMessage // Channel-backed portion of the pool
	// Memory optimization fields
	preallocated []*OptimizedIPCMessage // Pre-allocated messages for immediate use
	preallocSize int // Number of pre-allocated messages
	maxPoolSize int // Maximum pool size to prevent memory bloat
	mutex sync.RWMutex // Protects preallocated slice
}
// Global message pool instance; pre-allocation happens lazily via
// initializeMessagePool on first Get.
var globalMessagePool = &MessagePool{
	pool: make(chan *OptimizedIPCMessage, messagePoolSize),
}

// messagePoolInitOnce guards the one-time pool pre-allocation.
var messagePoolInitOnce sync.Once
// initializeMessagePool performs the one-time pre-allocation of pooled
// messages: 30% are kept in a slice for fastest access and the remainder of
// the budget fills the channel pool.
func initializeMessagePool() {
	messagePoolInitOnce.Do(func() {
		mp := globalMessagePool
		// Keep 30% of the pool immediately available via the slice.
		mp.preallocSize = messagePoolSize * 30 / 100
		mp.maxPoolSize = messagePoolSize * 2 // Allow growth up to 2x
		mp.preallocated = make([]*OptimizedIPCMessage, 0, mp.preallocSize)
		for i := 0; i < mp.preallocSize; i++ {
			mp.preallocated = append(mp.preallocated, &OptimizedIPCMessage{
				data: make([]byte, 0, maxFrameSize),
			})
		}
		// The remaining messages go into the channel-backed pool.
		for i := mp.preallocSize; i < messagePoolSize; i++ {
			mp.pool <- &OptimizedIPCMessage{
				data: make([]byte, 0, maxFrameSize),
			}
		}
	})
}
// Get retrieves a message from the pool, allocating a fresh one only when
// both the pre-allocated slice and the channel pool are exhausted.
func (mp *MessagePool) Get() *OptimizedIPCMessage {
	initializeMessagePool()
	// Fast path: pop from the pre-allocated slice.
	mp.mutex.Lock()
	if n := len(mp.preallocated); n > 0 {
		msg := mp.preallocated[n-1]
		mp.preallocated = mp.preallocated[:n-1]
		mp.mutex.Unlock()
		atomic.AddInt64(&mp.hitCount, 1)
		return msg
	}
	mp.mutex.Unlock()
	// Next: non-blocking receive from the channel pool.
	select {
	case msg := <-mp.pool:
		atomic.AddInt64(&mp.hitCount, 1)
		return msg
	default:
	}
	// Pool exhausted: fall back to a fresh allocation.
	atomic.AddInt64(&mp.missCount, 1)
	return &OptimizedIPCMessage{data: make([]byte, 0, maxFrameSize)}
}
// Put returns a message to the pool after resetting it for reuse.
// When both pool tiers are full the message is left for the GC.
func (mp *MessagePool) Put(msg *OptimizedIPCMessage) {
	// Clear state but keep the data buffer's capacity for reuse.
	msg.data = msg.data[:0]
	msg.msg = InputIPCMessage{}
	// Prefer the pre-allocated slice for fastest reuse.
	mp.mutex.Lock()
	hasRoom := len(mp.preallocated) < mp.preallocSize
	if hasRoom {
		mp.preallocated = append(mp.preallocated, msg)
	}
	mp.mutex.Unlock()
	if hasRoom {
		return
	}
	// Fall back to the channel pool; drop the message if that is full too.
	select {
	case mp.pool <- msg:
	default:
	}
}
// InputIPCConfig represents configuration for audio input.
// It is serialized as three little-endian uint32 values by SendConfig.
type InputIPCConfig struct {
	SampleRate int
	Channels int
	FrameSize int
}
// AudioInputServer handles IPC communication for audio input processing.
// It serves one client at a time over a Unix socket and runs a
// reader/processor/monitor goroutine pipeline coordinated via stopChan/wg.
type AudioInputServer struct {
	// Atomic fields must be first for proper alignment on ARM
	bufferSize int64 // Current buffer size (atomic)
	processingTime int64 // Average processing time in nanoseconds (atomic)
	droppedFrames int64 // Dropped frames counter (atomic)
	totalFrames int64 // Total frames counter (atomic)
	listener net.Listener // Unix socket listener
	conn net.Conn // Active client connection (guarded by mtx)
	mtx sync.Mutex // Guards conn and running
	running bool
	// Triple-goroutine architecture
	messageChan chan *InputIPCMessage // Buffered channel for incoming messages
	processChan chan *InputIPCMessage // Buffered channel for processing queue
	stopChan chan struct{} // Stop signal for all goroutines
	wg sync.WaitGroup // Wait group for goroutine coordination
	// Socket buffer configuration
	socketBufferConfig SocketBufferConfig
}
// NewAudioInputServer creates a new audio input server listening on the
// input Unix socket. Channel capacities come from the adaptive buffer
// manager. The returned server is not started; call Start.
func NewAudioInputServer() (*AudioInputServer, error) {
	socketPath := getInputSocketPath()
	// Remove existing socket if any (a previous run may have left it behind)
	os.Remove(socketPath)
	listener, err := net.Listen("unix", socketPath)
	if err != nil {
		return nil, fmt.Errorf("failed to create unix socket: %w", err)
	}
	// Get initial buffer size from adaptive buffer manager
	adaptiveManager := GetAdaptiveBufferManager()
	initialBufferSize := int64(adaptiveManager.GetInputBufferSize())
	// Initialize socket buffer configuration
	socketBufferConfig := DefaultSocketBufferConfig()
	return &AudioInputServer{
		listener: listener,
		messageChan: make(chan *InputIPCMessage, initialBufferSize),
		processChan: make(chan *InputIPCMessage, initialBufferSize),
		stopChan: make(chan struct{}),
		bufferSize: initialBufferSize,
		socketBufferConfig: socketBufferConfig,
	}, nil
}
// Start launches the server goroutines (reader, processor, monitor) and
// begins accepting client connections. Returns an error if already running.
func (ais *AudioInputServer) Start() error {
	ais.mtx.Lock()
	defer ais.mtx.Unlock()
	if ais.running {
		return fmt.Errorf("server already running")
	}
	ais.running = true
	// Triple-goroutine pipeline: read -> process -> monitor.
	ais.startReaderGoroutine()
	ais.startProcessorGoroutine()
	ais.startMonitorGoroutine()
	// Accept client connections in the background.
	go ais.acceptConnections()
	return nil
}
// Stop shuts the server down: it signals all goroutines via stopChan, waits
// for them to exit, then closes the active connection and the listener.
// Idempotent: returns immediately when the server is not running.
func (ais *AudioInputServer) Stop() {
	ais.mtx.Lock()
	if !ais.running {
		ais.mtx.Unlock()
		return
	}
	ais.running = false
	// Signal all goroutines to stop
	close(ais.stopChan)
	ais.mtx.Unlock()
	// Wait WITHOUT holding the mutex: worker goroutines (e.g. the processor
	// calling sendAck) acquire ais.mtx themselves, so waiting while locked
	// could deadlock shutdown.
	ais.wg.Wait()
	ais.mtx.Lock()
	defer ais.mtx.Unlock()
	if ais.conn != nil {
		ais.conn.Close()
		ais.conn = nil
	}
	if ais.listener != nil {
		ais.listener.Close()
	}
}
// Close stops the server and removes its Unix socket file from disk.
func (ais *AudioInputServer) Close() {
	ais.Stop()
	// Clean up the socket path so a future server can listen again.
	os.Remove(getInputSocketPath())
}
// acceptConnections accepts client connections until the server stops.
// Only one client is served at a time; a new connection replaces the old one.
// Shutdown is detected via stopChan instead of reading ais.running without
// the mutex (ais.running is guarded by ais.mtx everywhere else, so the
// original unsynchronized read was a data race).
func (ais *AudioInputServer) acceptConnections() {
	for {
		// Bail out promptly once Stop has closed stopChan.
		select {
		case <-ais.stopChan:
			return
		default:
		}
		conn, err := ais.listener.Accept()
		if err != nil {
			// Accept fails once Stop closes the listener; exit then,
			// otherwise retry.
			select {
			case <-ais.stopChan:
				return
			default:
				continue
			}
		}
		// Configure socket buffers for optimal performance; this is not
		// critical, so a failure only logs a warning.
		if err := ConfigureSocketBuffers(conn, ais.socketBufferConfig); err != nil {
			logger := logging.GetDefaultLogger().With().Str("component", "audio-input-server").Logger()
			logger.Warn().Err(err).Msg("Failed to configure socket buffers, continuing with defaults")
		} else {
			// Record socket buffer metrics for monitoring
			RecordSocketBufferMetrics(conn, "audio-input")
		}
		ais.mtx.Lock()
		// Close any existing connection; the newest client wins.
		if ais.conn != nil {
			ais.conn.Close()
		}
		ais.conn = conn
		ais.mtx.Unlock()
		// Handle this connection
		go ais.handleConnection(conn)
	}
}
// handleConnection keeps a client connection open until the server stops or
// the shared connection is cleared. Actual message reads are done by the
// reader goroutine; this loop only watches liveness.
// The original polled ais.conn without holding ais.mtx (a data race, since
// acceptConnections and Stop mutate it under the mutex); this version takes
// the lock for each check and uses a reusable ticker instead of time.Sleep.
func (ais *AudioInputServer) handleConnection(conn net.Conn) {
	defer conn.Close()
	ticker := time.NewTicker(100 * time.Millisecond)
	defer ticker.Stop()
	for {
		select {
		case <-ais.stopChan:
			return
		case <-ticker.C:
			// Read ais.conn under the mutex that guards it.
			ais.mtx.Lock()
			alive := ais.conn != nil
			ais.mtx.Unlock()
			if !alive {
				return
			}
		}
	}
}
// readMessage reads one complete framed message (headerSize-byte header plus
// payload) from conn. A pooled buffer is used during parsing; the returned
// message owns a private copy of the payload so later pool reuse cannot
// corrupt it.
func (ais *AudioInputServer) readMessage(conn net.Conn) (*InputIPCMessage, error) {
	// Get optimized message from pool; returned to the pool on exit.
	optMsg := globalMessagePool.Get()
	defer globalMessagePool.Put(optMsg)
	// Read header directly into pre-allocated buffer
	_, err := io.ReadFull(conn, optMsg.header[:])
	if err != nil {
		return nil, err
	}
	// Parse header fields (little-endian layout: magic|type|length|timestamp)
	msg := &optMsg.msg
	msg.Magic = binary.LittleEndian.Uint32(optMsg.header[0:4])
	msg.Type = InputMessageType(optMsg.header[4])
	msg.Length = binary.LittleEndian.Uint32(optMsg.header[5:9])
	msg.Timestamp = int64(binary.LittleEndian.Uint64(optMsg.header[9:17]))
	// Validate magic number to reject foreign traffic
	if msg.Magic != inputMagicNumber {
		return nil, fmt.Errorf("invalid magic number: %x", msg.Magic)
	}
	// Validate message length against the protocol maximum
	if msg.Length > maxFrameSize {
		return nil, fmt.Errorf("message too large: %d bytes", msg.Length)
	}
	// Read data if present using pooled buffer
	if msg.Length > 0 {
		// Ensure buffer capacity; reuse the pooled buffer when it is big enough
		if cap(optMsg.data) < int(msg.Length) {
			optMsg.data = make([]byte, msg.Length)
		} else {
			optMsg.data = optMsg.data[:msg.Length]
		}
		_, err = io.ReadFull(conn, optMsg.data)
		if err != nil {
			return nil, err
		}
		msg.Data = optMsg.data
	}
	// Return a copy of the message (data will be copied by caller if needed)
	result := &InputIPCMessage{
		Magic: msg.Magic,
		Type: msg.Type,
		Length: msg.Length,
		Timestamp: msg.Timestamp,
	}
	if msg.Length > 0 {
		// Copy data to ensure it's not affected by buffer reuse after Put
		result.Data = make([]byte, msg.Length)
		copy(result.Data, msg.Data)
	}
	return result, nil
}
// processMessage dispatches one received message according to its type.
func (ais *AudioInputServer) processMessage(msg *InputIPCMessage) error {
	switch msg.Type {
	case InputMessageTypeOpusFrame:
		return ais.processOpusFrame(msg.Data)
	case InputMessageTypeConfig:
		return ais.processConfig(msg.Data)
	case InputMessageTypeHeartbeat:
		return ais.sendAck()
	case InputMessageTypeStop:
		return fmt.Errorf("stop message received")
	default:
		return fmt.Errorf("unknown message type: %d", msg.Type)
	}
}
// processOpusFrame decodes and writes one Opus frame via the CGO bridge.
// Empty frames are silently ignored.
func (ais *AudioInputServer) processOpusFrame(data []byte) error {
	if len(data) == 0 {
		// Nothing to decode.
		return nil
	}
	_, err := CGOAudioDecodeWrite(data)
	return err
}
// processConfig processes a configuration update.
// The payload is currently not parsed or applied here; the server only
// acknowledges receipt to the client.
func (ais *AudioInputServer) processConfig(data []byte) error {
	// Acknowledge configuration receipt
	return ais.sendAck()
}
// sendAck sends an acknowledgment message to the connected client.
// Returns an error when no client is connected.
func (ais *AudioInputServer) sendAck() error {
	ais.mtx.Lock()
	defer ais.mtx.Unlock()
	if ais.conn == nil {
		return fmt.Errorf("no connection")
	}
	ack := InputIPCMessage{
		Magic:     inputMagicNumber,
		Type:      InputMessageTypeAck,
		Length:    0,
		Timestamp: time.Now().UnixNano(),
	}
	return ais.writeMessage(ais.conn, &ack)
}
// writeMessage serializes msg (header, then payload) onto conn, borrowing a
// pooled buffer for the fixed-size header to avoid per-call allocations.
func (ais *AudioInputServer) writeMessage(conn net.Conn, msg *InputIPCMessage) error {
	optMsg := globalMessagePool.Get()
	defer globalMessagePool.Put(optMsg)
	// Encode the little-endian header: magic|type|length|timestamp.
	hdr := optMsg.header[:]
	binary.LittleEndian.PutUint32(hdr[0:4], msg.Magic)
	hdr[4] = byte(msg.Type)
	binary.LittleEndian.PutUint32(hdr[5:9], msg.Length)
	binary.LittleEndian.PutUint64(hdr[9:17], uint64(msg.Timestamp))
	if _, err := conn.Write(hdr); err != nil {
		return err
	}
	// Payload follows only when present.
	if msg.Length > 0 && msg.Data != nil {
		if _, err := conn.Write(msg.Data); err != nil {
			return err
		}
	}
	return nil
}
// AudioInputClient handles IPC communication from the main process.
// It connects to the AudioInputServer's Unix socket and sends framed
// messages; conn and running are guarded by mtx.
type AudioInputClient struct {
	// Atomic fields MUST be first for ARM32 alignment (int64 fields need 8-byte alignment)
	droppedFrames int64 // Atomic counter for dropped frames
	totalFrames int64 // Atomic counter for total frames
	conn net.Conn // Active connection (guarded by mtx)
	mtx sync.Mutex // Guards conn and running
	running bool
}
// NewAudioInputClient creates a new, not-yet-connected audio input client.
func NewAudioInputClient() *AudioInputClient {
	client := &AudioInputClient{}
	return client
}
// Connect dials the audio input server's Unix socket, retrying with
// exponential backoff (50ms base, capped at 500ms) since the server may not
// be listening yet. Idempotent: returns nil when already connected.
// Fix: the original discarded the dial error, returning an opaque failure;
// the last error is now wrapped with %w so callers can diagnose it.
func (aic *AudioInputClient) Connect() error {
	aic.mtx.Lock()
	defer aic.mtx.Unlock()
	if aic.running {
		return nil // Already connected
	}
	socketPath := getInputSocketPath()
	// Try connecting multiple times as the server might not be ready
	var lastErr error
	for i := 0; i < 10; i++ {
		conn, err := net.Dial("unix", socketPath)
		if err == nil {
			aic.conn = conn
			aic.running = true
			return nil
		}
		lastErr = err
		// Exponential backoff starting at 50ms
		delay := time.Duration(50*(1<<uint(i/3))) * time.Millisecond
		if delay > 500*time.Millisecond {
			delay = 500 * time.Millisecond
		}
		time.Sleep(delay)
	}
	// Surface the underlying dial error instead of discarding it.
	return fmt.Errorf("failed to connect to audio input server: %w", lastErr)
}
// Disconnect closes the connection to the audio input server, sending a
// best-effort stop message first. Safe to call when not connected.
func (aic *AudioInputClient) Disconnect() {
	aic.mtx.Lock()
	defer aic.mtx.Unlock()
	if !aic.running {
		return
	}
	aic.running = false
	if aic.conn == nil {
		return
	}
	// Tell the server we are going away; errors during shutdown are ignored.
	stop := InputIPCMessage{
		Magic:     inputMagicNumber,
		Type:      InputMessageTypeStop,
		Length:    0,
		Timestamp: time.Now().UnixNano(),
	}
	_ = aic.writeMessage(&stop)
	aic.conn.Close()
	aic.conn = nil
}
// SendFrame sends one encoded Opus frame to the audio input server.
// Empty frames are ignored; frames above maxFrameSize are rejected.
func (aic *AudioInputClient) SendFrame(frame []byte) error {
	aic.mtx.Lock()
	defer aic.mtx.Unlock()
	switch {
	case !aic.running || aic.conn == nil:
		return fmt.Errorf("not connected")
	case len(frame) == 0:
		return nil // Empty frame, ignore
	case len(frame) > maxFrameSize:
		return fmt.Errorf("frame too large: %d bytes", len(frame))
	}
	return aic.writeMessage(&InputIPCMessage{
		Magic:     inputMagicNumber,
		Type:      InputMessageTypeOpusFrame,
		Length:    uint32(len(frame)),
		Timestamp: time.Now().UnixNano(),
		Data:      frame,
	})
}
// SendFrameZeroCopy sends a zero-copy Opus frame to the audio input server.
// The outgoing message references the frame's buffer directly; no copy is made.
func (aic *AudioInputClient) SendFrameZeroCopy(frame *ZeroCopyAudioFrame) error {
	aic.mtx.Lock()
	defer aic.mtx.Unlock()
	switch {
	case !aic.running || aic.conn == nil:
		return fmt.Errorf("not connected")
	case frame == nil || frame.Length() == 0:
		return nil // Empty frame, ignore
	case frame.Length() > maxFrameSize:
		return fmt.Errorf("frame too large: %d bytes", frame.Length())
	}
	return aic.writeMessage(&InputIPCMessage{
		Magic:     inputMagicNumber,
		Type:      InputMessageTypeOpusFrame,
		Length:    uint32(frame.Length()),
		Timestamp: time.Now().UnixNano(),
		Data:      frame.Data(), // Zero-copy data access
	})
}
// SendConfig sends a configuration update to the audio input server.
// Payload layout: three little-endian uint32 values - sample rate,
// channel count, frame size.
func (aic *AudioInputClient) SendConfig(config InputIPCConfig) error {
	aic.mtx.Lock()
	defer aic.mtx.Unlock()
	if !aic.running || aic.conn == nil {
		return fmt.Errorf("not connected")
	}
	// Serialize config (simple binary format: 3 * uint32)
	payload := make([]byte, 12)
	fields := []uint32{uint32(config.SampleRate), uint32(config.Channels), uint32(config.FrameSize)}
	for i, v := range fields {
		binary.LittleEndian.PutUint32(payload[i*4:i*4+4], v)
	}
	return aic.writeMessage(&InputIPCMessage{
		Magic:     inputMagicNumber,
		Type:      InputMessageTypeConfig,
		Length:    uint32(len(payload)),
		Timestamp: time.Now().UnixNano(),
		Data:      payload,
	})
}
// SendHeartbeat sends a heartbeat message so the server can detect a
// live client.
func (aic *AudioInputClient) SendHeartbeat() error {
	aic.mtx.Lock()
	defer aic.mtx.Unlock()
	if !aic.running || aic.conn == nil {
		return fmt.Errorf("not connected")
	}
	heartbeat := InputIPCMessage{
		Magic:     inputMagicNumber,
		Type:      InputMessageTypeHeartbeat,
		Length:    0,
		Timestamp: time.Now().UnixNano(),
	}
	return aic.writeMessage(&heartbeat)
}
// writeMessage writes a message (header + optional payload) to the server.
// The write is bounded two ways: a socket write deadline, so the writer
// goroutine can never block indefinitely on a stalled connection, and a
// context timeout that lets the caller drop the frame promptly.
// Fix: previously only the context bounded the wait - on timeout the
// goroutine leaked, stuck in conn.Write, and its late write could
// interleave with the next message and corrupt the byte stream. The
// deadline guarantees the goroutine terminates shortly after timeout.
// Errors and timeouts are counted as dropped frames.
func (aic *AudioInputClient) writeMessage(msg *InputIPCMessage) error {
	// Increment total frames counter
	atomic.AddInt64(&aic.totalFrames, 1)
	// Get optimized message from pool for header preparation
	optMsg := globalMessagePool.Get()
	defer globalMessagePool.Put(optMsg)
	// Prepare header in pre-allocated buffer
	binary.LittleEndian.PutUint32(optMsg.header[0:4], msg.Magic)
	optMsg.header[4] = byte(msg.Type)
	binary.LittleEndian.PutUint32(optMsg.header[5:9], msg.Length)
	binary.LittleEndian.PutUint64(optMsg.header[9:17], uint64(msg.Timestamp))
	// Bound the physical write so the goroutine below always terminates.
	// Each call sets a fresh deadline, so no reset is needed afterwards.
	_ = aic.conn.SetWriteDeadline(time.Now().Add(writeTimeout))
	ctx, cancel := context.WithTimeout(context.Background(), writeTimeout)
	defer cancel()
	// Create a channel to signal write completion
	done := make(chan error, 1)
	go func() {
		// Write header using pre-allocated buffer
		if _, err := aic.conn.Write(optMsg.header[:]); err != nil {
			done <- err
			return
		}
		// Write data if present
		if msg.Length > 0 && msg.Data != nil {
			if _, err := aic.conn.Write(msg.Data); err != nil {
				done <- err
				return
			}
		}
		done <- nil
	}()
	// Wait for completion or timeout
	select {
	case err := <-done:
		if err != nil {
			atomic.AddInt64(&aic.droppedFrames, 1)
			return err
		}
		return nil
	case <-ctx.Done():
		// Timeout occurred - drop frame to prevent blocking
		atomic.AddInt64(&aic.droppedFrames, 1)
		return fmt.Errorf("write timeout - frame dropped")
	}
}
// IsConnected reports whether the client currently holds a live connection.
func (aic *AudioInputClient) IsConnected() bool {
	aic.mtx.Lock()
	connected := aic.running && aic.conn != nil
	aic.mtx.Unlock()
	return connected
}
// GetFrameStats returns the total and dropped frame counters.
func (aic *AudioInputClient) GetFrameStats() (total, dropped int64) {
	total = atomic.LoadInt64(&aic.totalFrames)
	dropped = atomic.LoadInt64(&aic.droppedFrames)
	return total, dropped
}
// GetDropRate returns the current frame drop rate as a percentage (0-100).
func (aic *AudioInputClient) GetDropRate() float64 {
	dropped := atomic.LoadInt64(&aic.droppedFrames)
	if total := atomic.LoadInt64(&aic.totalFrames); total > 0 {
		return float64(dropped) / float64(total) * 100.0
	}
	return 0.0
}
// ResetStats zeroes both frame counters.
func (aic *AudioInputClient) ResetStats() {
	for _, counter := range []*int64{&aic.totalFrames, &aic.droppedFrames} {
		atomic.StoreInt64(counter, 0)
	}
}
// startReaderGoroutine starts the goroutine that reads messages from the
// connected client and forwards them to the message channel.
// Fix: the original default branch spun in a hot loop both while no
// client was connected (ais.conn == nil) and after every read error,
// pinning a CPU core; both paths now back off briefly before retrying.
func (ais *AudioInputServer) startReaderGoroutine() {
	ais.wg.Add(1)
	go func() {
		defer ais.wg.Done()
		for {
			select {
			case <-ais.stopChan:
				return
			default:
				if ais.conn == nil {
					// No client yet - avoid a busy spin while waiting
					time.Sleep(10 * time.Millisecond)
					continue
				}
				msg, err := ais.readMessage(ais.conn)
				if err != nil {
					// Connection error - back off briefly, then retry
					time.Sleep(time.Millisecond)
					continue
				}
				// Send to message channel with non-blocking write
				select {
				case ais.messageChan <- msg:
					atomic.AddInt64(&ais.totalFrames, 1)
				default:
					// Channel full, drop message
					atomic.AddInt64(&ais.droppedFrames, 1)
				}
			}
		}
	}()
}
// startProcessorGoroutine starts the message processor goroutine.
// It runs pinned to an OS thread at elevated scheduling priority and
// moves messages from messageChan to processChan, shedding the oldest
// queued frames when the processing queue backs up so newer audio wins.
func (ais *AudioInputServer) startProcessorGoroutine() {
	ais.wg.Add(1)
	go func() {
		runtime.LockOSThread()
		defer runtime.UnlockOSThread()
		// Set high priority for audio processing
		logger := logging.GetDefaultLogger().With().Str("component", "audio-input-processor").Logger()
		if err := SetAudioThreadPriority(); err != nil {
			logger.Warn().Err(err).Msg("Failed to set audio processing priority")
		}
		defer func() {
			if err := ResetThreadPriority(); err != nil {
				logger.Warn().Err(err).Msg("Failed to reset thread priority")
			}
		}()
		defer ais.wg.Done()
		for {
			select {
			case <-ais.stopChan:
				return
			case msg := <-ais.messageChan:
				// Intelligent frame dropping: prioritize recent frames
				if msg.Type == InputMessageTypeOpusFrame {
					// Check if processing queue is getting full
					queueLen := len(ais.processChan)
					bufferSize := int(atomic.LoadInt64(&ais.bufferSize))
					if queueLen > bufferSize*3/4 {
						// Above 75% of the adaptive budget: evict the
						// oldest queued frame to make room for this one.
						select {
						case <-ais.processChan: // Remove oldest
							atomic.AddInt64(&ais.droppedFrames, 1)
						default:
						}
					}
				}
				// Send to processing queue (non-blocking)
				select {
				case ais.processChan <- msg:
				default:
					// Processing queue full, drop frame
					atomic.AddInt64(&ais.droppedFrames, 1)
				}
			}
		}
	}()
}
// startMonitorGoroutine starts the performance monitoring goroutine.
// Every 100ms it drains the processing queue, measures per-frame
// latency (end-to-end via the message timestamp when available,
// otherwise processing time only), feeds the result to the adaptive
// buffer manager, and every 500ms refreshes the buffer size from it.
func (ais *AudioInputServer) startMonitorGoroutine() {
	ais.wg.Add(1)
	go func() {
		runtime.LockOSThread()
		defer runtime.UnlockOSThread()
		// Set I/O priority for monitoring
		logger := logging.GetDefaultLogger().With().Str("component", "audio-input-monitor").Logger()
		if err := SetAudioIOThreadPriority(); err != nil {
			logger.Warn().Err(err).Msg("Failed to set audio I/O priority")
		}
		defer func() {
			if err := ResetThreadPriority(); err != nil {
				logger.Warn().Err(err).Msg("Failed to reset thread priority")
			}
		}()
		defer ais.wg.Done()
		ticker := time.NewTicker(100 * time.Millisecond)
		defer ticker.Stop()
		// Buffer size update ticker (less frequent)
		bufferUpdateTicker := time.NewTicker(500 * time.Millisecond)
		defer bufferUpdateTicker.Stop()
		for {
			select {
			case <-ais.stopChan:
				return
			case <-ticker.C:
				// Drain and process everything queued this tick
				for {
					select {
					case msg := <-ais.processChan:
						start := time.Now()
						err := ais.processMessage(msg)
						processingTime := time.Since(start)
						// Calculate end-to-end latency using message timestamp
						var latency time.Duration
						if msg.Type == InputMessageTypeOpusFrame && msg.Timestamp > 0 {
							msgTime := time.Unix(0, msg.Timestamp)
							latency = time.Since(msgTime)
							// Exponential moving average:
							// 90% historical, 10% current sample
							currentAvg := atomic.LoadInt64(&ais.processingTime)
							newAvg := (currentAvg*9 + latency.Nanoseconds()) / 10
							atomic.StoreInt64(&ais.processingTime, newAvg)
						} else {
							// Fallback: processing time only, 50/50 average
							latency = processingTime
							currentAvg := atomic.LoadInt64(&ais.processingTime)
							newAvg := (currentAvg + processingTime.Nanoseconds()) / 2
							atomic.StoreInt64(&ais.processingTime, newAvg)
						}
						// Report latency to adaptive buffer manager
						ais.ReportLatency(latency)
						if err != nil {
							atomic.AddInt64(&ais.droppedFrames, 1)
						}
					default:
						// No more messages to process
						goto checkBufferUpdate
					}
				}
			checkBufferUpdate:
				// Check if we need to update buffer size
				select {
				case <-bufferUpdateTicker.C:
					// Update buffer size from adaptive buffer manager
					ais.UpdateBufferSize()
				default:
					// No buffer update needed
				}
			}
		}
	}()
}
// GetServerStats returns server performance statistics: total and
// dropped frame counts, the smoothed processing latency, and the
// current adaptive buffer size.
func (ais *AudioInputServer) GetServerStats() (total, dropped int64, avgProcessingTime time.Duration, bufferSize int64) {
	total = atomic.LoadInt64(&ais.totalFrames)
	dropped = atomic.LoadInt64(&ais.droppedFrames)
	avgProcessingTime = time.Duration(atomic.LoadInt64(&ais.processingTime))
	bufferSize = atomic.LoadInt64(&ais.bufferSize)
	return total, dropped, avgProcessingTime, bufferSize
}
// UpdateBufferSize pulls the latest recommended input buffer size from
// the adaptive buffer manager and publishes it atomically.
func (ais *AudioInputServer) UpdateBufferSize() {
	size := int64(GetAdaptiveBufferManager().GetInputBufferSize())
	atomic.StoreInt64(&ais.bufferSize, size)
}
// ReportLatency forwards a measured processing latency to the adaptive
// buffer manager.
func (ais *AudioInputServer) ReportLatency(latency time.Duration) {
	GetAdaptiveBufferManager().UpdateLatency(latency)
}
// GetMessagePoolStats returns detailed statistics about the message pool,
// including the hit rate of pooled vs. freshly allocated messages.
func (mp *MessagePool) GetMessagePoolStats() MessagePoolStats {
	mp.mutex.RLock()
	preallocated := len(mp.preallocated)
	mp.mutex.RUnlock()
	hits := atomic.LoadInt64(&mp.hitCount)
	misses := atomic.LoadInt64(&mp.missCount)
	var hitRate float64
	if requests := hits + misses; requests > 0 {
		hitRate = float64(hits) / float64(requests) * 100
	}
	return MessagePoolStats{
		MaxPoolSize:       mp.maxPoolSize,
		ChannelPoolSize:   len(mp.pool),
		PreallocatedCount: int64(preallocated),
		PreallocatedMax:   int64(mp.preallocSize),
		HitCount:          hits,
		MissCount:         misses,
		HitRate:           hitRate,
	}
}
// MessagePoolStats provides detailed message pool statistics.
type MessagePoolStats struct {
	MaxPoolSize       int     // Configured capacity of the channel pool
	ChannelPoolSize   int     // Messages currently available in the channel pool
	PreallocatedCount int64   // Messages currently held in the preallocated list
	PreallocatedMax   int64   // Configured preallocation target
	HitCount          int64   // Requests served from the pool
	MissCount         int64   // Requests that required a fresh allocation
	HitRate           float64 // Percentage of requests served from the pool
}
// GetGlobalMessagePoolStats returns statistics for the process-wide
// global message pool shared by all input IPC clients.
func GetGlobalMessagePoolStats() MessagePoolStats {
	return globalMessagePool.GetMessagePoolStats()
}
// Helper functions

// getInputSocketPath returns the unix socket path for audio input IPC.
// The JETKVM_AUDIO_INPUT_SOCKET environment variable overrides the
// default location under /var/run.
func getInputSocketPath() string {
	if override := os.Getenv("JETKVM_AUDIO_INPUT_SOCKET"); override != "" {
		return override
	}
	return filepath.Join("/var/run", inputSocketName)
}

View File

@ -1,238 +0,0 @@
package audio
import (
"context"
"sync/atomic"
"time"
"github.com/jetkvm/kvm/internal/logging"
"github.com/rs/zerolog"
)
// AudioInputIPCManager manages microphone input using IPC when enabled.
// It owns the supervisor for the input-server subprocess and tracks
// send/drop/latency metrics.
type AudioInputIPCManager struct {
	// metrics kept first: it contains int64 counters accessed atomically
	// (8-byte alignment on ARM32, consistent with other structs here)
	metrics    AudioInputMetrics
	supervisor *AudioInputSupervisor // Spawns the subprocess and owns the IPC client
	logger     zerolog.Logger
	running    int32              // Atomic flag: 1 while started
	ctx        context.Context    // Manager lifetime
	cancel     context.CancelFunc // Cancels ctx on Stop
}
// NewAudioInputIPCManager creates a new IPC-based audio input manager
// with its own cancellable lifetime context and supervisor.
func NewAudioInputIPCManager() *AudioInputIPCManager {
	ctx, cancel := context.WithCancel(context.Background())
	mgr := &AudioInputIPCManager{
		supervisor: NewAudioInputSupervisor(),
		logger:     logging.GetDefaultLogger().With().Str("component", "audio-input-ipc").Logger(),
		ctx:        ctx,
		cancel:     cancel,
	}
	return mgr
}
// Start starts the IPC-based audio input system: it launches the audio
// input server subprocess via the supervisor, waits 200ms for it to
// come up, then pushes the initial audio configuration (non-fatal if
// that fails). Idempotent: a second Start while running is a no-op.
func (aim *AudioInputIPCManager) Start() error {
	if !atomic.CompareAndSwapInt32(&aim.running, 0, 1) {
		return nil
	}
	aim.logger.Info().Msg("Starting IPC-based audio input system")
	err := aim.supervisor.Start()
	if err != nil {
		atomic.StoreInt32(&aim.running, 0)
		aim.logger.Error().Err(err).Msg("Failed to start audio input supervisor")
		return err
	}
	// 48kHz stereo with 960-sample (20ms) frames
	config := InputIPCConfig{
		SampleRate: 48000,
		Channels:   2,
		FrameSize:  960,
	}
	// Wait with timeout for subprocess readiness
	select {
	case <-time.After(200 * time.Millisecond):
	case <-aim.ctx.Done():
		aim.supervisor.Stop()
		atomic.StoreInt32(&aim.running, 0)
		return aim.ctx.Err()
	}
	err = aim.supervisor.SendConfig(config)
	if err != nil {
		// Non-fatal: the server may still be starting up
		aim.logger.Warn().Err(err).Msg("Failed to send initial config, will retry later")
	}
	aim.logger.Info().Msg("IPC-based audio input system started")
	return nil
}
// Stop stops the IPC-based audio input system by cancelling the manager
// context and stopping the supervisor (which terminates the subprocess).
// Idempotent: only the first call after Start does any work.
func (aim *AudioInputIPCManager) Stop() {
	if !atomic.CompareAndSwapInt32(&aim.running, 1, 0) {
		return
	}
	aim.logger.Info().Msg("Stopping IPC-based audio input system")
	aim.cancel()
	aim.supervisor.Stop()
	aim.logger.Info().Msg("IPC-based audio input system stopped")
}
// WriteOpusFrame sends an Opus frame to the audio input server via IPC.
// Frames are silently ignored while the manager is not running, as are
// empty frames. Failed sends are counted as dropped.
func (aim *AudioInputIPCManager) WriteOpusFrame(frame []byte) error {
	if atomic.LoadInt32(&aim.running) == 0 {
		return nil // Not running, silently ignore
	}
	if len(frame) == 0 {
		return nil // Empty frame, ignore
	}
	// Record send metrics and start the latency clock
	sendStart := time.Now()
	atomic.AddInt64(&aim.metrics.FramesSent, 1)
	atomic.AddInt64(&aim.metrics.BytesProcessed, int64(len(frame)))
	aim.metrics.LastFrameTime = sendStart
	// Send frame via IPC
	if err := aim.supervisor.SendFrame(frame); err != nil {
		// Count as dropped frame
		atomic.AddInt64(&aim.metrics.FramesDropped, 1)
		aim.logger.Debug().Err(err).Msg("Failed to send frame via IPC")
		return err
	}
	// Track end-to-end IPC transmission time
	aim.updateLatencyMetrics(time.Since(sendStart))
	return nil
}
// WriteOpusFrameZeroCopy sends an Opus frame via IPC using the
// zero-copy path. Behavior mirrors WriteOpusFrame: ignored when not
// running or empty, counted as dropped on failure.
func (aim *AudioInputIPCManager) WriteOpusFrameZeroCopy(frame *ZeroCopyAudioFrame) error {
	if atomic.LoadInt32(&aim.running) == 0 {
		return nil // Not running, silently ignore
	}
	if frame == nil || frame.Length() == 0 {
		return nil // Empty frame, ignore
	}
	// Record send metrics and start the latency clock
	sendStart := time.Now()
	atomic.AddInt64(&aim.metrics.FramesSent, 1)
	atomic.AddInt64(&aim.metrics.BytesProcessed, int64(frame.Length()))
	aim.metrics.LastFrameTime = sendStart
	// Send frame via IPC using zero-copy data
	if err := aim.supervisor.SendFrameZeroCopy(frame); err != nil {
		// Count as dropped frame
		atomic.AddInt64(&aim.metrics.FramesDropped, 1)
		aim.logger.Debug().Err(err).Msg("Failed to send zero-copy frame via IPC")
		return err
	}
	// Track end-to-end IPC transmission time
	aim.updateLatencyMetrics(time.Since(sendStart))
	return nil
}
// IsRunning reports whether the IPC manager is running.
func (aim *AudioInputIPCManager) IsRunning() bool {
	state := atomic.LoadInt32(&aim.running)
	return state == 1
}
// IsReady reports whether the IPC manager is ready to receive frames,
// i.e. it is running and the supervisor's client is connected to the
// audio input server.
func (aim *AudioInputIPCManager) IsReady() bool {
	return aim.IsRunning() && aim.supervisor.IsConnected()
}
// GetMetrics returns a snapshot of the current input metrics.
// NOTE(review): AverageLatency and LastFrameTime are read here without
// synchronization while the frame path (updateLatencyMetrics /
// WriteOpusFrame) writes them concurrently - a data race under the Go
// memory model; confirm and guard with a mutex or atomics.
func (aim *AudioInputIPCManager) GetMetrics() AudioInputMetrics {
	return AudioInputMetrics{
		FramesSent:      atomic.LoadInt64(&aim.metrics.FramesSent),
		FramesDropped:   atomic.LoadInt64(&aim.metrics.FramesDropped),
		BytesProcessed:  atomic.LoadInt64(&aim.metrics.BytesProcessed),
		ConnectionDrops: atomic.LoadInt64(&aim.metrics.ConnectionDrops),
		AverageLatency:  aim.metrics.AverageLatency,
		LastFrameTime:   aim.metrics.LastFrameTime,
	}
}
// updateLatencyMetrics folds a new latency sample into the running
// average using an exponential moving average (alpha = 0.1); the first
// sample seeds the average directly.
// NOTE(review): AverageLatency is written here without synchronization
// and read in GetMetrics - confirm concurrency and guard.
func (aim *AudioInputIPCManager) updateLatencyMetrics(latency time.Duration) {
	currentAvg := aim.metrics.AverageLatency
	if currentAvg == 0 {
		aim.metrics.AverageLatency = latency
	} else {
		// EMA with alpha = 0.1 for smooth averaging
		aim.metrics.AverageLatency = time.Duration(float64(currentAvg)*0.9 + float64(latency)*0.1)
	}
}
// GetDetailedMetrics returns the metrics snapshot plus a map of
// client/server statistics suitable for diagnostics endpoints.
func (aim *AudioInputIPCManager) GetDetailedMetrics() (AudioInputMetrics, map[string]interface{}) {
	snapshot := aim.GetMetrics()
	// Client-side frame statistics
	var (
		clientTotal   int64
		clientDropped int64
		clientDropPct float64
	)
	if c := aim.supervisor.GetClient(); c != nil {
		clientTotal, clientDropped = c.GetFrameStats()
		clientDropPct = c.GetDropRate()
	}
	// Server-side status
	serverStats := map[string]interface{}{}
	if aim.supervisor.IsRunning() {
		serverStats["status"] = "running"
	} else {
		serverStats["status"] = "stopped"
	}
	details := map[string]interface{}{
		"client_total_frames":   clientTotal,
		"client_dropped_frames": clientDropped,
		"client_drop_rate":      clientDropPct,
		"server_stats":          serverStats,
		"ipc_latency_ms":        float64(snapshot.AverageLatency.Nanoseconds()) / 1e6,
		"frames_per_second":     aim.calculateFrameRate(),
	}
	return snapshot, details
}
// calculateFrameRate reports the nominal frame rate: 0 until any frame
// has been sent, then the typical Opus rate of 50 frames/second
// (20ms frames). It does not measure an actual rate.
func (aim *AudioInputIPCManager) calculateFrameRate() float64 {
	if atomic.LoadInt64(&aim.metrics.FramesSent) == 0 {
		return 0.0
	}
	// Return typical Opus frame rate
	return 50.0
}
// GetSupervisor returns the underlying supervisor for advanced
// operations (process metrics, direct client access). Never nil once
// the manager is constructed.
func (aim *AudioInputIPCManager) GetSupervisor() *AudioInputSupervisor {
	return aim.supervisor
}

View File

@ -1,71 +0,0 @@
package audio
import (
"context"
"os"
"os/signal"
"syscall"
"time"
"github.com/jetkvm/kvm/internal/logging"
)
// RunAudioInputServer runs the audio input server subprocess.
// This should be called from main() when the subprocess is detected.
// It starts adaptive buffering, initializes the CGO playback backend,
// starts the IPC server, then blocks until SIGINT/SIGTERM before
// shutting everything down in reverse order via the defers.
func RunAudioInputServer() error {
	logger := logging.GetDefaultLogger().With().Str("component", "audio-input-server").Logger()
	logger.Info().Msg("Starting audio input server subprocess")
	// Start adaptive buffer management for optimal performance
	StartAdaptiveBuffering()
	defer StopAdaptiveBuffering()
	// Initialize CGO audio system
	err := CGOAudioPlaybackInit()
	if err != nil {
		logger.Error().Err(err).Msg("Failed to initialize CGO audio playback")
		return err
	}
	defer CGOAudioPlaybackClose()
	// Create and start the IPC server
	server, err := NewAudioInputServer()
	if err != nil {
		logger.Error().Err(err).Msg("Failed to create audio input server")
		return err
	}
	defer server.Close()
	err = server.Start()
	if err != nil {
		logger.Error().Err(err).Msg("Failed to start audio input server")
		return err
	}
	logger.Info().Msg("Audio input server started, waiting for connections")
	// Set up signal handling for graceful shutdown.
	// NOTE(review): ctx is only cancelled by the deferred cancel below,
	// so the ctx.Done() branch is effectively unreachable; shutdown is
	// driven solely by the signal channel.
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)
	// Wait for shutdown signal
	select {
	case sig := <-sigChan:
		logger.Info().Str("signal", sig.String()).Msg("Received shutdown signal")
	case <-ctx.Done():
		logger.Info().Msg("Context cancelled")
	}
	// Graceful shutdown
	logger.Info().Msg("Shutting down audio input server")
	server.Stop()
	// Give some time for cleanup
	time.Sleep(100 * time.Millisecond)
	logger.Info().Msg("Audio input server subprocess stopped")
	return nil
}

View File

@ -1,271 +0,0 @@
package audio
import (
"context"
"fmt"
"os"
"os/exec"
"sync"
"syscall"
"time"
"github.com/jetkvm/kvm/internal/logging"
"github.com/rs/zerolog"
)
// AudioInputSupervisor manages the audio input server subprocess:
// spawning, monitoring, graceful termination, and the IPC client
// connection to it.
type AudioInputSupervisor struct {
	cmd            *exec.Cmd          // Running subprocess; nil when stopped
	cancel         context.CancelFunc // Cancels the subprocess context
	mtx            sync.Mutex         // Guards cmd, cancel, running
	running        bool
	logger         zerolog.Logger
	client         *AudioInputClient // IPC client connected to the subprocess
	processMonitor *ProcessMonitor   // Shared process metrics collector
}
// NewAudioInputSupervisor creates a new audio input supervisor with a
// fresh IPC client and the shared process monitor attached.
func NewAudioInputSupervisor() *AudioInputSupervisor {
	sup := &AudioInputSupervisor{
		logger:         logging.GetDefaultLogger().With().Str("component", "audio-input-supervisor").Logger(),
		client:         NewAudioInputClient(),
		processMonitor: GetProcessMonitor(),
	}
	return sup
}
// Start starts the audio input server subprocess (this binary re-run
// with --audio-input-server), registers it with the process monitor,
// and launches background goroutines to watch the process and connect
// the IPC client. Fails if already running or the spawn fails.
func (ais *AudioInputSupervisor) Start() error {
	ais.mtx.Lock()
	defer ais.mtx.Unlock()
	if ais.running {
		return fmt.Errorf("audio input supervisor already running")
	}
	// Create context for subprocess management
	ctx, cancel := context.WithCancel(context.Background())
	ais.cancel = cancel
	// Get current executable path
	execPath, err := os.Executable()
	if err != nil {
		// NOTE(review): returning here without calling cancel leaks the
		// context created above - confirm and call cancel on this path.
		return fmt.Errorf("failed to get executable path: %w", err)
	}
	// Create command for audio input server subprocess
	cmd := exec.CommandContext(ctx, execPath, "--audio-input-server")
	cmd.Env = append(os.Environ(),
		"JETKVM_AUDIO_INPUT_IPC=true", // Enable IPC mode
	)
	// Set process group to allow clean termination
	cmd.SysProcAttr = &syscall.SysProcAttr{
		Setpgid: true,
	}
	ais.cmd = cmd
	ais.running = true
	// Start the subprocess
	err = cmd.Start()
	if err != nil {
		ais.running = false
		cancel()
		return fmt.Errorf("failed to start audio input server: %w", err)
	}
	ais.logger.Info().Int("pid", cmd.Process.Pid).Msg("Audio input server subprocess started")
	// Add process to monitoring
	ais.processMonitor.AddProcess(cmd.Process.Pid, "audio-input-server")
	// Monitor the subprocess in a goroutine
	go ais.monitorSubprocess()
	// Connect client to the server (after a short startup delay)
	go ais.connectClient()
	return nil
}
// Stop stops the audio input server subprocess: it disconnects the IPC
// client, cancels the subprocess context, sends SIGTERM, and escalates
// to SIGKILL after a 5s grace period. Safe to call when not running.
// NOTE(review): monitorSubprocess (started in Start) is already blocked
// in cmd.Wait(); the Wait() in the goroutine below is therefore a
// second concurrent Wait on the same *exec.Cmd, which os/exec does not
// support - confirm and have Stop wait on a signal from the monitor
// goroutine instead.
func (ais *AudioInputSupervisor) Stop() {
	ais.mtx.Lock()
	defer ais.mtx.Unlock()
	if !ais.running {
		return
	}
	ais.running = false
	// Disconnect client first
	if ais.client != nil {
		ais.client.Disconnect()
	}
	// Cancel context to signal subprocess to stop
	if ais.cancel != nil {
		ais.cancel()
	}
	// Try graceful termination first
	if ais.cmd != nil && ais.cmd.Process != nil {
		ais.logger.Info().Int("pid", ais.cmd.Process.Pid).Msg("Stopping audio input server subprocess")
		// Send SIGTERM
		err := ais.cmd.Process.Signal(syscall.SIGTERM)
		if err != nil {
			ais.logger.Warn().Err(err).Msg("Failed to send SIGTERM to audio input server")
		}
		// Wait for graceful shutdown with timeout
		done := make(chan error, 1)
		go func() {
			done <- ais.cmd.Wait()
		}()
		select {
		case <-done:
			ais.logger.Info().Msg("Audio input server subprocess stopped gracefully")
		case <-time.After(5 * time.Second):
			// Force kill if graceful shutdown failed
			ais.logger.Warn().Msg("Audio input server subprocess did not stop gracefully, force killing")
			err := ais.cmd.Process.Kill()
			if err != nil {
				ais.logger.Error().Err(err).Msg("Failed to kill audio input server subprocess")
			}
		}
	}
	ais.cmd = nil
	ais.cancel = nil
}
// IsRunning reports whether the supervisor currently manages a subprocess.
func (ais *AudioInputSupervisor) IsRunning() bool {
	ais.mtx.Lock()
	running := ais.running
	ais.mtx.Unlock()
	return running
}
// IsConnected reports whether the client is connected to the audio
// input server (always false when the supervisor is not running).
func (ais *AudioInputSupervisor) IsConnected() bool {
	return ais.IsRunning() && ais.client.IsConnected()
}
// GetClient returns the IPC client used for sending audio frames.
// The client is created once in NewAudioInputSupervisor and is never nil.
func (ais *AudioInputSupervisor) GetClient() *AudioInputClient {
	return ais.client
}
// GetProcessMetrics returns current process metrics for the running
// subprocess, or nil when there is no process or the monitor has no
// sample for its PID.
func (ais *AudioInputSupervisor) GetProcessMetrics() *ProcessMetrics {
	ais.mtx.Lock()
	defer ais.mtx.Unlock()
	if ais.cmd == nil || ais.cmd.Process == nil {
		return nil
	}
	pid := ais.cmd.Process.Pid
	current := ais.processMonitor.GetCurrentMetrics()
	for i := range current {
		if current[i].PID == pid {
			return &current[i]
		}
	}
	return nil
}
// monitorSubprocess blocks until the subprocess exits, deregisters it
// from the process monitor, and - when the exit was not requested via
// Stop (running still true) - logs the unexpected exit, disconnects the
// client, and clears the running state.
// NOTE(review): ais.cmd is read here without holding mtx while Stop may
// set it to nil concurrently - confirm and take a snapshot under the lock.
func (ais *AudioInputSupervisor) monitorSubprocess() {
	if ais.cmd == nil {
		return
	}
	pid := ais.cmd.Process.Pid
	err := ais.cmd.Wait()
	// Remove process from monitoring
	ais.processMonitor.RemoveProcess(pid)
	ais.mtx.Lock()
	defer ais.mtx.Unlock()
	if ais.running {
		// Unexpected exit
		if err != nil {
			ais.logger.Error().Err(err).Msg("Audio input server subprocess exited unexpectedly")
		} else {
			ais.logger.Warn().Msg("Audio input server subprocess exited unexpectedly")
		}
		// Disconnect client
		if ais.client != nil {
			ais.client.Disconnect()
		}
		// Mark as not running
		ais.running = false
		ais.cmd = nil
		ais.logger.Info().Msg("Audio input server subprocess monitoring stopped")
	}
}
// connectClient attempts to connect the IPC client to the freshly
// started server after a short startup delay; failures are logged only
// (the client's own retry loop handles transient unavailability).
func (ais *AudioInputSupervisor) connectClient() {
	// Wait briefly for the server to start (reduced from 500ms)
	time.Sleep(100 * time.Millisecond)
	if err := ais.client.Connect(); err != nil {
		ais.logger.Error().Err(err).Msg("Failed to connect to audio input server")
		return
	}
	ais.logger.Info().Msg("Connected to audio input server")
}
// SendFrame sends an audio frame to the subprocess (convenience method
// that validates client state first).
func (ais *AudioInputSupervisor) SendFrame(frame []byte) error {
	switch {
	case ais.client == nil:
		return fmt.Errorf("client not initialized")
	case !ais.client.IsConnected():
		return fmt.Errorf("client not connected")
	}
	return ais.client.SendFrame(frame)
}
// SendFrameZeroCopy sends a zero-copy frame to the subprocess after
// validating client state.
func (ais *AudioInputSupervisor) SendFrameZeroCopy(frame *ZeroCopyAudioFrame) error {
	switch {
	case ais.client == nil:
		return fmt.Errorf("client not initialized")
	case !ais.client.IsConnected():
		return fmt.Errorf("client not connected")
	}
	return ais.client.SendFrameZeroCopy(frame)
}
// SendConfig sends a configuration update to the subprocess (convenience
// method that validates client state first).
func (ais *AudioInputSupervisor) SendConfig(config InputIPCConfig) error {
	switch {
	case ais.client == nil:
		return fmt.Errorf("client not initialized")
	case !ais.client.IsConnected():
		return fmt.Errorf("client not connected")
	}
	return ais.client.SendConfig(config)
}

View File

@ -1,525 +0,0 @@
package audio
import (
"context"
"encoding/binary"
"fmt"
"io"
"net"
"os"
"path/filepath"
"sync"
"sync/atomic"
"time"
"github.com/jetkvm/kvm/internal/logging"
"github.com/rs/zerolog"
)
// Output IPC wire-format and tuning constants.
const (
	outputMagicNumber      uint32 = 0x4A4B4F55            // "JKOU" (JetKVM Output)
	outputSocketName              = "audio_output.sock"   // Socket filename under /var/run
	outputMaxFrameSize            = 4096                  // Maximum Opus frame size in bytes
	outputWriteTimeout            = 10 * time.Millisecond // Non-blocking write timeout (increased for high load)
	outputMaxDroppedFrames        = 50                    // Maximum consecutive dropped frames
	outputHeaderSize              = 17                    // Fixed header size: 4+1+4+8 bytes (magic+type+length+timestamp)
	outputMessagePoolSize         = 128                   // Pre-allocated message pool size
)
// OutputMessageType represents the type of IPC message.
type OutputMessageType uint8

// Message types understood by the output IPC protocol.
const (
	OutputMessageTypeOpusFrame OutputMessageType = iota // Encoded Opus audio frame
	OutputMessageTypeConfig                             // Configuration update
	OutputMessageTypeStop                               // Shutdown request
	OutputMessageTypeHeartbeat                          // Liveness probe
	OutputMessageTypeAck                                // Acknowledgement
)
// OutputIPCMessage represents an IPC message for audio output.
type OutputIPCMessage struct {
	Magic     uint32            // Protocol magic (outputMagicNumber)
	Type      OutputMessageType // Message kind
	Length    uint32            // Payload length in bytes
	Timestamp int64             // Sender time, UnixNano
	Data      []byte            // Payload; nil when Length == 0
}
// OutputOptimizedMessage represents a pre-allocated message for
// zero-allocation operations on the frame-send hot path.
type OutputOptimizedMessage struct {
	header [outputHeaderSize]byte // Pre-allocated header buffer
	data   []byte                 // Reusable data buffer (outputMaxFrameSize bytes)
}
// OutputMessagePool manages pre-allocated messages for zero-allocation IPC.
type OutputMessagePool struct {
	pool chan *OutputOptimizedMessage // Buffered channel acting as a free list
}
// NewOutputMessagePool creates a message pool pre-filled with size
// messages so steady-state Get/Put never allocates.
func NewOutputMessagePool(size int) *OutputMessagePool {
	p := &OutputMessagePool{
		pool: make(chan *OutputOptimizedMessage, size),
	}
	// Pre-allocate every slot up front
	for i := 0; i < size; i++ {
		p.pool <- &OutputOptimizedMessage{data: make([]byte, outputMaxFrameSize)}
	}
	return p
}
// Get retrieves a message from the pool, allocating a fresh one when
// the pool is exhausted.
func (p *OutputMessagePool) Get() *OutputOptimizedMessage {
	select {
	case msg := <-p.pool:
		return msg
	default:
	}
	// Pool exhausted, create new message
	return &OutputOptimizedMessage{data: make([]byte, outputMaxFrameSize)}
}
// Put returns a message to the pool; when the pool is already full the
// message is simply released to the garbage collector.
func (p *OutputMessagePool) Put(msg *OutputOptimizedMessage) {
	select {
	case p.pool <- msg:
	default:
	}
}
// Global message pool for output IPC, shared by all server instances.
var globalOutputMessagePool = NewOutputMessagePool(outputMessagePoolSize)
// AudioServer serves encoded audio output frames to a single client
// over a unix domain socket, with adaptive buffering and latency
// monitoring.
type AudioServer struct {
	// Atomic fields must be first for proper alignment on ARM
	bufferSize    int64 // Current buffer size (atomic)
	droppedFrames int64 // Dropped frames counter (atomic)
	totalFrames   int64 // Total frames counter (atomic)
	listener      net.Listener // Unix socket listener
	conn          net.Conn     // Active client connection; nil when none
	mtx           sync.Mutex   // Guards conn and running
	running       bool
	// Advanced message handling
	messageChan chan *OutputIPCMessage // Buffered channel for incoming messages
	stopChan    chan struct{}          // Stop signal (closed once by Stop)
	wg          sync.WaitGroup         // Waits for the processor goroutine
	// Latency monitoring
	latencyMonitor    *LatencyMonitor
	adaptiveOptimizer *AdaptiveOptimizer
	// Socket buffer configuration applied to each accepted connection
	socketBufferConfig SocketBufferConfig
}
// NewAudioServer creates the audio output IPC server: it binds the unix
// socket (replacing any stale socket file) and wires up latency
// monitoring, adaptive buffering, and the adaptive optimizer.
func NewAudioServer() (*AudioServer, error) {
	socketPath := getOutputSocketPath()
	// Remove existing socket if any
	os.Remove(socketPath)
	listener, err := net.Listen("unix", socketPath)
	if err != nil {
		return nil, fmt.Errorf("failed to create unix socket: %w", err)
	}
	// Start with an adaptive buffer of 500 frames
	const initialBufferSize int64 = 500
	// Latency monitoring
	logger := zerolog.New(os.Stderr).With().Timestamp().Str("component", "audio-server").Logger()
	latencyMonitor := NewLatencyMonitor(DefaultLatencyConfig(), logger)
	// Adaptive buffering and optimization
	bufferManager := NewAdaptiveBufferManager(DefaultAdaptiveBufferConfig())
	optimizer := NewAdaptiveOptimizer(latencyMonitor, bufferManager, DefaultOptimizerConfig(), logger)
	return &AudioServer{
		listener:           listener,
		messageChan:        make(chan *OutputIPCMessage, initialBufferSize),
		stopChan:           make(chan struct{}),
		bufferSize:         initialBufferSize,
		latencyMonitor:     latencyMonitor,
		adaptiveOptimizer:  optimizer,
		socketBufferConfig: DefaultSocketBufferConfig(),
	}, nil
}
// Start starts the output server: the latency monitor, the adaptive
// optimizer, the message processor goroutine, and the connection accept
// loop. Returns an error if the server is already running.
func (s *AudioServer) Start() error {
	s.mtx.Lock()
	defer s.mtx.Unlock()
	if s.running {
		return fmt.Errorf("server already running")
	}
	s.running = true
	// Start latency monitoring and adaptive optimization
	if s.latencyMonitor != nil {
		s.latencyMonitor.Start()
	}
	if s.adaptiveOptimizer != nil {
		s.adaptiveOptimizer.Start()
	}
	// Start message processor goroutine
	s.startProcessorGoroutine()
	// Accept connections in a goroutine
	go s.acceptConnections()
	return nil
}
// acceptConnections accepts incoming client connections in a loop,
// configures socket buffers on each, and installs the newest connection
// as the active one (single-client server: any previous connection is
// closed).
// NOTE(review): s.running is read here without holding s.mtx while
// Stop() writes it under the lock - a data race under the Go memory
// model; confirm and make it an atomic flag.
func (s *AudioServer) acceptConnections() {
	for s.running {
		conn, err := s.listener.Accept()
		if err != nil {
			if s.running {
				// Only log error if we're still supposed to be running
				continue
			}
			return
		}
		// Configure socket buffers for optimal performance
		if err := ConfigureSocketBuffers(conn, s.socketBufferConfig); err != nil {
			// Log warning but don't fail - socket buffer optimization is not critical
			logger := logging.GetDefaultLogger().With().Str("component", "audio-server").Logger()
			logger.Warn().Err(err).Msg("Failed to configure socket buffers, continuing with defaults")
		} else {
			// Record socket buffer metrics for monitoring
			RecordSocketBufferMetrics(conn, "audio-output")
		}
		s.mtx.Lock()
		// Close existing connection if any
		if s.conn != nil {
			s.conn.Close()
		}
		s.conn = conn
		s.mtx.Unlock()
	}
}
// startProcessorGoroutine starts the message processor, which drains
// messageChan and writes Opus frames to the connected client. Send
// failures are counted as dropped frames and processing continues.
// The goroutine exits when stopChan is closed.
func (s *AudioServer) startProcessorGoroutine() {
	s.wg.Add(1)
	go func() {
		defer s.wg.Done()
		for {
			select {
			case msg := <-s.messageChan:
				// Process message (currently just frame sending)
				if msg.Type == OutputMessageTypeOpusFrame {
					if err := s.sendFrameToClient(msg.Data); err != nil {
						// Count the failure and keep processing
						atomic.AddInt64(&s.droppedFrames, 1)
					}
				}
			case <-s.stopChan:
				return
			}
		}
	}()
}
// Stop shuts the server down: stops the optimizer and latency monitor,
// signals the processor goroutine via stopChan, waits for it, then
// closes the active client connection. Guarded by running, so a second
// call is a no-op (stopChan is never closed twice).
// NOTE(review): wg.Wait() runs while s.mtx is held, but the processor
// goroutine calls sendFrameToClient, which also locks s.mtx - if a
// frame is in flight this can deadlock. Confirm and move the wait
// outside the lock.
func (s *AudioServer) Stop() {
	s.mtx.Lock()
	defer s.mtx.Unlock()
	if !s.running {
		return
	}
	s.running = false
	// Stop latency monitoring and adaptive optimization
	if s.adaptiveOptimizer != nil {
		s.adaptiveOptimizer.Stop()
	}
	if s.latencyMonitor != nil {
		s.latencyMonitor.Stop()
	}
	// Signal processor to stop
	close(s.stopChan)
	s.wg.Wait()
	if s.conn != nil {
		s.conn.Close()
		s.conn = nil
	}
}
// Close stops the server, closes the listener, and removes the socket
// file. Always returns nil.
func (s *AudioServer) Close() error {
	s.Stop()
	if l := s.listener; l != nil {
		l.Close()
	}
	// Remove socket file
	os.Remove(getOutputSocketPath())
	return nil
}
// SendFrame queues an Opus frame for asynchronous delivery to the connected
// client. The enqueue is non-blocking: when the processor channel is full
// the frame is dropped, the drop counter is bumped, and an error is returned
// so callers can observe backpressure.
func (s *AudioServer) SendFrame(frame []byte) error {
	if len(frame) > outputMaxFrameSize {
		return fmt.Errorf("frame size %d exceeds maximum %d", len(frame), outputMaxFrameSize)
	}
	start := time.Now()
	msg := &OutputIPCMessage{
		Magic:     outputMagicNumber,
		Type:      OutputMessageTypeOpusFrame,
		Length:    uint32(len(frame)),
		Timestamp: start.UnixNano(),
		Data:      frame,
	}
	select {
	case s.messageChan <- msg:
	default:
		// Channel full: dropping beats blocking the audio pipeline.
		atomic.AddInt64(&s.droppedFrames, 1)
		return fmt.Errorf("message channel full - frame dropped")
	}
	atomic.AddInt64(&s.totalFrames, 1)
	// Feed the enqueue latency into the monitor when one is attached.
	if s.latencyMonitor != nil {
		s.latencyMonitor.RecordLatency(time.Since(start), "ipc_send")
	}
	return nil
}
// sendFrameToClient writes one framed Opus message (17-byte header + payload)
// to the connected client, bounded by outputWriteTimeout. On timeout the
// frame is dropped so the processor never stalls behind a slow client.
//
// BUG FIX: previously the pooled header message was returned to the pool via
// a defer in THIS function, so on timeout the writer goroutine could still be
// writing optMsg.header after it had been recycled (use-after-put race). The
// goroutine now owns optMsg and returns it to the pool only when its writes
// are done. The connection is also captured locally so the goroutine never
// reads s.conn after this function returns.
//
// NOTE(review): on timeout the goroutine may still be writing `frame`, which
// the caller owns — confirm callers do not reuse frame buffers immediately.
func (s *AudioServer) sendFrameToClient(frame []byte) error {
	s.mtx.Lock()
	defer s.mtx.Unlock()
	if s.conn == nil {
		return fmt.Errorf("no client connected")
	}
	conn := s.conn
	start := time.Now()

	// Prepare the wire header in a pooled, pre-allocated buffer.
	optMsg := globalOutputMessagePool.Get()
	binary.LittleEndian.PutUint32(optMsg.header[0:4], outputMagicNumber)
	optMsg.header[4] = byte(OutputMessageTypeOpusFrame)
	binary.LittleEndian.PutUint32(optMsg.header[5:9], uint32(len(frame)))
	binary.LittleEndian.PutUint64(optMsg.header[9:17], uint64(start.UnixNano()))

	ctx, cancel := context.WithTimeout(context.Background(), outputWriteTimeout)
	defer cancel()

	done := make(chan error, 1) // buffered so the writer never blocks on send
	go func() {
		// The goroutine owns optMsg; recycle it only after the writes finish.
		defer globalOutputMessagePool.Put(optMsg)
		if _, err := conn.Write(optMsg.header[:]); err != nil {
			done <- err
			return
		}
		if len(frame) > 0 {
			if _, err := conn.Write(frame); err != nil {
				done <- err
				return
			}
		}
		done <- nil
	}()

	select {
	case err := <-done:
		if err != nil {
			atomic.AddInt64(&s.droppedFrames, 1)
			return err
		}
		if s.latencyMonitor != nil {
			s.latencyMonitor.RecordLatency(time.Since(start), "ipc_write")
		}
		return nil
	case <-ctx.Done():
		// Timeout occurred - drop frame to prevent blocking
		atomic.AddInt64(&s.droppedFrames, 1)
		return fmt.Errorf("write timeout - frame dropped")
	}
}
// GetServerStats returns server performance statistics: total frames queued,
// frames dropped, and the current buffer size, all read atomically.
func (s *AudioServer) GetServerStats() (total, dropped int64, bufferSize int64) {
	total = atomic.LoadInt64(&s.totalFrames)
	dropped = atomic.LoadInt64(&s.droppedFrames)
	bufferSize = atomic.LoadInt64(&s.bufferSize)
	return
}
// AudioClient connects to the audio output server over its unix socket and
// reads framed Opus messages from it. All connection state is guarded by mtx;
// the frame counters are updated atomically.
type AudioClient struct {
	// Atomic fields must be first for proper alignment on ARM
	droppedFrames int64 // Atomic counter for dropped frames (currently never incremented — see GetClientStats)
	totalFrames   int64 // Atomic counter for total frames received
	conn          net.Conn   // active connection; nil when disconnected
	mtx           sync.Mutex // guards conn and running
	running       bool       // true between successful Connect and Disconnect
}
// NewAudioClient returns a disconnected client; call Connect before use.
func NewAudioClient() *AudioClient {
	return new(AudioClient)
}
// Connect connects to the audio output server's unix socket, retrying up to
// 8 times with a capped exponential backoff (50ms tripling-step schedule,
// capped at 400ms) because the server may not be listening yet at startup.
// It is idempotent: a connected client returns nil immediately.
//
// FIX: the final error now wraps the last dial error (%w) instead of
// discarding it, so callers can see why the connection failed.
//
// NOTE(review): c.mtx is held across the retry sleeps, so Disconnect and
// IsConnected block for the duration of a failing Connect — confirm intended.
func (c *AudioClient) Connect() error {
	c.mtx.Lock()
	defer c.mtx.Unlock()
	if c.running {
		return nil // Already connected
	}
	socketPath := getOutputSocketPath()
	var lastErr error
	for i := 0; i < 8; i++ {
		conn, err := net.Dial("unix", socketPath)
		if err == nil {
			c.conn = conn
			c.running = true
			return nil
		}
		lastErr = err
		// Exponential backoff starting at 50ms, doubling every 3 attempts.
		delay := time.Duration(50*(1<<uint(i/3))) * time.Millisecond
		if delay > 400*time.Millisecond {
			delay = 400 * time.Millisecond
		}
		time.Sleep(delay)
	}
	return fmt.Errorf("failed to connect to audio output server at %s: %w", socketPath, lastErr)
}
// Disconnect closes the connection to the audio output server, if any.
// Calling it on an already-disconnected client is a no-op.
func (c *AudioClient) Disconnect() {
	c.mtx.Lock()
	defer c.mtx.Unlock()
	if !c.running {
		return
	}
	c.running = false
	if conn := c.conn; conn != nil {
		conn.Close()
		c.conn = nil
	}
}
// IsConnected reports whether the client currently holds a live connection.
func (c *AudioClient) IsConnected() bool {
	c.mtx.Lock()
	connected := c.running && c.conn != nil
	c.mtx.Unlock()
	return connected
}
// Close disconnects the client. It always returns nil; the error return
// exists only to satisfy io.Closer.
func (c *AudioClient) Close() error {
	c.Disconnect()
	return nil
}
// ReceiveFrame blocks until a full Opus-frame message is read from the
// server, then returns its payload (possibly empty). The 17-byte header is
// validated (magic, type, bounded length) before the payload is trusted.
// c.mtx is held for the duration of the read.
func (c *AudioClient) ReceiveFrame() ([]byte, error) {
	c.mtx.Lock()
	defer c.mtx.Unlock()
	if !c.running || c.conn == nil {
		return nil, fmt.Errorf("not connected")
	}
	// Borrow a pooled message purely for its pre-allocated header buffer.
	optMsg := globalOutputMessagePool.Get()
	defer globalOutputMessagePool.Put(optMsg)
	if _, err := io.ReadFull(c.conn, optMsg.header[:]); err != nil {
		return nil, fmt.Errorf("failed to read header: %w", err)
	}
	// Validate every header field before allocating for the payload.
	if magic := binary.LittleEndian.Uint32(optMsg.header[0:4]); magic != outputMagicNumber {
		return nil, fmt.Errorf("invalid magic number: %x", magic)
	}
	if msgType := OutputMessageType(optMsg.header[4]); msgType != OutputMessageTypeOpusFrame {
		return nil, fmt.Errorf("unexpected message type: %d", msgType)
	}
	size := binary.LittleEndian.Uint32(optMsg.header[5:9])
	if size > outputMaxFrameSize {
		return nil, fmt.Errorf("frame size %d exceeds maximum %d", size, outputMaxFrameSize)
	}
	frame := make([]byte, size)
	if size > 0 {
		if _, err := io.ReadFull(c.conn, frame); err != nil {
			return nil, fmt.Errorf("failed to read frame data: %w", err)
		}
	}
	atomic.AddInt64(&c.totalFrames, 1)
	return frame, nil
}
// GetClientStats returns the client's frame counters (total received and
// dropped), read atomically.
func (c *AudioClient) GetClientStats() (total, dropped int64) {
	total = atomic.LoadInt64(&c.totalFrames)
	dropped = atomic.LoadInt64(&c.droppedFrames)
	return
}
// Helper functions

// getOutputSocketPath returns the unix socket path for the output server:
// the JETKVM_AUDIO_OUTPUT_SOCKET environment variable when set, otherwise
// the default location under /var/run.
func getOutputSocketPath() string {
	path := os.Getenv("JETKVM_AUDIO_OUTPUT_SOCKET")
	if path == "" {
		path = filepath.Join("/var/run", outputSocketName)
	}
	return path
}

View File

@ -1,312 +0,0 @@
package audio
import (
"context"
"sync"
"sync/atomic"
"time"
"github.com/rs/zerolog"
)
// LatencyMonitor tracks and optimizes audio latency in real-time.
// Aggregates (current/min/max/EMA average/jitter sum) are kept in atomics so
// RecordLatency is cheap on the hot path; the bounded measurement history and
// the callback list have their own mutexes.
type LatencyMonitor struct {
	// Atomic fields MUST be first for ARM32 alignment (int64 fields need 8-byte alignment)
	currentLatency    int64 // Current latency in nanoseconds (atomic)
	averageLatency    int64 // Rolling average latency in nanoseconds (atomic, EMA with alpha=0.1)
	minLatency        int64 // Minimum observed latency in nanoseconds (atomic)
	maxLatency        int64 // Maximum observed latency in nanoseconds (atomic)
	latencySamples    int64 // Number of latency samples collected (atomic)
	jitterAccumulator int64 // Accumulated jitter for variance calculation (atomic)
	lastOptimization  int64 // Timestamp of last optimization in nanoseconds (atomic)
	config            LatencyConfig
	logger            zerolog.Logger
	// Control channels / lifecycle: ctx cancels the monitoring loop, wg waits for it.
	ctx    context.Context
	cancel context.CancelFunc
	wg     sync.WaitGroup
	// Optimization callbacks, guarded by mutex.
	optimizationCallbacks []OptimizationCallback
	mutex                 sync.RWMutex
	// Performance tracking: bounded ring of recent measurements, guarded by historyMutex.
	latencyHistory []LatencyMeasurement
	historyMutex   sync.RWMutex
}
// LatencyConfig holds configuration for latency monitoring.
// See DefaultLatencyConfig for sensible defaults.
type LatencyConfig struct {
	TargetLatency        time.Duration // Target latency to maintain
	MaxLatency           time.Duration // Maximum acceptable latency before optimization triggers
	OptimizationInterval time.Duration // How often to run optimization checks
	HistorySize          int           // Number of latency measurements to keep
	JitterThreshold      time.Duration // Jitter threshold above which optimization triggers
	AdaptiveThreshold    float64       // Fractional margin above target (0.0-1.0) before optimizing
}
// LatencyMeasurement represents a single latency measurement as stored in the
// monitor's history ring.
type LatencyMeasurement struct {
	Timestamp time.Time     // when the sample was recorded
	Latency   time.Duration // the measured latency
	Jitter    time.Duration // absolute deviation from the running average at record time
	Source    string        // Source of the measurement (e.g., "input", "output", "processing")
}
// OptimizationCallback is called when latency optimization is triggered.
// A non-nil error is logged by the monitor but does not stop other callbacks.
type OptimizationCallback func(metrics LatencyMetrics) error
// LatencyMetrics provides comprehensive latency statistics — a point-in-time
// snapshot of the monitor's atomic aggregates plus the computed trend.
type LatencyMetrics struct {
	Current     time.Duration // most recently recorded latency
	Average     time.Duration // exponential moving average
	Min         time.Duration // minimum observed
	Max         time.Duration // maximum observed
	Jitter      time.Duration // mean absolute deviation from the average
	SampleCount int64         // number of samples recorded
	Trend       LatencyTrend  // direction of recent change (see calculateTrend)
}
// LatencyTrend indicates the direction of latency changes over the last
// ten recorded measurements.
type LatencyTrend int

const (
	LatencyTrendStable     LatencyTrend = iota // no dominant direction
	LatencyTrendIncreasing                     // mostly rising samples
	LatencyTrendDecreasing                     // mostly falling samples
	LatencyTrendVolatile                       // many changes in both directions
)
// DefaultLatencyConfig returns a configuration tuned for interactive audio:
// a 50ms target with a 200ms hard ceiling, a 100-sample history, 20ms jitter
// tolerance, and an optimization pass every 5 seconds.
func DefaultLatencyConfig() LatencyConfig {
	cfg := LatencyConfig{
		TargetLatency:        50 * time.Millisecond,
		MaxLatency:           200 * time.Millisecond,
		OptimizationInterval: 5 * time.Second,
		HistorySize:          100,
		JitterThreshold:      20 * time.Millisecond,
	}
	// Trigger optimization when the average runs 80% above the target.
	cfg.AdaptiveThreshold = 0.8
	return cfg
}
// NewLatencyMonitor creates a latency monitoring system with the given
// configuration. Call Start to begin the optimization loop and Stop to
// tear it down.
func NewLatencyMonitor(config LatencyConfig, logger zerolog.Logger) *LatencyMonitor {
	ctx, cancel := context.WithCancel(context.Background())
	lm := &LatencyMonitor{
		config:         config,
		logger:         logger.With().Str("component", "latency-monitor").Logger(),
		ctx:            ctx,
		cancel:         cancel,
		latencyHistory: make([]LatencyMeasurement, 0, config.HistorySize),
		// Seed the minimum very high so the first real sample replaces it.
		minLatency: int64(time.Hour),
	}
	return lm
}
// Start begins latency monitoring and optimization by launching the
// background monitoring loop. Must be paired with Stop.
func (lm *LatencyMonitor) Start() {
	lm.wg.Add(1)
	go lm.monitoringLoop()
	lm.logger.Info().Msg("Latency monitor started")
}
// Stop cancels the monitoring loop and blocks until it has exited.
// NOTE(review): calling Stop twice cancels an already-cancelled context
// (harmless), but Start after Stop will not work — ctx is never recreated.
func (lm *LatencyMonitor) Stop() {
	lm.cancel()
	lm.wg.Wait()
	lm.logger.Info().Msg("Latency monitor stopped")
}
// RecordLatency records a new latency measurement.
//
// The atomic aggregates (current, min, max, EMA average, jitter sum) are
// updated lock-free on the hot path; the sample is then appended to the
// bounded history under historyMutex. Safe for concurrent callers.
func (lm *LatencyMonitor) RecordLatency(latency time.Duration, source string) {
	now := time.Now()
	latencyNanos := latency.Nanoseconds()
	// Update atomic counters
	atomic.StoreInt64(&lm.currentLatency, latencyNanos)
	atomic.AddInt64(&lm.latencySamples, 1)
	// Update min/max with CAS loops: retry until our value is installed or a
	// concurrent writer has already stored a better (smaller/larger) one.
	for {
		oldMin := atomic.LoadInt64(&lm.minLatency)
		if latencyNanos >= oldMin || atomic.CompareAndSwapInt64(&lm.minLatency, oldMin, latencyNanos) {
			break
		}
	}
	for {
		oldMax := atomic.LoadInt64(&lm.maxLatency)
		if latencyNanos <= oldMax || atomic.CompareAndSwapInt64(&lm.maxLatency, oldMax, latencyNanos) {
			break
		}
	}
	// Update rolling average using exponential moving average.
	// NOTE(review): this load/compute/store is not atomic as a unit, so
	// concurrent callers can lose an update — presumably acceptable for an
	// approximate EMA; confirm.
	oldAvg := atomic.LoadInt64(&lm.averageLatency)
	newAvg := oldAvg + (latencyNanos-oldAvg)/10 // Alpha = 0.1
	atomic.StoreInt64(&lm.averageLatency, newAvg)
	// Calculate jitter as the absolute deviation from the running average.
	jitter := latencyNanos - newAvg
	if jitter < 0 {
		jitter = -jitter
	}
	atomic.AddInt64(&lm.jitterAccumulator, jitter)
	// Store in history (bounded: shift left and overwrite the tail when full).
	lm.historyMutex.Lock()
	measurement := LatencyMeasurement{
		Timestamp: now,
		Latency:   latency,
		Jitter:    time.Duration(jitter),
		Source:    source,
	}
	if len(lm.latencyHistory) >= lm.config.HistorySize {
		// Remove oldest measurement
		copy(lm.latencyHistory, lm.latencyHistory[1:])
		lm.latencyHistory[len(lm.latencyHistory)-1] = measurement
	} else {
		lm.latencyHistory = append(lm.latencyHistory, measurement)
	}
	lm.historyMutex.Unlock()
}
// GetMetrics returns a point-in-time snapshot of the latency statistics.
// Each field is loaded atomically, so the snapshot is approximate (fields
// may come from slightly different instants) but race-free.
func (lm *LatencyMonitor) GetMetrics() LatencyMetrics {
	samples := atomic.LoadInt64(&lm.latencySamples)
	jitterSum := atomic.LoadInt64(&lm.jitterAccumulator)
	// Mean absolute deviation; zero until at least one sample exists.
	var avgJitter time.Duration
	if samples > 0 {
		avgJitter = time.Duration(jitterSum / samples)
	}
	return LatencyMetrics{
		Current:     time.Duration(atomic.LoadInt64(&lm.currentLatency)),
		Average:     time.Duration(atomic.LoadInt64(&lm.averageLatency)),
		Min:         time.Duration(atomic.LoadInt64(&lm.minLatency)),
		Max:         time.Duration(atomic.LoadInt64(&lm.maxLatency)),
		Jitter:      avgJitter,
		SampleCount: samples,
		Trend:       lm.calculateTrend(),
	}
}
// AddOptimizationCallback registers a callback to be invoked whenever the
// monitor decides optimization is needed. Safe for concurrent use.
func (lm *LatencyMonitor) AddOptimizationCallback(callback OptimizationCallback) {
	lm.mutex.Lock()
	defer lm.mutex.Unlock()
	lm.optimizationCallbacks = append(lm.optimizationCallbacks, callback)
}
// monitoringLoop periodically runs the optimization check until the
// monitor's context is cancelled. Runs on its own goroutine (see Start).
func (lm *LatencyMonitor) monitoringLoop() {
	defer lm.wg.Done()
	ticker := time.NewTicker(lm.config.OptimizationInterval)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
			lm.runOptimization()
		case <-lm.ctx.Done():
			return
		}
	}
}
// runOptimization compares the current metrics against the configured
// thresholds (hard max, adaptive average margin, jitter) and, when any is
// exceeded, records the optimization timestamp and invokes every registered
// callback with the snapshot.
func (lm *LatencyMonitor) runOptimization() {
	metrics := lm.GetMetrics()
	trigger := false
	if metrics.Current > lm.config.MaxLatency {
		trigger = true
		lm.logger.Warn().Dur("current_latency", metrics.Current).Dur("max_latency", lm.config.MaxLatency).Msg("Latency exceeds maximum threshold")
	}
	// Adaptive ceiling: target scaled up by the configured fractional margin.
	adaptiveThreshold := time.Duration(float64(lm.config.TargetLatency.Nanoseconds()) * (1.0 + lm.config.AdaptiveThreshold))
	if metrics.Average > adaptiveThreshold {
		trigger = true
		lm.logger.Info().Dur("average_latency", metrics.Average).Dur("threshold", adaptiveThreshold).Msg("Average latency above adaptive threshold")
	}
	if metrics.Jitter > lm.config.JitterThreshold {
		trigger = true
		lm.logger.Info().Dur("jitter", metrics.Jitter).Dur("threshold", lm.config.JitterThreshold).Msg("Jitter above threshold")
	}
	if !trigger {
		return
	}
	atomic.StoreInt64(&lm.lastOptimization, time.Now().UnixNano())
	// Snapshot the callbacks under the read lock, then run them unlocked so a
	// slow callback cannot block AddOptimizationCallback.
	lm.mutex.RLock()
	callbacks := append([]OptimizationCallback(nil), lm.optimizationCallbacks...)
	lm.mutex.RUnlock()
	for _, cb := range callbacks {
		if err := cb(metrics); err != nil {
			lm.logger.Error().Err(err).Msg("Optimization callback failed")
		}
	}
	lm.logger.Info().Interface("metrics", metrics).Msg("Latency optimization triggered")
}
// calculateTrend classifies the last ten history entries: mostly-rising,
// mostly-falling, highly changeable, or stable. Fewer than ten samples is
// always reported as stable.
func (lm *LatencyMonitor) calculateTrend() LatencyTrend {
	lm.historyMutex.RLock()
	defer lm.historyMutex.RUnlock()
	if len(lm.latencyHistory) < 10 {
		return LatencyTrendStable
	}
	recent := lm.latencyHistory[len(lm.latencyHistory)-10:]
	up, down := 0, 0
	for i := 1; i < len(recent); i++ {
		switch {
		case recent[i].Latency > recent[i-1].Latency:
			up++
		case recent[i].Latency < recent[i-1].Latency:
			down++
		}
	}
	// 9 deltas total: >6 in one direction is a trend; >7 non-flat is volatile.
	switch {
	case up > 6:
		return LatencyTrendIncreasing
	case down > 6:
		return LatencyTrendDecreasing
	case up+down > 7:
		return LatencyTrendVolatile
	default:
		return LatencyTrendStable
	}
}
// GetLatencyHistory returns a defensive copy of the recent measurements so
// callers can inspect them without racing the monitor.
func (lm *LatencyMonitor) GetLatencyHistory() []LatencyMeasurement {
	lm.historyMutex.RLock()
	defer lm.historyMutex.RUnlock()
	return append([]LatencyMeasurement(nil), lm.latencyHistory...)
}

View File

@ -1,198 +0,0 @@
package audio
import (
"encoding/json"
"net/http"
"runtime"
"time"
"github.com/jetkvm/kvm/internal/logging"
"github.com/rs/zerolog"
)
// MemoryMetrics provides comprehensive memory allocation statistics,
// aggregating the Go runtime view with the audio subsystem's pool stats.
// Produced by CollectMemoryMetrics and served as JSON by HandleMemoryMetrics.
type MemoryMetrics struct {
	// Runtime memory statistics (from runtime.ReadMemStats)
	RuntimeStats RuntimeMemoryStats `json:"runtime_stats"`
	// Audio buffer pool statistics
	BufferPools AudioBufferPoolStats `json:"buffer_pools"`
	// Zero-copy frame pool statistics
	ZeroCopyPool ZeroCopyFramePoolStats `json:"zero_copy_pool"`
	// Message pool statistics
	MessagePool MessagePoolStats `json:"message_pool"`
	// Batch processor statistics (zero value when no processor exists)
	BatchProcessor BatchProcessorMemoryStats `json:"batch_processor,omitempty"`
	// Collection timestamp
	Timestamp time.Time `json:"timestamp"`
}
// RuntimeMemoryStats provides Go runtime memory statistics — a
// JSON-serializable mirror of the runtime.MemStats fields copied in
// CollectMemoryMetrics.
type RuntimeMemoryStats struct {
	Alloc         uint64  `json:"alloc"`           // Bytes allocated and not yet freed
	TotalAlloc    uint64  `json:"total_alloc"`     // Total bytes allocated (cumulative)
	Sys           uint64  `json:"sys"`             // Total bytes obtained from OS
	Lookups       uint64  `json:"lookups"`         // Number of pointer lookups
	Mallocs       uint64  `json:"mallocs"`         // Number of mallocs
	Frees         uint64  `json:"frees"`           // Number of frees
	HeapAlloc     uint64  `json:"heap_alloc"`      // Bytes allocated and not yet freed (heap)
	HeapSys       uint64  `json:"heap_sys"`        // Bytes obtained from OS for heap
	HeapIdle      uint64  `json:"heap_idle"`       // Bytes in idle spans
	HeapInuse     uint64  `json:"heap_inuse"`      // Bytes in non-idle spans
	HeapReleased  uint64  `json:"heap_released"`   // Bytes released to OS
	HeapObjects   uint64  `json:"heap_objects"`    // Total number of allocated objects
	StackInuse    uint64  `json:"stack_inuse"`     // Bytes used by stack spans
	StackSys      uint64  `json:"stack_sys"`       // Bytes obtained from OS for stack
	MSpanInuse    uint64  `json:"mspan_inuse"`     // Bytes used by mspan structures
	MSpanSys      uint64  `json:"mspan_sys"`       // Bytes obtained from OS for mspan
	MCacheInuse   uint64  `json:"mcache_inuse"`    // Bytes used by mcache structures
	MCacheSys     uint64  `json:"mcache_sys"`      // Bytes obtained from OS for mcache
	BuckHashSys   uint64  `json:"buck_hash_sys"`   // Bytes used by profiling bucket hash table
	GCSys         uint64  `json:"gc_sys"`          // Bytes used for garbage collection metadata
	OtherSys      uint64  `json:"other_sys"`       // Bytes used for other system allocations
	NextGC        uint64  `json:"next_gc"`         // Target heap size for next GC
	LastGC        uint64  `json:"last_gc"`         // Time of last GC (nanoseconds since epoch)
	PauseTotalNs  uint64  `json:"pause_total_ns"`  // Total GC pause time
	NumGC         uint32  `json:"num_gc"`          // Number of completed GC cycles
	NumForcedGC   uint32  `json:"num_forced_gc"`   // Number of forced GC cycles
	GCCPUFraction float64 `json:"gc_cpu_fraction"` // Fraction of CPU time used by GC
}
// BatchProcessorMemoryStats provides batch processor memory statistics.
// Initialized is false (zero value) when no batch processor exists.
type BatchProcessorMemoryStats struct {
	Initialized bool            `json:"initialized"` // whether a processor was found at collection time
	Running     bool            `json:"running"`     // processor.IsRunning() at collection time
	Stats       BatchAudioStats `json:"stats"`
	// BufferPool is currently never populated (sync.Pool exposes no stats).
	BufferPool AudioBufferPoolDetailedStats `json:"buffer_pool,omitempty"`
}
// GetBatchAudioProcessor is defined in batch_audio.go
// BatchAudioStats is defined in batch_audio.go
// memoryMetricsLogger is the lazily-created logger shared by this file's
// metrics handlers.
var memoryMetricsLogger *zerolog.Logger

// getMemoryMetricsLogger returns the shared memory-metrics logger, creating
// it on first use.
//
// NOTE(review): the lazy init is not synchronized; two concurrent first
// callers can race on memoryMetricsLogger. Harmless in practice (both write
// equivalent loggers) but technically a data race — consider sync.Once.
func getMemoryMetricsLogger() *zerolog.Logger {
	if memoryMetricsLogger == nil {
		logger := logging.GetDefaultLogger().With().Str("component", "memory-metrics").Logger()
		memoryMetricsLogger = &logger
	}
	return memoryMetricsLogger
}
// CollectMemoryMetrics gathers comprehensive memory allocation statistics:
// a snapshot of runtime.MemStats plus the audio buffer/zero-copy/message
// pool stats and (when present) batch processor state, stamped with the
// collection time. Note that runtime.ReadMemStats briefly stops the world.
func CollectMemoryMetrics() MemoryMetrics {
	// Collect runtime memory statistics
	var m runtime.MemStats
	runtime.ReadMemStats(&m)
	// Field-for-field copy into the JSON-serializable mirror struct.
	runtimeStats := RuntimeMemoryStats{
		Alloc:         m.Alloc,
		TotalAlloc:    m.TotalAlloc,
		Sys:           m.Sys,
		Lookups:       m.Lookups,
		Mallocs:       m.Mallocs,
		Frees:         m.Frees,
		HeapAlloc:     m.HeapAlloc,
		HeapSys:       m.HeapSys,
		HeapIdle:      m.HeapIdle,
		HeapInuse:     m.HeapInuse,
		HeapReleased:  m.HeapReleased,
		HeapObjects:   m.HeapObjects,
		StackInuse:    m.StackInuse,
		StackSys:      m.StackSys,
		MSpanInuse:    m.MSpanInuse,
		MSpanSys:      m.MSpanSys,
		MCacheInuse:   m.MCacheInuse,
		MCacheSys:     m.MCacheSys,
		BuckHashSys:   m.BuckHashSys,
		GCSys:         m.GCSys,
		OtherSys:      m.OtherSys,
		NextGC:        m.NextGC,
		LastGC:        m.LastGC,
		PauseTotalNs:  m.PauseTotalNs,
		NumGC:         m.NumGC,
		NumForcedGC:   m.NumForcedGC,
		GCCPUFraction: m.GCCPUFraction,
	}
	// Collect audio buffer pool statistics
	bufferPoolStats := GetAudioBufferPoolStats()
	// Collect zero-copy frame pool statistics
	zeroCopyStats := GetGlobalZeroCopyPoolStats()
	// Collect message pool statistics
	messagePoolStats := GetGlobalMessagePoolStats()
	// Collect batch processor statistics if available (zero value otherwise)
	var batchStats BatchProcessorMemoryStats
	if processor := GetBatchAudioProcessor(); processor != nil {
		batchStats.Initialized = true
		batchStats.Running = processor.IsRunning()
		batchStats.Stats = processor.GetStats()
		// Note: BatchAudioProcessor uses sync.Pool, detailed stats not available
	}
	return MemoryMetrics{
		RuntimeStats:   runtimeStats,
		BufferPools:    bufferPoolStats,
		ZeroCopyPool:   zeroCopyStats,
		MessagePool:    messagePoolStats,
		BatchProcessor: batchStats,
		Timestamp:      time.Now(),
	}
}
// HandleMemoryMetrics is an HTTP handler that serves the current memory
// metrics as JSON. Only GET is allowed; responses are marked non-cacheable.
func HandleMemoryMetrics(w http.ResponseWriter, r *http.Request) {
	logger := getMemoryMetricsLogger()
	if r.Method != http.MethodGet {
		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
		return
	}
	metrics := CollectMemoryMetrics()
	w.Header().Set("Content-Type", "application/json")
	w.Header().Set("Cache-Control", "no-cache")
	err := json.NewEncoder(w).Encode(metrics)
	if err != nil {
		logger.Error().Err(err).Msg("failed to encode memory metrics")
		http.Error(w, "Internal server error", http.StatusInternalServerError)
		return
	}
	logger.Debug().Msg("memory metrics served")
}
// LogMemoryMetrics logs a one-line snapshot of the current memory metrics
// (heap usage, GC activity, and pool hit rates) at info level for debugging.
func LogMemoryMetrics() {
	logger := getMemoryMetricsLogger()
	metrics := CollectMemoryMetrics()
	logger.Info().
		Uint64("heap_alloc_mb", metrics.RuntimeStats.HeapAlloc/1024/1024).
		Uint64("heap_sys_mb", metrics.RuntimeStats.HeapSys/1024/1024).
		Uint64("heap_objects", metrics.RuntimeStats.HeapObjects).
		Uint32("num_gc", metrics.RuntimeStats.NumGC).
		Float64("gc_cpu_fraction", metrics.RuntimeStats.GCCPUFraction).
		Float64("buffer_pool_hit_rate", metrics.BufferPools.FramePoolHitRate).
		Float64("zero_copy_hit_rate", metrics.ZeroCopyPool.HitRate).
		Float64("message_pool_hit_rate", metrics.MessagePool.HitRate).
		Msg("memory metrics snapshot")
}
// StartMemoryMetricsLogging starts a background goroutine that logs memory
// metrics every interval.
//
// NOTE(review): the goroutine has no stop mechanism and runs for the process
// lifetime; each call starts another one — callers must invoke this at most
// once. Confirm whether a context/stop channel should be threaded through.
func StartMemoryMetricsLogging(interval time.Duration) {
	logger := getMemoryMetricsLogger()
	logger.Info().Dur("interval", interval).Msg("starting memory metrics logging")
	go func() {
		ticker := time.NewTicker(interval)
		defer ticker.Stop()
		for range ticker.C {
			LogMemoryMetrics()
		}
	}()
}

View File

@ -1,480 +0,0 @@
package audio
import (
"sync"
"sync/atomic"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
)
var (
// Adaptive buffer metrics
adaptiveInputBufferSize = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_adaptive_input_buffer_size_bytes",
Help: "Current adaptive input buffer size in bytes",
},
)
adaptiveOutputBufferSize = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_adaptive_output_buffer_size_bytes",
Help: "Current adaptive output buffer size in bytes",
},
)
adaptiveBufferAdjustmentsTotal = promauto.NewCounter(
prometheus.CounterOpts{
Name: "jetkvm_adaptive_buffer_adjustments_total",
Help: "Total number of adaptive buffer size adjustments",
},
)
adaptiveSystemCpuPercent = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_adaptive_system_cpu_percent",
Help: "System CPU usage percentage used by adaptive buffer manager",
},
)
adaptiveSystemMemoryPercent = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_adaptive_system_memory_percent",
Help: "System memory usage percentage used by adaptive buffer manager",
},
)
// Socket buffer metrics
socketBufferSizeGauge = promauto.NewGaugeVec(
prometheus.GaugeOpts{
Name: "jetkvm_audio_socket_buffer_size_bytes",
Help: "Current socket buffer size in bytes",
},
[]string{"component", "buffer_type"}, // buffer_type: send, receive
)
socketBufferUtilizationGauge = promauto.NewGaugeVec(
prometheus.GaugeOpts{
Name: "jetkvm_audio_socket_buffer_utilization_percent",
Help: "Socket buffer utilization percentage",
},
[]string{"component", "buffer_type"}, // buffer_type: send, receive
)
socketBufferOverflowCounter = promauto.NewCounterVec(
prometheus.CounterOpts{
Name: "jetkvm_audio_socket_buffer_overflow_total",
Help: "Total number of socket buffer overflows",
},
[]string{"component", "buffer_type"}, // buffer_type: send, receive
)
// Audio output metrics
audioFramesReceivedTotal = promauto.NewCounter(
prometheus.CounterOpts{
Name: "jetkvm_audio_frames_received_total",
Help: "Total number of audio frames received",
},
)
audioFramesDroppedTotal = promauto.NewCounter(
prometheus.CounterOpts{
Name: "jetkvm_audio_frames_dropped_total",
Help: "Total number of audio frames dropped",
},
)
audioBytesProcessedTotal = promauto.NewCounter(
prometheus.CounterOpts{
Name: "jetkvm_audio_bytes_processed_total",
Help: "Total number of audio bytes processed",
},
)
audioConnectionDropsTotal = promauto.NewCounter(
prometheus.CounterOpts{
Name: "jetkvm_audio_connection_drops_total",
Help: "Total number of audio connection drops",
},
)
audioAverageLatencySeconds = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_audio_average_latency_seconds",
Help: "Average audio latency in seconds",
},
)
audioLastFrameTimestamp = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_audio_last_frame_timestamp_seconds",
Help: "Timestamp of the last audio frame received",
},
)
// Microphone input metrics
microphoneFramesSentTotal = promauto.NewCounter(
prometheus.CounterOpts{
Name: "jetkvm_microphone_frames_sent_total",
Help: "Total number of microphone frames sent",
},
)
microphoneFramesDroppedTotal = promauto.NewCounter(
prometheus.CounterOpts{
Name: "jetkvm_microphone_frames_dropped_total",
Help: "Total number of microphone frames dropped",
},
)
microphoneBytesProcessedTotal = promauto.NewCounter(
prometheus.CounterOpts{
Name: "jetkvm_microphone_bytes_processed_total",
Help: "Total number of microphone bytes processed",
},
)
microphoneConnectionDropsTotal = promauto.NewCounter(
prometheus.CounterOpts{
Name: "jetkvm_microphone_connection_drops_total",
Help: "Total number of microphone connection drops",
},
)
microphoneAverageLatencySeconds = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_microphone_average_latency_seconds",
Help: "Average microphone latency in seconds",
},
)
microphoneLastFrameTimestamp = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_microphone_last_frame_timestamp_seconds",
Help: "Timestamp of the last microphone frame sent",
},
)
// Audio subprocess process metrics
audioProcessCpuPercent = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_audio_process_cpu_percent",
Help: "CPU usage percentage of audio output subprocess",
},
)
audioProcessMemoryPercent = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_audio_process_memory_percent",
Help: "Memory usage percentage of audio output subprocess",
},
)
audioProcessMemoryRssBytes = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_audio_process_memory_rss_bytes",
Help: "RSS memory usage in bytes of audio output subprocess",
},
)
audioProcessMemoryVmsBytes = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_audio_process_memory_vms_bytes",
Help: "VMS memory usage in bytes of audio output subprocess",
},
)
audioProcessRunning = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_audio_process_running",
Help: "Whether audio output subprocess is running (1=running, 0=stopped)",
},
)
// Microphone subprocess process metrics
microphoneProcessCpuPercent = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_microphone_process_cpu_percent",
Help: "CPU usage percentage of microphone input subprocess",
},
)
microphoneProcessMemoryPercent = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_microphone_process_memory_percent",
Help: "Memory usage percentage of microphone input subprocess",
},
)
microphoneProcessMemoryRssBytes = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_microphone_process_memory_rss_bytes",
Help: "RSS memory usage in bytes of microphone input subprocess",
},
)
microphoneProcessMemoryVmsBytes = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_microphone_process_memory_vms_bytes",
Help: "VMS memory usage in bytes of microphone input subprocess",
},
)
microphoneProcessRunning = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_microphone_process_running",
Help: "Whether microphone input subprocess is running (1=running, 0=stopped)",
},
)
// Audio configuration metrics
audioConfigQuality = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_audio_config_quality",
Help: "Current audio quality setting (0=Low, 1=Medium, 2=High, 3=Ultra)",
},
)
audioConfigBitrate = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_audio_config_bitrate_kbps",
Help: "Current audio bitrate in kbps",
},
)
audioConfigSampleRate = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_audio_config_sample_rate_hz",
Help: "Current audio sample rate in Hz",
},
)
audioConfigChannels = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_audio_config_channels",
Help: "Current audio channel count",
},
)
microphoneConfigQuality = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_microphone_config_quality",
Help: "Current microphone quality setting (0=Low, 1=Medium, 2=High, 3=Ultra)",
},
)
microphoneConfigBitrate = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_microphone_config_bitrate_kbps",
Help: "Current microphone bitrate in kbps",
},
)
microphoneConfigSampleRate = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_microphone_config_sample_rate_hz",
Help: "Current microphone sample rate in Hz",
},
)
microphoneConfigChannels = promauto.NewGauge(
prometheus.GaugeOpts{
Name: "jetkvm_microphone_config_channels",
Help: "Current microphone channel count",
},
)
// Metrics update tracking
metricsUpdateMutex sync.RWMutex
lastMetricsUpdate int64
// Counter value tracking (since prometheus counters don't have Get() method)
audioFramesReceivedValue int64
audioFramesDroppedValue int64
audioBytesProcessedValue int64
audioConnectionDropsValue int64
micFramesSentValue int64
micFramesDroppedValue int64
micBytesProcessedValue int64
micConnectionDropsValue int64
)
// UpdateAudioMetrics updates Prometheus metrics with current audio data.
//
// The incoming metrics carry cumulative totals, while Prometheus counters may
// only increase. Each total is atomically swapped into a tracking variable
// and only a positive delta is added; a non-positive delta (e.g. after a
// subprocess restart resets its counters) is skipped.
func UpdateAudioMetrics(metrics AudioMetrics) {
	oldReceived := atomic.SwapInt64(&audioFramesReceivedValue, metrics.FramesReceived)
	if metrics.FramesReceived > oldReceived {
		audioFramesReceivedTotal.Add(float64(metrics.FramesReceived - oldReceived))
	}
	oldDropped := atomic.SwapInt64(&audioFramesDroppedValue, metrics.FramesDropped)
	if metrics.FramesDropped > oldDropped {
		audioFramesDroppedTotal.Add(float64(metrics.FramesDropped - oldDropped))
	}
	oldBytes := atomic.SwapInt64(&audioBytesProcessedValue, metrics.BytesProcessed)
	if metrics.BytesProcessed > oldBytes {
		audioBytesProcessedTotal.Add(float64(metrics.BytesProcessed - oldBytes))
	}
	oldDrops := atomic.SwapInt64(&audioConnectionDropsValue, metrics.ConnectionDrops)
	if metrics.ConnectionDrops > oldDrops {
		audioConnectionDropsTotal.Add(float64(metrics.ConnectionDrops - oldDrops))
	}
	// Update gauges (latency in seconds; frame timestamp only when present)
	audioAverageLatencySeconds.Set(float64(metrics.AverageLatency.Nanoseconds()) / 1e9)
	if !metrics.LastFrameTime.IsZero() {
		audioLastFrameTimestamp.Set(float64(metrics.LastFrameTime.Unix()))
	}
	atomic.StoreInt64(&lastMetricsUpdate, time.Now().Unix())
}
// UpdateMicrophoneMetrics updates Prometheus metrics with current microphone
// data. Mirrors UpdateAudioMetrics: cumulative totals are diffed against
// atomically-swapped tracking variables, and only positive deltas are added
// so counter resets in the subprocess never push a decrease.
func UpdateMicrophoneMetrics(metrics AudioInputMetrics) {
	oldSent := atomic.SwapInt64(&micFramesSentValue, metrics.FramesSent)
	if metrics.FramesSent > oldSent {
		microphoneFramesSentTotal.Add(float64(metrics.FramesSent - oldSent))
	}
	oldDropped := atomic.SwapInt64(&micFramesDroppedValue, metrics.FramesDropped)
	if metrics.FramesDropped > oldDropped {
		microphoneFramesDroppedTotal.Add(float64(metrics.FramesDropped - oldDropped))
	}
	oldBytes := atomic.SwapInt64(&micBytesProcessedValue, metrics.BytesProcessed)
	if metrics.BytesProcessed > oldBytes {
		microphoneBytesProcessedTotal.Add(float64(metrics.BytesProcessed - oldBytes))
	}
	oldDrops := atomic.SwapInt64(&micConnectionDropsValue, metrics.ConnectionDrops)
	if metrics.ConnectionDrops > oldDrops {
		microphoneConnectionDropsTotal.Add(float64(metrics.ConnectionDrops - oldDrops))
	}
	// Update gauges (latency in seconds; frame timestamp only when present)
	microphoneAverageLatencySeconds.Set(float64(metrics.AverageLatency.Nanoseconds()) / 1e9)
	if !metrics.LastFrameTime.IsZero() {
		microphoneLastFrameTimestamp.Set(float64(metrics.LastFrameTime.Unix()))
	}
	atomic.StoreInt64(&lastMetricsUpdate, time.Now().Unix())
}
// UpdateAudioProcessMetrics publishes the audio output subprocess's resource
// usage (CPU, memory) and running state to Prometheus.
func UpdateAudioProcessMetrics(metrics ProcessMetrics, isRunning bool) {
	metricsUpdateMutex.Lock()
	defer metricsUpdateMutex.Unlock()
	audioProcessCpuPercent.Set(metrics.CPUPercent)
	audioProcessMemoryPercent.Set(metrics.MemoryPercent)
	audioProcessMemoryRssBytes.Set(float64(metrics.MemoryRSS))
	audioProcessMemoryVmsBytes.Set(float64(metrics.MemoryVMS))
	// Encode the boolean running state as a 0/1 gauge.
	running := 0.0
	if isRunning {
		running = 1.0
	}
	audioProcessRunning.Set(running)
	atomic.StoreInt64(&lastMetricsUpdate, time.Now().Unix())
}
// UpdateMicrophoneProcessMetrics updates Prometheus metrics with microphone
// subprocess data. All gauge writes are serialized by metricsUpdateMutex.
func UpdateMicrophoneProcessMetrics(metrics ProcessMetrics, isRunning bool) {
	metricsUpdateMutex.Lock()
	defer metricsUpdateMutex.Unlock()

	microphoneProcessCpuPercent.Set(metrics.CPUPercent)
	microphoneProcessMemoryPercent.Set(metrics.MemoryPercent)
	microphoneProcessMemoryRssBytes.Set(float64(metrics.MemoryRSS))
	microphoneProcessMemoryVmsBytes.Set(float64(metrics.MemoryVMS))

	// Encode the boolean running state as a 0/1 gauge value.
	var running float64
	if isRunning {
		running = 1
	}
	microphoneProcessRunning.Set(running)

	// Record when metrics were last refreshed (read by GetLastMetricsUpdate).
	atomic.StoreInt64(&lastMetricsUpdate, time.Now().Unix())
}
// UpdateAudioConfigMetrics updates Prometheus metrics with audio configuration.
// Gauge writes are serialized by metricsUpdateMutex, consistent with the other
// config/process updaters.
func UpdateAudioConfigMetrics(config AudioConfig) {
	metricsUpdateMutex.Lock()
	defer metricsUpdateMutex.Unlock()
	audioConfigQuality.Set(float64(config.Quality))
	audioConfigBitrate.Set(float64(config.Bitrate))
	audioConfigSampleRate.Set(float64(config.SampleRate))
	audioConfigChannels.Set(float64(config.Channels))
	// Record when metrics were last refreshed (read by GetLastMetricsUpdate).
	atomic.StoreInt64(&lastMetricsUpdate, time.Now().Unix())
}
// UpdateMicrophoneConfigMetrics updates Prometheus metrics with microphone
// configuration. Gauge writes are serialized by metricsUpdateMutex.
func UpdateMicrophoneConfigMetrics(config AudioConfig) {
	metricsUpdateMutex.Lock()
	defer metricsUpdateMutex.Unlock()
	microphoneConfigQuality.Set(float64(config.Quality))
	microphoneConfigBitrate.Set(float64(config.Bitrate))
	microphoneConfigSampleRate.Set(float64(config.SampleRate))
	microphoneConfigChannels.Set(float64(config.Channels))
	// Record when metrics were last refreshed (read by GetLastMetricsUpdate).
	atomic.StoreInt64(&lastMetricsUpdate, time.Now().Unix())
}
// UpdateAdaptiveBufferMetrics updates Prometheus metrics with adaptive buffer
// information: current input/output buffer sizes, the system CPU/memory load
// driving the adaptation, and whether an adjustment was made this cycle.
func UpdateAdaptiveBufferMetrics(inputBufferSize, outputBufferSize int, cpuPercent, memoryPercent float64, adjustmentMade bool) {
	metricsUpdateMutex.Lock()
	defer metricsUpdateMutex.Unlock()
	adaptiveInputBufferSize.Set(float64(inputBufferSize))
	adaptiveOutputBufferSize.Set(float64(outputBufferSize))
	adaptiveSystemCpuPercent.Set(cpuPercent)
	adaptiveSystemMemoryPercent.Set(memoryPercent)
	// Count adjustments as a monotonically increasing total.
	if adjustmentMade {
		adaptiveBufferAdjustmentsTotal.Inc()
	}
	// Record when metrics were last refreshed (read by GetLastMetricsUpdate).
	atomic.StoreInt64(&lastMetricsUpdate, time.Now().Unix())
}
// GetLastMetricsUpdate returns the timestamp of the last metrics update
// (second precision, read atomically from lastMetricsUpdate).
func GetLastMetricsUpdate() time.Time {
	return time.Unix(atomic.LoadInt64(&lastMetricsUpdate), 0)
}
// StartMetricsUpdater starts a goroutine that periodically (every 5 seconds)
// refreshes all Prometheus metrics from the audio subsystem: output metrics,
// microphone input metrics, microphone subprocess process metrics (when the
// IPC supervisor is available), and both audio/microphone configurations.
//
// NOTE(review): the goroutine has no stop mechanism (no context or done
// channel), so it runs for the lifetime of the process — confirm intentional.
func StartMetricsUpdater() {
	go func() {
		ticker := time.NewTicker(5 * time.Second) // Update every 5 seconds
		defer ticker.Stop()
		for range ticker.C {
			// Update audio output metrics
			audioMetrics := GetAudioMetrics()
			UpdateAudioMetrics(audioMetrics)
			// Update microphone input metrics
			micMetrics := GetAudioInputMetrics()
			UpdateMicrophoneMetrics(micMetrics)
			// Update microphone subprocess process metrics; both supervisor
			// and its process metrics may be absent, so guard each level.
			if inputSupervisor := GetAudioInputIPCSupervisor(); inputSupervisor != nil {
				if processMetrics := inputSupervisor.GetProcessMetrics(); processMetrics != nil {
					UpdateMicrophoneProcessMetrics(*processMetrics, inputSupervisor.IsRunning())
				}
			}
			// Update audio configuration metrics
			audioConfig := GetAudioConfig()
			UpdateAudioConfigMetrics(audioConfig)
			micConfig := GetMicrophoneConfig()
			UpdateMicrophoneConfigMetrics(micConfig)
		}
	}()
}

View File

@ -6,76 +6,96 @@ import (
"unsafe" "unsafe"
) )
// MicrophoneContentionManager manages microphone access with cooldown periods // MicrophoneContentionManager provides optimized microphone operation locking
// with reduced contention using atomic operations and conditional locking
type MicrophoneContentionManager struct { type MicrophoneContentionManager struct {
// Atomic fields MUST be first for ARM32 alignment (int64 fields need 8-byte alignment) // Atomic fields (must be 64-bit aligned on 32-bit systems)
lastOpNano int64 lastOpNano int64 // Unix nanoseconds of last operation
cooldownNanos int64 cooldownNanos int64 // Cooldown duration in nanoseconds
operationID int64 operationID int64 // Incremental operation ID for tracking
lockPtr unsafe.Pointer // Lock-free state flags (using atomic.Pointer for lock-free updates)
lockPtr unsafe.Pointer // *sync.Mutex - conditionally allocated
} }
// NewMicrophoneContentionManager creates a new microphone contention manager
func NewMicrophoneContentionManager(cooldown time.Duration) *MicrophoneContentionManager { func NewMicrophoneContentionManager(cooldown time.Duration) *MicrophoneContentionManager {
return &MicrophoneContentionManager{ return &MicrophoneContentionManager{
cooldownNanos: int64(cooldown), cooldownNanos: int64(cooldown),
} }
} }
// OperationResult represents the result of attempting a microphone operation
type OperationResult struct { type OperationResult struct {
Allowed bool Allowed bool
RemainingCooldown time.Duration RemainingCooldown time.Duration
OperationID int64 OperationID int64
} }
// TryOperation attempts to perform a microphone operation with optimized contention handling
func (mcm *MicrophoneContentionManager) TryOperation() OperationResult { func (mcm *MicrophoneContentionManager) TryOperation() OperationResult {
now := time.Now().UnixNano() now := time.Now().UnixNano()
cooldown := atomic.LoadInt64(&mcm.cooldownNanos) cooldown := atomic.LoadInt64(&mcm.cooldownNanos)
// Fast path: check if we're clearly outside cooldown period using atomic read
lastOp := atomic.LoadInt64(&mcm.lastOpNano) lastOp := atomic.LoadInt64(&mcm.lastOpNano)
elapsed := now - lastOp elapsed := now - lastOp
if elapsed >= cooldown { if elapsed >= cooldown {
// Attempt atomic update without locking
if atomic.CompareAndSwapInt64(&mcm.lastOpNano, lastOp, now) { if atomic.CompareAndSwapInt64(&mcm.lastOpNano, lastOp, now) {
opID := atomic.AddInt64(&mcm.operationID, 1) opID := atomic.AddInt64(&mcm.operationID, 1)
return OperationResult{ return OperationResult{
Allowed: true, Allowed: true,
RemainingCooldown: 0, RemainingCooldown: 0,
OperationID: opID, OperationID: opID,
}
}
// Retry once if CAS failed
lastOp = atomic.LoadInt64(&mcm.lastOpNano)
elapsed = now - lastOp
if elapsed >= cooldown && atomic.CompareAndSwapInt64(&mcm.lastOpNano, lastOp, now) {
opID := atomic.AddInt64(&mcm.operationID, 1)
return OperationResult{
Allowed: true,
RemainingCooldown: 0,
OperationID: opID,
} }
} }
} }
remaining := time.Duration(cooldown - elapsed) // Slow path: potential contention, check remaining cooldown
currentLastOp := atomic.LoadInt64(&mcm.lastOpNano)
currentElapsed := now - currentLastOp
if currentElapsed >= cooldown {
// Race condition: another operation might have updated lastOpNano
// Try once more with CAS
if atomic.CompareAndSwapInt64(&mcm.lastOpNano, currentLastOp, now) {
opID := atomic.AddInt64(&mcm.operationID, 1)
return OperationResult{
Allowed: true,
RemainingCooldown: 0,
OperationID: opID,
}
}
// If CAS failed, fall through to cooldown calculation
currentLastOp = atomic.LoadInt64(&mcm.lastOpNano)
currentElapsed = now - currentLastOp
}
remaining := time.Duration(cooldown - currentElapsed)
if remaining < 0 { if remaining < 0 {
remaining = 0 remaining = 0
} }
return OperationResult{ return OperationResult{
Allowed: false, Allowed: false,
RemainingCooldown: remaining, RemainingCooldown: remaining,
OperationID: atomic.LoadInt64(&mcm.operationID), OperationID: atomic.LoadInt64(&mcm.operationID),
} }
} }
// SetCooldown updates the cooldown duration atomically
func (mcm *MicrophoneContentionManager) SetCooldown(cooldown time.Duration) { func (mcm *MicrophoneContentionManager) SetCooldown(cooldown time.Duration) {
atomic.StoreInt64(&mcm.cooldownNanos, int64(cooldown)) atomic.StoreInt64(&mcm.cooldownNanos, int64(cooldown))
} }
// GetCooldown returns the current cooldown duration
func (mcm *MicrophoneContentionManager) GetCooldown() time.Duration { func (mcm *MicrophoneContentionManager) GetCooldown() time.Duration {
return time.Duration(atomic.LoadInt64(&mcm.cooldownNanos)) return time.Duration(atomic.LoadInt64(&mcm.cooldownNanos))
} }
// GetLastOperationTime returns the time of the last operation
func (mcm *MicrophoneContentionManager) GetLastOperationTime() time.Time { func (mcm *MicrophoneContentionManager) GetLastOperationTime() time.Time {
nanos := atomic.LoadInt64(&mcm.lastOpNano) nanos := atomic.LoadInt64(&mcm.lastOpNano)
if nanos == 0 { if nanos == 0 {
@ -84,44 +104,55 @@ func (mcm *MicrophoneContentionManager) GetLastOperationTime() time.Time {
return time.Unix(0, nanos) return time.Unix(0, nanos)
} }
// GetOperationCount returns the total number of successful operations
func (mcm *MicrophoneContentionManager) GetOperationCount() int64 { func (mcm *MicrophoneContentionManager) GetOperationCount() int64 {
return atomic.LoadInt64(&mcm.operationID) return atomic.LoadInt64(&mcm.operationID)
} }
// Reset resets the contention manager state
func (mcm *MicrophoneContentionManager) Reset() { func (mcm *MicrophoneContentionManager) Reset() {
atomic.StoreInt64(&mcm.lastOpNano, 0) atomic.StoreInt64(&mcm.lastOpNano, 0)
atomic.StoreInt64(&mcm.operationID, 0) atomic.StoreInt64(&mcm.operationID, 0)
} }
// Global instance for microphone contention management
var ( var (
globalMicContentionManager unsafe.Pointer globalMicContentionManager unsafe.Pointer // *MicrophoneContentionManager
micContentionInitialized int32 micContentionInitialized int32
) )
// GetMicrophoneContentionManager returns the global microphone contention manager
func GetMicrophoneContentionManager() *MicrophoneContentionManager { func GetMicrophoneContentionManager() *MicrophoneContentionManager {
ptr := atomic.LoadPointer(&globalMicContentionManager) ptr := atomic.LoadPointer(&globalMicContentionManager)
if ptr != nil { if ptr != nil {
return (*MicrophoneContentionManager)(ptr) return (*MicrophoneContentionManager)(ptr)
} }
// Initialize on first use
if atomic.CompareAndSwapInt32(&micContentionInitialized, 0, 1) { if atomic.CompareAndSwapInt32(&micContentionInitialized, 0, 1) {
manager := NewMicrophoneContentionManager(200 * time.Millisecond) manager := NewMicrophoneContentionManager(200 * time.Millisecond)
atomic.StorePointer(&globalMicContentionManager, unsafe.Pointer(manager)) atomic.StorePointer(&globalMicContentionManager, unsafe.Pointer(manager))
return manager return manager
} }
// Another goroutine initialized it, try again
ptr = atomic.LoadPointer(&globalMicContentionManager) ptr = atomic.LoadPointer(&globalMicContentionManager)
if ptr != nil { if ptr != nil {
return (*MicrophoneContentionManager)(ptr) return (*MicrophoneContentionManager)(ptr)
} }
// Fallback: create a new manager (should rarely happen)
return NewMicrophoneContentionManager(200 * time.Millisecond) return NewMicrophoneContentionManager(200 * time.Millisecond)
} }
// TryMicrophoneOperation provides a convenient global function for microphone operations
func TryMicrophoneOperation() OperationResult { func TryMicrophoneOperation() OperationResult {
return GetMicrophoneContentionManager().TryOperation() manager := GetMicrophoneContentionManager()
return manager.TryOperation()
} }
// SetMicrophoneCooldown updates the global microphone cooldown
func SetMicrophoneCooldown(cooldown time.Duration) { func SetMicrophoneCooldown(cooldown time.Duration) {
GetMicrophoneContentionManager().SetCooldown(cooldown) manager := GetMicrophoneContentionManager()
} manager.SetCooldown(cooldown)
}

View File

@ -0,0 +1,115 @@
package audio
import (
"sync/atomic"
"unsafe"
)
var (
// Use unsafe.Pointer for atomic operations instead of mutex
globalNonBlockingManager unsafe.Pointer // *NonBlockingAudioManager
)
// loadManager atomically loads the global manager, returning nil when unset.
func loadManager() *NonBlockingAudioManager {
	// Converting a nil unsafe.Pointer yields a nil typed pointer, so no
	// explicit nil check is needed.
	return (*NonBlockingAudioManager)(atomic.LoadPointer(&globalNonBlockingManager))
}
// storeManager atomically stores the global manager; passing nil clears the slot.
func storeManager(manager *NonBlockingAudioManager) {
	atomic.StorePointer(&globalNonBlockingManager, unsafe.Pointer(manager))
}
// compareAndSwapManager atomically compares and swaps the global manager,
// returning true when the swap was performed. Used to win create/clear races
// without a mutex.
func compareAndSwapManager(old, new *NonBlockingAudioManager) bool {
	return atomic.CompareAndSwapPointer(&globalNonBlockingManager,
		unsafe.Pointer(old), unsafe.Pointer(new))
}
// StartNonBlockingAudioStreaming starts the non-blocking audio streaming
// system, sending each encoded output frame via send. Idempotent: returns nil
// if output is already running.
//
// The retry loop fixes a latent nil dereference in the original: after a
// failed compareAndSwapManager, a second loadManager() could still return nil
// (the racing winner may have been stopped concurrently, clearing the slot),
// and the original then called a method on that nil manager.
func StartNonBlockingAudioStreaming(send func([]byte)) error {
	for {
		manager := loadManager()
		if manager != nil {
			if manager.IsOutputRunning() {
				return nil // Already running, this is not an error
			}
			return manager.StartAudioOutput(send)
		}

		// No manager installed: try to install a fresh one.
		newManager := NewNonBlockingAudioManager()
		if compareAndSwapManager(nil, newManager) {
			return newManager.StartAudioOutput(send)
		}
		// Lost the installation race; loop and use the winner's manager.
	}
}
// StartNonBlockingAudioInput starts the non-blocking audio input system,
// decoding and playing frames received on receiveChan. Idempotent: returns nil
// if input is already running.
//
// The retry loop fixes a latent nil dereference in the original: after a
// failed compareAndSwapManager, a second loadManager() could still return nil
// (the racing winner may have been stopped concurrently, clearing the slot),
// and the original then called IsInputRunning on that nil manager.
func StartNonBlockingAudioInput(receiveChan <-chan []byte) error {
	for {
		manager := loadManager()
		if manager == nil {
			newManager := NewNonBlockingAudioManager()
			if !compareAndSwapManager(nil, newManager) {
				// Lost the installation race; retry so we never operate on a
				// nil manager.
				continue
			}
			manager = newManager
		}

		// Check if input is already running to avoid unnecessary operations.
		if manager.IsInputRunning() {
			return nil // Already running, this is not an error
		}
		return manager.StartAudioInput(receiveChan)
	}
}
// StopNonBlockingAudioStreaming stops the non-blocking audio streaming system.
//
// The global slot is cleared with a compare-and-swap instead of the original
// unconditional storeManager(nil): if a concurrent Start call installed a
// newer manager between our load and clear, we must not discard it.
func StopNonBlockingAudioStreaming() {
	manager := loadManager()
	if manager != nil {
		manager.Stop()
		// Clear the slot only if it still points at the manager we stopped.
		compareAndSwapManager(manager, nil)
	}
}
// StopNonBlockingAudioInput stops only the audio input without affecting output.
//
// When both input and output are stopped afterwards, the global slot is
// cleared so the next start builds a clean manager — but via compare-and-swap
// rather than the original unconditional storeManager(nil), so a newer manager
// installed concurrently by a Start call is not clobbered.
func StopNonBlockingAudioInput() {
	manager := loadManager()
	if manager != nil && manager.IsInputRunning() {
		manager.StopAudioInput()

		// If both input and output are stopped, clear the slot to ensure the
		// next start gets a clean state.
		if !manager.IsRunning() {
			compareAndSwapManager(manager, nil)
		}
	}
}
// GetNonBlockingAudioStats returns statistics from the non-blocking audio
// system, or a zero-value snapshot when no manager is installed.
func GetNonBlockingAudioStats() NonBlockingAudioStats {
	if manager := loadManager(); manager != nil {
		return manager.GetStats()
	}
	return NonBlockingAudioStats{}
}
// IsNonBlockingAudioRunning returns true if the non-blocking audio system
// (output or input) is running.
func IsNonBlockingAudioRunning() bool {
	if m := loadManager(); m != nil {
		return m.IsRunning()
	}
	return false
}
// IsNonBlockingAudioInputRunning returns true if the non-blocking audio input
// path is running.
func IsNonBlockingAudioInputRunning() bool {
	if m := loadManager(); m != nil {
		return m.IsInputRunning()
	}
	return false
}

View File

@ -0,0 +1,564 @@
package audio
import (
"context"
"errors"
// "runtime" // removed: no longer directly pinning OS thread here; batching handles it
"sync"
"sync/atomic"
"time"
"github.com/jetkvm/kvm/internal/logging"
"github.com/rs/zerolog"
)
// NonBlockingAudioManager manages audio operations in separate worker threads
// to prevent blocking of mouse/keyboard operations.
//
// Each direction is split into a blocking "worker" goroutine that performs the
// CGO calls and a non-blocking "coordinator" goroutine that schedules work and
// consumes results over buffered channels:
//   - output: capture from device → encode → send to WebRTC
//   - input:  receive from WebRTC → decode → playback to device
type NonBlockingAudioManager struct {
	// Statistics - MUST be first for ARM32 alignment (int64 fields need 8-byte alignment)
	stats NonBlockingAudioStats

	// Control: ctx/cancel signal shutdown to all goroutines; wg waits for them.
	ctx    context.Context
	cancel context.CancelFunc
	wg     sync.WaitGroup
	logger *zerolog.Logger

	// Audio output (capture from device, send to WebRTC)
	outputSendFunc   func([]byte)
	outputWorkChan   chan audioWorkItem
	outputResultChan chan audioResult

	// Audio input (receive from WebRTC, playback to device)
	inputReceiveChan <-chan []byte
	inputWorkChan    chan audioWorkItem
	inputResultChan  chan audioResult

	// Worker threads and flags - int32 fields grouped together.
	// 1 while the corresponding goroutine should run / is running; accessed
	// only via sync/atomic.
	outputRunning       int32
	inputRunning        int32
	outputWorkerRunning int32
	inputWorkerRunning  int32
}
// audioWorkItem is one unit of work a coordinator submits to a worker.
type audioWorkItem struct {
	workType   audioWorkType    // which operation to perform
	data       []byte           // input payload (decode/write work only)
	resultChan chan audioResult // where the worker reports the outcome
}
// audioWorkType identifies the operation a worker should perform.
type audioWorkType int

const (
	audioWorkInit        audioWorkType = iota // initialization request (unused in visible paths)
	audioWorkReadEncode                       // capture from device and encode
	audioWorkDecodeWrite                      // decode and write to playback device
	audioWorkClose                            // ask the worker to shut down
)
// audioResult reports the outcome of a single audioWorkItem.
type audioResult struct {
	success bool   // true when the operation completed without error
	data    []byte // encoded frame (output path only); backed by the frame buffer pool
	length  int    // number of valid bytes produced/consumed
	err     error  // non-nil on failure
}
// NonBlockingAudioStats is a snapshot of the manager's counters.
type NonBlockingAudioStats struct {
	// int64 fields MUST be first for ARM32 alignment
	OutputFramesProcessed int64 // output frames successfully encoded and sent
	OutputFramesDropped   int64 // output frames lost (busy worker, errors)
	InputFramesProcessed  int64 // input frames successfully decoded and played
	InputFramesDropped    int64 // input frames lost (busy worker, timeouts, errors)
	WorkerErrors          int64 // errors reported by either worker
	// time.Time is int64 internally, so it's also aligned
	LastProcessTime time.Time // wall-clock time of the last output result
}
// NewNonBlockingAudioManager creates a new non-blocking audio manager with a
// cancellable root context and buffered work/result channels for both the
// output and input paths.
func NewNonBlockingAudioManager() *NonBlockingAudioManager {
	ctx, cancel := context.WithCancel(context.Background())
	logger := logging.GetDefaultLogger().With().Str("component", "nonblocking-audio").Logger()

	// Buffer depth for work items and results on each path.
	const queueDepth = 10

	nam := &NonBlockingAudioManager{
		ctx:    ctx,
		cancel: cancel,
		logger: &logger,
	}
	nam.outputWorkChan = make(chan audioWorkItem, queueDepth)
	nam.outputResultChan = make(chan audioResult, queueDepth)
	nam.inputWorkChan = make(chan audioWorkItem, queueDepth)
	nam.inputResultChan = make(chan audioResult, queueDepth)
	return nam
}
// StartAudioOutput starts non-blocking audio output (capture and encode).
// sendFunc is invoked from the coordinator goroutine with each encoded frame.
// Returns ErrAudioAlreadyRunning if output is already active.
func (nam *NonBlockingAudioManager) StartAudioOutput(sendFunc func([]byte)) error {
	// CAS on the running flag guards against a double start.
	if !atomic.CompareAndSwapInt32(&nam.outputRunning, 0, 1) {
		return ErrAudioAlreadyRunning
	}

	nam.outputSendFunc = sendFunc

	// Enable batch audio processing for performance
	EnableBatchAudioProcessing()

	// Start the blocking worker thread (owns all CGO capture/encode calls)
	nam.wg.Add(1)
	go nam.outputWorkerThread()

	// Start the non-blocking coordinator
	nam.wg.Add(1)
	go nam.outputCoordinatorThread()

	nam.logger.Info().Msg("non-blocking audio output started with batch processing")
	return nil
}
// StartAudioInput starts non-blocking audio input (receive and decode).
// Frames arriving on receiveChan are decoded and played by the worker.
// Returns ErrAudioAlreadyRunning if input is already active.
func (nam *NonBlockingAudioManager) StartAudioInput(receiveChan <-chan []byte) error {
	// CAS on the running flag guards against a double start.
	if !atomic.CompareAndSwapInt32(&nam.inputRunning, 0, 1) {
		return ErrAudioAlreadyRunning
	}

	nam.inputReceiveChan = receiveChan

	// Enable batch audio processing for performance
	EnableBatchAudioProcessing()

	// Start the blocking worker thread (owns all CGO decode/playback calls)
	nam.wg.Add(1)
	go nam.inputWorkerThread()

	// Start the non-blocking coordinator
	nam.wg.Add(1)
	go nam.inputCoordinatorThread()

	nam.logger.Info().Msg("non-blocking audio input started with batch processing")
	return nil
}
// outputWorkerThread handles all blocking audio output operations.
// It owns CGO init/close for the capture path and services read+encode
// requests from the coordinator, so blocking calls never reach other goroutines.
func (nam *NonBlockingAudioManager) outputWorkerThread() {
	defer nam.wg.Done()
	defer atomic.StoreInt32(&nam.outputWorkerRunning, 0)

	atomic.StoreInt32(&nam.outputWorkerRunning, 1)
	nam.logger.Debug().Msg("output worker thread started")

	// Initialize audio in worker thread
	if err := CGOAudioInit(); err != nil {
		nam.logger.Error().Err(err).Msg("failed to initialize audio in worker thread")
		return
	}
	defer CGOAudioClose()

	// Use buffer pool to avoid allocations; buf is reused every iteration.
	buf := GetAudioFrameBuffer()
	defer PutAudioFrameBuffer(buf)

	for {
		select {
		case <-nam.ctx.Done():
			nam.logger.Debug().Msg("output worker thread stopping")
			return

		case workItem := <-nam.outputWorkChan:
			switch workItem.workType {
			case audioWorkReadEncode:
				n, err := BatchCGOAudioReadEncode(buf)

				result := audioResult{
					success: err == nil,
					length:  n,
					err:     err,
				}
				if err == nil && n > 0 {
					// Copy into a fresh pooled buffer: buf is reused next
					// iteration, so the result must not alias it. Ownership
					// of resultBuf passes to whoever consumes the result.
					resultBuf := GetAudioFrameBuffer()
					copy(resultBuf[:n], buf[:n])
					result.data = resultBuf[:n]
				}

				// Send result back (non-blocking)
				select {
				case workItem.resultChan <- result:
				case <-nam.ctx.Done():
					return
				default:
					// Drop result if coordinator is not ready; return the
					// pooled buffer so it is not leaked.
					if result.data != nil {
						PutAudioFrameBuffer(result.data)
					}
					atomic.AddInt64(&nam.stats.OutputFramesDropped, 1)
				}

			case audioWorkClose:
				nam.logger.Debug().Msg("output worker received close signal")
				return
			}
		}
	}
}
// outputCoordinatorThread coordinates audio output without blocking.
// On a 20 ms tick it submits one read+encode request to the worker (at most
// one request in flight, tracked by pendingWork), then forwards completed
// frames to outputSendFunc and returns pooled buffers.
func (nam *NonBlockingAudioManager) outputCoordinatorThread() {
	defer nam.wg.Done()
	defer atomic.StoreInt32(&nam.outputRunning, 0)

	nam.logger.Debug().Msg("output coordinator thread started")

	ticker := time.NewTicker(20 * time.Millisecond) // Match frame timing
	defer ticker.Stop()

	pendingWork := false
	resultChan := make(chan audioResult, 1)

	for atomic.LoadInt32(&nam.outputRunning) == 1 {
		select {
		case <-nam.ctx.Done():
			nam.logger.Debug().Msg("output coordinator stopping")
			return

		case <-ticker.C:
			// Only submit work if worker is ready and no pending work
			if !pendingWork && atomic.LoadInt32(&nam.outputWorkerRunning) == 1 {
				if IsAudioMuted() {
					continue // Skip when muted
				}

				workItem := audioWorkItem{
					workType:   audioWorkReadEncode,
					resultChan: resultChan,
				}

				// Submit work (non-blocking)
				select {
				case nam.outputWorkChan <- workItem:
					pendingWork = true
				default:
					// Worker is busy, drop this frame
					atomic.AddInt64(&nam.stats.OutputFramesDropped, 1)
				}
			}

		case result := <-resultChan:
			pendingWork = false
			nam.stats.LastProcessTime = time.Now()

			if result.success && result.data != nil && result.length > 0 {
				// Send to WebRTC (non-blocking)
				if nam.outputSendFunc != nil {
					nam.outputSendFunc(result.data)
					atomic.AddInt64(&nam.stats.OutputFramesProcessed, 1)
					RecordFrameReceived(result.length)
				}
				// Return buffer to pool after use
				PutAudioFrameBuffer(result.data)
			} else if result.success && result.length == 0 {
				// No data available - this is normal, not an error
				// Just continue without logging or counting as error
			} else {
				atomic.AddInt64(&nam.stats.OutputFramesDropped, 1)
				atomic.AddInt64(&nam.stats.WorkerErrors, 1)
				if result.err != nil {
					nam.logger.Warn().Err(result.err).Msg("audio output worker error")
				}
				// Clean up buffer if present
				if result.data != nil {
					PutAudioFrameBuffer(result.data)
				}
				RecordFrameDropped()
			}
		}
	}

	// Signal worker to close; bounded wait so shutdown can never hang here.
	select {
	case nam.outputWorkChan <- audioWorkItem{workType: audioWorkClose}:
	case <-time.After(100 * time.Millisecond):
		nam.logger.Warn().Msg("timeout signaling output worker to close")
	}

	nam.logger.Info().Msg("output coordinator thread stopped")
}
// inputWorkerThread handles all blocking audio input operations (decode +
// playback). It owns every CGO call for the input path so the coordinator and
// WebRTC goroutines never block on audio hardware.
//
// Changes vs. the previous revision (behavior-neutral cleanups):
//   - removed the redundant StoreInt32(...,0) immediately before the
//     StoreInt32(...,1) at startup (it only produced a transient bogus 0);
//   - removed the cgoInitialized flag, which was always true at defer time;
//   - simplified the empty-frame guard: len(nil) == 0, so the separate nil
//     check was redundant (staticcheck S1009).
func (nam *NonBlockingAudioManager) inputWorkerThread() {
	defer nam.wg.Done()
	atomic.StoreInt32(&nam.inputWorkerRunning, 1)
	nam.logger.Debug().Msg("input worker thread started")

	// Initialize audio playback in worker thread
	if err := CGOAudioPlaybackInit(); err != nil {
		nam.logger.Error().Err(err).Msg("failed to initialize audio playback in worker thread")
		return
	}

	// Mark the worker stopped on exit so no further CGO calls are attempted.
	// Note: CGOAudioPlaybackClose() is intentionally NOT called here to avoid
	// a double close; StopAudioInput() closes playback once it observes the
	// worker has stopped.
	defer func() {
		nam.logger.Debug().Msg("cleaning up CGO audio playback")
		atomic.StoreInt32(&nam.inputWorkerRunning, 0)
	}()

	for {
		// If coordinator has stopped, exit worker loop
		if atomic.LoadInt32(&nam.inputRunning) == 0 {
			return
		}
		select {
		case <-nam.ctx.Done():
			nam.logger.Debug().Msg("input worker thread stopping due to context cancellation")
			return

		case workItem := <-nam.inputWorkChan:
			switch workItem.workType {
			case audioWorkDecodeWrite:
				// Check if we're still supposed to be running before processing
				if atomic.LoadInt32(&nam.inputWorkerRunning) == 0 || atomic.LoadInt32(&nam.inputRunning) == 0 {
					nam.logger.Debug().Msg("input worker stopping, ignoring decode work")
					// Do not send to resultChan; coordinator may have exited
					return
				}

				// Validate input data before the CGO call.
				if len(workItem.data) == 0 {
					result := audioResult{
						success: false,
						err:     errors.New("invalid audio data"),
					}
					// Only deliver the result while the coordinator is alive;
					// otherwise drop it so we never block forever.
					if atomic.LoadInt32(&nam.inputRunning) == 1 {
						select {
						case workItem.resultChan <- result:
						case <-nam.ctx.Done():
							return
						case <-time.After(10 * time.Millisecond):
							// Timeout - coordinator may have stopped, drop result
							atomic.AddInt64(&nam.stats.InputFramesDropped, 1)
						}
					} else {
						// Coordinator has stopped, drop result
						atomic.AddInt64(&nam.stats.InputFramesDropped, 1)
					}
					continue
				}

				// Perform the blocking CGO operation with panic recovery so a
				// crash inside the C layer cannot take down the process.
				var result audioResult
				func() {
					defer func() {
						if r := recover(); r != nil {
							nam.logger.Error().Interface("panic", r).Msg("CGO decode write panic recovered")
							result = audioResult{
								success: false,
								err:     errors.New("CGO decode write panic"),
							}
						}
					}()

					// Double-check we're still running before CGO call
					if atomic.LoadInt32(&nam.inputWorkerRunning) == 0 {
						result = audioResult{success: false, err: errors.New("worker shutting down")}
						return
					}

					n, err := BatchCGOAudioDecodeWrite(workItem.data)
					result = audioResult{
						success: err == nil,
						length:  n,
						err:     err,
					}
				}()

				// Send result back (bounded) - check if coordinator is still running
				if atomic.LoadInt32(&nam.inputRunning) == 1 {
					select {
					case workItem.resultChan <- result:
					case <-nam.ctx.Done():
						return
					case <-time.After(10 * time.Millisecond):
						// Timeout - coordinator may have stopped, drop result
						atomic.AddInt64(&nam.stats.InputFramesDropped, 1)
					}
				} else {
					// Coordinator has stopped, drop result
					atomic.AddInt64(&nam.stats.InputFramesDropped, 1)
				}

			case audioWorkClose:
				nam.logger.Debug().Msg("input worker received close signal")
				return
			}
		}
	}
}
// inputCoordinatorThread coordinates audio input without blocking.
// It forwards frames from inputReceiveChan to the worker one at a time,
// waiting (with a 50 ms timeout) for each result; frames are dropped when the
// worker is busy or slow rather than blocking the WebRTC side.
func (nam *NonBlockingAudioManager) inputCoordinatorThread() {
	defer nam.wg.Done()
	defer atomic.StoreInt32(&nam.inputRunning, 0)

	nam.logger.Debug().Msg("input coordinator thread started")

	resultChan := make(chan audioResult, 1)
	// Do not close resultChan to avoid races with worker sends during shutdown

	for atomic.LoadInt32(&nam.inputRunning) == 1 {
		select {
		case <-nam.ctx.Done():
			nam.logger.Debug().Msg("input coordinator stopping")
			return

		case frame := <-nam.inputReceiveChan:
			if len(frame) == 0 {
				continue
			}

			// Submit work to worker (non-blocking); skip entirely when the
			// worker is not up.
			if atomic.LoadInt32(&nam.inputWorkerRunning) == 1 {
				workItem := audioWorkItem{
					workType:   audioWorkDecodeWrite,
					data:       frame,
					resultChan: resultChan,
				}

				select {
				case nam.inputWorkChan <- workItem:
					// Wait for result with timeout and context cancellation
					select {
					case result := <-resultChan:
						if result.success {
							atomic.AddInt64(&nam.stats.InputFramesProcessed, 1)
						} else {
							atomic.AddInt64(&nam.stats.InputFramesDropped, 1)
							atomic.AddInt64(&nam.stats.WorkerErrors, 1)
							if result.err != nil {
								nam.logger.Warn().Err(result.err).Msg("audio input worker error")
							}
						}
					case <-nam.ctx.Done():
						nam.logger.Debug().Msg("input coordinator stopping during result wait")
						return
					case <-time.After(50 * time.Millisecond):
						// Timeout waiting for result
						atomic.AddInt64(&nam.stats.InputFramesDropped, 1)
						nam.logger.Warn().Msg("timeout waiting for input worker result")
						// Drain any pending result to prevent worker blocking
						select {
						case <-resultChan:
						default:
						}
					}
				default:
					// Worker is busy, drop this frame
					atomic.AddInt64(&nam.stats.InputFramesDropped, 1)
				}
			}

		case <-time.After(250 * time.Millisecond):
			// Periodic timeout so the inputRunning flag is re-checked even
			// when no frames arrive.
			continue
		}
	}

	// Avoid sending close signals or touching channels here; inputRunning=0 will stop worker via checks
	nam.logger.Info().Msg("input coordinator thread stopped")
}
// Stop stops all audio operations and blocks until every goroutine has exited.
func (nam *NonBlockingAudioManager) Stop() {
	nam.logger.Info().Msg("stopping non-blocking audio manager")

	// Signal all threads to stop
	nam.cancel()

	// Stop coordinators (their run loops poll these flags)
	atomic.StoreInt32(&nam.outputRunning, 0)
	atomic.StoreInt32(&nam.inputRunning, 0)

	// Wait for all goroutines to finish
	nam.wg.Wait()

	// Disable batch processing to free resources
	DisableBatchAudioProcessing()

	nam.logger.Info().Msg("non-blocking audio manager stopped")
}
// StopAudioInput stops only the audio input operations, leaving output intact.
// It drains the receive channel briefly so senders do not block, then waits
// (up to 2 s) for the input worker to stop before closing the CGO playback
// resources — closing only after the worker is gone avoids a use-after-close.
func (nam *NonBlockingAudioManager) StopAudioInput() {
	nam.logger.Info().Msg("stopping audio input")

	// Stop only the input coordinator
	atomic.StoreInt32(&nam.inputRunning, 0)

	// Drain the receive channel to prevent blocking senders; the goroutine
	// exits after 100 ms of no traffic.
	go func() {
		for {
			select {
			case <-nam.inputReceiveChan:
				// Drain any remaining frames
			case <-time.After(100 * time.Millisecond):
				return
			}
		}
	}()

	// Wait for the worker to actually stop to prevent race conditions
	timeout := time.After(2 * time.Second)
	ticker := time.NewTicker(10 * time.Millisecond)
	defer ticker.Stop()

	for {
		select {
		case <-timeout:
			nam.logger.Warn().Msg("timeout waiting for input worker to stop")
			return
		case <-ticker.C:
			if atomic.LoadInt32(&nam.inputWorkerRunning) == 0 {
				nam.logger.Info().Msg("audio input stopped successfully")
				// Close ALSA playback resources now that input worker has stopped
				CGOAudioPlaybackClose()
				return
			}
		}
	}
}
// GetStats returns a snapshot of the current statistics.
// Counter fields are read atomically.
//
// NOTE(review): LastProcessTime is written by the output coordinator without
// synchronization and read non-atomically here — likely benign for monitoring
// output, but technically a data race under -race; confirm acceptability.
func (nam *NonBlockingAudioManager) GetStats() NonBlockingAudioStats {
	return NonBlockingAudioStats{
		OutputFramesProcessed: atomic.LoadInt64(&nam.stats.OutputFramesProcessed),
		OutputFramesDropped:   atomic.LoadInt64(&nam.stats.OutputFramesDropped),
		InputFramesProcessed:  atomic.LoadInt64(&nam.stats.InputFramesProcessed),
		InputFramesDropped:    atomic.LoadInt64(&nam.stats.InputFramesDropped),
		WorkerErrors:          atomic.LoadInt64(&nam.stats.WorkerErrors),
		LastProcessTime:       nam.stats.LastProcessTime,
	}
}
// IsRunning returns true if any audio operations (output or input) are running.
func (nam *NonBlockingAudioManager) IsRunning() bool {
	return atomic.LoadInt32(&nam.outputRunning) == 1 || atomic.LoadInt32(&nam.inputRunning) == 1
}
// IsInputRunning returns true if the audio input coordinator is running.
func (nam *NonBlockingAudioManager) IsInputRunning() bool {
	return atomic.LoadInt32(&nam.inputRunning) == 1
}
// IsOutputRunning returns true if the audio output coordinator is running.
func (nam *NonBlockingAudioManager) IsOutputRunning() bool {
	return atomic.LoadInt32(&nam.outputRunning) == 1
}

View File

@ -1,71 +0,0 @@
package audio
import (
"context"
"os"
"os/signal"
"syscall"
"time"
"github.com/jetkvm/kvm/internal/logging"
)
// RunAudioOutputServer runs the audio output server subprocess.
// This should be called from main() when the subprocess is detected.
// It creates and starts the IPC server, wires captured audio frames into it,
// then blocks until SIGINT/SIGTERM before shutting everything down.
func RunAudioOutputServer() error {
	logger := logging.GetDefaultLogger().With().Str("component", "audio-output-server").Logger()
	logger.Info().Msg("Starting audio output server subprocess")

	// Create audio server
	server, err := NewAudioServer()
	if err != nil {
		logger.Error().Err(err).Msg("failed to create audio server")
		return err
	}
	defer server.Close()

	// Start accepting connections
	if err := server.Start(); err != nil {
		logger.Error().Err(err).Msg("failed to start audio server")
		return err
	}

	// Initialize audio processing; each captured frame is forwarded to the
	// IPC server (send failures are counted as drops, not fatal).
	err = StartNonBlockingAudioStreaming(func(frame []byte) {
		if err := server.SendFrame(frame); err != nil {
			logger.Warn().Err(err).Msg("failed to send audio frame")
			RecordFrameDropped()
		}
	})
	if err != nil {
		logger.Error().Err(err).Msg("failed to start audio processing")
		return err
	}

	logger.Info().Msg("Audio output server started, waiting for connections")

	// Set up signal handling for graceful shutdown
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)

	// Wait for shutdown signal.
	// NOTE(review): nothing cancels ctx before the deferred cancel, so in
	// practice only the signal case fires — confirm the ctx.Done branch is
	// intended as future-proofing.
	select {
	case sig := <-sigChan:
		logger.Info().Str("signal", sig.String()).Msg("Received shutdown signal")
	case <-ctx.Done():
		logger.Info().Msg("Context cancelled")
	}

	// Graceful shutdown
	logger.Info().Msg("Shutting down audio output server")
	StopNonBlockingAudioStreaming()

	// Give some time for cleanup
	time.Sleep(100 * time.Millisecond)

	logger.Info().Msg("Audio output server subprocess stopped")
	return nil
}

View File

@ -1,369 +0,0 @@
package audio
import (
"context"
"fmt"
"runtime"
"sync"
"sync/atomic"
"time"
"github.com/jetkvm/kvm/internal/logging"
"github.com/rs/zerolog"
)
// OutputStreamer manages high-performance audio output streaming from the
// audio IPC client, with an adaptive batch size and pooled frame buffers.
type OutputStreamer struct {
	// Atomic fields must be first for proper alignment on ARM
	processedFrames int64 // Total processed frames counter (atomic)
	droppedFrames   int64 // Dropped frames counter (atomic)
	processingTime  int64 // Average processing time in nanoseconds (atomic)
	lastStatsTime   int64 // Last statistics update time (atomic, Unix nanos)

	client     *AudioClient      // IPC connection to the audio output server
	bufferPool *AudioBufferPool  // pooled frame buffers to avoid allocations
	ctx        context.Context   // lifecycle context for the streamer
	cancel     context.CancelFunc
	wg         sync.WaitGroup    // waits for streamer goroutines
	running    bool              // guarded by mtx
	mtx        sync.Mutex

	// Performance optimization fields
	batchSize      int           // Adaptive batch size for frame processing
	processingChan chan []byte   // Buffered channel for frame processing
	statsInterval  time.Duration // Statistics reporting interval
}
var (
	outputStreamingRunning    int32
	outputStreamingCancel     context.CancelFunc
	outputStreamingLogger     *zerolog.Logger
	outputStreamingLoggerOnce sync.Once
)

// getOutputStreamingLogger lazily creates the package-level output-streaming
// logger. Initialization is guarded by sync.Once: the original check-then-set
// pattern could construct the logger twice (and race on the pointer) when
// called from multiple goroutines.
func getOutputStreamingLogger() *zerolog.Logger {
	outputStreamingLoggerOnce.Do(func() {
		logger := logging.GetDefaultLogger().With().Str("component", "audio-output").Logger()
		outputStreamingLogger = &logger
	})
	return outputStreamingLogger
}
// NewOutputStreamer constructs an OutputStreamer; call Start to connect and
// begin streaming. The error result is always nil today but is kept for
// forward compatibility.
func NewOutputStreamer() (*OutputStreamer, error) {
	client := NewAudioClient()
	// Seed the batch size from the adaptive buffer manager.
	initialBatch := GetAdaptiveBufferManager().GetOutputBufferSize()
	ctx, cancel := context.WithCancel(context.Background())
	streamer := &OutputStreamer{
		client:         client,
		bufferPool:     NewAudioBufferPool(MaxAudioFrameSize), // reuse pooled frame buffers
		ctx:            ctx,
		cancel:         cancel,
		batchSize:      initialBatch,
		processingChan: make(chan []byte, 500), // large buffer for smooth processing
		statsInterval:  5 * time.Second,
		lastStatsTime:  time.Now().UnixNano(),
	}
	return streamer, nil
}
// Start connects to the audio output server and launches the streaming,
// processing, and statistics goroutines. It fails when the streamer is
// already running or the connection cannot be established.
func (s *OutputStreamer) Start() error {
	s.mtx.Lock()
	defer s.mtx.Unlock()

	if s.running {
		return fmt.Errorf("output streamer already running")
	}
	if err := s.client.Connect(); err != nil {
		return fmt.Errorf("failed to connect to audio output server: %w", err)
	}
	s.running = true

	// Three workers: frame capture, frame processing, statistics reporting.
	workers := []func(){s.streamLoop, s.processingLoop, s.statisticsLoop}
	s.wg.Add(len(workers))
	for _, w := range workers {
		go w()
	}
	return nil
}
// Stop shuts the streamer down: cancels the worker context, closes the
// processing channel so processingLoop's range exits, waits for all workers,
// then closes the IPC client. Safe to call when not running.
//
// NOTE(review): processingChan is closed while streamLoop may still be inside
// its ticker case about to perform its (non-blocking) send; sending on a
// closed channel panics, so there is a narrow shutdown race — confirm
// streamLoop always observes ctx.Done() first under load.
func (s *OutputStreamer) Stop() {
	s.mtx.Lock()
	defer s.mtx.Unlock()
	if !s.running {
		return
	}
	s.running = false
	s.cancel()
	// Close processing channel to signal goroutines
	close(s.processingChan)
	// Wait for all goroutines to finish
	s.wg.Wait()
	if s.client != nil {
		s.client.Close()
	}
}
// streamLoop is the capture loop: on a fixed 20ms tick it reads one encoded
// frame from CGO, copies it into processingChan (dropping the frame when the
// channel is full), and refreshes the adaptive batch size every 500ms. It
// runs pinned to an OS thread until ctx is cancelled.
func (s *OutputStreamer) streamLoop() {
	defer s.wg.Done()
	// Pin goroutine to OS thread for consistent performance
	runtime.LockOSThread()
	defer runtime.UnlockOSThread()
	// Adaptive timing for frame reading
	frameInterval := time.Duration(20) * time.Millisecond // 50 FPS base rate
	ticker := time.NewTicker(frameInterval)
	defer ticker.Stop()
	// Batch size update ticker
	batchUpdateTicker := time.NewTicker(500 * time.Millisecond)
	defer batchUpdateTicker.Stop()
	for {
		select {
		case <-s.ctx.Done():
			return
		case <-batchUpdateTicker.C:
			// Update batch size from adaptive buffer manager
			s.UpdateBatchSize()
		case <-ticker.C:
			// Read audio data from CGO with timing measurement
			startTime := time.Now()
			frameBuf := s.bufferPool.Get()
			n, err := CGOAudioReadEncode(frameBuf)
			processingDuration := time.Since(startTime)
			if err != nil {
				getOutputStreamingLogger().Warn().Err(err).Msg("Failed to read audio data")
				s.bufferPool.Put(frameBuf)
				atomic.AddInt64(&s.droppedFrames, 1)
				continue
			}
			if n > 0 {
				// Copy out of the pooled buffer so frameBuf can be
				// returned to the pool below regardless of send outcome.
				frameData := make([]byte, n)
				copy(frameData, frameBuf[:n])
				// Send frame for processing (non-blocking)
				select {
				case s.processingChan <- frameData:
					atomic.AddInt64(&s.processedFrames, 1)
					// Store the latest read duration (overwrites previous
					// value; this is a last-sample figure, not an average)
					atomic.StoreInt64(&s.processingTime, int64(processingDuration))
					// Report latency to adaptive buffer manager
					s.ReportLatency(processingDuration)
				default:
					// Processing channel full, drop frame
					atomic.AddInt64(&s.droppedFrames, 1)
				}
			}
			s.bufferPool.Put(frameBuf)
		}
	}
}
// processingLoop drains processingChan until it is closed by Stop. For each
// queued frame it performs a ReceiveFrame round trip on the IPC client and
// attempts a reconnect when disconnected. Runs pinned to an OS thread at
// elevated audio priority (restored on exit).
//
// NOTE(review): the dequeued frame itself is discarded — only the client
// receive/reconnect is driven; the inline comment marks this as a placeholder
// for future per-frame processing.
func (s *OutputStreamer) processingLoop() {
	defer s.wg.Done()
	// Pin goroutine to OS thread for consistent performance
	runtime.LockOSThread()
	defer runtime.UnlockOSThread()
	// Set high priority for audio output processing
	if err := SetAudioThreadPriority(); err != nil {
		getOutputStreamingLogger().Warn().Err(err).Msg("Failed to set audio output processing priority")
	}
	defer func() {
		if err := ResetThreadPriority(); err != nil {
			getOutputStreamingLogger().Warn().Err(err).Msg("Failed to reset thread priority")
		}
	}()
	for range s.processingChan {
		// Process frame (currently just receiving, but can be extended)
		if _, err := s.client.ReceiveFrame(); err != nil {
			if s.client.IsConnected() {
				getOutputStreamingLogger().Warn().Err(err).Msg("Failed to receive frame")
				atomic.AddInt64(&s.droppedFrames, 1)
			}
			// Try to reconnect if disconnected
			if !s.client.IsConnected() {
				if err := s.client.Connect(); err != nil {
					getOutputStreamingLogger().Warn().Err(err).Msg("Failed to reconnect")
				}
			}
		}
	}
}
// statisticsLoop periodically emits performance statistics until the
// streamer's context is cancelled.
func (s *OutputStreamer) statisticsLoop() {
	defer s.wg.Done()

	ticker := time.NewTicker(s.statsInterval)
	defer ticker.Stop()

	for {
		select {
		case <-ticker.C:
			s.reportStatistics()
		case <-s.ctx.Done():
			return
		}
	}
}
// reportStatistics logs current streamer and client statistics. It stays
// silent until at least one frame has been processed.
func (s *OutputStreamer) reportStatistics() {
	processed := atomic.LoadInt64(&s.processedFrames)
	if processed == 0 {
		return
	}
	dropped := atomic.LoadInt64(&s.droppedFrames)
	avgProcessing := time.Duration(atomic.LoadInt64(&s.processingTime))
	dropRate := float64(dropped) / float64(processed+dropped) * 100

	getOutputStreamingLogger().Info().
		Int64("processed", processed).
		Int64("dropped", dropped).
		Float64("drop_rate", dropRate).
		Dur("avg_processing", avgProcessing).
		Msg("Output Audio Stats")

	// Companion IPC client statistics.
	clientTotal, clientDropped := s.client.GetClientStats()
	getOutputStreamingLogger().Info().
		Int64("total", clientTotal).
		Int64("dropped", clientDropped).
		Msg("Client Stats")
}
// GetStats returns the processed/dropped frame counters and the most
// recently recorded per-frame processing time.
func (s *OutputStreamer) GetStats() (processed, dropped int64, avgProcessingTime time.Duration) {
	return atomic.LoadInt64(&s.processedFrames),
		atomic.LoadInt64(&s.droppedFrames),
		time.Duration(atomic.LoadInt64(&s.processingTime))
}
// GetDetailedStats returns comprehensive streaming statistics as a map
// suitable for diagnostics/JSON endpoints.
func (s *OutputStreamer) GetDetailedStats() map[string]interface{} {
	processed := atomic.LoadInt64(&s.processedFrames)
	dropped := atomic.LoadInt64(&s.droppedFrames)
	processingTime := atomic.LoadInt64(&s.processingTime)

	// batchSize is mutated by UpdateBatchSize under s.mtx; read it under the
	// same lock (the original read it unlocked, which is a data race).
	s.mtx.Lock()
	batchSize := s.batchSize
	s.mtx.Unlock()

	stats := map[string]interface{}{
		"processed_frames":       processed,
		"dropped_frames":         dropped,
		"avg_processing_time_ns": processingTime,
		"batch_size":             batchSize,
		"channel_buffer_size":    cap(s.processingChan),
		"channel_current_size":   len(s.processingChan),
		"connected":              s.client.IsConnected(),
	}
	if processed+dropped > 0 {
		stats["drop_rate_percent"] = float64(dropped) / float64(processed+dropped) * 100
	}
	// Add client statistics
	clientTotal, clientDropped := s.client.GetClientStats()
	stats["client_total_frames"] = clientTotal
	stats["client_dropped_frames"] = clientDropped
	return stats
}
// UpdateBatchSize refreshes the batch size from the adaptive buffer manager.
func (s *OutputStreamer) UpdateBatchSize() {
	s.mtx.Lock()
	defer s.mtx.Unlock()
	s.batchSize = GetAdaptiveBufferManager().GetOutputBufferSize()
}

// ReportLatency forwards an observed processing latency to the adaptive
// buffer manager so it can resize buffers.
func (s *OutputStreamer) ReportLatency(latency time.Duration) {
	GetAdaptiveBufferManager().UpdateLatency(latency)
}
// StartAudioOutputStreaming starts audio output streaming (capturing system
// audio). The running flag is claimed with a CAS so only one instance runs;
// CGO audio is initialized up front and closed when the capture goroutine
// exits. Returns ErrAudioAlreadyRunning when a stream is active.
//
// NOTE(review): the frame buffer is returned to the pool immediately after
// send(frame) returns — this assumes send fully consumes (or copies) the
// frame synchronously; confirm no callback retains the slice.
func StartAudioOutputStreaming(send func([]byte)) error {
	if !atomic.CompareAndSwapInt32(&outputStreamingRunning, 0, 1) {
		return ErrAudioAlreadyRunning
	}
	// Initialize CGO audio capture
	if err := CGOAudioInit(); err != nil {
		atomic.StoreInt32(&outputStreamingRunning, 0)
		return err
	}
	ctx, cancel := context.WithCancel(context.Background())
	outputStreamingCancel = cancel
	// Start audio capture loop
	go func() {
		defer func() {
			CGOAudioClose()
			atomic.StoreInt32(&outputStreamingRunning, 0)
			getOutputStreamingLogger().Info().Msg("Audio output streaming stopped")
		}()
		getOutputStreamingLogger().Info().Msg("Audio output streaming started")
		buffer := make([]byte, MaxAudioFrameSize)
		for {
			select {
			case <-ctx.Done():
				return
			default:
				// Capture audio frame; read errors are logged and skipped
				n, err := CGOAudioReadEncode(buffer)
				if err != nil {
					getOutputStreamingLogger().Warn().Err(err).Msg("Failed to read/encode audio")
					continue
				}
				if n > 0 {
					// Get frame buffer from pool to reduce allocations
					frame := GetAudioFrameBuffer()
					frame = frame[:n] // Resize to actual frame size
					copy(frame, buffer[:n])
					send(frame)
					// Return buffer to pool after sending (see NOTE above)
					PutAudioFrameBuffer(frame)
					RecordFrameReceived(n)
				}
				// Small delay to prevent busy waiting (caps capture at ~100 fps)
				time.Sleep(10 * time.Millisecond)
			}
		}
	}()
	return nil
}
// StopAudioOutputStreaming stops audio output streaming and blocks until the
// capture goroutine clears the running flag.
//
// NOTE(review): outputStreamingCancel is read and nil-ed without any
// synchronization, so concurrent Start/Stop calls can race — confirm callers
// serialize lifecycle operations.
func StopAudioOutputStreaming() {
	if atomic.LoadInt32(&outputStreamingRunning) == 0 {
		return
	}
	if outputStreamingCancel != nil {
		outputStreamingCancel()
		outputStreamingCancel = nil
	}
	// Wait for streaming to stop (polls the flag every 10ms)
	for atomic.LoadInt32(&outputStreamingRunning) == 1 {
		time.Sleep(10 * time.Millisecond)
	}
}

View File

@ -1,165 +0,0 @@
//go:build linux
package audio
import (
"runtime"
"syscall"
"unsafe"
"github.com/jetkvm/kvm/internal/logging"
"github.com/rs/zerolog"
)
// SchedParam mirrors the C struct sched_param handed to the
// sched_setscheduler syscall; only the priority field is used.
type SchedParam struct {
	Priority int32
}

// Priority levels for audio processing
const (
	// SCHED_FIFO priorities (1-99, higher = more priority)
	AudioHighPriority   = 80 // High priority for critical audio processing
	AudioMediumPriority = 60 // Medium priority for regular audio processing
	AudioLowPriority    = 40 // Low priority for background audio tasks
	// SCHED_NORMAL is the default (priority 0)
	NormalPriority = 0
)

// Linux scheduling policy numbers passed to sched_setscheduler.
const (
	SCHED_NORMAL = 0
	SCHED_FIFO   = 1
	SCHED_RR     = 2
)

// PriorityScheduler manages thread priorities for audio processing.
// When enabled is false every priority change becomes a no-op.
type PriorityScheduler struct {
	logger  zerolog.Logger
	enabled bool
}
// NewPriorityScheduler creates a priority scheduler with scheduling enabled.
func NewPriorityScheduler() *PriorityScheduler {
	logger := logging.GetDefaultLogger().With().Str("component", "priority-scheduler").Logger()
	return &PriorityScheduler{logger: logger, enabled: true}
}
// SetThreadPriority applies the given scheduling policy and priority to the
// calling thread via the sched_setscheduler syscall. If a real-time policy is
// rejected (e.g. insufficient privileges), it falls back to adjusting the
// nice value. Returns nil immediately when the scheduler is disabled.
//
// NOTE(review): runtime.LockOSThread has no matching unlock here, so the
// calling goroutine stays pinned to this thread — presumably intentional so
// the elevated priority keeps applying to it; confirm callers expect that.
func (ps *PriorityScheduler) SetThreadPriority(priority int, policy int) error {
	if !ps.enabled {
		return nil
	}
	// Lock to OS thread to ensure we're setting priority for the right thread
	runtime.LockOSThread()
	// Get current thread ID
	tid := syscall.Gettid()
	// Set scheduling parameters
	param := &SchedParam{
		Priority: int32(priority),
	}
	// Use syscall to set scheduler
	_, _, errno := syscall.Syscall(syscall.SYS_SCHED_SETSCHEDULER,
		uintptr(tid),
		uintptr(policy),
		uintptr(unsafe.Pointer(param)))
	if errno != 0 {
		// If we can't set real-time priority, try nice value instead
		if policy != SCHED_NORMAL {
			ps.logger.Warn().Int("errno", int(errno)).Msg("Failed to set real-time priority, falling back to nice")
			return ps.setNicePriority(priority)
		}
		// errno (syscall.Errno) satisfies the error interface
		return errno
	}
	ps.logger.Debug().Int("tid", tid).Int("priority", priority).Int("policy", policy).Msg("Thread priority set")
	return nil
}
// setNicePriority falls back to adjusting the process nice value when
// real-time scheduling is unavailable. The mapping is inverse: a higher
// real-time priority yields a lower (more favorable) nice value, e.g.
// rt 80 -> nice -10 and rt 40 -> nice 0.
func (ps *PriorityScheduler) setNicePriority(rtPriority int) error {
	// Map and clamp into the valid nice range [-20, 19].
	niceValue := max(-20, min(19, (40-rtPriority)/4))

	if err := syscall.Setpriority(syscall.PRIO_PROCESS, 0, niceValue); err != nil {
		ps.logger.Warn().Err(err).Int("nice", niceValue).Msg("Failed to set nice priority")
		return err
	}
	ps.logger.Debug().Int("nice", niceValue).Msg("Nice priority set as fallback")
	return nil
}
// SetAudioProcessingPriority sets high SCHED_FIFO priority (80) for critical
// audio processing threads.
func (ps *PriorityScheduler) SetAudioProcessingPriority() error {
	return ps.SetThreadPriority(AudioHighPriority, SCHED_FIFO)
}

// SetAudioIOPriority sets medium SCHED_FIFO priority (60) for audio I/O threads.
func (ps *PriorityScheduler) SetAudioIOPriority() error {
	return ps.SetThreadPriority(AudioMediumPriority, SCHED_FIFO)
}

// SetAudioBackgroundPriority sets low SCHED_FIFO priority (40) for background
// audio tasks.
func (ps *PriorityScheduler) SetAudioBackgroundPriority() error {
	return ps.SetThreadPriority(AudioLowPriority, SCHED_FIFO)
}

// ResetPriority restores normal (SCHED_NORMAL, priority 0) scheduling.
func (ps *PriorityScheduler) ResetPriority() error {
	return ps.SetThreadPriority(NormalPriority, SCHED_NORMAL)
}

// Disable disables priority scheduling (useful for testing or fallback);
// subsequent Set*/Reset calls become no-ops.
func (ps *PriorityScheduler) Disable() {
	ps.enabled = false
	ps.logger.Info().Msg("Priority scheduling disabled")
}

// Enable re-enables priority scheduling.
func (ps *PriorityScheduler) Enable() {
	ps.enabled = true
	ps.logger.Info().Msg("Priority scheduling enabled")
}
// Global priority scheduler instance (lazily created).
var globalPriorityScheduler *PriorityScheduler

// GetPriorityScheduler returns the global priority scheduler, creating it on
// first use.
//
// NOTE(review): this lazy init has no synchronization (and this file does not
// import sync, so sync.Once cannot be added here without touching imports);
// concurrent first calls could each build an instance — confirm callers are
// single-goroutine or add locking.
func GetPriorityScheduler() *PriorityScheduler {
	if globalPriorityScheduler == nil {
		globalPriorityScheduler = NewPriorityScheduler()
	}
	return globalPriorityScheduler
}

// SetAudioThreadPriority is a convenience wrapper for high-priority audio
// processing on the calling thread.
func SetAudioThreadPriority() error {
	return GetPriorityScheduler().SetAudioProcessingPriority()
}

// SetAudioIOThreadPriority is a convenience wrapper for medium-priority audio
// I/O on the calling thread.
func SetAudioIOThreadPriority() error {
	return GetPriorityScheduler().SetAudioIOPriority()
}

// ResetThreadPriority is a convenience wrapper restoring normal scheduling.
func ResetThreadPriority() error {
	return GetPriorityScheduler().ResetPriority()
}

View File

@ -1,384 +0,0 @@
package audio
import (
"bufio"
"fmt"
"os"
"strconv"
"strings"
"sync"
"time"
"github.com/jetkvm/kvm/internal/logging"
"github.com/rs/zerolog"
)
// Constants for process monitoring
const (
	// System constants
	pageSize          = 4096   // bytes per page, used to convert RSS pages to bytes
	maxCPUPercent     = 100.0  // upper clamp for computed CPU usage
	minCPUPercent     = 0.01   // lower clamp / liveness value for computed CPU usage
	defaultClockTicks = 250.0  // fallback Hz; common for embedded ARM systems
	defaultMemoryGB   = 8      // fallback total memory when /proc/meminfo is unreadable
	// Monitoring thresholds
	maxWarmupSamples    = 3  // samples counted before a process is "warmed up"
	warmupCPUSamples    = 2  // idle samples during warmup still report minCPUPercent
	logThrottleInterval = 10
	// Channel buffer size
	metricsChannelBuffer = 100
	// Clock tick detection ranges (plausibility bounds for detected Hz)
	minValidClockTicks = 50
	maxValidClockTicks = 1000
)

// ProcessMetrics represents CPU and memory usage metrics for a process
// at a single sample timestamp.
type ProcessMetrics struct {
	PID           int       `json:"pid"`
	CPUPercent    float64   `json:"cpu_percent"`
	MemoryRSS     int64     `json:"memory_rss_bytes"`
	MemoryVMS     int64     `json:"memory_vms_bytes"`
	MemoryPercent float64   `json:"memory_percent"`
	Timestamp     time.Time `json:"timestamp"`
	ProcessName   string    `json:"process_name"`
}

// ProcessMonitor periodically samples /proc for a set of registered PIDs and
// publishes ProcessMetrics on a buffered channel.
type ProcessMonitor struct {
	logger         zerolog.Logger
	mutex          sync.RWMutex              // guards monitoredPIDs and running
	monitoredPIDs  map[int]*processState     // per-PID CPU-delta state
	running        bool
	stopChan       chan struct{}             // closed by Stop to end monitorLoop
	metricsChan    chan ProcessMetrics       // non-blocking publication channel
	updateInterval time.Duration
	totalMemory    int64                     // cached by memoryOnce
	memoryOnce     sync.Once
	clockTicks     float64                   // cached by clockTicksOnce
	clockTicksOnce sync.Once
}

// processState tracks the previous sample needed for CPU-delta calculation.
type processState struct {
	name          string
	lastCPUTime   int64     // utime+stime at the previous sample (clock ticks)
	lastSysTime   int64
	lastUserTime  int64
	lastSample    time.Time // wall-clock time of the previous sample
	warmupSamples int       // number of samples taken so far, capped at maxWarmupSamples
}
// NewProcessMonitor creates a stopped process monitor; call Start to begin
// sampling.
func NewProcessMonitor() *ProcessMonitor {
	pm := &ProcessMonitor{
		logger:         logging.GetDefaultLogger().With().Str("component", "process-monitor").Logger(),
		monitoredPIDs:  map[int]*processState{},
		stopChan:       make(chan struct{}),
		metricsChan:    make(chan ProcessMetrics, metricsChannelBuffer),
		updateInterval: GetMetricsUpdateInterval(),
	}
	return pm
}
// Start begins monitoring processes in a background goroutine. It is a no-op
// when the monitor is already running.
func (pm *ProcessMonitor) Start() {
	pm.mutex.Lock()
	defer pm.mutex.Unlock()
	if pm.running {
		return
	}
	// Re-arm the stop signal. Stop closes the previous channel, and a closed
	// channel would make a restarted monitorLoop exit immediately, so a
	// fresh channel is required for every start.
	pm.stopChan = make(chan struct{})
	pm.running = true
	go pm.monitorLoop()
	pm.logger.Info().Msg("Process monitor started")
}
// Stop stops the monitoring loop by closing the current stop channel. The
// running flag guards against a double close when Stop is called twice.
func (pm *ProcessMonitor) Stop() {
	pm.mutex.Lock()
	defer pm.mutex.Unlock()
	if !pm.running {
		return
	}
	pm.running = false
	close(pm.stopChan)
	pm.logger.Info().Msg("Process monitor stopped")
}
// AddProcess registers pid under the given display name for monitoring;
// an existing entry for the same pid is replaced.
func (pm *ProcessMonitor) AddProcess(pid int, name string) {
	pm.mutex.Lock()
	defer pm.mutex.Unlock()
	state := &processState{name: name, lastSample: time.Now()}
	pm.monitoredPIDs[pid] = state
	pm.logger.Info().Int("pid", pid).Str("name", name).Msg("Added process to monitor")
}

// RemoveProcess unregisters pid from monitoring; unknown pids are ignored.
func (pm *ProcessMonitor) RemoveProcess(pid int) {
	pm.mutex.Lock()
	defer pm.mutex.Unlock()
	delete(pm.monitoredPIDs, pid)
	pm.logger.Info().Int("pid", pid).Msg("Removed process from monitor")
}
// GetMetricsChan returns the channel on which monitorLoop publishes metrics.
// Publication is non-blocking, so slow consumers miss samples rather than
// stalling the monitor.
func (pm *ProcessMonitor) GetMetricsChan() <-chan ProcessMetrics {
	return pm.metricsChan
}

// GetCurrentMetrics synchronously samples all monitored processes; PIDs whose
// /proc entries cannot be read are silently skipped.
//
// NOTE(review): collectMetrics mutates each processState while only the read
// lock is held here, so concurrent callers (or the monitor loop) can race on
// those fields — confirm or tighten the locking.
func (pm *ProcessMonitor) GetCurrentMetrics() []ProcessMetrics {
	pm.mutex.RLock()
	defer pm.mutex.RUnlock()
	var metrics []ProcessMetrics
	for pid, state := range pm.monitoredPIDs {
		if metric, err := pm.collectMetrics(pid, state); err == nil {
			metrics = append(metrics, metric)
		}
	}
	return metrics
}
// monitorLoop drives periodic metric collection until stopChan is closed.
func (pm *ProcessMonitor) monitorLoop() {
	ticker := time.NewTicker(pm.updateInterval)
	defer ticker.Stop()

	for {
		select {
		case <-ticker.C:
			pm.collectAllMetrics()
		case <-pm.stopChan:
			return
		}
	}
}
// collectAllMetrics samples every monitored process once. The PID/state
// snapshot is taken under the read lock, collection happens outside it, and
// PIDs whose /proc entries fail to read are treated as dead and removed.
// Sends to metricsChan are non-blocking: samples are dropped when full.
func (pm *ProcessMonitor) collectAllMetrics() {
	pm.mutex.RLock()
	pidsToCheck := make([]int, 0, len(pm.monitoredPIDs))
	states := make([]*processState, 0, len(pm.monitoredPIDs))
	for pid, state := range pm.monitoredPIDs {
		pidsToCheck = append(pidsToCheck, pid)
		states = append(states, state)
	}
	pm.mutex.RUnlock()
	deadPIDs := make([]int, 0)
	for i, pid := range pidsToCheck {
		if metric, err := pm.collectMetrics(pid, states[i]); err == nil {
			// Non-blocking publish: drop the sample when the channel is full.
			select {
			case pm.metricsChan <- metric:
			default:
			}
		} else {
			deadPIDs = append(deadPIDs, pid)
		}
	}
	for _, pid := range deadPIDs {
		pm.RemoveProcess(pid)
	}
}
// collectMetrics reads /proc/<pid>/stat, computes CPU and memory usage for a
// single sample, and advances the per-process state used for the next delta.
// Returns an error only when the stat file is unreadable or malformed;
// individual numeric parse failures fall back to zero values (errors ignored).
//
// Field indices follow the whitespace-split stat line: 13=utime, 14=stime,
// 22=vsize (bytes), 23=rss (pages) — NOTE(review): this assumes the process
// name (field 1) contains no spaces; a name like "(a b)" would shift fields.
func (pm *ProcessMonitor) collectMetrics(pid int, state *processState) (ProcessMetrics, error) {
	now := time.Now()
	metric := ProcessMetrics{
		PID:         pid,
		Timestamp:   now,
		ProcessName: state.name,
	}
	statPath := fmt.Sprintf("/proc/%d/stat", pid)
	statData, err := os.ReadFile(statPath)
	if err != nil {
		return metric, err
	}
	fields := strings.Fields(string(statData))
	if len(fields) < 24 {
		return metric, fmt.Errorf("invalid stat format")
	}
	utime, _ := strconv.ParseInt(fields[13], 10, 64)
	stime, _ := strconv.ParseInt(fields[14], 10, 64)
	totalCPUTime := utime + stime
	vsize, _ := strconv.ParseInt(fields[22], 10, 64)
	rss, _ := strconv.ParseInt(fields[23], 10, 64)
	// RSS is reported in pages; convert to bytes.
	metric.MemoryRSS = rss * pageSize
	metric.MemoryVMS = vsize
	// Calculate CPU percentage
	metric.CPUPercent = pm.calculateCPUPercent(totalCPUTime, state, now)
	// Increment warmup counter (capped at maxWarmupSamples)
	if state.warmupSamples < maxWarmupSamples {
		state.warmupSamples++
	}
	// Calculate memory percentage (RSS / total system memory)
	if totalMem := pm.getTotalMemory(); totalMem > 0 {
		metric.MemoryPercent = float64(metric.MemoryRSS) / float64(totalMem) * 100.0
	}
	// Update state for next calculation
	state.lastCPUTime = totalCPUTime
	state.lastUserTime = utime
	state.lastSysTime = stime
	state.lastSample = now
	return metric, nil
}
// calculateCPUPercent computes CPU usage between the previous and current
// samples, clamped into [minCPUPercent, maxCPUPercent].
//
// The very first sample for a process only establishes a baseline and
// reports 0.0. AddProcess seeds lastSample with time.Now() (never the zero
// time), so lastSample.IsZero() alone never fires; the warmup counter is the
// reliable first-sample indicator. Without this check, the first delta would
// span the process's whole lifetime of CPU ticks and clamp to 100%.
func (pm *ProcessMonitor) calculateCPUPercent(totalCPUTime int64, state *processState, now time.Time) float64 {
	if state.lastSample.IsZero() || state.warmupSamples == 0 {
		// First sample - baseline only; collectMetrics stores the counters.
		return 0.0
	}
	timeDelta := now.Sub(state.lastSample).Seconds()
	cpuDelta := float64(totalCPUTime - state.lastCPUTime)
	if timeDelta <= 0 {
		return 0.0
	}
	if cpuDelta > 0 {
		// Convert from clock ticks to seconds using the detected tick rate.
		clockTicks := pm.getClockTicks()
		cpuSeconds := cpuDelta / clockTicks
		cpuPercent := (cpuSeconds / timeDelta) * 100.0
		// Clamp into the valid reporting range.
		if cpuPercent > maxCPUPercent {
			cpuPercent = maxCPUPercent
		}
		if cpuPercent < minCPUPercent {
			cpuPercent = minCPUPercent
		}
		return cpuPercent
	}
	// No CPU delta - the process was idle over this interval.
	if state.warmupSamples < warmupCPUSamples {
		// During warmup report a small non-zero value to show liveness.
		return minCPUPercent
	}
	return 0.0
}
// getClockTicks lazily detects the kernel tick rate (Hz) used to convert
// /proc stat clock ticks into seconds. Detection order: the HZ= kernel
// command-line parameter, then /proc/timer_list's tick_period; otherwise it
// falls back to defaultClockTicks.
//
// Fix: the original logged "Detected system clock ticks" only inside a branch
// that could never run (it compared against defaultClockTicks immediately
// after assigning it), so detections were never logged and the fallback
// warning was the only output. Detection is now logged at the point of
// detection, and the warning fires only on the true fallback path.
func (pm *ProcessMonitor) getClockTicks() float64 {
	pm.clockTicksOnce.Do(func() {
		// 1) Kernel command line may carry an explicit HZ= parameter.
		if data, err := os.ReadFile("/proc/cmdline"); err == nil {
			for _, field := range strings.Fields(string(data)) {
				if strings.HasPrefix(field, "HZ=") {
					if hz, err := strconv.ParseFloat(field[3:], 64); err == nil && hz > 0 {
						pm.clockTicks = hz
						pm.logger.Info().Float64("clock_ticks", hz).Msg("Detected system clock ticks")
						return
					}
				}
			}
		}
		// 2) /proc/timer_list reports tick_period in nanoseconds; convert to Hz
		// and accept it only within the plausible range.
		if data, err := os.ReadFile("/proc/timer_list"); err == nil {
			for _, line := range strings.Split(string(data), "\n") {
				if strings.Contains(line, "tick_period:") {
					fields := strings.Fields(line)
					if len(fields) >= 2 {
						if period, err := strconv.ParseInt(fields[1], 10, 64); err == nil && period > 0 {
							hz := 1000000000.0 / float64(period)
							if hz >= minValidClockTicks && hz <= maxValidClockTicks {
								pm.clockTicks = hz
								pm.logger.Info().Float64("clock_ticks", hz).Msg("Detected system clock ticks")
								return
							}
						}
					}
				}
			}
		}
		// 3) Fallback: most embedded ARM systems (like jetKVM) use 250 Hz or
		// 1000 Hz rather than the traditional 100 Hz.
		pm.clockTicks = defaultClockTicks
		pm.logger.Warn().Float64("clock_ticks", pm.clockTicks).Msg("Using fallback clock ticks value")
	})
	return pm.clockTicks
}
// getTotalMemory lazily determines total system memory in bytes from
// /proc/meminfo, falling back to defaultMemoryGB when the file cannot be
// read or the MemTotal line cannot be parsed. The result is cached.
func (pm *ProcessMonitor) getTotalMemory() int64 {
	pm.memoryOnce.Do(func() {
		// Default to the fallback; a successful parse overwrites it.
		pm.totalMemory = int64(defaultMemoryGB) * 1024 * 1024 * 1024

		file, err := os.Open("/proc/meminfo")
		if err != nil {
			return
		}
		defer file.Close()

		scanner := bufio.NewScanner(file)
		for scanner.Scan() {
			line := scanner.Text()
			if !strings.HasPrefix(line, "MemTotal:") {
				continue
			}
			// "MemTotal:  16384 kB" -> bytes.
			if fields := strings.Fields(line); len(fields) >= 2 {
				if kb, err := strconv.ParseInt(fields[1], 10, 64); err == nil {
					pm.totalMemory = kb * 1024
				}
			}
			break
		}
	})
	return pm.totalMemory
}
// GetTotalMemory returns total system memory in bytes (public wrapper around
// the lazily cached getTotalMemory).
func (pm *ProcessMonitor) GetTotalMemory() int64 {
	return pm.getTotalMemory()
}

// Global process monitor instance, created once on first use.
var globalProcessMonitor *ProcessMonitor
var processMonitorOnce sync.Once

// GetProcessMonitor returns the global process monitor, creating and starting
// it on the first call (initialization is guarded by sync.Once).
func GetProcessMonitor() *ProcessMonitor {
	processMonitorOnce.Do(func() {
		globalProcessMonitor = NewProcessMonitor()
		globalProcessMonitor.Start()
	})
	return globalProcessMonitor
}

View File

@ -1,208 +0,0 @@
package audio
import (
"context"
"fmt"
"sync"
"time"
"github.com/jetkvm/kvm/internal/logging"
"github.com/pion/webrtc/v4/pkg/media"
"github.com/rs/zerolog"
)
// AudioRelay handles forwarding audio frames from the audio server subprocess
// to WebRTC without any CGO audio processing. This runs in the main process.
type AudioRelay struct {
	// Declared first for ARM32 int64 alignment; note that in this file the
	// counters are actually updated under mutex (see incrementRelayed), not
	// via sync/atomic.
	framesRelayed int64
	framesDropped int64
	client        *AudioClient       // IPC client to the audio output subprocess
	ctx           context.Context    // lifecycle context for relayLoop
	cancel        context.CancelFunc // cancels ctx on Stop
	wg            sync.WaitGroup     // tracks relayLoop
	logger        *zerolog.Logger
	running       bool               // guarded by mutex
	mutex         sync.RWMutex
	// WebRTC integration
	audioTrack AudioTrackWriter // destination track; may be nil until set
	config     AudioConfig
	muted      bool // when true, silence is forwarded instead of frames
}

// AudioTrackWriter is the minimal WebRTC track interface the relay needs.
type AudioTrackWriter interface {
	WriteSample(sample media.Sample) error
}
// NewAudioRelay creates an audio relay for the main process; call Start to
// begin forwarding frames.
func NewAudioRelay() *AudioRelay {
	logger := logging.GetDefaultLogger().With().Str("component", "audio-relay").Logger()
	ctx, cancel := context.WithCancel(context.Background())
	return &AudioRelay{ctx: ctx, cancel: cancel, logger: &logger}
}
// Start begins relaying frames from the subprocess to the given WebRTC track.
// It is a no-op when already running. On connection failure the relay state
// is left untouched so Start can simply be retried.
func (r *AudioRelay) Start(audioTrack AudioTrackWriter, config AudioConfig) error {
	r.mutex.Lock()
	defer r.mutex.Unlock()
	if r.running {
		return nil // Already running
	}
	// Connect first; only mutate relay state once the connection succeeds
	// (the original assigned client/track/config before connecting, leaving
	// half-initialized fields behind on failure).
	client := NewAudioClient()
	if err := client.Connect(); err != nil {
		return fmt.Errorf("failed to connect to audio output server: %w", err)
	}
	// Re-create the context so the relay can be restarted after Stop
	// (Stop cancels the previous context, which would otherwise make a
	// restarted relayLoop exit immediately).
	r.ctx, r.cancel = context.WithCancel(context.Background())
	r.client = client
	r.audioTrack = audioTrack
	r.config = config
	// Start relay goroutine
	r.wg.Add(1)
	go r.relayLoop()
	r.running = true
	r.logger.Info().Msg("Audio relay started")
	return nil
}
// Stop halts the relay, waits for the relay goroutine to exit, and
// disconnects from the audio output server. Safe to call when not running.
func (r *AudioRelay) Stop() {
	r.mutex.Lock()
	defer r.mutex.Unlock()

	if !r.running {
		return
	}

	// Cancel the loop and wait for it to drain before dropping the client.
	r.cancel()
	r.wg.Wait()

	if c := r.client; c != nil {
		c.Disconnect()
		r.client = nil
	}

	r.running = false
	r.logger.Info().Msg("Audio relay stopped")
}
// SetMuted sets the relay-local mute state (silence is forwarded while muted).
func (r *AudioRelay) SetMuted(muted bool) {
	r.mutex.Lock()
	defer r.mutex.Unlock()
	r.muted = muted
}

// IsMuted reports whether audio is muted, combining the relay-local flag with
// the global IsAudioMuted() state.
func (r *AudioRelay) IsMuted() bool {
	r.mutex.RLock()
	defer r.mutex.RUnlock()
	return r.muted || IsAudioMuted()
}

// GetStats returns the relayed and dropped frame counters.
func (r *AudioRelay) GetStats() (framesRelayed, framesDropped int64) {
	r.mutex.RLock()
	defer r.mutex.RUnlock()
	return r.framesRelayed, r.framesDropped
}

// UpdateTrack swaps the WebRTC audio track the relay forwards frames to.
func (r *AudioRelay) UpdateTrack(audioTrack AudioTrackWriter) {
	r.mutex.Lock()
	defer r.mutex.Unlock()
	r.audioTrack = audioTrack
}
// relayLoop continuously receives frames from the subprocess and forwards
// them to WebRTC until the context is cancelled. After ten consecutive
// receive failures it gives up and exits (no automatic restart from here).
func (r *AudioRelay) relayLoop() {
	defer r.wg.Done()
	r.logger.Debug().Msg("Audio relay loop started")
	const maxConsecutiveErrors = 10
	consecutiveErrors := 0
	for {
		select {
		case <-r.ctx.Done():
			r.logger.Debug().Msg("Audio relay loop stopping")
			return
		default:
			frame, err := r.client.ReceiveFrame()
			if err != nil {
				consecutiveErrors++
				r.logger.Error().Err(err).Int("consecutive_errors", consecutiveErrors).Msg("Failed to receive audio frame")
				r.incrementDropped()
				if consecutiveErrors >= maxConsecutiveErrors {
					r.logger.Error().Msg("Too many consecutive errors, stopping relay")
					return
				}
				// Brief backoff before retrying the receive.
				time.Sleep(10 * time.Millisecond)
				continue
			}
			consecutiveErrors = 0
			if err := r.forwardToWebRTC(frame); err != nil {
				r.logger.Warn().Err(err).Msg("Failed to forward frame to WebRTC")
				r.incrementDropped()
			} else {
				r.incrementRelayed()
			}
		}
	}
}
// forwardToWebRTC writes one frame to the WebRTC audio track, substituting a
// zeroed buffer of equal length when muted. A nil track is not an error; the
// frame is simply discarded.
//
// NOTE(review): only the relay-local muted flag is consulted here, whereas
// IsMuted() also ORs in the global IsAudioMuted() — confirm whether global
// mute is meant to silence forwarded frames too.
func (r *AudioRelay) forwardToWebRTC(frame []byte) error {
	r.mutex.RLock()
	audioTrack := r.audioTrack
	config := r.config
	muted := r.muted
	r.mutex.RUnlock()
	if audioTrack == nil {
		return nil // No audio track available
	}
	// Prepare sample data
	var sampleData []byte
	if muted {
		// Send silence when muted (zeroed buffer, same length)
		sampleData = make([]byte, len(frame))
	} else {
		sampleData = frame
	}
	// Write sample to WebRTC track
	return audioTrack.WriteSample(media.Sample{
		Data:     sampleData,
		Duration: config.FrameSize,
	})
}
// incrementRelayed increments the relayed-frames counter under r.mutex.
// (The counter is mutex-guarded, not updated via sync/atomic, despite the
// struct's alignment comment.)
func (r *AudioRelay) incrementRelayed() {
	r.mutex.Lock()
	r.framesRelayed++
	r.mutex.Unlock()
}

// incrementDropped increments the dropped-frames counter under r.mutex.
func (r *AudioRelay) incrementDropped() {
	r.mutex.Lock()
	r.framesDropped++
	r.mutex.Unlock()
}

View File

@ -1,109 +0,0 @@
package audio
import (
"sync"
)
// Global relay instance for the main process; a non-nil globalRelay means the
// relay is running. Both are guarded by relayMutex.
var (
	globalRelay *AudioRelay
	relayMutex  sync.RWMutex
)
// StartAudioRelay starts the relay system for the main process, replacing the
// CGO-based audio path. audioTrack may be nil initially and supplied later
// via UpdateAudioRelayTrack. A no-op when a relay is already running.
func StartAudioRelay(audioTrack AudioTrackWriter) error {
	relayMutex.Lock()
	defer relayMutex.Unlock()

	if globalRelay != nil {
		return nil // Already running
	}

	relay := NewAudioRelay()
	if err := relay.Start(audioTrack, GetAudioConfig()); err != nil {
		return err
	}
	globalRelay = relay
	return nil
}
// StopAudioRelay stops and forgets the global relay, if one is running.
func StopAudioRelay() {
	relayMutex.Lock()
	defer relayMutex.Unlock()

	if globalRelay == nil {
		return
	}
	globalRelay.Stop()
	globalRelay = nil
}
// SetAudioRelayMuted sets the mute state on the running relay; a no-op when
// no relay is running. RLock is sufficient here: it only protects the
// globalRelay pointer, and SetMuted takes the relay's own lock.
func SetAudioRelayMuted(muted bool) {
	relayMutex.RLock()
	defer relayMutex.RUnlock()
	if globalRelay != nil {
		globalRelay.SetMuted(muted)
	}
}

// IsAudioRelayMuted returns the relay's mute state, or false when no relay
// is running.
func IsAudioRelayMuted() bool {
	relayMutex.RLock()
	defer relayMutex.RUnlock()
	if globalRelay != nil {
		return globalRelay.IsMuted()
	}
	return false
}

// GetAudioRelayStats returns relay frame counters, or zeros when no relay
// is running.
func GetAudioRelayStats() (framesRelayed, framesDropped int64) {
	relayMutex.RLock()
	defer relayMutex.RUnlock()
	if globalRelay != nil {
		return globalRelay.GetStats()
	}
	return 0, 0
}

// IsAudioRelayRunning reports whether a global relay instance exists.
func IsAudioRelayRunning() bool {
	relayMutex.RLock()
	defer relayMutex.RUnlock()
	return globalRelay != nil
}
// UpdateAudioRelayTrack updates the WebRTC audio track used by the relay.
// When no relay is running yet, it starts one with the provided track;
// otherwise the existing relay's track is swapped in place.
func UpdateAudioRelayTrack(audioTrack AudioTrackWriter) error {
	relayMutex.Lock()
	defer relayMutex.Unlock()
	if globalRelay == nil {
		// No relay running, start one with the provided track
		relay := NewAudioRelay()
		config := GetAudioConfig()
		if err := relay.Start(audioTrack, config); err != nil {
			return err
		}
		globalRelay = relay
		return nil
	}
	// Update the track in the existing relay
	globalRelay.UpdateTrack(audioTrack)
	return nil
}

View File

@ -1,160 +0,0 @@
package audio
import (
"fmt"
"net"
"syscall"
)
const (
	// Socket buffer sizes optimized for JetKVM's audio workload
	OptimalSocketBuffer = 128 * 1024 // 128KB (32 frames @ 4KB each)
	MaxSocketBuffer     = 256 * 1024 // 256KB for high-load scenarios
	MinSocketBuffer     = 32 * 1024  // 32KB minimum for basic functionality
)

// SocketBufferConfig holds the desired SO_SNDBUF/SO_RCVBUF sizes for a Unix
// socket; when Enabled is false the configuration is ignored entirely.
type SocketBufferConfig struct {
	SendBufferSize int
	RecvBufferSize int
	Enabled        bool
}
// DefaultSocketBufferConfig returns buffer sizes for the normal JetKVM audio
// workload (OptimalSocketBuffer in both directions, enabled).
func DefaultSocketBufferConfig() SocketBufferConfig {
	cfg := SocketBufferConfig{Enabled: true}
	cfg.SendBufferSize = OptimalSocketBuffer
	cfg.RecvBufferSize = OptimalSocketBuffer
	return cfg
}

// HighLoadSocketBufferConfig returns buffer sizes for high-load scenarios
// (MaxSocketBuffer in both directions, enabled).
func HighLoadSocketBufferConfig() SocketBufferConfig {
	cfg := SocketBufferConfig{Enabled: true}
	cfg.SendBufferSize = MaxSocketBuffer
	cfg.RecvBufferSize = MaxSocketBuffer
	return cfg
}
// ConfigureSocketBuffers applies SO_SNDBUF/SO_RCVBUF sizes from config to a
// Unix socket connection. A disabled config is a no-op; the config is
// validated before any syscall is made.
//
// NOTE(review): unixConn.File() returns a duplicated descriptor — setsockopt
// on the dup still targets the same underlying socket, but confirm the dup's
// side effects (blocking-mode interaction) are acceptable here.
func ConfigureSocketBuffers(conn net.Conn, config SocketBufferConfig) error {
	if !config.Enabled {
		return nil
	}
	if err := ValidateSocketBufferConfig(config); err != nil {
		return fmt.Errorf("invalid socket buffer config: %w", err)
	}
	unixConn, ok := conn.(*net.UnixConn)
	if !ok {
		return fmt.Errorf("connection is not a Unix socket")
	}
	file, err := unixConn.File()
	if err != nil {
		return fmt.Errorf("failed to get socket file descriptor: %w", err)
	}
	defer file.Close()
	fd := int(file.Fd())
	if config.SendBufferSize > 0 {
		if err := syscall.SetsockoptInt(fd, syscall.SOL_SOCKET, syscall.SO_SNDBUF, config.SendBufferSize); err != nil {
			return fmt.Errorf("failed to set SO_SNDBUF to %d: %w", config.SendBufferSize, err)
		}
	}
	if config.RecvBufferSize > 0 {
		if err := syscall.SetsockoptInt(fd, syscall.SOL_SOCKET, syscall.SO_RCVBUF, config.RecvBufferSize); err != nil {
			return fmt.Errorf("failed to set SO_RCVBUF to %d: %w", config.RecvBufferSize, err)
		}
	}
	return nil
}
// GetSocketBufferSizes retrieves the current kernel send and receive buffer
// sizes (SO_SNDBUF / SO_RCVBUF, in bytes) for a Unix socket connection.
// Note: Linux reports roughly double the requested value because the kernel
// adds bookkeeping overhead on top of the requested size.
//
// Fix: uses SyscallConn instead of unixConn.File(), so no duplicate file
// descriptor is created per query.
func GetSocketBufferSizes(conn net.Conn) (sendSize, recvSize int, err error) {
	unixConn, ok := conn.(*net.UnixConn)
	if !ok {
		return 0, 0, fmt.Errorf("socket buffer query only supported for Unix sockets")
	}
	raw, err := unixConn.SyscallConn()
	if err != nil {
		return 0, 0, fmt.Errorf("failed to access socket descriptor: %w", err)
	}
	var sockErr error
	ctrlErr := raw.Control(func(fd uintptr) {
		// Get send buffer size.
		sendSize, sockErr = syscall.GetsockoptInt(int(fd), syscall.SOL_SOCKET, syscall.SO_SNDBUF)
		if sockErr != nil {
			sockErr = fmt.Errorf("failed to get SO_SNDBUF: %w", sockErr)
			return
		}
		// Get receive buffer size.
		recvSize, sockErr = syscall.GetsockoptInt(int(fd), syscall.SOL_SOCKET, syscall.SO_RCVBUF)
		if sockErr != nil {
			sockErr = fmt.Errorf("failed to get SO_RCVBUF: %w", sockErr)
		}
	})
	if ctrlErr != nil {
		return 0, 0, fmt.Errorf("failed to control socket descriptor: %w", ctrlErr)
	}
	if sockErr != nil {
		return 0, 0, sockErr
	}
	return sendSize, recvSize, nil
}
// ValidateSocketBufferConfig validates a socket buffer configuration,
// requiring both sizes to fall within [MinSocketBuffer, MaxSocketBuffer].
// A disabled configuration is always considered valid.
func ValidateSocketBufferConfig(config SocketBufferConfig) error {
	if !config.Enabled {
		return nil
	}
	switch {
	case config.SendBufferSize < MinSocketBuffer:
		return fmt.Errorf("send buffer size %d is below minimum %d", config.SendBufferSize, MinSocketBuffer)
	case config.RecvBufferSize < MinSocketBuffer:
		return fmt.Errorf("receive buffer size %d is below minimum %d", config.RecvBufferSize, MinSocketBuffer)
	case config.SendBufferSize > MaxSocketBuffer:
		return fmt.Errorf("send buffer size %d exceeds maximum %d", config.SendBufferSize, MaxSocketBuffer)
	case config.RecvBufferSize > MaxSocketBuffer:
		return fmt.Errorf("receive buffer size %d exceeds maximum %d", config.RecvBufferSize, MaxSocketBuffer)
	}
	return nil
}
// RecordSocketBufferMetrics records the connection's current send/receive
// buffer sizes into the Prometheus gauges for the given component label.
// A nil connection or a failed size query is silently ignored so metrics
// collection never disturbs the audio path.
func RecordSocketBufferMetrics(conn net.Conn, component string) {
	if conn == nil {
		return
	}
	// Query the kernel for the effective buffer sizes.
	sendSize, recvSize, err := GetSocketBufferSizes(conn)
	if err != nil {
		// Best-effort: skip recording rather than failing the caller.
		return
	}
	// socketBufferSizeGauge is declared elsewhere in this package.
	socketBufferSizeGauge.WithLabelValues(component, "send").Set(float64(sendSize))
	socketBufferSizeGauge.WithLabelValues(component, "receive").Set(float64(recvSize))
}
// RecordSocketBufferOverflow increments the overflow counter for the given
// component/buffer-type label pair (bufferType is e.g. "send" or "receive").
func RecordSocketBufferOverflow(component, bufferType string) {
	socketBufferOverflowCounter.WithLabelValues(component, bufferType).Inc()
}
// UpdateSocketBufferUtilization sets the utilization gauge for the given
// component/buffer-type label pair; utilizationPercent is expected as a
// percentage value.
func UpdateSocketBufferUtilization(component, bufferType string, utilizationPercent float64) {
	socketBufferUtilizationGauge.WithLabelValues(component, bufferType).Set(utilizationPercent)
}

View File

@ -1,429 +0,0 @@
//go:build cgo
// +build cgo
package audio
import (
"context"
"fmt"
"os"
"os/exec"
"sync"
"sync/atomic"
"syscall"
"time"
"github.com/jetkvm/kvm/internal/logging"
"github.com/rs/zerolog"
)
// Restart policy for the audio server subprocess: crashes are retried with
// exponential backoff, giving up after maxRestartAttempts crashes within
// restartWindow.
const (
	// Maximum number of restart attempts within the restart window
	maxRestartAttempts = 5
	// Time window for counting restart attempts
	restartWindow = 5 * time.Minute
	// Initial delay between restart attempts (doubled per attempt)
	restartDelay = 2 * time.Second
	// Maximum restart delay (cap for the exponential backoff)
	maxRestartDelay = 30 * time.Second
)
// AudioServerSupervisor manages the audio server subprocess lifecycle:
// it spawns the process, waits for exits, and restarts it with exponential
// backoff until the restart budget is exhausted or Stop is called.
type AudioServerSupervisor struct {
	ctx    context.Context    // governs the subprocess and supervision loop
	cancel context.CancelFunc // cancels ctx during Stop
	logger *zerolog.Logger
	mutex  sync.RWMutex // guards the mutable fields below
	running int32       // 1 while supervising (accessed atomically)
	// Process management
	cmd        *exec.Cmd
	processPID int // PID of the live subprocess; 0 when none
	// Restart management
	restartAttempts []time.Time // timestamps of recent crash-restarts
	lastExitCode    int
	lastExitTime    time.Time
	// Channels for coordination
	processDone chan struct{} // closed when the supervision loop ends
	stopChan    chan struct{} // closed by Stop to request shutdown
	// Process monitoring
	processMonitor *ProcessMonitor
	// Callbacks (optional; see SetCallbacks)
	onProcessStart func(pid int)
	onProcessExit  func(pid int, exitCode int, crashed bool)
	onRestart      func(attempt int, delay time.Duration)
}
// NewAudioServerSupervisor creates a new audio server supervisor with a
// fresh cancellable context and the package-wide process monitor.
func NewAudioServerSupervisor() *AudioServerSupervisor {
	supCtx, cancelFn := context.WithCancel(context.Background())
	supLogger := logging.GetDefaultLogger().With().Str("component", "audio-supervisor").Logger()
	return &AudioServerSupervisor{
		ctx:            supCtx,
		cancel:         cancelFn,
		logger:         &supLogger,
		processDone:    make(chan struct{}),
		stopChan:       make(chan struct{}),
		processMonitor: GetProcessMonitor(),
	}
}
// SetCallbacks sets optional callbacks for process lifecycle events.
// Pass nil for any event the caller does not care about.
func (s *AudioServerSupervisor) SetCallbacks(
	onStart func(pid int),
	onExit func(pid int, exitCode int, crashed bool),
	onRestart func(attempt int, delay time.Duration),
) {
	s.mutex.Lock()
	s.onProcessStart = onStart
	s.onProcessExit = onExit
	s.onRestart = onRestart
	s.mutex.Unlock()
}
// Start begins supervising the audio server process. A second call while
// already running returns an error instead of spawning another loop.
func (s *AudioServerSupervisor) Start() error {
	if swapped := atomic.CompareAndSwapInt32(&s.running, 0, 1); !swapped {
		return fmt.Errorf("supervisor already running")
	}
	s.logger.Info().Msg("starting audio server supervisor")
	// The loop runs until Stop is called or the restart budget is spent.
	go s.supervisionLoop()
	return nil
}
// Stop gracefully stops the audio server and supervisor. It signals the
// supervision loop via stopChan and context cancellation, then waits up to
// 10 seconds for the loop to finish before force-killing the subprocess.
// Calling Stop when already stopped is a no-op.
func (s *AudioServerSupervisor) Stop() error {
	if !atomic.CompareAndSwapInt32(&s.running, 1, 0) {
		return nil // Already stopped
	}
	s.logger.Info().Msg("stopping audio server supervisor")
	// Signal stop and wait for cleanup. Closing stopChan wakes the
	// supervision loop; cancel() also terminates the child started with
	// exec.CommandContext.
	close(s.stopChan)
	s.cancel()
	// Wait for the supervision loop to finish (it closes processDone).
	select {
	case <-s.processDone:
		s.logger.Info().Msg("audio server process stopped gracefully")
	case <-time.After(10 * time.Second):
		s.logger.Warn().Msg("audio server process did not stop gracefully, forcing termination")
		s.forceKillProcess()
	}
	return nil
}
// IsRunning reports whether the supervision loop is currently active.
func (s *AudioServerSupervisor) IsRunning() bool {
	state := atomic.LoadInt32(&s.running)
	return state == 1
}
// GetProcessPID returns the current subprocess PID, or 0 when no process
// is running.
func (s *AudioServerSupervisor) GetProcessPID() int {
	s.mutex.RLock()
	pid := s.processPID
	s.mutex.RUnlock()
	return pid
}
// GetLastExitInfo returns the exit code and timestamp recorded for the most
// recent subprocess exit.
func (s *AudioServerSupervisor) GetLastExitInfo() (exitCode int, exitTime time.Time) {
	s.mutex.RLock()
	exitCode = s.lastExitCode
	exitTime = s.lastExitTime
	s.mutex.RUnlock()
	return exitCode, exitTime
}
// GetProcessMetrics returns the process monitor's metrics entry matching
// the current subprocess PID, or nil when no process is running or no
// sample matches.
func (s *AudioServerSupervisor) GetProcessMetrics() *ProcessMetrics {
	s.mutex.RLock()
	pid := s.processPID
	s.mutex.RUnlock()
	if pid == 0 {
		return nil
	}
	snapshot := s.processMonitor.GetCurrentMetrics()
	for i := range snapshot {
		if snapshot[i].PID == pid {
			// Return a copy so callers never alias the monitor's slice.
			m := snapshot[i]
			return &m
		}
	}
	return nil
}
// supervisionLoop is the main supervision loop: it (re)starts the audio
// server process, waits for it to exit, and applies the restart policy
// (exponential backoff, bounded attempts within a window) until told to
// stop. It closes processDone on return so Stop can wait for cleanup.
func (s *AudioServerSupervisor) supervisionLoop() {
	defer func() {
		close(s.processDone)
		s.logger.Info().Msg("audio server supervision ended")
	}()
	for atomic.LoadInt32(&s.running) == 1 {
		select {
		case <-s.stopChan:
			s.logger.Info().Msg("received stop signal")
			s.terminateProcess()
			return
		case <-s.ctx.Done():
			s.logger.Info().Msg("context cancelled")
			s.terminateProcess()
			return
		default:
			// Start or restart the process.
			if err := s.startProcess(); err != nil {
				s.logger.Error().Err(err).Msg("failed to start audio server process")
				// Check if we should attempt restart.
				if !s.shouldRestart() {
					s.logger.Error().Msg("maximum restart attempts exceeded, stopping supervisor")
					return
				}
				delay := s.calculateRestartDelay()
				s.logger.Warn().Dur("delay", delay).Msg("retrying process start after delay")
				// NOTE(review): s.onRestart and s.restartAttempts are read
				// here without holding s.mutex, racing with SetCallbacks /
				// recordRestartAttempt — confirm callbacks are configured
				// before Start and never changed afterwards.
				if s.onRestart != nil {
					s.onRestart(len(s.restartAttempts), delay)
				}
				// Sleep for the backoff, but wake immediately on shutdown.
				select {
				case <-time.After(delay):
				case <-s.stopChan:
					return
				case <-s.ctx.Done():
					return
				}
				continue
			}
			// Block until the process exits; this records exit info and
			// fires the exit callback.
			s.waitForProcessExit()
			// Check if we should restart.
			if !s.shouldRestart() {
				s.logger.Error().Msg("maximum restart attempts exceeded, stopping supervisor")
				return
			}
			// Calculate restart delay.
			delay := s.calculateRestartDelay()
			s.logger.Info().Dur("delay", delay).Msg("restarting audio server process after delay")
			if s.onRestart != nil {
				s.onRestart(len(s.restartAttempts), delay)
			}
			// Wait for restart delay, but wake immediately on shutdown.
			select {
			case <-time.After(delay):
			case <-s.stopChan:
				return
			case <-s.ctx.Done():
				return
			}
		}
	}
}
// startProcess launches the audio server subprocess (this same executable
// re-invoked with --audio-output-server), registers it with the process
// monitor, and fires the onProcessStart callback.
//
// Fix: the original held s.mutex (via defer) while invoking the
// onProcessStart callback; a callback that calls back into a supervisor
// getter taking s.mutex.RLock would deadlock, since Go's RWMutex is not
// reentrant. The lock is now released before monitoring registration and
// the callback.
func (s *AudioServerSupervisor) startProcess() error {
	execPath, err := os.Executable()
	if err != nil {
		return fmt.Errorf("failed to get executable path: %w", err)
	}
	s.mutex.Lock()
	// Bind the command to the supervisor context so cancellation kills
	// the child.
	s.cmd = exec.CommandContext(s.ctx, execPath, "--audio-output-server")
	s.cmd.Stdout = os.Stdout
	s.cmd.Stderr = os.Stderr
	// Start the process.
	if err := s.cmd.Start(); err != nil {
		s.mutex.Unlock()
		return fmt.Errorf("failed to start process: %w", err)
	}
	s.processPID = s.cmd.Process.Pid
	pid := s.processPID
	onStart := s.onProcessStart
	s.mutex.Unlock()
	s.logger.Info().Int("pid", pid).Msg("audio server process started")
	// Add process to monitoring.
	s.processMonitor.AddProcess(pid, "audio-output-server")
	if onStart != nil {
		onStart(pid)
	}
	return nil
}
// waitForProcessExit blocks until the current subprocess exits, records the
// exit code and time, removes the process from monitoring, and fires the
// onProcessExit callback. A non-zero exit code or a kill is treated as a
// crash and counts against the restart budget via recordRestartAttempt.
func (s *AudioServerSupervisor) waitForProcessExit() {
	s.mutex.RLock()
	cmd := s.cmd
	pid := s.processPID
	s.mutex.RUnlock()
	if cmd == nil {
		return
	}
	// Wait for process to exit (this also reaps the child).
	err := cmd.Wait()
	s.mutex.Lock()
	s.lastExitTime = time.Now()
	s.processPID = 0
	var exitCode int
	var crashed bool
	if err != nil {
		if exitError, ok := err.(*exec.ExitError); ok {
			exitCode = exitError.ExitCode()
			crashed = exitCode != 0
		} else {
			// Process was killed, or Wait failed for another reason.
			exitCode = -1
			crashed = true
		}
	} else {
		exitCode = 0
		crashed = false
	}
	s.lastExitCode = exitCode
	s.mutex.Unlock()
	// Remove process from monitoring.
	s.processMonitor.RemoveProcess(pid)
	if crashed {
		s.logger.Error().Int("pid", pid).Int("exit_code", exitCode).Msg("audio server process crashed")
		s.recordRestartAttempt()
	} else {
		s.logger.Info().Int("pid", pid).Msg("audio server process exited gracefully")
	}
	// NOTE(review): callback read without s.mutex — assumes callbacks are
	// configured before Start and not mutated afterwards; confirm.
	if s.onProcessExit != nil {
		s.onProcessExit(pid, exitCode, crashed)
	}
}
// terminateProcess asks the current subprocess to exit with SIGTERM and
// escalates to SIGKILL via forceKillProcess if it has not exited within
// 5 seconds.
//
// NOTE(review): this calls cmd.Wait in a goroutine while waitForProcessExit
// may also call Wait on the same exec.Cmd; exec.Cmd.Wait must not be called
// twice — confirm both paths cannot be active at the same time.
func (s *AudioServerSupervisor) terminateProcess() {
	s.mutex.RLock()
	cmd := s.cmd
	pid := s.processPID
	s.mutex.RUnlock()
	if cmd == nil || cmd.Process == nil {
		return
	}
	s.logger.Info().Int("pid", pid).Msg("terminating audio server process")
	// Send SIGTERM first to allow a graceful shutdown.
	if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
		s.logger.Warn().Err(err).Int("pid", pid).Msg("failed to send SIGTERM")
	}
	// Wait for graceful shutdown in the background so we can time out.
	done := make(chan struct{})
	go func() {
		_ = cmd.Wait()
		close(done)
	}()
	select {
	case <-done:
		s.logger.Info().Int("pid", pid).Msg("audio server process terminated gracefully")
	case <-time.After(5 * time.Second):
		s.logger.Warn().Int("pid", pid).Msg("process did not terminate gracefully, sending SIGKILL")
		s.forceKillProcess()
	}
}
// forceKillProcess forcefully kills the current subprocess (SIGKILL).
func (s *AudioServerSupervisor) forceKillProcess() {
	s.mutex.RLock()
	cmd, pid := s.cmd, s.processPID
	s.mutex.RUnlock()
	if cmd == nil || cmd.Process == nil {
		return
	}
	s.logger.Warn().Int("pid", pid).Msg("force killing audio server process")
	if killErr := cmd.Process.Kill(); killErr != nil {
		s.logger.Error().Err(killErr).Int("pid", pid).Msg("failed to kill process")
	}
}
// shouldRestart determines whether the subprocess should be restarted.
// It returns false once the supervisor is stopping, or when
// maxRestartAttempts crashes have already occurred within restartWindow.
// Stale attempts outside the window are pruned as a side effect.
//
// Fix: the original took only mutex.RLock while rewriting
// s.restartAttempts, a data race against recordRestartAttempt; pruning
// mutates shared state and therefore requires the write lock.
func (s *AudioServerSupervisor) shouldRestart() bool {
	if atomic.LoadInt32(&s.running) == 0 {
		return false // Supervisor is stopping
	}
	s.mutex.Lock()
	defer s.mutex.Unlock()
	// Drop restart attempts that fall outside the sliding window,
	// reusing the existing backing array.
	now := time.Now()
	recent := s.restartAttempts[:0]
	for _, attempt := range s.restartAttempts {
		if now.Sub(attempt) < restartWindow {
			recent = append(recent, attempt)
		}
	}
	s.restartAttempts = recent
	return len(s.restartAttempts) < maxRestartAttempts
}
// recordRestartAttempt records the time of a crash-triggered restart.
func (s *AudioServerSupervisor) recordRestartAttempt() {
	now := time.Now()
	s.mutex.Lock()
	s.restartAttempts = append(s.restartAttempts, now)
	s.mutex.Unlock()
}
// calculateRestartDelay returns the backoff delay before the next restart:
// restartDelay doubled once per recent attempt, capped at maxRestartDelay.
func (s *AudioServerSupervisor) calculateRestartDelay() time.Duration {
	s.mutex.RLock()
	attempts := len(s.restartAttempts)
	s.mutex.RUnlock()
	if attempts == 0 {
		return restartDelay
	}
	// Exponential backoff: restartDelay * 2^attempts, clamped at the cap.
	delay := restartDelay
	for i := 0; i < attempts; i++ {
		delay *= 2
		if delay >= maxRestartDelay {
			return maxRestartDelay
		}
	}
	return delay
}

View File

@ -1,314 +0,0 @@
package audio
import (
"sync"
"sync/atomic"
"unsafe"
)
// ZeroCopyAudioFrame represents an audio frame whose backing buffer can be
// passed between components without copying. Access is guarded by an
// internal RWMutex together with a manual reference count.
type ZeroCopyAudioFrame struct {
	data     []byte
	length   int
	capacity int
	refCount int32
	mutex    sync.RWMutex
	pooled   bool // false once the frame can no longer return to a pool
}

// ZeroCopyFramePool manages reusable zero-copy audio frames: a small
// pre-allocated stack serves as the fast path, backed by a sync.Pool.
type ZeroCopyFramePool struct {
	// Atomic fields MUST be first for ARM32 alignment (int64 fields need 8-byte alignment)
	counter   int64 // frames accounted to the sync.Pool (atomic)
	hitCount  int64 // pool hit counter (atomic)
	missCount int64 // pool miss counter (atomic)
	// Other fields
	pool    sync.Pool
	maxSize int // per-frame payload capacity in bytes
	mutex   sync.RWMutex
	// Memory optimization fields
	preallocated []*ZeroCopyAudioFrame // pre-allocated frames for immediate use
	preallocSize int                   // number of pre-allocated frames
	maxPoolSize  int                   // maximum pool size to prevent memory bloat
}

// NewZeroCopyFramePool creates a pool whose frames each hold up to
// maxFrameSize bytes. Fifteen frames are allocated eagerly so early callers
// never hit the allocator; the sync.Pool covers overflow demand.
func NewZeroCopyFramePool(maxFrameSize int) *ZeroCopyFramePool {
	const (
		eagerFrames = 15 // frames allocated up front
		poolCeiling = 50 // limit on total pooled frames
	)
	newFrame := func() *ZeroCopyAudioFrame {
		return &ZeroCopyAudioFrame{
			data:     make([]byte, 0, maxFrameSize),
			capacity: maxFrameSize,
			pooled:   true,
		}
	}
	eager := make([]*ZeroCopyAudioFrame, eagerFrames)
	for i := range eager {
		eager[i] = newFrame()
	}
	return &ZeroCopyFramePool{
		maxSize:      maxFrameSize,
		preallocated: eager,
		preallocSize: eagerFrames,
		maxPoolSize:  poolCeiling,
		pool: sync.Pool{
			New: func() interface{} { return newFrame() },
		},
	}
}
// Get retrieves a zero-copy frame from the pool. Pre-allocated frames are
// preferred; otherwise the sync.Pool supplies (or allocates) one. The
// returned frame has refCount 1 and zero length.
//
// Fix: the fallback path previously incremented hitCount, so missCount
// stayed permanently at zero and the pool's hit-rate statistic was
// meaningless; the sync.Pool path now counts as a miss.
func (p *ZeroCopyFramePool) Get() *ZeroCopyAudioFrame {
	// Fast path: grab a pre-allocated frame.
	p.mutex.Lock()
	if n := len(p.preallocated); n > 0 {
		frame := p.preallocated[n-1]
		p.preallocated = p.preallocated[:n-1]
		p.mutex.Unlock()
		frame.mutex.Lock()
		frame.refCount = 1
		frame.length = 0
		frame.data = frame.data[:0]
		frame.mutex.Unlock()
		atomic.AddInt64(&p.hitCount, 1)
		return frame
	}
	p.mutex.Unlock()
	// Slow path: sync.Pool, which may allocate a fresh frame via New.
	frame := p.pool.Get().(*ZeroCopyAudioFrame)
	frame.mutex.Lock()
	frame.refCount = 1
	frame.length = 0
	frame.data = frame.data[:0]
	frame.mutex.Unlock()
	atomic.AddInt64(&p.missCount, 1)
	return frame
}
// Put returns a zero-copy frame to the pool once its reference count drops
// to zero. Frames marked unpooled (reallocated or direct-assigned buffers)
// are left to the garbage collector. The pre-allocated stack is refilled
// first; overflow goes to the sync.Pool up to maxPoolSize.
//
// Fix: the original wrapped an atomic load of p.counter in
// mutex.RLock/RUnlock, which protects nothing and implies a false
// invariant; the atomic load alone suffices.
//
// NOTE(review): counter is incremented on every sync.Pool Put but never
// decremented on Get, so after maxPoolSize puts the sync.Pool is bypassed
// forever — presumably unintended; confirm the intended accounting.
func (p *ZeroCopyFramePool) Put(frame *ZeroCopyAudioFrame) {
	if frame == nil || !frame.pooled {
		return
	}
	frame.mutex.Lock()
	frame.refCount--
	if frame.refCount > 0 {
		frame.mutex.Unlock()
		return
	}
	// Last reference released: reset the frame for reuse.
	frame.refCount = 0
	frame.length = 0
	frame.data = frame.data[:0]
	frame.mutex.Unlock()
	// Prefer the pre-allocated stack for fastest reuse.
	p.mutex.Lock()
	if len(p.preallocated) < p.preallocSize {
		p.preallocated = append(p.preallocated, frame)
		p.mutex.Unlock()
		return
	}
	p.mutex.Unlock()
	// Respect the pool size cap; excess frames are left for the GC.
	if atomic.LoadInt64(&p.counter) >= int64(p.maxPoolSize) {
		return
	}
	p.pool.Put(frame)
	atomic.AddInt64(&p.counter, 1)
}
// Data returns a zero-copy view of the frame's current payload.
func (f *ZeroCopyAudioFrame) Data() []byte {
	f.mutex.RLock()
	view := f.data[:f.length]
	f.mutex.RUnlock()
	return view
}
// SetData copies data into the frame, reusing the existing buffer when it
// is large enough (the zero-allocation path). If data exceeds the current
// capacity the buffer is reallocated and the frame is marked unpooled,
// since its buffer no longer matches the pool's frame size. Always returns
// nil (the error return is kept for interface stability).
//
// Fix: when the append path grows the backing array, the capacity field is
// now resynchronized with cap(f.data) instead of going stale.
func (f *ZeroCopyAudioFrame) SetData(data []byte) error {
	f.mutex.Lock()
	defer f.mutex.Unlock()
	if len(data) > f.capacity {
		// Need to reallocate - not zero-copy but necessary.
		f.data = make([]byte, len(data))
		f.capacity = len(data)
		f.pooled = false // can't return to pool anymore
	}
	if cap(f.data) >= len(data) {
		// Reuse existing storage.
		f.data = f.data[:len(data)]
		copy(f.data, data)
	} else {
		// Grow via append and keep the capacity bookkeeping consistent.
		f.data = append(f.data[:0], data...)
		f.capacity = cap(f.data)
	}
	f.length = len(data)
	return nil
}
// SetDataDirect adopts the caller's buffer as the frame's backing storage
// (true zero-copy: no bytes are moved). The frame is marked unpooled since
// its buffer no longer belongs to the pool.
// WARNING: The caller must ensure the buffer remains valid (and unmodified)
// for the frame's lifetime.
func (f *ZeroCopyAudioFrame) SetDataDirect(data []byte) {
	f.mutex.Lock()
	defer f.mutex.Unlock()
	f.data = data
	f.length = len(data)
	f.capacity = cap(data)
	f.pooled = false // Direct assignment means we can't pool this frame
}
// AddRef increments the reference count for shared access to the frame.
func (f *ZeroCopyAudioFrame) AddRef() {
	f.mutex.Lock()
	defer f.mutex.Unlock()
	f.refCount++
}
// Release decrements the reference count.
//
// NOTE(review): unlike Put, Release does not return the frame to any pool
// and does not guard against the count going negative — presumably callers
// pair Release with the pool's Put for pooled frames; confirm intended use.
func (f *ZeroCopyAudioFrame) Release() {
	f.mutex.Lock()
	f.refCount--
	f.mutex.Unlock()
}
// Length returns the number of payload bytes currently held by the frame.
func (f *ZeroCopyAudioFrame) Length() int {
	f.mutex.RLock()
	n := f.length
	f.mutex.RUnlock()
	return n
}
// Capacity returns the frame's buffer capacity in bytes.
func (f *ZeroCopyAudioFrame) Capacity() int {
	f.mutex.RLock()
	c := f.capacity
	f.mutex.RUnlock()
	return c
}
// UnsafePointer returns an unsafe pointer to the first payload byte for
// CGO calls, or nil when the frame is empty.
// WARNING: Only use this for CGO interop; the caller must keep the frame
// alive (and unreleased) for as long as the pointer is in use, since the
// lock is dropped before the pointer is dereferenced.
func (f *ZeroCopyAudioFrame) UnsafePointer() unsafe.Pointer {
	f.mutex.RLock()
	defer f.mutex.RUnlock()
	if len(f.data) == 0 {
		return nil
	}
	return unsafe.Pointer(&f.data[0])
}
// Global zero-copy frame pool
// GetZeroCopyPoolStats returns a point-in-time snapshot of pool statistics,
// including the fast-path hit rate as a percentage of all requests.
func (p *ZeroCopyFramePool) GetZeroCopyPoolStats() ZeroCopyFramePoolStats {
	p.mutex.RLock()
	preallocAvail := len(p.preallocated)
	pooled := atomic.LoadInt64(&p.counter)
	p.mutex.RUnlock()
	hits := atomic.LoadInt64(&p.hitCount)
	misses := atomic.LoadInt64(&p.missCount)
	hitRate := 0.0
	if total := hits + misses; total > 0 {
		hitRate = 100 * float64(hits) / float64(total)
	}
	return ZeroCopyFramePoolStats{
		MaxFrameSize:      p.maxSize,
		MaxPoolSize:       p.maxPoolSize,
		CurrentPoolSize:   pooled,
		PreallocatedCount: int64(preallocAvail),
		PreallocatedMax:   int64(p.preallocSize),
		HitCount:          hits,
		MissCount:         misses,
		HitRate:           hitRate,
	}
}
// ZeroCopyFramePoolStats provides a point-in-time snapshot of zero-copy
// pool statistics, as produced by GetZeroCopyPoolStats.
type ZeroCopyFramePoolStats struct {
	MaxFrameSize      int   // per-frame payload capacity, bytes
	MaxPoolSize       int   // cap on frames held by the sync.Pool
	CurrentPoolSize   int64 // frames currently accounted to the sync.Pool
	PreallocatedCount int64 // pre-allocated frames currently available
	PreallocatedMax   int64 // size of the pre-allocated set
	HitCount          int64
	MissCount         int64
	HitRate           float64 // Percentage
}
var (
	// globalZeroCopyPool is the package-wide frame pool, sized for the
	// largest audio frame (MaxAudioFrameSize is defined elsewhere in this
	// package).
	globalZeroCopyPool = NewZeroCopyFramePool(MaxAudioFrameSize)
)
// GetZeroCopyFrame gets a frame from the global pool.
func GetZeroCopyFrame() *ZeroCopyAudioFrame {
	return globalZeroCopyPool.Get()
}
// GetGlobalZeroCopyPoolStats returns statistics for the global zero-copy pool.
func GetGlobalZeroCopyPoolStats() ZeroCopyFramePoolStats {
	return globalZeroCopyPool.GetZeroCopyPoolStats()
}
// PutZeroCopyFrame returns a frame to the global pool.
func PutZeroCopyFrame(frame *ZeroCopyAudioFrame) {
	globalZeroCopyPool.Put(frame)
}
// ZeroCopyAudioReadEncode reads and encodes one audio frame via the CGO
// helper into a pooled zero-copy frame. It returns (nil, nil) when no data
// was produced and returns the frame to the pool on error or empty reads.
// On success the caller owns the frame and should return it with
// PutZeroCopyFrame when done.
func ZeroCopyAudioReadEncode() (*ZeroCopyAudioFrame, error) {
	frame := GetZeroCopyFrame()
	// Ensure frame has enough capacity for a maximum-size encoded frame.
	// NOTE(review): frame fields are touched here without frame.mutex;
	// presumably safe because the frame is not yet shared — confirm.
	if frame.Capacity() < MaxAudioFrameSize {
		// Reallocate if needed; the frame can no longer be pooled.
		frame.data = make([]byte, MaxAudioFrameSize)
		frame.capacity = MaxAudioFrameSize
		frame.pooled = false
	}
	// Hand the frame's buffer directly to the CGO read+encode call.
	n, err := CGOAudioReadEncode(frame.data[:MaxAudioFrameSize])
	if err != nil {
		PutZeroCopyFrame(frame)
		return nil, err
	}
	if n == 0 {
		// No frame available right now.
		PutZeroCopyFrame(frame)
		return nil, nil
	}
	// Set the actual encoded data length.
	frame.mutex.Lock()
	frame.length = n
	frame.data = frame.data[:n]
	frame.mutex.Unlock()
	return frame, nil
}

141
main.go
View File

@ -2,7 +2,6 @@ package kvm
import ( import (
"context" "context"
"fmt"
"net/http" "net/http"
"os" "os"
"os/signal" "os/signal"
@ -11,106 +10,12 @@ import (
"github.com/gwatts/rootcerts" "github.com/gwatts/rootcerts"
"github.com/jetkvm/kvm/internal/audio" "github.com/jetkvm/kvm/internal/audio"
"github.com/pion/webrtc/v4/pkg/media"
) )
var ( var appCtx context.Context
appCtx context.Context
isAudioServer bool
audioProcessDone chan struct{}
audioSupervisor *audio.AudioServerSupervisor
)
// runAudioServer is now handled by audio.RunAudioOutputServer func Main() {
// This function is kept for backward compatibility but delegates to the audio package
func runAudioServer() {
err := audio.RunAudioOutputServer()
if err != nil {
logger.Error().Err(err).Msg("audio output server failed")
os.Exit(1)
}
}
func startAudioSubprocess() error {
// Start adaptive buffer management for optimal performance
audio.StartAdaptiveBuffering()
// Create audio server supervisor
audioSupervisor = audio.NewAudioServerSupervisor()
// Set the global supervisor for access from audio package
audio.SetAudioOutputSupervisor(audioSupervisor)
// Set up callbacks for process lifecycle events
audioSupervisor.SetCallbacks(
// onProcessStart
func(pid int) {
logger.Info().Int("pid", pid).Msg("audio server process started")
// Start audio relay system for main process without a track initially
// The track will be updated when a WebRTC session is created
if err := audio.StartAudioRelay(nil); err != nil {
logger.Error().Err(err).Msg("failed to start audio relay")
}
},
// onProcessExit
func(pid int, exitCode int, crashed bool) {
if crashed {
logger.Error().Int("pid", pid).Int("exit_code", exitCode).Msg("audio server process crashed")
} else {
logger.Info().Int("pid", pid).Msg("audio server process exited gracefully")
}
// Stop audio relay when process exits
audio.StopAudioRelay()
// Stop adaptive buffering
audio.StopAdaptiveBuffering()
},
// onRestart
func(attempt int, delay time.Duration) {
logger.Warn().Int("attempt", attempt).Dur("delay", delay).Msg("restarting audio server process")
},
)
// Start the supervisor
if err := audioSupervisor.Start(); err != nil {
return fmt.Errorf("failed to start audio supervisor: %w", err)
}
// Monitor supervisor and handle cleanup
go func() {
defer close(audioProcessDone)
// Wait for supervisor to stop
for audioSupervisor.IsRunning() {
time.Sleep(100 * time.Millisecond)
}
logger.Info().Msg("audio supervisor stopped")
}()
return nil
}
func Main(audioServer bool, audioInputServer bool) {
// Initialize channel and set audio server flag
isAudioServer = audioServer
audioProcessDone = make(chan struct{})
// If running as audio server, only initialize audio processing
if isAudioServer {
runAudioServer()
return
}
// If running as audio input server, only initialize audio input processing
if audioInputServer {
err := audio.RunAudioInputServer()
if err != nil {
logger.Error().Err(err).Msg("audio input server failed")
os.Exit(1)
}
return
}
LoadConfig() LoadConfig()
var cancel context.CancelFunc var cancel context.CancelFunc
@ -175,10 +80,30 @@ func Main(audioServer bool, audioInputServer bool) {
// initialize usb gadget // initialize usb gadget
initUsbGadget() initUsbGadget()
// Start audio subprocess // Start non-blocking audio streaming and deliver Opus frames to WebRTC
err = startAudioSubprocess() err = audio.StartNonBlockingAudioStreaming(func(frame []byte) {
// Deliver Opus frame to WebRTC audio track if session is active
if currentSession != nil {
config := audio.GetAudioConfig()
var sampleData []byte
if audio.IsAudioMuted() {
sampleData = make([]byte, len(frame)) // silence
} else {
sampleData = frame
}
if err := currentSession.AudioTrack.WriteSample(media.Sample{
Data: sampleData,
Duration: config.FrameSize,
}); err != nil {
logger.Warn().Err(err).Msg("error writing audio sample")
audio.RecordFrameDropped()
}
} else {
audio.RecordFrameDropped()
}
})
if err != nil { if err != nil {
logger.Warn().Err(err).Msg("failed to start audio subprocess") logger.Warn().Err(err).Msg("failed to start non-blocking audio streaming")
} }
// Initialize session provider for audio events // Initialize session provider for audio events
@ -238,18 +163,8 @@ func Main(audioServer bool, audioInputServer bool) {
<-sigs <-sigs
logger.Info().Msg("JetKVM Shutting Down") logger.Info().Msg("JetKVM Shutting Down")
// Stop audio subprocess and wait for cleanup // Stop non-blocking audio manager
if !isAudioServer { audio.StopNonBlockingAudioStreaming()
if audioSupervisor != nil {
logger.Info().Msg("stopping audio supervisor")
if err := audioSupervisor.Stop(); err != nil {
logger.Error().Err(err).Msg("failed to stop audio supervisor")
}
}
<-audioProcessDone
} else {
audio.StopNonBlockingAudioStreaming()
}
//if fuseServer != nil { //if fuseServer != nil {
// err := setMassStorageImage(" ") // err := setMassStorageImage(" ")
// if err != nil { // if err != nil {

View File

@ -1,7 +1,6 @@
package kvm package kvm
import ( import (
"github.com/jetkvm/kvm/internal/audio"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
versioncollector "github.com/prometheus/client_golang/prometheus/collectors/version" versioncollector "github.com/prometheus/client_golang/prometheus/collectors/version"
"github.com/prometheus/common/version" "github.com/prometheus/common/version"
@ -11,7 +10,4 @@ func initPrometheus() {
// A Prometheus metrics endpoint. // A Prometheus metrics endpoint.
version.Version = builtAppVersion version.Version = builtAppVersion
prometheus.MustRegister(versioncollector.NewCollector("jetkvm")) prometheus.MustRegister(versioncollector.NewCollector("jetkvm"))
// Start audio metrics collection
audio.StartMetricsUpdater()
} }

2
resource/dev_test.sh Executable file → Normal file
View File

@ -1,4 +1,4 @@
#!/bin/bash #!/bin/sh
JSON_OUTPUT=false JSON_OUTPUT=false
GET_COMMANDS=false GET_COMMANDS=false
if [ "$1" = "-json" ]; then if [ "$1" = "-json" ]; then

0
tools/build_audio_deps.sh Executable file → Normal file
View File

0
tools/setup_rv1106_toolchain.sh Executable file → Normal file
View File

View File

@ -2,7 +2,7 @@ import { MdOutlineContentPasteGo, MdVolumeOff, MdVolumeUp, MdGraphicEq } from "r
import { LuCable, LuHardDrive, LuMaximize, LuSettings, LuSignal } from "react-icons/lu"; import { LuCable, LuHardDrive, LuMaximize, LuSettings, LuSignal } from "react-icons/lu";
import { FaKeyboard } from "react-icons/fa6"; import { FaKeyboard } from "react-icons/fa6";
import { Popover, PopoverButton, PopoverPanel } from "@headlessui/react"; import { Popover, PopoverButton, PopoverPanel } from "@headlessui/react";
import { Fragment, useCallback, useRef } from "react"; import { Fragment, useCallback, useEffect, useRef, useState } from "react";
import { CommandLineIcon } from "@heroicons/react/20/solid"; import { CommandLineIcon } from "@heroicons/react/20/solid";
import { Button } from "@components/Button"; import { Button } from "@components/Button";
@ -21,7 +21,7 @@ import ExtensionPopover from "@/components/popovers/ExtensionPopover";
import AudioControlPopover from "@/components/popovers/AudioControlPopover"; import AudioControlPopover from "@/components/popovers/AudioControlPopover";
import { useDeviceUiNavigation } from "@/hooks/useAppNavigation"; import { useDeviceUiNavigation } from "@/hooks/useAppNavigation";
import { useAudioEvents } from "@/hooks/useAudioEvents"; import { useAudioEvents } from "@/hooks/useAudioEvents";
import api from "@/api";
// Type for microphone error // Type for microphone error
interface MicrophoneError { interface MicrophoneError {
@ -83,10 +83,35 @@ export default function Actionbar({
); );
// Use WebSocket-based audio events for real-time updates // Use WebSocket-based audio events for real-time updates
const { audioMuted } = useAudioEvents(); const { audioMuted, isConnected } = useAudioEvents();
// Use WebSocket data exclusively - no polling fallback // Fallback to polling if WebSocket is not connected
const isMuted = audioMuted ?? false; // Default to false if WebSocket data not available yet const [fallbackMuted, setFallbackMuted] = useState(false);
useEffect(() => {
if (!isConnected) {
// Load initial state
api.GET("/audio/mute").then(async resp => {
if (resp.ok) {
const data = await resp.json();
setFallbackMuted(!!data.muted);
}
});
// Fallback polling when WebSocket is not available
const interval = setInterval(async () => {
const resp = await api.GET("/audio/mute");
if (resp.ok) {
const data = await resp.json();
setFallbackMuted(!!data.muted);
}
}, 1000);
return () => clearInterval(interval);
}
}, [isConnected]);
// Use WebSocket data when available, fallback to polling data otherwise
const isMuted = isConnected && audioMuted !== null ? audioMuted : fallbackMuted;
return ( return (
<Container className="border-b border-b-slate-800/20 bg-white dark:border-b-slate-300/20 dark:bg-slate-900"> <Container className="border-b border-b-slate-800/20 bg-white dark:border-b-slate-300/20 dark:bg-slate-900">

View File

@ -1,9 +1,8 @@
import { useEffect, useState } from "react"; import { useEffect, useState } from "react";
import { MdGraphicEq, MdSignalWifi4Bar, MdError, MdMic } from "react-icons/md"; import { MdGraphicEq, MdSignalWifi4Bar, MdError, MdMic } from "react-icons/md";
import { LuActivity, LuClock, LuHardDrive, LuSettings, LuCpu, LuMemoryStick } from "react-icons/lu"; import { LuActivity, LuClock, LuHardDrive, LuSettings } from "react-icons/lu";
import { AudioLevelMeter } from "@components/AudioLevelMeter"; import { AudioLevelMeter } from "@components/AudioLevelMeter";
import StatChart from "@components/StatChart";
import { cx } from "@/cva.config"; import { cx } from "@/cva.config";
import { useMicrophone } from "@/hooks/useMicrophone"; import { useMicrophone } from "@/hooks/useMicrophone";
import { useAudioLevel } from "@/hooks/useAudioLevel"; import { useAudioLevel } from "@/hooks/useAudioLevel";
@ -28,14 +27,6 @@ interface MicrophoneMetrics {
average_latency: string; average_latency: string;
} }
interface ProcessMetrics {
cpu_percent: number;
memory_percent: number;
memory_rss: number;
memory_vms: number;
running: boolean;
}
interface AudioConfig { interface AudioConfig {
Quality: number; Quality: number;
Bitrate: number; Bitrate: number;
@ -51,166 +42,19 @@ const qualityLabels = {
3: "Ultra" 3: "Ultra"
}; };
// Format percentage values to 2 decimal places
function formatPercentage(value: number | null | undefined): string {
if (value === null || value === undefined || isNaN(value)) {
return "0.00%";
}
return `${value.toFixed(2)}%`;
}
function formatMemoryMB(rssBytes: number | null | undefined): string {
if (rssBytes === null || rssBytes === undefined || isNaN(rssBytes)) {
return "0.00 MB";
}
const mb = rssBytes / (1024 * 1024);
return `${mb.toFixed(2)} MB`;
}
// Default system memory estimate in MB (will be replaced by actual value from backend)
const DEFAULT_SYSTEM_MEMORY_MB = 4096; // 4GB default
// Create chart array similar to connectionStats.tsx
function createChartArray<T, K extends keyof T>(
stream: Map<number, T>,
metric: K,
): { date: number; stat: T[K] | null }[] {
const stat = Array.from(stream).map(([key, stats]) => {
return { date: key, stat: stats[metric] };
});
// Sort the dates to ensure they are in chronological order
const sortedStat = stat.map(x => x.date).sort((a, b) => a - b);
// Determine the earliest statistic date
const earliestStat = sortedStat[0];
// Current time in seconds since the Unix epoch
const now = Math.floor(Date.now() / 1000);
// Determine the starting point for the chart data
const firstChartDate = earliestStat ? Math.min(earliestStat, now - 120) : now - 120;
// Generate the chart array for the range between 'firstChartDate' and 'now'
return Array.from({ length: now - firstChartDate }, (_, i) => {
const currentDate = firstChartDate + i;
return {
date: currentDate,
// Find the statistic for 'currentDate', or use the last known statistic if none exists for that date
stat: stat.find(x => x.date === currentDate)?.stat ?? null,
};
});
}
export default function AudioMetricsDashboard() { export default function AudioMetricsDashboard() {
// System memory state
const [systemMemoryMB, setSystemMemoryMB] = useState(DEFAULT_SYSTEM_MEMORY_MB);
// Use WebSocket-based audio events for real-time updates // Use WebSocket-based audio events for real-time updates
const { const {
audioMetrics, audioMetrics,
microphoneMetrics: wsMicrophoneMetrics, microphoneMetrics: wsMicrophoneMetrics,
audioProcessMetrics: wsAudioProcessMetrics,
microphoneProcessMetrics: wsMicrophoneProcessMetrics,
isConnected: wsConnected isConnected: wsConnected
} = useAudioEvents(); } = useAudioEvents();
// Fetch system memory information on component mount
useEffect(() => {
const fetchSystemMemory = async () => {
try {
const response = await api.GET('/system/memory');
const data = await response.json();
setSystemMemoryMB(data.total_memory_mb);
} catch (error) {
console.warn('Failed to fetch system memory, using default:', error);
}
};
fetchSystemMemory();
}, []);
// Update historical data when WebSocket process metrics are received
useEffect(() => {
if (wsConnected && wsAudioProcessMetrics && wsAudioProcessMetrics.running) {
const now = Math.floor(Date.now() / 1000); // Convert to seconds for StatChart
// Validate that now is a valid number
if (isNaN(now)) return;
const cpuStat = isNaN(wsAudioProcessMetrics.cpu_percent) ? null : wsAudioProcessMetrics.cpu_percent;
setAudioCpuStats(prev => {
const newMap = new Map(prev);
newMap.set(now, { cpu_percent: cpuStat });
// Keep only last 120 seconds of data for memory management
const cutoff = now - 120;
for (const [key] of newMap) {
if (key < cutoff) newMap.delete(key);
}
return newMap;
});
setAudioMemoryStats(prev => {
const newMap = new Map(prev);
const memoryRss = isNaN(wsAudioProcessMetrics.memory_rss) ? null : wsAudioProcessMetrics.memory_rss;
newMap.set(now, { memory_rss: memoryRss });
// Keep only last 120 seconds of data for memory management
const cutoff = now - 120;
for (const [key] of newMap) {
if (key < cutoff) newMap.delete(key);
}
return newMap;
});
}
}, [wsConnected, wsAudioProcessMetrics]);
useEffect(() => {
if (wsConnected && wsMicrophoneProcessMetrics) {
const now = Math.floor(Date.now() / 1000); // Convert to seconds for StatChart
// Validate that now is a valid number
if (isNaN(now)) return;
const cpuStat = isNaN(wsMicrophoneProcessMetrics.cpu_percent) ? null : wsMicrophoneProcessMetrics.cpu_percent;
setMicCpuStats(prev => {
const newMap = new Map(prev);
newMap.set(now, { cpu_percent: cpuStat });
// Keep only last 120 seconds of data for memory management
const cutoff = now - 120;
for (const [key] of newMap) {
if (key < cutoff) newMap.delete(key);
}
return newMap;
});
setMicMemoryStats(prev => {
const newMap = new Map(prev);
const memoryRss = isNaN(wsMicrophoneProcessMetrics.memory_rss) ? null : wsMicrophoneProcessMetrics.memory_rss;
newMap.set(now, { memory_rss: memoryRss });
// Keep only last 120 seconds of data for memory management
const cutoff = now - 120;
for (const [key] of newMap) {
if (key < cutoff) newMap.delete(key);
}
return newMap;
});
}
}, [wsConnected, wsMicrophoneProcessMetrics]);
// Fallback state for when WebSocket is not connected // Fallback state for when WebSocket is not connected
const [fallbackMetrics, setFallbackMetrics] = useState<AudioMetrics | null>(null); const [fallbackMetrics, setFallbackMetrics] = useState<AudioMetrics | null>(null);
const [fallbackMicrophoneMetrics, setFallbackMicrophoneMetrics] = useState<MicrophoneMetrics | null>(null); const [fallbackMicrophoneMetrics, setFallbackMicrophoneMetrics] = useState<MicrophoneMetrics | null>(null);
const [fallbackConnected, setFallbackConnected] = useState(false); const [fallbackConnected, setFallbackConnected] = useState(false);
// Process metrics state (fallback for when WebSocket is not connected)
const [fallbackAudioProcessMetrics, setFallbackAudioProcessMetrics] = useState<ProcessMetrics | null>(null);
const [fallbackMicrophoneProcessMetrics, setFallbackMicrophoneProcessMetrics] = useState<ProcessMetrics | null>(null);
// Historical data for charts using Maps for better memory management
const [audioCpuStats, setAudioCpuStats] = useState<Map<number, { cpu_percent: number | null }>>(new Map());
const [audioMemoryStats, setAudioMemoryStats] = useState<Map<number, { memory_rss: number | null }>>(new Map());
const [micCpuStats, setMicCpuStats] = useState<Map<number, { cpu_percent: number | null }>>(new Map());
const [micMemoryStats, setMicMemoryStats] = useState<Map<number, { memory_rss: number | null }>>(new Map());
// Configuration state (these don't change frequently, so we can load them once) // Configuration state (these don't change frequently, so we can load them once)
const [config, setConfig] = useState<AudioConfig | null>(null); const [config, setConfig] = useState<AudioConfig | null>(null);
const [microphoneConfig, setMicrophoneConfig] = useState<AudioConfig | null>(null); const [microphoneConfig, setMicrophoneConfig] = useState<AudioConfig | null>(null);
@ -219,8 +63,6 @@ export default function AudioMetricsDashboard() {
// Use WebSocket data when available, fallback to polling data otherwise // Use WebSocket data when available, fallback to polling data otherwise
const metrics = wsConnected && audioMetrics !== null ? audioMetrics : fallbackMetrics; const metrics = wsConnected && audioMetrics !== null ? audioMetrics : fallbackMetrics;
const microphoneMetrics = wsConnected && wsMicrophoneMetrics !== null ? wsMicrophoneMetrics : fallbackMicrophoneMetrics; const microphoneMetrics = wsConnected && wsMicrophoneMetrics !== null ? wsMicrophoneMetrics : fallbackMicrophoneMetrics;
const audioProcessMetrics = wsConnected && wsAudioProcessMetrics !== null ? wsAudioProcessMetrics : fallbackAudioProcessMetrics;
const microphoneProcessMetrics = wsConnected && wsMicrophoneProcessMetrics !== null ? wsMicrophoneProcessMetrics : fallbackMicrophoneProcessMetrics;
const isConnected = wsConnected ? wsConnected : fallbackConnected; const isConnected = wsConnected ? wsConnected : fallbackConnected;
// Microphone state for audio level monitoring // Microphone state for audio level monitoring
@ -282,49 +124,6 @@ export default function AudioMetricsDashboard() {
setFallbackConnected(false); setFallbackConnected(false);
} }
// Load audio process metrics
try {
const audioProcessResp = await api.GET("/audio/process-metrics");
if (audioProcessResp.ok) {
const audioProcessData = await audioProcessResp.json();
setFallbackAudioProcessMetrics(audioProcessData);
// Update historical data for charts (keep last 120 seconds)
if (audioProcessData.running) {
const now = Math.floor(Date.now() / 1000); // Convert to seconds for StatChart
// Validate that now is a valid number
if (isNaN(now)) return;
const cpuStat = isNaN(audioProcessData.cpu_percent) ? null : audioProcessData.cpu_percent;
const memoryRss = isNaN(audioProcessData.memory_rss) ? null : audioProcessData.memory_rss;
setAudioCpuStats(prev => {
const newMap = new Map(prev);
newMap.set(now, { cpu_percent: cpuStat });
// Keep only last 120 seconds of data for memory management
const cutoff = now - 120;
for (const [key] of newMap) {
if (key < cutoff) newMap.delete(key);
}
return newMap;
});
setAudioMemoryStats(prev => {
const newMap = new Map(prev);
newMap.set(now, { memory_rss: memoryRss });
// Keep only last 120 seconds of data for memory management
const cutoff = now - 120;
for (const [key] of newMap) {
if (key < cutoff) newMap.delete(key);
}
return newMap;
});
}
}
} catch (audioProcessError) {
console.debug("Audio process metrics not available:", audioProcessError);
}
// Load microphone metrics // Load microphone metrics
try { try {
const micResp = await api.GET("/microphone/metrics"); const micResp = await api.GET("/microphone/metrics");
@ -336,47 +135,6 @@ export default function AudioMetricsDashboard() {
// Microphone metrics might not be available, that's okay // Microphone metrics might not be available, that's okay
console.debug("Microphone metrics not available:", micError); console.debug("Microphone metrics not available:", micError);
} }
// Load microphone process metrics
try {
const micProcessResp = await api.GET("/microphone/process-metrics");
if (micProcessResp.ok) {
const micProcessData = await micProcessResp.json();
setFallbackMicrophoneProcessMetrics(micProcessData);
// Update historical data for charts (keep last 120 seconds)
const now = Math.floor(Date.now() / 1000); // Convert to seconds for StatChart
// Validate that now is a valid number
if (isNaN(now)) return;
const cpuStat = isNaN(micProcessData.cpu_percent) ? null : micProcessData.cpu_percent;
const memoryRss = isNaN(micProcessData.memory_rss) ? null : micProcessData.memory_rss;
setMicCpuStats(prev => {
const newMap = new Map(prev);
newMap.set(now, { cpu_percent: cpuStat });
// Keep only last 120 seconds of data for memory management
const cutoff = now - 120;
for (const [key] of newMap) {
if (key < cutoff) newMap.delete(key);
}
return newMap;
});
setMicMemoryStats(prev => {
const newMap = new Map(prev);
newMap.set(now, { memory_rss: memoryRss });
// Keep only last 120 seconds of data for memory management
const cutoff = now - 120;
for (const [key] of newMap) {
if (key < cutoff) newMap.delete(key);
}
return newMap;
});
}
} catch (micProcessError) {
console.debug("Microphone process metrics not available:", micProcessError);
}
} catch (error) { } catch (error) {
console.error("Failed to load audio data:", error); console.error("Failed to load audio data:", error);
setFallbackConnected(false); setFallbackConnected(false);
@ -400,10 +158,6 @@ export default function AudioMetricsDashboard() {
return ((metrics.frames_dropped / metrics.frames_received) * 100); return ((metrics.frames_dropped / metrics.frames_received) * 100);
}; };
const getQualityColor = (quality: number) => { const getQualityColor = (quality: number) => {
switch (quality) { switch (quality) {
case 0: return "text-yellow-600 dark:text-yellow-400"; case 0: return "text-yellow-600 dark:text-yellow-400";
@ -512,119 +266,6 @@ export default function AudioMetricsDashboard() {
)} )}
</div> </div>
{/* Subprocess Resource Usage - Histogram View */}
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
{/* Audio Output Subprocess */}
{audioProcessMetrics && (
<div className="rounded-lg border border-slate-200 p-3 dark:border-slate-700">
<div className="mb-3 flex items-center gap-2">
<LuCpu className="h-4 w-4 text-blue-600 dark:text-blue-400" />
<span className="font-medium text-slate-900 dark:text-slate-100">
Audio Output Process
</span>
<div className={cx(
"h-2 w-2 rounded-full ml-auto",
audioProcessMetrics.running ? "bg-green-500" : "bg-red-500"
)} />
</div>
<div className="space-y-4">
<div>
<h4 className="text-sm font-medium text-slate-900 dark:text-slate-100 mb-2">CPU Usage</h4>
<div className="h-24">
<StatChart
data={createChartArray(audioCpuStats, 'cpu_percent')}
unit="%"
domain={[0, 100]}
/>
</div>
</div>
<div>
<h4 className="text-sm font-medium text-slate-900 dark:text-slate-100 mb-2">Memory Usage</h4>
<div className="h-24">
<StatChart
data={createChartArray(audioMemoryStats, 'memory_rss').map(item => ({
date: item.date,
stat: item.stat ? item.stat / (1024 * 1024) : null // Convert bytes to MB
}))}
unit="MB"
domain={[0, systemMemoryMB]}
/>
</div>
</div>
<div className="grid grid-cols-2 gap-2 text-xs">
<div className="text-center p-2 bg-slate-50 dark:bg-slate-800 rounded">
<div className="font-medium text-slate-900 dark:text-slate-100">
{formatPercentage(audioProcessMetrics.cpu_percent)}
</div>
<div className="text-slate-500 dark:text-slate-400">CPU</div>
</div>
<div className="text-center p-2 bg-slate-50 dark:bg-slate-800 rounded">
<div className="font-medium text-slate-900 dark:text-slate-100">
{formatMemoryMB(audioProcessMetrics.memory_rss)}
</div>
<div className="text-slate-500 dark:text-slate-400">Memory</div>
</div>
</div>
</div>
</div>
)}
{/* Microphone Input Subprocess */}
{microphoneProcessMetrics && (
<div className="rounded-lg border border-slate-200 p-3 dark:border-slate-700">
<div className="mb-3 flex items-center gap-2">
<LuMemoryStick className="h-4 w-4 text-green-600 dark:text-green-400" />
<span className="font-medium text-slate-900 dark:text-slate-100">
Microphone Input Process
</span>
<div className={cx(
"h-2 w-2 rounded-full ml-auto",
microphoneProcessMetrics.running ? "bg-green-500" : "bg-red-500"
)} />
</div>
<div className="space-y-4">
<div>
<h4 className="text-sm font-medium text-slate-900 dark:text-slate-100 mb-2">CPU Usage</h4>
<div className="h-24">
<StatChart
data={createChartArray(micCpuStats, 'cpu_percent')}
unit="%"
domain={[0, 100]}
/>
</div>
</div>
<div>
<h4 className="text-sm font-medium text-slate-900 dark:text-slate-100 mb-2">Memory Usage</h4>
<div className="h-24">
<StatChart
data={createChartArray(micMemoryStats, 'memory_rss').map(item => ({
date: item.date,
stat: item.stat ? item.stat / (1024 * 1024) : null // Convert bytes to MB
}))}
unit="MB"
domain={[0, systemMemoryMB]}
/>
</div>
</div>
<div className="grid grid-cols-2 gap-2 text-xs">
<div className="text-center p-2 bg-slate-50 dark:bg-slate-800 rounded">
<div className="font-medium text-slate-900 dark:text-slate-100">
{formatPercentage(microphoneProcessMetrics.cpu_percent)}
</div>
<div className="text-slate-500 dark:text-slate-400">CPU</div>
</div>
<div className="text-center p-2 bg-slate-50 dark:bg-slate-800 rounded">
<div className="font-medium text-slate-900 dark:text-slate-100">
{formatMemoryMB(microphoneProcessMetrics.memory_rss)}
</div>
<div className="text-slate-500 dark:text-slate-400">Memory</div>
</div>
</div>
</div>
</div>
)}
</div>
{/* Performance Metrics */} {/* Performance Metrics */}
{metrics && ( {metrics && (
<div className="space-y-3"> <div className="space-y-3">
@ -771,41 +412,6 @@ export default function AudioMetricsDashboard() {
/> />
</div> </div>
)} )}
{/* Microphone Connection Health */}
<div className="mt-3 rounded-md bg-slate-50 p-2 dark:bg-slate-700">
<div className="mb-2 flex items-center gap-2">
<MdSignalWifi4Bar className="h-3 w-3 text-purple-600 dark:text-purple-400" />
<span className="text-sm font-medium text-slate-900 dark:text-slate-100">
Connection Health
</span>
</div>
<div className="space-y-2">
<div className="flex justify-between">
<span className="text-xs text-slate-500 dark:text-slate-400">
Connection Drops:
</span>
<span className={cx(
"text-xs font-medium",
microphoneMetrics.connection_drops > 0
? "text-red-600 dark:text-red-400"
: "text-green-600 dark:text-green-400"
)}>
{formatNumber(microphoneMetrics.connection_drops)}
</span>
</div>
{microphoneMetrics.average_latency && (
<div className="flex justify-between">
<span className="text-xs text-slate-500 dark:text-slate-400">
Avg Latency:
</span>
<span className="text-xs font-medium text-slate-900 dark:text-slate-100">
{microphoneMetrics.average_latency}
</span>
</div>
)}
</div>
</div>
</div> </div>
)} )}

View File

@ -2,7 +2,7 @@ import { useEffect, useMemo, useState } from "react";
import { LuExternalLink } from "react-icons/lu"; import { LuExternalLink } from "react-icons/lu";
import { Button, LinkButton } from "@components/Button"; import { Button, LinkButton } from "@components/Button";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import { InputFieldWithLabel } from "./InputField"; import { InputFieldWithLabel } from "./InputField";
import { SelectMenuBasic } from "./SelectMenuBasic"; import { SelectMenuBasic } from "./SelectMenuBasic";
@ -34,7 +34,7 @@ export function JigglerSetting({
const [timezones, setTimezones] = useState<string[]>([]); const [timezones, setTimezones] = useState<string[]>([]);
useEffect(() => { useEffect(() => {
send("getTimezones", {}, (resp: JsonRpcResponse) => { send("getTimezones", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
setTimezones(resp.result as string[]); setTimezones(resp.result as string[]);
}); });

View File

@ -10,7 +10,7 @@ import { useJsonRpc } from "@/hooks/useJsonRpc";
export default function MacroBar() { export default function MacroBar() {
const { macros, initialized, loadMacros, setSendFn } = useMacrosStore(); const { macros, initialized, loadMacros, setSendFn } = useMacrosStore();
const { executeMacro } = useKeyboard(); const { executeMacro } = useKeyboard();
const { send } = useJsonRpc(); const [send] = useJsonRpc();
useEffect(() => { useEffect(() => {
setSendFn(send); setSendFn(send);

View File

@ -1,6 +1,6 @@
import { useCallback , useEffect, useState } from "react"; import { useCallback , useEffect, useState } from "react";
import { JsonRpcResponse, useJsonRpc } from "../hooks/useJsonRpc"; import { useJsonRpc } from "../hooks/useJsonRpc";
import notifications from "../notifications"; import notifications from "../notifications";
import { SettingsItem } from "../routes/devices.$id.settings"; import { SettingsItem } from "../routes/devices.$id.settings";
@ -59,7 +59,7 @@ const usbPresets = [
]; ];
export function UsbDeviceSetting() { export function UsbDeviceSetting() {
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const [loading, setLoading] = useState(false); const [loading, setLoading] = useState(false);
const [usbDeviceConfig, setUsbDeviceConfig] = const [usbDeviceConfig, setUsbDeviceConfig] =
@ -67,7 +67,7 @@ export function UsbDeviceSetting() {
const [selectedPreset, setSelectedPreset] = useState<string>("default"); const [selectedPreset, setSelectedPreset] = useState<string>("default");
const syncUsbDeviceConfig = useCallback(() => { const syncUsbDeviceConfig = useCallback(() => {
send("getUsbDevices", {}, (resp: JsonRpcResponse) => { send("getUsbDevices", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
console.error("Failed to load USB devices:", resp.error); console.error("Failed to load USB devices:", resp.error);
notifications.error( notifications.error(
@ -97,7 +97,7 @@ export function UsbDeviceSetting() {
const handleUsbConfigChange = useCallback( const handleUsbConfigChange = useCallback(
(devices: UsbDeviceConfig) => { (devices: UsbDeviceConfig) => {
setLoading(true); setLoading(true);
send("setUsbDevices", { devices }, async (resp: JsonRpcResponse) => { send("setUsbDevices", { devices }, async resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to set usb devices: ${resp.error.data || "Unknown error"}`, `Failed to set usb devices: ${resp.error.data || "Unknown error"}`,

View File

@ -4,7 +4,7 @@ import { Button } from "@components/Button";
import { UsbConfigState } from "../hooks/stores"; import { UsbConfigState } from "../hooks/stores";
import { JsonRpcResponse, useJsonRpc } from "../hooks/useJsonRpc"; import { useJsonRpc } from "../hooks/useJsonRpc";
import notifications from "../notifications"; import notifications from "../notifications";
import { SettingsItem } from "../routes/devices.$id.settings"; import { SettingsItem } from "../routes/devices.$id.settings";
@ -54,7 +54,7 @@ const usbConfigs = [
type UsbConfigMap = Record<string, USBConfig>; type UsbConfigMap = Record<string, USBConfig>;
export function UsbInfoSetting() { export function UsbInfoSetting() {
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const [loading, setLoading] = useState(false); const [loading, setLoading] = useState(false);
const [usbConfigProduct, setUsbConfigProduct] = useState(""); const [usbConfigProduct, setUsbConfigProduct] = useState("");
@ -94,7 +94,7 @@ export function UsbInfoSetting() {
); );
const syncUsbConfigProduct = useCallback(() => { const syncUsbConfigProduct = useCallback(() => {
send("getUsbConfig", {}, (resp: JsonRpcResponse) => { send("getUsbConfig", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
console.error("Failed to load USB Config:", resp.error); console.error("Failed to load USB Config:", resp.error);
notifications.error( notifications.error(
@ -114,7 +114,7 @@ export function UsbInfoSetting() {
const handleUsbConfigChange = useCallback( const handleUsbConfigChange = useCallback(
(usbConfig: USBConfig) => { (usbConfig: USBConfig) => {
setLoading(true); setLoading(true);
send("setUsbConfig", { usbConfig }, async (resp: JsonRpcResponse) => { send("setUsbConfig", { usbConfig }, async resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to set usb config: ${resp.error.data || "Unknown error"}`, `Failed to set usb config: ${resp.error.data || "Unknown error"}`,
@ -137,7 +137,7 @@ export function UsbInfoSetting() {
); );
useEffect(() => { useEffect(() => {
send("getDeviceID", {}, async (resp: JsonRpcResponse) => { send("getDeviceID", {}, async resp => {
if ("error" in resp) { if ("error" in resp) {
return notifications.error( return notifications.error(
`Failed to get device ID: ${resp.error.data || "Unknown error"}`, `Failed to get device ID: ${resp.error.data || "Unknown error"}`,
@ -205,10 +205,10 @@ function USBConfigDialog({
product: "", product: "",
}); });
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const syncUsbConfig = useCallback(() => { const syncUsbConfig = useCallback(() => {
send("getUsbConfig", {}, (resp: JsonRpcResponse) => { send("getUsbConfig", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
console.error("Failed to load USB Config:", resp.error); console.error("Failed to load USB Config:", resp.error);
} else { } else {

View File

@ -99,7 +99,7 @@ export default function WebRTCVideo({ microphone }: WebRTCVideoProps) {
const [blockWheelEvent, setBlockWheelEvent] = useState(false); const [blockWheelEvent, setBlockWheelEvent] = useState(false);
// Misc states and hooks // Misc states and hooks
const { send } = useJsonRpc(); const [send] = useJsonRpc();
// Video-related // Video-related
useResizeObserver({ useResizeObserver({

View File

@ -7,7 +7,7 @@ import { SettingsPageHeader } from "@components/SettingsPageheader";
import notifications from "@/notifications"; import notifications from "@/notifications";
import LoadingSpinner from "@/components/LoadingSpinner"; import LoadingSpinner from "@/components/LoadingSpinner";
import { JsonRpcResponse, useJsonRpc } from "../../hooks/useJsonRpc"; import { useJsonRpc } from "../../hooks/useJsonRpc";
const LONG_PRESS_DURATION = 3000; // 3 seconds for long press const LONG_PRESS_DURATION = 3000; // 3 seconds for long press
@ -23,7 +23,7 @@ export function ATXPowerControl() {
> | null>(null); > | null>(null);
const [atxState, setAtxState] = useState<ATXState | null>(null); const [atxState, setAtxState] = useState<ATXState | null>(null);
const { send } = useJsonRpc(function onRequest(resp) { const [send] = useJsonRpc(function onRequest(resp) {
if (resp.method === "atxState") { if (resp.method === "atxState") {
setAtxState(resp.params as ATXState); setAtxState(resp.params as ATXState);
} }
@ -31,7 +31,7 @@ export function ATXPowerControl() {
// Request initial state // Request initial state
useEffect(() => { useEffect(() => {
send("getATXState", {}, (resp: JsonRpcResponse) => { send("getATXState", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to get ATX state: ${resp.error.data || "Unknown error"}`, `Failed to get ATX state: ${resp.error.data || "Unknown error"}`,
@ -54,7 +54,7 @@ export function ATXPowerControl() {
const timer = setTimeout(() => { const timer = setTimeout(() => {
// Send long press action // Send long press action
console.log("Sending long press ATX power action"); console.log("Sending long press ATX power action");
send("setATXPowerAction", { action: "power-long" }, (resp: JsonRpcResponse) => { send("setATXPowerAction", { action: "power-long" }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to send ATX power action: ${resp.error.data || "Unknown error"}`, `Failed to send ATX power action: ${resp.error.data || "Unknown error"}`,
@ -75,7 +75,7 @@ export function ATXPowerControl() {
// Send short press action // Send short press action
console.log("Sending short press ATX power action"); console.log("Sending short press ATX power action");
send("setATXPowerAction", { action: "power-short" }, (resp: JsonRpcResponse) => { send("setATXPowerAction", { action: "power-short" }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to send ATX power action: ${resp.error.data || "Unknown error"}`, `Failed to send ATX power action: ${resp.error.data || "Unknown error"}`,
@ -127,7 +127,7 @@ export function ATXPowerControl() {
LeadingIcon={LuRotateCcw} LeadingIcon={LuRotateCcw}
text="Reset" text="Reset"
onClick={() => { onClick={() => {
send("setATXPowerAction", { action: "reset" }, (resp: JsonRpcResponse) => { send("setATXPowerAction", { action: "reset" }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to send ATX power action: ${resp.error.data || "Unknown error"}`, `Failed to send ATX power action: ${resp.error.data || "Unknown error"}`,

View File

@ -4,7 +4,7 @@ import { useCallback, useEffect, useState } from "react";
import { Button } from "@components/Button"; import { Button } from "@components/Button";
import Card from "@components/Card"; import Card from "@components/Card";
import { SettingsPageHeader } from "@components/SettingsPageheader"; import { SettingsPageHeader } from "@components/SettingsPageheader";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import notifications from "@/notifications"; import notifications from "@/notifications";
import FieldLabel from "@components/FieldLabel"; import FieldLabel from "@components/FieldLabel";
import LoadingSpinner from "@components/LoadingSpinner"; import LoadingSpinner from "@components/LoadingSpinner";
@ -19,11 +19,11 @@ interface DCPowerState {
} }
export function DCPowerControl() { export function DCPowerControl() {
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const [powerState, setPowerState] = useState<DCPowerState | null>(null); const [powerState, setPowerState] = useState<DCPowerState | null>(null);
const getDCPowerState = useCallback(() => { const getDCPowerState = useCallback(() => {
send("getDCPowerState", {}, (resp: JsonRpcResponse) => { send("getDCPowerState", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to get DC power state: ${resp.error.data || "Unknown error"}`, `Failed to get DC power state: ${resp.error.data || "Unknown error"}`,
@ -35,7 +35,7 @@ export function DCPowerControl() {
}, [send]); }, [send]);
const handlePowerToggle = (enabled: boolean) => { const handlePowerToggle = (enabled: boolean) => {
send("setDCPowerState", { enabled }, (resp: JsonRpcResponse) => { send("setDCPowerState", { enabled }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to set DC power state: ${resp.error.data || "Unknown error"}`, `Failed to set DC power state: ${resp.error.data || "Unknown error"}`,
@ -47,7 +47,7 @@ export function DCPowerControl() {
}; };
const handleRestoreChange = (state: number) => { const handleRestoreChange = (state: number) => {
// const state = powerState?.restoreState === 0 ? 1 : powerState?.restoreState === 1 ? 2 : 0; // const state = powerState?.restoreState === 0 ? 1 : powerState?.restoreState === 1 ? 2 : 0;
send("setDCRestoreState", { state }, (resp: JsonRpcResponse) => { send("setDCRestoreState", { state }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to set DC power state: ${resp.error.data || "Unknown error"}`, `Failed to set DC power state: ${resp.error.data || "Unknown error"}`,

View File

@ -4,7 +4,7 @@ import { useEffect, useState } from "react";
import { Button } from "@components/Button"; import { Button } from "@components/Button";
import Card from "@components/Card"; import Card from "@components/Card";
import { SettingsPageHeader } from "@components/SettingsPageheader"; import { SettingsPageHeader } from "@components/SettingsPageheader";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import notifications from "@/notifications"; import notifications from "@/notifications";
import { useUiStore } from "@/hooks/stores"; import { useUiStore } from "@/hooks/stores";
import { SelectMenuBasic } from "@components/SelectMenuBasic"; import { SelectMenuBasic } from "@components/SelectMenuBasic";
@ -17,7 +17,7 @@ interface SerialSettings {
} }
export function SerialConsole() { export function SerialConsole() {
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const [settings, setSettings] = useState<SerialSettings>({ const [settings, setSettings] = useState<SerialSettings>({
baudRate: "9600", baudRate: "9600",
dataBits: "8", dataBits: "8",
@ -26,7 +26,7 @@ export function SerialConsole() {
}); });
useEffect(() => { useEffect(() => {
send("getSerialSettings", {}, (resp: JsonRpcResponse) => { send("getSerialSettings", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to get serial settings: ${resp.error.data || "Unknown error"}`, `Failed to get serial settings: ${resp.error.data || "Unknown error"}`,
@ -39,7 +39,7 @@ export function SerialConsole() {
const handleSettingChange = (setting: keyof SerialSettings, value: string) => { const handleSettingChange = (setting: keyof SerialSettings, value: string) => {
const newSettings = { ...settings, [setting]: value }; const newSettings = { ...settings, [setting]: value };
send("setSerialSettings", { settings: newSettings }, (resp: JsonRpcResponse) => { send("setSerialSettings", { settings: newSettings }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to update serial settings: ${resp.error.data || "Unknown error"}`, `Failed to update serial settings: ${resp.error.data || "Unknown error"}`,

View File

@ -41,6 +41,26 @@ interface AudioConfig {
FrameSize: string; FrameSize: string;
} }
// Audio output (playback) statistics delivered by the backend.
// Field names mirror the backend's snake_case JSON payload.
interface AudioMetrics {
  // Total frames received from the audio source.
  frames_received: number;
  // Frames discarded; divided by frames_received to show a drop percentage.
  frames_dropped: number;
  // Total bytes processed by the pipeline.
  bytes_processed: number;
  // Time of the most recent frame, as a backend-formatted string
  // (exact format not visible here — rendered verbatim in the UI).
  last_frame_time: string;
  // Count of connection drops; any value > 0 is highlighted as unhealthy.
  connection_drops: number;
  // Average latency as a preformatted string, displayed verbatim.
  average_latency: string;
}
// Microphone input (capture) statistics delivered by the backend.
// Same shape as the output metrics except frames are counted as sent.
interface MicrophoneMetrics {
  // Total frames sent to the device.
  frames_sent: number;
  // Frames discarded by the capture pipeline.
  frames_dropped: number;
  // Total bytes processed by the pipeline.
  bytes_processed: number;
  // Time of the most recent frame, as a backend-formatted string
  // (exact format not visible here — rendered verbatim in the UI).
  last_frame_time: string;
  // Count of connection drops; any value > 0 is highlighted as unhealthy.
  connection_drops: number;
  // Average latency as a preformatted string, displayed verbatim.
  average_latency: string;
}
const qualityLabels = { const qualityLabels = {
0: "Low (32kbps)", 0: "Low (32kbps)",
1: "Medium (64kbps)", 1: "Medium (64kbps)",
@ -74,7 +94,11 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
isConnected: wsConnected isConnected: wsConnected
} = useAudioEvents(); } = useAudioEvents();
// WebSocket-only implementation - no fallback polling // Fallback state for when WebSocket is not connected
const [fallbackMuted, setFallbackMuted] = useState(false);
const [fallbackMetrics, setFallbackMetrics] = useState<AudioMetrics | null>(null);
const [fallbackMicMetrics, setFallbackMicMetrics] = useState<MicrophoneMetrics | null>(null);
const [fallbackConnected, setFallbackConnected] = useState(false);
// Microphone state from props // Microphone state from props
const { const {
@ -91,11 +115,11 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
isToggling, isToggling,
} = microphone; } = microphone;
// Use WebSocket data exclusively - no polling fallback // Use WebSocket data when available, fallback to polling data otherwise
const isMuted = audioMuted ?? false; const isMuted = wsConnected && audioMuted !== null ? audioMuted : fallbackMuted;
const metrics = audioMetrics; const metrics = wsConnected && audioMetrics !== null ? audioMetrics : fallbackMetrics;
const micMetrics = microphoneMetrics; const micMetrics = wsConnected && microphoneMetrics !== null ? microphoneMetrics : fallbackMicMetrics;
const isConnected = wsConnected; const isConnected = wsConnected ? wsConnected : fallbackConnected;
// Audio level monitoring - enable only when popover is open and microphone is active to save resources // Audio level monitoring - enable only when popover is open and microphone is active to save resources
const analysisEnabled = (open ?? true) && isMicrophoneActive; const analysisEnabled = (open ?? true) && isMicrophoneActive;
@ -126,15 +150,34 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
} }
}, [configsLoaded]); }, [configsLoaded]);
// WebSocket-only implementation - sync microphone state when needed // Optimize fallback polling - only run when WebSocket is not connected
useEffect(() => { useEffect(() => {
if (!wsConnected && !configsLoaded) {
// Load state once if configs aren't loaded yet
loadAudioState();
}
if (!wsConnected) {
loadAudioMetrics();
loadMicrophoneMetrics();
// Reduced frequency for fallback polling (every 3 seconds instead of 2)
const metricsInterval = setInterval(() => {
if (!wsConnected) { // Double-check to prevent unnecessary requests
loadAudioMetrics();
loadMicrophoneMetrics();
}
}, 3000);
return () => clearInterval(metricsInterval);
}
// Always sync microphone state, but debounce it // Always sync microphone state, but debounce it
const syncTimeout = setTimeout(() => { const syncTimeout = setTimeout(() => {
syncMicrophoneState(); syncMicrophoneState();
}, 500); }, 500);
return () => clearTimeout(syncTimeout); return () => clearTimeout(syncTimeout);
}, [syncMicrophoneState]); }, [wsConnected, syncMicrophoneState, configsLoaded]);
const loadAudioConfigurations = async () => { const loadAudioConfigurations = async () => {
try { try {
@ -160,14 +203,60 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
} }
}; };
const loadAudioState = async () => {
try {
// Load mute state only (configurations are loaded separately)
const muteResp = await api.GET("/audio/mute");
if (muteResp.ok) {
const muteData = await muteResp.json();
setFallbackMuted(!!muteData.muted);
}
} catch (error) {
console.error("Failed to load audio state:", error);
}
};
const loadAudioMetrics = async () => {
try {
const resp = await api.GET("/audio/metrics");
if (resp.ok) {
const data = await resp.json();
setFallbackMetrics(data);
// Consider connected if API call succeeds, regardless of frame count
setFallbackConnected(true);
} else {
setFallbackConnected(false);
}
} catch (error) {
console.error("Failed to load audio metrics:", error);
setFallbackConnected(false);
}
};
const loadMicrophoneMetrics = async () => {
try {
const resp = await api.GET("/microphone/metrics");
if (resp.ok) {
const data = await resp.json();
setFallbackMicMetrics(data);
}
} catch (error) {
console.error("Failed to load microphone metrics:", error);
}
};
const handleToggleMute = async () => { const handleToggleMute = async () => {
setIsLoading(true); setIsLoading(true);
try { try {
const resp = await api.POST("/audio/mute", { muted: !isMuted }); const resp = await api.POST("/audio/mute", { muted: !isMuted });
if (!resp.ok) { if (resp.ok) {
console.error("Failed to toggle mute:", resp.statusText); // WebSocket will handle the state update, but update fallback for immediate feedback
if (!wsConnected) {
setFallbackMuted(!isMuted);
}
} }
// WebSocket will handle the state update automatically
} catch (error) { } catch (error) {
console.error("Failed to toggle mute:", error); console.error("Failed to toggle mute:", error);
} finally { } finally {
@ -207,6 +296,7 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
// Prevent rapid clicking - if any operation is in progress or within cooldown, ignore the click // Prevent rapid clicking - if any operation is in progress or within cooldown, ignore the click
if (isStarting || isStopping || isToggling || (now - lastClickTime < CLICK_COOLDOWN)) { if (isStarting || isStopping || isToggling || (now - lastClickTime < CLICK_COOLDOWN)) {
console.log("Microphone operation already in progress or within cooldown, ignoring click");
return; return;
} }
@ -228,6 +318,7 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
// Prevent rapid clicking - if any operation is in progress or within cooldown, ignore the click // Prevent rapid clicking - if any operation is in progress or within cooldown, ignore the click
if (isStarting || isStopping || isToggling || (now - lastClickTime < CLICK_COOLDOWN)) { if (isStarting || isStopping || isToggling || (now - lastClickTime < CLICK_COOLDOWN)) {
console.log("Microphone operation already in progress or within cooldown, ignoring mute toggle");
return; return;
} }
@ -273,6 +364,7 @@ export default function AudioControlPopover({ microphone, open }: AudioControlPo
if (videoElement && 'setSinkId' in videoElement) { if (videoElement && 'setSinkId' in videoElement) {
try { try {
await (videoElement as HTMLVideoElement & { setSinkId: (deviceId: string) => Promise<void> }).setSinkId(deviceId); await (videoElement as HTMLVideoElement & { setSinkId: (deviceId: string) => Promise<void> }).setSinkId(deviceId);
console.log('Audio output device changed to:', deviceId);
} catch (error: unknown) { } catch (error: unknown) {
console.error('Failed to change audio output device:', error); console.error('Failed to change audio output device:', error);
} }

View File

@ -1,7 +1,7 @@
import { useEffect, useState } from "react"; import { useEffect, useState } from "react";
import { LuPower, LuTerminal, LuPlugZap } from "react-icons/lu"; import { LuPower, LuTerminal, LuPlugZap } from "react-icons/lu";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import Card, { GridCard } from "@components/Card"; import Card, { GridCard } from "@components/Card";
import { SettingsPageHeader } from "@components/SettingsPageheader"; import { SettingsPageHeader } from "@components/SettingsPageheader";
import { ATXPowerControl } from "@components/extensions/ATXPowerControl"; import { ATXPowerControl } from "@components/extensions/ATXPowerControl";
@ -39,12 +39,12 @@ const AVAILABLE_EXTENSIONS: Extension[] = [
]; ];
export default function ExtensionPopover() { export default function ExtensionPopover() {
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const [activeExtension, setActiveExtension] = useState<Extension | null>(null); const [activeExtension, setActiveExtension] = useState<Extension | null>(null);
// Load active extension on component mount // Load active extension on component mount
useEffect(() => { useEffect(() => {
send("getActiveExtension", {}, (resp: JsonRpcResponse) => { send("getActiveExtension", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
const extensionId = resp.result as string; const extensionId = resp.result as string;
if (extensionId) { if (extensionId) {
@ -57,7 +57,7 @@ export default function ExtensionPopover() {
}, [send]); }, [send]);
const handleSetActiveExtension = (extension: Extension | null) => { const handleSetActiveExtension = (extension: Extension | null) => {
send("setActiveExtension", { extensionId: extension?.id || "" }, (resp: JsonRpcResponse) => { send("setActiveExtension", { extensionId: extension?.id || "" }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to set active extension: ${resp.error.data || "Unknown error"}`, `Failed to set active extension: ${resp.error.data || "Unknown error"}`,

View File

@ -16,13 +16,13 @@ import Card, { GridCard } from "@components/Card";
import { formatters } from "@/utils"; import { formatters } from "@/utils";
import { RemoteVirtualMediaState, useMountMediaStore, useRTCStore } from "@/hooks/stores"; import { RemoteVirtualMediaState, useMountMediaStore, useRTCStore } from "@/hooks/stores";
import { SettingsPageHeader } from "@components/SettingsPageheader"; import { SettingsPageHeader } from "@components/SettingsPageheader";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import { useDeviceUiNavigation } from "@/hooks/useAppNavigation"; import { useDeviceUiNavigation } from "@/hooks/useAppNavigation";
import notifications from "@/notifications"; import notifications from "@/notifications";
const MountPopopover = forwardRef<HTMLDivElement, object>((_props, ref) => { const MountPopopover = forwardRef<HTMLDivElement, object>((_props, ref) => {
const diskDataChannelStats = useRTCStore(state => state.diskDataChannelStats); const diskDataChannelStats = useRTCStore(state => state.diskDataChannelStats);
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const { remoteVirtualMediaState, setModalView, setRemoteVirtualMediaState } = const { remoteVirtualMediaState, setModalView, setRemoteVirtualMediaState } =
useMountMediaStore(); useMountMediaStore();
@ -47,7 +47,7 @@ const MountPopopover = forwardRef<HTMLDivElement, object>((_props, ref) => {
}, [diskDataChannelStats]); }, [diskDataChannelStats]);
const syncRemoteVirtualMediaState = useCallback(() => { const syncRemoteVirtualMediaState = useCallback(() => {
send("getVirtualMediaState", {}, (response: JsonRpcResponse) => { send("getVirtualMediaState", {}, response => {
if ("error" in response) { if ("error" in response) {
notifications.error( notifications.error(
`Failed to get virtual media state: ${response.error.message}`, `Failed to get virtual media state: ${response.error.message}`,
@ -59,7 +59,7 @@ const MountPopopover = forwardRef<HTMLDivElement, object>((_props, ref) => {
}, [send, setRemoteVirtualMediaState]); }, [send, setRemoteVirtualMediaState]);
const handleUnmount = () => { const handleUnmount = () => {
send("unmountImage", {}, (response: JsonRpcResponse) => { send("unmountImage", {}, response => {
if ("error" in response) { if ("error" in response) {
notifications.error(`Failed to unmount image: ${response.error.message}`); notifications.error(`Failed to unmount image: ${response.error.message}`);
} else { } else {

View File

@ -7,7 +7,7 @@ import { Button } from "@components/Button";
import { GridCard } from "@components/Card"; import { GridCard } from "@components/Card";
import { TextAreaWithLabel } from "@components/TextArea"; import { TextAreaWithLabel } from "@components/TextArea";
import { SettingsPageHeader } from "@components/SettingsPageheader"; import { SettingsPageHeader } from "@components/SettingsPageheader";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import { useHidStore, useRTCStore, useUiStore, useSettingsStore } from "@/hooks/stores"; import { useHidStore, useRTCStore, useUiStore, useSettingsStore } from "@/hooks/stores";
import { keys, modifiers } from "@/keyboardMappings"; import { keys, modifiers } from "@/keyboardMappings";
import { KeyStroke, KeyboardLayout, selectedKeyboard } from "@/keyboardLayouts"; import { KeyStroke, KeyboardLayout, selectedKeyboard } from "@/keyboardLayouts";
@ -28,7 +28,7 @@ export default function PasteModal() {
const setPasteMode = useHidStore(state => state.setPasteModeEnabled); const setPasteMode = useHidStore(state => state.setPasteModeEnabled);
const setDisableVideoFocusTrap = useUiStore(state => state.setDisableVideoFocusTrap); const setDisableVideoFocusTrap = useUiStore(state => state.setDisableVideoFocusTrap);
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const rpcDataChannel = useRTCStore(state => state.rpcDataChannel); const rpcDataChannel = useRTCStore(state => state.rpcDataChannel);
const [invalidChars, setInvalidChars] = useState<string[]>([]); const [invalidChars, setInvalidChars] = useState<string[]>([]);
@ -47,7 +47,7 @@ export default function PasteModal() {
}, [keyboardLayout]); }, [keyboardLayout]);
useEffect(() => { useEffect(() => {
send("getKeyboardLayout", {}, (resp: JsonRpcResponse) => { send("getKeyboardLayout", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
setKeyboardLayout(resp.result as string); setKeyboardLayout(resp.result as string);
}); });

View File

@ -3,7 +3,7 @@ import { useClose } from "@headlessui/react";
import { GridCard } from "@components/Card"; import { GridCard } from "@components/Card";
import { SettingsPageHeader } from "@components/SettingsPageheader"; import { SettingsPageHeader } from "@components/SettingsPageheader";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import { useRTCStore, useUiStore } from "@/hooks/stores"; import { useRTCStore, useUiStore } from "@/hooks/stores";
import notifications from "@/notifications"; import notifications from "@/notifications";
@ -18,7 +18,7 @@ export default function WakeOnLanModal() {
const rpcDataChannel = useRTCStore(state => state.rpcDataChannel); const rpcDataChannel = useRTCStore(state => state.rpcDataChannel);
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const close = useClose(); const close = useClose();
const [errorMessage, setErrorMessage] = useState<string | null>(null); const [errorMessage, setErrorMessage] = useState<string | null>(null);
const [addDeviceErrorMessage, setAddDeviceErrorMessage] = useState<string | null>(null); const [addDeviceErrorMessage, setAddDeviceErrorMessage] = useState<string | null>(null);
@ -33,7 +33,7 @@ export default function WakeOnLanModal() {
setErrorMessage(null); setErrorMessage(null);
if (rpcDataChannel?.readyState !== "open") return; if (rpcDataChannel?.readyState !== "open") return;
send("sendWOLMagicPacket", { macAddress }, (resp: JsonRpcResponse) => { send("sendWOLMagicPacket", { macAddress }, resp => {
if ("error" in resp) { if ("error" in resp) {
const isInvalid = resp.error.data?.includes("invalid MAC address"); const isInvalid = resp.error.data?.includes("invalid MAC address");
if (isInvalid) { if (isInvalid) {
@ -52,7 +52,7 @@ export default function WakeOnLanModal() {
); );
const syncStoredDevices = useCallback(() => { const syncStoredDevices = useCallback(() => {
send("getWakeOnLanDevices", {}, (resp: JsonRpcResponse) => { send("getWakeOnLanDevices", {}, resp => {
if ("result" in resp) { if ("result" in resp) {
setStoredDevices(resp.result as StoredDevice[]); setStoredDevices(resp.result as StoredDevice[]);
} else { } else {
@ -70,7 +70,7 @@ export default function WakeOnLanModal() {
(index: number) => { (index: number) => {
const updatedDevices = storedDevices.filter((_, i) => i !== index); const updatedDevices = storedDevices.filter((_, i) => i !== index);
send("setWakeOnLanDevices", { params: { devices: updatedDevices } }, (resp: JsonRpcResponse) => { send("setWakeOnLanDevices", { params: { devices: updatedDevices } }, resp => {
if ("error" in resp) { if ("error" in resp) {
console.error("Failed to update Wake-on-LAN devices:", resp.error); console.error("Failed to update Wake-on-LAN devices:", resp.error);
} else { } else {
@ -86,7 +86,7 @@ export default function WakeOnLanModal() {
if (!name || !macAddress) return; if (!name || !macAddress) return;
const updatedDevices = [...storedDevices, { name, macAddress }]; const updatedDevices = [...storedDevices, { name, macAddress }];
console.log("updatedDevices", updatedDevices); console.log("updatedDevices", updatedDevices);
send("setWakeOnLanDevices", { params: { devices: updatedDevices } }, (resp: JsonRpcResponse) => { send("setWakeOnLanDevices", { params: { devices: updatedDevices } }, resp => {
if ("error" in resp) { if ("error" in resp) {
console.error("Failed to add Wake-on-LAN device:", resp.error); console.error("Failed to add Wake-on-LAN device:", resp.error);
setAddDeviceErrorMessage("Failed to add device"); setAddDeviceErrorMessage("Failed to add device");

View File

@ -853,7 +853,7 @@ export const useMacrosStore = create<MacrosState>((set, get) => ({
try { try {
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {
sendFn("getKeyboardMacros", {}, (response: JsonRpcResponse) => { sendFn("getKeyboardMacros", {}, response => {
if (response.error) { if (response.error) {
console.error("Error loading macros:", response.error); console.error("Error loading macros:", response.error);
reject(new Error(response.error.message)); reject(new Error(response.error.message));

View File

@ -6,9 +6,7 @@ export type AudioEventType =
| 'audio-mute-changed' | 'audio-mute-changed'
| 'audio-metrics-update' | 'audio-metrics-update'
| 'microphone-state-changed' | 'microphone-state-changed'
| 'microphone-metrics-update' | 'microphone-metrics-update';
| 'audio-process-metrics'
| 'microphone-process-metrics';
// Audio event data interfaces // Audio event data interfaces
export interface AudioMuteData { export interface AudioMuteData {
@ -38,20 +36,10 @@ export interface MicrophoneMetricsData {
average_latency: string; average_latency: string;
} }
export interface ProcessMetricsData {
pid: number;
cpu_percent: number;
memory_rss: number;
memory_vms: number;
memory_percent: number;
running: boolean;
process_name: string;
}
// Audio event structure // Audio event structure
export interface AudioEvent { export interface AudioEvent {
type: AudioEventType; type: AudioEventType;
data: AudioMuteData | AudioMetricsData | MicrophoneStateData | MicrophoneMetricsData | ProcessMetricsData; data: AudioMuteData | AudioMetricsData | MicrophoneStateData | MicrophoneMetricsData;
} }
// Hook return type // Hook return type
@ -68,17 +56,13 @@ export interface UseAudioEventsReturn {
microphoneState: MicrophoneStateData | null; microphoneState: MicrophoneStateData | null;
microphoneMetrics: MicrophoneMetricsData | null; microphoneMetrics: MicrophoneMetricsData | null;
// Process metrics
audioProcessMetrics: ProcessMetricsData | null;
microphoneProcessMetrics: ProcessMetricsData | null;
// Manual subscription control // Manual subscription control
subscribe: () => void; subscribe: () => void;
unsubscribe: () => void; unsubscribe: () => void;
} }
// Global subscription management to prevent multiple subscriptions per WebSocket connection // Global subscription management to prevent multiple subscriptions per WebSocket connection
const globalSubscriptionState = { let globalSubscriptionState = {
isSubscribed: false, isSubscribed: false,
subscriberCount: 0, subscriberCount: 0,
connectionId: null as string | null connectionId: null as string | null
@ -90,8 +74,6 @@ export function useAudioEvents(): UseAudioEventsReturn {
const [audioMetrics, setAudioMetrics] = useState<AudioMetricsData | null>(null); const [audioMetrics, setAudioMetrics] = useState<AudioMetricsData | null>(null);
const [microphoneState, setMicrophoneState] = useState<MicrophoneStateData | null>(null); const [microphoneState, setMicrophoneState] = useState<MicrophoneStateData | null>(null);
const [microphoneMetrics, setMicrophoneMetricsData] = useState<MicrophoneMetricsData | null>(null); const [microphoneMetrics, setMicrophoneMetricsData] = useState<MicrophoneMetricsData | null>(null);
const [audioProcessMetrics, setAudioProcessMetrics] = useState<ProcessMetricsData | null>(null);
const [microphoneProcessMetrics, setMicrophoneProcessMetrics] = useState<ProcessMetricsData | null>(null);
// Local subscription state // Local subscription state
const [isLocallySubscribed, setIsLocallySubscribed] = useState(false); const [isLocallySubscribed, setIsLocallySubscribed] = useState(false);
@ -232,18 +214,6 @@ export function useAudioEvents(): UseAudioEventsReturn {
break; break;
} }
case 'audio-process-metrics': {
const audioProcessData = audioEvent.data as ProcessMetricsData;
setAudioProcessMetrics(audioProcessData);
break;
}
case 'microphone-process-metrics': {
const micProcessData = audioEvent.data as ProcessMetricsData;
setMicrophoneProcessMetrics(micProcessData);
break;
}
default: default:
// Ignore other message types (WebRTC signaling, etc.) // Ignore other message types (WebRTC signaling, etc.)
break; break;
@ -305,10 +275,6 @@ export function useAudioEvents(): UseAudioEventsReturn {
microphoneState, microphoneState,
microphoneMetrics: microphoneMetrics, microphoneMetrics: microphoneMetrics,
// Process metrics
audioProcessMetrics,
microphoneProcessMetrics,
// Manual subscription control // Manual subscription control
subscribe, subscribe,
unsubscribe, unsubscribe,

View File

@ -78,5 +78,5 @@ export function useJsonRpc(onRequest?: (payload: JsonRpcRequest) => void) {
}; };
}, [rpcDataChannel, onRequest]); }, [rpcDataChannel, onRequest]);
return { send }; return [send];
} }

View File

@ -5,7 +5,7 @@ import { useJsonRpc } from "@/hooks/useJsonRpc";
import { keys, modifiers } from "@/keyboardMappings"; import { keys, modifiers } from "@/keyboardMappings";
export default function useKeyboard() { export default function useKeyboard() {
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const rpcDataChannel = useRTCStore(state => state.rpcDataChannel); const rpcDataChannel = useRTCStore(state => state.rpcDataChannel);
const updateActiveKeysAndModifiers = useHidStore( const updateActiveKeysAndModifiers = useHidStore(

View File

@ -858,15 +858,11 @@ export function useMicrophone() {
}, [microphoneSender, peerConnection]); }, [microphoneSender, peerConnection]);
const startMicrophoneDebounced = useCallback((deviceId?: string) => { const startMicrophoneDebounced = useCallback((deviceId?: string) => {
debouncedOperation(async () => { debouncedOperation(() => startMicrophone(deviceId).then(() => {}), "start");
await startMicrophone(deviceId).catch(console.error);
}, "start");
}, [startMicrophone, debouncedOperation]); }, [startMicrophone, debouncedOperation]);
const stopMicrophoneDebounced = useCallback(() => { const stopMicrophoneDebounced = useCallback(() => {
debouncedOperation(async () => { debouncedOperation(() => stopMicrophone().then(() => {}), "stop");
await stopMicrophone().catch(console.error);
}, "stop");
}, [stopMicrophone, debouncedOperation]); }, [stopMicrophone, debouncedOperation]);
// Make debug functions available globally for console access // Make debug functions available globally for console access

View File

@ -27,7 +27,7 @@ import NetBootIcon from "@/assets/netboot-icon.svg";
import Fieldset from "@/components/Fieldset"; import Fieldset from "@/components/Fieldset";
import { DEVICE_API } from "@/ui.config"; import { DEVICE_API } from "@/ui.config";
import { JsonRpcResponse, useJsonRpc } from "../hooks/useJsonRpc"; import { useJsonRpc } from "../hooks/useJsonRpc";
import notifications from "../notifications"; import notifications from "../notifications";
import { isOnDevice } from "../main"; import { isOnDevice } from "../main";
import { cx } from "../cva.config"; import { cx } from "../cva.config";
@ -64,10 +64,10 @@ export function Dialog({ onClose }: { onClose: () => void }) {
setRemoteVirtualMediaState(null); setRemoteVirtualMediaState(null);
} }
const { send } = useJsonRpc(); const [send] = useJsonRpc();
async function syncRemoteVirtualMediaState() { async function syncRemoteVirtualMediaState() {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
send("getVirtualMediaState", {}, (resp: JsonRpcResponse) => { send("getVirtualMediaState", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
reject(new Error(resp.error.message)); reject(new Error(resp.error.message));
} else { } else {
@ -89,7 +89,7 @@ export function Dialog({ onClose }: { onClose: () => void }) {
console.log(`Mounting ${url} as ${mode}`); console.log(`Mounting ${url} as ${mode}`);
setMountInProgress(true); setMountInProgress(true);
send("mountWithHTTP", { url, mode }, async (resp: JsonRpcResponse) => { send("mountWithHTTP", { url, mode }, async resp => {
if ("error" in resp) triggerError(resp.error.message); if ("error" in resp) triggerError(resp.error.message);
clearMountMediaState(); clearMountMediaState();
@ -108,7 +108,7 @@ export function Dialog({ onClose }: { onClose: () => void }) {
console.log(`Mounting ${fileName} as ${mode}`); console.log(`Mounting ${fileName} as ${mode}`);
setMountInProgress(true); setMountInProgress(true);
send("mountWithStorage", { filename: fileName, mode }, async (resp: JsonRpcResponse) => { send("mountWithStorage", { filename: fileName, mode }, async resp => {
if ("error" in resp) triggerError(resp.error.message); if ("error" in resp) triggerError(resp.error.message);
clearMountMediaState(); clearMountMediaState();
@ -689,7 +689,7 @@ function DeviceFileView({
const [currentPage, setCurrentPage] = useState(1); const [currentPage, setCurrentPage] = useState(1);
const filesPerPage = 5; const filesPerPage = 5;
const { send } = useJsonRpc(); const [send] = useJsonRpc();
interface StorageSpace { interface StorageSpace {
bytesUsed: number; bytesUsed: number;
@ -718,12 +718,12 @@ function DeviceFileView({
}, [storageSpace]); }, [storageSpace]);
const syncStorage = useCallback(() => { const syncStorage = useCallback(() => {
send("listStorageFiles", {}, (resp: JsonRpcResponse) => { send("listStorageFiles", {}, res => {
if ("error" in resp) { if ("error" in res) {
notifications.error(`Error listing storage files: ${resp.error}`); notifications.error(`Error listing storage files: ${res.error}`);
return; return;
} }
const { files } = resp.result as StorageFiles; const { files } = res.result as StorageFiles;
const formattedFiles = files.map(file => ({ const formattedFiles = files.map(file => ({
name: file.filename, name: file.filename,
size: formatters.bytes(file.size), size: formatters.bytes(file.size),
@ -733,13 +733,13 @@ function DeviceFileView({
setOnStorageFiles(formattedFiles); setOnStorageFiles(formattedFiles);
}); });
send("getStorageSpace", {}, (resp: JsonRpcResponse) => { send("getStorageSpace", {}, res => {
if ("error" in resp) { if ("error" in res) {
notifications.error(`Error getting storage space: ${resp.error}`); notifications.error(`Error getting storage space: ${res.error}`);
return; return;
} }
const space = resp.result as StorageSpace; const space = res.result as StorageSpace;
setStorageSpace(space); setStorageSpace(space);
}); });
}, [send, setOnStorageFiles, setStorageSpace]); }, [send, setOnStorageFiles, setStorageSpace]);
@ -762,9 +762,9 @@ function DeviceFileView({
function handleDeleteFile(file: { name: string; size: string; createdAt: string }) { function handleDeleteFile(file: { name: string; size: string; createdAt: string }) {
console.log("Deleting file:", file); console.log("Deleting file:", file);
send("deleteStorageFile", { filename: file.name }, (resp: JsonRpcResponse) => { send("deleteStorageFile", { filename: file.name }, res => {
if ("error" in resp) { if ("error" in res) {
notifications.error(`Error deleting file: ${resp.error}`); notifications.error(`Error deleting file: ${res.error}`);
return; return;
} }
@ -1001,7 +1001,7 @@ function UploadFileView({
const [fileError, setFileError] = useState<string | null>(null); const [fileError, setFileError] = useState<string | null>(null);
const [uploadError, setUploadError] = useState<string | null>(null); const [uploadError, setUploadError] = useState<string | null>(null);
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const rtcDataChannelRef = useRef<RTCDataChannel | null>(null); const rtcDataChannelRef = useRef<RTCDataChannel | null>(null);
useEffect(() => { useEffect(() => {
@ -1216,7 +1216,7 @@ function UploadFileView({
setUploadState("uploading"); setUploadState("uploading");
console.log("Upload state set to 'uploading'"); console.log("Upload state set to 'uploading'");
send("startStorageFileUpload", { filename: file.name, size: file.size }, (resp: JsonRpcResponse) => { send("startStorageFileUpload", { filename: file.name, size: file.size }, resp => {
console.log("startStorageFileUpload response:", resp); console.log("startStorageFileUpload response:", resp);
if ("error" in resp) { if ("error" in resp) {
console.error("Upload error:", resp.error.message); console.error("Upload error:", resp.error.message);

View File

@ -12,7 +12,7 @@ import { SettingsSectionHeader } from "@/components/SettingsSectionHeader";
import { useDeviceUiNavigation } from "@/hooks/useAppNavigation"; import { useDeviceUiNavigation } from "@/hooks/useAppNavigation";
import notifications from "@/notifications"; import notifications from "@/notifications";
import { DEVICE_API } from "@/ui.config"; import { DEVICE_API } from "@/ui.config";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import { isOnDevice } from "@/main"; import { isOnDevice } from "@/main";
import { TextAreaWithLabel } from "@components/TextArea"; import { TextAreaWithLabel } from "@components/TextArea";
@ -42,7 +42,7 @@ export default function SettingsAccessIndexRoute() {
const { navigateTo } = useDeviceUiNavigation(); const { navigateTo } = useDeviceUiNavigation();
const navigate = useNavigate(); const navigate = useNavigate();
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const [isAdopted, setAdopted] = useState(false); const [isAdopted, setAdopted] = useState(false);
const [deviceId, setDeviceId] = useState<string | null>(null); const [deviceId, setDeviceId] = useState<string | null>(null);
@ -56,7 +56,7 @@ export default function SettingsAccessIndexRoute() {
const [tlsKey, setTlsKey] = useState<string>(""); const [tlsKey, setTlsKey] = useState<string>("");
const getCloudState = useCallback(() => { const getCloudState = useCallback(() => {
send("getCloudState", {}, (resp: JsonRpcResponse) => { send("getCloudState", {}, resp => {
if ("error" in resp) return console.error(resp.error); if ("error" in resp) return console.error(resp.error);
const cloudState = resp.result as CloudState; const cloudState = resp.result as CloudState;
setAdopted(cloudState.connected); setAdopted(cloudState.connected);
@ -77,7 +77,7 @@ export default function SettingsAccessIndexRoute() {
}, [send]); }, [send]);
const getTLSState = useCallback(() => { const getTLSState = useCallback(() => {
send("getTLSState", {}, (resp: JsonRpcResponse) => { send("getTLSState", {}, resp => {
if ("error" in resp) return console.error(resp.error); if ("error" in resp) return console.error(resp.error);
const tlsState = resp.result as TLSState; const tlsState = resp.result as TLSState;
@ -88,7 +88,7 @@ export default function SettingsAccessIndexRoute() {
}, [send]); }, [send]);
const deregisterDevice = async () => { const deregisterDevice = async () => {
send("deregisterDevice", {}, (resp: JsonRpcResponse) => { send("deregisterDevice", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to de-register device: ${resp.error.data || "Unknown error"}`, `Failed to de-register device: ${resp.error.data || "Unknown error"}`,
@ -110,7 +110,7 @@ export default function SettingsAccessIndexRoute() {
return; return;
} }
send("setCloudUrl", { apiUrl: cloudApiUrl, appUrl: cloudAppUrl }, (resp: JsonRpcResponse) => { send("setCloudUrl", { apiUrl: cloudApiUrl, appUrl: cloudAppUrl }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to update cloud URL: ${resp.error.data || "Unknown error"}`, `Failed to update cloud URL: ${resp.error.data || "Unknown error"}`,
@ -156,7 +156,7 @@ export default function SettingsAccessIndexRoute() {
state.privateKey = key; state.privateKey = key;
} }
send("setTLSState", { state }, (resp: JsonRpcResponse) => { send("setTLSState", { state }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to update TLS settings: ${resp.error.data || "Unknown error"}`, `Failed to update TLS settings: ${resp.error.data || "Unknown error"}`,
@ -198,7 +198,7 @@ export default function SettingsAccessIndexRoute() {
getCloudState(); getCloudState();
getTLSState(); getTLSState();
send("getDeviceID", {}, async (resp: JsonRpcResponse) => { send("getDeviceID", {}, async resp => {
if ("error" in resp) return console.error(resp.error); if ("error" in resp) return console.error(resp.error);
setDeviceId(resp.result as string); setDeviceId(resp.result as string);
}); });

View File

@ -8,14 +8,14 @@ import { ConfirmDialog } from "../components/ConfirmDialog";
import { SettingsPageHeader } from "../components/SettingsPageheader"; import { SettingsPageHeader } from "../components/SettingsPageheader";
import { TextAreaWithLabel } from "../components/TextArea"; import { TextAreaWithLabel } from "../components/TextArea";
import { useSettingsStore } from "../hooks/stores"; import { useSettingsStore } from "../hooks/stores";
import { JsonRpcResponse, useJsonRpc } from "../hooks/useJsonRpc"; import { useJsonRpc } from "../hooks/useJsonRpc";
import { isOnDevice } from "../main"; import { isOnDevice } from "../main";
import notifications from "../notifications"; import notifications from "../notifications";
import { SettingsItem } from "./devices.$id.settings"; import { SettingsItem } from "./devices.$id.settings";
export default function SettingsAdvancedRoute() { export default function SettingsAdvancedRoute() {
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const [sshKey, setSSHKey] = useState<string>(""); const [sshKey, setSSHKey] = useState<string>("");
const setDeveloperMode = useSettingsStore(state => state.setDeveloperMode); const setDeveloperMode = useSettingsStore(state => state.setDeveloperMode);
@ -27,35 +27,35 @@ export default function SettingsAdvancedRoute() {
const settings = useSettingsStore(); const settings = useSettingsStore();
useEffect(() => { useEffect(() => {
send("getDevModeState", {}, (resp: JsonRpcResponse) => { send("getDevModeState", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
const result = resp.result as { enabled: boolean }; const result = resp.result as { enabled: boolean };
setDeveloperMode(result.enabled); setDeveloperMode(result.enabled);
}); });
send("getSSHKeyState", {}, (resp: JsonRpcResponse) => { send("getSSHKeyState", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
setSSHKey(resp.result as string); setSSHKey(resp.result as string);
}); });
send("getUsbEmulationState", {}, (resp: JsonRpcResponse) => { send("getUsbEmulationState", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
setUsbEmulationEnabled(resp.result as boolean); setUsbEmulationEnabled(resp.result as boolean);
}); });
send("getDevChannelState", {}, (resp: JsonRpcResponse) => { send("getDevChannelState", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
setDevChannel(resp.result as boolean); setDevChannel(resp.result as boolean);
}); });
send("getLocalLoopbackOnly", {}, (resp: JsonRpcResponse) => { send("getLocalLoopbackOnly", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
setLocalLoopbackOnly(resp.result as boolean); setLocalLoopbackOnly(resp.result as boolean);
}); });
}, [send, setDeveloperMode]); }, [send, setDeveloperMode]);
const getUsbEmulationState = useCallback(() => { const getUsbEmulationState = useCallback(() => {
send("getUsbEmulationState", {}, (resp: JsonRpcResponse) => { send("getUsbEmulationState", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
setUsbEmulationEnabled(resp.result as boolean); setUsbEmulationEnabled(resp.result as boolean);
}); });
@ -63,7 +63,7 @@ export default function SettingsAdvancedRoute() {
const handleUsbEmulationToggle = useCallback( const handleUsbEmulationToggle = useCallback(
(enabled: boolean) => { (enabled: boolean) => {
send("setUsbEmulationState", { enabled: enabled }, (resp: JsonRpcResponse) => { send("setUsbEmulationState", { enabled: enabled }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to ${enabled ? "enable" : "disable"} USB emulation: ${resp.error.data || "Unknown error"}`, `Failed to ${enabled ? "enable" : "disable"} USB emulation: ${resp.error.data || "Unknown error"}`,
@ -78,7 +78,7 @@ export default function SettingsAdvancedRoute() {
); );
const handleResetConfig = useCallback(() => { const handleResetConfig = useCallback(() => {
send("resetConfig", {}, (resp: JsonRpcResponse) => { send("resetConfig", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to reset configuration: ${resp.error.data || "Unknown error"}`, `Failed to reset configuration: ${resp.error.data || "Unknown error"}`,
@ -90,7 +90,7 @@ export default function SettingsAdvancedRoute() {
}, [send]); }, [send]);
const handleUpdateSSHKey = useCallback(() => { const handleUpdateSSHKey = useCallback(() => {
send("setSSHKeyState", { sshKey }, (resp: JsonRpcResponse) => { send("setSSHKeyState", { sshKey }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to update SSH key: ${resp.error.data || "Unknown error"}`, `Failed to update SSH key: ${resp.error.data || "Unknown error"}`,
@ -103,7 +103,7 @@ export default function SettingsAdvancedRoute() {
const handleDevModeChange = useCallback( const handleDevModeChange = useCallback(
(developerMode: boolean) => { (developerMode: boolean) => {
send("setDevModeState", { enabled: developerMode }, (resp: JsonRpcResponse) => { send("setDevModeState", { enabled: developerMode }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to set dev mode: ${resp.error.data || "Unknown error"}`, `Failed to set dev mode: ${resp.error.data || "Unknown error"}`,
@ -118,7 +118,7 @@ export default function SettingsAdvancedRoute() {
const handleDevChannelChange = useCallback( const handleDevChannelChange = useCallback(
(enabled: boolean) => { (enabled: boolean) => {
send("setDevChannelState", { enabled }, (resp: JsonRpcResponse) => { send("setDevChannelState", { enabled }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to set dev channel state: ${resp.error.data || "Unknown error"}`, `Failed to set dev channel state: ${resp.error.data || "Unknown error"}`,
@ -133,7 +133,7 @@ export default function SettingsAdvancedRoute() {
const applyLoopbackOnlyMode = useCallback( const applyLoopbackOnlyMode = useCallback(
(enabled: boolean) => { (enabled: boolean) => {
send("setLocalLoopbackOnly", { enabled }, (resp: JsonRpcResponse) => { send("setLocalLoopbackOnly", { enabled }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to ${enabled ? "enable" : "disable"} loopback-only mode: ${resp.error.data || "Unknown error"}`, `Failed to ${enabled ? "enable" : "disable"} loopback-only mode: ${resp.error.data || "Unknown error"}`,

View File

@ -1,7 +1,7 @@
import { useState , useEffect } from "react"; import { useState , useEffect } from "react";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import { SettingsPageHeader } from "../components/SettingsPageheader"; import { SettingsPageHeader } from "../components/SettingsPageheader";
import { Button } from "../components/Button"; import { Button } from "../components/Button";
@ -13,7 +13,7 @@ import { useDeviceStore } from "../hooks/stores";
import { SettingsItem } from "./devices.$id.settings"; import { SettingsItem } from "./devices.$id.settings";
export default function SettingsGeneralRoute() { export default function SettingsGeneralRoute() {
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const { navigateTo } = useDeviceUiNavigation(); const { navigateTo } = useDeviceUiNavigation();
const [autoUpdate, setAutoUpdate] = useState(true); const [autoUpdate, setAutoUpdate] = useState(true);
@ -24,14 +24,14 @@ export default function SettingsGeneralRoute() {
}); });
useEffect(() => { useEffect(() => {
send("getAutoUpdateState", {}, (resp: JsonRpcResponse) => { send("getAutoUpdateState", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
setAutoUpdate(resp.result as boolean); setAutoUpdate(resp.result as boolean);
}); });
}, [send]); }, [send]);
const handleAutoUpdateChange = (enabled: boolean) => { const handleAutoUpdateChange = (enabled: boolean) => {
send("setAutoUpdateState", { enabled }, (resp: JsonRpcResponse) => { send("setAutoUpdateState", { enabled }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to set auto-update: ${resp.error.data || "Unknown error"}`, `Failed to set auto-update: ${resp.error.data || "Unknown error"}`,

View File

@ -6,7 +6,7 @@ import { Button } from "@components/Button";
export default function SettingsGeneralRebootRoute() { export default function SettingsGeneralRebootRoute() {
const navigate = useNavigate(); const navigate = useNavigate();
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const onConfirmUpdate = useCallback(() => { const onConfirmUpdate = useCallback(() => {
// This is where we send the RPC to the golang binary // This is where we send the RPC to the golang binary

View File

@ -3,7 +3,7 @@ import { useCallback, useEffect, useRef, useState } from "react";
import { CheckCircleIcon } from "@heroicons/react/20/solid"; import { CheckCircleIcon } from "@heroicons/react/20/solid";
import Card from "@/components/Card"; import Card from "@/components/Card";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import { Button } from "@components/Button"; import { Button } from "@components/Button";
import { UpdateState, useDeviceStore, useUpdateStore } from "@/hooks/stores"; import { UpdateState, useDeviceStore, useUpdateStore } from "@/hooks/stores";
import notifications from "@/notifications"; import notifications from "@/notifications";
@ -16,7 +16,7 @@ export default function SettingsGeneralUpdateRoute() {
const { updateSuccess } = location.state || {}; const { updateSuccess } = location.state || {};
const { setModalView, otaState } = useUpdateStore(); const { setModalView, otaState } = useUpdateStore();
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const onConfirmUpdate = useCallback(() => { const onConfirmUpdate = useCallback(() => {
send("tryUpdate", {}); send("tryUpdate", {});
@ -134,14 +134,14 @@ function LoadingState({
}) { }) {
const [progressWidth, setProgressWidth] = useState("0%"); const [progressWidth, setProgressWidth] = useState("0%");
const abortControllerRef = useRef<AbortController | null>(null); const abortControllerRef = useRef<AbortController | null>(null);
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const setAppVersion = useDeviceStore(state => state.setAppVersion); const setAppVersion = useDeviceStore(state => state.setAppVersion);
const setSystemVersion = useDeviceStore(state => state.setSystemVersion); const setSystemVersion = useDeviceStore(state => state.setSystemVersion);
const getVersionInfo = useCallback(() => { const getVersionInfo = useCallback(() => {
return new Promise<SystemVersionInfo>((resolve, reject) => { return new Promise<SystemVersionInfo>((resolve, reject) => {
send("getUpdateStatus", {}, async (resp: JsonRpcResponse) => { send("getUpdateStatus", {}, async resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error(`Failed to check for updates: ${resp.error}`); notifications.error(`Failed to check for updates: ${resp.error}`);
reject(new Error("Failed to check for updates")); reject(new Error("Failed to check for updates"));

View File

@ -3,7 +3,7 @@ import { useEffect } from "react";
import { SettingsPageHeader } from "@components/SettingsPageheader"; import { SettingsPageHeader } from "@components/SettingsPageheader";
import { SettingsItem } from "@routes/devices.$id.settings"; import { SettingsItem } from "@routes/devices.$id.settings";
import { BacklightSettings, useSettingsStore } from "@/hooks/stores"; import { BacklightSettings, useSettingsStore } from "@/hooks/stores";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import { SelectMenuBasic } from "@components/SelectMenuBasic"; import { SelectMenuBasic } from "@components/SelectMenuBasic";
import { UsbDeviceSetting } from "@components/UsbDeviceSetting"; import { UsbDeviceSetting } from "@components/UsbDeviceSetting";
@ -12,7 +12,7 @@ import { UsbInfoSetting } from "../components/UsbInfoSetting";
import { FeatureFlag } from "../components/FeatureFlag"; import { FeatureFlag } from "../components/FeatureFlag";
export default function SettingsHardwareRoute() { export default function SettingsHardwareRoute() {
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const settings = useSettingsStore(); const settings = useSettingsStore();
const setDisplayRotation = useSettingsStore(state => state.setDisplayRotation); const setDisplayRotation = useSettingsStore(state => state.setDisplayRotation);
@ -23,7 +23,7 @@ export default function SettingsHardwareRoute() {
}; };
const handleDisplayRotationSave = () => { const handleDisplayRotationSave = () => {
send("setDisplayRotation", { params: { rotation: settings.displayRotation } }, (resp: JsonRpcResponse) => { send("setDisplayRotation", { params: { rotation: settings.displayRotation } }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to set display orientation: ${resp.error.data || "Unknown error"}`, `Failed to set display orientation: ${resp.error.data || "Unknown error"}`,
@ -48,7 +48,7 @@ export default function SettingsHardwareRoute() {
}; };
const handleBacklightSettingsSave = () => { const handleBacklightSettingsSave = () => {
send("setBacklightSettings", { params: settings.backlightSettings }, (resp: JsonRpcResponse) => { send("setBacklightSettings", { params: settings.backlightSettings }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to set backlight settings: ${resp.error.data || "Unknown error"}`, `Failed to set backlight settings: ${resp.error.data || "Unknown error"}`,
@ -60,7 +60,7 @@ export default function SettingsHardwareRoute() {
}; };
useEffect(() => { useEffect(() => {
send("getBacklightSettings", {}, (resp: JsonRpcResponse) => { send("getBacklightSettings", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
return notifications.error( return notifications.error(
`Failed to get backlight settings: ${resp.error.data || "Unknown error"}`, `Failed to get backlight settings: ${resp.error.data || "Unknown error"}`,

View File

@ -1,7 +1,7 @@
import { useCallback, useEffect, useMemo } from "react"; import { useCallback, useEffect, useMemo } from "react";
import { KeyboardLedSync, useSettingsStore } from "@/hooks/stores"; import { KeyboardLedSync, useSettingsStore } from "@/hooks/stores";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import notifications from "@/notifications"; import notifications from "@/notifications";
import { SettingsPageHeader } from "@components/SettingsPageheader"; import { SettingsPageHeader } from "@components/SettingsPageheader";
import { keyboardOptions } from "@/keyboardLayouts"; import { keyboardOptions } from "@/keyboardLayouts";
@ -39,10 +39,10 @@ export default function SettingsKeyboardRoute() {
{ value: "host", label: "Host Only" }, { value: "host", label: "Host Only" },
]; ];
const { send } = useJsonRpc(); const [send] = useJsonRpc();
useEffect(() => { useEffect(() => {
send("getKeyboardLayout", {}, (resp: JsonRpcResponse) => { send("getKeyboardLayout", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
setKeyboardLayout(resp.result as string); setKeyboardLayout(resp.result as string);
}); });
@ -51,7 +51,7 @@ export default function SettingsKeyboardRoute() {
const onKeyboardLayoutChange = useCallback( const onKeyboardLayoutChange = useCallback(
(e: React.ChangeEvent<HTMLSelectElement>) => { (e: React.ChangeEvent<HTMLSelectElement>) => {
const layout = e.target.value; const layout = e.target.value;
send("setKeyboardLayout", { layout }, (resp: JsonRpcResponse) => { send("setKeyboardLayout", { layout }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to set keyboard layout: ${resp.error.data || "Unknown error"}`, `Failed to set keyboard layout: ${resp.error.data || "Unknown error"}`,

View File

@ -6,7 +6,7 @@ import PointingFinger from "@/assets/pointing-finger.svg";
import { GridCard } from "@/components/Card"; import { GridCard } from "@/components/Card";
import { Checkbox } from "@/components/Checkbox"; import { Checkbox } from "@/components/Checkbox";
import { useSettingsStore } from "@/hooks/stores"; import { useSettingsStore } from "@/hooks/stores";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import { SettingsPageHeader } from "@components/SettingsPageheader"; import { SettingsPageHeader } from "@components/SettingsPageheader";
import { SelectMenuBasic } from "@components/SelectMenuBasic"; import { SelectMenuBasic } from "@components/SelectMenuBasic";
import { JigglerSetting } from "@components/JigglerSetting"; import { JigglerSetting } from "@components/JigglerSetting";
@ -87,17 +87,17 @@ export default function SettingsMouseRoute() {
{ value: "100", label: "Very High" }, { value: "100", label: "Very High" },
]; ];
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const syncJigglerSettings = useCallback(() => { const syncJigglerSettings = useCallback(() => {
send("getJigglerState", {}, (resp: JsonRpcResponse) => { send("getJigglerState", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
const isEnabled = resp.result as boolean; const isEnabled = resp.result as boolean;
// If the jiggler is disabled, set the selected option to "disabled" and nothing else // If the jiggler is disabled, set the selected option to "disabled" and nothing else
if (!isEnabled) return setSelectedJigglerOption("disabled"); if (!isEnabled) return setSelectedJigglerOption("disabled");
send("getJigglerConfig", {}, (resp: JsonRpcResponse) => { send("getJigglerConfig", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
const result = resp.result as JigglerConfig; const result = resp.result as JigglerConfig;
setCurrentJigglerConfig(result); setCurrentJigglerConfig(result);
@ -121,7 +121,7 @@ export default function SettingsMouseRoute() {
const saveJigglerConfig = useCallback( const saveJigglerConfig = useCallback(
(jigglerConfig: JigglerConfig) => { (jigglerConfig: JigglerConfig) => {
// We assume the jiggler should be set to enabled if the config is being updated // We assume the jiggler should be set to enabled if the config is being updated
send("setJigglerState", { enabled: true }, async (resp: JsonRpcResponse) => { send("setJigglerState", { enabled: true }, async resp => {
if ("error" in resp) { if ("error" in resp) {
return notifications.error( return notifications.error(
`Failed to set jiggler state: ${resp.error.data || "Unknown error"}`, `Failed to set jiggler state: ${resp.error.data || "Unknown error"}`,
@ -129,7 +129,7 @@ export default function SettingsMouseRoute() {
} }
}); });
send("setJigglerConfig", { jigglerConfig }, async (resp: JsonRpcResponse) => { send("setJigglerConfig", { jigglerConfig }, async resp => {
if ("error" in resp) { if ("error" in resp) {
const errorMsg = resp.error.data || "Unknown error"; const errorMsg = resp.error.data || "Unknown error";
@ -163,7 +163,7 @@ export default function SettingsMouseRoute() {
// We don't need to update the device jiggler state when the option is "disabled" // We don't need to update the device jiggler state when the option is "disabled"
if (option === "disabled") { if (option === "disabled") {
send("setJigglerState", { enabled: false }, async (resp: JsonRpcResponse) => { send("setJigglerState", { enabled: false }, async resp => {
if ("error" in resp) { if ("error" in resp) {
return notifications.error( return notifications.error(
`Failed to set jiggler state: ${resp.error.data || "Unknown error"}`, `Failed to set jiggler state: ${resp.error.data || "Unknown error"}`,

View File

@ -13,7 +13,7 @@ import {
TimeSyncMode, TimeSyncMode,
useNetworkStateStore, useNetworkStateStore,
} from "@/hooks/stores"; } from "@/hooks/stores";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import { Button } from "@components/Button"; import { Button } from "@components/Button";
import { GridCard } from "@components/Card"; import { GridCard } from "@components/Card";
import InputField, { InputFieldWithLabel } from "@components/InputField"; import InputField, { InputFieldWithLabel } from "@components/InputField";
@ -72,7 +72,7 @@ export function LifeTimeLabel({ lifetime }: { lifetime: string }) {
} }
export default function SettingsNetworkRoute() { export default function SettingsNetworkRoute() {
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const [networkState, setNetworkState] = useNetworkStateStore(state => [ const [networkState, setNetworkState] = useNetworkStateStore(state => [
state, state,
state.setNetworkState, state.setNetworkState,
@ -104,7 +104,7 @@ export default function SettingsNetworkRoute() {
const getNetworkSettings = useCallback(() => { const getNetworkSettings = useCallback(() => {
setNetworkSettingsLoaded(false); setNetworkSettingsLoaded(false);
send("getNetworkSettings", {}, (resp: JsonRpcResponse) => { send("getNetworkSettings", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
console.log(resp.result); console.log(resp.result);
setNetworkSettings(resp.result as NetworkSettings); setNetworkSettings(resp.result as NetworkSettings);
@ -117,7 +117,7 @@ export default function SettingsNetworkRoute() {
}, [send]); }, [send]);
const getNetworkState = useCallback(() => { const getNetworkState = useCallback(() => {
send("getNetworkState", {}, (resp: JsonRpcResponse) => { send("getNetworkState", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
console.log(resp.result); console.log(resp.result);
setNetworkState(resp.result as NetworkState); setNetworkState(resp.result as NetworkState);
@ -127,7 +127,7 @@ export default function SettingsNetworkRoute() {
const setNetworkSettingsRemote = useCallback( const setNetworkSettingsRemote = useCallback(
(settings: NetworkSettings) => { (settings: NetworkSettings) => {
setNetworkSettingsLoaded(false); setNetworkSettingsLoaded(false);
send("setNetworkSettings", { settings }, (resp: JsonRpcResponse) => { send("setNetworkSettings", { settings }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
"Failed to save network settings: " + "Failed to save network settings: " +
@ -148,7 +148,7 @@ export default function SettingsNetworkRoute() {
); );
const handleRenewLease = useCallback(() => { const handleRenewLease = useCallback(() => {
send("renewDHCPLease", {}, (resp: JsonRpcResponse) => { send("renewDHCPLease", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error("Failed to renew lease: " + resp.error.message); notifications.error("Failed to renew lease: " + resp.error.message);
} else { } else {

View File

@ -2,7 +2,7 @@ import { useState, useEffect } from "react";
import { Button } from "@/components/Button"; import { Button } from "@/components/Button";
import { TextAreaWithLabel } from "@/components/TextArea"; import { TextAreaWithLabel } from "@/components/TextArea";
import { JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { useJsonRpc } from "@/hooks/useJsonRpc";
import { SettingsPageHeader } from "@components/SettingsPageheader"; import { SettingsPageHeader } from "@components/SettingsPageheader";
import { useSettingsStore } from "@/hooks/stores"; import { useSettingsStore } from "@/hooks/stores";
@ -41,7 +41,7 @@ const streamQualityOptions = [
]; ];
export default function SettingsVideoRoute() { export default function SettingsVideoRoute() {
const { send } = useJsonRpc(); const [send] = useJsonRpc();
const [streamQuality, setStreamQuality] = useState("1"); const [streamQuality, setStreamQuality] = useState("1");
const [customEdidValue, setCustomEdidValue] = useState<string | null>(null); const [customEdidValue, setCustomEdidValue] = useState<string | null>(null);
const [edid, setEdid] = useState<string | null>(null); const [edid, setEdid] = useState<string | null>(null);
@ -55,12 +55,12 @@ export default function SettingsVideoRoute() {
const setVideoContrast = useSettingsStore(state => state.setVideoContrast); const setVideoContrast = useSettingsStore(state => state.setVideoContrast);
useEffect(() => { useEffect(() => {
send("getStreamQualityFactor", {}, (resp: JsonRpcResponse) => { send("getStreamQualityFactor", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
setStreamQuality(String(resp.result)); setStreamQuality(String(resp.result));
}); });
send("getEDID", {}, (resp: JsonRpcResponse) => { send("getEDID", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error(`Failed to get EDID: ${resp.error.data || "Unknown error"}`); notifications.error(`Failed to get EDID: ${resp.error.data || "Unknown error"}`);
return; return;
@ -85,7 +85,7 @@ export default function SettingsVideoRoute() {
}, [send]); }, [send]);
const handleStreamQualityChange = (factor: string) => { const handleStreamQualityChange = (factor: string) => {
send("setStreamQualityFactor", { factor: Number(factor) }, (resp: JsonRpcResponse) => { send("setStreamQualityFactor", { factor: Number(factor) }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error( notifications.error(
`Failed to set stream quality: ${resp.error.data || "Unknown error"}`, `Failed to set stream quality: ${resp.error.data || "Unknown error"}`,
@ -99,7 +99,7 @@ export default function SettingsVideoRoute() {
}; };
const handleEDIDChange = (newEdid: string) => { const handleEDIDChange = (newEdid: string) => {
send("setEDID", { edid: newEdid }, (resp: JsonRpcResponse) => { send("setEDID", { edid: newEdid }, resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error(`Failed to set EDID: ${resp.error.data || "Unknown error"}`); notifications.error(`Failed to set EDID: ${resp.error.data || "Unknown error"}`);
return; return;

View File

@ -39,7 +39,7 @@ import { checkAuth, isInCloud, isOnDevice } from "@/main";
import DashboardNavbar from "@components/Header"; import DashboardNavbar from "@components/Header";
import ConnectionStatsSidebar from "@/components/sidebar/connectionStats"; import ConnectionStatsSidebar from "@/components/sidebar/connectionStats";
import AudioMetricsSidebar from "@/components/sidebar/AudioMetricsSidebar"; import AudioMetricsSidebar from "@/components/sidebar/AudioMetricsSidebar";
import { JsonRpcRequest, JsonRpcResponse, useJsonRpc } from "@/hooks/useJsonRpc"; import { JsonRpcRequest, useJsonRpc } from "@/hooks/useJsonRpc";
import Terminal from "@components/Terminal"; import Terminal from "@components/Terminal";
import { CLOUD_API, DEVICE_API } from "@/ui.config"; import { CLOUD_API, DEVICE_API } from "@/ui.config";
@ -653,11 +653,11 @@ export default function KvmIdRoute() {
} }
const rpcDataChannel = useRTCStore(state => state.rpcDataChannel); const rpcDataChannel = useRTCStore(state => state.rpcDataChannel);
const { send } = useJsonRpc(onJsonRpcRequest); const [send] = useJsonRpc(onJsonRpcRequest);
useEffect(() => { useEffect(() => {
if (rpcDataChannel?.readyState !== "open") return; if (rpcDataChannel?.readyState !== "open") return;
send("getVideoState", {}, (resp: JsonRpcResponse) => { send("getVideoState", {}, resp => {
if ("error" in resp) return; if ("error" in resp) return;
setHdmiState(resp.result as Parameters<VideoState["setHdmiState"]>[0]); setHdmiState(resp.result as Parameters<VideoState["setHdmiState"]>[0]);
}); });
@ -669,7 +669,7 @@ export default function KvmIdRoute() {
if (keyboardLedState !== undefined) return; if (keyboardLedState !== undefined) return;
console.log("Requesting keyboard led state"); console.log("Requesting keyboard led state");
send("getKeyboardLedState", {}, (resp: JsonRpcResponse) => { send("getKeyboardLedState", {}, resp => {
if ("error" in resp) { if ("error" in resp) {
// -32601 means the method is not supported // -32601 means the method is not supported
if (resp.error.code === -32601) { if (resp.error.code === -32601) {
@ -742,7 +742,7 @@ export default function KvmIdRoute() {
useEffect(() => { useEffect(() => {
if (appVersion) return; if (appVersion) return;
send("getUpdateStatus", {}, async (resp: JsonRpcResponse) => { send("getUpdateStatus", {}, async resp => {
if ("error" in resp) { if ("error" in resp) {
notifications.error(`Failed to get device version: ${resp.error}`); notifications.error(`Failed to get device version: ${resp.error}`);
return return

142
web.go
View File

@ -159,6 +159,10 @@ func setupRouter() *gin.Engine {
protected.POST("/storage/upload", handleUploadHttp) protected.POST("/storage/upload", handleUploadHttp)
} }
protected.GET("/audio/mute", func(c *gin.Context) {
c.JSON(200, gin.H{"muted": audio.IsAudioMuted()})
})
protected.POST("/audio/mute", func(c *gin.Context) { protected.POST("/audio/mute", func(c *gin.Context) {
type muteReq struct { type muteReq struct {
Muted bool `json:"muted"` Muted bool `json:"muted"`
@ -169,8 +173,6 @@ func setupRouter() *gin.Engine {
return return
} }
audio.SetAudioMuted(req.Muted) audio.SetAudioMuted(req.Muted)
// Also set relay mute state if in main process
audio.SetAudioRelayMuted(req.Muted)
// Broadcast audio mute state change via WebSocket // Broadcast audio mute state change via WebSocket
broadcaster := audio.GetAudioEventBroadcaster() broadcaster := audio.GetAudioEventBroadcaster()
@ -219,7 +221,7 @@ func setupRouter() *gin.Engine {
"bytes_processed": metrics.BytesProcessed, "bytes_processed": metrics.BytesProcessed,
"last_frame_time": metrics.LastFrameTime, "last_frame_time": metrics.LastFrameTime,
"connection_drops": metrics.ConnectionDrops, "connection_drops": metrics.ConnectionDrops,
"average_latency": fmt.Sprintf("%.1fms", float64(metrics.AverageLatency.Nanoseconds())/1e6), "average_latency": metrics.AverageLatency.String(),
}) })
}) })
@ -284,18 +286,18 @@ func setupRouter() *gin.Engine {
// Optimized server-side cooldown using atomic operations // Optimized server-side cooldown using atomic operations
opResult := audio.TryMicrophoneOperation() opResult := audio.TryMicrophoneOperation()
if !opResult.Allowed { if !opResult.Allowed {
running := currentSession.AudioInputManager.IsRunning() running := currentSession.AudioInputManager.IsRunning() || audio.IsNonBlockingAudioInputRunning()
c.JSON(200, gin.H{ c.JSON(200, gin.H{
"status": "cooldown", "status": "cooldown",
"running": running, "running": running,
"cooldown_ms_remaining": opResult.RemainingCooldown.Milliseconds(), "cooldown_ms_remaining": opResult.RemainingCooldown.Milliseconds(),
"operation_id": opResult.OperationID, "operation_id": opResult.OperationID,
}) })
return return
} }
// Check if already running before attempting to start // Check if already running before attempting to start
if currentSession.AudioInputManager.IsRunning() { if currentSession.AudioInputManager.IsRunning() || audio.IsNonBlockingAudioInputRunning() {
c.JSON(200, gin.H{ c.JSON(200, gin.H{
"status": "already running", "status": "already running",
"running": true, "running": true,
@ -310,7 +312,7 @@ func setupRouter() *gin.Engine {
// Check if it's already running after the failed start attempt // Check if it's already running after the failed start attempt
// This handles race conditions where another request started it // This handles race conditions where another request started it
if currentSession.AudioInputManager.IsRunning() { if currentSession.AudioInputManager.IsRunning() || audio.IsNonBlockingAudioInputRunning() {
c.JSON(200, gin.H{ c.JSON(200, gin.H{
"status": "started by concurrent request", "status": "started by concurrent request",
"running": true, "running": true,
@ -346,18 +348,18 @@ func setupRouter() *gin.Engine {
// Optimized server-side cooldown using atomic operations // Optimized server-side cooldown using atomic operations
opResult := audio.TryMicrophoneOperation() opResult := audio.TryMicrophoneOperation()
if !opResult.Allowed { if !opResult.Allowed {
running := currentSession.AudioInputManager.IsRunning() running := currentSession.AudioInputManager.IsRunning() || audio.IsNonBlockingAudioInputRunning()
c.JSON(200, gin.H{ c.JSON(200, gin.H{
"status": "cooldown", "status": "cooldown",
"running": running, "running": running,
"cooldown_ms_remaining": opResult.RemainingCooldown.Milliseconds(), "cooldown_ms_remaining": opResult.RemainingCooldown.Milliseconds(),
"operation_id": opResult.OperationID, "operation_id": opResult.OperationID,
}) })
return return
} }
// Check if already stopped before attempting to stop // Check if already stopped before attempting to stop
if !currentSession.AudioInputManager.IsRunning() { if !currentSession.AudioInputManager.IsRunning() && !audio.IsNonBlockingAudioInputRunning() {
c.JSON(200, gin.H{ c.JSON(200, gin.H{
"status": "already stopped", "status": "already stopped",
"running": false, "running": false,
@ -367,7 +369,7 @@ func setupRouter() *gin.Engine {
currentSession.AudioInputManager.Stop() currentSession.AudioInputManager.Stop()
// AudioInputManager.Stop() already coordinates a clean stop via IPC audio input system // AudioInputManager.Stop() already coordinates a clean stop via StopNonBlockingAudioInput()
// so we don't need to call it again here // so we don't need to call it again here
// Broadcast microphone state change via WebSocket // Broadcast microphone state change via WebSocket
@ -406,7 +408,7 @@ func setupRouter() *gin.Engine {
"bytes_processed": 0, "bytes_processed": 0,
"last_frame_time": "", "last_frame_time": "",
"connection_drops": 0, "connection_drops": 0,
"average_latency": "0.0ms", "average_latency": "0s",
}) })
return return
} }
@ -418,101 +420,7 @@ func setupRouter() *gin.Engine {
"bytes_processed": metrics.BytesProcessed, "bytes_processed": metrics.BytesProcessed,
"last_frame_time": metrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"), "last_frame_time": metrics.LastFrameTime.Format("2006-01-02T15:04:05.000Z"),
"connection_drops": metrics.ConnectionDrops, "connection_drops": metrics.ConnectionDrops,
"average_latency": fmt.Sprintf("%.1fms", float64(metrics.AverageLatency.Nanoseconds())/1e6), "average_latency": metrics.AverageLatency.String(),
})
})
// Audio subprocess process metrics endpoints
protected.GET("/audio/process-metrics", func(c *gin.Context) {
// Access the global audio supervisor from main.go
if audioSupervisor == nil {
c.JSON(200, gin.H{
"cpu_percent": 0.0,
"memory_percent": 0.0,
"memory_rss": 0,
"memory_vms": 0,
"running": false,
})
return
}
metrics := audioSupervisor.GetProcessMetrics()
if metrics == nil {
c.JSON(200, gin.H{
"cpu_percent": 0.0,
"memory_percent": 0.0,
"memory_rss": 0,
"memory_vms": 0,
"running": false,
})
return
}
c.JSON(200, gin.H{
"cpu_percent": metrics.CPUPercent,
"memory_percent": metrics.MemoryPercent,
"memory_rss": metrics.MemoryRSS,
"memory_vms": metrics.MemoryVMS,
"running": true,
})
})
// Audio memory allocation metrics endpoint
protected.GET("/audio/memory-metrics", gin.WrapF(audio.HandleMemoryMetrics))
protected.GET("/microphone/process-metrics", func(c *gin.Context) {
if currentSession == nil || currentSession.AudioInputManager == nil {
c.JSON(200, gin.H{
"cpu_percent": 0.0,
"memory_percent": 0.0,
"memory_rss": 0,
"memory_vms": 0,
"running": false,
})
return
}
// Get the supervisor from the audio input manager
supervisor := currentSession.AudioInputManager.GetSupervisor()
if supervisor == nil {
c.JSON(200, gin.H{
"cpu_percent": 0.0,
"memory_percent": 0.0,
"memory_rss": 0,
"memory_vms": 0,
"running": false,
})
return
}
metrics := supervisor.GetProcessMetrics()
if metrics == nil {
c.JSON(200, gin.H{
"cpu_percent": 0.0,
"memory_percent": 0.0,
"memory_rss": 0,
"memory_vms": 0,
"running": false,
})
return
}
c.JSON(200, gin.H{
"cpu_percent": metrics.CPUPercent,
"memory_percent": metrics.MemoryPercent,
"memory_rss": metrics.MemoryRSS,
"memory_vms": metrics.MemoryVMS,
"running": true,
})
})
// System memory information endpoint
protected.GET("/system/memory", func(c *gin.Context) {
processMonitor := audio.GetProcessMonitor()
totalMemory := processMonitor.GetTotalMemory()
c.JSON(200, gin.H{
"total_memory_bytes": totalMemory,
"total_memory_mb": totalMemory / (1024 * 1024),
}) })
}) })
@ -529,8 +437,9 @@ func setupRouter() *gin.Engine {
logger.Info().Msg("forcing microphone state reset") logger.Info().Msg("forcing microphone state reset")
// Force stop the AudioInputManager // Force stop both the AudioInputManager and NonBlockingAudioManager
currentSession.AudioInputManager.Stop() currentSession.AudioInputManager.Stop()
audio.StopNonBlockingAudioInput()
// Wait a bit to ensure everything is stopped // Wait a bit to ensure everything is stopped
time.Sleep(100 * time.Millisecond) time.Sleep(100 * time.Millisecond)
@ -540,8 +449,9 @@ func setupRouter() *gin.Engine {
broadcaster.BroadcastMicrophoneStateChanged(false, true) broadcaster.BroadcastMicrophoneStateChanged(false, true)
c.JSON(200, gin.H{ c.JSON(200, gin.H{
"status": "reset", "status": "reset",
"audio_input_running": currentSession.AudioInputManager.IsRunning(), "audio_input_running": currentSession.AudioInputManager.IsRunning(),
"nonblocking_input_running": audio.IsNonBlockingAudioInputRunning(),
}) })
}) })

View File

@ -29,13 +29,11 @@ type Session struct {
DiskChannel *webrtc.DataChannel DiskChannel *webrtc.DataChannel
AudioInputManager *audio.AudioInputManager AudioInputManager *audio.AudioInputManager
shouldUmountVirtualMedia bool shouldUmountVirtualMedia bool
// Microphone operation throttling
micCooldown time.Duration // Microphone operation cooldown to mitigate rapid start/stop races
// Audio frame processing micOpMu sync.Mutex
audioFrameChan chan []byte lastMicOp time.Time
audioStopChan chan struct{} micCooldown time.Duration
audioWg sync.WaitGroup
rpcQueue chan webrtc.DataChannelMessage
} }
type SessionConfig struct { type SessionConfig struct {
@ -117,33 +115,18 @@ func newSession(config SessionConfig) (*Session, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
session := &Session{ session := &Session{
peerConnection: peerConnection, peerConnection: peerConnection,
AudioInputManager: audio.NewAudioInputManager(), AudioInputManager: audio.NewAudioInputManager(),
micCooldown: 100 * time.Millisecond,
audioFrameChan: make(chan []byte, 1000),
audioStopChan: make(chan struct{}),
} }
// Start audio processing goroutine
session.startAudioProcessor(*logger)
session.rpcQueue = make(chan webrtc.DataChannelMessage, 256)
go func() {
for msg := range session.rpcQueue {
onRPCMessage(msg, session)
}
}()
peerConnection.OnDataChannel(func(d *webrtc.DataChannel) { peerConnection.OnDataChannel(func(d *webrtc.DataChannel) {
scopedLogger.Info().Str("label", d.Label()).Uint16("id", *d.ID()).Msg("New DataChannel") scopedLogger.Info().Str("label", d.Label()).Uint16("id", *d.ID()).Msg("New DataChannel")
switch d.Label() { switch d.Label() {
case "rpc": case "rpc":
session.RPCChannel = d session.RPCChannel = d
d.OnMessage(func(msg webrtc.DataChannelMessage) { d.OnMessage(func(msg webrtc.DataChannelMessage) {
// Enqueue to ensure ordered processing go onRPCMessage(msg, session)
session.rpcQueue <- msg
}) })
triggerOTAStateUpdate() triggerOTAStateUpdate()
triggerVideoStateUpdate() triggerVideoStateUpdate()
@ -172,11 +155,6 @@ func newSession(config SessionConfig) (*Session, error) {
return nil, err return nil, err
} }
// Update the audio relay with the new WebRTC audio track
if err := audio.UpdateAudioRelayTrack(session.AudioTrack); err != nil {
scopedLogger.Warn().Err(err).Msg("Failed to update audio relay track")
}
videoRtpSender, err := peerConnection.AddTrack(session.VideoTrack) videoRtpSender, err := peerConnection.AddTrack(session.VideoTrack)
if err != nil { if err != nil {
return nil, err return nil, err
@ -212,14 +190,10 @@ func newSession(config SessionConfig) (*Session, error) {
// Extract Opus payload from RTP packet // Extract Opus payload from RTP packet
opusPayload := rtpPacket.Payload opusPayload := rtpPacket.Payload
if len(opusPayload) > 0 { if len(opusPayload) > 0 && session.AudioInputManager != nil {
// Send to buffered channel for processing err := session.AudioInputManager.WriteOpusFrame(opusPayload)
select { if err != nil {
case session.audioFrameChan <- opusPayload: scopedLogger.Warn().Err(err).Msg("Failed to write Opus frame to audio input manager")
// Frame sent successfully
default:
// Channel is full, drop the frame
scopedLogger.Warn().Msg("Audio frame channel full, dropping frame")
} }
} }
} }
@ -267,17 +241,11 @@ func newSession(config SessionConfig) (*Session, error) {
if session == currentSession { if session == currentSession {
currentSession = nil currentSession = nil
} }
// Stop RPC processor
if session.rpcQueue != nil {
close(session.rpcQueue)
session.rpcQueue = nil
}
if session.shouldUmountVirtualMedia { if session.shouldUmountVirtualMedia {
err := rpcUnmountImage() err := rpcUnmountImage()
scopedLogger.Warn().Err(err).Msg("unmount image failed on connection close") scopedLogger.Warn().Err(err).Msg("unmount image failed on connection close")
} }
// Stop audio processing and input manager // Stop audio input manager
session.stopAudioProcessor()
if session.AudioInputManager != nil { if session.AudioInputManager != nil {
session.AudioInputManager.Stop() session.AudioInputManager.Stop()
} }
@ -294,43 +262,6 @@ func newSession(config SessionConfig) (*Session, error) {
return session, nil return session, nil
} }
// startAudioProcessor starts the dedicated audio processing goroutine
func (s *Session) startAudioProcessor(logger zerolog.Logger) {
s.audioWg.Add(1)
go func() {
defer s.audioWg.Done()
logger.Debug().Msg("Audio processor goroutine started")
for {
select {
case frame := <-s.audioFrameChan:
if s.AudioInputManager != nil {
// Check if audio input manager is ready before processing frames
if s.AudioInputManager.IsReady() {
err := s.AudioInputManager.WriteOpusFrame(frame)
if err != nil {
logger.Warn().Err(err).Msg("Failed to write Opus frame to audio input manager")
}
} else {
// Audio input manager not ready, drop frame silently
// This prevents the "client not connected" errors during startup
logger.Debug().Msg("Audio input manager not ready, dropping frame")
}
}
case <-s.audioStopChan:
logger.Debug().Msg("Audio processor goroutine stopping")
return
}
}
}()
}
// stopAudioProcessor stops the audio processing goroutine
func (s *Session) stopAudioProcessor() {
close(s.audioStopChan)
s.audioWg.Wait()
}
func drainRtpSender(rtpSender *webrtc.RTPSender) { func drainRtpSender(rtpSender *webrtc.RTPSender) {
// Lock to OS thread to isolate RTCP processing // Lock to OS thread to isolate RTCP processing
runtime.LockOSThread() runtime.LockOSThread()