Merge branch 'main' into main

This commit is contained in:
SilentWind
2026-02-20 14:19:38 +08:00
committed by GitHub
111 changed files with 7290 additions and 1787 deletions

View File

@@ -28,7 +28,8 @@ serde_json = "1"
# Logging # Logging
tracing = "0.1" tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] } tracing-subscriber = { version = "0.3", features = ["env-filter", "json", "tracing-log"] }
tracing-log = "0.2"
# Error handling # Error handling
thiserror = "2" thiserror = "2"
@@ -46,7 +47,7 @@ nix = { version = "0.30", features = ["fs", "net", "hostname", "poll"] }
# HTTP client (for URL downloads) # HTTP client (for URL downloads)
# Use rustls by default, but allow native-tls for systems with older GLIBC # Use rustls by default, but allow native-tls for systems with older GLIBC
reqwest = { version = "0.13", features = ["stream", "rustls"], default-features = false } reqwest = { version = "0.13", features = ["stream", "rustls", "json"], default-features = false }
urlencoding = "2" urlencoding = "2"
# Static file embedding # Static file embedding
@@ -65,7 +66,7 @@ clap = { version = "4", features = ["derive"] }
time = "0.3" time = "0.3"
# Video capture (V4L2) # Video capture (V4L2)
v4l = "0.14" v4l2r = "0.0.7"
# JPEG encoding (libjpeg-turbo, SIMD accelerated) # JPEG encoding (libjpeg-turbo, SIMD accelerated)
turbojpeg = "1.3" turbojpeg = "1.3"
@@ -91,6 +92,8 @@ arc-swap = "1.8"
# WebRTC # WebRTC
webrtc = "0.14" webrtc = "0.14"
rtp = "0.14" rtp = "0.14"
rtsp-types = "0.1"
sdp-types = "0.1"
# Audio (ALSA capture + Opus encoding) # Audio (ALSA capture + Opus encoding)
# Note: audiopus links to libopus.so (unavoidable for audio support) # Note: audiopus links to libopus.so (unavoidable for audio support)
@@ -115,7 +118,6 @@ hwcodec = { path = "libs/hwcodec" }
protobuf = { version = "3.7", features = ["with-bytes"] } protobuf = { version = "3.7", features = ["with-bytes"] }
sodiumoxide = "0.2" sodiumoxide = "0.2"
sha2 = "0.10" sha2 = "0.10"
# High-performance pixel format conversion (libyuv) # High-performance pixel format conversion (libyuv)
libyuv = { path = "res/vcpkg/libyuv" } libyuv = { path = "res/vcpkg/libyuv" }

View File

@@ -3,9 +3,13 @@
FROM debian:11 FROM debian:11
# Linux headers used by v4l2r bindgen
ARG LINUX_HEADERS_VERSION=6.6
ARG LINUX_HEADERS_SHA256=
# Set Rustup mirrors (Aliyun) # Set Rustup mirrors (Aliyun)
ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \ #ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \
RUSTUP_DIST_SERVER=https://mirrors.aliyun.com/rustup # RUSTUP_DIST_SERVER=https://mirrors.aliyun.com/rustup
# Install Rust toolchain # Install Rust toolchain
RUN apt-get update && apt-get install -y --no-install-recommends \ RUN apt-get update && apt-get install -y --no-install-recommends \
@@ -31,7 +35,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
meson \ meson \
ninja-build \ ninja-build \
wget \ wget \
xz-utils \
file \ file \
rsync \
gcc-aarch64-linux-gnu \ gcc-aarch64-linux-gnu \
g++-aarch64-linux-gnu \ g++-aarch64-linux-gnu \
libc6-dev-arm64-cross \ libc6-dev-arm64-cross \
@@ -47,10 +53,22 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libv4l-dev:arm64 \ libv4l-dev:arm64 \
libudev-dev:arm64 \ libudev-dev:arm64 \
zlib1g-dev:arm64 \ zlib1g-dev:arm64 \
linux-libc-dev:arm64 \
# Note: libjpeg-turbo, libyuv, libvpx, libx264, libx265, libopus are built from source below for static linking # Note: libjpeg-turbo, libyuv, libvpx, libx264, libx265, libopus are built from source below for static linking
libdrm-dev:arm64 \ libdrm-dev:arm64 \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Install newer V4L2 headers for v4l2r bindgen
RUN mkdir -p /opt/v4l2-headers \
&& wget -q https://cdn.kernel.org/pub/linux/kernel/v6.x/linux-${LINUX_HEADERS_VERSION}.tar.xz -O /tmp/linux-headers.tar.xz \
&& if [ -n "$LINUX_HEADERS_SHA256" ]; then echo "$LINUX_HEADERS_SHA256 /tmp/linux-headers.tar.xz" | sha256sum -c -; fi \
&& tar -xf /tmp/linux-headers.tar.xz -C /tmp \
&& cd /tmp/linux-${LINUX_HEADERS_VERSION} \
&& make ARCH=arm64 headers_install INSTALL_HDR_PATH=/opt/v4l2-headers \
&& rm -rf /tmp/linux-${LINUX_HEADERS_VERSION} /tmp/linux-headers.tar.xz
ENV V4L2R_VIDEODEV2_H_PATH=/opt/v4l2-headers/include
# Build static libjpeg-turbo from source (cross-compile for ARM64) # Build static libjpeg-turbo from source (cross-compile for ARM64)
RUN git clone --depth 1 https://github.com/libjpeg-turbo/libjpeg-turbo /tmp/libjpeg-turbo \ RUN git clone --depth 1 https://github.com/libjpeg-turbo/libjpeg-turbo /tmp/libjpeg-turbo \
&& cd /tmp/libjpeg-turbo \ && cd /tmp/libjpeg-turbo \

View File

@@ -3,9 +3,13 @@
FROM debian:11 FROM debian:11
# Linux headers used by v4l2r bindgen
ARG LINUX_HEADERS_VERSION=6.6
ARG LINUX_HEADERS_SHA256=
# Set Rustup mirrors (Aliyun) # Set Rustup mirrors (Aliyun)
ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \ #ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \
RUSTUP_DIST_SERVER=https://mirrors.aliyun.com/rustup # RUSTUP_DIST_SERVER=https://mirrors.aliyun.com/rustup
# Install Rust toolchain # Install Rust toolchain
RUN apt-get update && apt-get install -y --no-install-recommends \ RUN apt-get update && apt-get install -y --no-install-recommends \
@@ -31,7 +35,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
meson \ meson \
ninja-build \ ninja-build \
wget \ wget \
xz-utils \
file \ file \
rsync \
gcc-arm-linux-gnueabihf \ gcc-arm-linux-gnueabihf \
g++-arm-linux-gnueabihf \ g++-arm-linux-gnueabihf \
libc6-dev-armhf-cross \ libc6-dev-armhf-cross \
@@ -46,10 +52,22 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libasound2-dev:armhf \ libasound2-dev:armhf \
libv4l-dev:armhf \ libv4l-dev:armhf \
libudev-dev:armhf \ libudev-dev:armhf \
linux-libc-dev:armhf \
zlib1g-dev:armhf \ zlib1g-dev:armhf \
libdrm-dev:armhf \ libdrm-dev:armhf \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Install newer V4L2 headers for v4l2r bindgen
RUN mkdir -p /opt/v4l2-headers \
&& wget -q https://cdn.kernel.org/pub/linux/kernel/v6.x/linux-${LINUX_HEADERS_VERSION}.tar.xz -O /tmp/linux-headers.tar.xz \
&& if [ -n "$LINUX_HEADERS_SHA256" ]; then echo "$LINUX_HEADERS_SHA256 /tmp/linux-headers.tar.xz" | sha256sum -c -; fi \
&& tar -xf /tmp/linux-headers.tar.xz -C /tmp \
&& cd /tmp/linux-${LINUX_HEADERS_VERSION} \
&& make ARCH=arm headers_install INSTALL_HDR_PATH=/opt/v4l2-headers \
&& rm -rf /tmp/linux-${LINUX_HEADERS_VERSION} /tmp/linux-headers.tar.xz
ENV V4L2R_VIDEODEV2_H_PATH=/opt/v4l2-headers/include
# Build static libjpeg-turbo from source (cross-compile for ARMv7) # Build static libjpeg-turbo from source (cross-compile for ARMv7)
RUN git clone --depth 1 https://github.com/libjpeg-turbo/libjpeg-turbo /tmp/libjpeg-turbo \ RUN git clone --depth 1 https://github.com/libjpeg-turbo/libjpeg-turbo /tmp/libjpeg-turbo \
&& cd /tmp/libjpeg-turbo \ && cd /tmp/libjpeg-turbo \

View File

@@ -3,9 +3,13 @@
FROM debian:11 FROM debian:11
# Linux headers used by v4l2r bindgen
ARG LINUX_HEADERS_VERSION=6.6
ARG LINUX_HEADERS_SHA256=
# Set Rustup mirrors (Aliyun) # Set Rustup mirrors (Aliyun)
ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \ #ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \
RUSTUP_DIST_SERVER=https://mirrors.aliyun.com/rustup # RUSTUP_DIST_SERVER=https://mirrors.aliyun.com/rustup
# Install Rust toolchain # Install Rust toolchain
RUN apt-get update && apt-get install -y --no-install-recommends \ RUN apt-get update && apt-get install -y --no-install-recommends \
@@ -29,6 +33,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libclang-dev \ libclang-dev \
llvm \ llvm \
wget \ wget \
xz-utils \
rsync \
# Autotools for libopus (requires autoreconf) # Autotools for libopus (requires autoreconf)
autoconf \ autoconf \
automake \ automake \
@@ -37,6 +43,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libasound2-dev \ libasound2-dev \
libv4l-dev \ libv4l-dev \
libudev-dev \ libudev-dev \
linux-libc-dev \
zlib1g-dev \ zlib1g-dev \
# Note: libjpeg-turbo, libx264, libx265, libopus are built from source below for static linking # Note: libjpeg-turbo, libx264, libx265, libopus are built from source below for static linking
libva-dev \ libva-dev \
@@ -49,6 +56,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libxdmcp-dev \ libxdmcp-dev \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Install newer V4L2 headers for v4l2r bindgen
RUN mkdir -p /opt/v4l2-headers \
&& wget -q https://cdn.kernel.org/pub/linux/kernel/v6.x/linux-${LINUX_HEADERS_VERSION}.tar.xz -O /tmp/linux-headers.tar.xz \
&& if [ -n "$LINUX_HEADERS_SHA256" ]; then echo "$LINUX_HEADERS_SHA256 /tmp/linux-headers.tar.xz" | sha256sum -c -; fi \
&& tar -xf /tmp/linux-headers.tar.xz -C /tmp \
&& cd /tmp/linux-${LINUX_HEADERS_VERSION} \
&& make ARCH=x86 headers_install INSTALL_HDR_PATH=/opt/v4l2-headers \
&& rm -rf /tmp/linux-${LINUX_HEADERS_VERSION} /tmp/linux-headers.tar.xz
ENV V4L2R_VIDEODEV2_H_PATH=/opt/v4l2-headers/include
# Build static libjpeg-turbo from source (needed by libyuv) # Build static libjpeg-turbo from source (needed by libyuv)
RUN git clone --depth 1 https://github.com/libjpeg-turbo/libjpeg-turbo /tmp/libjpeg-turbo \ RUN git clone --depth 1 https://github.com/libjpeg-turbo/libjpeg-turbo /tmp/libjpeg-turbo \
&& cd /tmp/libjpeg-turbo \ && cd /tmp/libjpeg-turbo \

View File

@@ -6,6 +6,7 @@
include!(concat!(env!("OUT_DIR"), "/ffmpeg_ffi.rs")); include!(concat!(env!("OUT_DIR"), "/ffmpeg_ffi.rs"));
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
use std::env;
#[derive(Debug, Eq, PartialEq, Clone, Copy, Serialize, Deserialize)] #[derive(Debug, Eq, PartialEq, Clone, Copy, Serialize, Deserialize)]
pub enum AVHWDeviceType { pub enum AVHWDeviceType {
@@ -53,7 +54,36 @@ pub extern "C" fn hwcodec_av_log_callback(level: i32, message: *const std::os::r
pub(crate) fn init_av_log() { pub(crate) fn init_av_log() {
static INIT: std::sync::Once = std::sync::Once::new(); static INIT: std::sync::Once = std::sync::Once::new();
INIT.call_once(|| unsafe { INIT.call_once(|| unsafe {
av_log_set_level(AV_LOG_ERROR as i32); av_log_set_level(parse_ffmpeg_log_level());
hwcodec_set_av_log_callback(); hwcodec_set_av_log_callback();
}); });
} }
fn parse_ffmpeg_log_level() -> i32 {
let raw = match env::var("ONE_KVM_FFMPEG_LOG") {
Ok(value) => value,
Err(_) => return AV_LOG_ERROR as i32,
};
let value = raw.trim().to_ascii_lowercase();
if value.is_empty() {
return AV_LOG_ERROR as i32;
}
if let Ok(level) = value.parse::<i32>() {
return level;
}
match value.as_str() {
"quiet" => AV_LOG_QUIET as i32,
"panic" => AV_LOG_PANIC as i32,
"fatal" => AV_LOG_FATAL as i32,
"error" => AV_LOG_ERROR as i32,
"warn" | "warning" => AV_LOG_WARNING as i32,
"info" => AV_LOG_INFO as i32,
"verbose" => AV_LOG_VERBOSE as i32,
"debug" => AV_LOG_DEBUG as i32,
"trace" => AV_LOG_TRACE as i32,
_ => AV_LOG_ERROR as i32,
}
}

View File

@@ -31,8 +31,10 @@ unsafe impl Send for HwMjpegH26xPipeline {}
impl HwMjpegH26xPipeline { impl HwMjpegH26xPipeline {
pub fn new(config: HwMjpegH26xConfig) -> Result<Self, String> { pub fn new(config: HwMjpegH26xConfig) -> Result<Self, String> {
unsafe { unsafe {
let dec = CString::new(config.decoder.as_str()).map_err(|_| "decoder name invalid".to_string())?; let dec = CString::new(config.decoder.as_str())
let enc = CString::new(config.encoder.as_str()).map_err(|_| "encoder name invalid".to_string())?; .map_err(|_| "decoder name invalid".to_string())?;
let enc = CString::new(config.encoder.as_str())
.map_err(|_| "encoder name invalid".to_string())?;
let ctx = ffmpeg_hw_mjpeg_h26x_new( let ctx = ffmpeg_hw_mjpeg_h26x_new(
dec.as_ptr(), dec.as_ptr(),
enc.as_ptr(), enc.as_ptr(),

View File

@@ -1,8 +1,7 @@
use crate::{ use crate::{
ffmpeg::{init_av_log, AVPixelFormat}, ffmpeg::{init_av_log, AVPixelFormat},
ffmpeg_ram::{ ffmpeg_ram::{
ffmpeg_ram_decode, ffmpeg_ram_free_decoder, ffmpeg_ram_last_error, ffmpeg_ram_decode, ffmpeg_ram_free_decoder, ffmpeg_ram_last_error, ffmpeg_ram_new_decoder,
ffmpeg_ram_new_decoder,
}, },
}; };
use std::{ use std::{

View File

@@ -352,6 +352,7 @@ impl Encoder {
debug!("Encoder {} created successfully", codec.name); debug!("Encoder {} created successfully", codec.name);
let mut passed = false; let mut passed = false;
let mut last_err: Option<i32> = None; let mut last_err: Option<i32> = None;
let is_v4l2m2m = codec.name.contains("v4l2m2m");
let max_attempts = if codec.name.contains("v4l2m2m") { let max_attempts = if codec.name.contains("v4l2m2m") {
5 5

View File

@@ -8,11 +8,11 @@ use tracing::{debug, info, warn};
use super::executor::{timing, AtxKeyExecutor}; use super::executor::{timing, AtxKeyExecutor};
use super::led::LedSensor; use super::led::LedSensor;
use super::types::{AtxKeyConfig, AtxLedConfig, AtxState, PowerStatus}; use super::types::{AtxAction, AtxKeyConfig, AtxLedConfig, AtxState, PowerStatus};
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
/// ATX power control configuration /// ATX power control configuration
#[derive(Debug, Clone)] #[derive(Debug, Clone, Default)]
pub struct AtxControllerConfig { pub struct AtxControllerConfig {
/// Whether ATX is enabled /// Whether ATX is enabled
pub enabled: bool, pub enabled: bool,
@@ -24,17 +24,6 @@ pub struct AtxControllerConfig {
pub led: AtxLedConfig, pub led: AtxLedConfig,
} }
impl Default for AtxControllerConfig {
fn default() -> Self {
Self {
enabled: false,
power: AtxKeyConfig::default(),
reset: AtxKeyConfig::default(),
led: AtxLedConfig::default(),
}
}
}
/// Internal state holding all ATX components /// Internal state holding all ATX components
/// Grouped together to reduce lock acquisitions /// Grouped together to reduce lock acquisitions
struct AtxInner { struct AtxInner {
@@ -54,34 +43,7 @@ pub struct AtxController {
} }
impl AtxController { impl AtxController {
/// Create a new ATX controller with the specified configuration async fn init_components(inner: &mut AtxInner) {
pub fn new(config: AtxControllerConfig) -> Self {
Self {
inner: RwLock::new(AtxInner {
config,
power_executor: None,
reset_executor: None,
led_sensor: None,
}),
}
}
/// Create a disabled ATX controller
pub fn disabled() -> Self {
Self::new(AtxControllerConfig::default())
}
/// Initialize the ATX controller and its executors
pub async fn init(&self) -> Result<()> {
let mut inner = self.inner.write().await;
if !inner.config.enabled {
info!("ATX disabled in configuration");
return Ok(());
}
info!("Initializing ATX controller");
// Initialize power executor // Initialize power executor
if inner.config.power.is_configured() { if inner.config.power.is_configured() {
let mut executor = AtxKeyExecutor::new(inner.config.power.clone()); let mut executor = AtxKeyExecutor::new(inner.config.power.clone());
@@ -123,234 +85,180 @@ impl AtxController {
inner.led_sensor = Some(sensor); inner.led_sensor = Some(sensor);
} }
} }
info!("ATX controller initialized successfully");
Ok(())
} }
/// Reload the ATX controller with new configuration async fn shutdown_components(inner: &mut AtxInner) {
/// if let Some(executor) = inner.power_executor.as_mut() {
/// This is called when configuration changes and supports hot-reload. if let Err(e) = executor.shutdown().await {
pub async fn reload(&self, new_config: AtxControllerConfig) -> Result<()> { warn!("Failed to shutdown power executor: {}", e);
info!("Reloading ATX controller with new configuration"); }
}
inner.power_executor = None;
// Shutdown existing executors if let Some(executor) = inner.reset_executor.as_mut() {
self.shutdown_internal().await?; if let Err(e) = executor.shutdown().await {
warn!("Failed to shutdown reset executor: {}", e);
}
}
inner.reset_executor = None;
// Update configuration and re-initialize if let Some(sensor) = inner.led_sensor.as_mut() {
{ if let Err(e) = sensor.shutdown().await {
warn!("Failed to shutdown LED sensor: {}", e);
}
}
inner.led_sensor = None;
}
/// Create a new ATX controller with the specified configuration
pub fn new(config: AtxControllerConfig) -> Self {
Self {
inner: RwLock::new(AtxInner {
config,
power_executor: None,
reset_executor: None,
led_sensor: None,
}),
}
}
/// Create a disabled ATX controller
pub fn disabled() -> Self {
Self::new(AtxControllerConfig::default())
}
/// Initialize the ATX controller and its executors
pub async fn init(&self) -> Result<()> {
let mut inner = self.inner.write().await; let mut inner = self.inner.write().await;
inner.config = new_config;
if !inner.config.enabled {
info!("ATX disabled in configuration");
return Ok(());
} }
// Re-initialize info!("Initializing ATX controller");
self.init().await?;
Self::init_components(&mut inner).await;
info!("ATX controller reloaded successfully");
Ok(()) Ok(())
} }
/// Get current ATX state (single lock acquisition) /// Reload ATX controller configuration
pub async fn reload(&self, config: AtxControllerConfig) -> Result<()> {
let mut inner = self.inner.write().await;
info!("Reloading ATX controller configuration");
// Shutdown existing components first, then rebuild with new config.
Self::shutdown_components(&mut inner).await;
inner.config = config;
if !inner.config.enabled {
info!("ATX disabled after reload");
return Ok(());
}
Self::init_components(&mut inner).await;
info!("ATX controller reloaded");
Ok(())
}
/// Shutdown ATX controller and release all resources
pub async fn shutdown(&self) -> Result<()> {
let mut inner = self.inner.write().await;
Self::shutdown_components(&mut inner).await;
info!("ATX controller shutdown complete");
Ok(())
}
/// Trigger a power action (short/long/reset)
pub async fn trigger_power_action(&self, action: AtxAction) -> Result<()> {
let inner = self.inner.read().await;
match action {
AtxAction::Short | AtxAction::Long => {
if let Some(executor) = &inner.power_executor {
let duration = match action {
AtxAction::Short => timing::SHORT_PRESS,
AtxAction::Long => timing::LONG_PRESS,
_ => unreachable!(),
};
executor.pulse(duration).await?;
} else {
return Err(AppError::Config(
"Power button not configured for ATX controller".to_string(),
));
}
}
AtxAction::Reset => {
if let Some(executor) = &inner.reset_executor {
executor.pulse(timing::RESET_PRESS).await?;
} else {
return Err(AppError::Config(
"Reset button not configured for ATX controller".to_string(),
));
}
}
}
Ok(())
}
/// Trigger a short power button press
pub async fn power_short(&self) -> Result<()> {
self.trigger_power_action(AtxAction::Short).await
}
/// Trigger a long power button press
pub async fn power_long(&self) -> Result<()> {
self.trigger_power_action(AtxAction::Long).await
}
/// Trigger a reset button press
pub async fn reset(&self) -> Result<()> {
self.trigger_power_action(AtxAction::Reset).await
}
/// Get the current power status using the LED sensor (if configured)
pub async fn power_status(&self) -> PowerStatus {
let inner = self.inner.read().await;
if let Some(sensor) = &inner.led_sensor {
match sensor.read().await {
Ok(status) => status,
Err(e) => {
debug!("Failed to read ATX LED sensor: {}", e);
PowerStatus::Unknown
}
}
} else {
PowerStatus::Unknown
}
}
/// Get a snapshot of the ATX state for API responses
pub async fn state(&self) -> AtxState { pub async fn state(&self) -> AtxState {
let inner = self.inner.read().await; let inner = self.inner.read().await;
let power_status = if let Some(sensor) = inner.led_sensor.as_ref() { let power_status = if let Some(sensor) = &inner.led_sensor {
sensor.read().await.unwrap_or(PowerStatus::Unknown) match sensor.read().await {
Ok(status) => status,
Err(e) => {
debug!("Failed to read ATX LED sensor: {}", e);
PowerStatus::Unknown
}
}
} else { } else {
PowerStatus::Unknown PowerStatus::Unknown
}; };
AtxState { AtxState {
available: inner.config.enabled, available: inner.config.enabled,
power_configured: inner power_configured: inner.power_executor.is_some(),
.power_executor reset_configured: inner.reset_executor.is_some(),
.as_ref()
.map(|e| e.is_initialized())
.unwrap_or(false),
reset_configured: inner
.reset_executor
.as_ref()
.map(|e| e.is_initialized())
.unwrap_or(false),
power_status, power_status,
led_supported: inner led_supported: inner.led_sensor.is_some(),
.led_sensor
.as_ref()
.map(|s| s.is_initialized())
.unwrap_or(false),
} }
} }
/// Get current state as SystemEvent
pub async fn current_state_event(&self) -> crate::events::SystemEvent {
let state = self.state().await;
crate::events::SystemEvent::AtxStateChanged {
power_status: state.power_status,
}
}
/// Check if ATX is available
pub async fn is_available(&self) -> bool {
let inner = self.inner.read().await;
inner.config.enabled
}
/// Check if power button is configured and initialized
pub async fn is_power_ready(&self) -> bool {
let inner = self.inner.read().await;
inner
.power_executor
.as_ref()
.map(|e| e.is_initialized())
.unwrap_or(false)
}
/// Check if reset button is configured and initialized
pub async fn is_reset_ready(&self) -> bool {
let inner = self.inner.read().await;
inner
.reset_executor
.as_ref()
.map(|e| e.is_initialized())
.unwrap_or(false)
}
/// Short press power button (turn on or graceful shutdown)
pub async fn power_short(&self) -> Result<()> {
let inner = self.inner.read().await;
let executor = inner
.power_executor
.as_ref()
.ok_or_else(|| AppError::Internal("Power button not configured".to_string()))?;
info!(
"ATX: Short press power button ({}ms)",
timing::SHORT_PRESS.as_millis()
);
executor.pulse(timing::SHORT_PRESS).await
}
/// Long press power button (force power off)
pub async fn power_long(&self) -> Result<()> {
let inner = self.inner.read().await;
let executor = inner
.power_executor
.as_ref()
.ok_or_else(|| AppError::Internal("Power button not configured".to_string()))?;
info!(
"ATX: Long press power button ({}ms)",
timing::LONG_PRESS.as_millis()
);
executor.pulse(timing::LONG_PRESS).await
}
/// Press reset button
pub async fn reset(&self) -> Result<()> {
let inner = self.inner.read().await;
let executor = inner
.reset_executor
.as_ref()
.ok_or_else(|| AppError::Internal("Reset button not configured".to_string()))?;
info!(
"ATX: Press reset button ({}ms)",
timing::RESET_PRESS.as_millis()
);
executor.pulse(timing::RESET_PRESS).await
}
/// Get current power status from LED sensor
pub async fn power_status(&self) -> Result<PowerStatus> {
let inner = self.inner.read().await;
match inner.led_sensor.as_ref() {
Some(sensor) => sensor.read().await,
None => Ok(PowerStatus::Unknown),
}
}
/// Shutdown the ATX controller
pub async fn shutdown(&self) -> Result<()> {
info!("Shutting down ATX controller");
self.shutdown_internal().await?;
info!("ATX controller shutdown complete");
Ok(())
}
/// Internal shutdown helper
async fn shutdown_internal(&self) -> Result<()> {
let mut inner = self.inner.write().await;
// Shutdown power executor
if let Some(mut executor) = inner.power_executor.take() {
executor.shutdown().await.ok();
}
// Shutdown reset executor
if let Some(mut executor) = inner.reset_executor.take() {
executor.shutdown().await.ok();
}
// Shutdown LED sensor
if let Some(mut sensor) = inner.led_sensor.take() {
sensor.shutdown().await.ok();
}
Ok(())
}
}
impl Drop for AtxController {
fn drop(&mut self) {
debug!("ATX controller dropped");
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_controller_config_default() {
let config = AtxControllerConfig::default();
assert!(!config.enabled);
assert!(!config.power.is_configured());
assert!(!config.reset.is_configured());
assert!(!config.led.is_configured());
}
#[test]
fn test_controller_creation() {
let controller = AtxController::disabled();
assert!(controller.inner.try_read().is_ok());
}
#[tokio::test]
async fn test_controller_disabled_state() {
let controller = AtxController::disabled();
let state = controller.state().await;
assert!(!state.available);
assert!(!state.power_configured);
assert!(!state.reset_configured);
}
#[tokio::test]
async fn test_controller_init_disabled() {
let controller = AtxController::disabled();
let result = controller.init().await;
assert!(result.is_ok());
}
#[tokio::test]
async fn test_controller_is_available() {
let controller = AtxController::disabled();
assert!(!controller.is_available().await);
let config = AtxControllerConfig {
enabled: true,
..Default::default()
};
let controller = AtxController::new(config);
assert!(controller.is_available().await);
}
} }

View File

@@ -28,12 +28,14 @@
//! device: "/dev/gpiochip0".to_string(), //! device: "/dev/gpiochip0".to_string(),
//! pin: 5, //! pin: 5,
//! active_level: ActiveLevel::High, //! active_level: ActiveLevel::High,
//! baud_rate: 9600,
//! }, //! },
//! reset: AtxKeyConfig { //! reset: AtxKeyConfig {
//! driver: AtxDriverType::UsbRelay, //! driver: AtxDriverType::UsbRelay,
//! device: "/dev/hidraw0".to_string(), //! device: "/dev/hidraw0".to_string(),
//! pin: 0, //! pin: 0,
//! active_level: ActiveLevel::High, //! active_level: ActiveLevel::High,
//! baud_rate: 9600,
//! }, //! },
//! led: Default::default(), //! led: Default::default(),
//! }; //! };

View File

@@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use typeshare::typeshare; use typeshare::typeshare;
/// Power status /// Power status
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
pub enum PowerStatus { pub enum PowerStatus {
/// Power is on /// Power is on
@@ -15,18 +15,13 @@ pub enum PowerStatus {
/// Power is off /// Power is off
Off, Off,
/// Power status unknown (no LED connected) /// Power status unknown (no LED connected)
#[default]
Unknown, Unknown,
} }
impl Default for PowerStatus {
fn default() -> Self {
Self::Unknown
}
}
/// Driver type for ATX key operations /// Driver type for ATX key operations
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
pub enum AtxDriverType { pub enum AtxDriverType {
/// GPIO control via Linux character device /// GPIO control via Linux character device
@@ -36,32 +31,22 @@ pub enum AtxDriverType {
/// Serial/COM port relay (taobao LCUS type) /// Serial/COM port relay (taobao LCUS type)
Serial, Serial,
/// Disabled / Not configured /// Disabled / Not configured
#[default]
None, None,
} }
impl Default for AtxDriverType {
fn default() -> Self {
Self::None
}
}
/// Active level for GPIO pins /// Active level for GPIO pins
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
pub enum ActiveLevel { pub enum ActiveLevel {
/// Active high (default for most cases) /// Active high (default for most cases)
#[default]
High, High,
/// Active low (inverted) /// Active low (inverted)
Low, Low,
} }
impl Default for ActiveLevel {
fn default() -> Self {
Self::High
}
}
/// Configuration for a single ATX key (power or reset) /// Configuration for a single ATX key (power or reset)
/// This is the "four-tuple" configuration: (driver, device, pin/channel, level) /// This is the "four-tuple" configuration: (driver, device, pin/channel, level)
#[typeshare] #[typeshare]
@@ -77,6 +62,7 @@ pub struct AtxKeyConfig {
/// Pin or channel number: /// Pin or channel number:
/// - For GPIO: GPIO pin number /// - For GPIO: GPIO pin number
/// - For USB Relay: relay channel (0-based) /// - For USB Relay: relay channel (0-based)
/// - For Serial Relay (LCUS): relay channel (1-based)
pub pin: u32, pub pin: u32,
/// Active level (only applicable to GPIO, ignored for USB Relay) /// Active level (only applicable to GPIO, ignored for USB Relay)
pub active_level: ActiveLevel, pub active_level: ActiveLevel,
@@ -105,7 +91,7 @@ impl AtxKeyConfig {
/// LED sensing configuration (optional) /// LED sensing configuration (optional)
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default)] #[serde(default)]
pub struct AtxLedConfig { pub struct AtxLedConfig {
/// Whether LED sensing is enabled /// Whether LED sensing is enabled
@@ -118,17 +104,6 @@ pub struct AtxLedConfig {
pub inverted: bool, pub inverted: bool,
} }
impl Default for AtxLedConfig {
fn default() -> Self {
Self {
enabled: false,
gpio_chip: String::new(),
gpio_pin: 0,
inverted: false,
}
}
}
impl AtxLedConfig { impl AtxLedConfig {
/// Check if LED sensing is configured /// Check if LED sensing is configured
pub fn is_configured(&self) -> bool { pub fn is_configured(&self) -> bool {
@@ -137,7 +112,7 @@ impl AtxLedConfig {
} }
/// ATX state information /// ATX state information
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct AtxState { pub struct AtxState {
/// Whether ATX feature is available/enabled /// Whether ATX feature is available/enabled
pub available: bool, pub available: bool,
@@ -151,18 +126,6 @@ pub struct AtxState {
pub led_supported: bool, pub led_supported: bool,
} }
impl Default for AtxState {
fn default() -> Self {
Self {
available: false,
power_configured: false,
reset_configured: false,
power_status: PowerStatus::Unknown,
led_supported: false,
}
}
}
/// ATX power action request /// ATX power action request
#[derive(Debug, Clone, Deserialize)] #[derive(Debug, Clone, Deserialize)]
pub struct AtxPowerRequest { pub struct AtxPowerRequest {
@@ -274,5 +237,6 @@ mod tests {
assert!(!state.power_configured); assert!(!state.power_configured);
assert!(!state.reset_configured); assert!(!state.reset_configured);
assert_eq!(state.power_status, PowerStatus::Unknown); assert_eq!(state.power_status, PowerStatus::Unknown);
assert!(!state.led_supported);
} }
} }

View File

@@ -160,8 +160,8 @@ mod tests {
let packet = build_magic_packet(&mac); let packet = build_magic_packet(&mac);
// Check header (6 bytes of 0xFF) // Check header (6 bytes of 0xFF)
for i in 0..6 { for byte in packet.iter().take(6) {
assert_eq!(packet[i], 0xFF); assert_eq!(*byte, 0xFF);
} }
// Check MAC repetitions // Check MAC repetitions

View File

@@ -184,14 +184,7 @@ impl AudioCapturer {
let log_throttler = self.log_throttler.clone(); let log_throttler = self.log_throttler.clone();
let handle = tokio::task::spawn_blocking(move || { let handle = tokio::task::spawn_blocking(move || {
capture_loop( capture_loop(config, state, frame_tx, stop_flag, sequence, log_throttler);
config,
state,
frame_tx,
stop_flag,
sequence,
log_throttler,
);
}); });
*self.capture_handle.lock().await = Some(handle); *self.capture_handle.lock().await = Some(handle);

View File

@@ -39,6 +39,7 @@ impl AudioQuality {
} }
/// Parse from string /// Parse from string
#[allow(clippy::should_implement_trait)]
pub fn from_str(s: &str) -> Self { pub fn from_str(s: &str) -> Self {
match s.to_lowercase().as_str() { match s.to_lowercase().as_str() {
"voice" | "low" => AudioQuality::Voice, "voice" | "low" => AudioQuality::Voice,

View File

@@ -85,9 +85,7 @@ pub fn enumerate_audio_devices_with_current(
let mut devices = Vec::new(); let mut devices = Vec::new();
// Try to enumerate cards // Try to enumerate cards
let cards = match alsa::card::Iter::new() { let cards = alsa::card::Iter::new();
i => i,
};
for card_result in cards { for card_result in cards {
let card = match card_result { let card = match card_result {

View File

@@ -16,9 +16,10 @@ use crate::events::{EventBus, SystemEvent};
use crate::utils::LogThrottler; use crate::utils::LogThrottler;
/// Audio health status /// Audio health status
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq, Default)]
pub enum AudioHealthStatus { pub enum AudioHealthStatus {
/// Device is healthy and operational /// Device is healthy and operational
#[default]
Healthy, Healthy,
/// Device has an error, attempting recovery /// Device has an error, attempting recovery
Error { Error {
@@ -33,12 +34,6 @@ pub enum AudioHealthStatus {
Disconnected, Disconnected,
} }
impl Default for AudioHealthStatus {
fn default() -> Self {
Self::Healthy
}
}
/// Audio health monitor configuration /// Audio health monitor configuration
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct AudioMonitorConfig { pub struct AudioMonitorConfig {
@@ -166,7 +161,7 @@ impl AudioHealthMonitor {
let attempt = self.retry_count.load(Ordering::Relaxed); let attempt = self.retry_count.load(Ordering::Relaxed);
// Only publish every 5 attempts to avoid event spam // Only publish every 5 attempts to avoid event spam
if attempt == 1 || attempt % 5 == 0 { if attempt == 1 || attempt.is_multiple_of(5) {
debug!("Audio reconnecting, attempt {}", attempt); debug!("Audio reconnecting, attempt {}", attempt);
if let Some(ref events) = *self.events.read().await { if let Some(ref events) = *self.events.read().await {

View File

@@ -14,9 +14,10 @@ use super::encoder::{OpusConfig, OpusEncoder, OpusFrame};
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
/// Audio stream state /// Audio stream state
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum AudioStreamState { pub enum AudioStreamState {
/// Stream is stopped /// Stream is stopped
#[default]
Stopped, Stopped,
/// Stream is starting up /// Stream is starting up
Starting, Starting,
@@ -26,14 +27,8 @@ pub enum AudioStreamState {
Error, Error,
} }
impl Default for AudioStreamState {
fn default() -> Self {
Self::Stopped
}
}
/// Audio streamer configuration /// Audio streamer configuration
#[derive(Debug, Clone)] #[derive(Debug, Clone, Default)]
pub struct AudioStreamerConfig { pub struct AudioStreamerConfig {
/// Audio capture configuration /// Audio capture configuration
pub capture: AudioConfig, pub capture: AudioConfig,
@@ -41,15 +36,6 @@ pub struct AudioStreamerConfig {
pub opus: OpusConfig, pub opus: OpusConfig,
} }
impl Default for AudioStreamerConfig {
fn default() -> Self {
Self {
capture: AudioConfig::default(),
opus: OpusConfig::default(),
}
}
}
impl AudioStreamerConfig { impl AudioStreamerConfig {
/// Create config for a specific device with default quality /// Create config for a specific device with default quality
pub fn for_device(device_name: &str) -> Self { pub fn for_device(device_name: &str) -> Self {
@@ -290,11 +276,9 @@ impl AudioStreamer {
// Encode to Opus // Encode to Opus
let opus_result = { let opus_result = {
let mut enc_guard = encoder.lock().await; let mut enc_guard = encoder.lock().await;
if let Some(ref mut enc) = *enc_guard { (*enc_guard)
Some(enc.encode_frame(&audio_frame)) .as_mut()
} else { .map(|enc| enc.encode_frame(&audio_frame))
None
}
}; };
match opus_result { match opus_result {

View File

@@ -92,11 +92,7 @@ fn is_public_endpoint(path: &str) -> bool {
// Note: paths here are relative to /api since middleware is applied within the nested router // Note: paths here are relative to /api since middleware is applied within the nested router
matches!( matches!(
path, path,
"/" "/" | "/auth/login" | "/health" | "/setup" | "/setup/init"
| "/auth/login"
| "/health"
| "/setup"
| "/setup/init"
) || path.starts_with("/assets/") ) || path.starts_with("/assets/")
|| path.starts_with("/static/") || path.starts_with("/static/")
|| path.ends_with(".js") || path.ends_with(".js")

View File

@@ -110,7 +110,9 @@ impl SessionStore {
/// Delete all expired sessions /// Delete all expired sessions
pub async fn cleanup_expired(&self) -> Result<u64> { pub async fn cleanup_expired(&self) -> Result<u64> {
let result = sqlx::query("DELETE FROM sessions WHERE expires_at < datetime('now')") let now = Utc::now().to_rfc3339();
let result = sqlx::query("DELETE FROM sessions WHERE expires_at < ?1")
.bind(now)
.execute(&self.pool) .execute(&self.pool)
.await?; .await?;
Ok(result.rows_affected()) Ok(result.rows_affected())

View File

@@ -7,7 +7,7 @@ use super::password::{hash_password, verify_password};
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
/// User row type from database /// User row type from database
type UserRow = (String, String, String, i32, String, String); type UserRow = (String, String, String, String, String);
/// User data /// User data
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -16,7 +16,6 @@ pub struct User {
pub username: String, pub username: String,
#[serde(skip_serializing)] #[serde(skip_serializing)]
pub password_hash: String, pub password_hash: String,
pub is_admin: bool,
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>, pub updated_at: DateTime<Utc>,
} }
@@ -24,12 +23,11 @@ pub struct User {
impl User { impl User {
/// Convert from database row to User /// Convert from database row to User
fn from_row(row: UserRow) -> Self { fn from_row(row: UserRow) -> Self {
let (id, username, password_hash, is_admin, created_at, updated_at) = row; let (id, username, password_hash, created_at, updated_at) = row;
Self { Self {
id, id,
username, username,
password_hash, password_hash,
is_admin: is_admin != 0,
created_at: DateTime::parse_from_rfc3339(&created_at) created_at: DateTime::parse_from_rfc3339(&created_at)
.map(|dt| dt.with_timezone(&Utc)) .map(|dt| dt.with_timezone(&Utc))
.unwrap_or_else(|_| Utc::now()), .unwrap_or_else(|_| Utc::now()),
@@ -53,7 +51,7 @@ impl UserStore {
} }
/// Create a new user /// Create a new user
pub async fn create(&self, username: &str, password: &str, is_admin: bool) -> Result<User> { pub async fn create(&self, username: &str, password: &str) -> Result<User> {
// Check if username already exists // Check if username already exists
if self.get_by_username(username).await?.is_some() { if self.get_by_username(username).await?.is_some() {
return Err(AppError::BadRequest(format!( return Err(AppError::BadRequest(format!(
@@ -68,21 +66,19 @@ impl UserStore {
id: Uuid::new_v4().to_string(), id: Uuid::new_v4().to_string(),
username: username.to_string(), username: username.to_string(),
password_hash, password_hash,
is_admin,
created_at: now, created_at: now,
updated_at: now, updated_at: now,
}; };
sqlx::query( sqlx::query(
r#" r#"
INSERT INTO users (id, username, password_hash, is_admin, created_at, updated_at) INSERT INTO users (id, username, password_hash, created_at, updated_at)
VALUES (?1, ?2, ?3, ?4, ?5, ?6) VALUES (?1, ?2, ?3, ?4, ?5)
"#, "#,
) )
.bind(&user.id) .bind(&user.id)
.bind(&user.username) .bind(&user.username)
.bind(&user.password_hash) .bind(&user.password_hash)
.bind(user.is_admin as i32)
.bind(user.created_at.to_rfc3339()) .bind(user.created_at.to_rfc3339())
.bind(user.updated_at.to_rfc3339()) .bind(user.updated_at.to_rfc3339())
.execute(&self.pool) .execute(&self.pool)
@@ -94,7 +90,7 @@ impl UserStore {
/// Get user by ID /// Get user by ID
pub async fn get(&self, user_id: &str) -> Result<Option<User>> { pub async fn get(&self, user_id: &str) -> Result<Option<User>> {
let row: Option<UserRow> = sqlx::query_as( let row: Option<UserRow> = sqlx::query_as(
"SELECT id, username, password_hash, is_admin, created_at, updated_at FROM users WHERE id = ?1", "SELECT id, username, password_hash, created_at, updated_at FROM users WHERE id = ?1",
) )
.bind(user_id) .bind(user_id)
.fetch_optional(&self.pool) .fetch_optional(&self.pool)
@@ -106,7 +102,7 @@ impl UserStore {
/// Get user by username /// Get user by username
pub async fn get_by_username(&self, username: &str) -> Result<Option<User>> { pub async fn get_by_username(&self, username: &str) -> Result<Option<User>> {
let row: Option<UserRow> = sqlx::query_as( let row: Option<UserRow> = sqlx::query_as(
"SELECT id, username, password_hash, is_admin, created_at, updated_at FROM users WHERE username = ?1", "SELECT id, username, password_hash, created_at, updated_at FROM users WHERE username = ?1",
) )
.bind(username) .bind(username)
.fetch_optional(&self.pool) .fetch_optional(&self.pool)
@@ -161,8 +157,7 @@ impl UserStore {
} }
let now = Utc::now(); let now = Utc::now();
let result = let result = sqlx::query("UPDATE users SET username = ?1, updated_at = ?2 WHERE id = ?3")
sqlx::query("UPDATE users SET username = ?1, updated_at = ?2 WHERE id = ?3")
.bind(new_username) .bind(new_username)
.bind(now.to_rfc3339()) .bind(now.to_rfc3339())
.bind(user_id) .bind(user_id)
@@ -179,7 +174,7 @@ impl UserStore {
/// List all users /// List all users
pub async fn list(&self) -> Result<Vec<User>> { pub async fn list(&self) -> Result<Vec<User>> {
let rows: Vec<UserRow> = sqlx::query_as( let rows: Vec<UserRow> = sqlx::query_as(
"SELECT id, username, password_hash, is_admin, created_at, updated_at FROM users ORDER BY created_at", "SELECT id, username, password_hash, created_at, updated_at FROM users ORDER BY created_at",
) )
.fetch_all(&self.pool) .fetch_all(&self.pool)
.await?; .await?;

View File

@@ -11,6 +11,7 @@ pub use crate::rustdesk::config::RustDeskConfig;
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)] #[serde(default)]
#[derive(Default)]
pub struct AppConfig { pub struct AppConfig {
/// Whether initial setup has been completed /// Whether initial setup has been completed
pub initialized: bool, pub initialized: bool,
@@ -34,24 +35,8 @@ pub struct AppConfig {
pub extensions: ExtensionsConfig, pub extensions: ExtensionsConfig,
/// RustDesk remote access settings /// RustDesk remote access settings
pub rustdesk: RustDeskConfig, pub rustdesk: RustDeskConfig,
} /// RTSP streaming settings
pub rtsp: RtspConfig,
impl Default for AppConfig {
fn default() -> Self {
Self {
initialized: false,
auth: AuthConfig::default(),
video: VideoConfig::default(),
hid: HidConfig::default(),
msd: MsdConfig::default(),
atx: AtxConfig::default(),
audio: AudioConfig::default(),
stream: StreamConfig::default(),
web: WebConfig::default(),
extensions: ExtensionsConfig::default(),
rustdesk: RustDeskConfig::default(),
}
}
} }
/// Authentication configuration /// Authentication configuration
@@ -116,21 +101,17 @@ impl Default for VideoConfig {
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum HidBackend { pub enum HidBackend {
/// USB OTG HID gadget /// USB OTG HID gadget
Otg, Otg,
/// CH9329 serial HID controller /// CH9329 serial HID controller
Ch9329, Ch9329,
/// Disabled /// Disabled
#[default]
None, None,
} }
impl Default for HidBackend {
fn default() -> Self {
Self::None
}
}
/// OTG USB device descriptor configuration /// OTG USB device descriptor configuration
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
@@ -163,8 +144,10 @@ impl Default for OtgDescriptorConfig {
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[derive(Default)]
pub enum OtgHidProfile { pub enum OtgHidProfile {
/// Full HID device set (keyboard + relative mouse + absolute mouse + consumer control) /// Full HID device set (keyboard + relative mouse + absolute mouse + consumer control)
#[default]
Full, Full,
/// Full HID device set without MSD /// Full HID device set without MSD
FullNoMsd, FullNoMsd,
@@ -180,12 +163,6 @@ pub enum OtgHidProfile {
Custom, Custom,
} }
impl Default for OtgHidProfile {
fn default() -> Self {
Self::Full
}
}
/// OTG HID function selection (used when profile is Custom) /// OTG HID function selection (used when profile is Custom)
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
@@ -360,6 +337,7 @@ pub use crate::atx::{ActiveLevel, AtxDriverType, AtxKeyConfig, AtxLedConfig};
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)] #[serde(default)]
#[derive(Default)]
pub struct AtxConfig { pub struct AtxConfig {
/// Enable ATX functionality /// Enable ATX functionality
pub enabled: bool, pub enabled: bool,
@@ -373,18 +351,6 @@ pub struct AtxConfig {
pub wol_interface: String, pub wol_interface: String,
} }
impl Default for AtxConfig {
fn default() -> Self {
Self {
enabled: false,
power: AtxKeyConfig::default(),
reset: AtxKeyConfig::default(),
led: AtxLedConfig::default(),
wol_interface: String::new(),
}
}
}
impl AtxConfig { impl AtxConfig {
/// Convert to AtxControllerConfig for the controller /// Convert to AtxControllerConfig for the controller
pub fn to_controller_config(&self) -> crate::atx::AtxControllerConfig { pub fn to_controller_config(&self) -> crate::atx::AtxControllerConfig {
@@ -427,16 +393,62 @@ impl Default for AudioConfig {
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum StreamMode { pub enum StreamMode {
/// WebRTC with H264/H265 /// WebRTC with H264/H265
WebRTC, WebRTC,
/// MJPEG over HTTP /// MJPEG over HTTP
#[default]
Mjpeg, Mjpeg,
} }
impl Default for StreamMode { /// RTSP output codec
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum RtspCodec {
#[default]
H264,
H265,
}
/// RTSP configuration
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct RtspConfig {
/// Enable RTSP output
pub enabled: bool,
/// Bind IP address
pub bind: String,
/// RTSP TCP listen port
pub port: u16,
/// Stream path (without leading slash)
pub path: String,
/// Allow only one client connection at a time
pub allow_one_client: bool,
/// Output codec (H264/H265)
pub codec: RtspCodec,
/// Optional username for authentication
pub username: Option<String>,
/// Optional password for authentication
#[typeshare(skip)]
pub password: Option<String>,
}
impl Default for RtspConfig {
fn default() -> Self { fn default() -> Self {
Self::Mjpeg Self {
enabled: false,
bind: "0.0.0.0".to_string(),
port: 8554,
path: "live".to_string(),
allow_one_client: true,
codec: RtspCodec::H264,
username: None,
password: None,
}
} }
} }
@@ -444,8 +456,10 @@ impl Default for StreamMode {
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum EncoderType { pub enum EncoderType {
/// Auto-detect best encoder /// Auto-detect best encoder
#[default]
Auto, Auto,
/// Software encoder (libx264) /// Software encoder (libx264)
Software, Software,
@@ -463,12 +477,6 @@ pub enum EncoderType {
V4l2m2m, V4l2m2m,
} }
impl Default for EncoderType {
fn default() -> Self {
Self::Auto
}
}
impl EncoderType { impl EncoderType {
/// Convert to EncoderBackend for registry queries /// Convert to EncoderBackend for registry queries
pub fn to_backend(&self) -> Option<crate::video::encoder::registry::EncoderBackend> { pub fn to_backend(&self) -> Option<crate::video::encoder::registry::EncoderBackend> {

View File

@@ -82,7 +82,6 @@ impl ConfigStore {
id TEXT PRIMARY KEY, id TEXT PRIMARY KEY,
username TEXT NOT NULL UNIQUE, username TEXT NOT NULL UNIQUE,
password_hash TEXT NOT NULL, password_hash TEXT NOT NULL,
is_admin INTEGER NOT NULL DEFAULT 0,
created_at TEXT NOT NULL DEFAULT (datetime('now')), created_at TEXT NOT NULL DEFAULT (datetime('now')),
updated_at TEXT NOT NULL DEFAULT (datetime('now')) updated_at TEXT NOT NULL DEFAULT (datetime('now'))
) )
@@ -121,6 +120,26 @@ impl ConfigStore {
.execute(pool) .execute(pool)
.await?; .await?;
sqlx::query(
r#"
CREATE TABLE IF NOT EXISTS wol_history (
mac_address TEXT PRIMARY KEY,
updated_at INTEGER NOT NULL
)
"#,
)
.execute(pool)
.await?;
sqlx::query(
r#"
CREATE INDEX IF NOT EXISTS idx_wol_history_updated_at
ON wol_history(updated_at DESC)
"#,
)
.execute(pool)
.await?;
Ok(()) Ok(())
} }

View File

@@ -124,6 +124,7 @@ pub struct ClientStats {
/// ``` /// ```
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(tag = "event", content = "data")] #[serde(tag = "event", content = "data")]
#[allow(clippy::large_enum_variant)]
pub enum SystemEvent { pub enum SystemEvent {
// ============================================================================ // ============================================================================
// Video Stream Events // Video Stream Events

View File

@@ -230,13 +230,6 @@ impl ExtensionManager {
"-W".to_string(), // Writable (allow input) "-W".to_string(), // Writable (allow input)
]; ];
// Add credential if set (still useful for additional security layer)
if let Some(ref cred) = c.credential {
if !cred.is_empty() {
args.extend(["-c".to_string(), cred.clone()]);
}
}
// Add shell as last argument // Add shell as last argument
args.push(c.shell.clone()); args.push(c.shell.clone());
Ok(args) Ok(args)

View File

@@ -102,9 +102,6 @@ pub struct TtydConfig {
pub port: u16, pub port: u16,
/// Shell to execute /// Shell to execute
pub shell: String, pub shell: String,
/// Credential in format "user:password" (optional)
#[serde(skip_serializing_if = "Option::is_none")]
pub credential: Option<String>,
} }
impl Default for TtydConfig { impl Default for TtydConfig {
@@ -113,7 +110,6 @@ impl Default for TtydConfig {
enabled: false, enabled: false,
port: 7681, port: 7681,
shell: "/bin/bash".to_string(), shell: "/bin/bash".to_string(),
credential: None,
} }
} }
} }
@@ -149,6 +145,7 @@ impl Default for GostcConfig {
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)] #[serde(default)]
#[derive(Default)]
pub struct EasytierConfig { pub struct EasytierConfig {
/// Enable auto-start /// Enable auto-start
pub enabled: bool, pub enabled: bool,
@@ -165,18 +162,6 @@ pub struct EasytierConfig {
pub virtual_ip: Option<String>, pub virtual_ip: Option<String>,
} }
impl Default for EasytierConfig {
fn default() -> Self {
Self {
enabled: false,
network_name: String::new(),
network_secret: String::new(),
peer_urls: Vec::new(),
virtual_ip: None,
}
}
}
/// Combined extensions configuration /// Combined extensions configuration
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, Default)] #[derive(Debug, Clone, Serialize, Deserialize, Default)]

View File

@@ -14,6 +14,7 @@ fn default_ch9329_baud_rate() -> u32 {
/// HID backend type /// HID backend type
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")] #[serde(tag = "type", rename_all = "lowercase")]
#[derive(Default)]
pub enum HidBackendType { pub enum HidBackendType {
/// USB OTG gadget mode /// USB OTG gadget mode
Otg, Otg,
@@ -26,15 +27,10 @@ pub enum HidBackendType {
baud_rate: u32, baud_rate: u32,
}, },
/// No HID backend (disabled) /// No HID backend (disabled)
#[default]
None, None,
} }
impl Default for HidBackendType {
fn default() -> Self {
Self::None
}
}
impl HidBackendType { impl HidBackendType {
/// Check if OTG backend is available on this system /// Check if OTG backend is available on this system
pub fn otg_available() -> bool { pub fn otg_available() -> bool {

View File

@@ -219,8 +219,10 @@ impl From<u8> for LedStatus {
/// CH9329 work mode /// CH9329 work mode
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[repr(u8)] #[repr(u8)]
#[derive(Default)]
pub enum WorkMode { pub enum WorkMode {
/// Mode 0: Standard USB Keyboard + Mouse (default) /// Mode 0: Standard USB Keyboard + Mouse (default)
#[default]
KeyboardMouse = 0x00, KeyboardMouse = 0x00,
/// Mode 1: Standard USB Keyboard only /// Mode 1: Standard USB Keyboard only
KeyboardOnly = 0x01, KeyboardOnly = 0x01,
@@ -230,17 +232,13 @@ pub enum WorkMode {
CustomHid = 0x03, CustomHid = 0x03,
} }
impl Default for WorkMode {
fn default() -> Self {
Self::KeyboardMouse
}
}
/// CH9329 serial communication mode /// CH9329 serial communication mode
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[repr(u8)] #[repr(u8)]
#[derive(Default)]
pub enum SerialMode { pub enum SerialMode {
/// Mode 0: Protocol transmission mode (default) /// Mode 0: Protocol transmission mode (default)
#[default]
Protocol = 0x00, Protocol = 0x00,
/// Mode 1: ASCII mode /// Mode 1: ASCII mode
Ascii = 0x01, Ascii = 0x01,
@@ -248,12 +246,6 @@ pub enum SerialMode {
Transparent = 0x02, Transparent = 0x02,
} }
impl Default for SerialMode {
fn default() -> Self {
Self::Protocol
}
}
/// CH9329 configuration parameters /// CH9329 configuration parameters
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Ch9329Config { pub struct Ch9329Config {

View File

@@ -9,7 +9,7 @@
//! //!
//! Keyboard event (type 0x01): //! Keyboard event (type 0x01):
//! - Byte 1: Event type (0x00 = down, 0x01 = up) //! - Byte 1: Event type (0x00 = down, 0x01 = up)
//! - Byte 2: Key code (USB HID usage code or JS keyCode) //! - Byte 2: Key code (USB HID usage code)
//! - Byte 3: Modifiers bitmask //! - Byte 3: Modifiers bitmask
//! - Bit 0: Left Ctrl //! - Bit 0: Left Ctrl
//! - Bit 1: Left Shift //! - Bit 1: Left Shift
@@ -119,7 +119,7 @@ fn parse_keyboard_message(data: &[u8]) -> Option<HidChannelEvent> {
event_type, event_type,
key, key,
modifiers, modifiers,
is_usb_hid: false, // WebRTC datachannel sends JS keycodes is_usb_hid: true, // WebRTC/WebSocket HID channel sends USB HID usages
})) }))
} }
@@ -245,6 +245,7 @@ mod tests {
assert_eq!(kb.key, 0x04); assert_eq!(kb.key, 0x04);
assert!(kb.modifiers.left_ctrl); assert!(kb.modifiers.left_ctrl);
assert!(!kb.modifiers.left_shift); assert!(!kb.modifiers.left_shift);
assert!(kb.is_usb_hid);
} }
_ => panic!("Expected keyboard event"), _ => panic!("Expected keyboard event"),
} }
@@ -280,7 +281,7 @@ mod tests {
right_alt: false, right_alt: false,
right_meta: false, right_meta: false,
}, },
is_usb_hid: false, is_usb_hid: true,
}; };
let encoded = encode_keyboard_event(&event); let encoded = encode_keyboard_event(&event);

View File

@@ -42,17 +42,17 @@ pub struct HidInfo {
pub screen_resolution: Option<(u32, u32)>, pub screen_resolution: Option<(u32, u32)>,
} }
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use tracing::{info, warn}; use tracing::{info, warn};
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::otg::OtgService; use crate::otg::OtgService;
use std::time::Duration;
use tokio::sync::mpsc; use tokio::sync::mpsc;
use tokio::sync::Mutex; use tokio::sync::Mutex;
use tokio::task::JoinHandle; use tokio::task::JoinHandle;
use std::time::Duration;
const HID_EVENT_QUEUE_CAPACITY: usize = 64; const HID_EVENT_QUEUE_CAPACITY: usize = 64;
const HID_EVENT_SEND_TIMEOUT_MS: u64 = 30; const HID_EVENT_SEND_TIMEOUT_MS: u64 = 30;
@@ -203,7 +203,10 @@ impl HidController {
)); ));
} }
if matches!(event.event_type, MouseEventType::Move | MouseEventType::MoveAbs) { if matches!(
event.event_type,
MouseEventType::Move | MouseEventType::MoveAbs
) {
// Best-effort: drop/merge move events if queue is full // Best-effort: drop/merge move events if queue is full
self.enqueue_mouse_move(event) self.enqueue_mouse_move(event)
} else { } else {
@@ -470,13 +473,7 @@ impl HidController {
None => break, None => break,
}; };
process_hid_event( process_hid_event(event, &backend, &monitor, &backend_type).await;
event,
&backend,
&monitor,
&backend_type,
)
.await;
// After each event, flush latest move if pending // After each event, flush latest move if pending
if pending_move_flag.swap(false, Ordering::AcqRel) { if pending_move_flag.swap(false, Ordering::AcqRel) {
@@ -505,9 +502,9 @@ impl HidController {
self.pending_move_flag.store(true, Ordering::Release); self.pending_move_flag.store(true, Ordering::Release);
Ok(()) Ok(())
} }
Err(mpsc::error::TrySendError::Closed(_)) => Err(AppError::BadRequest( Err(mpsc::error::TrySendError::Closed(_)) => {
"HID event queue closed".to_string(), Err(AppError::BadRequest("HID event queue closed".to_string()))
)), }
} }
} }
@@ -517,8 +514,10 @@ impl HidController {
Err(mpsc::error::TrySendError::Full(ev)) => { Err(mpsc::error::TrySendError::Full(ev)) => {
// For non-move events, wait briefly to avoid dropping critical input // For non-move events, wait briefly to avoid dropping critical input
let tx = self.hid_tx.clone(); let tx = self.hid_tx.clone();
let send_result = let send_result = tokio::time::timeout(
tokio::time::timeout(Duration::from_millis(HID_EVENT_SEND_TIMEOUT_MS), tx.send(ev)) Duration::from_millis(HID_EVENT_SEND_TIMEOUT_MS),
tx.send(ev),
)
.await; .await;
if send_result.is_ok() { if send_result.is_ok() {
Ok(()) Ok(())
@@ -527,9 +526,9 @@ impl HidController {
Ok(()) Ok(())
} }
} }
Err(mpsc::error::TrySendError::Closed(_)) => Err(AppError::BadRequest( Err(mpsc::error::TrySendError::Closed(_)) => {
"HID event queue closed".to_string(), Err(AppError::BadRequest("HID event queue closed".to_string()))
)), }
} }
} }
} }

View File

@@ -16,9 +16,10 @@ use crate::events::{EventBus, SystemEvent};
use crate::utils::LogThrottler; use crate::utils::LogThrottler;
/// HID health status /// HID health status
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq, Default)]
pub enum HidHealthStatus { pub enum HidHealthStatus {
/// Device is healthy and operational /// Device is healthy and operational
#[default]
Healthy, Healthy,
/// Device has an error, attempting recovery /// Device has an error, attempting recovery
Error { Error {
@@ -33,12 +34,6 @@ pub enum HidHealthStatus {
Disconnected, Disconnected,
} }
impl Default for HidHealthStatus {
fn default() -> Self {
Self::Healthy
}
}
/// HID health monitor configuration /// HID health monitor configuration
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct HidMonitorConfig { pub struct HidMonitorConfig {
@@ -196,7 +191,7 @@ impl HidHealthMonitor {
let attempt = self.retry_count.load(Ordering::Relaxed); let attempt = self.retry_count.load(Ordering::Relaxed);
// Only publish every 5 attempts to avoid event spam // Only publish every 5 attempts to avoid event spam
if attempt == 1 || attempt % 5 == 0 { if attempt == 1 || attempt.is_multiple_of(5) {
debug!("HID {} reconnecting, attempt {}", backend, attempt); debug!("HID {} reconnecting, attempt {}", backend, attempt);
if let Some(ref events) = *self.events.read().await { if let Some(ref events) = *self.events.read().await {

View File

@@ -228,7 +228,7 @@ impl OtgBackend {
Ok(false) Ok(false)
} }
Ok(_) => Ok(false), Ok(_) => Ok(false),
Err(e) => Err(std::io::Error::new(std::io::ErrorKind::Other, e)), Err(e) => Err(std::io::Error::other(e)),
} }
} }
@@ -393,21 +393,10 @@ impl OtgBackend {
/// Check if all HID device files exist /// Check if all HID device files exist
pub fn check_devices_exist(&self) -> bool { pub fn check_devices_exist(&self) -> bool {
self.keyboard_path self.keyboard_path.as_ref().is_none_or(|p| p.exists())
.as_ref() && self.mouse_rel_path.as_ref().is_none_or(|p| p.exists())
.map_or(true, |p| p.exists()) && self.mouse_abs_path.as_ref().is_none_or(|p| p.exists())
&& self && self.consumer_path.as_ref().is_none_or(|p| p.exists())
.mouse_rel_path
.as_ref()
.map_or(true, |p| p.exists())
&& self
.mouse_abs_path
.as_ref()
.map_or(true, |p| p.exists())
&& self
.consumer_path
.as_ref()
.map_or(true, |p| p.exists())
} }
/// Get list of missing device paths /// Get list of missing device paths
@@ -952,9 +941,7 @@ impl HidBackend for OtgBackend {
} }
fn supports_absolute_mouse(&self) -> bool { fn supports_absolute_mouse(&self) -> bool {
self.mouse_abs_path self.mouse_abs_path.as_ref().is_some_and(|p| p.exists())
.as_ref()
.map_or(false, |p| p.exists())
} }
async fn send_consumer(&self, event: ConsumerEvent) -> Result<()> { async fn send_consumer(&self, event: ConsumerEvent) -> Result<()> {

View File

@@ -14,9 +14,11 @@ pub mod hid;
pub mod modules; pub mod modules;
pub mod msd; pub mod msd;
pub mod otg; pub mod otg;
pub mod rtsp;
pub mod rustdesk; pub mod rustdesk;
pub mod state; pub mod state;
pub mod stream; pub mod stream;
pub mod update;
pub mod utils; pub mod utils;
pub mod video; pub mod video;
pub mod web; pub mod web;

View File

@@ -19,9 +19,14 @@ use one_kvm::extensions::ExtensionManager;
use one_kvm::hid::{HidBackendType, HidController}; use one_kvm::hid::{HidBackendType, HidController};
use one_kvm::msd::MsdController; use one_kvm::msd::MsdController;
use one_kvm::otg::{configfs, OtgService}; use one_kvm::otg::{configfs, OtgService};
use one_kvm::rtsp::RtspService;
use one_kvm::rustdesk::RustDeskService; use one_kvm::rustdesk::RustDeskService;
use one_kvm::state::AppState; use one_kvm::state::AppState;
use one_kvm::update::UpdateService;
use one_kvm::utils::bind_tcp_listener; use one_kvm::utils::bind_tcp_listener;
use one_kvm::video::codec_constraints::{
enforce_constraints_with_stream_manager, StreamCodecConstraints,
};
use one_kvm::video::format::{PixelFormat, Resolution}; use one_kvm::video::format::{PixelFormat, Resolution};
use one_kvm::video::{Streamer, VideoStreamManager}; use one_kvm::video::{Streamer, VideoStreamManager};
use one_kvm::web; use one_kvm::web;
@@ -158,7 +163,11 @@ async fn main() -> anyhow::Result<()> {
} }
let bind_ips = resolve_bind_addresses(&config.web)?; let bind_ips = resolve_bind_addresses(&config.web)?;
let scheme = if config.web.https_enabled { "https" } else { "http" }; let scheme = if config.web.https_enabled {
"https"
} else {
"http"
};
let bind_port = if config.web.https_enabled { let bind_port = if config.web.https_enabled {
config.web.https_port config.web.https_port
} else { } else {
@@ -530,7 +539,24 @@ async fn main() -> anyhow::Result<()> {
None None
}; };
// Create RTSP service (optional, based on config)
let rtsp = if config.rtsp.enabled {
tracing::info!(
"Initializing RTSP service: rtsp://{}:{}/{}",
config.rtsp.bind,
config.rtsp.port,
config.rtsp.path
);
let service = RtspService::new(config.rtsp.clone(), stream_manager.clone());
Some(Arc::new(service))
} else {
tracing::info!("RTSP disabled in configuration");
None
};
// Create application state // Create application state
let update_service = Arc::new(UpdateService::new(data_dir.join("updates")));
let state = AppState::new( let state = AppState::new(
config_store.clone(), config_store.clone(),
session_store, session_store,
@@ -542,8 +568,10 @@ async fn main() -> anyhow::Result<()> {
atx, atx,
audio, audio,
rustdesk.clone(), rustdesk.clone(),
rtsp.clone(),
extensions.clone(), extensions.clone(),
events.clone(), events.clone(),
update_service,
shutdown_tx.clone(), shutdown_tx.clone(),
data_dir.clone(), data_dir.clone(),
); );
@@ -573,6 +601,30 @@ async fn main() -> anyhow::Result<()> {
} }
} }
// Start RTSP service if enabled
if let Some(ref service) = rtsp {
if let Err(e) = service.start().await {
tracing::error!("Failed to start RTSP service: {}", e);
} else {
tracing::info!("RTSP service started");
}
}
// Enforce startup codec constraints (e.g. RTSP/RustDesk locks)
{
let runtime_config = state.config.get();
let constraints = StreamCodecConstraints::from_config(&runtime_config);
match enforce_constraints_with_stream_manager(&state.stream_manager, &constraints).await {
Ok(result) if result.changed => {
if let Some(message) = result.message {
tracing::info!("{}", message);
}
}
Ok(_) => {}
Err(e) => tracing::warn!("Failed to enforce startup codec constraints: {}", e),
}
}
// Start enabled extensions // Start enabled extensions
{ {
let ext_config = config_store.get(); let ext_config = config_store.get();
@@ -646,7 +698,7 @@ async fn main() -> anyhow::Result<()> {
let server = axum_server::from_tcp_rustls(listener, tls_config.clone())? let server = axum_server::from_tcp_rustls(listener, tls_config.clone())?
.serve(app.clone().into_make_service()); .serve(app.clone().into_make_service());
servers.push(async move { server.await }); servers.push(server);
} }
tokio::select! { tokio::select! {
@@ -712,10 +764,13 @@ fn init_logging(level: LogLevel, verbose_count: u8) {
let env_filter = let env_filter =
tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| filter.into()); tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| filter.into());
tracing_subscriber::registry() if let Err(err) = tracing_subscriber::registry()
.with(env_filter) .with(env_filter)
.with(tracing_subscriber::fmt::layer()) .with(tracing_subscriber::fmt::layer())
.init(); .try_init()
{
eprintln!("failed to initialize tracing: {}", err);
}
} }
/// Get the application data directory /// Get the application data directory
@@ -879,6 +934,15 @@ async fn cleanup(state: &Arc<AppState>) {
} }
} }
// Stop RTSP service
if let Some(ref service) = *state.rtsp.read().await {
if let Err(e) = service.stop().await {
tracing::warn!("Failed to stop RTSP service: {}", e);
} else {
tracing::info!("RTSP service stopped");
}
}
// Stop video // Stop video
if let Err(e) = state.stream_manager.stop().await { if let Err(e) = state.stream_manager.stop().await {
tracing::warn!("Failed to stop streamer: {}", e); tracing::warn!("Failed to stop streamer: {}", e);

View File

@@ -52,10 +52,7 @@ impl MsdController {
/// # Parameters /// # Parameters
/// * `otg_service` - OTG service for gadget management /// * `otg_service` - OTG service for gadget management
/// * `msd_dir` - Base directory for MSD storage /// * `msd_dir` - Base directory for MSD storage
pub fn new( pub fn new(otg_service: Arc<OtgService>, msd_dir: impl Into<PathBuf>) -> Self {
otg_service: Arc<OtgService>,
msd_dir: impl Into<PathBuf>,
) -> Self {
let msd_dir = msd_dir.into(); let msd_dir = msd_dir.into();
let images_path = msd_dir.join("images"); let images_path = msd_dir.join("images");
let ventoy_dir = msd_dir.join("ventoy"); let ventoy_dir = msd_dir.join("ventoy");

View File

@@ -87,8 +87,7 @@ impl ImageManager {
.ok() .ok()
.and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()) .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
.map(|d| { .map(|d| {
chrono::DateTime::from_timestamp(d.as_secs() as i64, 0) chrono::DateTime::from_timestamp(d.as_secs() as i64, 0).unwrap_or_else(Utc::now)
.unwrap_or_else(|| Utc::now().into())
}) })
.unwrap_or_else(Utc::now); .unwrap_or_else(Utc::now);
@@ -400,7 +399,7 @@ impl ImageManager {
.headers() .headers()
.get(reqwest::header::CONTENT_DISPOSITION) .get(reqwest::header::CONTENT_DISPOSITION)
.and_then(|v| v.to_str().ok()) .and_then(|v| v.to_str().ok())
.and_then(|s| extract_filename_from_content_disposition(s)); .and_then(extract_filename_from_content_disposition);
if let Some(name) = from_header { if let Some(name) = from_header {
sanitize_filename(&name) sanitize_filename(&name)

View File

@@ -15,9 +15,10 @@ use crate::events::{EventBus, SystemEvent};
use crate::utils::LogThrottler; use crate::utils::LogThrottler;
/// MSD health status /// MSD health status
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq, Default)]
pub enum MsdHealthStatus { pub enum MsdHealthStatus {
/// Device is healthy and operational /// Device is healthy and operational
#[default]
Healthy, Healthy,
/// Device has an error /// Device has an error
Error { Error {
@@ -28,12 +29,6 @@ pub enum MsdHealthStatus {
}, },
} }
impl Default for MsdHealthStatus {
fn default() -> Self {
Self::Healthy
}
}
/// MSD health monitor configuration /// MSD health monitor configuration
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct MsdMonitorConfig { pub struct MsdMonitorConfig {

View File

@@ -7,8 +7,10 @@ use std::path::PathBuf;
/// MSD operating mode /// MSD operating mode
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[derive(Default)]
pub enum MsdMode { pub enum MsdMode {
/// No storage connected /// No storage connected
#[default]
None, None,
/// Image file mounted (ISO/IMG) /// Image file mounted (ISO/IMG)
Image, Image,
@@ -16,12 +18,6 @@ pub enum MsdMode {
Drive, Drive,
} }
impl Default for MsdMode {
fn default() -> Self {
Self::None
}
}
/// Image file metadata /// Image file metadata
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImageInfo { pub struct ImageInfo {

View File

@@ -328,10 +328,7 @@ impl VentoyDrive {
let image = match VentoyImage::open(&path) { let image = match VentoyImage::open(&path) {
Ok(img) => img, Ok(img) => img,
Err(e) => { Err(e) => {
let _ = rt.block_on(tx.send(Err(std::io::Error::new( let _ = rt.block_on(tx.send(Err(std::io::Error::other(e.to_string()))));
std::io::ErrorKind::Other,
e.to_string(),
))));
return; return;
} }
}; };
@@ -341,10 +338,7 @@ impl VentoyDrive {
// Stream the file through the writer // Stream the file through the writer
if let Err(e) = image.read_file_to_writer(&file_path_owned, &mut chunk_writer) { if let Err(e) = image.read_file_to_writer(&file_path_owned, &mut chunk_writer) {
let _ = rt.block_on(tx.send(Err(std::io::Error::new( let _ = rt.block_on(tx.send(Err(std::io::Error::other(e.to_string()))));
std::io::ErrorKind::Other,
e.to_string(),
))));
} }
}); });
@@ -543,17 +537,14 @@ mod tests {
/// Decompress xz file using system command /// Decompress xz file using system command
fn decompress_xz(src: &std::path::Path, dst: &std::path::Path) -> std::io::Result<()> { fn decompress_xz(src: &std::path::Path, dst: &std::path::Path) -> std::io::Result<()> {
let output = Command::new("xz") let output = Command::new("xz")
.args(&["-d", "-k", "-c", src.to_str().unwrap()]) .args(["-d", "-k", "-c", src.to_str().unwrap()])
.output()?; .output()?;
if !output.status.success() { if !output.status.success() {
return Err(std::io::Error::new( return Err(std::io::Error::other(format!(
std::io::ErrorKind::Other,
format!(
"xz decompress failed: {}", "xz decompress failed: {}",
String::from_utf8_lossy(&output.stderr) String::from_utf8_lossy(&output.stderr)
), )));
));
} }
std::fs::write(dst, &output.stdout)?; std::fs::write(dst, &output.stdout)?;

View File

@@ -422,7 +422,11 @@ impl OtgGadgetManager {
if dest.exists() { if dest.exists() {
if let Err(e) = remove_file(&dest) { if let Err(e) = remove_file(&dest) {
warn!("Failed to remove existing config link {}: {}", dest.display(), e); warn!(
"Failed to remove existing config link {}: {}",
dest.display(),
e
);
continue; continue;
} }
} }

View File

@@ -35,7 +35,7 @@ const FLAG_HID: u8 = 0b01;
const FLAG_MSD: u8 = 0b10; const FLAG_MSD: u8 = 0b10;
/// HID device paths /// HID device paths
#[derive(Debug, Clone)] #[derive(Debug, Clone, Default)]
pub struct HidDevicePaths { pub struct HidDevicePaths {
pub keyboard: Option<PathBuf>, pub keyboard: Option<PathBuf>,
pub mouse_relative: Option<PathBuf>, pub mouse_relative: Option<PathBuf>,
@@ -43,17 +43,6 @@ pub struct HidDevicePaths {
pub consumer: Option<PathBuf>, pub consumer: Option<PathBuf>,
} }
impl Default for HidDevicePaths {
fn default() -> Self {
Self {
keyboard: None,
mouse_relative: None,
mouse_absolute: None,
consumer: None,
}
}
}
impl HidDevicePaths { impl HidDevicePaths {
pub fn existing_paths(&self) -> Vec<PathBuf> { pub fn existing_paths(&self) -> Vec<PathBuf> {
let mut paths = Vec::new(); let mut paths = Vec::new();
@@ -239,15 +228,13 @@ impl OtgService {
let requested_functions = self.hid_functions.read().await.clone(); let requested_functions = self.hid_functions.read().await.clone();
{ {
let state = self.state.read().await; let state = self.state.read().await;
if state.hid_enabled { if state.hid_enabled && state.hid_functions.as_ref() == Some(&requested_functions) {
if state.hid_functions.as_ref() == Some(&requested_functions) {
if let Some(ref paths) = state.hid_paths { if let Some(ref paths) = state.hid_paths {
info!("HID already enabled, returning existing paths"); info!("HID already enabled, returning existing paths");
return Ok(paths.clone()); return Ok(paths.clone());
} }
} }
} }
}
// Recreate gadget with both HID and MSD if needed // Recreate gadget with both HID and MSD if needed
self.recreate_gadget().await?; self.recreate_gadget().await?;
@@ -671,7 +658,7 @@ mod tests {
fn test_service_creation() { fn test_service_creation() {
let _service = OtgService::new(); let _service = OtgService::new();
// Just test that creation doesn't panic // Just test that creation doesn't panic
assert!(!OtgService::is_available() || true); // Depends on environment let _ = OtgService::is_available(); // Depends on environment
} }
#[tokio::test] #[tokio::test]

3
src/rtsp/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
pub mod service;
pub use service::{RtspService, RtspServiceStatus};

1343
src/rtsp/service.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -50,7 +50,7 @@ fn decode_header(first_byte: u8, header_bytes: &[u8]) -> (usize, usize) {
let head_len = ((first_byte & 0x3) + 1) as usize; let head_len = ((first_byte & 0x3) + 1) as usize;
let mut n = first_byte as usize; let mut n = first_byte as usize;
if head_len > 1 && header_bytes.len() >= 1 { if head_len > 1 && !header_bytes.is_empty() {
n |= (header_bytes[0] as usize) << 8; n |= (header_bytes[0] as usize) << 8;
} }
if head_len > 2 && header_bytes.len() >= 2 { if head_len > 2 && header_bytes.len() >= 2 {

View File

@@ -202,9 +202,11 @@ mod tests {
#[test] #[test]
fn test_rendezvous_addr() { fn test_rendezvous_addr() {
let mut config = RustDeskConfig::default(); let mut config = RustDeskConfig {
rendezvous_server: "example.com".to_string(),
..Default::default()
};
config.rendezvous_server = "example.com".to_string();
assert_eq!(config.rendezvous_addr(), "example.com:21116"); assert_eq!(config.rendezvous_addr(), "example.com:21116");
config.rendezvous_server = "example.com:21116".to_string(); config.rendezvous_server = "example.com:21116".to_string();
@@ -217,10 +219,12 @@ mod tests {
#[test] #[test]
fn test_relay_addr() { fn test_relay_addr() {
let mut config = RustDeskConfig::default(); let mut config = RustDeskConfig {
rendezvous_server: "example.com".to_string(),
..Default::default()
};
// Rendezvous server configured, relay defaults to same host // Rendezvous server configured, relay defaults to same host
config.rendezvous_server = "example.com".to_string();
assert_eq!(config.relay_addr(), Some("example.com:21117".to_string())); assert_eq!(config.relay_addr(), Some("example.com:21117".to_string()));
// Explicit relay server // Explicit relay server
@@ -238,10 +242,12 @@ mod tests {
#[test] #[test]
fn test_effective_rendezvous_server() { fn test_effective_rendezvous_server() {
let mut config = RustDeskConfig::default(); let mut config = RustDeskConfig {
rendezvous_server: "custom.example.com".to_string(),
..Default::default()
};
// When user sets a server, use it // When user sets a server, use it
config.rendezvous_server = "custom.example.com".to_string();
assert_eq!(config.effective_rendezvous_server(), "custom.example.com"); assert_eq!(config.effective_rendezvous_server(), "custom.example.com");
// When empty, returns empty // When empty, returns empty

View File

@@ -23,6 +23,9 @@ use tracing::{debug, error, info, warn};
use crate::audio::AudioController; use crate::audio::AudioController;
use crate::hid::{HidController, KeyEventType, KeyboardEvent, KeyboardModifiers}; use crate::hid::{HidController, KeyEventType, KeyboardEvent, KeyboardModifiers};
use crate::video::codec_constraints::{
encoder_codec_to_id, encoder_codec_to_video_codec, video_codec_to_encoder_codec,
};
use crate::video::encoder::registry::{EncoderRegistry, VideoEncoderType}; use crate::video::encoder::registry::{EncoderRegistry, VideoEncoderType};
use crate::video::encoder::BitratePreset; use crate::video::encoder::BitratePreset;
use crate::video::stream_manager::VideoStreamManager; use crate::video::stream_manager::VideoStreamManager;
@@ -627,7 +630,7 @@ impl Connection {
// Select the best available video codec // Select the best available video codec
// Priority: H264 > H265 > VP8 > VP9 (H264/H265 leverage hardware encoding) // Priority: H264 > H265 > VP8 > VP9 (H264/H265 leverage hardware encoding)
let negotiated = self.negotiate_video_codec(); let negotiated = self.negotiate_video_codec().await;
self.negotiated_codec = Some(negotiated); self.negotiated_codec = Some(negotiated);
info!("Negotiated video codec: {:?}", negotiated); info!("Negotiated video codec: {:?}", negotiated);
@@ -641,28 +644,51 @@ impl Connection {
/// Negotiate video codec - select the best available encoder /// Negotiate video codec - select the best available encoder
/// Priority: H264 > H265 > VP8 > VP9 (H264/H265 leverage hardware encoding on embedded devices) /// Priority: H264 > H265 > VP8 > VP9 (H264/H265 leverage hardware encoding on embedded devices)
fn negotiate_video_codec(&self) -> VideoEncoderType { async fn negotiate_video_codec(&self) -> VideoEncoderType {
let registry = EncoderRegistry::global(); let registry = EncoderRegistry::global();
let constraints = self.current_codec_constraints().await;
// Check availability in priority order // Check availability in priority order
// H264 is preferred because it has the best hardware encoder support (RKMPP, VAAPI, etc.) // H264 is preferred because it has the best hardware encoder support (RKMPP, VAAPI, etc.)
// and most RustDesk clients support H264 hardware decoding // and most RustDesk clients support H264 hardware decoding
if registry.is_format_available(VideoEncoderType::H264, false) { if constraints.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::H264)
&& registry.is_format_available(VideoEncoderType::H264, false)
{
return VideoEncoderType::H264; return VideoEncoderType::H264;
} }
if registry.is_format_available(VideoEncoderType::H265, false) { if constraints.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::H265)
&& registry.is_format_available(VideoEncoderType::H265, false)
{
return VideoEncoderType::H265; return VideoEncoderType::H265;
} }
if registry.is_format_available(VideoEncoderType::VP8, false) { if constraints.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::VP8)
&& registry.is_format_available(VideoEncoderType::VP8, false)
{
return VideoEncoderType::VP8; return VideoEncoderType::VP8;
} }
if registry.is_format_available(VideoEncoderType::VP9, false) { if constraints.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::VP9)
&& registry.is_format_available(VideoEncoderType::VP9, false)
{
return VideoEncoderType::VP9; return VideoEncoderType::VP9;
} }
// Fallback to H264 (should be available via hardware or software encoder) // Fallback to preferred allowed codec
warn!("No video encoder available, defaulting to H264"); let preferred = constraints.preferred_webrtc_codec();
VideoEncoderType::H264 warn!(
"No allowed encoder available in priority order, falling back to {}",
encoder_codec_to_id(video_codec_to_encoder_codec(preferred))
);
video_codec_to_encoder_codec(preferred)
}
async fn current_codec_constraints(
&self,
) -> crate::video::codec_constraints::StreamCodecConstraints {
if let Some(ref video_manager) = self.video_manager {
video_manager.codec_constraints().await
} else {
crate::video::codec_constraints::StreamCodecConstraints::unrestricted()
}
} }
/// Handle misc message with Arc writer /// Handle misc message with Arc writer
@@ -729,7 +755,7 @@ impl Connection {
} }
// Check if client sent supported_decoding with a codec preference // Check if client sent supported_decoding with a codec preference
if let Some(ref supported_decoding) = opt.supported_decoding.as_ref() { if let Some(supported_decoding) = opt.supported_decoding.as_ref() {
let prefer = supported_decoding.prefer.value(); let prefer = supported_decoding.prefer.value();
debug!("Client codec preference: prefer={}", prefer); debug!("Client codec preference: prefer={}", prefer);
@@ -747,6 +773,16 @@ impl Connection {
if let Some(new_codec) = requested_codec { if let Some(new_codec) = requested_codec {
// Check if this codec is different from current and available // Check if this codec is different from current and available
if self.negotiated_codec != Some(new_codec) { if self.negotiated_codec != Some(new_codec) {
let constraints = self.current_codec_constraints().await;
if !constraints.is_webrtc_codec_allowed(encoder_codec_to_video_codec(new_codec))
{
warn!(
"Client requested codec {:?} but it's blocked by constraints: {}",
new_codec, constraints.reason
);
return Ok(());
}
let registry = EncoderRegistry::global(); let registry = EncoderRegistry::global();
if registry.is_format_available(new_codec, false) { if registry.is_format_available(new_codec, false) {
info!( info!(
@@ -1080,12 +1116,21 @@ impl Connection {
if success { if success {
// Dynamically detect available encoders // Dynamically detect available encoders
let registry = EncoderRegistry::global(); let registry = EncoderRegistry::global();
let constraints = self.current_codec_constraints().await;
// Check which encoders are available (include software fallback) // Check which encoders are available (include software fallback)
let h264_available = registry.is_format_available(VideoEncoderType::H264, false); let h264_available = constraints
let h265_available = registry.is_format_available(VideoEncoderType::H265, false); .is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::H264)
let vp8_available = registry.is_format_available(VideoEncoderType::VP8, false); && registry.is_format_available(VideoEncoderType::H264, false);
let vp9_available = registry.is_format_available(VideoEncoderType::VP9, false); let h265_available = constraints
.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::H265)
&& registry.is_format_available(VideoEncoderType::H265, false);
let vp8_available = constraints
.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::VP8)
&& registry.is_format_available(VideoEncoderType::VP8, false);
let vp9_available = constraints
.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::VP9)
&& registry.is_format_available(VideoEncoderType::VP9, false);
info!( info!(
"Server encoding capabilities: H264={}, H265={}, VP8={}, VP9={}", "Server encoding capabilities: H264={}, H265={}, VP8={}, VP9={}",
@@ -1352,8 +1397,12 @@ impl Connection {
debug!("Mouse event: x={}, y={}, mask={}", me.x, me.y, me.mask); debug!("Mouse event: x={}, y={}, mask={}", me.x, me.y, me.mask);
// Convert RustDesk mouse event to One-KVM mouse events // Convert RustDesk mouse event to One-KVM mouse events
let mouse_events = let mouse_events = convert_mouse_event(
convert_mouse_event(me, self.screen_width, self.screen_height, self.relative_mouse_active); me,
self.screen_width,
self.screen_height,
self.relative_mouse_active,
);
// Send to HID controller if available // Send to HID controller if available
if let Some(ref hid) = self.hid { if let Some(ref hid) = self.hid {
@@ -1616,7 +1665,10 @@ async fn run_video_streaming(
); );
} }
if let Err(e) = video_manager.request_keyframe().await { if let Err(e) = video_manager.request_keyframe().await {
debug!("Failed to request keyframe for connection {}: {}", conn_id, e); debug!(
"Failed to request keyframe for connection {}: {}",
conn_id, e
);
} }
// Inner loop: receives frames from current subscription // Inner loop: receives frames from current subscription

View File

@@ -189,7 +189,7 @@ pub fn hash_password_double(password: &str, salt: &str, challenge: &str) -> Vec<
// Second hash: SHA256(first_hash + challenge) // Second hash: SHA256(first_hash + challenge)
let mut hasher2 = Sha256::new(); let mut hasher2 = Sha256::new();
hasher2.update(&first_hash); hasher2.update(first_hash);
hasher2.update(challenge.as_bytes()); hasher2.update(challenge.as_bytes());
hasher2.finalize().to_vec() hasher2.finalize().to_vec()
} }

View File

@@ -127,7 +127,7 @@ impl VideoFrameAdapter {
// Inject cached SPS/PPS before IDR when missing // Inject cached SPS/PPS before IDR when missing
if is_keyframe && (!has_sps || !has_pps) { if is_keyframe && (!has_sps || !has_pps) {
if let (Some(ref sps), Some(ref pps)) = (self.h264_sps.as_ref(), self.h264_pps.as_ref()) { if let (Some(sps), Some(pps)) = (self.h264_sps.as_ref(), self.h264_pps.as_ref()) {
let mut out = Vec::with_capacity(8 + sps.len() + pps.len() + data.len()); let mut out = Vec::with_capacity(8 + sps.len() + pps.len() + data.len());
out.extend_from_slice(&[0, 0, 0, 1]); out.extend_from_slice(&[0, 0, 0, 1]);
out.extend_from_slice(sps); out.extend_from_slice(sps);

View File

@@ -36,8 +36,8 @@ use tracing::{debug, error, info, warn};
use crate::audio::AudioController; use crate::audio::AudioController;
use crate::hid::HidController; use crate::hid::HidController;
use crate::video::stream_manager::VideoStreamManager;
use crate::utils::bind_tcp_listener; use crate::utils::bind_tcp_listener;
use crate::video::stream_manager::VideoStreamManager;
use self::config::RustDeskConfig; use self::config::RustDeskConfig;
use self::connection::ConnectionManager; use self::connection::ConnectionManager;
@@ -559,6 +559,7 @@ impl RustDeskService {
/// 2. Send RelayResponse with client's socket_addr /// 2. Send RelayResponse with client's socket_addr
/// 3. Connect to RELAY server /// 3. Connect to RELAY server
/// 4. Accept connection without waiting for response /// 4. Accept connection without waiting for response
#[allow(clippy::too_many_arguments)]
async fn handle_relay_request( async fn handle_relay_request(
rendezvous_addr: &str, rendezvous_addr: &str,
relay_server: &str, relay_server: &str,

View File

@@ -536,6 +536,10 @@ impl RendezvousMediator {
} }
} }
Some(rendezvous_message::Union::PunchHole(ph)) => { Some(rendezvous_message::Union::PunchHole(ph)) => {
let config = self.config.read().clone();
let effective_relay_server =
select_relay_server(config.relay_server.as_deref(), &ph.relay_server);
// Decode the peer's socket address // Decode the peer's socket address
let peer_addr = if !ph.socket_addr.is_empty() { let peer_addr = if !ph.socket_addr.is_empty() {
AddrMangle::decode(&ph.socket_addr) AddrMangle::decode(&ph.socket_addr)
@@ -544,8 +548,12 @@ impl RendezvousMediator {
}; };
info!( info!(
"Received PunchHole request: peer_addr={:?}, socket_addr_len={}, relay_server={}, nat_type={:?}", "Received PunchHole request: peer_addr={:?}, socket_addr_len={}, relay_server={}, effective_relay_server={}, nat_type={:?}",
peer_addr, ph.socket_addr.len(), ph.relay_server, ph.nat_type peer_addr,
ph.socket_addr.len(),
ph.relay_server,
effective_relay_server.as_deref().unwrap_or(""),
ph.nat_type
); );
// Send PunchHoleSent to acknowledge // Send PunchHoleSent to acknowledge
@@ -555,13 +563,19 @@ impl RendezvousMediator {
info!( info!(
"Sending PunchHoleSent: id={}, peer_addr={:?}, relay_server={}", "Sending PunchHoleSent: id={}, peer_addr={:?}, relay_server={}",
id, peer_addr, ph.relay_server id,
peer_addr,
effective_relay_server
.as_deref()
.unwrap_or(ph.relay_server.as_str())
); );
let msg = make_punch_hole_sent( let msg = make_punch_hole_sent(
&ph.socket_addr.to_vec(), // Use peer's socket_addr, not ours &ph.socket_addr, // Use peer's socket_addr, not ours
&id, &id,
&ph.relay_server, effective_relay_server
.as_deref()
.unwrap_or(ph.relay_server.as_str()),
ph.nat_type.enum_value().unwrap_or(NatType::UNKNOWN_NAT), ph.nat_type.enum_value().unwrap_or(NatType::UNKNOWN_NAT),
env!("CARGO_PKG_VERSION"), env!("CARGO_PKG_VERSION"),
); );
@@ -573,16 +587,10 @@ impl RendezvousMediator {
} }
// Try P2P direct connection first, fall back to relay if needed // Try P2P direct connection first, fall back to relay if needed
if !ph.relay_server.is_empty() { if let Some(relay_server) = effective_relay_server {
let relay_server = if ph.relay_server.contains(':') {
ph.relay_server.clone()
} else {
format!("{}:21117", ph.relay_server)
};
// Generate a standard UUID v4 for relay pairing // Generate a standard UUID v4 for relay pairing
// This must match the format used by RustDesk client // This must match the format used by RustDesk client
let uuid = uuid::Uuid::new_v4().to_string(); let uuid = uuid::Uuid::new_v4().to_string();
let config = self.config.read().clone();
let rendezvous_addr = config.rendezvous_addr(); let rendezvous_addr = config.rendezvous_addr();
let device_id = config.device_id.clone(); let device_id = config.device_id.clone();
@@ -606,21 +614,25 @@ impl RendezvousMediator {
device_id, device_id,
); );
} }
} else {
debug!("No relay server available for PunchHole, skipping relay fallback");
} }
} }
Some(rendezvous_message::Union::RequestRelay(rr)) => { Some(rendezvous_message::Union::RequestRelay(rr)) => {
let config = self.config.read().clone();
let effective_relay_server =
select_relay_server(config.relay_server.as_deref(), &rr.relay_server);
info!( info!(
"Received RequestRelay: relay_server={}, uuid={}, secure={}", "Received RequestRelay: relay_server={}, effective_relay_server={}, uuid={}, secure={}",
rr.relay_server, rr.uuid, rr.secure rr.relay_server,
effective_relay_server.as_deref().unwrap_or(""),
rr.uuid,
rr.secure
); );
// Call the relay callback to handle the connection // Call the relay callback to handle the connection
if let Some(callback) = self.relay_callback.read().as_ref() { if let Some(callback) = self.relay_callback.read().as_ref() {
let relay_server = if rr.relay_server.contains(':') { if let Some(relay_server) = effective_relay_server {
rr.relay_server.clone()
} else {
format!("{}:21117", rr.relay_server)
};
let config = self.config.read().clone();
let rendezvous_addr = config.rendezvous_addr(); let rendezvous_addr = config.rendezvous_addr();
let device_id = config.device_id.clone(); let device_id = config.device_id.clone();
callback( callback(
@@ -630,17 +642,28 @@ impl RendezvousMediator {
rr.socket_addr.to_vec(), rr.socket_addr.to_vec(),
device_id, device_id,
); );
} else {
debug!("No relay server available for RequestRelay callback");
}
} }
} }
Some(rendezvous_message::Union::FetchLocalAddr(fla)) => { Some(rendezvous_message::Union::FetchLocalAddr(fla)) => {
let config = self.config.read().clone();
let effective_relay_server =
select_relay_server(config.relay_server.as_deref(), &fla.relay_server)
.unwrap_or_default();
// Decode the peer address for logging // Decode the peer address for logging
let peer_addr = AddrMangle::decode(&fla.socket_addr); let peer_addr = AddrMangle::decode(&fla.socket_addr);
info!( info!(
"Received FetchLocalAddr request: peer_addr={:?}, socket_addr_len={}, relay_server={}", "Received FetchLocalAddr request: peer_addr={:?}, socket_addr_len={}, relay_server={}, effective_relay_server={}",
peer_addr, fla.socket_addr.len(), fla.relay_server peer_addr,
fla.socket_addr.len(),
fla.relay_server,
effective_relay_server
); );
// Respond with our local address for same-LAN direct connection // Respond with our local address for same-LAN direct connection
self.send_local_addr(socket, &fla.socket_addr, &fla.relay_server) self.send_local_addr(socket, &fla.socket_addr, &effective_relay_server)
.await?; .await?;
} }
Some(rendezvous_message::Union::ConfigureUpdate(cu)) => { Some(rendezvous_message::Union::ConfigureUpdate(cu)) => {
@@ -692,6 +715,25 @@ impl RendezvousMediator {
/// This encoding mangles the address to avoid detection. /// This encoding mangles the address to avoid detection.
pub struct AddrMangle; pub struct AddrMangle;
fn normalize_relay_server(server: &str) -> Option<String> {
let trimmed = server.trim();
if trimmed.is_empty() {
return None;
}
if trimmed.contains(':') {
Some(trimmed.to_string())
} else {
Some(format!("{}:21117", trimmed))
}
}
fn select_relay_server(local_relay: Option<&str>, server_relay: &str) -> Option<String> {
local_relay
.and_then(normalize_relay_server)
.or_else(|| normalize_relay_server(server_relay))
}
impl AddrMangle { impl AddrMangle {
/// Encode a SocketAddr to bytes using RustDesk's mangle algorithm /// Encode a SocketAddr to bytes using RustDesk's mangle algorithm
pub fn encode(addr: SocketAddr) -> Vec<u8> { pub fn encode(addr: SocketAddr) -> Vec<u8> {
@@ -876,3 +918,47 @@ fn get_local_addresses() -> Vec<std::net::IpAddr> {
addrs addrs
} }
#[cfg(test)]
mod tests {
use super::{normalize_relay_server, select_relay_server};
#[test]
fn test_normalize_relay_server() {
assert_eq!(normalize_relay_server(""), None);
assert_eq!(normalize_relay_server(" "), None);
assert_eq!(
normalize_relay_server("relay.example.com"),
Some("relay.example.com:21117".to_string())
);
assert_eq!(
normalize_relay_server("relay.example.com:22117"),
Some("relay.example.com:22117".to_string())
);
}
#[test]
fn test_select_relay_server_prefers_local() {
assert_eq!(
select_relay_server(Some("local.example.com:21117"), "server.example.com:21117"),
Some("local.example.com:21117".to_string())
);
assert_eq!(
select_relay_server(Some("local.example.com"), "server.example.com:21117"),
Some("local.example.com:21117".to_string())
);
assert_eq!(
select_relay_server(Some(" "), "server.example.com"),
Some("server.example.com:21117".to_string())
);
assert_eq!(
select_relay_server(None, "server.example.com:21117"),
Some("server.example.com:21117".to_string())
);
assert_eq!(select_relay_server(None, ""), None);
}
}

View File

@@ -13,7 +13,9 @@ use crate::extensions::ExtensionManager;
use crate::hid::HidController; use crate::hid::HidController;
use crate::msd::MsdController; use crate::msd::MsdController;
use crate::otg::OtgService; use crate::otg::OtgService;
use crate::rtsp::RtspService;
use crate::rustdesk::RustDeskService; use crate::rustdesk::RustDeskService;
use crate::update::UpdateService;
use crate::video::VideoStreamManager; use crate::video::VideoStreamManager;
/// Application-wide state shared across handlers /// Application-wide state shared across handlers
@@ -50,10 +52,14 @@ pub struct AppState {
pub audio: Arc<AudioController>, pub audio: Arc<AudioController>,
/// RustDesk remote access service (optional) /// RustDesk remote access service (optional)
pub rustdesk: Arc<RwLock<Option<Arc<RustDeskService>>>>, pub rustdesk: Arc<RwLock<Option<Arc<RustDeskService>>>>,
/// RTSP streaming service (optional)
pub rtsp: Arc<RwLock<Option<Arc<RtspService>>>>,
/// Extension manager (ttyd, gostc, easytier) /// Extension manager (ttyd, gostc, easytier)
pub extensions: Arc<ExtensionManager>, pub extensions: Arc<ExtensionManager>,
/// Event bus for real-time notifications /// Event bus for real-time notifications
pub events: Arc<EventBus>, pub events: Arc<EventBus>,
/// Online update service
pub update: Arc<UpdateService>,
/// Shutdown signal sender /// Shutdown signal sender
pub shutdown_tx: broadcast::Sender<()>, pub shutdown_tx: broadcast::Sender<()>,
/// Recently revoked session IDs (for client kick detection) /// Recently revoked session IDs (for client kick detection)
@@ -64,6 +70,7 @@ pub struct AppState {
impl AppState { impl AppState {
/// Create new application state /// Create new application state
#[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
config: ConfigStore, config: ConfigStore,
sessions: SessionStore, sessions: SessionStore,
@@ -75,8 +82,10 @@ impl AppState {
atx: Option<AtxController>, atx: Option<AtxController>,
audio: Arc<AudioController>, audio: Arc<AudioController>,
rustdesk: Option<Arc<RustDeskService>>, rustdesk: Option<Arc<RustDeskService>>,
rtsp: Option<Arc<RtspService>>,
extensions: Arc<ExtensionManager>, extensions: Arc<ExtensionManager>,
events: Arc<EventBus>, events: Arc<EventBus>,
update: Arc<UpdateService>,
shutdown_tx: broadcast::Sender<()>, shutdown_tx: broadcast::Sender<()>,
data_dir: std::path::PathBuf, data_dir: std::path::PathBuf,
) -> Arc<Self> { ) -> Arc<Self> {
@@ -91,8 +100,10 @@ impl AppState {
atx: Arc::new(RwLock::new(atx)), atx: Arc::new(RwLock::new(atx)),
audio, audio,
rustdesk: Arc::new(RwLock::new(rustdesk)), rustdesk: Arc::new(RwLock::new(rustdesk)),
rtsp: Arc::new(RwLock::new(rtsp)),
extensions, extensions,
events, events,
update,
shutdown_tx, shutdown_tx,
revoked_sessions: Arc::new(RwLock::new(VecDeque::new())), revoked_sessions: Arc::new(RwLock::new(VecDeque::new())),
data_dir, data_dir,

View File

@@ -15,18 +15,16 @@
//! //!
//! Note: Audio WebSocket is handled separately by audio_ws.rs (/api/ws/audio) //! Note: Audio WebSocket is handled separately by audio_ws.rs (/api/ws/audio)
use crate::utils::LogThrottler;
use crate::video::v4l2r_capture::V4l2rCaptureStream;
use std::collections::HashMap;
use std::io; use std::io;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration;
use tokio::sync::{Mutex, RwLock}; use tokio::sync::{Mutex, RwLock};
use tracing::{error, info, warn}; use tracing::{error, info, warn};
use v4l::buffer::Type as BufferType;
use v4l::io::traits::CaptureStream;
use v4l::prelude::*;
use v4l::video::Capture;
use v4l::video::capture::Parameters;
use v4l::Format;
use crate::audio::AudioController; use crate::audio::AudioController;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
@@ -491,8 +489,7 @@ impl MjpegStreamer {
} }
}; };
let mut device_opt: Option<Device> = None; let mut stream_opt: Option<V4l2rCaptureStream> = None;
let mut format_opt: Option<Format> = None;
let mut last_error: Option<String> = None; let mut last_error: Option<String> = None;
for attempt in 0..MAX_RETRIES { for attempt in 0..MAX_RETRIES {
@@ -501,8 +498,18 @@ impl MjpegStreamer {
return; return;
} }
let device = match Device::with_path(&device_path) { match V4l2rCaptureStream::open(
Ok(d) => d, &device_path,
config.resolution,
config.format,
config.fps,
4,
Duration::from_secs(2),
) {
Ok(stream) => {
stream_opt = Some(stream);
break;
}
Err(e) => { Err(e) => {
let err_str = e.to_string(); let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") { if err_str.contains("busy") || err_str.contains("resource") {
@@ -519,42 +526,12 @@ impl MjpegStreamer {
last_error = Some(err_str); last_error = Some(err_str);
break; break;
} }
};
let requested = Format::new(
config.resolution.width,
config.resolution.height,
config.format.to_fourcc(),
);
match device.set_format(&requested) {
Ok(actual) => {
device_opt = Some(device);
format_opt = Some(actual);
break;
}
Err(e) => {
let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") {
warn!(
"Device busy on set_format attempt {}/{}, retrying in {}ms...",
attempt + 1,
MAX_RETRIES,
RETRY_DELAY_MS
);
std::thread::sleep(std::time::Duration::from_millis(RETRY_DELAY_MS));
last_error = Some(err_str);
continue;
}
last_error = Some(err_str);
break;
}
} }
} }
let (device, actual_format) = match (device_opt, format_opt) { let mut stream = match stream_opt {
(Some(d), Some(f)) => (d, f), Some(stream) => stream,
_ => { None => {
error!( error!(
"Failed to open device {:?}: {}", "Failed to open device {:?}: {}",
device_path, device_path,
@@ -567,40 +544,36 @@ impl MjpegStreamer {
} }
}; };
let resolution = stream.resolution();
let pixel_format = stream.format();
let stride = stream.stride();
info!( info!(
"Capture format: {}x{} {:?} stride={}", "Capture format: {}x{} {:?} stride={}",
actual_format.width, actual_format.height, actual_format.fourcc, actual_format.stride resolution.width, resolution.height, pixel_format, stride
); );
let resolution = Resolution::new(actual_format.width, actual_format.height);
let pixel_format =
PixelFormat::from_fourcc(actual_format.fourcc).unwrap_or(config.format);
if config.fps > 0 {
if let Err(e) = device.set_params(&Parameters::with_fps(config.fps)) {
warn!("Failed to set hardware FPS: {}", e);
}
}
let mut stream = match MmapStream::with_buffers(&device, BufferType::VideoCapture, 4) {
Ok(s) => s,
Err(e) => {
error!("Failed to create capture stream: {}", e);
set_state(MjpegStreamerState::Error);
self.mjpeg_handler.set_offline();
self.direct_active.store(false, Ordering::SeqCst);
return;
}
};
let buffer_pool = Arc::new(FrameBufferPool::new(8)); let buffer_pool = Arc::new(FrameBufferPool::new(8));
let mut signal_present = true; let mut signal_present = true;
let mut sequence: u64 = 0;
let mut validate_counter: u64 = 0; let mut validate_counter: u64 = 0;
let capture_error_throttler = LogThrottler::with_secs(5);
let mut suppressed_capture_errors: HashMap<String, u64> = HashMap::new();
let classify_capture_error = |err: &std::io::Error| -> String {
let message = err.to_string();
if message.contains("dqbuf failed") && message.contains("EINVAL") {
"capture_dqbuf_einval".to_string()
} else if message.contains("dqbuf failed") {
"capture_dqbuf".to_string()
} else {
format!("capture_{:?}", err.kind())
}
};
while !self.direct_stop.load(Ordering::Relaxed) { while !self.direct_stop.load(Ordering::Relaxed) {
let (buf, meta) = match stream.next() { let mut owned = buffer_pool.take(MIN_CAPTURE_FRAME_SIZE);
Ok(frame_data) => frame_data, let meta = match stream.next_into(&mut owned) {
Ok(meta) => meta,
Err(e) => { Err(e) => {
if e.kind() == io::ErrorKind::TimedOut { if e.kind() == io::ErrorKind::TimedOut {
if signal_present { if signal_present {
@@ -628,35 +601,43 @@ impl MjpegStreamer {
return; return;
} }
let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) {
let suppressed = suppressed_capture_errors.remove(&key).unwrap_or(0);
if suppressed > 0 {
error!("Capture error: {} (suppressed {} repeats)", e, suppressed);
} else {
error!("Capture error: {}", e); error!("Capture error: {}", e);
}
} else {
let counter = suppressed_capture_errors.entry(key).or_insert(0);
*counter = counter.saturating_add(1);
}
continue; continue;
} }
}; };
let frame_size = meta.bytesused as usize; let frame_size = meta.bytes_used;
if frame_size < MIN_CAPTURE_FRAME_SIZE { if frame_size < MIN_CAPTURE_FRAME_SIZE {
continue; continue;
} }
validate_counter = validate_counter.wrapping_add(1); validate_counter = validate_counter.wrapping_add(1);
if pixel_format.is_compressed() if pixel_format.is_compressed()
&& validate_counter % JPEG_VALIDATE_INTERVAL == 0 && validate_counter.is_multiple_of(JPEG_VALIDATE_INTERVAL)
&& !VideoFrame::is_valid_jpeg_bytes(&buf[..frame_size]) && !VideoFrame::is_valid_jpeg_bytes(&owned[..frame_size])
{ {
continue; continue;
} }
let mut owned = buffer_pool.take(frame_size); owned.truncate(frame_size);
owned.resize(frame_size, 0);
owned[..frame_size].copy_from_slice(&buf[..frame_size]);
let frame = VideoFrame::from_pooled( let frame = VideoFrame::from_pooled(
Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))), Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))),
resolution, resolution,
pixel_format, pixel_format,
actual_format.stride, stride,
sequence, meta.sequence,
); );
sequence = sequence.wrapping_add(1);
if !signal_present { if !signal_present {
signal_present = true; signal_present = true;

606
src/update/mod.rs Normal file
View File

@@ -0,0 +1,606 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use futures::StreamExt;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::sync::{broadcast, RwLock, Semaphore};
use crate::error::{AppError, Result};
/// Default update-manifest server; overridable via `ONE_KVM_UPDATE_BASE_URL`.
const DEFAULT_UPDATE_BASE_URL: &str = "https://update.one-kvm.cn";
/// Release channel selector; serialized as lowercase (`"stable"` / `"beta"`).
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum UpdateChannel {
    Stable,
    Beta,
}
impl Default for UpdateChannel {
    /// Stable is the channel used when none is configured.
    fn default() -> Self {
        Self::Stable
    }
}
/// `/v1/channels.json`: latest version string per channel.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChannelsManifest {
    /// Latest stable version, e.g. "1.2.3".
    pub stable: String,
    /// Latest beta version, e.g. "1.3.0".
    pub beta: String,
}
/// `/v1/releases.json`: the full list of published releases.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReleasesManifest {
    pub releases: Vec<ReleaseInfo>,
}
/// One published release as described by the releases manifest.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReleaseInfo {
    /// Version string, expected to be `x.y.z`.
    pub version: String,
    /// Channel this release was published on.
    pub channel: UpdateChannel,
    /// Publication timestamp as a string (format defined by the server).
    pub published_at: String,
    /// Human-readable release notes; missing in JSON means empty.
    #[serde(default)]
    pub notes: Vec<String>,
    /// Downloadable binaries keyed by target triple (e.g. "x86_64-unknown-linux-gnu").
    #[serde(default)]
    pub artifacts: HashMap<String, ArtifactInfo>,
}
/// A single downloadable binary: where to get it and how to verify it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ArtifactInfo {
    /// Absolute URL or path relative to the update base URL.
    pub url: String,
    /// Expected SHA-256 of the binary (hex; may carry a trailing filename).
    pub sha256: String,
    /// Expected size in bytes; 0 disables the size check.
    pub size: u64,
}
/// Release-notes entry returned to API clients in `UpdateOverviewResponse`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReleaseNotesItem {
    pub version: String,
    pub published_at: String,
    pub notes: Vec<String>,
}
/// API response describing whether an upgrade is available on a channel.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateOverviewResponse {
    pub success: bool,
    /// Version compiled into this binary (CARGO_PKG_VERSION).
    pub current_version: String,
    /// Channel the overview was computed for.
    pub channel: UpdateChannel,
    /// Latest version on that channel per the channels manifest.
    pub latest_version: String,
    /// True when `latest_version` is strictly greater than `current_version`.
    pub upgrade_available: bool,
    /// Set to `latest_version` only when an upgrade is available.
    pub target_version: Option<String>,
    /// Notes for every release in (current, latest], oldest first.
    pub notes_between: Vec<ReleaseNotesItem>,
}
/// Upgrade request body: exactly one of `channel` or `target_version`
/// must be provided (enforced in `UpdateService::start_upgrade`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpgradeRequest {
    pub channel: Option<UpdateChannel>,
    pub target_version: Option<String>,
}
/// Phases of the upgrade state machine, serialized lowercase.
/// Progression: Idle → Checking → Downloading → Verifying → Installing →
/// Restarting, terminating in Success or Failed.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum UpdatePhase {
    Idle,
    Checking,
    Downloading,
    Verifying,
    Installing,
    Restarting,
    Success,
    Failed,
}
/// API response reporting live upgrade status.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateStatusResponse {
    /// False only when `phase` is `Failed` (kept in sync by `set_status`).
    pub success: bool,
    pub phase: UpdatePhase,
    /// Coarse percentage, 0-100.
    pub progress: u8,
    pub current_version: String,
    /// Version being installed; retained once set until overwritten.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub target_version: Option<String>,
    /// Human-readable description of the current step.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    /// Last failure message, if any.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub last_error: Option<String>,
}
/// Online self-update service: fetches release manifests, downloads and
/// verifies a new binary, swaps the running executable in place, and
/// restarts the process.
pub struct UpdateService {
    /// HTTP client used for manifest and artifact requests.
    client: reqwest::Client,
    /// Update server base URL (see `ONE_KVM_UPDATE_BASE_URL`).
    base_url: String,
    /// Directory used to stage downloaded binaries before install.
    work_dir: PathBuf,
    /// Latest status snapshot served to API clients.
    status: RwLock<UpdateStatusResponse>,
    /// Single permit: at most one upgrade may run at a time.
    upgrade_permit: Arc<Semaphore>,
}
impl UpdateService {
    /// Create a new update service staging downloads under `work_dir`.
    ///
    /// The manifest base URL defaults to `DEFAULT_UPDATE_BASE_URL` and may be
    /// overridden with the `ONE_KVM_UPDATE_BASE_URL` environment variable
    /// (blank/whitespace values are ignored).
    pub fn new(work_dir: PathBuf) -> Self {
        let base_url = std::env::var("ONE_KVM_UPDATE_BASE_URL")
            .ok()
            .filter(|url| !url.trim().is_empty())
            .unwrap_or_else(|| DEFAULT_UPDATE_BASE_URL.to_string());
        Self {
            client: reqwest::Client::new(),
            base_url,
            work_dir,
            // Start Idle, reporting the compile-time package version.
            status: RwLock::new(UpdateStatusResponse {
                success: true,
                phase: UpdatePhase::Idle,
                progress: 0,
                current_version: env!("CARGO_PKG_VERSION").to_string(),
                target_version: None,
                message: None,
                last_error: None,
            }),
            // One permit => only one concurrent upgrade.
            upgrade_permit: Arc::new(Semaphore::new(1)),
        }
    }

    /// Snapshot of the current upgrade status.
    pub async fn status(&self) -> UpdateStatusResponse {
        self.status.read().await.clone()
    }

    /// Fetch both manifests and report whether an upgrade is available on
    /// `channel`, including notes for every release strictly newer than the
    /// current version and no newer than the channel's latest.
    ///
    /// # Errors
    /// Fails if a manifest cannot be fetched/parsed or the current/latest
    /// version strings are not valid `x.y.z`.
    pub async fn overview(&self, channel: UpdateChannel) -> Result<UpdateOverviewResponse> {
        let channels: ChannelsManifest = self.fetch_json("/v1/channels.json").await?;
        let releases: ReleasesManifest = self.fetch_json("/v1/releases.json").await?;
        let current_version = parse_version(env!("CARGO_PKG_VERSION"))?;
        let latest_version_str = match channel {
            UpdateChannel::Stable => channels.stable,
            UpdateChannel::Beta => channels.beta,
        };
        let latest_version = parse_version(&latest_version_str)?;
        let current_parts = parse_version_parts(&current_version)?;
        let latest_parts = parse_version_parts(&latest_version)?;
        let mut notes_between = Vec::new();
        for release in &releases.releases {
            if release.channel != channel {
                continue;
            }
            // Malformed release versions are skipped rather than failing
            // the whole overview.
            let version = match parse_version(&release.version) {
                Ok(v) => v,
                Err(_) => continue,
            };
            let version_parts = match parse_version_parts(&version) {
                Ok(parts) => parts,
                Err(_) => continue,
            };
            // Keep releases with version in the half-open range
            // (current, latest].
            if compare_version_parts(&version_parts, &current_parts) == std::cmp::Ordering::Greater
                && compare_version_parts(&version_parts, &latest_parts)
                    != std::cmp::Ordering::Greater
            {
                notes_between.push((
                    version_parts,
                    ReleaseNotesItem {
                        version: release.version.clone(),
                        published_at: release.published_at.clone(),
                        notes: release.notes.clone(),
                    },
                ));
            }
        }
        // Sort oldest-first by numeric version, then drop the sort key.
        notes_between.sort_by(|a, b| compare_version_parts(&a.0, &b.0));
        let notes_between = notes_between.into_iter().map(|(_, item)| item).collect();
        let upgrade_available =
            compare_versions(&latest_version, &current_version)? == std::cmp::Ordering::Greater;
        Ok(UpdateOverviewResponse {
            success: true,
            current_version: current_version.to_string(),
            channel,
            latest_version: latest_version.clone(),
            upgrade_available,
            target_version: if upgrade_available {
                Some(latest_version)
            } else {
                None
            },
            notes_between,
        })
    }

    /// Validate the request, claim the single upgrade permit, and run the
    /// upgrade on a background task. Returns immediately; progress is
    /// observable via `status()`.
    ///
    /// # Errors
    /// `BadRequest` when neither/both of `channel` and `target_version` are
    /// given, or when an upgrade is already in flight.
    pub fn start_upgrade(
        self: &Arc<Self>,
        req: UpgradeRequest,
        shutdown_tx: broadcast::Sender<()>,
    ) -> Result<()> {
        // XOR check: exactly one selector must be provided.
        if req.channel.is_none() == req.target_version.is_none() {
            return Err(AppError::BadRequest(
                "Provide exactly one of channel or target_version".to_string(),
            ));
        }
        let permit = self
            .upgrade_permit
            .clone()
            .try_acquire_owned()
            .map_err(|_| AppError::BadRequest("Upgrade is already running".to_string()))?;
        let service = self.clone();
        tokio::spawn(async move {
            // Hold the permit for the task's lifetime so a second upgrade
            // cannot start until this one finishes.
            let _permit = permit;
            if let Err(e) = service.execute_upgrade(req, shutdown_tx).await {
                service
                    .set_status(
                        UpdatePhase::Failed,
                        0,
                        None,
                        Some(e.to_string()),
                        Some(e.to_string()),
                    )
                    .await;
            }
        });
        Ok(())
    }

    /// Full upgrade pipeline: resolve target version → download → verify →
    /// install → broadcast shutdown → re-exec the process.
    ///
    /// On success this does not normally return: `restart_current_process`
    /// replaces the process image (on Unix).
    async fn execute_upgrade(
        &self,
        req: UpgradeRequest,
        shutdown_tx: broadcast::Sender<()>,
    ) -> Result<()> {
        self.set_status(
            UpdatePhase::Checking,
            5,
            None,
            Some("Checking for updates".to_string()),
            None,
        )
        .await;
        let channels: ChannelsManifest = self.fetch_json("/v1/channels.json").await?;
        let releases: ReleasesManifest = self.fetch_json("/v1/releases.json").await?;
        let current_version = parse_version(env!("CARGO_PKG_VERSION"))?;
        // Target is either the channel's latest or an explicit version
        // (start_upgrade guarantees exactly one is set).
        let target_version = if let Some(channel) = req.channel {
            let version_str = match channel {
                UpdateChannel::Stable => channels.stable,
                UpdateChannel::Beta => channels.beta,
            };
            parse_version(&version_str)?
        } else {
            parse_version(req.target_version.as_deref().unwrap_or_default())?
        };
        // Only strict upgrades are allowed; downgrades/same-version are rejected.
        if compare_versions(&target_version, &current_version)? != std::cmp::Ordering::Greater {
            return Err(AppError::BadRequest(format!(
                "Target version {} must be greater than current version {}",
                target_version, current_version
            )));
        }
        let target_release = releases
            .releases
            .iter()
            .find(|r| r.version == target_version)
            .ok_or_else(|| AppError::NotFound(format!("Release {} not found", target_version)))?;
        let target_triple = current_target_triple()?;
        let artifact = target_release
            .artifacts
            .get(&target_triple)
            .ok_or_else(|| {
                AppError::NotFound(format!(
                    "No binary for target {} in version {}",
                    target_triple, target_version
                ))
            })?
            .clone();
        self.set_status(
            UpdatePhase::Downloading,
            10,
            Some(target_version.clone()),
            Some("Downloading binary".to_string()),
            None,
        )
        .await;
        tokio::fs::create_dir_all(&self.work_dir).await?;
        let staging_path = self
            .work_dir
            .join(format!("one-kvm-{}-download", target_version));
        let artifact_url = self.resolve_url(&artifact.url);
        self.download_and_verify(&artifact_url, &staging_path, &artifact)
            .await?;
        self.set_status(
            UpdatePhase::Installing,
            80,
            Some(target_version.clone()),
            Some("Replacing binary".to_string()),
            None,
        )
        .await;
        self.install_binary(&staging_path).await?;
        self.set_status(
            UpdatePhase::Restarting,
            95,
            Some(target_version),
            Some("Restarting service".to_string()),
            None,
        )
        .await;
        // Best-effort shutdown broadcast; the grace period lets other
        // components wind down before exec replaces the process.
        let _ = shutdown_tx.send(());
        tokio::time::sleep(std::time::Duration::from_secs(2)).await;
        restart_current_process()?;
        Ok(())
    }

    /// Stream `url` into `output_path`, tracking progress, then verify size
    /// and SHA-256 against the manifest's `artifact` entry.
    ///
    /// Progress is mapped to 10-70% during download and 72% while verifying.
    /// A manifest `size` of 0 disables both progress ratio and size check.
    async fn download_and_verify(
        &self,
        url: &str,
        output_path: &Path,
        artifact: &ArtifactInfo,
    ) -> Result<()> {
        let response = self
            .client
            .get(url)
            .send()
            .await
            .map_err(|e| AppError::Internal(format!("Failed to download {}: {}", url, e)))?
            .error_for_status()
            .map_err(|e| AppError::Internal(format!("Download request failed: {}", e)))?;
        let mut file = tokio::fs::File::create(output_path).await?;
        let mut stream = response.bytes_stream();
        let mut downloaded: u64 = 0;
        while let Some(chunk) = stream.next().await {
            let chunk = chunk
                .map_err(|e| AppError::Internal(format!("Read download stream failed: {}", e)))?;
            file.write_all(&chunk).await?;
            downloaded += chunk.len() as u64;
            if artifact.size > 0 {
                let ratio = (downloaded as f64 / artifact.size as f64).clamp(0.0, 1.0);
                let progress = 10 + (ratio * 60.0) as u8;
                // NOTE(review): status is written once per chunk; presumably
                // acceptable churn on the RwLock — confirm under slow links.
                self.set_status(
                    UpdatePhase::Downloading,
                    progress,
                    None,
                    Some(format!(
                        "Downloading binary ({} / {} bytes)",
                        downloaded, artifact.size
                    )),
                    None,
                )
                .await;
            }
        }
        file.flush().await?;
        if artifact.size > 0 && downloaded != artifact.size {
            return Err(AppError::Internal(format!(
                "Downloaded size mismatch: expected {}, got {}",
                artifact.size, downloaded
            )));
        }
        self.set_status(
            UpdatePhase::Verifying,
            72,
            None,
            Some("Verifying sha256".to_string()),
            None,
        )
        .await;
        let actual_sha256 = compute_file_sha256(output_path).await?;
        let expected_sha256 = normalize_sha256(&artifact.sha256).ok_or_else(|| {
            AppError::Internal(format!(
                "Invalid sha256 format in manifest: {}",
                artifact.sha256
            ))
        })?;
        if actual_sha256 != expected_sha256 {
            return Err(AppError::Internal(format!(
                "SHA256 mismatch: expected {}, got {}",
                expected_sha256, actual_sha256
            )));
        }
        Ok(())
    }

    /// Install the verified binary: copy it next to the current executable,
    /// mark it executable (Unix), then atomically rename it over the running
    /// binary (same directory, hence same filesystem).
    async fn install_binary(&self, staging_path: &Path) -> Result<()> {
        let current_exe = std::env::current_exe()
            .map_err(|e| AppError::Internal(format!("Failed to get current exe path: {}", e)))?;
        let exe_dir = current_exe.parent().ok_or_else(|| {
            AppError::Internal("Failed to determine executable directory".to_string())
        })?;
        // Stage inside exe_dir so the final rename stays on one filesystem.
        let install_path = exe_dir.join("one-kvm.upgrade.new");
        tokio::fs::copy(staging_path, &install_path)
            .await
            .map_err(|e| {
                AppError::Internal(format!("Failed to stage binary into install path: {}", e))
            })?;
        #[cfg(unix)]
        {
            use std::os::unix::fs::PermissionsExt;
            let mut perms = tokio::fs::metadata(&install_path).await?.permissions();
            perms.set_mode(0o755);
            tokio::fs::set_permissions(&install_path, perms).await?;
        }
        tokio::fs::rename(&install_path, &current_exe)
            .await
            .map_err(|e| AppError::Internal(format!("Failed to replace executable {}", e)))?;
        Ok(())
    }

    /// GET `base_url + path` and deserialize the JSON response body.
    async fn fetch_json<T: for<'de> Deserialize<'de>>(&self, path: &str) -> Result<T> {
        let url = format!("{}{}", self.base_url.trim_end_matches('/'), path);
        let response = self
            .client
            .get(&url)
            .send()
            .await
            .map_err(|e| AppError::Internal(format!("Failed to fetch {}: {}", url, e)))?
            .error_for_status()
            .map_err(|e| AppError::Internal(format!("Request failed {}: {}", url, e)))?;
        response
            .json::<T>()
            .await
            .map_err(|e| AppError::Internal(format!("Invalid update response {}: {}", url, e)))
    }

    /// Resolve a manifest URL: absolute http(s) URLs pass through, anything
    /// else is treated as a path relative to the update base URL.
    fn resolve_url(&self, url: &str) -> String {
        if url.starts_with("http://") || url.starts_with("https://") {
            url.to_string()
        } else {
            format!(
                "{}/{}",
                self.base_url.trim_end_matches('/'),
                url.trim_start_matches('/')
            )
        }
    }

    /// Update the shared status snapshot.
    ///
    /// `target_version` is only overwritten when `Some`, so it persists
    /// across phases once set; `message`/`last_error` are replaced as given,
    /// and `success` is derived from whether `phase` is `Failed`.
    async fn set_status(
        &self,
        phase: UpdatePhase,
        progress: u8,
        target_version: Option<String>,
        message: Option<String>,
        last_error: Option<String>,
    ) {
        let mut status = self.status.write().await;
        status.phase = phase;
        status.progress = progress;
        if target_version.is_some() {
            status.target_version = target_version;
        }
        status.message = message;
        status.last_error = last_error;
        status.success = status.phase != UpdatePhase::Failed;
        status.current_version = env!("CARGO_PKG_VERSION").to_string();
    }
}
/// Validate that `input` is a plain `x.y.z` version string and return it.
///
/// Each component must be non-empty, all ASCII digits, and representable as
/// a `u64`, so that any string accepted here is guaranteed to be accepted by
/// `parse_version_parts`/`compare_versions` later in the upgrade flow.
///
/// # Errors
/// `AppError::Internal` when the string is not three dot-separated numeric
/// components, or a component overflows `u64`.
fn parse_version(input: &str) -> Result<String> {
    let parts: Vec<&str> = input.split('.').collect();
    if parts.len() != 3 {
        return Err(AppError::Internal(format!(
            "Invalid version {}, expected x.x.x",
            input
        )));
    }
    if parts
        .iter()
        .any(|p| p.is_empty() || !p.chars().all(|c| c.is_ascii_digit()))
    {
        return Err(AppError::Internal(format!(
            "Invalid version {}, expected numeric x.x.x",
            input
        )));
    }
    // Bug fix: previously a component larger than u64::MAX passed this check
    // but made parse_version_parts fail later with a confusing error deep in
    // the upgrade pipeline. Reject it up front instead.
    if parts.iter().any(|p| p.parse::<u64>().is_err()) {
        return Err(AppError::Internal(format!(
            "Invalid version {}, expected numeric x.x.x",
            input
        )));
    }
    Ok(input.to_string())
}
/// Numerically compare two `x.y.z` version strings.
///
/// # Errors
/// Propagates parse failures from `parse_version_parts`.
fn compare_versions(a: &str, b: &str) -> Result<std::cmp::Ordering> {
    let lhs = parse_version_parts(a)?;
    parse_version_parts(b).map(|rhs| compare_version_parts(&lhs, &rhs))
}
/// Split an `x.y.z` version string into its three numeric components.
///
/// # Errors
/// `AppError::Internal` when there are not exactly three dot-separated
/// fields or a field does not parse as `u64`.
fn parse_version_parts(input: &str) -> Result<[u64; 3]> {
    let fields: Vec<&str> = input.split('.').collect();
    // Destructure into exactly three fields; anything else is malformed.
    let [major_str, minor_str, patch_str] = fields[..] else {
        return Err(AppError::Internal(format!(
            "Invalid version {}, expected x.x.x",
            input
        )));
    };
    let major = major_str
        .parse::<u64>()
        .map_err(|e| AppError::Internal(format!("Invalid major version {}: {}", major_str, e)))?;
    let minor = minor_str
        .parse::<u64>()
        .map_err(|e| AppError::Internal(format!("Invalid minor version {}: {}", minor_str, e)))?;
    let patch = patch_str
        .parse::<u64>()
        .map_err(|e| AppError::Internal(format!("Invalid patch version {}: {}", patch_str, e)))?;
    Ok([major, minor, patch])
}
/// Lexicographic comparison of `[major, minor, patch]` triples.
/// Fixed-size arrays already order element-by-element, which is exactly
/// major-then-minor-then-patch precedence.
fn compare_version_parts(a: &[u64; 3], b: &[u64; 3]) -> std::cmp::Ordering {
    a.cmp(b)
}
/// Stream the file at `path` through SHA-256 in 8 KiB chunks and return the
/// lowercase hex digest.
///
/// # Errors
/// Propagates I/O errors from opening or reading the file.
async fn compute_file_sha256(path: &Path) -> Result<String> {
    let mut reader = tokio::fs::File::open(path).await?;
    let mut hasher = Sha256::new();
    let mut chunk = [0u8; 8192];
    loop {
        match reader.read(&mut chunk).await? {
            // EOF: nothing more to hash.
            0 => break,
            n => hasher.update(&chunk[..n]),
        }
    }
    Ok(format!("{:x}", hasher.finalize()))
}
/// Normalize a manifest sha256 field to a bare lowercase hex digest.
///
/// Takes the first whitespace-separated token (tolerating `sha256sum`-style
/// "<digest>  <filename>" lines) and returns it lowercased, or `None` when
/// it is not exactly 64 hex characters.
fn normalize_sha256(input: &str) -> Option<String> {
    let candidate = input.split_whitespace().next()?.trim().to_lowercase();
    let looks_like_digest =
        candidate.len() == 64 && candidate.bytes().all(|b| b.is_ascii_hexdigit());
    looks_like_digest.then_some(candidate)
}
/// Map the running OS/architecture to the release-artifact target triple.
///
/// Only Linux on x86_64, aarch64, and armv7 is supported.
///
/// # Errors
/// `AppError::BadRequest` for any other platform.
fn current_target_triple() -> Result<String> {
    let os = std::env::consts::OS;
    let arch = std::env::consts::ARCH;
    match (os, arch) {
        ("linux", "x86_64") => Ok("x86_64-unknown-linux-gnu".to_string()),
        ("linux", "aarch64") => Ok("aarch64-unknown-linux-gnu".to_string()),
        ("linux", "arm") => Ok("armv7-unknown-linux-gnueabihf".to_string()),
        _ => Err(AppError::BadRequest(format!(
            "Unsupported platform {}-{}",
            os, arch
        ))),
    }
}
/// Replace the current process with a fresh invocation of the (now updated)
/// executable, preserving the original command-line arguments.
///
/// On Unix this uses `exec`, which replaces the process image and only
/// returns on failure — so reaching the `Err` below is itself the error
/// path. On non-Unix targets a new process is spawned and this one exits.
fn restart_current_process() -> Result<()> {
    let exe = std::env::current_exe()
        .map_err(|e| AppError::Internal(format!("Failed to get current exe: {}", e)))?;
    // Skip argv[0]; Command::new(&exe) supplies the program path itself.
    let args: Vec<String> = std::env::args().skip(1).collect();
    #[cfg(unix)]
    {
        use std::os::unix::process::CommandExt;
        // exec() only returns if the replacement failed.
        let err = std::process::Command::new(&exe).args(&args).exec();
        Err(AppError::Internal(format!("Failed to restart: {}", err)))
    }
    #[cfg(not(unix))]
    {
        std::process::Command::new(&exe)
            .args(&args)
            .spawn()
            .map_err(|e| AppError::Internal(format!("Failed to spawn restart process: {}", e)))?;
        std::process::exit(0);
    }
}

View File

@@ -2,8 +2,8 @@
//! //!
//! This module contains common utilities used across the codebase. //! This module contains common utilities used across the codebase.
pub mod throttle;
pub mod net; pub mod net;
pub mod throttle;
pub use throttle::LogThrottler;
pub use net::{bind_tcp_listener, bind_udp_socket}; pub use net::{bind_tcp_listener, bind_udp_socket};
pub use throttle::LogThrottler;

View File

@@ -2,24 +2,21 @@
//! //!
//! Provides async video capture using memory-mapped buffers. //! Provides async video capture using memory-mapped buffers.
use bytes::Bytes;
use std::collections::HashMap;
use std::io; use std::io;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use bytes::Bytes;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use tokio::sync::{watch, Mutex}; use tokio::sync::{watch, Mutex};
use tracing::{debug, error, info, warn}; use tracing::{debug, error, info, warn};
use v4l::buffer::Type as BufferType;
use v4l::io::traits::CaptureStream;
use v4l::prelude::*;
use v4l::video::capture::Parameters;
use v4l::video::Capture;
use v4l::Format;
use super::format::{PixelFormat, Resolution}; use super::format::{PixelFormat, Resolution};
use super::frame::VideoFrame; use super::frame::VideoFrame;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::utils::LogThrottler;
use crate::video::v4l2r_capture::V4l2rCaptureStream;
/// Default number of capture buffers (reduced from 4 to 2 for lower latency) /// Default number of capture buffers (reduced from 4 to 2 for lower latency)
const DEFAULT_BUFFER_COUNT: u32 = 2; const DEFAULT_BUFFER_COUNT: u32 = 2;
@@ -280,9 +277,15 @@ fn run_capture(
return Ok(()); return Ok(());
} }
// Open device let stream = match V4l2rCaptureStream::open(
let device = match Device::with_path(&config.device_path) { &config.device_path,
Ok(d) => d, config.resolution,
config.format,
config.fps,
config.buffer_count,
config.timeout,
) {
Ok(stream) => stream,
Err(e) => { Err(e) => {
let err_str = e.to_string(); let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") { if err_str.contains("busy") || err_str.contains("resource") {
@@ -306,34 +309,7 @@ fn run_capture(
} }
}; };
// Set format return run_capture_inner(config, state, stats, stop_flag, stream);
let format = Format::new(
config.resolution.width,
config.resolution.height,
config.format.to_fourcc(),
);
let actual_format = match device.set_format(&format) {
Ok(f) => f,
Err(e) => {
let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") {
warn!(
"Device busy on set_format attempt {}/{}, retrying in {}ms...",
attempt + 1,
MAX_RETRIES,
RETRY_DELAY_MS
);
std::thread::sleep(Duration::from_millis(RETRY_DELAY_MS));
last_error = Some(AppError::VideoError(format!("Failed to set format: {}", e)));
continue;
}
return Err(AppError::VideoError(format!("Failed to set format: {}", e)));
}
};
// Device opened and format set successfully - proceed with capture
return run_capture_inner(config, state, stats, stop_flag, device, actual_format);
} }
// All retries exhausted // All retries exhausted
@@ -348,48 +324,16 @@ fn run_capture_inner(
state: &watch::Sender<CaptureState>, state: &watch::Sender<CaptureState>,
stats: &Arc<Mutex<CaptureStats>>, stats: &Arc<Mutex<CaptureStats>>,
stop_flag: &AtomicBool, stop_flag: &AtomicBool,
device: Device, mut stream: V4l2rCaptureStream,
actual_format: Format,
) -> Result<()> { ) -> Result<()> {
let resolution = stream.resolution();
let pixel_format = stream.format();
let stride = stream.stride();
info!( info!(
"Capture format: {}x{} {:?} stride={}", "Capture format: {}x{} {:?} stride={}",
actual_format.width, actual_format.height, actual_format.fourcc, actual_format.stride resolution.width, resolution.height, pixel_format, stride
); );
// Try to set hardware FPS (V4L2 VIDIOC_S_PARM)
if config.fps > 0 {
match device.set_params(&Parameters::with_fps(config.fps)) {
Ok(actual_params) => {
// Extract actual FPS from returned interval (numerator/denominator)
let actual_hw_fps = if actual_params.interval.numerator > 0 {
actual_params.interval.denominator / actual_params.interval.numerator
} else {
0
};
if actual_hw_fps == config.fps {
info!("Hardware FPS set successfully: {} fps", actual_hw_fps);
} else if actual_hw_fps > 0 {
info!(
"Hardware FPS coerced: requested {} fps, got {} fps",
config.fps, actual_hw_fps
);
} else {
warn!("Hardware FPS setting returned invalid interval");
}
}
Err(e) => {
warn!("Failed to set hardware FPS: {}", e);
}
}
}
// Create stream with mmap buffers
let mut stream =
MmapStream::with_buffers(&device, BufferType::VideoCapture, config.buffer_count)
.map_err(|e| AppError::VideoError(format!("Failed to create stream: {}", e)))?;
let _ = state.send(CaptureState::Running); let _ = state.send(CaptureState::Running);
info!("Capture started"); info!("Capture started");
@@ -397,12 +341,25 @@ fn run_capture_inner(
let mut fps_frame_count = 0u64; let mut fps_frame_count = 0u64;
let mut fps_window_start = Instant::now(); let mut fps_window_start = Instant::now();
let fps_window_duration = Duration::from_secs(1); let fps_window_duration = Duration::from_secs(1);
let mut scratch = Vec::new();
let capture_error_throttler = LogThrottler::with_secs(5);
let mut suppressed_capture_errors: HashMap<String, u64> = HashMap::new();
let classify_capture_error = |err: &std::io::Error| -> String {
let message = err.to_string();
if message.contains("dqbuf failed") && message.contains("EINVAL") {
"capture_dqbuf_einval".to_string()
} else if message.contains("dqbuf failed") {
"capture_dqbuf".to_string()
} else {
format!("capture_{:?}", err.kind())
}
};
// Main capture loop // Main capture loop
while !stop_flag.load(Ordering::Relaxed) { while !stop_flag.load(Ordering::Relaxed) {
// Try to capture a frame let meta = match stream.next_into(&mut scratch) {
let (_buf, meta) = match stream.next() { Ok(meta) => meta,
Ok(frame_data) => frame_data,
Err(e) => { Err(e) => {
if e.kind() == io::ErrorKind::TimedOut { if e.kind() == io::ErrorKind::TimedOut {
warn!("Capture timeout - no signal?"); warn!("Capture timeout - no signal?");
@@ -432,19 +389,30 @@ fn run_capture_inner(
}); });
} }
let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) {
let suppressed = suppressed_capture_errors.remove(&key).unwrap_or(0);
if suppressed > 0 {
error!("Capture error: {} (suppressed {} repeats)", e, suppressed);
} else {
error!("Capture error: {}", e); error!("Capture error: {}", e);
}
} else {
let counter = suppressed_capture_errors.entry(key).or_insert(0);
*counter = counter.saturating_add(1);
}
continue; continue;
} }
}; };
// Use actual bytes used, not buffer size // Use actual bytes used, not buffer size
let frame_size = meta.bytesused as usize; let frame_size = meta.bytes_used;
// Validate frame // Validate frame
if frame_size < MIN_FRAME_SIZE { if frame_size < MIN_FRAME_SIZE {
debug!( debug!(
"Dropping small frame: {} bytes (bytesused={})", "Dropping small frame: {} bytes (bytesused={})",
frame_size, meta.bytesused frame_size, meta.bytes_used
); );
continue; continue;
} }
@@ -470,6 +438,10 @@ fn run_capture_inner(
s.current_fps = (fps_frame_count as f32 / elapsed.as_secs_f32()).max(0.0); s.current_fps = (fps_frame_count as f32 / elapsed.as_secs_f32()).max(0.0);
} }
} }
if *state.borrow() == CaptureState::NoSignal {
let _ = state.send(CaptureState::Running);
}
} }
info!("Capture stopped"); info!("Capture stopped");
@@ -525,38 +497,37 @@ fn grab_single_frame(
resolution: Resolution, resolution: Resolution,
format: PixelFormat, format: PixelFormat,
) -> Result<VideoFrame> { ) -> Result<VideoFrame> {
let device = Device::with_path(device_path) let mut stream = V4l2rCaptureStream::open(
.map_err(|e| AppError::VideoError(format!("Failed to open device: {}", e)))?; device_path,
resolution,
let fmt = Format::new(resolution.width, resolution.height, format.to_fourcc()); format,
let actual = device 0,
.set_format(&fmt) 2,
.map_err(|e| AppError::VideoError(format!("Failed to set format: {}", e)))?; Duration::from_secs(DEFAULT_TIMEOUT),
)?;
let mut stream = MmapStream::with_buffers(&device, BufferType::VideoCapture, 2) let actual_resolution = stream.resolution();
.map_err(|e| AppError::VideoError(format!("Failed to create stream: {}", e)))?; let actual_format = stream.format();
let actual_stride = stream.stride();
let mut scratch = Vec::new();
// Try to get a valid frame (skip first few which might be bad) // Try to get a valid frame (skip first few which might be bad)
for attempt in 0..5 { for attempt in 0..5 {
match stream.next() { match stream.next_into(&mut scratch) {
Ok((buf, _meta)) => { Ok(meta) => {
if buf.len() >= MIN_FRAME_SIZE { if meta.bytes_used >= MIN_FRAME_SIZE {
let actual_format = PixelFormat::from_fourcc(actual.fourcc).unwrap_or(format);
return Ok(VideoFrame::new( return Ok(VideoFrame::new(
Bytes::copy_from_slice(buf), Bytes::copy_from_slice(&scratch[..meta.bytes_used]),
Resolution::new(actual.width, actual.height), actual_resolution,
actual_format, actual_format,
actual.stride, actual_stride,
0, 0,
)); ));
} }
} }
Err(e) => { Err(e) if attempt == 4 => {
if attempt == 4 {
return Err(AppError::VideoError(format!("Failed to grab frame: {}", e))); return Err(AppError::VideoError(format!("Failed to grab frame: {}", e)));
} }
} Err(_) => {}
} }
} }

View File

@@ -0,0 +1,193 @@
use crate::config::{AppConfig, RtspCodec, StreamMode};
use crate::error::Result;
use crate::video::encoder::registry::VideoEncoderType;
use crate::video::encoder::VideoCodecType;
use crate::video::VideoStreamManager;
use std::sync::Arc;
/// Codec restrictions derived from the currently enabled streaming backends.
///
/// When RTSP and/or RustDesk streaming is active, the set of codecs the
/// WebRTC/MJPEG pipelines may use is narrowed so that every consumer can
/// share a single encoder output.
#[derive(Debug, Clone)]
pub struct StreamCodecConstraints {
    /// Whether RustDesk streaming is enabled in the configuration.
    pub rustdesk_enabled: bool,
    /// Whether RTSP streaming is enabled in the configuration.
    pub rtsp_enabled: bool,
    /// WebRTC video codecs that remain selectable under these constraints.
    pub allowed_webrtc_codecs: Vec<VideoCodecType>,
    /// Whether the MJPEG streaming mode is still permitted.
    pub allow_mjpeg: bool,
    /// Codec the stream is pinned to, if RTSP forces a single codec.
    pub locked_codec: Option<VideoCodecType>,
    /// Human-readable explanation of why these constraints are in effect.
    pub reason: String,
}
/// Outcome of applying codec constraints to the running stream.
#[derive(Debug, Clone)]
pub struct ConstraintEnforcementResult {
    /// True if the stream mode or codec was switched to satisfy constraints.
    pub changed: bool,
    /// Optional user-facing description of the switch that was performed.
    pub message: Option<String>,
}
impl StreamCodecConstraints {
    /// Constraints that permit every codec — no lock is active.
    pub fn unrestricted() -> Self {
        let every_codec = vec![
            VideoCodecType::H264,
            VideoCodecType::H265,
            VideoCodecType::VP8,
            VideoCodecType::VP9,
        ];
        Self {
            rustdesk_enabled: false,
            rtsp_enabled: false,
            allowed_webrtc_codecs: every_codec,
            allow_mjpeg: true,
            locked_codec: None,
            reason: "No codec lock active".to_string(),
        }
    }

    /// Derive constraints from the application configuration.
    ///
    /// RTSP takes precedence: it pins the stream to its configured codec and
    /// disables MJPEG. RustDesk alone only disables MJPEG. Otherwise no
    /// restrictions apply.
    pub fn from_config(config: &AppConfig) -> Self {
        let rustdesk_enabled = config.rustdesk.enabled;
        let rtsp_enabled = config.rtsp.enabled;

        if rtsp_enabled {
            // RTSP consumers require a fixed codec, so lock everything to it.
            let locked = match config.rtsp.codec {
                RtspCodec::H264 => VideoCodecType::H264,
                RtspCodec::H265 => VideoCodecType::H265,
            };
            let reason = if rustdesk_enabled {
                format!(
                    "RTSP enabled with codec lock ({:?}) and RustDesk enabled",
                    locked
                )
            } else {
                format!("RTSP enabled with codec lock ({:?})", locked)
            };
            Self {
                rustdesk_enabled,
                rtsp_enabled,
                allowed_webrtc_codecs: vec![locked],
                allow_mjpeg: false,
                locked_codec: Some(locked),
                reason,
            }
        } else if rustdesk_enabled {
            // RustDesk needs an encoded stream; MJPEG must be off, but any
            // WebRTC codec is acceptable.
            Self {
                rustdesk_enabled,
                rtsp_enabled,
                allowed_webrtc_codecs: vec![
                    VideoCodecType::H264,
                    VideoCodecType::H265,
                    VideoCodecType::VP8,
                    VideoCodecType::VP9,
                ],
                allow_mjpeg: false,
                locked_codec: None,
                reason: "RustDesk enabled, MJPEG disabled".to_string(),
            }
        } else {
            Self::unrestricted()
        }
    }

    /// Whether MJPEG streaming is permitted under these constraints.
    pub fn is_mjpeg_allowed(&self) -> bool {
        self.allow_mjpeg
    }

    /// Whether the given WebRTC codec is permitted under these constraints.
    pub fn is_webrtc_codec_allowed(&self, codec: VideoCodecType) -> bool {
        self.allowed_webrtc_codecs.contains(&codec)
    }

    /// The codec to switch to when enforcement is needed: the locked codec if
    /// one exists, otherwise the first allowed codec (H.264 as last resort).
    pub fn preferred_webrtc_codec(&self) -> VideoCodecType {
        self.locked_codec.unwrap_or_else(|| {
            self.allowed_webrtc_codecs
                .first()
                .copied()
                .unwrap_or(VideoCodecType::H264)
        })
    }

    /// API-facing list of codec identifiers currently allowed ("mjpeg" first
    /// when permitted, then the WebRTC codecs).
    pub fn allowed_codecs_for_api(&self) -> Vec<&'static str> {
        self.allow_mjpeg
            .then_some("mjpeg")
            .into_iter()
            .chain(self.allowed_webrtc_codecs.iter().map(|c| codec_to_id(*c)))
            .collect()
    }
}
/// Bring the running stream into compliance with `constraints`.
///
/// If MJPEG is active but no longer allowed, switches to WebRTC with the
/// preferred codec. If WebRTC is active with a disallowed codec, switches to
/// the preferred codec. Returns whether anything changed and a description of
/// the change, if any.
pub async fn enforce_constraints_with_stream_manager(
    stream_manager: &Arc<VideoStreamManager>,
    constraints: &StreamCodecConstraints,
) -> Result<ConstraintEnforcementResult> {
    let mode = stream_manager.current_mode().await;

    match mode {
        // MJPEG running while forbidden: move the stream to WebRTC.
        StreamMode::Mjpeg if !constraints.allow_mjpeg => {
            let target = constraints.preferred_webrtc_codec();
            stream_manager.set_video_codec(target).await?;
            let _ = stream_manager
                .switch_mode_transaction(StreamMode::WebRTC)
                .await?;
            Ok(ConstraintEnforcementResult {
                changed: true,
                message: Some(format!(
                    "Auto-switched from MJPEG to {} due to codec lock",
                    codec_to_id(target)
                )),
            })
        }
        // WebRTC running: verify its codec is still permitted.
        StreamMode::WebRTC => {
            let current = stream_manager.webrtc_streamer().current_video_codec().await;
            if constraints.is_webrtc_codec_allowed(current) {
                Ok(ConstraintEnforcementResult {
                    changed: false,
                    message: None,
                })
            } else {
                let target = constraints.preferred_webrtc_codec();
                stream_manager.set_video_codec(target).await?;
                Ok(ConstraintEnforcementResult {
                    changed: true,
                    message: Some(format!(
                        "Auto-switched codec from {} to {} due to codec lock",
                        codec_to_id(current),
                        codec_to_id(target)
                    )),
                })
            }
        }
        // Any other mode (including permitted MJPEG) needs no action.
        _ => Ok(ConstraintEnforcementResult {
            changed: false,
            message: None,
        }),
    }
}
pub fn codec_to_id(codec: VideoCodecType) -> &'static str {
match codec {
VideoCodecType::H264 => "h264",
VideoCodecType::H265 => "h265",
VideoCodecType::VP8 => "vp8",
VideoCodecType::VP9 => "vp9",
}
}
pub fn encoder_codec_to_id(codec: VideoEncoderType) -> &'static str {
match codec {
VideoEncoderType::H264 => "h264",
VideoEncoderType::H265 => "h265",
VideoEncoderType::VP8 => "vp8",
VideoEncoderType::VP9 => "vp9",
}
}
pub fn video_codec_to_encoder_codec(codec: VideoCodecType) -> VideoEncoderType {
match codec {
VideoCodecType::H264 => VideoEncoderType::H264,
VideoCodecType::H265 => VideoEncoderType::H265,
VideoCodecType::VP8 => VideoEncoderType::VP8,
VideoCodecType::VP9 => VideoEncoderType::VP9,
}
}
pub fn encoder_codec_to_video_codec(codec: VideoEncoderType) -> VideoCodecType {
match codec {
VideoEncoderType::H264 => VideoCodecType::H264,
VideoEncoderType::H265 => VideoCodecType::H265,
VideoEncoderType::VP8 => VideoCodecType::VP8,
VideoEncoderType::VP9 => VideoCodecType::VP9,
}
}

View File

@@ -1,15 +1,17 @@
//! V4L2 device enumeration and capability query //! V4L2 device enumeration and capability query
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fs::File;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::mpsc; use std::sync::mpsc;
use std::time::Duration; use std::time::Duration;
use tracing::{debug, info, warn}; use tracing::{debug, info, warn};
use v4l::capability::Flags; use v4l2r::bindings::{v4l2_frmivalenum, v4l2_frmsizeenum};
use v4l::prelude::*; use v4l2r::ioctl::{
use v4l::video::Capture; self, Capabilities, Capability as V4l2rCapability, FormatIterator, FrmIvalTypes, FrmSizeTypes,
use v4l::Format; };
use v4l::FourCC; use v4l2r::nix::errno::Errno;
use v4l2r::{Format as V4l2rFormat, QueueType};
use super::format::{PixelFormat, Resolution}; use super::format::{PixelFormat, Resolution};
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
@@ -81,7 +83,7 @@ pub struct DeviceCapabilities {
/// Wrapper around a V4L2 video device /// Wrapper around a V4L2 video device
pub struct VideoDevice { pub struct VideoDevice {
pub path: PathBuf, pub path: PathBuf,
device: Device, fd: File,
} }
impl VideoDevice { impl VideoDevice {
@@ -90,42 +92,55 @@ impl VideoDevice {
let path = path.as_ref().to_path_buf(); let path = path.as_ref().to_path_buf();
debug!("Opening video device: {:?}", path); debug!("Opening video device: {:?}", path);
let device = Device::with_path(&path).map_err(|e| { let fd = File::options()
.read(true)
.write(true)
.open(&path)
.map_err(|e| {
AppError::VideoError(format!("Failed to open device {:?}: {}", path, e)) AppError::VideoError(format!("Failed to open device {:?}: {}", path, e))
})?; })?;
Ok(Self { path, device }) Ok(Self { path, fd })
}
/// Open a video device read-only (for probing/enumeration)
pub fn open_readonly(path: impl AsRef<Path>) -> Result<Self> {
let path = path.as_ref().to_path_buf();
debug!("Opening video device (read-only): {:?}", path);
let fd = File::options().read(true).open(&path).map_err(|e| {
AppError::VideoError(format!("Failed to open device {:?}: {}", path, e))
})?;
Ok(Self { path, fd })
} }
/// Get device capabilities /// Get device capabilities
pub fn capabilities(&self) -> Result<DeviceCapabilities> { pub fn capabilities(&self) -> Result<DeviceCapabilities> {
let caps = self let caps: V4l2rCapability = ioctl::querycap(&self.fd)
.device
.query_caps()
.map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?;
let flags = caps.device_caps();
Ok(DeviceCapabilities { Ok(DeviceCapabilities {
video_capture: caps.capabilities.contains(Flags::VIDEO_CAPTURE), video_capture: flags.contains(Capabilities::VIDEO_CAPTURE),
video_capture_mplane: caps.capabilities.contains(Flags::VIDEO_CAPTURE_MPLANE), video_capture_mplane: flags.contains(Capabilities::VIDEO_CAPTURE_MPLANE),
video_output: caps.capabilities.contains(Flags::VIDEO_OUTPUT), video_output: flags.contains(Capabilities::VIDEO_OUTPUT),
streaming: caps.capabilities.contains(Flags::STREAMING), streaming: flags.contains(Capabilities::STREAMING),
read_write: caps.capabilities.contains(Flags::READ_WRITE), read_write: flags.contains(Capabilities::READWRITE),
}) })
} }
/// Get detailed device information /// Get detailed device information
pub fn info(&self) -> Result<VideoDeviceInfo> { pub fn info(&self) -> Result<VideoDeviceInfo> {
let caps = self let caps: V4l2rCapability = ioctl::querycap(&self.fd)
.device
.query_caps()
.map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?;
let flags = caps.device_caps();
let capabilities = DeviceCapabilities { let capabilities = DeviceCapabilities {
video_capture: caps.capabilities.contains(Flags::VIDEO_CAPTURE), video_capture: flags.contains(Capabilities::VIDEO_CAPTURE),
video_capture_mplane: caps.capabilities.contains(Flags::VIDEO_CAPTURE_MPLANE), video_capture_mplane: flags.contains(Capabilities::VIDEO_CAPTURE_MPLANE),
video_output: caps.capabilities.contains(Flags::VIDEO_OUTPUT), video_output: flags.contains(Capabilities::VIDEO_OUTPUT),
streaming: caps.capabilities.contains(Flags::STREAMING), streaming: flags.contains(Capabilities::STREAMING),
read_write: caps.capabilities.contains(Flags::READ_WRITE), read_write: flags.contains(Capabilities::READWRITE),
}; };
let formats = self.enumerate_formats()?; let formats = self.enumerate_formats()?;
@@ -141,7 +156,7 @@ impl VideoDevice {
path: self.path.clone(), path: self.path.clone(),
name: caps.card.clone(), name: caps.card.clone(),
driver: caps.driver.clone(), driver: caps.driver.clone(),
bus_info: caps.bus.clone(), bus_info: caps.bus_info.clone(),
card: caps.card, card: caps.card,
formats, formats,
capabilities, capabilities,
@@ -154,16 +169,13 @@ impl VideoDevice {
pub fn enumerate_formats(&self) -> Result<Vec<FormatInfo>> { pub fn enumerate_formats(&self) -> Result<Vec<FormatInfo>> {
let mut formats = Vec::new(); let mut formats = Vec::new();
// Get supported formats let queue = self.capture_queue_type()?;
let format_descs = self let format_descs = FormatIterator::new(&self.fd, queue);
.device
.enum_formats()
.map_err(|e| AppError::VideoError(format!("Failed to enumerate formats: {}", e)))?;
for desc in format_descs { for desc in format_descs {
// Try to convert FourCC to our PixelFormat // Try to convert FourCC to our PixelFormat
if let Some(format) = PixelFormat::from_fourcc(desc.fourcc) { if let Some(format) = PixelFormat::from_v4l2r(desc.pixelformat) {
let resolutions = self.enumerate_resolutions(desc.fourcc)?; let resolutions = self.enumerate_resolutions(desc.pixelformat)?;
formats.push(FormatInfo { formats.push(FormatInfo {
format, format,
@@ -173,7 +185,7 @@ impl VideoDevice {
} else { } else {
debug!( debug!(
"Skipping unsupported format: {:?} ({})", "Skipping unsupported format: {:?} ({})",
desc.fourcc, desc.description desc.pixelformat, desc.description
); );
} }
} }
@@ -185,22 +197,22 @@ impl VideoDevice {
} }
/// Enumerate resolutions for a specific format /// Enumerate resolutions for a specific format
fn enumerate_resolutions(&self, fourcc: FourCC) -> Result<Vec<ResolutionInfo>> { fn enumerate_resolutions(&self, fourcc: v4l2r::PixelFormat) -> Result<Vec<ResolutionInfo>> {
let mut resolutions = Vec::new(); let mut resolutions = Vec::new();
// Try to enumerate frame sizes let mut index = 0u32;
match self.device.enum_framesizes(fourcc) { loop {
Ok(sizes) => { match ioctl::enum_frame_sizes::<v4l2_frmsizeenum>(&self.fd, index, fourcc) {
for size in sizes { Ok(size) => {
match size.size { if let Some(size) = size.size() {
v4l::framesize::FrameSizeEnum::Discrete(d) => { match size {
FrmSizeTypes::Discrete(d) => {
let fps = self let fps = self
.enumerate_fps(fourcc, d.width, d.height) .enumerate_fps(fourcc, d.width, d.height)
.unwrap_or_default(); .unwrap_or_default();
resolutions.push(ResolutionInfo::new(d.width, d.height, fps)); resolutions.push(ResolutionInfo::new(d.width, d.height, fps));
} }
v4l::framesize::FrameSizeEnum::Stepwise(s) => { FrmSizeTypes::StepWise(s) => {
// For stepwise, add some common resolutions
for res in [ for res in [
Resolution::VGA, Resolution::VGA,
Resolution::HD720, Resolution::HD720,
@@ -222,10 +234,19 @@ impl VideoDevice {
} }
} }
} }
index += 1;
} }
Err(e) => { Err(e) => {
let is_einval = matches!(
e,
v4l2r::ioctl::FrameSizeError::IoctlError(err) if err == Errno::EINVAL
);
if !is_einval {
debug!("Failed to enumerate frame sizes for {:?}: {}", fourcc, e); debug!("Failed to enumerate frame sizes for {:?}: {}", fourcc, e);
} }
break;
}
}
} }
// Sort by resolution (largest first) // Sort by resolution (largest first)
@@ -236,21 +257,29 @@ impl VideoDevice {
} }
/// Enumerate FPS for a specific resolution /// Enumerate FPS for a specific resolution
fn enumerate_fps(&self, fourcc: FourCC, width: u32, height: u32) -> Result<Vec<u32>> { fn enumerate_fps(
&self,
fourcc: v4l2r::PixelFormat,
width: u32,
height: u32,
) -> Result<Vec<u32>> {
let mut fps_list = Vec::new(); let mut fps_list = Vec::new();
match self.device.enum_frameintervals(fourcc, width, height) { let mut index = 0u32;
Ok(intervals) => { loop {
for interval in intervals { match ioctl::enum_frame_intervals::<v4l2_frmivalenum>(
match interval.interval { &self.fd, index, fourcc, width, height,
v4l::frameinterval::FrameIntervalEnum::Discrete(fraction) => { ) {
Ok(interval) => {
if let Some(interval) = interval.intervals() {
match interval {
FrmIvalTypes::Discrete(fraction) => {
if fraction.numerator > 0 { if fraction.numerator > 0 {
let fps = fraction.denominator / fraction.numerator; let fps = fraction.denominator / fraction.numerator;
fps_list.push(fps); fps_list.push(fps);
} }
} }
v4l::frameinterval::FrameIntervalEnum::Stepwise(step) => { FrmIvalTypes::StepWise(step) => {
// Just pick max/min/step
if step.max.numerator > 0 { if step.max.numerator > 0 {
let min_fps = step.max.denominator / step.max.numerator; let min_fps = step.max.denominator / step.max.numerator;
let max_fps = step.min.denominator / step.min.numerator; let max_fps = step.min.denominator / step.min.numerator;
@@ -262,10 +291,21 @@ impl VideoDevice {
} }
} }
} }
index += 1;
}
Err(e) => {
let is_einval = matches!(
e,
v4l2r::ioctl::FrameIntervalsError::IoctlError(err) if err == Errno::EINVAL
);
if !is_einval {
debug!(
"Failed to enumerate frame intervals for {:?} {}x{}: {}",
fourcc, width, height, e
);
}
break;
} }
Err(_) => {
// If enumeration fails, assume 30fps
fps_list.push(30);
} }
} }
@@ -275,20 +315,26 @@ impl VideoDevice {
} }
/// Get current format /// Get current format
pub fn get_format(&self) -> Result<Format> { pub fn get_format(&self) -> Result<V4l2rFormat> {
self.device let queue = self.capture_queue_type()?;
.format() ioctl::g_fmt(&self.fd, queue)
.map_err(|e| AppError::VideoError(format!("Failed to get format: {}", e))) .map_err(|e| AppError::VideoError(format!("Failed to get format: {}", e)))
} }
/// Set capture format /// Set capture format
pub fn set_format(&self, width: u32, height: u32, format: PixelFormat) -> Result<Format> { pub fn set_format(&self, width: u32, height: u32, format: PixelFormat) -> Result<V4l2rFormat> {
let fmt = Format::new(width, height, format.to_fourcc()); let queue = self.capture_queue_type()?;
let mut fmt: V4l2rFormat = ioctl::g_fmt(&self.fd, queue)
.map_err(|e| AppError::VideoError(format!("Failed to get format: {}", e)))?;
fmt.width = width;
fmt.height = height;
fmt.pixelformat = format.to_v4l2r();
// Request the format let mut fd = self
let actual = self .fd
.device .try_clone()
.set_format(&fmt) .map_err(|e| AppError::VideoError(format!("Failed to clone device fd: {}", e)))?;
let actual: V4l2rFormat = ioctl::s_fmt(&mut fd, (queue, &fmt))
.map_err(|e| AppError::VideoError(format!("Failed to set format: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Failed to set format: {}", e)))?;
if actual.width != width || actual.height != height { if actual.width != width || actual.height != height {
@@ -364,7 +410,7 @@ impl VideoDevice {
.max() .max()
.unwrap_or(0); .unwrap_or(0);
priority += (max_resolution / 100000) as u32; priority += max_resolution / 100000;
// Known good drivers get bonus // Known good drivers get bonus
let good_drivers = ["uvcvideo", "tc358743"]; let good_drivers = ["uvcvideo", "tc358743"];
@@ -376,8 +422,21 @@ impl VideoDevice {
} }
/// Get the inner device reference (for advanced usage) /// Get the inner device reference (for advanced usage)
pub fn inner(&self) -> &Device { pub fn inner(&self) -> &File {
&self.device &self.fd
}
fn capture_queue_type(&self) -> Result<QueueType> {
let caps = self.capabilities()?;
if caps.video_capture {
Ok(QueueType::VideoCapture)
} else if caps.video_capture_mplane {
Ok(QueueType::VideoCaptureMplane)
} else {
Err(AppError::VideoError(
"Device does not expose a capture queue".to_string(),
))
}
} }
} }
@@ -446,7 +505,7 @@ fn probe_device_with_timeout(path: &Path, timeout: Duration) -> Option<VideoDevi
std::thread::spawn(move || { std::thread::spawn(move || {
let result = (|| -> Result<VideoDeviceInfo> { let result = (|| -> Result<VideoDeviceInfo> {
let device = VideoDevice::open(&path_for_thread)?; let device = VideoDevice::open_readonly(&path_for_thread)?;
device.info() device.info()
})(); })();
let _ = tx.send(result); let _ = tx.send(result);
@@ -503,15 +562,7 @@ fn sysfs_maybe_capture(path: &Path) -> bool {
} }
let skip_hints = [ let skip_hints = [
"codec", "codec", "decoder", "encoder", "isp", "mem2mem", "m2m", "vbi", "radio", "metadata",
"decoder",
"encoder",
"isp",
"mem2mem",
"m2m",
"vbi",
"radio",
"metadata",
"output", "output",
]; ];
if skip_hints.iter().any(|hint| sysfs_name.contains(hint)) && !maybe_capture { if skip_hints.iter().any(|hint| sysfs_name.contains(hint)) && !maybe_capture {

View File

@@ -32,7 +32,7 @@ fn init_hwcodec_logging() {
} }
/// H.264 encoder type (detected from hwcodec) /// H.264 encoder type (detected from hwcodec)
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq, Default)]
pub enum H264EncoderType { pub enum H264EncoderType {
/// NVIDIA NVENC /// NVIDIA NVENC
Nvenc, Nvenc,
@@ -49,6 +49,7 @@ pub enum H264EncoderType {
/// Software encoding (libx264/openh264) /// Software encoding (libx264/openh264)
Software, Software,
/// No encoder available /// No encoder available
#[default]
None, None,
} }
@@ -67,12 +68,6 @@ impl std::fmt::Display for H264EncoderType {
} }
} }
impl Default for H264EncoderType {
fn default() -> Self {
Self::None
}
}
/// Map codec name to encoder type /// Map codec name to encoder type
fn codec_name_to_type(name: &str) -> H264EncoderType { fn codec_name_to_type(name: &str) -> H264EncoderType {
if name.contains("nvenc") { if name.contains("nvenc") {
@@ -93,11 +88,12 @@ fn codec_name_to_type(name: &str) -> H264EncoderType {
} }
/// Input pixel format for H264 encoder /// Input pixel format for H264 encoder
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum H264InputFormat { pub enum H264InputFormat {
/// YUV420P (I420) - planar Y, U, V /// YUV420P (I420) - planar Y, U, V
Yuv420p, Yuv420p,
/// NV12 - Y plane + interleaved UV plane (optimal for VAAPI) /// NV12 - Y plane + interleaved UV plane (optimal for VAAPI)
#[default]
Nv12, Nv12,
/// NV21 - Y plane + interleaved VU plane /// NV21 - Y plane + interleaved VU plane
Nv21, Nv21,
@@ -113,12 +109,6 @@ pub enum H264InputFormat {
Bgr24, Bgr24,
} }
impl Default for H264InputFormat {
fn default() -> Self {
Self::Nv12 // Default to NV12 for VAAPI compatibility
}
}
/// H.264 encoder configuration /// H.264 encoder configuration
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct H264Config { pub struct H264Config {

View File

@@ -30,7 +30,7 @@ fn init_hwcodec_logging() {
} }
/// H.265 encoder type (detected from hwcodec) /// H.265 encoder type (detected from hwcodec)
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq, Default)]
pub enum H265EncoderType { pub enum H265EncoderType {
/// NVIDIA NVENC /// NVIDIA NVENC
Nvenc, Nvenc,
@@ -47,6 +47,7 @@ pub enum H265EncoderType {
/// Software encoder (libx265) /// Software encoder (libx265)
Software, Software,
/// No encoder available /// No encoder available
#[default]
None, None,
} }
@@ -65,12 +66,6 @@ impl std::fmt::Display for H265EncoderType {
} }
} }
impl Default for H265EncoderType {
fn default() -> Self {
Self::None
}
}
impl From<EncoderBackend> for H265EncoderType { impl From<EncoderBackend> for H265EncoderType {
fn from(backend: EncoderBackend) -> Self { fn from(backend: EncoderBackend) -> Self {
match backend { match backend {
@@ -86,11 +81,12 @@ impl From<EncoderBackend> for H265EncoderType {
} }
/// Input pixel format for H265 encoder /// Input pixel format for H265 encoder
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum H265InputFormat { pub enum H265InputFormat {
/// YUV420P (I420) - planar Y, U, V /// YUV420P (I420) - planar Y, U, V
Yuv420p, Yuv420p,
/// NV12 - Y plane + interleaved UV plane (optimal for hardware encoders) /// NV12 - Y plane + interleaved UV plane (optimal for hardware encoders)
#[default]
Nv12, Nv12,
/// NV21 - Y plane + interleaved VU plane /// NV21 - Y plane + interleaved VU plane
Nv21, Nv21,
@@ -106,12 +102,6 @@ pub enum H265InputFormat {
Bgr24, Bgr24,
} }
impl Default for H265InputFormat {
fn default() -> Self {
Self::Nv12 // Default to NV12 for hardware encoder compatibility
}
}
/// H.265 encoder configuration /// H.265 encoder configuration
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct H265Config { pub struct H265Config {
@@ -256,8 +246,6 @@ pub fn detect_best_h265_encoder(width: u32, height: u32) -> (H265EncoderType, Op
H265EncoderType::Rkmpp H265EncoderType::Rkmpp
} else if codec.name.contains("v4l2m2m") { } else if codec.name.contains("v4l2m2m") {
H265EncoderType::V4l2M2m H265EncoderType::V4l2M2m
} else if codec.name.contains("libx265") {
H265EncoderType::Software
} else { } else {
H265EncoderType::Software // Default to software for unknown H265EncoderType::Software // Default to software for unknown
}; };

View File

@@ -145,6 +145,7 @@ impl EncoderBackend {
} }
/// Parse from string (case-insensitive) /// Parse from string (case-insensitive)
#[allow(clippy::should_implement_trait)]
pub fn from_str(s: &str) -> Option<Self> { pub fn from_str(s: &str) -> Option<Self> {
match s.to_lowercase().as_str() { match s.to_lowercase().as_str() {
"vaapi" => Some(EncoderBackend::Vaapi), "vaapi" => Some(EncoderBackend::Vaapi),

View File

@@ -15,12 +15,14 @@ use crate::video::format::{PixelFormat, Resolution};
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type", content = "value")] #[serde(tag = "type", content = "value")]
#[derive(Default)]
pub enum BitratePreset { pub enum BitratePreset {
/// Speed priority: 1 Mbps, lowest latency, smaller GOP /// Speed priority: 1 Mbps, lowest latency, smaller GOP
/// Best for: slow networks, remote management, low-bandwidth scenarios /// Best for: slow networks, remote management, low-bandwidth scenarios
Speed, Speed,
/// Balanced: 4 Mbps, good quality/latency tradeoff /// Balanced: 4 Mbps, good quality/latency tradeoff
/// Best for: typical usage, recommended default /// Best for: typical usage, recommended default
#[default]
Balanced, Balanced,
/// Quality priority: 8 Mbps, best visual quality /// Quality priority: 8 Mbps, best visual quality
/// Best for: local network, high-bandwidth scenarios, detailed work /// Best for: local network, high-bandwidth scenarios, detailed work
@@ -74,12 +76,6 @@ impl BitratePreset {
} }
} }
impl Default for BitratePreset {
fn default() -> Self {
Self::Balanced
}
}
impl std::fmt::Display for BitratePreset { impl std::fmt::Display for BitratePreset {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self { match self {

View File

@@ -30,13 +30,14 @@ fn init_hwcodec_logging() {
} }
/// VP8 encoder type (detected from hwcodec) /// VP8 encoder type (detected from hwcodec)
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq, Default)]
pub enum VP8EncoderType { pub enum VP8EncoderType {
/// VAAPI (Intel on Linux) /// VAAPI (Intel on Linux)
Vaapi, Vaapi,
/// Software encoder (libvpx) /// Software encoder (libvpx)
Software, Software,
/// No encoder available /// No encoder available
#[default]
None, None,
} }
@@ -50,12 +51,6 @@ impl std::fmt::Display for VP8EncoderType {
} }
} }
impl Default for VP8EncoderType {
fn default() -> Self {
Self::None
}
}
impl From<EncoderBackend> for VP8EncoderType { impl From<EncoderBackend> for VP8EncoderType {
fn from(backend: EncoderBackend) -> Self { fn from(backend: EncoderBackend) -> Self {
match backend { match backend {
@@ -67,20 +62,15 @@ impl From<EncoderBackend> for VP8EncoderType {
} }
/// Input pixel format for VP8 encoder /// Input pixel format for VP8 encoder
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum VP8InputFormat { pub enum VP8InputFormat {
/// YUV420P (I420) - planar Y, U, V /// YUV420P (I420) - planar Y, U, V
Yuv420p, Yuv420p,
/// NV12 - Y plane + interleaved UV plane /// NV12 - Y plane + interleaved UV plane
#[default]
Nv12, Nv12,
} }
impl Default for VP8InputFormat {
fn default() -> Self {
Self::Nv12 // Default to NV12 for VAAPI compatibility
}
}
/// VP8 encoder configuration /// VP8 encoder configuration
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct VP8Config { pub struct VP8Config {
@@ -180,8 +170,6 @@ pub fn detect_best_vp8_encoder(width: u32, height: u32) -> (VP8EncoderType, Opti
let encoder_type = if codec.name.contains("vaapi") { let encoder_type = if codec.name.contains("vaapi") {
VP8EncoderType::Vaapi VP8EncoderType::Vaapi
} else if codec.name.contains("libvpx") {
VP8EncoderType::Software
} else { } else {
VP8EncoderType::Software // Default to software for unknown VP8EncoderType::Software // Default to software for unknown
}; };

View File

@@ -30,13 +30,14 @@ fn init_hwcodec_logging() {
} }
/// VP9 encoder type (detected from hwcodec) /// VP9 encoder type (detected from hwcodec)
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq, Default)]
pub enum VP9EncoderType { pub enum VP9EncoderType {
/// VAAPI (Intel on Linux) /// VAAPI (Intel on Linux)
Vaapi, Vaapi,
/// Software encoder (libvpx-vp9) /// Software encoder (libvpx-vp9)
Software, Software,
/// No encoder available /// No encoder available
#[default]
None, None,
} }
@@ -50,12 +51,6 @@ impl std::fmt::Display for VP9EncoderType {
} }
} }
impl Default for VP9EncoderType {
fn default() -> Self {
Self::None
}
}
impl From<EncoderBackend> for VP9EncoderType { impl From<EncoderBackend> for VP9EncoderType {
fn from(backend: EncoderBackend) -> Self { fn from(backend: EncoderBackend) -> Self {
match backend { match backend {
@@ -67,20 +62,15 @@ impl From<EncoderBackend> for VP9EncoderType {
} }
/// Input pixel format for VP9 encoder /// Input pixel format for VP9 encoder
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum VP9InputFormat { pub enum VP9InputFormat {
/// YUV420P (I420) - planar Y, U, V /// YUV420P (I420) - planar Y, U, V
Yuv420p, Yuv420p,
/// NV12 - Y plane + interleaved UV plane /// NV12 - Y plane + interleaved UV plane
#[default]
Nv12, Nv12,
} }
impl Default for VP9InputFormat {
fn default() -> Self {
Self::Nv12 // Default to NV12 for VAAPI compatibility
}
}
/// VP9 encoder configuration /// VP9 encoder configuration
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct VP9Config { pub struct VP9Config {
@@ -180,8 +170,6 @@ pub fn detect_best_vp9_encoder(width: u32, height: u32) -> (VP9EncoderType, Opti
let encoder_type = if codec.name.contains("vaapi") { let encoder_type = if codec.name.contains("vaapi") {
VP9EncoderType::Vaapi VP9EncoderType::Vaapi
} else if codec.name.contains("libvpx") {
VP9EncoderType::Software
} else { } else {
VP9EncoderType::Software // Default to software for unknown VP9EncoderType::Software // Default to software for unknown
}; };

View File

@@ -2,7 +2,7 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt; use std::fmt;
use v4l::format::fourcc; use v4l2r::PixelFormat as V4l2rPixelFormat;
/// Supported pixel formats /// Supported pixel formats
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
@@ -41,30 +41,29 @@ pub enum PixelFormat {
} }
impl PixelFormat { impl PixelFormat {
/// Convert to V4L2 FourCC /// Convert to V4L2 FourCC bytes
pub fn to_fourcc(&self) -> fourcc::FourCC { pub fn to_fourcc(&self) -> [u8; 4] {
match self { match self {
PixelFormat::Mjpeg => fourcc::FourCC::new(b"MJPG"), PixelFormat::Mjpeg => *b"MJPG",
PixelFormat::Jpeg => fourcc::FourCC::new(b"JPEG"), PixelFormat::Jpeg => *b"JPEG",
PixelFormat::Yuyv => fourcc::FourCC::new(b"YUYV"), PixelFormat::Yuyv => *b"YUYV",
PixelFormat::Yvyu => fourcc::FourCC::new(b"YVYU"), PixelFormat::Yvyu => *b"YVYU",
PixelFormat::Uyvy => fourcc::FourCC::new(b"UYVY"), PixelFormat::Uyvy => *b"UYVY",
PixelFormat::Nv12 => fourcc::FourCC::new(b"NV12"), PixelFormat::Nv12 => *b"NV12",
PixelFormat::Nv21 => fourcc::FourCC::new(b"NV21"), PixelFormat::Nv21 => *b"NV21",
PixelFormat::Nv16 => fourcc::FourCC::new(b"NV16"), PixelFormat::Nv16 => *b"NV16",
PixelFormat::Nv24 => fourcc::FourCC::new(b"NV24"), PixelFormat::Nv24 => *b"NV24",
PixelFormat::Yuv420 => fourcc::FourCC::new(b"YU12"), PixelFormat::Yuv420 => *b"YU12",
PixelFormat::Yvu420 => fourcc::FourCC::new(b"YV12"), PixelFormat::Yvu420 => *b"YV12",
PixelFormat::Rgb565 => fourcc::FourCC::new(b"RGBP"), PixelFormat::Rgb565 => *b"RGBP",
PixelFormat::Rgb24 => fourcc::FourCC::new(b"RGB3"), PixelFormat::Rgb24 => *b"RGB3",
PixelFormat::Bgr24 => fourcc::FourCC::new(b"BGR3"), PixelFormat::Bgr24 => *b"BGR3",
PixelFormat::Grey => fourcc::FourCC::new(b"GREY"), PixelFormat::Grey => *b"GREY",
} }
} }
/// Try to convert from V4L2 FourCC /// Try to convert from V4L2 FourCC
pub fn from_fourcc(fourcc: fourcc::FourCC) -> Option<Self> { pub fn from_fourcc(repr: [u8; 4]) -> Option<Self> {
let repr = fourcc.repr;
match &repr { match &repr {
b"MJPG" => Some(PixelFormat::Mjpeg), b"MJPG" => Some(PixelFormat::Mjpeg),
b"JPEG" => Some(PixelFormat::Jpeg), b"JPEG" => Some(PixelFormat::Jpeg),
@@ -85,6 +84,17 @@ impl PixelFormat {
} }
} }
/// Convert to v4l2r PixelFormat
pub fn to_v4l2r(&self) -> V4l2rPixelFormat {
V4l2rPixelFormat::from(&self.to_fourcc())
}
/// Convert from v4l2r PixelFormat
pub fn from_v4l2r(format: V4l2rPixelFormat) -> Option<Self> {
let repr: [u8; 4] = format.into();
Self::from_fourcc(repr)
}
/// Check if format is compressed (JPEG/MJPEG) /// Check if format is compressed (JPEG/MJPEG)
pub fn is_compressed(&self) -> bool { pub fn is_compressed(&self) -> bool {
matches!(self, PixelFormat::Mjpeg | PixelFormat::Jpeg) matches!(self, PixelFormat::Mjpeg | PixelFormat::Jpeg)

View File

@@ -81,6 +81,11 @@ impl FrameBuffer {
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
self.data.len() self.data.len()
} }
/// Check if the frame buffer has no data
pub fn is_empty(&self) -> bool {
self.data.is_empty()
}
} }
impl std::fmt::Debug for FrameBuffer { impl std::fmt::Debug for FrameBuffer {

View File

@@ -3,6 +3,7 @@
//! This module provides V4L2 video capture, encoding, and streaming functionality. //! This module provides V4L2 video capture, encoding, and streaming functionality.
pub mod capture; pub mod capture;
pub mod codec_constraints;
pub mod convert; pub mod convert;
pub mod decoder; pub mod decoder;
pub mod device; pub mod device;
@@ -13,6 +14,7 @@ pub mod h264_pipeline;
pub mod shared_video_pipeline; pub mod shared_video_pipeline;
pub mod stream_manager; pub mod stream_manager;
pub mod streamer; pub mod streamer;
pub mod v4l2r_capture;
pub mod video_session; pub mod video_session;
pub use capture::VideoCapturer; pub use capture::VideoCapturer;

View File

@@ -18,6 +18,7 @@
use bytes::Bytes; use bytes::Bytes;
use parking_lot::RwLock as ParkingRwLock; use parking_lot::RwLock as ParkingRwLock;
use std::collections::HashMap;
use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering}; use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
@@ -26,22 +27,17 @@ use tracing::{debug, error, info, trace, warn};
/// Grace period before auto-stopping pipeline when no subscribers (in seconds) /// Grace period before auto-stopping pipeline when no subscribers (in seconds)
const AUTO_STOP_GRACE_PERIOD_SECS: u64 = 3; const AUTO_STOP_GRACE_PERIOD_SECS: u64 = 3;
/// Restart capture stream after this many consecutive timeouts.
const CAPTURE_TIMEOUT_RESTART_THRESHOLD: u32 = 5;
/// Minimum valid frame size for capture /// Minimum valid frame size for capture
const MIN_CAPTURE_FRAME_SIZE: usize = 128; const MIN_CAPTURE_FRAME_SIZE: usize = 128;
/// Validate JPEG header every N frames to reduce overhead /// Validate JPEG header every N frames to reduce overhead
const JPEG_VALIDATE_INTERVAL: u64 = 30; const JPEG_VALIDATE_INTERVAL: u64 = 30;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::utils::LogThrottler;
use crate::video::convert::{Nv12Converter, PixelConverter}; use crate::video::convert::{Nv12Converter, PixelConverter};
use crate::video::decoder::MjpegTurboDecoder; use crate::video::decoder::MjpegTurboDecoder;
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
use hwcodec::ffmpeg_hw::{last_error_message as ffmpeg_hw_last_error, HwMjpegH26xConfig, HwMjpegH26xPipeline};
use v4l::buffer::Type as BufferType;
use v4l::io::traits::CaptureStream;
use v4l::prelude::*;
use v4l::video::Capture;
use v4l::video::capture::Parameters;
use v4l::Format;
use crate::video::encoder::h264::{detect_best_encoder, H264Config, H264Encoder, H264InputFormat}; use crate::video::encoder::h264::{detect_best_encoder, H264Config, H264Encoder, H264InputFormat};
use crate::video::encoder::h265::{ use crate::video::encoder::h265::{
detect_best_h265_encoder, H265Config, H265Encoder, H265InputFormat, detect_best_h265_encoder, H265Config, H265Encoder, H265InputFormat,
@@ -52,6 +48,11 @@ use crate::video::encoder::vp8::{detect_best_vp8_encoder, VP8Config, VP8Encoder}
use crate::video::encoder::vp9::{detect_best_vp9_encoder, VP9Config, VP9Encoder}; use crate::video::encoder::vp9::{detect_best_vp9_encoder, VP9Config, VP9Encoder};
use crate::video::format::{PixelFormat, Resolution}; use crate::video::format::{PixelFormat, Resolution};
use crate::video::frame::{FrameBuffer, FrameBufferPool, VideoFrame}; use crate::video::frame::{FrameBuffer, FrameBufferPool, VideoFrame};
use crate::video::v4l2r_capture::V4l2rCaptureStream;
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
use hwcodec::ffmpeg_hw::{
last_error_message as ffmpeg_hw_last_error, HwMjpegH26xConfig, HwMjpegH26xPipeline,
};
/// Encoded video frame for distribution /// Encoded video frame for distribution
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@@ -511,7 +512,10 @@ impl SharedVideoPipeline {
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))] #[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
if needs_mjpeg_decode if needs_mjpeg_decode
&& is_rkmpp_encoder && is_rkmpp_encoder
&& matches!(config.output_codec, VideoEncoderType::H264 | VideoEncoderType::H265) && matches!(
config.output_codec,
VideoEncoderType::H264 | VideoEncoderType::H265
)
{ {
info!( info!(
"Initializing FFmpeg HW MJPEG->{} pipeline (no fallback)", "Initializing FFmpeg HW MJPEG->{} pipeline (no fallback)",
@@ -528,7 +532,11 @@ impl SharedVideoPipeline {
thread_count: 1, thread_count: 1,
}; };
let pipeline = HwMjpegH26xPipeline::new(hw_config).map_err(|e| { let pipeline = HwMjpegH26xPipeline::new(hw_config).map_err(|e| {
let detail = if e.is_empty() { ffmpeg_hw_last_error() } else { e }; let detail = if e.is_empty() {
ffmpeg_hw_last_error()
} else {
e
};
AppError::VideoError(format!( AppError::VideoError(format!(
"FFmpeg HW MJPEG->{} init failed: {}", "FFmpeg HW MJPEG->{} init failed: {}",
config.output_codec, detail config.output_codec, detail
@@ -902,7 +910,11 @@ impl SharedVideoPipeline {
/// Get subscriber count /// Get subscriber count
pub fn subscriber_count(&self) -> usize { pub fn subscriber_count(&self) -> usize {
self.subscribers.read().iter().filter(|tx| !tx.is_closed()).count() self.subscribers
.read()
.iter()
.filter(|tx| !tx.is_closed())
.count()
} }
/// Report that a receiver has lagged behind /// Report that a receiver has lagged behind
@@ -951,7 +963,11 @@ impl SharedVideoPipeline {
pipeline pipeline
.reconfigure(bitrate_kbps as i32, gop as i32) .reconfigure(bitrate_kbps as i32, gop as i32)
.map_err(|e| { .map_err(|e| {
let detail = if e.is_empty() { ffmpeg_hw_last_error() } else { e }; let detail = if e.is_empty() {
ffmpeg_hw_last_error()
} else {
e
};
AppError::VideoError(format!( AppError::VideoError(format!(
"FFmpeg HW reconfigure failed: {}", "FFmpeg HW reconfigure failed: {}",
detail detail
@@ -1279,53 +1295,17 @@ impl SharedVideoPipeline {
let frame_seq_tx = frame_seq_tx.clone(); let frame_seq_tx = frame_seq_tx.clone();
let buffer_pool = buffer_pool.clone(); let buffer_pool = buffer_pool.clone();
std::thread::spawn(move || { std::thread::spawn(move || {
let device = match Device::with_path(&device_path) { let mut stream = match V4l2rCaptureStream::open(
Ok(d) => d, &device_path,
Err(e) => { config.resolution,
error!("Failed to open device {:?}: {}", device_path, e); config.input_format,
let _ = pipeline.running.send(false); config.fps,
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(1);
return;
}
};
let requested_format = Format::new(
config.resolution.width,
config.resolution.height,
config.input_format.to_fourcc(),
);
let actual_format = match device.set_format(&requested_format) {
Ok(f) => f,
Err(e) => {
error!("Failed to set capture format: {}", e);
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(1);
return;
}
};
let resolution = Resolution::new(actual_format.width, actual_format.height);
let pixel_format =
PixelFormat::from_fourcc(actual_format.fourcc).unwrap_or(config.input_format);
let stride = actual_format.stride;
if config.fps > 0 {
if let Err(e) = device.set_params(&Parameters::with_fps(config.fps)) {
warn!("Failed to set hardware FPS: {}", e);
}
}
let mut stream = match MmapStream::with_buffers(
&device,
BufferType::VideoCapture,
buffer_count.max(1), buffer_count.max(1),
Duration::from_secs(2),
) { ) {
Ok(s) => s, Ok(stream) => stream,
Err(e) => { Err(e) => {
error!("Failed to create capture stream: {}", e); error!("Failed to open capture stream: {}", e);
let _ = pipeline.running.send(false); let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release); pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(1); let _ = frame_seq_tx.send(1);
@@ -1333,10 +1313,28 @@ impl SharedVideoPipeline {
} }
}; };
let resolution = stream.resolution();
let pixel_format = stream.format();
let stride = stream.stride();
let mut no_subscribers_since: Option<Instant> = None; let mut no_subscribers_since: Option<Instant> = None;
let grace_period = Duration::from_secs(AUTO_STOP_GRACE_PERIOD_SECS); let grace_period = Duration::from_secs(AUTO_STOP_GRACE_PERIOD_SECS);
let mut sequence: u64 = 0; let mut sequence: u64 = 0;
let mut validate_counter: u64 = 0; let mut validate_counter: u64 = 0;
let mut consecutive_timeouts: u32 = 0;
let capture_error_throttler = LogThrottler::with_secs(5);
let mut suppressed_capture_errors: HashMap<String, u64> = HashMap::new();
let classify_capture_error = |err: &std::io::Error| -> String {
let message = err.to_string();
if message.contains("dqbuf failed") && message.contains("EINVAL") {
"capture_dqbuf_einval".to_string()
} else if message.contains("dqbuf failed") {
"capture_dqbuf".to_string()
} else {
format!("capture_{:?}", err.kind())
}
};
while pipeline.running_flag.load(Ordering::Acquire) { while pipeline.running_flag.load(Ordering::Acquire) {
let subscriber_count = pipeline.subscriber_count(); let subscriber_count = pipeline.subscriber_count();
@@ -1366,49 +1364,78 @@ impl SharedVideoPipeline {
no_subscribers_since = None; no_subscribers_since = None;
} }
let (buf, meta) = match stream.next() { let mut owned = buffer_pool.take(MIN_CAPTURE_FRAME_SIZE);
Ok(frame_data) => frame_data, let meta = match stream.next_into(&mut owned) {
Ok(meta) => {
consecutive_timeouts = 0;
meta
}
Err(e) => { Err(e) => {
if e.kind() == std::io::ErrorKind::TimedOut { if e.kind() == std::io::ErrorKind::TimedOut {
consecutive_timeouts = consecutive_timeouts.saturating_add(1);
warn!("Capture timeout - no signal?"); warn!("Capture timeout - no signal?");
if consecutive_timeouts >= CAPTURE_TIMEOUT_RESTART_THRESHOLD {
warn!(
"Capture timed out {} consecutive times, restarting video pipeline",
consecutive_timeouts
);
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(sequence.wrapping_add(1));
break;
}
} else {
consecutive_timeouts = 0;
let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) {
let suppressed =
suppressed_capture_errors.remove(&key).unwrap_or(0);
if suppressed > 0 {
error!(
"Capture error: {} (suppressed {} repeats)",
e, suppressed
);
} else { } else {
error!("Capture error: {}", e); error!("Capture error: {}", e);
} }
} else {
let counter = suppressed_capture_errors.entry(key).or_insert(0);
*counter = counter.saturating_add(1);
}
}
continue; continue;
} }
}; };
let frame_size = meta.bytesused as usize; let frame_size = meta.bytes_used;
if frame_size < MIN_CAPTURE_FRAME_SIZE { if frame_size < MIN_CAPTURE_FRAME_SIZE {
continue; continue;
} }
validate_counter = validate_counter.wrapping_add(1); validate_counter = validate_counter.wrapping_add(1);
if pixel_format.is_compressed() if pixel_format.is_compressed()
&& validate_counter % JPEG_VALIDATE_INTERVAL == 0 && validate_counter.is_multiple_of(JPEG_VALIDATE_INTERVAL)
&& !VideoFrame::is_valid_jpeg_bytes(&buf[..frame_size]) && !VideoFrame::is_valid_jpeg_bytes(&owned[..frame_size])
{ {
continue; continue;
} }
let mut owned = buffer_pool.take(frame_size); owned.truncate(frame_size);
owned.resize(frame_size, 0);
owned[..frame_size].copy_from_slice(&buf[..frame_size]);
let frame = Arc::new(VideoFrame::from_pooled( let frame = Arc::new(VideoFrame::from_pooled(
Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))), Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))),
resolution, resolution,
pixel_format, pixel_format,
stride, stride,
sequence, meta.sequence,
)); ));
sequence = sequence.wrapping_add(1); sequence = meta.sequence.wrapping_add(1);
{ {
let mut guard = latest_frame.write(); let mut guard = latest_frame.write();
*guard = Some(frame); *guard = Some(frame);
} }
let _ = frame_seq_tx.send(sequence); let _ = frame_seq_tx.send(sequence);
} }
pipeline.running_flag.store(false, Ordering::Release); pipeline.running_flag.store(false, Ordering::Release);
@@ -1473,7 +1500,11 @@ impl SharedVideoPipeline {
} }
let packet = pipeline.encode(raw_frame, pts_ms).map_err(|e| { let packet = pipeline.encode(raw_frame, pts_ms).map_err(|e| {
let detail = if e.is_empty() { ffmpeg_hw_last_error() } else { e }; let detail = if e.is_empty() {
ffmpeg_hw_last_error()
} else {
e
};
AppError::VideoError(format!("FFmpeg HW encode failed: {}", detail)) AppError::VideoError(format!("FFmpeg HW encode failed: {}", detail))
})?; })?;
@@ -1493,9 +1524,10 @@ impl SharedVideoPipeline {
} }
let decoded_buf = if input_format.is_compressed() { let decoded_buf = if input_format.is_compressed() {
let decoder = state.mjpeg_decoder.as_mut().ok_or_else(|| { let decoder = state
AppError::VideoError("MJPEG decoder not initialized".to_string()) .mjpeg_decoder
})?; .as_mut()
.ok_or_else(|| AppError::VideoError("MJPEG decoder not initialized".to_string()))?;
let decoded = decoder.decode(raw_frame)?; let decoded = decoder.decode(raw_frame)?;
Some(decoded) Some(decoded)
} else { } else {
@@ -1525,16 +1557,18 @@ impl SharedVideoPipeline {
debug!("[Pipeline] Keyframe will be generated for this frame"); debug!("[Pipeline] Keyframe will be generated for this frame");
} }
let encode_result = if needs_yuv420p && state.yuv420p_converter.is_some() { let encode_result = if needs_yuv420p {
// Software encoder with direct input conversion to YUV420P // Software encoder with direct input conversion to YUV420P
let conv = state.yuv420p_converter.as_mut().unwrap(); if let Some(conv) = state.yuv420p_converter.as_mut() {
let yuv420p_data = conv let yuv420p_data = conv.convert(raw_frame).map_err(|e| {
.convert(raw_frame) AppError::VideoError(format!("YUV420P conversion failed: {}", e))
.map_err(|e| AppError::VideoError(format!("YUV420P conversion failed: {}", e)))?; })?;
encoder.encode_raw(yuv420p_data, pts_ms) encoder.encode_raw(yuv420p_data, pts_ms)
} else if state.nv12_converter.is_some() { } else {
encoder.encode_raw(raw_frame, pts_ms)
}
} else if let Some(conv) = state.nv12_converter.as_mut() {
// Hardware encoder with input conversion to NV12 // Hardware encoder with input conversion to NV12
let conv = state.nv12_converter.as_mut().unwrap();
let nv12_data = conv let nv12_data = conv
.convert(raw_frame) .convert(raw_frame)
.map_err(|e| AppError::VideoError(format!("NV12 conversion failed: {}", e)))?; .map_err(|e| AppError::VideoError(format!("NV12 conversion failed: {}", e)))?;

View File

@@ -37,6 +37,7 @@ use crate::error::Result;
use crate::events::{EventBus, SystemEvent, VideoDeviceInfo}; use crate::events::{EventBus, SystemEvent, VideoDeviceInfo};
use crate::hid::HidController; use crate::hid::HidController;
use crate::stream::MjpegStreamHandler; use crate::stream::MjpegStreamHandler;
use crate::video::codec_constraints::StreamCodecConstraints;
use crate::video::format::{PixelFormat, Resolution}; use crate::video::format::{PixelFormat, Resolution};
use crate::video::streamer::{Streamer, StreamerState}; use crate::video::streamer::{Streamer, StreamerState};
use crate::webrtc::WebRtcStreamer; use crate::webrtc::WebRtcStreamer;
@@ -144,6 +145,16 @@ impl VideoStreamManager {
*self.config_store.write().await = Some(config); *self.config_store.write().await = Some(config);
} }
/// Get current stream codec constraints derived from global configuration.
pub async fn codec_constraints(&self) -> StreamCodecConstraints {
if let Some(ref config_store) = *self.config_store.read().await {
let config = config_store.get();
StreamCodecConstraints::from_config(&config)
} else {
StreamCodecConstraints::unrestricted()
}
}
/// Get current streaming mode /// Get current streaming mode
pub async fn current_mode(&self) -> StreamMode { pub async fn current_mode(&self) -> StreamMode {
self.mode.read().await.clone() self.mode.read().await.clone()
@@ -718,9 +729,11 @@ impl VideoStreamManager {
/// Returns None if video capture cannot be started or pipeline creation fails. /// Returns None if video capture cannot be started or pipeline creation fails.
pub async fn subscribe_encoded_frames( pub async fn subscribe_encoded_frames(
&self, &self,
) -> Option<tokio::sync::mpsc::Receiver<std::sync::Arc< ) -> Option<
crate::video::shared_video_pipeline::EncodedVideoFrame, tokio::sync::mpsc::Receiver<
>>> { std::sync::Arc<crate::video::shared_video_pipeline::EncodedVideoFrame>,
>,
> {
// 1. Ensure video capture is initialized (for config discovery) // 1. Ensure video capture is initialized (for config discovery)
if self.streamer.state().await == StreamerState::Uninitialized { if self.streamer.state().await == StreamerState::Uninitialized {
tracing::info!("Initializing video capture for encoded frame subscription"); tracing::info!("Initializing video capture for encoded frame subscription");
@@ -756,7 +769,11 @@ impl VideoStreamManager {
} }
// 3. Use WebRtcStreamer to ensure the shared video pipeline is running // 3. Use WebRtcStreamer to ensure the shared video pipeline is running
match self.webrtc_streamer.ensure_video_pipeline_for_external().await { match self
.webrtc_streamer
.ensure_video_pipeline_for_external()
.await
{
Ok(pipeline) => Some(pipeline.subscribe()), Ok(pipeline) => Some(pipeline.subscribe()),
Err(e) => { Err(e) => {
tracing::error!("Failed to start shared video pipeline: {}", e); tracing::error!("Failed to start shared video pipeline: {}", e);

View File

@@ -3,9 +3,11 @@
//! This module provides a high-level interface for video capture and streaming, //! This module provides a high-level interface for video capture and streaming,
//! managing the lifecycle of the capture thread and MJPEG/WebRTC distribution. //! managing the lifecycle of the capture thread and MJPEG/WebRTC distribution.
use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, AtomicU32, Ordering}; use std::sync::atomic::{AtomicBool, AtomicU32, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use tracing::{debug, error, info, trace, warn}; use tracing::{debug, error, info, trace, warn};
@@ -15,12 +17,8 @@ use super::frame::{FrameBuffer, FrameBufferPool, VideoFrame};
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::events::{EventBus, SystemEvent}; use crate::events::{EventBus, SystemEvent};
use crate::stream::MjpegStreamHandler; use crate::stream::MjpegStreamHandler;
use v4l::buffer::Type as BufferType; use crate::utils::LogThrottler;
use v4l::io::traits::CaptureStream; use crate::video::v4l2r_capture::V4l2rCaptureStream;
use v4l::prelude::*;
use v4l::video::capture::Parameters;
use v4l::video::Capture;
use v4l::Format;
/// Minimum valid frame size for capture /// Minimum valid frame size for capture
const MIN_CAPTURE_FRAME_SIZE: usize = 128; const MIN_CAPTURE_FRAME_SIZE: usize = 128;
@@ -573,13 +571,11 @@ impl Streamer {
break; break;
} }
} }
} else { } else if zero_since.is_some() {
if zero_since.is_some() {
info!("Clients reconnected, canceling auto-pause"); info!("Clients reconnected, canceling auto-pause");
zero_since = None; zero_since = None;
} }
} }
}
}); });
} else { } else {
debug!("Background tasks already started, skipping"); debug!("Background tasks already started, skipping");
@@ -632,8 +628,7 @@ impl Streamer {
} }
}; };
let mut device_opt: Option<Device> = None; let mut stream_opt: Option<V4l2rCaptureStream> = None;
let mut format_opt: Option<Format> = None;
let mut last_error: Option<String> = None; let mut last_error: Option<String> = None;
for attempt in 0..MAX_RETRIES { for attempt in 0..MAX_RETRIES {
@@ -642,8 +637,18 @@ impl Streamer {
return; return;
} }
let device = match Device::with_path(&device_path) { match V4l2rCaptureStream::open(
Ok(d) => d, &device_path,
config.resolution,
config.format,
config.fps,
BUFFER_COUNT,
Duration::from_secs(2),
) {
Ok(stream) => {
stream_opt = Some(stream);
break;
}
Err(e) => { Err(e) => {
let err_str = e.to_string(); let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") { if err_str.contains("busy") || err_str.contains("resource") {
@@ -660,42 +665,12 @@ impl Streamer {
last_error = Some(err_str); last_error = Some(err_str);
break; break;
} }
};
let requested = Format::new(
config.resolution.width,
config.resolution.height,
config.format.to_fourcc(),
);
match device.set_format(&requested) {
Ok(actual) => {
device_opt = Some(device);
format_opt = Some(actual);
break;
}
Err(e) => {
let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") {
warn!(
"Device busy on set_format attempt {}/{}, retrying in {}ms...",
attempt + 1,
MAX_RETRIES,
RETRY_DELAY_MS
);
std::thread::sleep(std::time::Duration::from_millis(RETRY_DELAY_MS));
last_error = Some(err_str);
continue;
}
last_error = Some(err_str);
break;
}
} }
} }
let (device, actual_format) = match (device_opt, format_opt) { let mut stream = match stream_opt {
(Some(d), Some(f)) => (d, f), Some(stream) => stream,
_ => { None => {
error!( error!(
"Failed to open device {:?}: {}", "Failed to open device {:?}: {}",
device_path, device_path,
@@ -709,42 +684,35 @@ impl Streamer {
} }
}; };
let resolution = stream.resolution();
let pixel_format = stream.format();
let stride = stream.stride();
info!( info!(
"Capture format: {}x{} {:?} stride={}", "Capture format: {}x{} {:?} stride={}",
actual_format.width, actual_format.height, actual_format.fourcc, actual_format.stride resolution.width, resolution.height, pixel_format, stride
); );
let resolution = Resolution::new(actual_format.width, actual_format.height);
let pixel_format =
PixelFormat::from_fourcc(actual_format.fourcc).unwrap_or(config.format);
if config.fps > 0 {
if let Err(e) = device.set_params(&Parameters::with_fps(config.fps)) {
warn!("Failed to set hardware FPS: {}", e);
}
}
let mut stream =
match MmapStream::with_buffers(&device, BufferType::VideoCapture, BUFFER_COUNT) {
Ok(s) => s,
Err(e) => {
error!("Failed to create capture stream: {}", e);
self.mjpeg_handler.set_offline();
set_state(StreamerState::Error);
self.direct_active.store(false, Ordering::SeqCst);
self.current_fps.store(0, Ordering::Relaxed);
return;
}
};
let buffer_pool = Arc::new(FrameBufferPool::new(BUFFER_COUNT.max(4) as usize)); let buffer_pool = Arc::new(FrameBufferPool::new(BUFFER_COUNT.max(4) as usize));
let mut signal_present = true; let mut signal_present = true;
let mut sequence: u64 = 0;
let mut validate_counter: u64 = 0; let mut validate_counter: u64 = 0;
let mut idle_since: Option<std::time::Instant> = None; let mut idle_since: Option<std::time::Instant> = None;
let mut fps_frame_count: u64 = 0; let mut fps_frame_count: u64 = 0;
let mut last_fps_time = std::time::Instant::now(); let mut last_fps_time = std::time::Instant::now();
let capture_error_throttler = LogThrottler::with_secs(5);
let mut suppressed_capture_errors: HashMap<String, u64> = HashMap::new();
let classify_capture_error = |err: &std::io::Error| -> String {
let message = err.to_string();
if message.contains("dqbuf failed") && message.contains("EINVAL") {
"capture_dqbuf_einval".to_string()
} else if message.contains("dqbuf failed") {
"capture_dqbuf".to_string()
} else {
format!("capture_{:?}", err.kind())
}
};
while !self.direct_stop.load(Ordering::Relaxed) { while !self.direct_stop.load(Ordering::Relaxed) {
let mjpeg_clients = self.mjpeg_handler.client_count(); let mjpeg_clients = self.mjpeg_handler.client_count();
@@ -768,8 +736,9 @@ impl Streamer {
idle_since = None; idle_since = None;
} }
let (buf, meta) = match stream.next() { let mut owned = buffer_pool.take(MIN_CAPTURE_FRAME_SIZE);
Ok(frame_data) => frame_data, let meta = match stream.next_into(&mut owned) {
Ok(meta) => meta,
Err(e) => { Err(e) => {
if e.kind() == std::io::ErrorKind::TimedOut { if e.kind() == std::io::ErrorKind::TimedOut {
if signal_present { if signal_present {
@@ -811,35 +780,43 @@ impl Streamer {
break; break;
} }
let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) {
let suppressed = suppressed_capture_errors.remove(&key).unwrap_or(0);
if suppressed > 0 {
error!("Capture error: {} (suppressed {} repeats)", e, suppressed);
} else {
error!("Capture error: {}", e); error!("Capture error: {}", e);
}
} else {
let counter = suppressed_capture_errors.entry(key).or_insert(0);
*counter = counter.saturating_add(1);
}
continue; continue;
} }
}; };
let frame_size = meta.bytesused as usize; let frame_size = meta.bytes_used;
if frame_size < MIN_CAPTURE_FRAME_SIZE { if frame_size < MIN_CAPTURE_FRAME_SIZE {
continue; continue;
} }
validate_counter = validate_counter.wrapping_add(1); validate_counter = validate_counter.wrapping_add(1);
if pixel_format.is_compressed() if pixel_format.is_compressed()
&& validate_counter % JPEG_VALIDATE_INTERVAL == 0 && validate_counter.is_multiple_of(JPEG_VALIDATE_INTERVAL)
&& !VideoFrame::is_valid_jpeg_bytes(&buf[..frame_size]) && !VideoFrame::is_valid_jpeg_bytes(&owned[..frame_size])
{ {
continue; continue;
} }
let mut owned = buffer_pool.take(frame_size); owned.truncate(frame_size);
owned.resize(frame_size, 0);
owned[..frame_size].copy_from_slice(&buf[..frame_size]);
let frame = VideoFrame::from_pooled( let frame = VideoFrame::from_pooled(
Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))), Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))),
resolution, resolution,
pixel_format, pixel_format,
actual_format.stride, stride,
sequence, meta.sequence,
); );
sequence = sequence.wrapping_add(1);
if !signal_present { if !signal_present {
signal_present = true; signal_present = true;
@@ -985,7 +962,7 @@ impl Streamer {
*streamer.state.write().await = StreamerState::Recovering; *streamer.state.write().await = StreamerState::Recovering;
// Publish reconnecting event (every 5 attempts to avoid spam) // Publish reconnecting event (every 5 attempts to avoid spam)
if attempt == 1 || attempt % 5 == 0 { if attempt == 1 || attempt.is_multiple_of(5) {
streamer streamer
.publish_event(SystemEvent::StreamReconnecting { .publish_event(SystemEvent::StreamReconnecting {
device: device_path.clone(), device: device_path.clone(),

277
src/video/v4l2r_capture.rs Normal file
View File

@@ -0,0 +1,277 @@
//! V4L2 capture implementation using v4l2r (ioctl layer).
use std::fs::File;
use std::io;
use std::os::fd::AsFd;
use std::path::Path;
use std::time::Duration;
use nix::poll::{poll, PollFd, PollFlags, PollTimeout};
use tracing::{debug, warn};
use v4l2r::bindings::{v4l2_requestbuffers, v4l2_streamparm, v4l2_streamparm__bindgen_ty_1};
use v4l2r::ioctl::{
self, Capabilities, Capability as V4l2rCapability, MemoryConsistency, PlaneMapping, QBufPlane,
QBuffer, QueryBuffer, V4l2Buffer,
};
use v4l2r::memory::{MemoryType, MmapHandle};
use v4l2r::{Format as V4l2rFormat, PixelFormat as V4l2rPixelFormat, QueueType};
use crate::error::{AppError, Result};
use crate::video::format::{PixelFormat, Resolution};
/// Metadata for a captured frame.
#[derive(Debug, Clone, Copy)]
pub struct CaptureMeta {
    /// Total number of payload bytes copied out of the dequeued buffer
    /// (sum of the used bytes of all planes).
    pub bytes_used: usize,
    /// Driver-provided frame sequence counter (from `v4l2_buffer.sequence`).
    pub sequence: u64,
}
/// V4L2 capture stream backed by v4l2r ioctl.
pub struct V4l2rCaptureStream {
    /// Open handle to the V4L2 device node (opened read/write).
    fd: File,
    /// Capture queue in use; multi-planar is preferred when the device
    /// advertises it, otherwise the classic single-planar capture queue.
    queue: QueueType,
    /// Resolution actually negotiated with the driver (may differ from the
    /// requested one).
    resolution: Resolution,
    /// Pixel format actually negotiated with the driver.
    format: PixelFormat,
    /// Bytes per line of the first plane; falls back to a width-based
    /// estimate when the driver reports no plane format.
    stride: u32,
    /// How long `next_into` polls for a ready frame; zero disables waiting.
    timeout: Duration,
    /// mmap'd plane mappings, outer index = buffer index, inner = plane index.
    mappings: Vec<Vec<PlaneMapping>>,
}
impl V4l2rCaptureStream {
    /// Open a V4L2 capture device and start streaming.
    ///
    /// Performs the full V4L2 MMAP streaming setup in order: QUERYCAP,
    /// format negotiation (S_FMT), optional frame-rate request (S_PARM),
    /// buffer allocation (REQBUFS), per-plane mapping (QUERYBUF + mmap),
    /// queuing of every buffer, and finally STREAMON.
    ///
    /// The driver may adjust the requested `resolution` and `format`; the
    /// values actually in effect are available via [`Self::resolution`] and
    /// [`Self::format`]. A failed FPS request is only logged as a warning.
    ///
    /// `timeout` bounds how long [`Self::next_into`] waits for a frame; a
    /// zero timeout skips the readiness poll entirely.
    pub fn open(
        device_path: impl AsRef<Path>,
        resolution: Resolution,
        format: PixelFormat,
        fps: u32,
        buffer_count: u32,
        timeout: Duration,
    ) -> Result<Self> {
        // Read/write access is required for the streaming ioctls.
        let mut fd = File::options()
            .read(true)
            .write(true)
            .open(device_path.as_ref())
            .map_err(|e| AppError::VideoError(format!("Failed to open device: {}", e)))?;
        let caps: V4l2rCapability = ioctl::querycap(&fd)
            .map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?;
        let caps_flags = caps.device_caps();
        // Prefer multi-planar capture when available, as it is required for some
        // devices/pixel formats (e.g. NV12 via VIDEO_CAPTURE_MPLANE).
        let queue = if caps_flags.contains(Capabilities::VIDEO_CAPTURE_MPLANE) {
            QueueType::VideoCaptureMplane
        } else if caps_flags.contains(Capabilities::VIDEO_CAPTURE) {
            QueueType::VideoCapture
        } else {
            return Err(AppError::VideoError(
                "Device does not support capture queues".to_string(),
            ));
        };
        // Start from the driver's current format and overwrite only the
        // fields we care about, then let S_FMT report what was accepted.
        let mut fmt: V4l2rFormat = ioctl::g_fmt(&fd, queue)
            .map_err(|e| AppError::VideoError(format!("Failed to get device format: {}", e)))?;
        fmt.width = resolution.width;
        fmt.height = resolution.height;
        fmt.pixelformat = V4l2rPixelFormat::from(&format.to_fourcc());
        let actual_fmt: V4l2rFormat = ioctl::s_fmt(&mut fd, (queue, &fmt))
            .map_err(|e| AppError::VideoError(format!("Failed to set device format: {}", e)))?;
        let actual_resolution = Resolution::new(actual_fmt.width, actual_fmt.height);
        // If the driver returned a fourcc we do not model, keep the caller's
        // requested format as a best-effort label.
        let actual_format = PixelFormat::from_v4l2r(actual_fmt.pixelformat).unwrap_or(format);
        // Stride comes from the first plane; when absent, estimate it from
        // width (times bytes-per-pixel for uncompressed formats).
        let stride = actual_fmt
            .plane_fmt
            .first()
            .map(|p| p.bytesperline)
            .unwrap_or_else(|| match actual_format.bytes_per_pixel() {
                Some(bpp) => actual_resolution.width * bpp as u32,
                None => actual_resolution.width,
            });
        if fps > 0 {
            // Best effort: many capture bridges ignore or reject S_PARM.
            if let Err(e) = set_fps(&fd, queue, fps) {
                warn!("Failed to set hardware FPS: {}", e);
            }
        }
        let req: v4l2_requestbuffers = ioctl::reqbufs(
            &fd,
            queue,
            MemoryType::Mmap,
            buffer_count,
            MemoryConsistency::empty(),
        )
        .map_err(|e| AppError::VideoError(format!("Failed to request buffers: {}", e)))?;
        // The driver may grant fewer buffers than requested; zero means
        // streaming is impossible.
        let allocated = req.count as usize;
        if allocated == 0 {
            return Err(AppError::VideoError(
                "Driver returned zero capture buffers".to_string(),
            ));
        }
        // Map every plane of every allocated buffer up front so the capture
        // loop never has to mmap on the hot path.
        let mut mappings = Vec::with_capacity(allocated);
        for index in 0..allocated as u32 {
            let query: QueryBuffer = ioctl::querybuf(&fd, queue, index as usize).map_err(|e| {
                AppError::VideoError(format!("Failed to query buffer {}: {}", index, e))
            })?;
            if query.planes.is_empty() {
                return Err(AppError::VideoError(format!(
                    "Driver returned zero planes for buffer {}",
                    index
                )));
            }
            let mut plane_maps = Vec::with_capacity(query.planes.len());
            for plane in &query.planes {
                let mapping = ioctl::mmap(&fd, plane.mem_offset, plane.length).map_err(|e| {
                    AppError::VideoError(format!("Failed to mmap buffer {}: {}", index, e))
                })?;
                plane_maps.push(mapping);
            }
            mappings.push(plane_maps);
        }
        let mut stream = Self {
            fd,
            queue,
            resolution: actual_resolution,
            format: actual_format,
            stride,
            timeout,
            mappings,
        };
        // All buffers must be queued before STREAMON so the driver has
        // somewhere to write frames immediately.
        stream.queue_all_buffers()?;
        ioctl::streamon(&stream.fd, stream.queue)
            .map_err(|e| AppError::VideoError(format!("Failed to start capture stream: {}", e)))?;
        Ok(stream)
    }

    /// Resolution actually negotiated with the driver.
    pub fn resolution(&self) -> Resolution {
        self.resolution
    }

    /// Pixel format actually negotiated with the driver.
    pub fn format(&self) -> PixelFormat {
        self.format
    }

    /// Bytes per line of the first plane (or a width-based estimate).
    pub fn stride(&self) -> u32 {
        self.stride
    }

    /// Dequeue the next frame and copy its payload into `dst`.
    ///
    /// Waits up to `self.timeout` for data (returning
    /// `io::ErrorKind::TimedOut` when none arrives), concatenates the used
    /// bytes of every plane into `dst` — resizing it to the exact payload
    /// size — then immediately re-queues the buffer to the driver.
    ///
    /// Returns the total payload size and the driver's sequence counter.
    pub fn next_into(&mut self, dst: &mut Vec<u8>) -> io::Result<CaptureMeta> {
        self.wait_ready()?;
        let dqbuf: V4l2Buffer = ioctl::dqbuf(&self.fd, self.queue)
            .map_err(|e| io::Error::other(format!("dqbuf failed: {}", e)))?;
        let index = dqbuf.as_v4l2_buffer().index as usize;
        let sequence = dqbuf.as_v4l2_buffer().sequence as u64;
        // First pass: compute the total payload size so `dst` can be sized
        // exactly once. Plane payloads may start at a non-zero data_offset
        // within the mapping; all ranges are clamped to the mapping length.
        let mut total = 0usize;
        for (plane_idx, plane) in dqbuf.planes_iter().enumerate() {
            let bytes_used = *plane.bytesused as usize;
            let data_offset = plane.data_offset.copied().unwrap_or(0) as usize;
            if bytes_used == 0 {
                continue;
            }
            let mapping = &self.mappings[index][plane_idx];
            let start = data_offset.min(mapping.len());
            let end = (data_offset + bytes_used).min(mapping.len());
            total += end.saturating_sub(start);
        }
        dst.resize(total, 0);
        // Second pass: copy each plane's payload, concatenated in plane order.
        let mut cursor = 0usize;
        for (plane_idx, plane) in dqbuf.planes_iter().enumerate() {
            let bytes_used = *plane.bytesused as usize;
            let data_offset = plane.data_offset.copied().unwrap_or(0) as usize;
            if bytes_used == 0 {
                continue;
            }
            let mapping = &self.mappings[index][plane_idx];
            let start = data_offset.min(mapping.len());
            let end = (data_offset + bytes_used).min(mapping.len());
            let len = end.saturating_sub(start);
            if len == 0 {
                continue;
            }
            dst[cursor..cursor + len].copy_from_slice(&mapping[start..end]);
            cursor += len;
        }
        // Hand the buffer back to the driver before returning so capture
        // never stalls waiting for a free buffer.
        self.queue_buffer(index as u32)
            .map_err(|e| io::Error::other(e.to_string()))?;
        Ok(CaptureMeta {
            bytes_used: total,
            sequence,
        })
    }

    /// Poll the device fd until a frame is readable or the timeout elapses.
    ///
    /// A zero timeout disables waiting entirely. Timeouts longer than
    /// `u16::MAX` milliseconds are clamped to that maximum.
    fn wait_ready(&self) -> io::Result<()> {
        if self.timeout.is_zero() {
            return Ok(());
        }
        let mut fds = [PollFd::new(self.fd.as_fd(), PollFlags::POLLIN)];
        let timeout_ms = self.timeout.as_millis().min(u16::MAX as u128) as u16;
        let ready = poll(&mut fds, PollTimeout::from(timeout_ms))?;
        if ready == 0 {
            return Err(io::Error::new(io::ErrorKind::TimedOut, "capture timeout"));
        }
        Ok(())
    }

    /// Queue every allocated buffer to the driver (used once before STREAMON).
    fn queue_all_buffers(&mut self) -> Result<()> {
        for index in 0..self.mappings.len() as u32 {
            self.queue_buffer(index)?;
        }
        Ok(())
    }

    /// Queue a single MMAP buffer (all of its planes) back to the driver.
    fn queue_buffer(&mut self, index: u32) -> Result<()> {
        let handle = MmapHandle;
        let planes = self.mappings[index as usize]
            .iter()
            .map(|mapping| {
                // Advertise the full mapped length for each plane so the
                // driver may fill it completely.
                let mut plane = QBufPlane::new_from_handle(&handle, 0);
                plane.0.length = mapping.len() as u32;
                plane
            })
            .collect();
        let mut qbuf: QBuffer<MmapHandle> = QBuffer::new(self.queue, index);
        qbuf.planes = planes;
        ioctl::qbuf::<_, ()>(&self.fd, qbuf)
            .map_err(|e| AppError::VideoError(format!("Failed to queue buffer: {}", e)))?;
        Ok(())
    }
}
impl Drop for V4l2rCaptureStream {
    /// Issue STREAMOFF when the stream is dropped.
    ///
    /// A failure here is only logged at debug level: panicking inside `drop`
    /// would abort the process, and there is nothing useful a caller could do.
    fn drop(&mut self) {
        match ioctl::streamoff(&self.fd, self.queue) {
            Ok(()) => {}
            Err(e) => debug!("Failed to stop capture stream: {}", e),
        }
    }
}
/// Request a capture frame rate of `fps` frames/second via VIDIOC_S_PARM.
///
/// V4L2 expresses the rate as a time-per-frame fraction (1/fps). The driver
/// may adjust the requested value; the adjusted parameters returned by the
/// ioctl are intentionally ignored here.
///
/// # Errors
/// Returns `AppError::VideoError` when `fps` is 0 (which would produce an
/// invalid zero-denominator `v4l2_fract`) or when the ioctl itself fails.
fn set_fps(fd: &File, queue: QueueType, fps: u32) -> Result<()> {
    // Guard against a zero denominator: drivers reject or misbehave on a
    // 1/0 timeperframe, so fail fast with a clear message instead.
    if fps == 0 {
        return Err(AppError::VideoError(
            "Failed to set FPS: fps must be non-zero".to_string(),
        ));
    }
    // SAFETY: v4l2_streamparm is a plain C struct for which an all-zero
    // bit pattern is a valid initial state expected by the kernel API.
    let mut params = unsafe { std::mem::zeroed::<v4l2_streamparm>() };
    params.type_ = queue as u32;
    params.parm = v4l2_streamparm__bindgen_ty_1 {
        capture: v4l2r::bindings::v4l2_captureparm {
            // timeperframe = 1 / fps seconds per frame.
            timeperframe: v4l2r::bindings::v4l2_fract {
                numerator: 1,
                denominator: fps,
            },
            ..unsafe { std::mem::zeroed() }
        },
    };
    let _actual: v4l2_streamparm = ioctl::s_parm(fd, params)
        .map_err(|e| AppError::VideoError(format!("Failed to set FPS: {}", e)))?;
    Ok(())
}

View File

@@ -326,7 +326,6 @@ impl VideoSessionManager {
bitrate_preset: self.config.bitrate_preset, bitrate_preset: self.config.bitrate_preset,
fps: self.config.fps, fps: self.config.fps,
encoder_backend: self.config.encoder_backend, encoder_backend: self.config.encoder_backend,
..Default::default()
}; };
// Create new pipeline // Create new pipeline

View File

@@ -7,7 +7,11 @@ use std::sync::Arc;
use crate::config::*; use crate::config::*;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::events::SystemEvent; use crate::events::SystemEvent;
use crate::rtsp::RtspService;
use crate::state::AppState; use crate::state::AppState;
use crate::video::codec_constraints::{
enforce_constraints_with_stream_manager, StreamCodecConstraints,
};
/// 应用 Video 配置变更 /// 应用 Video 配置变更
pub async fn apply_video_config( pub async fn apply_video_config(
@@ -191,9 +195,7 @@ pub async fn apply_hid_config(
// Low-endpoint UDCs (e.g., musb) cannot handle consumer control endpoints reliably // Low-endpoint UDCs (e.g., musb) cannot handle consumer control endpoints reliably
if new_config.backend == HidBackend::Otg { if new_config.backend == HidBackend::Otg {
if let Some(udc) = if let Some(udc) = crate::otg::configfs::resolve_udc_name(new_config.otg_udc.as_deref()) {
crate::otg::configfs::resolve_udc_name(new_config.otg_udc.as_deref())
{
if crate::otg::configfs::is_low_endpoint_udc(&udc) && new_hid_functions.consumer { if crate::otg::configfs::is_low_endpoint_udc(&udc) && new_hid_functions.consumer {
tracing::warn!( tracing::warn!(
"UDC {} has low endpoint resources, disabling consumer control", "UDC {} has low endpoint resources, disabling consumer control",
@@ -446,6 +448,15 @@ pub async fn apply_audio_config(
Ok(()) Ok(())
} }
/// Apply stream codec constraints derived from global config.
///
/// Builds `StreamCodecConstraints` from the current config snapshot and asks
/// the stream manager to enforce them. Returns the enforcement's optional
/// human-readable message for caller-side logging (presumably `None` when no
/// change was needed — see `enforce_constraints_with_stream_manager`).
pub async fn enforce_stream_codec_constraints(state: &Arc<AppState>) -> Result<Option<String>> {
    let config = state.config.get();
    let constraints = StreamCodecConstraints::from_config(&config);
    let enforcement =
        enforce_constraints_with_stream_manager(&state.stream_manager, &constraints).await?;
    Ok(enforcement.message)
}
/// 应用 RustDesk 配置变更 /// 应用 RustDesk 配置变更
pub async fn apply_rustdesk_config( pub async fn apply_rustdesk_config(
state: &Arc<AppState>, state: &Arc<AppState>,
@@ -455,6 +466,7 @@ pub async fn apply_rustdesk_config(
tracing::info!("Applying RustDesk config changes..."); tracing::info!("Applying RustDesk config changes...");
let mut rustdesk_guard = state.rustdesk.write().await; let mut rustdesk_guard = state.rustdesk.write().await;
let mut credentials_to_save = None;
// Check if service needs to be stopped // Check if service needs to be stopped
if old_config.enabled && !new_config.enabled { if old_config.enabled && !new_config.enabled {
@@ -466,7 +478,6 @@ pub async fn apply_rustdesk_config(
tracing::info!("RustDesk service stopped"); tracing::info!("RustDesk service stopped");
} }
*rustdesk_guard = None; *rustdesk_guard = None;
return Ok(());
} }
// Check if service needs to be started or restarted // Check if service needs to be started or restarted
@@ -475,8 +486,6 @@ pub async fn apply_rustdesk_config(
|| old_config.device_id != new_config.device_id || old_config.device_id != new_config.device_id
|| old_config.device_password != new_config.device_password; || old_config.device_password != new_config.device_password;
let mut credentials_to_save = None;
if rustdesk_guard.is_none() { if rustdesk_guard.is_none() {
// Create new service // Create new service
tracing::info!("Initializing RustDesk service..."); tracing::info!("Initializing RustDesk service...");
@@ -509,6 +518,7 @@ pub async fn apply_rustdesk_config(
} }
} }
} }
}
// Save credentials to persistent config store (outside the lock) // Save credentials to persistent config store (outside the lock)
drop(rustdesk_guard); drop(rustdesk_guard);
@@ -530,6 +540,59 @@ pub async fn apply_rustdesk_config(
tracing::info!("RustDesk credentials saved successfully"); tracing::info!("RustDesk credentials saved successfully");
} }
} }
if let Some(message) = enforce_stream_codec_constraints(state).await? {
tracing::info!("{}", message);
}
Ok(())
}
/// 应用 RTSP 配置变更
pub async fn apply_rtsp_config(
state: &Arc<AppState>,
old_config: &RtspConfig,
new_config: &RtspConfig,
) -> Result<()> {
tracing::info!("Applying RTSP config changes...");
let mut rtsp_guard = state.rtsp.write().await;
if old_config.enabled && !new_config.enabled {
if let Some(ref service) = *rtsp_guard {
if let Err(e) = service.stop().await {
tracing::error!("Failed to stop RTSP service: {}", e);
}
}
*rtsp_guard = None;
}
if new_config.enabled {
let need_restart = old_config.bind != new_config.bind
|| old_config.port != new_config.port
|| old_config.path != new_config.path
|| old_config.codec != new_config.codec
|| old_config.username != new_config.username
|| old_config.password != new_config.password
|| old_config.allow_one_client != new_config.allow_one_client;
if rtsp_guard.is_none() {
let service = RtspService::new(new_config.clone(), state.stream_manager.clone());
service.start().await?;
tracing::info!("RTSP service started");
*rtsp_guard = Some(Arc::new(service));
} else if need_restart {
if let Some(ref service) = *rtsp_guard {
service.restart(new_config.clone()).await?;
tracing::info!("RTSP service restarted");
}
}
}
drop(rtsp_guard);
if let Some(message) = enforce_stream_codec_constraints(state).await? {
tracing::info!("{}", message);
} }
Ok(()) Ok(())

View File

@@ -24,6 +24,7 @@ mod audio;
mod auth; mod auth;
mod hid; mod hid;
mod msd; mod msd;
mod rtsp;
mod rustdesk; mod rustdesk;
mod stream; mod stream;
pub(crate) mod video; pub(crate) mod video;
@@ -35,6 +36,7 @@ pub use audio::{get_audio_config, update_audio_config};
pub use auth::{get_auth_config, update_auth_config}; pub use auth::{get_auth_config, update_auth_config};
pub use hid::{get_hid_config, update_hid_config}; pub use hid::{get_hid_config, update_hid_config};
pub use msd::{get_msd_config, update_msd_config}; pub use msd::{get_msd_config, update_msd_config};
pub use rtsp::{get_rtsp_config, get_rtsp_status, update_rtsp_config};
pub use rustdesk::{ pub use rustdesk::{
get_device_password, get_rustdesk_config, get_rustdesk_status, regenerate_device_id, get_device_password, get_rustdesk_config, get_rustdesk_status, regenerate_device_id,
regenerate_device_password, update_rustdesk_config, regenerate_device_password, update_rustdesk_config,
@@ -50,10 +52,29 @@ use std::sync::Arc;
use crate::config::AppConfig; use crate::config::AppConfig;
use crate::state::AppState; use crate::state::AppState;
/// Strip every secret from a config snapshot before serializing it to API
/// clients.
///
/// Mutates `config` in place, so callers must pass a clone — never the live
/// configuration.
fn sanitize_config_for_api(config: &mut AppConfig) {
    // Auth secrets
    config.auth.totp_secret = None;
    // Stream secrets
    config.stream.turn_password = None;
    // RustDesk secrets (device_password is a String, so it is cleared
    // rather than set to None)
    config.rustdesk.device_password.clear();
    config.rustdesk.relay_key = None;
    config.rustdesk.public_key = None;
    config.rustdesk.private_key = None;
    config.rustdesk.signing_public_key = None;
    config.rustdesk.signing_private_key = None;
    // RTSP secrets
    config.rtsp.password = None;
}
/// 获取完整配置 /// 获取完整配置
pub async fn get_all_config(State(state): State<Arc<AppState>>) -> Json<AppConfig> { pub async fn get_all_config(State(state): State<Arc<AppState>>) -> Json<AppConfig> {
let mut config = (*state.config.get()).clone(); let mut config = (*state.config.get()).clone();
// 不暴露敏感信息 // 不暴露敏感信息
config.auth.totp_secret = None; sanitize_config_for_api(&mut config);
Json(config) Json(config)
} }

View File

@@ -0,0 +1,70 @@
use axum::{extract::State, Json};
use std::sync::Arc;
use crate::error::{AppError, Result};
use crate::state::AppState;
use super::apply::apply_rtsp_config;
use super::types::{RtspConfigResponse, RtspConfigUpdate, RtspStatusResponse};
/// Get RTSP config
/// GET handler returning the current RTSP configuration (the password is
/// redacted by the response type, which exposes only a `has_password` flag).
pub async fn get_rtsp_config(State(state): State<Arc<AppState>>) -> Json<RtspConfigResponse> {
    let snapshot = state.config.get();
    let response = RtspConfigResponse::from(&snapshot.rtsp);
    Json(response)
}
/// Get RTSP status (config + service status)
/// GET handler combining the RTSP configuration with the live service state.
///
/// Reports `Stopped` when no service instance currently exists.
pub async fn get_rtsp_status(State(state): State<Arc<AppState>>) -> Json<RtspStatusResponse> {
    let rtsp_config = state.config.get().rtsp.clone();
    // The read lock is held only for the duration of the status query.
    let service_status = match *state.rtsp.read().await {
        Some(ref service) => service.status().await,
        None => crate::rtsp::RtspServiceStatus::Stopped,
    };
    Json(RtspStatusResponse::new(&rtsp_config, service_status))
}
/// Update RTSP config
/// PATCH handler updating the RTSP configuration.
///
/// Flow: validate the request, persist the merged config, then apply it to
/// the running service. If applying fails, the stored config is rolled back
/// so the persisted state does not claim a configuration that was never
/// activated.
pub async fn update_rtsp_config(
    State(state): State<Arc<AppState>>,
    Json(req): Json<RtspConfigUpdate>,
) -> Result<Json<RtspConfigResponse>> {
    req.validate()?;
    // Snapshot the previous config for both change-detection in apply and
    // for rollback on failure.
    let old_config = state.config.get().rtsp.clone();
    state
        .config
        .update(|config| {
            req.apply_to(&mut config.rtsp);
        })
        .await?;
    let new_config = state.config.get().rtsp.clone();
    if let Err(err) = apply_rtsp_config(&state, &old_config, &new_config).await {
        tracing::error!("Failed to apply RTSP config: {}", err);
        // NOTE(review): rollback restores the *stored* config only; the
        // service itself may remain partially applied until the next
        // successful apply — confirm apply_rtsp_config is idempotent.
        if let Err(rollback_err) = state
            .config
            .update(|config| {
                config.rtsp = old_config.clone();
            })
            .await
        {
            // Both apply and rollback failed: surface both errors so the
            // operator knows stored and running state have diverged.
            tracing::error!(
                "Failed to rollback RTSP config after apply failure: {}",
                rollback_err
            );
            return Err(AppError::ServiceUnavailable(format!(
                "RTSP apply failed: {}; rollback failed: {}",
                err, rollback_err
            )));
        }
        return Err(err);
    }
    Ok(Json(RtspConfigResponse::from(&new_config)))
}

View File

@@ -106,6 +106,15 @@ pub async fn update_rustdesk_config(
tracing::error!("Failed to apply RustDesk config: {}", e); tracing::error!("Failed to apply RustDesk config: {}", e);
} }
// Share a non-sensitive summary for frontend UX
let constraints = state.stream_manager.codec_constraints().await;
if constraints.rustdesk_enabled || constraints.rtsp_enabled {
tracing::info!(
"Stream codec constraints active after RustDesk update: {}",
constraints.reason
);
}
Ok(Json(RustDeskConfigResponse::from(&new_config))) Ok(Json(RustDeskConfigResponse::from(&new_config)))
} }
@@ -139,7 +148,7 @@ pub async fn regenerate_device_password(
Ok(Json(RustDeskConfigResponse::from(&new_config))) Ok(Json(RustDeskConfigResponse::from(&new_config)))
} }
/// 获取设备密码(管理员专用 /// 获取设备密码(已认证用户
pub async fn get_device_password(State(state): State<Arc<AppState>>) -> Json<serde_json::Value> { pub async fn get_device_password(State(state): State<Arc<AppState>>) -> Json<serde_json::Value> {
let config = state.config.get().rustdesk.clone(); let config = state.config.get().rustdesk.clone();
Json(serde_json::json!({ Json(serde_json::json!({

View File

@@ -42,5 +42,10 @@ pub async fn update_stream_config(
tracing::error!("Failed to apply stream config: {}", e); tracing::error!("Failed to apply stream config: {}", e);
} }
// 6. Enforce codec constraints after any stream config update
if let Err(e) = super::apply::enforce_stream_codec_constraints(&state).await {
tracing::error!("Failed to enforce stream codec constraints: {}", e);
}
Ok(Json(StreamConfigResponse::from(&new_stream_config))) Ok(Json(StreamConfigResponse::from(&new_stream_config)))
} }

View File

@@ -1,5 +1,6 @@
use crate::config::*; use crate::config::*;
use crate::error::AppError; use crate::error::AppError;
use crate::rtsp::RtspServiceStatus;
use crate::rustdesk::config::RustDeskConfig; use crate::rustdesk::config::RustDeskConfig;
use crate::video::encoder::BitratePreset; use crate::video::encoder::BitratePreset;
use serde::Deserialize; use serde::Deserialize;
@@ -604,6 +605,124 @@ impl RustDeskConfigUpdate {
} }
} }
// ===== RTSP Config =====
/// API-facing view of `RtspConfig` with the password replaced by a
/// `has_password` flag so the secret is never serialized to clients.
#[typeshare]
#[derive(Debug, serde::Serialize)]
pub struct RtspConfigResponse {
    pub enabled: bool,
    // Bind address (IP) the RTSP server listens on.
    pub bind: String,
    pub port: u16,
    // Mount path, stored without surrounding slashes (see RtspConfigUpdate::apply_to).
    pub path: String,
    // NOTE(review): presumably restricts the server to a single concurrent
    // client when true — confirm against the RTSP service implementation.
    pub allow_one_client: bool,
    pub codec: RtspCodec,
    pub username: Option<String>,
    // True when a password is configured; the password itself is omitted.
    pub has_password: bool,
}
impl From<&RtspConfig> for RtspConfigResponse {
    /// Build a redacted response: every field is copied verbatim except the
    /// password, which collapses to the boolean `has_password`.
    fn from(config: &RtspConfig) -> Self {
        Self {
            enabled: config.enabled,
            bind: config.bind.clone(),
            port: config.port,
            path: config.path.clone(),
            allow_one_client: config.allow_one_client,
            codec: config.codec.clone(),
            username: config.username.clone(),
            has_password: config.password.is_some(),
        }
    }
}
/// Combined RTSP config and runtime service status for the status endpoint.
#[typeshare]
#[derive(Debug, serde::Serialize)]
pub struct RtspStatusResponse {
    pub config: RtspConfigResponse,
    // RtspServiceStatus rendered via its Display impl; kept as a String for
    // straightforward serialization and typeshare export.
    pub service_status: String,
}
impl RtspStatusResponse {
    /// Pair a redacted view of `config` with `status`, stringifying the
    /// status via its `Display` implementation.
    pub fn new(config: &RtspConfig, status: RtspServiceStatus) -> Self {
        Self {
            config: RtspConfigResponse::from(config),
            service_status: status.to_string(),
        }
    }
}
/// Partial RTSP config update: every field is optional and `None` means
/// "leave the current value unchanged".
#[typeshare]
#[derive(Debug, Deserialize)]
pub struct RtspConfigUpdate {
    pub enabled: Option<bool>,
    pub bind: Option<String>,
    pub port: Option<u16>,
    pub path: Option<String>,
    pub allow_one_client: Option<bool>,
    pub codec: Option<RtspCodec>,
    // Some("") clears the stored username (see apply_to).
    pub username: Option<String>,
    // Some("") clears the stored password (see apply_to).
    pub password: Option<String>,
}
impl RtspConfigUpdate {
    /// Reject updates that could never yield a working configuration.
    ///
    /// Checks: the port must be non-zero, the bind address must parse as an
    /// IP address, and the path must be non-empty once surrounding slashes
    /// are stripped. Fields left as `None` are not validated.
    pub fn validate(&self) -> crate::error::Result<()> {
        if self.port == Some(0) {
            return Err(AppError::BadRequest("RTSP port cannot be 0".into()));
        }
        match self.bind {
            Some(ref addr) if addr.parse::<std::net::IpAddr>().is_err() => {
                return Err(AppError::BadRequest("RTSP bind must be a valid IP".into()));
            }
            _ => {}
        }
        match self.path {
            Some(ref p) if p.trim_matches('/').is_empty() => {
                Err(AppError::BadRequest("RTSP path cannot be empty".into()))
            }
            _ => Ok(()),
        }
    }

    /// Merge this update into `config`, overwriting only the fields that are
    /// `Some`. The path is normalized by stripping surrounding slashes;
    /// empty username/password strings clear the corresponding option.
    pub fn apply_to(&self, config: &mut RtspConfig) {
        if let Some(enabled) = self.enabled {
            config.enabled = enabled;
        }
        if let Some(ref bind) = self.bind {
            config.bind = bind.clone();
        }
        if let Some(port) = self.port {
            config.port = port;
        }
        if let Some(ref path) = self.path {
            config.path = path.trim_matches('/').to_owned();
        }
        if let Some(allow) = self.allow_one_client {
            config.allow_one_client = allow;
        }
        if let Some(ref codec) = self.codec {
            config.codec = codec.clone();
        }
        if let Some(ref username) = self.username {
            // An empty string is the API's way of clearing the credential.
            config.username = (!username.is_empty()).then(|| username.clone());
        }
        if let Some(ref password) = self.password {
            config.password = (!password.is_empty()).then(|| password.clone());
        }
    }
}
// ===== Web Config ===== // ===== Web Config =====
#[typeshare] #[typeshare]
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]

View File

@@ -86,7 +86,7 @@ pub async fn start_extension(
// Start the extension // Start the extension
mgr.start(ext_id, &config.extensions) mgr.start(ext_id, &config.extensions)
.await .await
.map_err(|e| AppError::Internal(e))?; .map_err(AppError::Internal)?;
// Return updated status // Return updated status
Ok(Json(ExtensionInfo { Ok(Json(ExtensionInfo {
@@ -108,7 +108,7 @@ pub async fn stop_extension(
let mgr = &state.extensions; let mgr = &state.extensions;
// Stop the extension // Stop the extension
mgr.stop(ext_id).await.map_err(|e| AppError::Internal(e))?; mgr.stop(ext_id).await.map_err(AppError::Internal)?;
// Return updated status // Return updated status
Ok(Json(ExtensionInfo { Ok(Json(ExtensionInfo {
@@ -156,7 +156,6 @@ pub struct TtydConfigUpdate {
pub enabled: Option<bool>, pub enabled: Option<bool>,
pub port: Option<u16>, pub port: Option<u16>,
pub shell: Option<String>, pub shell: Option<String>,
pub credential: Option<String>,
} }
/// Update gostc config /// Update gostc config
@@ -203,9 +202,6 @@ pub async fn update_ttyd_config(
if let Some(ref shell) = req.shell { if let Some(ref shell) = req.shell {
ttyd.shell = shell.clone(); ttyd.shell = shell.clone();
} }
if req.credential.is_some() {
ttyd.credential = req.credential.clone();
}
}) })
.await?; .await?;
@@ -263,15 +259,17 @@ pub async fn update_gostc_config(
if was_enabled && !is_enabled { if was_enabled && !is_enabled {
state.extensions.stop(ExtensionId::Gostc).await.ok(); state.extensions.stop(ExtensionId::Gostc).await.ok();
} else if !was_enabled && is_enabled && has_key { } else if !was_enabled
if state.extensions.check_available(ExtensionId::Gostc) { && is_enabled
&& has_key
&& state.extensions.check_available(ExtensionId::Gostc)
{
state state
.extensions .extensions
.start(ExtensionId::Gostc, &new_config.extensions) .start(ExtensionId::Gostc, &new_config.extensions)
.await .await
.ok(); .ok();
} }
}
Ok(Json(new_config.extensions.gostc.clone())) Ok(Json(new_config.extensions.gostc.clone()))
} }
@@ -312,15 +310,17 @@ pub async fn update_easytier_config(
if was_enabled && !is_enabled { if was_enabled && !is_enabled {
state.extensions.stop(ExtensionId::Easytier).await.ok(); state.extensions.stop(ExtensionId::Easytier).await.ok();
} else if !was_enabled && is_enabled && has_name { } else if !was_enabled
if state.extensions.check_available(ExtensionId::Easytier) { && is_enabled
&& has_name
&& state.extensions.check_available(ExtensionId::Easytier)
{
state state
.extensions .extensions
.start(ExtensionId::Easytier, &new_config.extensions) .start(ExtensionId::Easytier, &new_config.extensions)
.await .await
.ok(); .ok();
} }
}
Ok(Json(new_config.extensions.easytier.clone())) Ok(Json(new_config.extensions.easytier.clone()))
} }

File diff suppressed because it is too large Load Diff

View File

@@ -50,6 +50,7 @@ pub fn create_router(state: Arc<AppState>) -> Router {
.route("/stream/mode", post(handlers::stream_mode_set)) .route("/stream/mode", post(handlers::stream_mode_set))
.route("/stream/bitrate", post(handlers::stream_set_bitrate)) .route("/stream/bitrate", post(handlers::stream_set_bitrate))
.route("/stream/codecs", get(handlers::stream_codecs_list)) .route("/stream/codecs", get(handlers::stream_codecs_list))
.route("/stream/constraints", get(handlers::stream_constraints_get))
// WebRTC endpoints // WebRTC endpoints
.route("/webrtc/session", post(handlers::webrtc_create_session)) .route("/webrtc/session", post(handlers::webrtc_create_session))
.route("/webrtc/offer", post(handlers::webrtc_offer)) .route("/webrtc/offer", post(handlers::webrtc_offer))
@@ -59,6 +60,7 @@ pub fn create_router(state: Arc<AppState>) -> Router {
.route("/webrtc/close", post(handlers::webrtc_close_session)) .route("/webrtc/close", post(handlers::webrtc_close_session))
// HID endpoints // HID endpoints
.route("/hid/status", get(handlers::hid_status)) .route("/hid/status", get(handlers::hid_status))
.route("/hid/otg/self-check", get(handlers::hid_otg_self_check))
.route("/hid/reset", post(handlers::hid_reset)) .route("/hid/reset", post(handlers::hid_reset))
// WebSocket HID endpoint (for MJPEG mode) // WebSocket HID endpoint (for MJPEG mode)
.route("/ws/hid", any(ws_hid_handler)) .route("/ws/hid", any(ws_hid_handler))
@@ -120,6 +122,13 @@ pub fn create_router(state: Arc<AppState>) -> Router {
"/config/rustdesk/regenerate-password", "/config/rustdesk/regenerate-password",
post(handlers::config::regenerate_device_password), post(handlers::config::regenerate_device_password),
) )
// RTSP configuration endpoints
.route("/config/rtsp", get(handlers::config::get_rtsp_config))
.route("/config/rtsp", patch(handlers::config::update_rtsp_config))
.route(
"/config/rtsp/status",
get(handlers::config::get_rtsp_status),
)
// Web server configuration // Web server configuration
.route("/config/web", get(handlers::config::get_web_config)) .route("/config/web", get(handlers::config::get_web_config))
.route("/config/web", patch(handlers::config::update_web_config)) .route("/config/web", patch(handlers::config::update_web_config))
@@ -128,6 +137,9 @@ pub fn create_router(state: Arc<AppState>) -> Router {
.route("/config/auth", patch(handlers::config::update_auth_config)) .route("/config/auth", patch(handlers::config::update_auth_config))
// System control // System control
.route("/system/restart", post(handlers::system_restart)) .route("/system/restart", post(handlers::system_restart))
.route("/update/overview", get(handlers::update_overview))
.route("/update/upgrade", post(handlers::update_upgrade))
.route("/update/status", get(handlers::update_status))
// MSD (Mass Storage Device) endpoints // MSD (Mass Storage Device) endpoints
.route("/msd/status", get(handlers::msd_status)) .route("/msd/status", get(handlers::msd_status))
.route("/msd/images", get(handlers::msd_images_list)) .route("/msd/images", get(handlers::msd_images_list))
@@ -158,6 +170,7 @@ pub fn create_router(state: Arc<AppState>) -> Router {
.route("/atx/status", get(handlers::atx_status)) .route("/atx/status", get(handlers::atx_status))
.route("/atx/power", post(handlers::atx_power)) .route("/atx/power", post(handlers::atx_power))
.route("/atx/wol", post(handlers::atx_wol)) .route("/atx/wol", post(handlers::atx_wol))
.route("/atx/wol/history", get(handlers::atx_wol_history))
// Device discovery endpoints // Device discovery endpoints
.route("/devices/atx", get(handlers::devices::list_atx_devices)) .route("/devices/atx", get(handlers::devices::list_atx_devices))
// Extension management endpoints // Extension management endpoints

View File

@@ -127,14 +127,14 @@ fn try_serve_file(path: &str) -> Option<Response<Body>> {
.first_or_octet_stream() .first_or_octet_stream()
.to_string(); .to_string();
return Some( Some(
Response::builder() Response::builder()
.status(StatusCode::OK) .status(StatusCode::OK)
.header(header::CONTENT_TYPE, mime) .header(header::CONTENT_TYPE, mime)
.header(header::CACHE_CONTROL, "public, max-age=86400") .header(header::CACHE_CONTROL, "public, max-age=86400")
.body(Body::from(data)) .body(Body::from(data))
.unwrap(), .unwrap(),
); )
} }
Err(e) => { Err(e) => {
tracing::debug!( tracing::debug!(
@@ -143,7 +143,7 @@ fn try_serve_file(path: &str) -> Option<Response<Body>> {
file_path.display(), file_path.display(),
e e
); );
return None; None
} }
} }
} }

View File

@@ -108,19 +108,15 @@ impl TurnServer {
/// Video codec preference /// Video codec preference
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum VideoCodec { pub enum VideoCodec {
#[default]
H264, H264,
VP8, VP8,
VP9, VP9,
AV1, AV1,
} }
impl Default for VideoCodec {
fn default() -> Self {
Self::H264
}
}
impl std::fmt::Display for VideoCodec { impl std::fmt::Display for VideoCodec {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self { match self {

View File

@@ -18,7 +18,9 @@ pub fn mdns_mode_from_env() -> Option<MulticastDnsMode> {
} }
pub fn mdns_mode() -> MulticastDnsMode { pub fn mdns_mode() -> MulticastDnsMode {
mdns_mode_from_env().unwrap_or(MulticastDnsMode::QueryAndGather) // Default to QueryOnly to avoid gathering .local host candidates by default.
// This is generally more stable for LAN first-connection while preserving mDNS queries.
mdns_mode_from_env().unwrap_or(MulticastDnsMode::QueryOnly)
} }
pub fn mdns_mode_label(mode: MulticastDnsMode) -> &'static str { pub fn mdns_mode_label(mode: MulticastDnsMode) -> &'static str {

View File

@@ -93,7 +93,6 @@ impl PeerConnection {
urls: turn.urls.clone(), urls: turn.urls.clone(),
username: turn.username.clone(), username: turn.username.clone(),
credential: turn.credential.clone(), credential: turn.credential.clone(),
..Default::default()
}); });
} }
@@ -318,14 +317,26 @@ impl PeerConnection {
.await .await
.map_err(|e| AppError::VideoError(format!("Failed to create answer: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Failed to create answer: {}", e)))?;
// Wait for ICE gathering complete (or timeout) after setting local description.
// This improves first-connection robustness by returning a fuller initial candidate set.
let mut gather_complete = self.pc.gathering_complete_promise().await;
// Set local description // Set local description
self.pc self.pc
.set_local_description(answer.clone()) .set_local_description(answer.clone())
.await .await
.map_err(|e| AppError::VideoError(format!("Failed to set local description: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Failed to set local description: {}", e)))?;
// Wait a bit for ICE candidates to gather const ICE_GATHER_TIMEOUT: tokio::time::Duration = tokio::time::Duration::from_millis(2500);
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; if tokio::time::timeout(ICE_GATHER_TIMEOUT, gather_complete.recv())
.await
.is_err()
{
debug!(
"ICE gathering timeout after {:?} for session {}",
ICE_GATHER_TIMEOUT, self.session_id
);
}
// Get gathered ICE candidates // Get gathered ICE candidates
let candidates = self.ice_candidates.lock().await.clone(); let candidates = self.ice_candidates.lock().await.clone();

View File

@@ -330,9 +330,7 @@ impl OpusAudioTrack {
stream_id.to_string(), stream_id.to_string(),
)); ));
Self { Self { track }
track,
}
} }
/// Get the underlying WebRTC track /// Get the underlying WebRTC track
@@ -365,10 +363,7 @@ impl OpusAudioTrack {
..Default::default() ..Default::default()
}; };
self.track self.track.write_sample(&sample).await.map_err(|e| {
.write_sample(&sample)
.await
.map_err(|e| {
error!("Failed to write Opus sample: {}", e); error!("Failed to write Opus sample: {}", e);
AppError::WebRtcError(format!("Failed to write audio sample: {}", e)) AppError::WebRtcError(format!("Failed to write audio sample: {}", e))
}) })

View File

@@ -199,7 +199,7 @@ impl VideoTrack {
let data = frame.data(); let data = frame.data();
let max_payload_size = 1200; // MTU - headers let max_payload_size = 1200; // MTU - headers
let packet_count = (data.len() + max_payload_size - 1) / max_payload_size; let packet_count = data.len().div_ceil(max_payload_size);
let mut bytes_sent = 0u64; let mut bytes_sent = 0u64;
for i in 0..packet_count { for i in 0..packet_count {

View File

@@ -292,7 +292,6 @@ impl UniversalSession {
urls: turn.urls.clone(), urls: turn.urls.clone(),
username: turn.username.clone(), username: turn.username.clone(),
credential: turn.credential.clone(), credential: turn.credential.clone(),
..Default::default()
}); });
} }
@@ -430,7 +429,9 @@ impl UniversalSession {
let candidate = IceCandidate { let candidate = IceCandidate {
candidate: candidate_str, candidate: candidate_str,
sdp_mid: candidate_json.as_ref().and_then(|j| j.sdp_mid.clone()), sdp_mid: candidate_json.as_ref().and_then(|j| j.sdp_mid.clone()),
sdp_mline_index: candidate_json.as_ref().and_then(|j| j.sdp_mline_index), sdp_mline_index: candidate_json
.as_ref()
.and_then(|j| j.sdp_mline_index),
username_fragment: candidate_json username_fragment: candidate_json
.as_ref() .as_ref()
.and_then(|j| j.username_fragment.clone()), .and_then(|j| j.username_fragment.clone()),
@@ -615,20 +616,15 @@ impl UniversalSession {
}; };
// Verify codec matches // Verify codec matches
let frame_codec = match encoded_frame.codec { let frame_codec = encoded_frame.codec;
VideoEncoderType::H264 => VideoEncoderType::H264,
VideoEncoderType::H265 => VideoEncoderType::H265,
VideoEncoderType::VP8 => VideoEncoderType::VP8,
VideoEncoderType::VP9 => VideoEncoderType::VP9,
};
if frame_codec != expected_codec { if frame_codec != expected_codec {
continue; continue;
} }
// Debug log for H265 frames // Debug log for H265 frames
if expected_codec == VideoEncoderType::H265 { if expected_codec == VideoEncoderType::H265
if encoded_frame.is_keyframe || frames_sent % 30 == 0 { && (encoded_frame.is_keyframe || frames_sent.is_multiple_of(30)) {
debug!( debug!(
"[Session-H265] Received frame #{}: size={}, keyframe={}, seq={}", "[Session-H265] Received frame #{}: size={}, keyframe={}, seq={}",
frames_sent, frames_sent,
@@ -637,7 +633,6 @@ impl UniversalSession {
encoded_frame.sequence encoded_frame.sequence
); );
} }
}
// Ensure decoder starts from a keyframe and recover on gaps. // Ensure decoder starts from a keyframe and recover on gaps.
let mut gap_detected = false; let mut gap_detected = false;
@@ -768,7 +763,7 @@ impl UniversalSession {
// 20ms at 48kHz = 960 samples // 20ms at 48kHz = 960 samples
let samples = 960u32; let samples = 960u32;
if let Err(e) = audio_track.write_packet(&opus_frame.data, samples).await { if let Err(e) = audio_track.write_packet(&opus_frame.data, samples).await {
if packets_sent % 100 == 0 { if packets_sent.is_multiple_of(100) {
debug!("Failed to write audio packet: {}", e); debug!("Failed to write audio packet: {}", e);
} }
} else { } else {
@@ -838,13 +833,24 @@ impl UniversalSession {
} }
} }
let mut gather_complete = self.pc.gathering_complete_promise().await;
self.pc self.pc
.set_local_description(answer.clone()) .set_local_description(answer.clone())
.await .await
.map_err(|e| AppError::VideoError(format!("Failed to set local description: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Failed to set local description: {}", e)))?;
// Wait for ICE candidates // Wait for ICE gathering complete (or timeout) to return a fuller initial candidate set.
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; const ICE_GATHER_TIMEOUT: Duration = Duration::from_millis(2500);
if tokio::time::timeout(ICE_GATHER_TIMEOUT, gather_complete.recv())
.await
.is_err()
{
debug!(
"ICE gathering timeout after {:?} for session {}",
ICE_GATHER_TIMEOUT, self.session_id
);
}
let candidates = self.ice_candidates.lock().await.clone(); let candidates = self.ice_candidates.lock().await.clone();
Ok(SdpAnswer::with_candidates(answer.sdp, candidates)) Ok(SdpAnswer::with_candidates(answer.sdp, candidates))

View File

@@ -285,7 +285,7 @@ impl UniversalVideoTrack {
} }
/// Get current statistics /// Get current statistics
///
/// Write an encoded frame to the track /// Write an encoded frame to the track
/// ///
/// Handles codec-specific processing: /// Handles codec-specific processing:
@@ -464,7 +464,6 @@ impl UniversalVideoTrack {
if let Err(e) = rtp_track.write_rtp(&packet).await { if let Err(e) = rtp_track.write_rtp(&packet).await {
trace!("H265 write_rtp failed: {}", e); trace!("H265 write_rtp failed: {}", e);
} }
} }
Ok(()) Ok(())

View File

@@ -35,8 +35,8 @@ use tokio::sync::RwLock;
use tracing::{debug, info, trace, warn}; use tracing::{debug, info, trace, warn};
use crate::audio::{AudioController, OpusFrame}; use crate::audio::{AudioController, OpusFrame};
use crate::events::EventBus;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::events::EventBus;
use crate::hid::HidController; use crate::hid::HidController;
use crate::video::encoder::registry::EncoderBackend; use crate::video::encoder::registry::EncoderBackend;
use crate::video::encoder::registry::VideoEncoderType; use crate::video::encoder::registry::VideoEncoderType;
@@ -250,6 +250,33 @@ impl WebRtcStreamer {
} }
} }
/// A pipeline is idle — and therefore safe to stop — only when there are no
/// active sessions and no remaining frame subscribers.
fn should_stop_pipeline(session_count: usize, subscriber_count: usize) -> bool {
    matches!((session_count, subscriber_count), (0, 0))
}
/// Stop the shared video pipeline when nothing is consuming it anymore.
///
/// `reason` is a short call-site label used only for logging.
/// NOTE(review): the session count and pipeline are sampled under separate
/// locks, so a session created between the two reads could race with the
/// stop decision — confirm callers tolerate a stop/recreate cycle.
async fn stop_pipeline_if_idle(&self, reason: &str) {
    let session_count = self.sessions.read().await.len();
    // Clone the Arc so the pipeline lock is released before stopping.
    let pipeline = self.video_pipeline.read().await.clone();
    let Some(pipeline) = pipeline else {
        // No pipeline exists; nothing to stop.
        return;
    };
    let subscriber_count = pipeline.subscriber_count();
    if Self::should_stop_pipeline(session_count, subscriber_count) {
        info!(
            "{} stopping video pipeline (sessions={}, subscribers={})",
            reason, session_count, subscriber_count
        );
        pipeline.stop();
    } else {
        debug!(
            "Keeping video pipeline alive (reason={}, sessions={}, subscribers={})",
            reason, session_count, subscriber_count
        );
    }
}
/// Ensure video pipeline is initialized and running /// Ensure video pipeline is initialized and running
async fn ensure_video_pipeline(self: &Arc<Self>) -> Result<Arc<SharedVideoPipeline>> { async fn ensure_video_pipeline(self: &Arc<Self>) -> Result<Arc<SharedVideoPipeline>> {
let mut pipeline_guard = self.video_pipeline.write().await; let mut pipeline_guard = self.video_pipeline.write().await;
@@ -270,7 +297,6 @@ impl WebRtcStreamer {
bitrate_preset: config.bitrate_preset, bitrate_preset: config.bitrate_preset,
fps: config.fps, fps: config.fps,
encoder_backend: config.encoder_backend, encoder_backend: config.encoder_backend,
..Default::default()
}; };
info!("Creating shared video pipeline for {:?}", codec); info!("Creating shared video pipeline for {:?}", codec);
@@ -311,7 +337,9 @@ impl WebRtcStreamer {
} }
drop(pipeline_guard); drop(pipeline_guard);
info!("Video pipeline stopped, but keeping capture config for new sessions"); info!(
"Video pipeline stopped, but keeping capture config for new sessions"
);
} }
break; break;
} }
@@ -739,13 +767,7 @@ impl WebRtcStreamer {
session.close().await?; session.close().await?;
} }
// Stop pipeline if no more sessions self.stop_pipeline_if_idle("After close_session").await;
if self.sessions.read().await.is_empty() {
if let Some(ref pipeline) = *self.video_pipeline.read().await {
info!("No more sessions, stopping video pipeline");
pipeline.stop();
}
}
Ok(()) Ok(())
} }
@@ -762,11 +784,8 @@ impl WebRtcStreamer {
} }
} }
// Stop pipeline
drop(sessions); drop(sessions);
if let Some(ref pipeline) = *self.video_pipeline.read().await { self.stop_pipeline_if_idle("After close_all_sessions").await;
pipeline.stop();
}
count count
} }
@@ -825,14 +844,9 @@ impl WebRtcStreamer {
sessions.remove(id); sessions.remove(id);
} }
// Stop pipeline if no more sessions
if sessions.is_empty() {
drop(sessions); drop(sessions);
if let Some(ref pipeline) = *self.video_pipeline.read().await { self.stop_pipeline_if_idle("After cleanup_closed_sessions")
info!("No more sessions after cleanup, stopping video pipeline"); .await;
pipeline.stop();
}
}
} }
} }
@@ -926,10 +940,7 @@ impl WebRtcStreamer {
let pipeline = pipeline_for_callback.clone(); let pipeline = pipeline_for_callback.clone();
let sid = sid.clone(); let sid = sid.clone();
tokio::spawn(async move { tokio::spawn(async move {
info!( info!("Requesting keyframe for session {} after reconnect", sid);
"Requesting keyframe for session {} after reconnect",
sid
);
pipeline.request_keyframe().await; pipeline.request_keyframe().await;
}); });
}); });
@@ -992,4 +1003,12 @@ mod tests {
let codecs = streamer.supported_video_codecs(); let codecs = streamer.supported_video_codecs();
assert!(codecs.contains(&VideoCodecType::H264)); assert!(codecs.contains(&VideoCodecType::H264));
} }
#[test]
fn stop_pipeline_requires_no_sessions_and_no_subscribers() {
assert!(WebRtcStreamer::should_stop_pipeline(0, 0));
assert!(!WebRtcStreamer::should_stop_pipeline(1, 0));
assert!(!WebRtcStreamer::should_stop_pipeline(0, 1));
assert!(!WebRtcStreamer::should_stop_pipeline(2, 3));
}
} }

View File

@@ -136,6 +136,15 @@ export const msdConfigApi = {
// ===== ATX 配置 API ===== // ===== ATX 配置 API =====
import type { AtxDevices } from '@/types/generated' import type { AtxDevices } from '@/types/generated'
export interface WolHistoryEntry {
mac_address: string
updated_at: number
}
export interface WolHistoryResponse {
history: WolHistoryEntry[]
}
export const atxConfigApi = { export const atxConfigApi = {
/** /**
* 获取 ATX 配置 * 获取 ATX 配置
@@ -166,6 +175,13 @@ export const atxConfigApi = {
method: 'POST', method: 'POST',
body: JSON.stringify({ mac_address: macAddress }), body: JSON.stringify({ mac_address: macAddress }),
}), }),
/**
* 获取 WOL 历史记录(服务端持久化)
* @param limit 返回条数1-50
*/
getWolHistory: (limit = 5) =>
request<WolHistoryResponse>(`/atx/wol/history?limit=${Math.max(1, Math.min(50, limit))}`),
} }
// ===== Audio 配置 API ===== // ===== Audio 配置 API =====
@@ -330,6 +346,49 @@ export const rustdeskConfigApi = {
}), }),
} }
// ===== RTSP 配置 API =====
export type RtspCodec = 'h264' | 'h265'
export interface RtspConfigResponse {
enabled: boolean
bind: string
port: number
path: string
allow_one_client: boolean
codec: RtspCodec
username?: string | null
has_password: boolean
}
export interface RtspConfigUpdate {
enabled?: boolean
bind?: string
port?: number
path?: string
allow_one_client?: boolean
codec?: RtspCodec
username?: string
password?: string
}
export interface RtspStatusResponse {
config: RtspConfigResponse
service_status: string
}
export const rtspConfigApi = {
get: () => request<RtspConfigResponse>('/config/rtsp'),
update: (config: RtspConfigUpdate) =>
request<RtspConfigResponse>('/config/rtsp', {
method: 'PATCH',
body: JSON.stringify(config),
}),
getStatus: () => request<RtspStatusResponse>('/config/rtsp/status'),
}
// ===== Web 服务器配置 API ===== // ===== Web 服务器配置 API =====
/** Web 服务器配置 */ /** Web 服务器配置 */

View File

@@ -101,6 +101,46 @@ export const systemApi = {
}), }),
} }
export type UpdateChannel = 'stable' | 'beta'
export interface UpdateOverviewResponse {
success: boolean
current_version: string
channel: UpdateChannel
latest_version: string
upgrade_available: boolean
target_version?: string
notes_between: Array<{
version: string
published_at: string
notes: string[]
}>
}
export interface UpdateStatusResponse {
success: boolean
phase: 'idle' | 'checking' | 'downloading' | 'verifying' | 'installing' | 'restarting' | 'success' | 'failed'
progress: number
current_version: string
target_version?: string
message?: string
last_error?: string
}
export const updateApi = {
overview: (channel: UpdateChannel = 'stable') =>
request<UpdateOverviewResponse>(`/update/overview?channel=${encodeURIComponent(channel)}`),
upgrade: (payload: { channel?: UpdateChannel; target_version?: string }) =>
request<{ success: boolean; message?: string }>('/update/upgrade', {
method: 'POST',
body: JSON.stringify(payload),
}),
status: () =>
request<UpdateStatusResponse>('/update/status'),
}
// Stream API // Stream API
export interface VideoCodecInfo { export interface VideoCodecInfo {
id: string id: string
@@ -124,6 +164,19 @@ export interface AvailableCodecsResponse {
codecs: VideoCodecInfo[] codecs: VideoCodecInfo[]
} }
export interface StreamConstraintsResponse {
success: boolean
allowed_codecs: string[]
locked_codec: string | null
disallow_mjpeg: boolean
sources: {
rustdesk: boolean
rtsp: boolean
}
reason: string
current_mode: string
}
export const streamApi = { export const streamApi = {
status: () => status: () =>
request<{ request<{
@@ -161,6 +214,9 @@ export const streamApi = {
getCodecs: () => getCodecs: () =>
request<AvailableCodecsResponse>('/stream/codecs'), request<AvailableCodecsResponse>('/stream/codecs'),
getConstraints: () =>
request<StreamConstraintsResponse>('/stream/constraints'),
setBitratePreset: (bitrate_preset: import('@/types/generated').BitratePreset) => setBitratePreset: (bitrate_preset: import('@/types/generated').BitratePreset) =>
request<{ success: boolean; message?: string }>('/stream/bitrate', { request<{ success: boolean; message?: string }>('/stream/bitrate', {
method: 'POST', method: 'POST',
@@ -186,10 +242,10 @@ export const webrtcApi = {
createSession: () => createSession: () =>
request<{ session_id: string }>('/webrtc/session', { method: 'POST' }), request<{ session_id: string }>('/webrtc/session', { method: 'POST' }),
offer: (sdp: string, clientId?: string) => offer: (sdp: string) =>
request<{ sdp: string; session_id: string; ice_candidates: IceCandidate[] }>('/webrtc/offer', { request<{ sdp: string; session_id: string; ice_candidates: IceCandidate[] }>('/webrtc/offer', {
method: 'POST', method: 'POST',
body: JSON.stringify({ sdp, client_id: clientId }), body: JSON.stringify({ sdp }),
}), }),
addIceCandidate: (sessionId: string, candidate: IceCandidate) => addIceCandidate: (sessionId: string, candidate: IceCandidate) =>
@@ -247,17 +303,34 @@ export const hidApi = {
screen_resolution: [number, number] | null screen_resolution: [number, number] | null
}>('/hid/status'), }>('/hid/status'),
keyboard: async (type: 'down' | 'up', key: number, modifiers?: { otgSelfCheck: () =>
ctrl?: boolean request<{
shift?: boolean overall_ok: boolean
alt?: boolean error_count: number
meta?: boolean warning_count: number
}) => { hid_backend: string
selected_udc: string | null
bound_udc: string | null
udc_state: string | null
udc_speed: string | null
available_udcs: string[]
other_gadgets: string[]
checks: Array<{
id: string
ok: boolean
level: 'info' | 'warn' | 'error'
message: string
hint?: string
path?: string
}>
}>('/hid/otg/self-check'),
keyboard: async (type: 'down' | 'up', key: number, modifier?: number) => {
await ensureHidConnection() await ensureHidConnection()
const event: HidKeyboardEvent = { const event: HidKeyboardEvent = {
type: type === 'down' ? 'keydown' : 'keyup', type: type === 'down' ? 'keydown' : 'keyup',
key, key,
modifiers, modifier: (modifier ?? 0) & 0xff,
} }
await hidWs.sendKeyboard(event) await hidWs.sendKeyboard(event)
return { success: true } return { success: true }
@@ -481,6 +554,25 @@ export const msdApi = {
}), }),
} }
interface SerialDeviceOption {
path: string
name: string
}
function getSerialDevicePriority(path: string): number {
if (/^\/dev\/ttyUSB/i.test(path)) return 0
if (/^\/dev\/(ttyS|S)/i.test(path)) return 2
return 1
}
function sortSerialDevices(serialDevices: SerialDeviceOption[]): SerialDeviceOption[] {
return [...serialDevices].sort((a, b) => {
const priorityDiff = getSerialDevicePriority(a.path) - getSerialDevicePriority(b.path)
if (priorityDiff !== 0) return priorityDiff
return a.path.localeCompare(b.path, undefined, { numeric: true, sensitivity: 'base' })
})
}
// Config API // Config API
/** @deprecated 使用域特定 APIvideoConfigApi, hidConfigApi 等)替代 */ /** @deprecated 使用域特定 APIvideoConfigApi, hidConfigApi 等)替代 */
export const configApi = { export const configApi = {
@@ -493,8 +585,8 @@ export const configApi = {
body: JSON.stringify(updates), body: JSON.stringify(updates),
}), }),
listDevices: () => listDevices: async () => {
request<{ const result = await request<{
video: Array<{ video: Array<{
path: string path: string
name: string name: string
@@ -522,7 +614,13 @@ export const configApi = {
ttyd_available: boolean ttyd_available: boolean
rustdesk_available: boolean rustdesk_available: boolean
} }
}>('/devices'), }>('/devices')
return {
...result,
serial: sortSerialDevices(result.serial),
}
},
} }
// 导出新的域分离配置 API // 导出新的域分离配置 API
@@ -536,11 +634,15 @@ export {
audioConfigApi, audioConfigApi,
extensionsApi, extensionsApi,
rustdeskConfigApi, rustdeskConfigApi,
rtspConfigApi,
webConfigApi, webConfigApi,
type RustDeskConfigResponse, type RustDeskConfigResponse,
type RustDeskStatusResponse, type RustDeskStatusResponse,
type RustDeskConfigUpdate, type RustDeskConfigUpdate,
type RustDeskPasswordResponse, type RustDeskPasswordResponse,
type RtspConfigResponse,
type RtspConfigUpdate,
type RtspStatusResponse,
type WebConfig, type WebConfig,
} from './config' } from './config'

View File

@@ -52,7 +52,7 @@ async function handleLogout() {
</script> </script>
<template> <template>
<div class="h-screen flex flex-col bg-background overflow-hidden"> <div class="h-screen h-dvh flex flex-col bg-background overflow-hidden">
<!-- Header --> <!-- Header -->
<header class="shrink-0 z-50 w-full border-b bg-background/95 backdrop-blur supports-[backdrop-filter]:bg-background/60"> <header class="shrink-0 z-50 w-full border-b bg-background/95 backdrop-blur supports-[backdrop-filter]:bg-background/60">
<div class="flex h-14 items-center px-4 max-w-full"> <div class="flex h-14 items-center px-4 max-w-full">
@@ -86,14 +86,14 @@ async function handleLogout() {
</span> </span>
<!-- Theme Toggle --> <!-- Theme Toggle -->
<Button variant="ghost" size="icon" @click="toggleTheme"> <Button variant="ghost" size="icon" :aria-label="t('common.toggleTheme')" @click="toggleTheme">
<Sun class="h-4 w-4 rotate-0 scale-100 transition-all dark:-rotate-90 dark:scale-0" /> <Sun class="h-4 w-4 rotate-0 scale-100 transition-all dark:-rotate-90 dark:scale-0" />
<Moon class="absolute h-4 w-4 rotate-90 scale-0 transition-all dark:rotate-0 dark:scale-100" /> <Moon class="absolute h-4 w-4 rotate-90 scale-0 transition-all dark:rotate-0 dark:scale-100" />
<span class="sr-only">{{ t('common.toggleTheme') }}</span> <span class="sr-only">{{ t('common.toggleTheme') }}</span>
</Button> </Button>
<!-- Language Toggle --> <!-- Language Toggle -->
<Button variant="ghost" size="icon" @click="toggleLanguage"> <Button variant="ghost" size="icon" :aria-label="t('common.toggleLanguage')" @click="toggleLanguage">
<Languages class="h-4 w-4" /> <Languages class="h-4 w-4" />
<span class="sr-only">{{ t('common.toggleLanguage') }}</span> <span class="sr-only">{{ t('common.toggleLanguage') }}</span>
</Button> </Button>
@@ -101,7 +101,7 @@ async function handleLogout() {
<!-- Mobile Menu --> <!-- Mobile Menu -->
<DropdownMenu> <DropdownMenu>
<DropdownMenuTrigger as-child class="md:hidden"> <DropdownMenuTrigger as-child class="md:hidden">
<Button variant="ghost" size="icon"> <Button variant="ghost" size="icon" :aria-label="t('common.menu')">
<Menu class="h-4 w-4" /> <Menu class="h-4 w-4" />
</Button> </Button>
</DropdownMenuTrigger> </DropdownMenuTrigger>
@@ -119,7 +119,7 @@ async function handleLogout() {
</DropdownMenu> </DropdownMenu>
<!-- Logout Button (Desktop) --> <!-- Logout Button (Desktop) -->
<Button variant="ghost" size="icon" class="hidden md:flex" @click="handleLogout"> <Button variant="ghost" size="icon" class="hidden md:flex" :aria-label="t('nav.logout')" @click="handleLogout">
<LogOut class="h-4 w-4" /> <LogOut class="h-4 w-4" />
<span class="sr-only">{{ t('nav.logout') }}</span> <span class="sr-only">{{ t('nav.logout') }}</span>
</Button> </Button>

View File

@@ -1,5 +1,5 @@
<script setup lang="ts"> <script setup lang="ts">
import { ref, computed } from 'vue' import { ref, computed, watch } from 'vue'
import { useI18n } from 'vue-i18n' import { useI18n } from 'vue-i18n'
import { Button } from '@/components/ui/button' import { Button } from '@/components/ui/button'
import { Badge } from '@/components/ui/badge' import { Badge } from '@/components/ui/badge'
@@ -18,6 +18,7 @@ import {
AlertDialogTitle, AlertDialogTitle,
} from '@/components/ui/alert-dialog' } from '@/components/ui/alert-dialog'
import { Power, RotateCcw, CircleDot, Wifi, Send } from 'lucide-vue-next' import { Power, RotateCcw, CircleDot, Wifi, Send } from 'lucide-vue-next'
import { atxConfigApi } from '@/api/config'
const emit = defineEmits<{ const emit = defineEmits<{
(e: 'close'): void (e: 'close'): void
@@ -41,6 +42,7 @@ const confirmDialogOpen = ref(false)
const wolMacAddress = ref('') const wolMacAddress = ref('')
const wolHistory = ref<string[]>([]) const wolHistory = ref<string[]>([])
const wolSending = ref(false) const wolSending = ref(false)
const wolLoadingHistory = ref(false)
const powerStateColor = computed(() => { const powerStateColor = computed(() => {
switch (powerState.value) { switch (powerState.value) {
@@ -110,16 +112,11 @@ function sendWol() {
emit('wol', mac) emit('wol', mac)
// Add to history if not exists // Optimistic update, then sync from server after request likely completes
if (!wolHistory.value.includes(mac)) { wolHistory.value = [mac, ...wolHistory.value.filter(item => item !== mac)].slice(0, 5)
wolHistory.value.unshift(mac) setTimeout(() => {
// Keep only last 5 loadWolHistory().catch(() => {})
if (wolHistory.value.length > 5) { }, 1200)
wolHistory.value.pop()
}
// Save to localStorage
localStorage.setItem('wol_history', JSON.stringify(wolHistory.value))
}
setTimeout(() => { setTimeout(() => {
wolSending.value = false wolSending.value = false
@@ -130,15 +127,27 @@ function selectFromHistory(mac: string) {
wolMacAddress.value = mac wolMacAddress.value = mac
} }
// Load WOL history on mount async function loadWolHistory() {
const savedHistory = localStorage.getItem('wol_history') wolLoadingHistory.value = true
if (savedHistory) {
try { try {
wolHistory.value = JSON.parse(savedHistory) const response = await atxConfigApi.getWolHistory(5)
} catch (e) { wolHistory.value = response.history.map(item => item.mac_address)
} catch {
wolHistory.value = [] wolHistory.value = []
} finally {
wolLoadingHistory.value = false
} }
} }
watch(
() => activeTab.value,
(tab) => {
if (tab === 'wol') {
loadWolHistory().catch(() => {})
}
},
{ immediate: true },
)
</script> </script>
<template> <template>
@@ -234,6 +243,10 @@ if (savedHistory) {
</p> </p>
</div> </div>
<p v-if="wolLoadingHistory" class="text-xs text-muted-foreground">
{{ t('common.loading') }}
</p>
<!-- History --> <!-- History -->
<div v-if="wolHistory.length > 0" class="space-y-2"> <div v-if="wolHistory.length > 0" class="space-y-2">
<Separator /> <Separator />

View File

@@ -69,24 +69,24 @@ async function typeChar(char: string, signal: AbortSignal): Promise<boolean> {
return true return true
} }
const { keyCode, shift } = mapping const { hidCode, shift } = mapping
const modifiers = shift ? { shift: true } : undefined const modifier = shift ? 0x02 : 0
try { try {
// Send keydown // Send keydown
await hidApi.keyboard('down', keyCode, modifiers) await hidApi.keyboard('down', hidCode, modifier)
// Small delay between down and up to ensure key is registered // Small delay between down and up to ensure key is registered
await sleep(5) await sleep(5)
if (signal.aborted) { if (signal.aborted) {
// Even if aborted, still send keyup to release the key // Even if aborted, still send keyup to release the key
await hidApi.keyboard('up', keyCode, modifiers) await hidApi.keyboard('up', hidCode, modifier)
return false return false
} }
// Send keyup // Send keyup
await hidApi.keyboard('up', keyCode, modifiers) await hidApi.keyboard('up', hidCode, modifier)
// Additional small delay after keyup to ensure it's processed // Additional small delay after keyup to ensure it's processed
await sleep(2) await sleep(2)
@@ -96,7 +96,7 @@ async function typeChar(char: string, signal: AbortSignal): Promise<boolean> {
console.error('[Paste] Failed to type character:', char, error) console.error('[Paste] Failed to type character:', char, error)
// Try to release the key even on error // Try to release the key even on error
try { try {
await hidApi.keyboard('up', keyCode, modifiers) await hidApi.keyboard('up', hidCode, modifier)
} catch { } catch {
// Ignore cleanup errors // Ignore cleanup errors
} }

View File

@@ -442,7 +442,7 @@ onUnmounted(() => {
<Sheet :open="props.open" @update:open="emit('update:open', $event)"> <Sheet :open="props.open" @update:open="emit('update:open', $event)">
<SheetContent <SheetContent
side="right" side="right"
class="w-[400px] sm:w-[440px] p-0 border-l border-slate-200 dark:border-slate-800 bg-white dark:bg-slate-950" class="w-[90vw] max-w-[440px] p-0 border-l border-slate-200 dark:border-slate-800 bg-white dark:bg-slate-950"
> >
<!-- Header --> <!-- Header -->
<SheetHeader class="px-6 py-3 border-b border-slate-200 dark:border-slate-800"> <SheetHeader class="px-6 py-3 border-b border-slate-200 dark:border-slate-800">
@@ -454,7 +454,7 @@ onUnmounted(() => {
</div> </div>
</SheetHeader> </SheetHeader>
<ScrollArea class="h-[calc(100vh-60px)]"> <ScrollArea class="h-[calc(100dvh-60px)]">
<div class="px-6 py-4 space-y-6"> <div class="px-6 py-4 space-y-6">
<!-- Video Section Header --> <!-- Video Section Header -->
<div> <div>

View File

@@ -129,9 +129,11 @@ const statusBadgeText = computed(() => {
<HoverCard v-if="!prefersPopover" :open-delay="200" :close-delay="100"> <HoverCard v-if="!prefersPopover" :open-delay="200" :close-delay="100">
<HoverCardTrigger as-child> <HoverCardTrigger as-child>
<!-- New layout: vertical with title on top, status+quickInfo on bottom --> <!-- New layout: vertical with title on top, status+quickInfo on bottom -->
<div <button
type="button"
:aria-label="`${title}: ${quickInfo || subtitle || statusText}`"
:class="cn( :class="cn(
'flex flex-col gap-0.5 rounded-md border cursor-pointer transition-colors', 'flex flex-col gap-0.5 rounded-md border cursor-pointer transition-colors text-left focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2',
compact ? 'px-2 py-1 text-xs min-w-[80px]' : 'px-3 py-1.5 text-sm min-w-[100px]', compact ? 'px-2 py-1 text-xs min-w-[80px]' : 'px-3 py-1.5 text-sm min-w-[100px]',
'bg-white dark:bg-slate-800 hover:bg-slate-50 dark:hover:bg-slate-700', 'bg-white dark:bg-slate-800 hover:bg-slate-50 dark:hover:bg-slate-700',
'border-slate-200 dark:border-slate-700', 'border-slate-200 dark:border-slate-700',
@@ -147,7 +149,7 @@ const statusBadgeText = computed(() => {
{{ quickInfo || subtitle || statusText }} {{ quickInfo || subtitle || statusText }}
</span> </span>
</div> </div>
</div> </button>
</HoverCardTrigger> </HoverCardTrigger>
<HoverCardContent class="w-80" :align="hoverAlign"> <HoverCardContent class="w-80" :align="hoverAlign">
@@ -228,9 +230,11 @@ const statusBadgeText = computed(() => {
<Popover v-else> <Popover v-else>
<PopoverTrigger as-child> <PopoverTrigger as-child>
<!-- New layout: vertical with title on top, status+quickInfo on bottom --> <!-- New layout: vertical with title on top, status+quickInfo on bottom -->
<div <button
type="button"
:aria-label="`${title}: ${quickInfo || subtitle || statusText}`"
:class="cn( :class="cn(
'flex flex-col gap-0.5 rounded-md border cursor-pointer transition-colors', 'flex flex-col gap-0.5 rounded-md border cursor-pointer transition-colors text-left focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2',
compact ? 'px-2 py-1 text-xs min-w-[80px]' : 'px-3 py-1.5 text-sm min-w-[100px]', compact ? 'px-2 py-1 text-xs min-w-[80px]' : 'px-3 py-1.5 text-sm min-w-[100px]',
'bg-white dark:bg-slate-800 hover:bg-slate-50 dark:hover:bg-slate-700', 'bg-white dark:bg-slate-800 hover:bg-slate-50 dark:hover:bg-slate-700',
'border-slate-200 dark:border-slate-700', 'border-slate-200 dark:border-slate-700',
@@ -246,7 +250,7 @@ const statusBadgeText = computed(() => {
{{ quickInfo || subtitle || statusText }} {{ quickInfo || subtitle || statusText }}
</span> </span>
</div> </div>
</div> </button>
</PopoverTrigger> </PopoverTrigger>
<PopoverContent class="w-80" :align="hoverAlign"> <PopoverContent class="w-80" :align="hoverAlign">

View File

@@ -17,9 +17,16 @@ import {
SelectTrigger, SelectTrigger,
SelectValue, SelectValue,
} from '@/components/ui/select' } from '@/components/ui/select'
import { Monitor, RefreshCw, Loader2, Settings, Zap, Scale, Image } from 'lucide-vue-next' import { Monitor, RefreshCw, Loader2, Settings, Zap, Scale, Image, AlertTriangle } from 'lucide-vue-next'
import HelpTooltip from '@/components/HelpTooltip.vue' import HelpTooltip from '@/components/HelpTooltip.vue'
import { configApi, streamApi, type VideoCodecInfo, type EncoderBackendInfo, type BitratePreset } from '@/api' import {
configApi,
streamApi,
type VideoCodecInfo,
type EncoderBackendInfo,
type BitratePreset,
type StreamConstraintsResponse,
} from '@/api'
import { useConfigStore } from '@/stores/config' import { useConfigStore } from '@/stores/config'
import { useRouter } from 'vue-router' import { useRouter } from 'vue-router'
@@ -64,7 +71,50 @@ const loadingCodecs = ref(false)
// Backend list // Backend list
const backends = ref<EncoderBackendInfo[]>([]) const backends = ref<EncoderBackendInfo[]>([])
const constraints = ref<StreamConstraintsResponse | null>(null)
const currentEncoderBackend = computed(() => configStore.stream?.encoder || 'auto') const currentEncoderBackend = computed(() => configStore.stream?.encoder || 'auto')
const isRtspEnabled = computed(() => {
if (typeof configStore.rtspStatus?.config?.enabled === 'boolean') {
return configStore.rtspStatus.config.enabled
}
return !!configStore.rtspConfig?.enabled
})
const isRustdeskEnabled = computed(() => {
if (typeof configStore.rustdeskStatus?.config?.enabled === 'boolean') {
return configStore.rustdeskStatus.config.enabled
}
return !!configStore.rustdeskConfig?.enabled
})
const isRtspCodecLocked = computed(() => isRtspEnabled.value)
const isRustdeskWebrtcLocked = computed(() => !isRtspEnabled.value && isRustdeskEnabled.value)
const codecLockSources = computed(() => {
if (isRtspCodecLocked.value) {
return isRustdeskEnabled.value ? 'RTSP/RustDesk' : 'RTSP'
}
if (isRustdeskWebrtcLocked.value) return 'RustDesk'
return ''
})
const codecLockMessage = computed(() => {
if (!codecLockSources.value) return ''
return t('actionbar.multiSourceCodecLocked', { sources: codecLockSources.value })
})
const videoParamWarningSources = computed(() => {
if (isRtspEnabled.value && isRustdeskEnabled.value) return 'RTSP/RustDesk'
if (isRtspEnabled.value) return 'RTSP'
if (isRustdeskEnabled.value) return 'RustDesk'
return ''
})
const videoParamWarningMessage = computed(() => {
if (!videoParamWarningSources.value) return ''
return t('actionbar.multiSourceVideoParamsWarning', { sources: videoParamWarningSources.value })
})
const isCodecLocked = computed(() => !!codecLockMessage.value)
const isCodecOptionDisabled = (codecId: string): boolean => {
if (!isBrowserSupported(codecId)) return true
if (isRustdeskWebrtcLocked.value && codecId === 'mjpeg') return true
return false
}
// Browser supported codecs (WebRTC receive capabilities) // Browser supported codecs (WebRTC receive capabilities)
const browserSupportedCodecs = ref<Set<string>>(new Set()) const browserSupportedCodecs = ref<Set<string>>(new Set())
@@ -220,7 +270,7 @@ const availableCodecs = computed(() => {
const backend = backends.value.find(b => b.id === currentEncoderBackend.value) const backend = backends.value.find(b => b.id === currentEncoderBackend.value)
if (!backend) return allAvailable if (!backend) return allAvailable
return allAvailable const backendFiltered = allAvailable
.filter(codec => { .filter(codec => {
// MJPEG is always available (doesn't require encoder) // MJPEG is always available (doesn't require encoder)
if (codec.id === 'mjpeg') return true if (codec.id === 'mjpeg') return true
@@ -238,6 +288,13 @@ const availableCodecs = computed(() => {
backend: backend.name, backend: backend.name,
} }
}) })
const allowed = constraints.value?.allowed_codecs
if (!allowed || allowed.length === 0) {
return backendFiltered
}
return backendFiltered.filter(codec => allowed.includes(codec.id))
}) })
// Cascading filters // Cascading filters
@@ -303,6 +360,14 @@ async function loadCodecs() {
} }
} }
async function loadConstraints() {
try {
constraints.value = await streamApi.getConstraints()
} catch {
constraints.value = null
}
}
// Navigate to settings page (video tab) // Navigate to settings page (video tab)
function goToSettings() { function goToSettings() {
router.push('/settings?tab=video') router.push('/settings?tab=video')
@@ -339,6 +404,22 @@ function syncFromCurrentIfChanged() {
// Handle video mode change // Handle video mode change
function handleVideoModeChange(mode: unknown) { function handleVideoModeChange(mode: unknown) {
if (typeof mode !== 'string') return if (typeof mode !== 'string') return
if (isRtspCodecLocked.value) {
toast.warning(codecLockMessage.value)
return
}
if (isRustdeskWebrtcLocked.value && mode === 'mjpeg') {
toast.warning(codecLockMessage.value)
return
}
if (constraints.value?.allowed_codecs?.length && !constraints.value.allowed_codecs.includes(mode)) {
toast.error(constraints.value.reason || t('actionbar.selectMode'))
return
}
emit('update:videoMode', mode as VideoMode) emit('update:videoMode', mode as VideoMode)
} }
@@ -466,9 +547,13 @@ watch(() => props.open, (isOpen) => {
loadCodecs() loadCodecs()
} }
loadConstraints()
Promise.all([ Promise.all([
configStore.refreshVideo(), configStore.refreshVideo(),
configStore.refreshStream(), configStore.refreshStream(),
configStore.refreshRtspStatus(),
configStore.refreshRustdeskStatus(),
]).then(() => { ]).then(() => {
initializeFromCurrent() initializeFromCurrent()
}).catch(() => { }).catch(() => {
@@ -508,7 +593,7 @@ watch(currentConfig, () => {
<Select <Select
:model-value="props.videoMode" :model-value="props.videoMode"
@update:model-value="handleVideoModeChange" @update:model-value="handleVideoModeChange"
:disabled="loadingCodecs || availableCodecs.length === 0" :disabled="loadingCodecs || availableCodecs.length === 0 || isRtspCodecLocked"
> >
<SelectTrigger class="h-8 text-xs"> <SelectTrigger class="h-8 text-xs">
<div v-if="selectedCodecInfo" class="flex items-center gap-1.5 truncate"> <div v-if="selectedCodecInfo" class="flex items-center gap-1.5 truncate">
@@ -530,8 +615,8 @@ watch(currentConfig, () => {
v-for="codec in availableCodecs" v-for="codec in availableCodecs"
:key="codec.id" :key="codec.id"
:value="codec.id" :value="codec.id"
:disabled="!isBrowserSupported(codec.id)" :disabled="isCodecOptionDisabled(codec.id)"
:class="['text-xs', { 'opacity-50': !isBrowserSupported(codec.id) }]" :class="['text-xs', { 'opacity-50': isCodecOptionDisabled(codec.id) }]"
> >
<div class="flex items-center gap-2"> <div class="flex items-center gap-2">
<span>{{ codec.name }}</span> <span>{{ codec.name }}</span>
@@ -558,6 +643,9 @@ watch(currentConfig, () => {
<p v-if="props.videoMode !== 'mjpeg'" class="text-xs text-muted-foreground"> <p v-if="props.videoMode !== 'mjpeg'" class="text-xs text-muted-foreground">
{{ t('actionbar.webrtcHint') }} {{ t('actionbar.webrtcHint') }}
</p> </p>
<p v-if="isCodecLocked" class="text-xs text-amber-600 dark:text-amber-400">
{{ codecLockMessage }}
</p>
</div> </div>
<!-- Bitrate Preset - Only shown for WebRTC modes --> <!-- Bitrate Preset - Only shown for WebRTC modes -->
@@ -624,6 +712,16 @@ watch(currentConfig, () => {
<Separator /> <Separator />
<div class="space-y-3"> <div class="space-y-3">
<div
v-if="videoParamWarningMessage"
class="rounded-md border border-amber-500/30 bg-amber-500/10 px-2.5 py-2"
>
<p class="flex items-start gap-1.5 text-xs text-amber-700 dark:text-amber-300">
<AlertTriangle class="h-3.5 w-3.5 mt-0.5 shrink-0" />
<span>{{ videoParamWarningMessage }}</span>
</p>
</div>
<div class="flex items-center justify-between"> <div class="flex items-center justify-between">
<h5 class="text-xs font-medium text-muted-foreground">{{ t('actionbar.deviceSettings') }}</h5> <h5 class="text-xs font-medium text-muted-foreground">{{ t('actionbar.deviceSettings') }}</h5>
<Button <Button
@@ -655,7 +753,7 @@ watch(currentConfig, () => {
:value="device.path" :value="device.path"
class="text-xs" class="text-xs"
> >
{{ device.name }} {{ device.name }} ({{ device.path }})
</SelectItem> </SelectItem>
</SelectContent> </SelectContent>
</Select> </Select>

View File

@@ -9,6 +9,7 @@ import {
consumerKeys, consumerKeys,
latchingKeys, latchingKeys,
modifiers, modifiers,
updateModifierMaskForHidKey,
type KeyName, type KeyName,
type ConsumerKeyName, type ConsumerKeyName,
} from '@/lib/keyboardMappings' } from '@/lib/keyboardMappings'
@@ -304,9 +305,10 @@ async function onKeyDown(key: string) {
// Handle latching keys (Caps Lock, etc.) // Handle latching keys (Caps Lock, etc.)
if ((latchingKeys as readonly string[]).includes(cleanKey)) { if ((latchingKeys as readonly string[]).includes(cleanKey)) {
emit('keyDown', cleanKey) emit('keyDown', cleanKey)
await sendKeyPress(keyCode, true) const currentMask = pressedModifiers.value & 0xff
await sendKeyPress(keyCode, true, currentMask)
setTimeout(() => { setTimeout(() => {
sendKeyPress(keyCode, false) sendKeyPress(keyCode, false, currentMask)
emit('keyUp', cleanKey) emit('keyUp', cleanKey)
}, 100) }, 100)
return return
@@ -318,12 +320,14 @@ async function onKeyDown(key: string) {
const isCurrentlyDown = (pressedModifiers.value & mask) !== 0 const isCurrentlyDown = (pressedModifiers.value & mask) !== 0
if (isCurrentlyDown) { if (isCurrentlyDown) {
pressedModifiers.value &= ~mask const nextMask = pressedModifiers.value & ~mask
await sendKeyPress(keyCode, false) pressedModifiers.value = nextMask
await sendKeyPress(keyCode, false, nextMask)
emit('keyUp', cleanKey) emit('keyUp', cleanKey)
} else { } else {
pressedModifiers.value |= mask const nextMask = pressedModifiers.value | mask
await sendKeyPress(keyCode, true) pressedModifiers.value = nextMask
await sendKeyPress(keyCode, true, nextMask)
emit('keyDown', cleanKey) emit('keyDown', cleanKey)
} }
updateKeyboardButtonTheme() updateKeyboardButtonTheme()
@@ -333,11 +337,12 @@ async function onKeyDown(key: string) {
// Regular key: press and release // Regular key: press and release
keysDown.value.push(cleanKey) keysDown.value.push(cleanKey)
emit('keyDown', cleanKey) emit('keyDown', cleanKey)
await sendKeyPress(keyCode, true) const currentMask = pressedModifiers.value & 0xff
await sendKeyPress(keyCode, true, currentMask)
updateKeyboardButtonTheme() updateKeyboardButtonTheme()
setTimeout(async () => { setTimeout(async () => {
keysDown.value = keysDown.value.filter(k => k !== cleanKey) keysDown.value = keysDown.value.filter(k => k !== cleanKey)
await sendKeyPress(keyCode, false) await sendKeyPress(keyCode, false, currentMask)
emit('keyUp', cleanKey) emit('keyUp', cleanKey)
updateKeyboardButtonTheme() updateKeyboardButtonTheme()
}, 50) }, 50)
@@ -347,16 +352,9 @@ async function onKeyUp() {
// Not used for now - we handle up in onKeyDown with setTimeout // Not used for now - we handle up in onKeyDown with setTimeout
} }
async function sendKeyPress(keyCode: number, press: boolean) { async function sendKeyPress(keyCode: number, press: boolean, modifierMask: number) {
try { try {
const mods = { await hidApi.keyboard(press ? 'down' : 'up', keyCode, modifierMask & 0xff)
ctrl: (pressedModifiers.value & 0x11) !== 0,
shift: (pressedModifiers.value & 0x22) !== 0,
alt: (pressedModifiers.value & 0x44) !== 0,
meta: (pressedModifiers.value & 0x88) !== 0,
}
await hidApi.keyboard(press ? 'down' : 'up', keyCode, mods)
} catch (err) { } catch (err) {
console.error('[VirtualKeyboard] Key send failed:', err) console.error('[VirtualKeyboard] Key send failed:', err)
} }
@@ -368,16 +366,20 @@ interface MacroStep {
} }
async function executeMacro(steps: MacroStep[]) { async function executeMacro(steps: MacroStep[]) {
let macroModifierMask = pressedModifiers.value & 0xff
for (const step of steps) { for (const step of steps) {
for (const mod of step.modifiers) { for (const mod of step.modifiers) {
if (mod in keys) { if (mod in keys) {
await sendKeyPress(keys[mod as KeyName], true) const modHid = keys[mod as KeyName]
macroModifierMask = updateModifierMaskForHidKey(macroModifierMask, modHid, true)
await sendKeyPress(modHid, true, macroModifierMask)
} }
} }
for (const key of step.keys) { for (const key of step.keys) {
if (key in keys) { if (key in keys) {
await sendKeyPress(keys[key as KeyName], true) await sendKeyPress(keys[key as KeyName], true, macroModifierMask)
} }
} }
@@ -385,13 +387,15 @@ async function executeMacro(steps: MacroStep[]) {
for (const key of step.keys) { for (const key of step.keys) {
if (key in keys) { if (key in keys) {
await sendKeyPress(keys[key as KeyName], false) await sendKeyPress(keys[key as KeyName], false, macroModifierMask)
} }
} }
for (const mod of step.modifiers) { for (const mod of step.modifiers) {
if (mod in keys) { if (mod in keys) {
await sendKeyPress(keys[mod as KeyName], false) const modHid = keys[mod as KeyName]
macroModifierMask = updateModifierMaskForHidKey(macroModifierMask, modHid, false)
await sendKeyPress(modHid, false, macroModifierMask)
} }
} }
} }

View File

@@ -5,6 +5,7 @@ import { ref, type Ref } from 'vue'
import { useI18n } from 'vue-i18n' import { useI18n } from 'vue-i18n'
import { toast } from 'vue-sonner' import { toast } from 'vue-sonner'
import { hidApi } from '@/api' import { hidApi } from '@/api'
import { keyboardEventToHidCode, updateModifierMaskForHidKey } from '@/lib/keyboardMappings'
export interface HidInputState { export interface HidInputState {
mouseMode: Ref<'absolute' | 'relative'> mouseMode: Ref<'absolute' | 'relative'>
@@ -32,6 +33,7 @@ export function useHidInput(options: UseHidInputOptions) {
numLock: false, numLock: false,
scrollLock: false, scrollLock: false,
}) })
const activeModifierMask = ref(0)
const mousePosition = ref({ x: 0, y: 0 }) const mousePosition = ref({ x: 0, y: 0 })
const lastMousePosition = ref({ x: 0, y: 0 }) const lastMousePosition = ref({ x: 0, y: 0 })
const isPointerLocked = ref(false) const isPointerLocked = ref(false)
@@ -83,14 +85,14 @@ export function useHidInput(options: UseHidInputOptions) {
keyboardLed.value.numLock = e.getModifierState('NumLock') keyboardLed.value.numLock = e.getModifierState('NumLock')
keyboardLed.value.scrollLock = e.getModifierState('ScrollLock') keyboardLed.value.scrollLock = e.getModifierState('ScrollLock')
const modifiers = { const hidKey = keyboardEventToHidCode(e.code, e.key)
ctrl: e.ctrlKey, if (hidKey === undefined) {
shift: e.shiftKey, return
alt: e.altKey,
meta: e.metaKey,
} }
hidApi.keyboard('down', e.keyCode, modifiers).catch(err => handleHidError(err, 'keyboard down')) const modifierMask = updateModifierMaskForHidKey(activeModifierMask.value, hidKey, true)
activeModifierMask.value = modifierMask
hidApi.keyboard('down', hidKey, modifierMask).catch(err => handleHidError(err, 'keyboard down'))
} }
function handleKeyUp(e: KeyboardEvent) { function handleKeyUp(e: KeyboardEvent) {
@@ -107,7 +109,14 @@ export function useHidInput(options: UseHidInputOptions) {
const keyName = e.key === ' ' ? 'Space' : e.key const keyName = e.key === ' ' ? 'Space' : e.key
pressedKeys.value = pressedKeys.value.filter(k => k !== keyName) pressedKeys.value = pressedKeys.value.filter(k => k !== keyName)
hidApi.keyboard('up', e.keyCode).catch(err => handleHidError(err, 'keyboard up')) const hidKey = keyboardEventToHidCode(e.code, e.key)
if (hidKey === undefined) {
return
}
const modifierMask = updateModifierMaskForHidKey(activeModifierMask.value, hidKey, false)
activeModifierMask.value = modifierMask
hidApi.keyboard('up', hidKey, modifierMask).catch(err => handleHidError(err, 'keyboard up'))
} }
// Mouse handlers // Mouse handlers
@@ -233,6 +242,7 @@ export function useHidInput(options: UseHidInputOptions) {
function handleBlur() { function handleBlur() {
pressedKeys.value = [] pressedKeys.value = []
activeModifierMask.value = 0
if (pressedMouseButton.value !== null) { if (pressedMouseButton.value !== null) {
const button = pressedMouseButton.value const button = pressedMouseButton.value
pressedMouseButton.value = null pressedMouseButton.value = null

View File

@@ -3,7 +3,6 @@
import { ref, onUnmounted, computed, type Ref } from 'vue' import { ref, onUnmounted, computed, type Ref } from 'vue'
import { webrtcApi, type IceCandidate } from '@/api' import { webrtcApi, type IceCandidate } from '@/api'
import { generateUUID } from '@/lib/utils'
import { import {
type HidKeyboardEvent, type HidKeyboardEvent,
type HidMouseEvent, type HidMouseEvent,
@@ -15,6 +14,19 @@ import { useWebSocket } from '@/composables/useWebSocket'
export type { HidKeyboardEvent, HidMouseEvent } export type { HidKeyboardEvent, HidMouseEvent }
export type WebRTCState = 'disconnected' | 'connecting' | 'connected' | 'failed' export type WebRTCState = 'disconnected' | 'connecting' | 'connected' | 'failed'
export type WebRTCConnectStage =
| 'idle'
| 'fetching_ice_servers'
| 'creating_peer_connection'
| 'creating_data_channel'
| 'creating_offer'
| 'waiting_server_answer'
| 'setting_remote_description'
| 'applying_ice_candidates'
| 'waiting_connection'
| 'connected'
| 'disconnected'
| 'failed'
// ICE candidate type: host=P2P local, srflx=P2P STUN, relay=TURN relay // ICE candidate type: host=P2P local, srflx=P2P STUN, relay=TURN relay
export type IceCandidateType = 'host' | 'srflx' | 'prflx' | 'relay' | 'unknown' export type IceCandidateType = 'host' | 'srflx' | 'prflx' | 'relay' | 'unknown'
@@ -99,6 +111,7 @@ let dataChannel: RTCDataChannel | null = null
let sessionId: string | null = null let sessionId: string | null = null
let statsInterval: number | null = null let statsInterval: number | null = null
let isConnecting = false // Lock to prevent concurrent connect calls let isConnecting = false // Lock to prevent concurrent connect calls
let connectInFlight: Promise<boolean> | null = null
let pendingIceCandidates: RTCIceCandidate[] = [] // Queue for ICE candidates before sessionId is set let pendingIceCandidates: RTCIceCandidate[] = [] // Queue for ICE candidates before sessionId is set
let pendingRemoteCandidates: WebRTCIceCandidateEvent[] = [] // Queue for server ICE candidates let pendingRemoteCandidates: WebRTCIceCandidateEvent[] = [] // Queue for server ICE candidates
let pendingRemoteIceComplete = new Set<string>() // Session IDs waiting for end-of-candidates let pendingRemoteIceComplete = new Set<string>() // Session IDs waiting for end-of-candidates
@@ -131,6 +144,7 @@ const stats = ref<WebRTCStats>({
}) })
const error = ref<string | null>(null) const error = ref<string | null>(null)
const dataChannelReady = ref(false) const dataChannelReady = ref(false)
const connectStage = ref<WebRTCConnectStage>('idle')
// Create RTCPeerConnection with configuration // Create RTCPeerConnection with configuration
function createPeerConnection(iceServers: RTCIceServer[]): RTCPeerConnection { function createPeerConnection(iceServers: RTCIceServer[]): RTCPeerConnection {
@@ -149,16 +163,19 @@ function createPeerConnection(iceServers: RTCIceServer[]): RTCPeerConnection {
break break
case 'connected': case 'connected':
state.value = 'connected' state.value = 'connected'
connectStage.value = 'connected'
error.value = null error.value = null
startStatsCollection() startStatsCollection()
break break
case 'disconnected': case 'disconnected':
case 'closed': case 'closed':
state.value = 'disconnected' state.value = 'disconnected'
connectStage.value = 'disconnected'
stopStatsCollection() stopStatsCollection()
break break
case 'failed': case 'failed':
state.value = 'failed' state.value = 'failed'
connectStage.value = 'failed'
error.value = 'Connection failed' error.value = 'Connection failed'
stopStatsCollection() stopStatsCollection()
break break
@@ -450,11 +467,16 @@ async function flushPendingIceCandidates() {
// Connect to WebRTC server // Connect to WebRTC server
async function connect(): Promise<boolean> { async function connect(): Promise<boolean> {
if (connectInFlight) {
return connectInFlight
}
connectInFlight = (async () => {
registerWebSocketHandlers() registerWebSocketHandlers()
// Prevent concurrent connection attempts // Prevent concurrent connection attempts
if (isConnecting) { if (isConnecting) {
return false return state.value === 'connected'
} }
if (peerConnection && state.value === 'connected') { if (peerConnection && state.value === 'connected') {
@@ -474,12 +496,15 @@ async function connect(): Promise<boolean> {
try { try {
state.value = 'connecting' state.value = 'connecting'
error.value = null error.value = null
connectStage.value = 'fetching_ice_servers'
// Fetch ICE servers from backend API // Fetch ICE servers from backend API
const iceServers = await fetchIceServers() const iceServers = await fetchIceServers()
connectStage.value = 'creating_peer_connection'
// Create peer connection with fetched ICE servers // Create peer connection with fetched ICE servers
peerConnection = createPeerConnection(iceServers) peerConnection = createPeerConnection(iceServers)
connectStage.value = 'creating_data_channel'
// Create data channel before offer (for HID) // Create data channel before offer (for HID)
createDataChannel(peerConnection) createDataChannel(peerConnection)
@@ -487,13 +512,16 @@ async function connect(): Promise<boolean> {
// Add transceiver for receiving video // Add transceiver for receiving video
peerConnection.addTransceiver('video', { direction: 'recvonly' }) peerConnection.addTransceiver('video', { direction: 'recvonly' })
peerConnection.addTransceiver('audio', { direction: 'recvonly' }) peerConnection.addTransceiver('audio', { direction: 'recvonly' })
connectStage.value = 'creating_offer'
// Create offer // Create offer
const offer = await peerConnection.createOffer() const offer = await peerConnection.createOffer()
await peerConnection.setLocalDescription(offer) await peerConnection.setLocalDescription(offer)
connectStage.value = 'waiting_server_answer'
// Send offer to server and get answer // Send offer to server and get answer
const response = await webrtcApi.offer(offer.sdp!, generateUUID()) // Do not pass client_id here: each connect creates a fresh session.
const response = await webrtcApi.offer(offer.sdp!)
sessionId = response.session_id sessionId = response.session_id
// Send any ICE candidates that were queued while waiting for sessionId // Send any ICE candidates that were queued while waiting for sessionId
@@ -504,9 +532,11 @@ async function connect(): Promise<boolean> {
type: 'answer', type: 'answer',
sdp: response.sdp, sdp: response.sdp,
} }
connectStage.value = 'setting_remote_description'
await peerConnection.setRemoteDescription(answer) await peerConnection.setRemoteDescription(answer)
// Flush any pending server ICE candidates once remote description is set // Flush any pending server ICE candidates once remote description is set
connectStage.value = 'applying_ice_candidates'
await flushPendingRemoteIce() await flushPendingRemoteIce()
// Add any ICE candidates from the response // Add any ICE candidates from the response
@@ -522,10 +552,12 @@ async function connect(): Promise<boolean> {
const connectionTimeout = 15000 const connectionTimeout = 15000
const pollInterval = 100 const pollInterval = 100
let waited = 0 let waited = 0
connectStage.value = 'waiting_connection'
while (waited < connectionTimeout && peerConnection) { while (waited < connectionTimeout && peerConnection) {
const pcState = peerConnection.connectionState const pcState = peerConnection.connectionState
if (pcState === 'connected') { if (pcState === 'connected') {
connectStage.value = 'connected'
isConnecting = false isConnecting = false
return true return true
} }
@@ -540,11 +572,19 @@ async function connect(): Promise<boolean> {
throw new Error('Connection timeout waiting for ICE negotiation') throw new Error('Connection timeout waiting for ICE negotiation')
} catch (err) { } catch (err) {
state.value = 'failed' state.value = 'failed'
connectStage.value = 'failed'
error.value = err instanceof Error ? err.message : 'Connection failed' error.value = err instanceof Error ? err.message : 'Connection failed'
isConnecting = false isConnecting = false
disconnect() await disconnect()
return false return false
} }
})()
try {
return await connectInFlight
} finally {
connectInFlight = null
}
} }
// Disconnect from WebRTC server // Disconnect from WebRTC server
@@ -583,6 +623,7 @@ async function disconnect() {
audioTrack.value = null audioTrack.value = null
cachedMediaStream = null // Clear cached stream on disconnect cachedMediaStream = null // Clear cached stream on disconnect
state.value = 'disconnected' state.value = 'disconnected'
connectStage.value = 'disconnected'
error.value = null error.value = null
// Reset stats // Reset stats
@@ -694,6 +735,7 @@ export function useWebRTC() {
stats, stats,
error, error,
dataChannelReady, dataChannelReady,
connectStage,
sessionId: computed(() => sessionId), sessionId: computed(() => sessionId),
// Methods // Methods

View File

@@ -134,6 +134,8 @@ export default {
backendAuto: 'Auto', backendAuto: 'Auto',
recommended: 'Recommended', recommended: 'Recommended',
notRecommended: 'Not Recommended', notRecommended: 'Not Recommended',
multiSourceCodecLocked: '{sources} are enabled. Current codec is locked.',
multiSourceVideoParamsWarning: '{sources} are enabled. Changing video device and input parameters will interrupt the stream.',
// HID Config // HID Config
hidConfig: 'Mouse & HID', hidConfig: 'Mouse & HID',
mouseSettings: 'Mouse Settings', mouseSettings: 'Mouse Settings',
@@ -310,6 +312,14 @@ export default {
webrtcConnectedDesc: 'Using low-latency H.264 video stream', webrtcConnectedDesc: 'Using low-latency H.264 video stream',
webrtcFailed: 'WebRTC Connection Failed', webrtcFailed: 'WebRTC Connection Failed',
fallingBackToMjpeg: 'Falling back to MJPEG mode', fallingBackToMjpeg: 'Falling back to MJPEG mode',
webrtcPhaseIceServers: 'Loading ICE servers...',
webrtcPhaseCreatePeer: 'Creating peer connection...',
webrtcPhaseCreateChannel: 'Creating data channel...',
webrtcPhaseCreateOffer: 'Creating local offer...',
webrtcPhaseWaitAnswer: 'Waiting for remote answer...',
webrtcPhaseSetRemote: 'Applying remote description...',
webrtcPhaseApplyIce: 'Applying ICE candidates...',
webrtcPhaseNegotiating: 'Negotiating secure connection...',
// Pointer Lock // Pointer Lock
pointerLocked: 'Pointer Locked', pointerLocked: 'Pointer Locked',
pointerLockedDesc: 'Press Escape to release the pointer', pointerLockedDesc: 'Press Escape to release the pointer',
@@ -438,6 +448,7 @@ export default {
hid: 'HID', hid: 'HID',
msd: 'MSD', msd: 'MSD',
atx: 'ATX', atx: 'ATX',
environment: 'Environment',
network: 'Network', network: 'Network',
users: 'Users', users: 'Users',
hardware: 'Hardware', hardware: 'Hardware',
@@ -452,7 +463,7 @@ export default {
deviceInfo: 'Device Info', deviceInfo: 'Device Info',
deviceInfoDesc: 'Host system information', deviceInfoDesc: 'Host system information',
hostname: 'Hostname', hostname: 'Hostname',
cpuModel: 'CPU Model', cpuModel: 'Processor / Platform',
cpuUsage: 'CPU Usage', cpuUsage: 'CPU Usage',
memoryUsage: 'Memory Usage', memoryUsage: 'Memory Usage',
networkAddresses: 'Network Addresses', networkAddresses: 'Network Addresses',
@@ -501,6 +512,29 @@ export default {
restartRequired: 'Restart Required', restartRequired: 'Restart Required',
restartMessage: 'Web server configuration saved. A restart is required for changes to take effect.', restartMessage: 'Web server configuration saved. A restart is required for changes to take effect.',
restarting: 'Restarting...', restarting: 'Restarting...',
onlineUpgrade: 'Online Upgrade',
onlineUpgradeDesc: 'Check and upgrade One-KVM',
updateChannel: 'Update Channel',
currentVersion: 'Current Version',
latestVersion: 'Latest Version',
updateStatus: 'Update Status',
updateStatusIdle: 'Idle',
releaseNotes: 'Release Notes',
noUpdates: 'No new version available for current channel',
startUpgrade: 'Start Upgrade',
updatePhaseIdle: 'Idle',
updatePhaseChecking: 'Checking',
updatePhaseDownloading: 'Downloading',
updatePhaseVerifying: 'Verifying',
updatePhaseInstalling: 'Installing',
updatePhaseRestarting: 'Restarting',
updatePhaseSuccess: 'Success',
updatePhaseFailed: 'Failed',
updateMsgChecking: 'Checking for updates',
updateMsgDownloading: 'Downloading binary',
updateMsgVerifying: 'Verifying (SHA256)',
updateMsgInstalling: 'Replacing binary',
updateMsgRestarting: 'Restarting service',
// Auth // Auth
auth: 'Access', auth: 'Access',
authSettings: 'Access Settings', authSettings: 'Access Settings',
@@ -630,6 +664,86 @@ export default {
serialNumber: 'Serial Number', serialNumber: 'Serial Number',
serialNumberAuto: 'Auto-generated', serialNumberAuto: 'Auto-generated',
descriptorWarning: 'Changing these settings will reconnect the USB device', descriptorWarning: 'Changing these settings will reconnect the USB device',
otgSelfCheck: {
title: 'OTG Self-Check',
desc: 'Check UDC, gadget binding, and link status',
run: 'Run Self-Check',
failed: 'Failed to run OTG self-check',
overall: 'Overall Status',
ok: 'Healthy',
hasIssues: 'Issues Found',
summary: 'Issue Summary',
counts: '{errors} errors, {warnings} warnings',
groupCounts: '{ok} passed, {warnings} warnings, {errors} errors',
notRun: 'Not run',
status: {
ok: 'Healthy',
warn: 'Warning',
error: 'Error',
skipped: 'Skipped',
},
groups: {
udc: 'UDC Basics',
gadgetConfig: 'Gadget Config',
oneKvm: 'one-kvm Gadget',
functions: 'Functions & Nodes',
link: 'Link State',
},
values: {
missing: 'Missing',
notConfigured: 'Not configured',
mounted: 'Mounted',
unmounted: 'Unmounted',
available: 'Available',
unavailable: 'Unavailable',
exists: 'Exists',
none: 'None',
unbound: 'Unbound',
noConflict: 'No conflict',
conflict: 'Conflict',
unknown: 'Unknown',
normal: 'Normal',
abnormal: 'Abnormal',
},
selectedUdc: 'Target UDC',
boundUdc: 'Bound UDC',
messages: {
udc_dir_exists: 'UDC directory check',
udc_has_entries: 'UDC check',
configfs_mounted: 'configfs check',
usb_gadget_dir_exists: 'usb_gadget check',
libcomposite_loaded: 'libcomposite check',
one_kvm_gadget_exists: 'one-kvm gadget check',
other_gadgets: 'Other gadget check',
configured_udc_valid: 'Configured UDC check',
one_kvm_bound_udc: 'Bound UDC check',
hid_functions_present: 'HID function check',
config_c1_exists: 'configs/c.1 check',
function_links_ok: 'Function link check',
hid_device_nodes: 'HID node check',
udc_conflict: 'UDC conflict check',
udc_state: 'UDC state check',
udc_speed: 'UDC speed check',
},
hints: {
udc_dir_exists: 'Ensure UDC/OTG kernel drivers are enabled',
udc_has_entries: 'Ensure OTG controller is enabled in device tree',
configfs_mounted: 'Try: mount -t configfs none /sys/kernel/config',
usb_gadget_dir_exists: 'Ensure configfs and USB gadget support are enabled',
libcomposite_loaded: 'Try: modprobe libcomposite',
one_kvm_gadget_exists: 'Enable OTG HID or MSD to let one-kvm gadget be created automatically',
other_gadgets: 'Potential UDC contention with one-kvm; check other OTG services',
configured_udc_valid: 'Please reselect UDC in HID OTG settings',
one_kvm_bound_udc: 'Ensure HID/MSD is enabled and initialized successfully',
hid_functions_present: 'Check OTG HID config and enable at least one HID function',
config_c1_exists: 'Gadget structure is incomplete; try restarting One-KVM',
function_links_ok: 'Reinitialize OTG (toggle HID backend once or restart service)',
hid_device_nodes: 'Ensure gadget is bound and check kernel logs',
udc_conflict: 'Stop other OTG services or switch one-kvm to an idle UDC',
udc_state: 'Ensure target host is connected and has recognized the USB device',
udc_speed: 'Device may not be fully enumerated; try reconnecting USB',
},
},
// WebRTC / ICE // WebRTC / ICE
webrtcSettings: 'WebRTC Settings', webrtcSettings: 'WebRTC Settings',
webrtcSettingsDesc: 'Configure STUN/TURN servers for NAT traversal', webrtcSettingsDesc: 'Configure STUN/TURN servers for NAT traversal',
@@ -741,7 +855,6 @@ export default {
openInNewTab: 'Open in New Tab', openInNewTab: 'Open in New Tab',
port: 'Port', port: 'Port',
shell: 'Shell', shell: 'Shell',
credential: 'Credential',
}, },
// gostc // gostc
gostc: { gostc: {
@@ -801,6 +914,25 @@ export default {
keypairGenerated: 'Keypair Generated', keypairGenerated: 'Keypair Generated',
noKeypair: 'No Keypair', noKeypair: 'No Keypair',
}, },
rtsp: {
title: 'RTSP Streaming',
desc: 'Configure RTSP video output service (H.264/H.265)',
bind: 'Bind Address',
port: 'Port',
path: 'Stream Path',
pathPlaceholder: 'live',
pathHint: 'Example: rtsp://device-ip:8554/live',
codec: 'Codec',
codecHint: 'Enabling RTSP locks codec to selected value and disables MJPEG.',
allowOneClient: 'Allow One Client Only',
username: 'Username',
usernamePlaceholder: 'Empty means no authentication',
password: 'Password',
passwordPlaceholder: 'Enter new password',
passwordSet: '••••••••',
passwordHint: 'Leave empty to keep current password; enter a new value to update it.',
urlPreview: 'RTSP URL Preview',
},
}, },
stats: { stats: {
title: 'Connection Stats', title: 'Connection Stats',

Some files were not shown because too many files have changed in this diff Show More