Merge branch 'main' into main

This commit is contained in:
SilentWind
2026-02-20 14:19:38 +08:00
committed by GitHub
111 changed files with 7290 additions and 1787 deletions

View File

@@ -28,7 +28,8 @@ serde_json = "1"
# Logging
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json", "tracing-log"] }
tracing-log = "0.2"
# Error handling
thiserror = "2"
@@ -46,7 +47,7 @@ nix = { version = "0.30", features = ["fs", "net", "hostname", "poll"] }
# HTTP client (for URL downloads)
# Use rustls by default, but allow native-tls for systems with older GLIBC
reqwest = { version = "0.13", features = ["stream", "rustls"], default-features = false }
reqwest = { version = "0.13", features = ["stream", "rustls", "json"], default-features = false }
urlencoding = "2"
# Static file embedding
@@ -65,7 +66,7 @@ clap = { version = "4", features = ["derive"] }
time = "0.3"
# Video capture (V4L2)
v4l = "0.14"
v4l2r = "0.0.7"
# JPEG encoding (libjpeg-turbo, SIMD accelerated)
turbojpeg = "1.3"
@@ -91,6 +92,8 @@ arc-swap = "1.8"
# WebRTC
webrtc = "0.14"
rtp = "0.14"
rtsp-types = "0.1"
sdp-types = "0.1"
# Audio (ALSA capture + Opus encoding)
# Note: audiopus links to libopus.so (unavoidable for audio support)
@@ -115,7 +118,6 @@ hwcodec = { path = "libs/hwcodec" }
protobuf = { version = "3.7", features = ["with-bytes"] }
sodiumoxide = "0.2"
sha2 = "0.10"
# High-performance pixel format conversion (libyuv)
libyuv = { path = "res/vcpkg/libyuv" }

View File

@@ -3,9 +3,13 @@
FROM debian:11
# Linux headers used by v4l2r bindgen
ARG LINUX_HEADERS_VERSION=6.6
ARG LINUX_HEADERS_SHA256=
# Set Rustup mirrors (Aliyun)
ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \
RUSTUP_DIST_SERVER=https://mirrors.aliyun.com/rustup
#ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \
# RUSTUP_DIST_SERVER=https://mirrors.aliyun.com/rustup
# Install Rust toolchain
RUN apt-get update && apt-get install -y --no-install-recommends \
@@ -31,7 +35,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
meson \
ninja-build \
wget \
xz-utils \
file \
rsync \
gcc-aarch64-linux-gnu \
g++-aarch64-linux-gnu \
libc6-dev-arm64-cross \
@@ -47,10 +53,22 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libv4l-dev:arm64 \
libudev-dev:arm64 \
zlib1g-dev:arm64 \
linux-libc-dev:arm64 \
# Note: libjpeg-turbo, libyuv, libvpx, libx264, libx265, libopus are built from source below for static linking
libdrm-dev:arm64 \
&& rm -rf /var/lib/apt/lists/*
# Install newer V4L2 headers for v4l2r bindgen
RUN mkdir -p /opt/v4l2-headers \
&& wget -q https://cdn.kernel.org/pub/linux/kernel/v6.x/linux-${LINUX_HEADERS_VERSION}.tar.xz -O /tmp/linux-headers.tar.xz \
&& if [ -n "$LINUX_HEADERS_SHA256" ]; then echo "$LINUX_HEADERS_SHA256 /tmp/linux-headers.tar.xz" | sha256sum -c -; fi \
&& tar -xf /tmp/linux-headers.tar.xz -C /tmp \
&& cd /tmp/linux-${LINUX_HEADERS_VERSION} \
&& make ARCH=arm64 headers_install INSTALL_HDR_PATH=/opt/v4l2-headers \
&& rm -rf /tmp/linux-${LINUX_HEADERS_VERSION} /tmp/linux-headers.tar.xz
ENV V4L2R_VIDEODEV2_H_PATH=/opt/v4l2-headers/include
# Build static libjpeg-turbo from source (cross-compile for ARM64)
RUN git clone --depth 1 https://github.com/libjpeg-turbo/libjpeg-turbo /tmp/libjpeg-turbo \
&& cd /tmp/libjpeg-turbo \

View File

@@ -3,9 +3,13 @@
FROM debian:11
# Linux headers used by v4l2r bindgen
ARG LINUX_HEADERS_VERSION=6.6
ARG LINUX_HEADERS_SHA256=
# Set Rustup mirrors (Aliyun)
ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \
RUSTUP_DIST_SERVER=https://mirrors.aliyun.com/rustup
#ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \
# RUSTUP_DIST_SERVER=https://mirrors.aliyun.com/rustup
# Install Rust toolchain
RUN apt-get update && apt-get install -y --no-install-recommends \
@@ -31,7 +35,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
meson \
ninja-build \
wget \
xz-utils \
file \
rsync \
gcc-arm-linux-gnueabihf \
g++-arm-linux-gnueabihf \
libc6-dev-armhf-cross \
@@ -46,10 +52,22 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libasound2-dev:armhf \
libv4l-dev:armhf \
libudev-dev:armhf \
linux-libc-dev:armhf \
zlib1g-dev:armhf \
libdrm-dev:armhf \
&& rm -rf /var/lib/apt/lists/*
# Install newer V4L2 headers for v4l2r bindgen
RUN mkdir -p /opt/v4l2-headers \
&& wget -q https://cdn.kernel.org/pub/linux/kernel/v6.x/linux-${LINUX_HEADERS_VERSION}.tar.xz -O /tmp/linux-headers.tar.xz \
&& if [ -n "$LINUX_HEADERS_SHA256" ]; then echo "$LINUX_HEADERS_SHA256 /tmp/linux-headers.tar.xz" | sha256sum -c -; fi \
&& tar -xf /tmp/linux-headers.tar.xz -C /tmp \
&& cd /tmp/linux-${LINUX_HEADERS_VERSION} \
&& make ARCH=arm headers_install INSTALL_HDR_PATH=/opt/v4l2-headers \
&& rm -rf /tmp/linux-${LINUX_HEADERS_VERSION} /tmp/linux-headers.tar.xz
ENV V4L2R_VIDEODEV2_H_PATH=/opt/v4l2-headers/include
# Build static libjpeg-turbo from source (cross-compile for ARMv7)
RUN git clone --depth 1 https://github.com/libjpeg-turbo/libjpeg-turbo /tmp/libjpeg-turbo \
&& cd /tmp/libjpeg-turbo \

View File

@@ -3,9 +3,13 @@
FROM debian:11
# Linux headers used by v4l2r bindgen
ARG LINUX_HEADERS_VERSION=6.6
ARG LINUX_HEADERS_SHA256=
# Set Rustup mirrors (Aliyun)
ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \
RUSTUP_DIST_SERVER=https://mirrors.aliyun.com/rustup
#ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \
# RUSTUP_DIST_SERVER=https://mirrors.aliyun.com/rustup
# Install Rust toolchain
RUN apt-get update && apt-get install -y --no-install-recommends \
@@ -29,6 +33,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libclang-dev \
llvm \
wget \
xz-utils \
rsync \
# Autotools for libopus (requires autoreconf)
autoconf \
automake \
@@ -37,6 +43,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libasound2-dev \
libv4l-dev \
libudev-dev \
linux-libc-dev \
zlib1g-dev \
# Note: libjpeg-turbo, libx264, libx265, libopus are built from source below for static linking
libva-dev \
@@ -49,6 +56,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libxdmcp-dev \
&& rm -rf /var/lib/apt/lists/*
# Install newer V4L2 headers for v4l2r bindgen
RUN mkdir -p /opt/v4l2-headers \
&& wget -q https://cdn.kernel.org/pub/linux/kernel/v6.x/linux-${LINUX_HEADERS_VERSION}.tar.xz -O /tmp/linux-headers.tar.xz \
&& if [ -n "$LINUX_HEADERS_SHA256" ]; then echo "$LINUX_HEADERS_SHA256 /tmp/linux-headers.tar.xz" | sha256sum -c -; fi \
&& tar -xf /tmp/linux-headers.tar.xz -C /tmp \
&& cd /tmp/linux-${LINUX_HEADERS_VERSION} \
&& make ARCH=x86 headers_install INSTALL_HDR_PATH=/opt/v4l2-headers \
&& rm -rf /tmp/linux-${LINUX_HEADERS_VERSION} /tmp/linux-headers.tar.xz
ENV V4L2R_VIDEODEV2_H_PATH=/opt/v4l2-headers/include
# Build static libjpeg-turbo from source (needed by libyuv)
RUN git clone --depth 1 https://github.com/libjpeg-turbo/libjpeg-turbo /tmp/libjpeg-turbo \
&& cd /tmp/libjpeg-turbo \
@@ -208,4 +226,4 @@ RUN rustup target add x86_64-unknown-linux-gnu
# Configure environment for static linking
ENV PKG_CONFIG_ALLOW_CROSS=1\
FFMPEG_STATIC=1 \
LIBYUV_STATIC=1
LIBYUV_STATIC=1

View File

@@ -508,4 +508,4 @@ bool has_flag_could_not_find_ref_with_poc() {
extern "C" void hwcodec_set_flag_could_not_find_ref_with_poc() {
util_decode::g_flag_could_not_find_ref_with_poc = true;
}
}

View File

@@ -6,6 +6,7 @@
include!(concat!(env!("OUT_DIR"), "/ffmpeg_ffi.rs"));
use serde_derive::{Deserialize, Serialize};
use std::env;
#[derive(Debug, Eq, PartialEq, Clone, Copy, Serialize, Deserialize)]
pub enum AVHWDeviceType {
@@ -53,7 +54,36 @@ pub extern "C" fn hwcodec_av_log_callback(level: i32, message: *const std::os::r
pub(crate) fn init_av_log() {
static INIT: std::sync::Once = std::sync::Once::new();
INIT.call_once(|| unsafe {
av_log_set_level(AV_LOG_ERROR as i32);
av_log_set_level(parse_ffmpeg_log_level());
hwcodec_set_av_log_callback();
});
}
fn parse_ffmpeg_log_level() -> i32 {
let raw = match env::var("ONE_KVM_FFMPEG_LOG") {
Ok(value) => value,
Err(_) => return AV_LOG_ERROR as i32,
};
let value = raw.trim().to_ascii_lowercase();
if value.is_empty() {
return AV_LOG_ERROR as i32;
}
if let Ok(level) = value.parse::<i32>() {
return level;
}
match value.as_str() {
"quiet" => AV_LOG_QUIET as i32,
"panic" => AV_LOG_PANIC as i32,
"fatal" => AV_LOG_FATAL as i32,
"error" => AV_LOG_ERROR as i32,
"warn" | "warning" => AV_LOG_WARNING as i32,
"info" => AV_LOG_INFO as i32,
"verbose" => AV_LOG_VERBOSE as i32,
"debug" => AV_LOG_DEBUG as i32,
"trace" => AV_LOG_TRACE as i32,
_ => AV_LOG_ERROR as i32,
}
}

View File

@@ -31,8 +31,10 @@ unsafe impl Send for HwMjpegH26xPipeline {}
impl HwMjpegH26xPipeline {
pub fn new(config: HwMjpegH26xConfig) -> Result<Self, String> {
unsafe {
let dec = CString::new(config.decoder.as_str()).map_err(|_| "decoder name invalid".to_string())?;
let enc = CString::new(config.encoder.as_str()).map_err(|_| "encoder name invalid".to_string())?;
let dec = CString::new(config.decoder.as_str())
.map_err(|_| "decoder name invalid".to_string())?;
let enc = CString::new(config.encoder.as_str())
.map_err(|_| "encoder name invalid".to_string())?;
let ctx = ffmpeg_hw_mjpeg_h26x_new(
dec.as_ptr(),
enc.as_ptr(),

View File

@@ -1,8 +1,7 @@
use crate::{
ffmpeg::{init_av_log, AVPixelFormat},
ffmpeg_ram::{
ffmpeg_ram_decode, ffmpeg_ram_free_decoder, ffmpeg_ram_last_error,
ffmpeg_ram_new_decoder,
ffmpeg_ram_decode, ffmpeg_ram_free_decoder, ffmpeg_ram_last_error, ffmpeg_ram_new_decoder,
},
};
use std::{

View File

@@ -352,6 +352,7 @@ impl Encoder {
debug!("Encoder {} created successfully", codec.name);
let mut passed = false;
let mut last_err: Option<i32> = None;
let is_v4l2m2m = codec.name.contains("v4l2m2m");
let max_attempts = if codec.name.contains("v4l2m2m") {
5

View File

@@ -8,11 +8,11 @@ use tracing::{debug, info, warn};
use super::executor::{timing, AtxKeyExecutor};
use super::led::LedSensor;
use super::types::{AtxKeyConfig, AtxLedConfig, AtxState, PowerStatus};
use super::types::{AtxAction, AtxKeyConfig, AtxLedConfig, AtxState, PowerStatus};
use crate::error::{AppError, Result};
/// ATX power control configuration
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Default)]
pub struct AtxControllerConfig {
/// Whether ATX is enabled
pub enabled: bool,
@@ -24,17 +24,6 @@ pub struct AtxControllerConfig {
pub led: AtxLedConfig,
}
impl Default for AtxControllerConfig {
fn default() -> Self {
Self {
enabled: false,
power: AtxKeyConfig::default(),
reset: AtxKeyConfig::default(),
led: AtxLedConfig::default(),
}
}
}
/// Internal state holding all ATX components
/// Grouped together to reduce lock acquisitions
struct AtxInner {
@@ -54,34 +43,7 @@ pub struct AtxController {
}
impl AtxController {
/// Create a new ATX controller with the specified configuration
pub fn new(config: AtxControllerConfig) -> Self {
Self {
inner: RwLock::new(AtxInner {
config,
power_executor: None,
reset_executor: None,
led_sensor: None,
}),
}
}
/// Create a disabled ATX controller
pub fn disabled() -> Self {
Self::new(AtxControllerConfig::default())
}
/// Initialize the ATX controller and its executors
pub async fn init(&self) -> Result<()> {
let mut inner = self.inner.write().await;
if !inner.config.enabled {
info!("ATX disabled in configuration");
return Ok(());
}
info!("Initializing ATX controller");
async fn init_components(inner: &mut AtxInner) {
// Initialize power executor
if inner.config.power.is_configured() {
let mut executor = AtxKeyExecutor::new(inner.config.power.clone());
@@ -123,234 +85,180 @@ impl AtxController {
inner.led_sensor = Some(sensor);
}
}
info!("ATX controller initialized successfully");
Ok(())
}
/// Reload the ATX controller with new configuration
///
/// This is called when configuration changes and supports hot-reload.
pub async fn reload(&self, new_config: AtxControllerConfig) -> Result<()> {
info!("Reloading ATX controller with new configuration");
async fn shutdown_components(inner: &mut AtxInner) {
if let Some(executor) = inner.power_executor.as_mut() {
if let Err(e) = executor.shutdown().await {
warn!("Failed to shutdown power executor: {}", e);
}
}
inner.power_executor = None;
// Shutdown existing executors
self.shutdown_internal().await?;
if let Some(executor) = inner.reset_executor.as_mut() {
if let Err(e) = executor.shutdown().await {
warn!("Failed to shutdown reset executor: {}", e);
}
}
inner.reset_executor = None;
// Update configuration and re-initialize
{
let mut inner = self.inner.write().await;
inner.config = new_config;
if let Some(sensor) = inner.led_sensor.as_mut() {
if let Err(e) = sensor.shutdown().await {
warn!("Failed to shutdown LED sensor: {}", e);
}
}
inner.led_sensor = None;
}
/// Create a new ATX controller with the specified configuration
pub fn new(config: AtxControllerConfig) -> Self {
Self {
inner: RwLock::new(AtxInner {
config,
power_executor: None,
reset_executor: None,
led_sensor: None,
}),
}
}
/// Create a disabled ATX controller
pub fn disabled() -> Self {
Self::new(AtxControllerConfig::default())
}
/// Initialize the ATX controller and its executors
pub async fn init(&self) -> Result<()> {
let mut inner = self.inner.write().await;
if !inner.config.enabled {
info!("ATX disabled in configuration");
return Ok(());
}
// Re-initialize
self.init().await?;
info!("Initializing ATX controller");
Self::init_components(&mut inner).await;
info!("ATX controller reloaded successfully");
Ok(())
}
/// Get current ATX state (single lock acquisition)
/// Reload ATX controller configuration
pub async fn reload(&self, config: AtxControllerConfig) -> Result<()> {
let mut inner = self.inner.write().await;
info!("Reloading ATX controller configuration");
// Shutdown existing components first, then rebuild with new config.
Self::shutdown_components(&mut inner).await;
inner.config = config;
if !inner.config.enabled {
info!("ATX disabled after reload");
return Ok(());
}
Self::init_components(&mut inner).await;
info!("ATX controller reloaded");
Ok(())
}
/// Shutdown ATX controller and release all resources
pub async fn shutdown(&self) -> Result<()> {
let mut inner = self.inner.write().await;
Self::shutdown_components(&mut inner).await;
info!("ATX controller shutdown complete");
Ok(())
}
/// Trigger a power action (short/long/reset)
pub async fn trigger_power_action(&self, action: AtxAction) -> Result<()> {
let inner = self.inner.read().await;
match action {
AtxAction::Short | AtxAction::Long => {
if let Some(executor) = &inner.power_executor {
let duration = match action {
AtxAction::Short => timing::SHORT_PRESS,
AtxAction::Long => timing::LONG_PRESS,
_ => unreachable!(),
};
executor.pulse(duration).await?;
} else {
return Err(AppError::Config(
"Power button not configured for ATX controller".to_string(),
));
}
}
AtxAction::Reset => {
if let Some(executor) = &inner.reset_executor {
executor.pulse(timing::RESET_PRESS).await?;
} else {
return Err(AppError::Config(
"Reset button not configured for ATX controller".to_string(),
));
}
}
}
Ok(())
}
/// Trigger a short power button press
pub async fn power_short(&self) -> Result<()> {
self.trigger_power_action(AtxAction::Short).await
}
/// Trigger a long power button press
pub async fn power_long(&self) -> Result<()> {
self.trigger_power_action(AtxAction::Long).await
}
/// Trigger a reset button press
pub async fn reset(&self) -> Result<()> {
self.trigger_power_action(AtxAction::Reset).await
}
/// Get the current power status using the LED sensor (if configured)
pub async fn power_status(&self) -> PowerStatus {
let inner = self.inner.read().await;
if let Some(sensor) = &inner.led_sensor {
match sensor.read().await {
Ok(status) => status,
Err(e) => {
debug!("Failed to read ATX LED sensor: {}", e);
PowerStatus::Unknown
}
}
} else {
PowerStatus::Unknown
}
}
/// Get a snapshot of the ATX state for API responses
pub async fn state(&self) -> AtxState {
let inner = self.inner.read().await;
let power_status = if let Some(sensor) = inner.led_sensor.as_ref() {
sensor.read().await.unwrap_or(PowerStatus::Unknown)
let power_status = if let Some(sensor) = &inner.led_sensor {
match sensor.read().await {
Ok(status) => status,
Err(e) => {
debug!("Failed to read ATX LED sensor: {}", e);
PowerStatus::Unknown
}
}
} else {
PowerStatus::Unknown
};
AtxState {
available: inner.config.enabled,
power_configured: inner
.power_executor
.as_ref()
.map(|e| e.is_initialized())
.unwrap_or(false),
reset_configured: inner
.reset_executor
.as_ref()
.map(|e| e.is_initialized())
.unwrap_or(false),
power_configured: inner.power_executor.is_some(),
reset_configured: inner.reset_executor.is_some(),
power_status,
led_supported: inner
.led_sensor
.as_ref()
.map(|s| s.is_initialized())
.unwrap_or(false),
led_supported: inner.led_sensor.is_some(),
}
}
/// Get current state as SystemEvent
pub async fn current_state_event(&self) -> crate::events::SystemEvent {
let state = self.state().await;
crate::events::SystemEvent::AtxStateChanged {
power_status: state.power_status,
}
}
/// Check if ATX is available
pub async fn is_available(&self) -> bool {
let inner = self.inner.read().await;
inner.config.enabled
}
/// Check if power button is configured and initialized
pub async fn is_power_ready(&self) -> bool {
let inner = self.inner.read().await;
inner
.power_executor
.as_ref()
.map(|e| e.is_initialized())
.unwrap_or(false)
}
/// Check if reset button is configured and initialized
pub async fn is_reset_ready(&self) -> bool {
let inner = self.inner.read().await;
inner
.reset_executor
.as_ref()
.map(|e| e.is_initialized())
.unwrap_or(false)
}
/// Short press power button (turn on or graceful shutdown)
pub async fn power_short(&self) -> Result<()> {
let inner = self.inner.read().await;
let executor = inner
.power_executor
.as_ref()
.ok_or_else(|| AppError::Internal("Power button not configured".to_string()))?;
info!(
"ATX: Short press power button ({}ms)",
timing::SHORT_PRESS.as_millis()
);
executor.pulse(timing::SHORT_PRESS).await
}
/// Long press power button (force power off)
pub async fn power_long(&self) -> Result<()> {
let inner = self.inner.read().await;
let executor = inner
.power_executor
.as_ref()
.ok_or_else(|| AppError::Internal("Power button not configured".to_string()))?;
info!(
"ATX: Long press power button ({}ms)",
timing::LONG_PRESS.as_millis()
);
executor.pulse(timing::LONG_PRESS).await
}
/// Press reset button
pub async fn reset(&self) -> Result<()> {
let inner = self.inner.read().await;
let executor = inner
.reset_executor
.as_ref()
.ok_or_else(|| AppError::Internal("Reset button not configured".to_string()))?;
info!(
"ATX: Press reset button ({}ms)",
timing::RESET_PRESS.as_millis()
);
executor.pulse(timing::RESET_PRESS).await
}
/// Get current power status from LED sensor
pub async fn power_status(&self) -> Result<PowerStatus> {
let inner = self.inner.read().await;
match inner.led_sensor.as_ref() {
Some(sensor) => sensor.read().await,
None => Ok(PowerStatus::Unknown),
}
}
/// Shutdown the ATX controller
pub async fn shutdown(&self) -> Result<()> {
info!("Shutting down ATX controller");
self.shutdown_internal().await?;
info!("ATX controller shutdown complete");
Ok(())
}
/// Internal shutdown helper
async fn shutdown_internal(&self) -> Result<()> {
let mut inner = self.inner.write().await;
// Shutdown power executor
if let Some(mut executor) = inner.power_executor.take() {
executor.shutdown().await.ok();
}
// Shutdown reset executor
if let Some(mut executor) = inner.reset_executor.take() {
executor.shutdown().await.ok();
}
// Shutdown LED sensor
if let Some(mut sensor) = inner.led_sensor.take() {
sensor.shutdown().await.ok();
}
Ok(())
}
}
impl Drop for AtxController {
fn drop(&mut self) {
debug!("ATX controller dropped");
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_controller_config_default() {
let config = AtxControllerConfig::default();
assert!(!config.enabled);
assert!(!config.power.is_configured());
assert!(!config.reset.is_configured());
assert!(!config.led.is_configured());
}
#[test]
fn test_controller_creation() {
let controller = AtxController::disabled();
assert!(controller.inner.try_read().is_ok());
}
#[tokio::test]
async fn test_controller_disabled_state() {
let controller = AtxController::disabled();
let state = controller.state().await;
assert!(!state.available);
assert!(!state.power_configured);
assert!(!state.reset_configured);
}
#[tokio::test]
async fn test_controller_init_disabled() {
let controller = AtxController::disabled();
let result = controller.init().await;
assert!(result.is_ok());
}
#[tokio::test]
async fn test_controller_is_available() {
let controller = AtxController::disabled();
assert!(!controller.is_available().await);
let config = AtxControllerConfig {
enabled: true,
..Default::default()
};
let controller = AtxController::new(config);
assert!(controller.is_available().await);
}
}

View File

@@ -28,12 +28,14 @@
//! device: "/dev/gpiochip0".to_string(),
//! pin: 5,
//! active_level: ActiveLevel::High,
//! baud_rate: 9600,
//! },
//! reset: AtxKeyConfig {
//! driver: AtxDriverType::UsbRelay,
//! device: "/dev/hidraw0".to_string(),
//! pin: 0,
//! active_level: ActiveLevel::High,
//! baud_rate: 9600,
//! },
//! led: Default::default(),
//! };

View File

@@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use typeshare::typeshare;
/// Power status
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum PowerStatus {
/// Power is on
@@ -15,18 +15,13 @@ pub enum PowerStatus {
/// Power is off
Off,
/// Power status unknown (no LED connected)
#[default]
Unknown,
}
impl Default for PowerStatus {
fn default() -> Self {
Self::Unknown
}
}
/// Driver type for ATX key operations
#[typeshare]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum AtxDriverType {
/// GPIO control via Linux character device
@@ -36,32 +31,22 @@ pub enum AtxDriverType {
/// Serial/COM port relay (taobao LCUS type)
Serial,
/// Disabled / Not configured
#[default]
None,
}
impl Default for AtxDriverType {
fn default() -> Self {
Self::None
}
}
/// Active level for GPIO pins
#[typeshare]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum ActiveLevel {
/// Active high (default for most cases)
#[default]
High,
/// Active low (inverted)
Low,
}
impl Default for ActiveLevel {
fn default() -> Self {
Self::High
}
}
/// Configuration for a single ATX key (power or reset)
/// This is the "four-tuple" configuration: (driver, device, pin/channel, level)
#[typeshare]
@@ -77,6 +62,7 @@ pub struct AtxKeyConfig {
/// Pin or channel number:
/// - For GPIO: GPIO pin number
/// - For USB Relay: relay channel (0-based)
/// - For Serial Relay (LCUS): relay channel (1-based)
pub pin: u32,
/// Active level (only applicable to GPIO, ignored for USB Relay)
pub active_level: ActiveLevel,
@@ -105,7 +91,7 @@ impl AtxKeyConfig {
/// LED sensing configuration (optional)
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default)]
pub struct AtxLedConfig {
/// Whether LED sensing is enabled
@@ -118,17 +104,6 @@ pub struct AtxLedConfig {
pub inverted: bool,
}
impl Default for AtxLedConfig {
fn default() -> Self {
Self {
enabled: false,
gpio_chip: String::new(),
gpio_pin: 0,
inverted: false,
}
}
}
impl AtxLedConfig {
/// Check if LED sensing is configured
pub fn is_configured(&self) -> bool {
@@ -137,7 +112,7 @@ impl AtxLedConfig {
}
/// ATX state information
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct AtxState {
/// Whether ATX feature is available/enabled
pub available: bool,
@@ -151,18 +126,6 @@ pub struct AtxState {
pub led_supported: bool,
}
impl Default for AtxState {
fn default() -> Self {
Self {
available: false,
power_configured: false,
reset_configured: false,
power_status: PowerStatus::Unknown,
led_supported: false,
}
}
}
/// ATX power action request
#[derive(Debug, Clone, Deserialize)]
pub struct AtxPowerRequest {
@@ -274,5 +237,6 @@ mod tests {
assert!(!state.power_configured);
assert!(!state.reset_configured);
assert_eq!(state.power_status, PowerStatus::Unknown);
assert!(!state.led_supported);
}
}

View File

@@ -10,7 +10,7 @@ use crate::error::{AppError, Result};
/// WOL magic packet structure:
/// - 6 bytes of 0xFF
/// - 16 repetitions of the target MAC address (6 bytes each)
/// Total: 6 + 16 * 6 = 102 bytes
/// Total: 6 + 16 * 6 = 102 bytes
const MAGIC_PACKET_SIZE: usize = 102;
/// Parse MAC address string into bytes
@@ -160,8 +160,8 @@ mod tests {
let packet = build_magic_packet(&mac);
// Check header (6 bytes of 0xFF)
for i in 0..6 {
assert_eq!(packet[i], 0xFF);
for byte in packet.iter().take(6) {
assert_eq!(*byte, 0xFF);
}
// Check MAC repetitions

View File

@@ -184,14 +184,7 @@ impl AudioCapturer {
let log_throttler = self.log_throttler.clone();
let handle = tokio::task::spawn_blocking(move || {
capture_loop(
config,
state,
frame_tx,
stop_flag,
sequence,
log_throttler,
);
capture_loop(config, state, frame_tx, stop_flag, sequence, log_throttler);
});
*self.capture_handle.lock().await = Some(handle);

View File

@@ -39,6 +39,7 @@ impl AudioQuality {
}
/// Parse from string
#[allow(clippy::should_implement_trait)]
pub fn from_str(s: &str) -> Self {
match s.to_lowercase().as_str() {
"voice" | "low" => AudioQuality::Voice,

View File

@@ -85,9 +85,7 @@ pub fn enumerate_audio_devices_with_current(
let mut devices = Vec::new();
// Try to enumerate cards
let cards = match alsa::card::Iter::new() {
i => i,
};
let cards = alsa::card::Iter::new();
for card_result in cards {
let card = match card_result {

View File

@@ -16,9 +16,10 @@ use crate::events::{EventBus, SystemEvent};
use crate::utils::LogThrottler;
/// Audio health status
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, Default)]
pub enum AudioHealthStatus {
/// Device is healthy and operational
#[default]
Healthy,
/// Device has an error, attempting recovery
Error {
@@ -33,12 +34,6 @@ pub enum AudioHealthStatus {
Disconnected,
}
impl Default for AudioHealthStatus {
fn default() -> Self {
Self::Healthy
}
}
/// Audio health monitor configuration
#[derive(Debug, Clone)]
pub struct AudioMonitorConfig {
@@ -166,7 +161,7 @@ impl AudioHealthMonitor {
let attempt = self.retry_count.load(Ordering::Relaxed);
// Only publish every 5 attempts to avoid event spam
if attempt == 1 || attempt % 5 == 0 {
if attempt == 1 || attempt.is_multiple_of(5) {
debug!("Audio reconnecting, attempt {}", attempt);
if let Some(ref events) = *self.events.read().await {

View File

@@ -14,9 +14,10 @@ use super::encoder::{OpusConfig, OpusEncoder, OpusFrame};
use crate::error::{AppError, Result};
/// Audio stream state
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum AudioStreamState {
/// Stream is stopped
#[default]
Stopped,
/// Stream is starting up
Starting,
@@ -26,14 +27,8 @@ pub enum AudioStreamState {
Error,
}
impl Default for AudioStreamState {
fn default() -> Self {
Self::Stopped
}
}
/// Audio streamer configuration
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Default)]
pub struct AudioStreamerConfig {
/// Audio capture configuration
pub capture: AudioConfig,
@@ -41,15 +36,6 @@ pub struct AudioStreamerConfig {
pub opus: OpusConfig,
}
impl Default for AudioStreamerConfig {
fn default() -> Self {
Self {
capture: AudioConfig::default(),
opus: OpusConfig::default(),
}
}
}
impl AudioStreamerConfig {
/// Create config for a specific device with default quality
pub fn for_device(device_name: &str) -> Self {
@@ -290,11 +276,9 @@ impl AudioStreamer {
// Encode to Opus
let opus_result = {
let mut enc_guard = encoder.lock().await;
if let Some(ref mut enc) = *enc_guard {
Some(enc.encode_frame(&audio_frame))
} else {
None
}
(*enc_guard)
.as_mut()
.map(|enc| enc.encode_frame(&audio_frame))
};
match opus_result {

View File

@@ -92,11 +92,7 @@ fn is_public_endpoint(path: &str) -> bool {
// Note: paths here are relative to /api since middleware is applied within the nested router
matches!(
path,
"/"
| "/auth/login"
| "/health"
| "/setup"
| "/setup/init"
"/" | "/auth/login" | "/health" | "/setup" | "/setup/init"
) || path.starts_with("/assets/")
|| path.starts_with("/static/")
|| path.ends_with(".js")

View File

@@ -110,7 +110,9 @@ impl SessionStore {
/// Delete all expired sessions
pub async fn cleanup_expired(&self) -> Result<u64> {
let result = sqlx::query("DELETE FROM sessions WHERE expires_at < datetime('now')")
let now = Utc::now().to_rfc3339();
let result = sqlx::query("DELETE FROM sessions WHERE expires_at < ?1")
.bind(now)
.execute(&self.pool)
.await?;
Ok(result.rows_affected())

View File

@@ -7,7 +7,7 @@ use super::password::{hash_password, verify_password};
use crate::error::{AppError, Result};
/// User row type from database
type UserRow = (String, String, String, i32, String, String);
type UserRow = (String, String, String, String, String);
/// User data
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -16,7 +16,6 @@ pub struct User {
pub username: String,
#[serde(skip_serializing)]
pub password_hash: String,
pub is_admin: bool,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
@@ -24,12 +23,11 @@ pub struct User {
impl User {
/// Convert from database row to User
fn from_row(row: UserRow) -> Self {
let (id, username, password_hash, is_admin, created_at, updated_at) = row;
let (id, username, password_hash, created_at, updated_at) = row;
Self {
id,
username,
password_hash,
is_admin: is_admin != 0,
created_at: DateTime::parse_from_rfc3339(&created_at)
.map(|dt| dt.with_timezone(&Utc))
.unwrap_or_else(|_| Utc::now()),
@@ -53,7 +51,7 @@ impl UserStore {
}
/// Create a new user
pub async fn create(&self, username: &str, password: &str, is_admin: bool) -> Result<User> {
pub async fn create(&self, username: &str, password: &str) -> Result<User> {
// Check if username already exists
if self.get_by_username(username).await?.is_some() {
return Err(AppError::BadRequest(format!(
@@ -68,21 +66,19 @@ impl UserStore {
id: Uuid::new_v4().to_string(),
username: username.to_string(),
password_hash,
is_admin,
created_at: now,
updated_at: now,
};
sqlx::query(
r#"
INSERT INTO users (id, username, password_hash, is_admin, created_at, updated_at)
VALUES (?1, ?2, ?3, ?4, ?5, ?6)
INSERT INTO users (id, username, password_hash, created_at, updated_at)
VALUES (?1, ?2, ?3, ?4, ?5)
"#,
)
.bind(&user.id)
.bind(&user.username)
.bind(&user.password_hash)
.bind(user.is_admin as i32)
.bind(user.created_at.to_rfc3339())
.bind(user.updated_at.to_rfc3339())
.execute(&self.pool)
@@ -94,7 +90,7 @@ impl UserStore {
/// Get user by ID
pub async fn get(&self, user_id: &str) -> Result<Option<User>> {
let row: Option<UserRow> = sqlx::query_as(
"SELECT id, username, password_hash, is_admin, created_at, updated_at FROM users WHERE id = ?1",
"SELECT id, username, password_hash, created_at, updated_at FROM users WHERE id = ?1",
)
.bind(user_id)
.fetch_optional(&self.pool)
@@ -106,7 +102,7 @@ impl UserStore {
/// Get user by username
pub async fn get_by_username(&self, username: &str) -> Result<Option<User>> {
let row: Option<UserRow> = sqlx::query_as(
"SELECT id, username, password_hash, is_admin, created_at, updated_at FROM users WHERE username = ?1",
"SELECT id, username, password_hash, created_at, updated_at FROM users WHERE username = ?1",
)
.bind(username)
.fetch_optional(&self.pool)
@@ -161,13 +157,12 @@ impl UserStore {
}
let now = Utc::now();
let result =
sqlx::query("UPDATE users SET username = ?1, updated_at = ?2 WHERE id = ?3")
.bind(new_username)
.bind(now.to_rfc3339())
.bind(user_id)
.execute(&self.pool)
.await?;
let result = sqlx::query("UPDATE users SET username = ?1, updated_at = ?2 WHERE id = ?3")
.bind(new_username)
.bind(now.to_rfc3339())
.bind(user_id)
.execute(&self.pool)
.await?;
if result.rows_affected() == 0 {
return Err(AppError::NotFound("User not found".to_string()));
@@ -179,7 +174,7 @@ impl UserStore {
/// List all users
pub async fn list(&self) -> Result<Vec<User>> {
let rows: Vec<UserRow> = sqlx::query_as(
"SELECT id, username, password_hash, is_admin, created_at, updated_at FROM users ORDER BY created_at",
"SELECT id, username, password_hash, created_at, updated_at FROM users ORDER BY created_at",
)
.fetch_all(&self.pool)
.await?;

View File

@@ -11,6 +11,7 @@ pub use crate::rustdesk::config::RustDeskConfig;
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
#[derive(Default)]
pub struct AppConfig {
/// Whether initial setup has been completed
pub initialized: bool,
@@ -34,24 +35,8 @@ pub struct AppConfig {
pub extensions: ExtensionsConfig,
/// RustDesk remote access settings
pub rustdesk: RustDeskConfig,
}
impl Default for AppConfig {
fn default() -> Self {
Self {
initialized: false,
auth: AuthConfig::default(),
video: VideoConfig::default(),
hid: HidConfig::default(),
msd: MsdConfig::default(),
atx: AtxConfig::default(),
audio: AudioConfig::default(),
stream: StreamConfig::default(),
web: WebConfig::default(),
extensions: ExtensionsConfig::default(),
rustdesk: RustDeskConfig::default(),
}
}
/// RTSP streaming settings
pub rtsp: RtspConfig,
}
/// Authentication configuration
@@ -116,21 +101,17 @@ impl Default for VideoConfig {
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum HidBackend {
/// USB OTG HID gadget
Otg,
/// CH9329 serial HID controller
Ch9329,
/// Disabled
#[default]
None,
}
impl Default for HidBackend {
fn default() -> Self {
Self::None
}
}
/// OTG USB device descriptor configuration
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
@@ -163,8 +144,10 @@ impl Default for OtgDescriptorConfig {
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
#[derive(Default)]
pub enum OtgHidProfile {
/// Full HID device set (keyboard + relative mouse + absolute mouse + consumer control)
#[default]
Full,
/// Full HID device set without MSD
FullNoMsd,
@@ -180,12 +163,6 @@ pub enum OtgHidProfile {
Custom,
}
impl Default for OtgHidProfile {
fn default() -> Self {
Self::Full
}
}
/// OTG HID function selection (used when profile is Custom)
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
@@ -360,6 +337,7 @@ pub use crate::atx::{ActiveLevel, AtxDriverType, AtxKeyConfig, AtxLedConfig};
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
#[derive(Default)]
pub struct AtxConfig {
/// Enable ATX functionality
pub enabled: bool,
@@ -373,18 +351,6 @@ pub struct AtxConfig {
pub wol_interface: String,
}
impl Default for AtxConfig {
fn default() -> Self {
Self {
enabled: false,
power: AtxKeyConfig::default(),
reset: AtxKeyConfig::default(),
led: AtxLedConfig::default(),
wol_interface: String::new(),
}
}
}
impl AtxConfig {
/// Convert to AtxControllerConfig for the controller
pub fn to_controller_config(&self) -> crate::atx::AtxControllerConfig {
@@ -427,16 +393,62 @@ impl Default for AudioConfig {
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum StreamMode {
/// WebRTC with H264/H265
WebRTC,
/// MJPEG over HTTP
#[default]
Mjpeg,
}
impl Default for StreamMode {
/// RTSP output codec
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum RtspCodec {
#[default]
H264,
H265,
}
/// RTSP configuration
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct RtspConfig {
/// Enable RTSP output
pub enabled: bool,
/// Bind IP address
pub bind: String,
/// RTSP TCP listen port
pub port: u16,
/// Stream path (without leading slash)
pub path: String,
/// Allow only one client connection at a time
pub allow_one_client: bool,
/// Output codec (H264/H265)
pub codec: RtspCodec,
/// Optional username for authentication
pub username: Option<String>,
/// Optional password for authentication
#[typeshare(skip)]
pub password: Option<String>,
}
impl Default for RtspConfig {
fn default() -> Self {
Self::Mjpeg
Self {
enabled: false,
bind: "0.0.0.0".to_string(),
port: 8554,
path: "live".to_string(),
allow_one_client: true,
codec: RtspCodec::H264,
username: None,
password: None,
}
}
}
@@ -444,8 +456,10 @@ impl Default for StreamMode {
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum EncoderType {
/// Auto-detect best encoder
#[default]
Auto,
/// Software encoder (libx264)
Software,
@@ -463,12 +477,6 @@ pub enum EncoderType {
V4l2m2m,
}
impl Default for EncoderType {
fn default() -> Self {
Self::Auto
}
}
impl EncoderType {
/// Convert to EncoderBackend for registry queries
pub fn to_backend(&self) -> Option<crate::video::encoder::registry::EncoderBackend> {

View File

@@ -82,7 +82,6 @@ impl ConfigStore {
id TEXT PRIMARY KEY,
username TEXT NOT NULL UNIQUE,
password_hash TEXT NOT NULL,
is_admin INTEGER NOT NULL DEFAULT 0,
created_at TEXT NOT NULL DEFAULT (datetime('now')),
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
)
@@ -121,6 +120,26 @@ impl ConfigStore {
.execute(pool)
.await?;
sqlx::query(
r#"
CREATE TABLE IF NOT EXISTS wol_history (
mac_address TEXT PRIMARY KEY,
updated_at INTEGER NOT NULL
)
"#,
)
.execute(pool)
.await?;
sqlx::query(
r#"
CREATE INDEX IF NOT EXISTS idx_wol_history_updated_at
ON wol_history(updated_at DESC)
"#,
)
.execute(pool)
.await?;
Ok(())
}

View File

@@ -124,6 +124,7 @@ pub struct ClientStats {
/// ```
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(tag = "event", content = "data")]
#[allow(clippy::large_enum_variant)]
pub enum SystemEvent {
// ============================================================================
// Video Stream Events

View File

@@ -230,13 +230,6 @@ impl ExtensionManager {
"-W".to_string(), // Writable (allow input)
];
// Add credential if set (still useful for additional security layer)
if let Some(ref cred) = c.credential {
if !cred.is_empty() {
args.extend(["-c".to_string(), cred.clone()]);
}
}
// Add shell as last argument
args.push(c.shell.clone());
Ok(args)

View File

@@ -102,9 +102,6 @@ pub struct TtydConfig {
pub port: u16,
/// Shell to execute
pub shell: String,
/// Credential in format "user:password" (optional)
#[serde(skip_serializing_if = "Option::is_none")]
pub credential: Option<String>,
}
impl Default for TtydConfig {
@@ -113,7 +110,6 @@ impl Default for TtydConfig {
enabled: false,
port: 7681,
shell: "/bin/bash".to_string(),
credential: None,
}
}
}
@@ -149,6 +145,7 @@ impl Default for GostcConfig {
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
#[derive(Default)]
pub struct EasytierConfig {
/// Enable auto-start
pub enabled: bool,
@@ -165,18 +162,6 @@ pub struct EasytierConfig {
pub virtual_ip: Option<String>,
}
impl Default for EasytierConfig {
fn default() -> Self {
Self {
enabled: false,
network_name: String::new(),
network_secret: String::new(),
peer_urls: Vec::new(),
virtual_ip: None,
}
}
}
/// Combined extensions configuration
#[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize, Default)]

View File

@@ -14,6 +14,7 @@ fn default_ch9329_baud_rate() -> u32 {
/// HID backend type
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
#[derive(Default)]
pub enum HidBackendType {
/// USB OTG gadget mode
Otg,
@@ -26,15 +27,10 @@ pub enum HidBackendType {
baud_rate: u32,
},
/// No HID backend (disabled)
#[default]
None,
}
impl Default for HidBackendType {
fn default() -> Self {
Self::None
}
}
impl HidBackendType {
/// Check if OTG backend is available on this system
pub fn otg_available() -> bool {

View File

@@ -219,8 +219,10 @@ impl From<u8> for LedStatus {
/// CH9329 work mode
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[repr(u8)]
#[derive(Default)]
pub enum WorkMode {
/// Mode 0: Standard USB Keyboard + Mouse (default)
#[default]
KeyboardMouse = 0x00,
/// Mode 1: Standard USB Keyboard only
KeyboardOnly = 0x01,
@@ -230,17 +232,13 @@ pub enum WorkMode {
CustomHid = 0x03,
}
impl Default for WorkMode {
fn default() -> Self {
Self::KeyboardMouse
}
}
/// CH9329 serial communication mode
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[repr(u8)]
#[derive(Default)]
pub enum SerialMode {
/// Mode 0: Protocol transmission mode (default)
#[default]
Protocol = 0x00,
/// Mode 1: ASCII mode
Ascii = 0x01,
@@ -248,12 +246,6 @@ pub enum SerialMode {
Transparent = 0x02,
}
impl Default for SerialMode {
fn default() -> Self {
Self::Protocol
}
}
/// CH9329 configuration parameters
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Ch9329Config {

View File

@@ -9,7 +9,7 @@
//!
//! Keyboard event (type 0x01):
//! - Byte 1: Event type (0x00 = down, 0x01 = up)
//! - Byte 2: Key code (USB HID usage code or JS keyCode)
//! - Byte 2: Key code (USB HID usage code)
//! - Byte 3: Modifiers bitmask
//! - Bit 0: Left Ctrl
//! - Bit 1: Left Shift
@@ -119,7 +119,7 @@ fn parse_keyboard_message(data: &[u8]) -> Option<HidChannelEvent> {
event_type,
key,
modifiers,
is_usb_hid: false, // WebRTC datachannel sends JS keycodes
is_usb_hid: true, // WebRTC/WebSocket HID channel sends USB HID usages
}))
}
@@ -245,6 +245,7 @@ mod tests {
assert_eq!(kb.key, 0x04);
assert!(kb.modifiers.left_ctrl);
assert!(!kb.modifiers.left_shift);
assert!(kb.is_usb_hid);
}
_ => panic!("Expected keyboard event"),
}
@@ -280,7 +281,7 @@ mod tests {
right_alt: false,
right_meta: false,
},
is_usb_hid: false,
is_usb_hid: true,
};
let encoded = encode_keyboard_event(&event);

View File

@@ -42,17 +42,17 @@ pub struct HidInfo {
pub screen_resolution: Option<(u32, u32)>,
}
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::{info, warn};
use crate::error::{AppError, Result};
use crate::otg::OtgService;
use std::time::Duration;
use tokio::sync::mpsc;
use tokio::sync::Mutex;
use tokio::task::JoinHandle;
use std::time::Duration;
const HID_EVENT_QUEUE_CAPACITY: usize = 64;
const HID_EVENT_SEND_TIMEOUT_MS: u64 = 30;
@@ -203,7 +203,10 @@ impl HidController {
));
}
if matches!(event.event_type, MouseEventType::Move | MouseEventType::MoveAbs) {
if matches!(
event.event_type,
MouseEventType::Move | MouseEventType::MoveAbs
) {
// Best-effort: drop/merge move events if queue is full
self.enqueue_mouse_move(event)
} else {
@@ -470,13 +473,7 @@ impl HidController {
None => break,
};
process_hid_event(
event,
&backend,
&monitor,
&backend_type,
)
.await;
process_hid_event(event, &backend, &monitor, &backend_type).await;
// After each event, flush latest move if pending
if pending_move_flag.swap(false, Ordering::AcqRel) {
@@ -505,9 +502,9 @@ impl HidController {
self.pending_move_flag.store(true, Ordering::Release);
Ok(())
}
Err(mpsc::error::TrySendError::Closed(_)) => Err(AppError::BadRequest(
"HID event queue closed".to_string(),
)),
Err(mpsc::error::TrySendError::Closed(_)) => {
Err(AppError::BadRequest("HID event queue closed".to_string()))
}
}
}
@@ -517,9 +514,11 @@ impl HidController {
Err(mpsc::error::TrySendError::Full(ev)) => {
// For non-move events, wait briefly to avoid dropping critical input
let tx = self.hid_tx.clone();
let send_result =
tokio::time::timeout(Duration::from_millis(HID_EVENT_SEND_TIMEOUT_MS), tx.send(ev))
.await;
let send_result = tokio::time::timeout(
Duration::from_millis(HID_EVENT_SEND_TIMEOUT_MS),
tx.send(ev),
)
.await;
if send_result.is_ok() {
Ok(())
} else {
@@ -527,9 +526,9 @@ impl HidController {
Ok(())
}
}
Err(mpsc::error::TrySendError::Closed(_)) => Err(AppError::BadRequest(
"HID event queue closed".to_string(),
)),
Err(mpsc::error::TrySendError::Closed(_)) => {
Err(AppError::BadRequest("HID event queue closed".to_string()))
}
}
}
}

View File

@@ -16,9 +16,10 @@ use crate::events::{EventBus, SystemEvent};
use crate::utils::LogThrottler;
/// HID health status
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, Default)]
pub enum HidHealthStatus {
/// Device is healthy and operational
#[default]
Healthy,
/// Device has an error, attempting recovery
Error {
@@ -33,12 +34,6 @@ pub enum HidHealthStatus {
Disconnected,
}
impl Default for HidHealthStatus {
fn default() -> Self {
Self::Healthy
}
}
/// HID health monitor configuration
#[derive(Debug, Clone)]
pub struct HidMonitorConfig {
@@ -196,7 +191,7 @@ impl HidHealthMonitor {
let attempt = self.retry_count.load(Ordering::Relaxed);
// Only publish every 5 attempts to avoid event spam
if attempt == 1 || attempt % 5 == 0 {
if attempt == 1 || attempt.is_multiple_of(5) {
debug!("HID {} reconnecting, attempt {}", backend, attempt);
if let Some(ref events) = *self.events.read().await {

View File

@@ -228,7 +228,7 @@ impl OtgBackend {
Ok(false)
}
Ok(_) => Ok(false),
Err(e) => Err(std::io::Error::new(std::io::ErrorKind::Other, e)),
Err(e) => Err(std::io::Error::other(e)),
}
}
@@ -393,21 +393,10 @@ impl OtgBackend {
/// Check if all HID device files exist
pub fn check_devices_exist(&self) -> bool {
self.keyboard_path
.as_ref()
.map_or(true, |p| p.exists())
&& self
.mouse_rel_path
.as_ref()
.map_or(true, |p| p.exists())
&& self
.mouse_abs_path
.as_ref()
.map_or(true, |p| p.exists())
&& self
.consumer_path
.as_ref()
.map_or(true, |p| p.exists())
self.keyboard_path.as_ref().is_none_or(|p| p.exists())
&& self.mouse_rel_path.as_ref().is_none_or(|p| p.exists())
&& self.mouse_abs_path.as_ref().is_none_or(|p| p.exists())
&& self.consumer_path.as_ref().is_none_or(|p| p.exists())
}
/// Get list of missing device paths
@@ -952,9 +941,7 @@ impl HidBackend for OtgBackend {
}
fn supports_absolute_mouse(&self) -> bool {
self.mouse_abs_path
.as_ref()
.map_or(false, |p| p.exists())
self.mouse_abs_path.as_ref().is_some_and(|p| p.exists())
}
async fn send_consumer(&self, event: ConsumerEvent) -> Result<()> {

View File

@@ -14,9 +14,11 @@ pub mod hid;
pub mod modules;
pub mod msd;
pub mod otg;
pub mod rtsp;
pub mod rustdesk;
pub mod state;
pub mod stream;
pub mod update;
pub mod utils;
pub mod video;
pub mod web;

View File

@@ -19,9 +19,14 @@ use one_kvm::extensions::ExtensionManager;
use one_kvm::hid::{HidBackendType, HidController};
use one_kvm::msd::MsdController;
use one_kvm::otg::{configfs, OtgService};
use one_kvm::rtsp::RtspService;
use one_kvm::rustdesk::RustDeskService;
use one_kvm::state::AppState;
use one_kvm::update::UpdateService;
use one_kvm::utils::bind_tcp_listener;
use one_kvm::video::codec_constraints::{
enforce_constraints_with_stream_manager, StreamCodecConstraints,
};
use one_kvm::video::format::{PixelFormat, Resolution};
use one_kvm::video::{Streamer, VideoStreamManager};
use one_kvm::web;
@@ -158,7 +163,11 @@ async fn main() -> anyhow::Result<()> {
}
let bind_ips = resolve_bind_addresses(&config.web)?;
let scheme = if config.web.https_enabled { "https" } else { "http" };
let scheme = if config.web.https_enabled {
"https"
} else {
"http"
};
let bind_port = if config.web.https_enabled {
config.web.https_port
} else {
@@ -530,7 +539,24 @@ async fn main() -> anyhow::Result<()> {
None
};
// Create RTSP service (optional, based on config)
let rtsp = if config.rtsp.enabled {
tracing::info!(
"Initializing RTSP service: rtsp://{}:{}/{}",
config.rtsp.bind,
config.rtsp.port,
config.rtsp.path
);
let service = RtspService::new(config.rtsp.clone(), stream_manager.clone());
Some(Arc::new(service))
} else {
tracing::info!("RTSP disabled in configuration");
None
};
// Create application state
let update_service = Arc::new(UpdateService::new(data_dir.join("updates")));
let state = AppState::new(
config_store.clone(),
session_store,
@@ -542,8 +568,10 @@ async fn main() -> anyhow::Result<()> {
atx,
audio,
rustdesk.clone(),
rtsp.clone(),
extensions.clone(),
events.clone(),
update_service,
shutdown_tx.clone(),
data_dir.clone(),
);
@@ -573,6 +601,30 @@ async fn main() -> anyhow::Result<()> {
}
}
// Start RTSP service if enabled
if let Some(ref service) = rtsp {
if let Err(e) = service.start().await {
tracing::error!("Failed to start RTSP service: {}", e);
} else {
tracing::info!("RTSP service started");
}
}
// Enforce startup codec constraints (e.g. RTSP/RustDesk locks)
{
let runtime_config = state.config.get();
let constraints = StreamCodecConstraints::from_config(&runtime_config);
match enforce_constraints_with_stream_manager(&state.stream_manager, &constraints).await {
Ok(result) if result.changed => {
if let Some(message) = result.message {
tracing::info!("{}", message);
}
}
Ok(_) => {}
Err(e) => tracing::warn!("Failed to enforce startup codec constraints: {}", e),
}
}
// Start enabled extensions
{
let ext_config = config_store.get();
@@ -646,7 +698,7 @@ async fn main() -> anyhow::Result<()> {
let server = axum_server::from_tcp_rustls(listener, tls_config.clone())?
.serve(app.clone().into_make_service());
servers.push(async move { server.await });
servers.push(server);
}
tokio::select! {
@@ -712,10 +764,13 @@ fn init_logging(level: LogLevel, verbose_count: u8) {
let env_filter =
tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| filter.into());
tracing_subscriber::registry()
if let Err(err) = tracing_subscriber::registry()
.with(env_filter)
.with(tracing_subscriber::fmt::layer())
.init();
.try_init()
{
eprintln!("failed to initialize tracing: {}", err);
}
}
/// Get the application data directory
@@ -879,6 +934,15 @@ async fn cleanup(state: &Arc<AppState>) {
}
}
// Stop RTSP service
if let Some(ref service) = *state.rtsp.read().await {
if let Err(e) = service.stop().await {
tracing::warn!("Failed to stop RTSP service: {}", e);
} else {
tracing::info!("RTSP service stopped");
}
}
// Stop video
if let Err(e) = state.stream_manager.stop().await {
tracing::warn!("Failed to stop streamer: {}", e);

View File

@@ -52,10 +52,7 @@ impl MsdController {
/// # Parameters
/// * `otg_service` - OTG service for gadget management
/// * `msd_dir` - Base directory for MSD storage
pub fn new(
otg_service: Arc<OtgService>,
msd_dir: impl Into<PathBuf>,
) -> Self {
pub fn new(otg_service: Arc<OtgService>, msd_dir: impl Into<PathBuf>) -> Self {
let msd_dir = msd_dir.into();
let images_path = msd_dir.join("images");
let ventoy_dir = msd_dir.join("ventoy");

View File

@@ -87,8 +87,7 @@ impl ImageManager {
.ok()
.and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
.map(|d| {
chrono::DateTime::from_timestamp(d.as_secs() as i64, 0)
.unwrap_or_else(|| Utc::now().into())
chrono::DateTime::from_timestamp(d.as_secs() as i64, 0).unwrap_or_else(Utc::now)
})
.unwrap_or_else(Utc::now);
@@ -400,7 +399,7 @@ impl ImageManager {
.headers()
.get(reqwest::header::CONTENT_DISPOSITION)
.and_then(|v| v.to_str().ok())
.and_then(|s| extract_filename_from_content_disposition(s));
.and_then(extract_filename_from_content_disposition);
if let Some(name) = from_header {
sanitize_filename(&name)

View File

@@ -15,9 +15,10 @@ use crate::events::{EventBus, SystemEvent};
use crate::utils::LogThrottler;
/// MSD health status
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, Default)]
pub enum MsdHealthStatus {
/// Device is healthy and operational
#[default]
Healthy,
/// Device has an error
Error {
@@ -28,12 +29,6 @@ pub enum MsdHealthStatus {
},
}
impl Default for MsdHealthStatus {
fn default() -> Self {
Self::Healthy
}
}
/// MSD health monitor configuration
#[derive(Debug, Clone)]
pub struct MsdMonitorConfig {

View File

@@ -7,8 +7,10 @@ use std::path::PathBuf;
/// MSD operating mode
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[derive(Default)]
pub enum MsdMode {
/// No storage connected
#[default]
None,
/// Image file mounted (ISO/IMG)
Image,
@@ -16,12 +18,6 @@ pub enum MsdMode {
Drive,
}
impl Default for MsdMode {
fn default() -> Self {
Self::None
}
}
/// Image file metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImageInfo {

View File

@@ -328,10 +328,7 @@ impl VentoyDrive {
let image = match VentoyImage::open(&path) {
Ok(img) => img,
Err(e) => {
let _ = rt.block_on(tx.send(Err(std::io::Error::new(
std::io::ErrorKind::Other,
e.to_string(),
))));
let _ = rt.block_on(tx.send(Err(std::io::Error::other(e.to_string()))));
return;
}
};
@@ -341,10 +338,7 @@ impl VentoyDrive {
// Stream the file through the writer
if let Err(e) = image.read_file_to_writer(&file_path_owned, &mut chunk_writer) {
let _ = rt.block_on(tx.send(Err(std::io::Error::new(
std::io::ErrorKind::Other,
e.to_string(),
))));
let _ = rt.block_on(tx.send(Err(std::io::Error::other(e.to_string()))));
}
});
@@ -543,17 +537,14 @@ mod tests {
/// Decompress xz file using system command
fn decompress_xz(src: &std::path::Path, dst: &std::path::Path) -> std::io::Result<()> {
let output = Command::new("xz")
.args(&["-d", "-k", "-c", src.to_str().unwrap()])
.args(["-d", "-k", "-c", src.to_str().unwrap()])
.output()?;
if !output.status.success() {
return Err(std::io::Error::new(
std::io::ErrorKind::Other,
format!(
"xz decompress failed: {}",
String::from_utf8_lossy(&output.stderr)
),
));
return Err(std::io::Error::other(format!(
"xz decompress failed: {}",
String::from_utf8_lossy(&output.stderr)
)));
}
std::fs::write(dst, &output.stdout)?;

View File

@@ -422,7 +422,11 @@ impl OtgGadgetManager {
if dest.exists() {
if let Err(e) = remove_file(&dest) {
warn!("Failed to remove existing config link {}: {}", dest.display(), e);
warn!(
"Failed to remove existing config link {}: {}",
dest.display(),
e
);
continue;
}
}

View File

@@ -35,7 +35,7 @@ const FLAG_HID: u8 = 0b01;
const FLAG_MSD: u8 = 0b10;
/// HID device paths
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Default)]
pub struct HidDevicePaths {
pub keyboard: Option<PathBuf>,
pub mouse_relative: Option<PathBuf>,
@@ -43,17 +43,6 @@ pub struct HidDevicePaths {
pub consumer: Option<PathBuf>,
}
impl Default for HidDevicePaths {
fn default() -> Self {
Self {
keyboard: None,
mouse_relative: None,
mouse_absolute: None,
consumer: None,
}
}
}
impl HidDevicePaths {
pub fn existing_paths(&self) -> Vec<PathBuf> {
let mut paths = Vec::new();
@@ -239,12 +228,10 @@ impl OtgService {
let requested_functions = self.hid_functions.read().await.clone();
{
let state = self.state.read().await;
if state.hid_enabled {
if state.hid_functions.as_ref() == Some(&requested_functions) {
if let Some(ref paths) = state.hid_paths {
info!("HID already enabled, returning existing paths");
return Ok(paths.clone());
}
if state.hid_enabled && state.hid_functions.as_ref() == Some(&requested_functions) {
if let Some(ref paths) = state.hid_paths {
info!("HID already enabled, returning existing paths");
return Ok(paths.clone());
}
}
}
@@ -671,7 +658,7 @@ mod tests {
fn test_service_creation() {
let _service = OtgService::new();
// Just test that creation doesn't panic
assert!(!OtgService::is_available() || true); // Depends on environment
let _ = OtgService::is_available(); // Depends on environment
}
#[tokio::test]

3
src/rtsp/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
pub mod service;
pub use service::{RtspService, RtspServiceStatus};

1343
src/rtsp/service.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -50,7 +50,7 @@ fn decode_header(first_byte: u8, header_bytes: &[u8]) -> (usize, usize) {
let head_len = ((first_byte & 0x3) + 1) as usize;
let mut n = first_byte as usize;
if head_len > 1 && header_bytes.len() >= 1 {
if head_len > 1 && !header_bytes.is_empty() {
n |= (header_bytes[0] as usize) << 8;
}
if head_len > 2 && header_bytes.len() >= 2 {

View File

@@ -202,9 +202,11 @@ mod tests {
#[test]
fn test_rendezvous_addr() {
let mut config = RustDeskConfig::default();
let mut config = RustDeskConfig {
rendezvous_server: "example.com".to_string(),
..Default::default()
};
config.rendezvous_server = "example.com".to_string();
assert_eq!(config.rendezvous_addr(), "example.com:21116");
config.rendezvous_server = "example.com:21116".to_string();
@@ -217,10 +219,12 @@ mod tests {
#[test]
fn test_relay_addr() {
let mut config = RustDeskConfig::default();
let mut config = RustDeskConfig {
rendezvous_server: "example.com".to_string(),
..Default::default()
};
// Rendezvous server configured, relay defaults to same host
config.rendezvous_server = "example.com".to_string();
assert_eq!(config.relay_addr(), Some("example.com:21117".to_string()));
// Explicit relay server
@@ -238,10 +242,12 @@ mod tests {
#[test]
fn test_effective_rendezvous_server() {
let mut config = RustDeskConfig::default();
let mut config = RustDeskConfig {
rendezvous_server: "custom.example.com".to_string(),
..Default::default()
};
// When user sets a server, use it
config.rendezvous_server = "custom.example.com".to_string();
assert_eq!(config.effective_rendezvous_server(), "custom.example.com");
// When empty, returns empty

View File

@@ -23,6 +23,9 @@ use tracing::{debug, error, info, warn};
use crate::audio::AudioController;
use crate::hid::{HidController, KeyEventType, KeyboardEvent, KeyboardModifiers};
use crate::video::codec_constraints::{
encoder_codec_to_id, encoder_codec_to_video_codec, video_codec_to_encoder_codec,
};
use crate::video::encoder::registry::{EncoderRegistry, VideoEncoderType};
use crate::video::encoder::BitratePreset;
use crate::video::stream_manager::VideoStreamManager;
@@ -627,7 +630,7 @@ impl Connection {
// Select the best available video codec
// Priority: H264 > H265 > VP8 > VP9 (H264/H265 leverage hardware encoding)
let negotiated = self.negotiate_video_codec();
let negotiated = self.negotiate_video_codec().await;
self.negotiated_codec = Some(negotiated);
info!("Negotiated video codec: {:?}", negotiated);
@@ -641,28 +644,51 @@ impl Connection {
/// Negotiate video codec - select the best available encoder
/// Priority: H264 > H265 > VP8 > VP9 (H264/H265 leverage hardware encoding on embedded devices)
fn negotiate_video_codec(&self) -> VideoEncoderType {
async fn negotiate_video_codec(&self) -> VideoEncoderType {
let registry = EncoderRegistry::global();
let constraints = self.current_codec_constraints().await;
// Check availability in priority order
// H264 is preferred because it has the best hardware encoder support (RKMPP, VAAPI, etc.)
// and most RustDesk clients support H264 hardware decoding
if registry.is_format_available(VideoEncoderType::H264, false) {
if constraints.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::H264)
&& registry.is_format_available(VideoEncoderType::H264, false)
{
return VideoEncoderType::H264;
}
if registry.is_format_available(VideoEncoderType::H265, false) {
if constraints.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::H265)
&& registry.is_format_available(VideoEncoderType::H265, false)
{
return VideoEncoderType::H265;
}
if registry.is_format_available(VideoEncoderType::VP8, false) {
if constraints.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::VP8)
&& registry.is_format_available(VideoEncoderType::VP8, false)
{
return VideoEncoderType::VP8;
}
if registry.is_format_available(VideoEncoderType::VP9, false) {
if constraints.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::VP9)
&& registry.is_format_available(VideoEncoderType::VP9, false)
{
return VideoEncoderType::VP9;
}
// Fallback to H264 (should be available via hardware or software encoder)
warn!("No video encoder available, defaulting to H264");
VideoEncoderType::H264
// Fallback to preferred allowed codec
let preferred = constraints.preferred_webrtc_codec();
warn!(
"No allowed encoder available in priority order, falling back to {}",
encoder_codec_to_id(video_codec_to_encoder_codec(preferred))
);
video_codec_to_encoder_codec(preferred)
}
async fn current_codec_constraints(
&self,
) -> crate::video::codec_constraints::StreamCodecConstraints {
if let Some(ref video_manager) = self.video_manager {
video_manager.codec_constraints().await
} else {
crate::video::codec_constraints::StreamCodecConstraints::unrestricted()
}
}
/// Handle misc message with Arc writer
@@ -729,7 +755,7 @@ impl Connection {
}
// Check if client sent supported_decoding with a codec preference
if let Some(ref supported_decoding) = opt.supported_decoding.as_ref() {
if let Some(supported_decoding) = opt.supported_decoding.as_ref() {
let prefer = supported_decoding.prefer.value();
debug!("Client codec preference: prefer={}", prefer);
@@ -747,6 +773,16 @@ impl Connection {
if let Some(new_codec) = requested_codec {
// Check if this codec is different from current and available
if self.negotiated_codec != Some(new_codec) {
let constraints = self.current_codec_constraints().await;
if !constraints.is_webrtc_codec_allowed(encoder_codec_to_video_codec(new_codec))
{
warn!(
"Client requested codec {:?} but it's blocked by constraints: {}",
new_codec, constraints.reason
);
return Ok(());
}
let registry = EncoderRegistry::global();
if registry.is_format_available(new_codec, false) {
info!(
@@ -1080,12 +1116,21 @@ impl Connection {
if success {
// Dynamically detect available encoders
let registry = EncoderRegistry::global();
let constraints = self.current_codec_constraints().await;
// Check which encoders are available (include software fallback)
let h264_available = registry.is_format_available(VideoEncoderType::H264, false);
let h265_available = registry.is_format_available(VideoEncoderType::H265, false);
let vp8_available = registry.is_format_available(VideoEncoderType::VP8, false);
let vp9_available = registry.is_format_available(VideoEncoderType::VP9, false);
let h264_available = constraints
.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::H264)
&& registry.is_format_available(VideoEncoderType::H264, false);
let h265_available = constraints
.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::H265)
&& registry.is_format_available(VideoEncoderType::H265, false);
let vp8_available = constraints
.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::VP8)
&& registry.is_format_available(VideoEncoderType::VP8, false);
let vp9_available = constraints
.is_webrtc_codec_allowed(crate::video::encoder::VideoCodecType::VP9)
&& registry.is_format_available(VideoEncoderType::VP9, false);
info!(
"Server encoding capabilities: H264={}, H265={}, VP8={}, VP9={}",
@@ -1352,8 +1397,12 @@ impl Connection {
debug!("Mouse event: x={}, y={}, mask={}", me.x, me.y, me.mask);
// Convert RustDesk mouse event to One-KVM mouse events
let mouse_events =
convert_mouse_event(me, self.screen_width, self.screen_height, self.relative_mouse_active);
let mouse_events = convert_mouse_event(
me,
self.screen_width,
self.screen_height,
self.relative_mouse_active,
);
// Send to HID controller if available
if let Some(ref hid) = self.hid {
@@ -1616,7 +1665,10 @@ async fn run_video_streaming(
);
}
if let Err(e) = video_manager.request_keyframe().await {
debug!("Failed to request keyframe for connection {}: {}", conn_id, e);
debug!(
"Failed to request keyframe for connection {}: {}",
conn_id, e
);
}
// Inner loop: receives frames from current subscription

View File

@@ -189,7 +189,7 @@ pub fn hash_password_double(password: &str, salt: &str, challenge: &str) -> Vec<
// Second hash: SHA256(first_hash + challenge)
let mut hasher2 = Sha256::new();
hasher2.update(&first_hash);
hasher2.update(first_hash);
hasher2.update(challenge.as_bytes());
hasher2.finalize().to_vec()
}

View File

@@ -127,7 +127,7 @@ impl VideoFrameAdapter {
// Inject cached SPS/PPS before IDR when missing
if is_keyframe && (!has_sps || !has_pps) {
if let (Some(ref sps), Some(ref pps)) = (self.h264_sps.as_ref(), self.h264_pps.as_ref()) {
if let (Some(sps), Some(pps)) = (self.h264_sps.as_ref(), self.h264_pps.as_ref()) {
let mut out = Vec::with_capacity(8 + sps.len() + pps.len() + data.len());
out.extend_from_slice(&[0, 0, 0, 1]);
out.extend_from_slice(sps);

View File

@@ -36,8 +36,8 @@ use tracing::{debug, error, info, warn};
use crate::audio::AudioController;
use crate::hid::HidController;
use crate::video::stream_manager::VideoStreamManager;
use crate::utils::bind_tcp_listener;
use crate::video::stream_manager::VideoStreamManager;
use self::config::RustDeskConfig;
use self::connection::ConnectionManager;
@@ -559,6 +559,7 @@ impl RustDeskService {
/// 2. Send RelayResponse with client's socket_addr
/// 3. Connect to RELAY server
/// 4. Accept connection without waiting for response
#[allow(clippy::too_many_arguments)]
async fn handle_relay_request(
rendezvous_addr: &str,
relay_server: &str,

View File

@@ -536,6 +536,10 @@ impl RendezvousMediator {
}
}
Some(rendezvous_message::Union::PunchHole(ph)) => {
let config = self.config.read().clone();
let effective_relay_server =
select_relay_server(config.relay_server.as_deref(), &ph.relay_server);
// Decode the peer's socket address
let peer_addr = if !ph.socket_addr.is_empty() {
AddrMangle::decode(&ph.socket_addr)
@@ -544,8 +548,12 @@ impl RendezvousMediator {
};
info!(
"Received PunchHole request: peer_addr={:?}, socket_addr_len={}, relay_server={}, nat_type={:?}",
peer_addr, ph.socket_addr.len(), ph.relay_server, ph.nat_type
"Received PunchHole request: peer_addr={:?}, socket_addr_len={}, relay_server={}, effective_relay_server={}, nat_type={:?}",
peer_addr,
ph.socket_addr.len(),
ph.relay_server,
effective_relay_server.as_deref().unwrap_or(""),
ph.nat_type
);
// Send PunchHoleSent to acknowledge
@@ -555,13 +563,19 @@ impl RendezvousMediator {
info!(
"Sending PunchHoleSent: id={}, peer_addr={:?}, relay_server={}",
id, peer_addr, ph.relay_server
id,
peer_addr,
effective_relay_server
.as_deref()
.unwrap_or(ph.relay_server.as_str())
);
let msg = make_punch_hole_sent(
&ph.socket_addr.to_vec(), // Use peer's socket_addr, not ours
&ph.socket_addr, // Use peer's socket_addr, not ours
&id,
&ph.relay_server,
effective_relay_server
.as_deref()
.unwrap_or(ph.relay_server.as_str()),
ph.nat_type.enum_value().unwrap_or(NatType::UNKNOWN_NAT),
env!("CARGO_PKG_VERSION"),
);
@@ -573,16 +587,10 @@ impl RendezvousMediator {
}
// Try P2P direct connection first, fall back to relay if needed
if !ph.relay_server.is_empty() {
let relay_server = if ph.relay_server.contains(':') {
ph.relay_server.clone()
} else {
format!("{}:21117", ph.relay_server)
};
if let Some(relay_server) = effective_relay_server {
// Generate a standard UUID v4 for relay pairing
// This must match the format used by RustDesk client
let uuid = uuid::Uuid::new_v4().to_string();
let config = self.config.read().clone();
let rendezvous_addr = config.rendezvous_addr();
let device_id = config.device_id.clone();
@@ -606,41 +614,56 @@ impl RendezvousMediator {
device_id,
);
}
} else {
debug!("No relay server available for PunchHole, skipping relay fallback");
}
}
Some(rendezvous_message::Union::RequestRelay(rr)) => {
let config = self.config.read().clone();
let effective_relay_server =
select_relay_server(config.relay_server.as_deref(), &rr.relay_server);
info!(
"Received RequestRelay: relay_server={}, uuid={}, secure={}",
rr.relay_server, rr.uuid, rr.secure
"Received RequestRelay: relay_server={}, effective_relay_server={}, uuid={}, secure={}",
rr.relay_server,
effective_relay_server.as_deref().unwrap_or(""),
rr.uuid,
rr.secure
);
// Call the relay callback to handle the connection
if let Some(callback) = self.relay_callback.read().as_ref() {
let relay_server = if rr.relay_server.contains(':') {
rr.relay_server.clone()
if let Some(relay_server) = effective_relay_server {
let rendezvous_addr = config.rendezvous_addr();
let device_id = config.device_id.clone();
callback(
rendezvous_addr,
relay_server,
rr.uuid.clone(),
rr.socket_addr.to_vec(),
device_id,
);
} else {
format!("{}:21117", rr.relay_server)
};
let config = self.config.read().clone();
let rendezvous_addr = config.rendezvous_addr();
let device_id = config.device_id.clone();
callback(
rendezvous_addr,
relay_server,
rr.uuid.clone(),
rr.socket_addr.to_vec(),
device_id,
);
debug!("No relay server available for RequestRelay callback");
}
}
}
Some(rendezvous_message::Union::FetchLocalAddr(fla)) => {
let config = self.config.read().clone();
let effective_relay_server =
select_relay_server(config.relay_server.as_deref(), &fla.relay_server)
.unwrap_or_default();
// Decode the peer address for logging
let peer_addr = AddrMangle::decode(&fla.socket_addr);
info!(
"Received FetchLocalAddr request: peer_addr={:?}, socket_addr_len={}, relay_server={}",
peer_addr, fla.socket_addr.len(), fla.relay_server
"Received FetchLocalAddr request: peer_addr={:?}, socket_addr_len={}, relay_server={}, effective_relay_server={}",
peer_addr,
fla.socket_addr.len(),
fla.relay_server,
effective_relay_server
);
// Respond with our local address for same-LAN direct connection
self.send_local_addr(socket, &fla.socket_addr, &fla.relay_server)
self.send_local_addr(socket, &fla.socket_addr, &effective_relay_server)
.await?;
}
Some(rendezvous_message::Union::ConfigureUpdate(cu)) => {
@@ -692,6 +715,25 @@ impl RendezvousMediator {
/// This encoding mangles the address to avoid detection.
pub struct AddrMangle;
fn normalize_relay_server(server: &str) -> Option<String> {
let trimmed = server.trim();
if trimmed.is_empty() {
return None;
}
if trimmed.contains(':') {
Some(trimmed.to_string())
} else {
Some(format!("{}:21117", trimmed))
}
}
fn select_relay_server(local_relay: Option<&str>, server_relay: &str) -> Option<String> {
local_relay
.and_then(normalize_relay_server)
.or_else(|| normalize_relay_server(server_relay))
}
impl AddrMangle {
/// Encode a SocketAddr to bytes using RustDesk's mangle algorithm
pub fn encode(addr: SocketAddr) -> Vec<u8> {
@@ -876,3 +918,47 @@ fn get_local_addresses() -> Vec<std::net::IpAddr> {
addrs
}
#[cfg(test)]
mod tests {
use super::{normalize_relay_server, select_relay_server};
#[test]
fn test_normalize_relay_server() {
assert_eq!(normalize_relay_server(""), None);
assert_eq!(normalize_relay_server(" "), None);
assert_eq!(
normalize_relay_server("relay.example.com"),
Some("relay.example.com:21117".to_string())
);
assert_eq!(
normalize_relay_server("relay.example.com:22117"),
Some("relay.example.com:22117".to_string())
);
}
#[test]
fn test_select_relay_server_prefers_local() {
assert_eq!(
select_relay_server(Some("local.example.com:21117"), "server.example.com:21117"),
Some("local.example.com:21117".to_string())
);
assert_eq!(
select_relay_server(Some("local.example.com"), "server.example.com:21117"),
Some("local.example.com:21117".to_string())
);
assert_eq!(
select_relay_server(Some(" "), "server.example.com"),
Some("server.example.com:21117".to_string())
);
assert_eq!(
select_relay_server(None, "server.example.com:21117"),
Some("server.example.com:21117".to_string())
);
assert_eq!(select_relay_server(None, ""), None);
}
}

View File

@@ -13,7 +13,9 @@ use crate::extensions::ExtensionManager;
use crate::hid::HidController;
use crate::msd::MsdController;
use crate::otg::OtgService;
use crate::rtsp::RtspService;
use crate::rustdesk::RustDeskService;
use crate::update::UpdateService;
use crate::video::VideoStreamManager;
/// Application-wide state shared across handlers
@@ -50,10 +52,14 @@ pub struct AppState {
pub audio: Arc<AudioController>,
/// RustDesk remote access service (optional)
pub rustdesk: Arc<RwLock<Option<Arc<RustDeskService>>>>,
/// RTSP streaming service (optional)
pub rtsp: Arc<RwLock<Option<Arc<RtspService>>>>,
/// Extension manager (ttyd, gostc, easytier)
pub extensions: Arc<ExtensionManager>,
/// Event bus for real-time notifications
pub events: Arc<EventBus>,
/// Online update service
pub update: Arc<UpdateService>,
/// Shutdown signal sender
pub shutdown_tx: broadcast::Sender<()>,
/// Recently revoked session IDs (for client kick detection)
@@ -64,6 +70,7 @@ pub struct AppState {
impl AppState {
/// Create new application state
#[allow(clippy::too_many_arguments)]
pub fn new(
config: ConfigStore,
sessions: SessionStore,
@@ -75,8 +82,10 @@ impl AppState {
atx: Option<AtxController>,
audio: Arc<AudioController>,
rustdesk: Option<Arc<RustDeskService>>,
rtsp: Option<Arc<RtspService>>,
extensions: Arc<ExtensionManager>,
events: Arc<EventBus>,
update: Arc<UpdateService>,
shutdown_tx: broadcast::Sender<()>,
data_dir: std::path::PathBuf,
) -> Arc<Self> {
@@ -91,8 +100,10 @@ impl AppState {
atx: Arc::new(RwLock::new(atx)),
audio,
rustdesk: Arc::new(RwLock::new(rustdesk)),
rtsp: Arc::new(RwLock::new(rtsp)),
extensions,
events,
update,
shutdown_tx,
revoked_sessions: Arc::new(RwLock::new(VecDeque::new())),
data_dir,

View File

@@ -15,18 +15,16 @@
//!
//! Note: Audio WebSocket is handled separately by audio_ws.rs (/api/ws/audio)
use crate::utils::LogThrottler;
use crate::video::v4l2r_capture::V4l2rCaptureStream;
use std::collections::HashMap;
use std::io;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::{Mutex, RwLock};
use tracing::{error, info, warn};
use v4l::buffer::Type as BufferType;
use v4l::io::traits::CaptureStream;
use v4l::prelude::*;
use v4l::video::Capture;
use v4l::video::capture::Parameters;
use v4l::Format;
use crate::audio::AudioController;
use crate::error::{AppError, Result};
@@ -491,8 +489,7 @@ impl MjpegStreamer {
}
};
let mut device_opt: Option<Device> = None;
let mut format_opt: Option<Format> = None;
let mut stream_opt: Option<V4l2rCaptureStream> = None;
let mut last_error: Option<String> = None;
for attempt in 0..MAX_RETRIES {
@@ -501,8 +498,18 @@ impl MjpegStreamer {
return;
}
let device = match Device::with_path(&device_path) {
Ok(d) => d,
match V4l2rCaptureStream::open(
&device_path,
config.resolution,
config.format,
config.fps,
4,
Duration::from_secs(2),
) {
Ok(stream) => {
stream_opt = Some(stream);
break;
}
Err(e) => {
let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") {
@@ -519,42 +526,12 @@ impl MjpegStreamer {
last_error = Some(err_str);
break;
}
};
let requested = Format::new(
config.resolution.width,
config.resolution.height,
config.format.to_fourcc(),
);
match device.set_format(&requested) {
Ok(actual) => {
device_opt = Some(device);
format_opt = Some(actual);
break;
}
Err(e) => {
let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") {
warn!(
"Device busy on set_format attempt {}/{}, retrying in {}ms...",
attempt + 1,
MAX_RETRIES,
RETRY_DELAY_MS
);
std::thread::sleep(std::time::Duration::from_millis(RETRY_DELAY_MS));
last_error = Some(err_str);
continue;
}
last_error = Some(err_str);
break;
}
}
}
let (device, actual_format) = match (device_opt, format_opt) {
(Some(d), Some(f)) => (d, f),
_ => {
let mut stream = match stream_opt {
Some(stream) => stream,
None => {
error!(
"Failed to open device {:?}: {}",
device_path,
@@ -567,40 +544,36 @@ impl MjpegStreamer {
}
};
let resolution = stream.resolution();
let pixel_format = stream.format();
let stride = stream.stride();
info!(
"Capture format: {}x{} {:?} stride={}",
actual_format.width, actual_format.height, actual_format.fourcc, actual_format.stride
resolution.width, resolution.height, pixel_format, stride
);
let resolution = Resolution::new(actual_format.width, actual_format.height);
let pixel_format =
PixelFormat::from_fourcc(actual_format.fourcc).unwrap_or(config.format);
if config.fps > 0 {
if let Err(e) = device.set_params(&Parameters::with_fps(config.fps)) {
warn!("Failed to set hardware FPS: {}", e);
}
}
let mut stream = match MmapStream::with_buffers(&device, BufferType::VideoCapture, 4) {
Ok(s) => s,
Err(e) => {
error!("Failed to create capture stream: {}", e);
set_state(MjpegStreamerState::Error);
self.mjpeg_handler.set_offline();
self.direct_active.store(false, Ordering::SeqCst);
return;
}
};
let buffer_pool = Arc::new(FrameBufferPool::new(8));
let mut signal_present = true;
let mut sequence: u64 = 0;
let mut validate_counter: u64 = 0;
let capture_error_throttler = LogThrottler::with_secs(5);
let mut suppressed_capture_errors: HashMap<String, u64> = HashMap::new();
let classify_capture_error = |err: &std::io::Error| -> String {
let message = err.to_string();
if message.contains("dqbuf failed") && message.contains("EINVAL") {
"capture_dqbuf_einval".to_string()
} else if message.contains("dqbuf failed") {
"capture_dqbuf".to_string()
} else {
format!("capture_{:?}", err.kind())
}
};
while !self.direct_stop.load(Ordering::Relaxed) {
let (buf, meta) = match stream.next() {
Ok(frame_data) => frame_data,
let mut owned = buffer_pool.take(MIN_CAPTURE_FRAME_SIZE);
let meta = match stream.next_into(&mut owned) {
Ok(meta) => meta,
Err(e) => {
if e.kind() == io::ErrorKind::TimedOut {
if signal_present {
@@ -628,35 +601,43 @@ impl MjpegStreamer {
return;
}
error!("Capture error: {}", e);
let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) {
let suppressed = suppressed_capture_errors.remove(&key).unwrap_or(0);
if suppressed > 0 {
error!("Capture error: {} (suppressed {} repeats)", e, suppressed);
} else {
error!("Capture error: {}", e);
}
} else {
let counter = suppressed_capture_errors.entry(key).or_insert(0);
*counter = counter.saturating_add(1);
}
continue;
}
};
let frame_size = meta.bytesused as usize;
let frame_size = meta.bytes_used;
if frame_size < MIN_CAPTURE_FRAME_SIZE {
continue;
}
validate_counter = validate_counter.wrapping_add(1);
if pixel_format.is_compressed()
&& validate_counter % JPEG_VALIDATE_INTERVAL == 0
&& !VideoFrame::is_valid_jpeg_bytes(&buf[..frame_size])
&& validate_counter.is_multiple_of(JPEG_VALIDATE_INTERVAL)
&& !VideoFrame::is_valid_jpeg_bytes(&owned[..frame_size])
{
continue;
}
let mut owned = buffer_pool.take(frame_size);
owned.resize(frame_size, 0);
owned[..frame_size].copy_from_slice(&buf[..frame_size]);
owned.truncate(frame_size);
let frame = VideoFrame::from_pooled(
Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))),
resolution,
pixel_format,
actual_format.stride,
sequence,
stride,
meta.sequence,
);
sequence = sequence.wrapping_add(1);
if !signal_present {
signal_present = true;

606
src/update/mod.rs Normal file
View File

@@ -0,0 +1,606 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use futures::StreamExt;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::sync::{broadcast, RwLock, Semaphore};
use crate::error::{AppError, Result};
// Default host for update manifests/artifacts; can be overridden at startup
// via the ONE_KVM_UPDATE_BASE_URL environment variable (see UpdateService::new).
const DEFAULT_UPDATE_BASE_URL: &str = "https://update.one-kvm.cn";
/// Release channel selector; serialized over the API as lowercase
/// ("stable" / "beta") via the serde rename below.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum UpdateChannel {
    Stable,
    Beta,
}
impl Default for UpdateChannel {
fn default() -> Self {
Self::Stable
}
}
/// Shape of `/v1/channels.json`: the latest published version string
/// for each channel.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChannelsManifest {
    pub stable: String,
    pub beta: String,
}
/// Shape of `/v1/releases.json`: flat list of all published releases.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReleasesManifest {
    pub releases: Vec<ReleaseInfo>,
}
/// One published release as described by the releases manifest.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReleaseInfo {
    pub version: String,
    pub channel: UpdateChannel,
    pub published_at: String,
    // Human-readable changelog lines; may be absent in the manifest.
    #[serde(default)]
    pub notes: Vec<String>,
    // Downloadable binaries keyed by target triple
    // (e.g. "x86_64-unknown-linux-gnu", matching current_target_triple()).
    #[serde(default)]
    pub artifacts: HashMap<String, ArtifactInfo>,
}
/// Download descriptor for a single binary artifact.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ArtifactInfo {
    // Absolute URL, or a path resolved against the service base URL.
    pub url: String,
    // Expected SHA-256 hex digest; sha256sum-style "<digest> <filename>"
    // lines are also accepted (see normalize_sha256).
    pub sha256: String,
    // Expected size in bytes; 0 disables the size check during download.
    pub size: u64,
}
/// Release-notes entry returned in the overview response for each version
/// between the current and the latest one.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReleaseNotesItem {
    pub version: String,
    pub published_at: String,
    pub notes: Vec<String>,
}
/// Response body for the update-overview API endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateOverviewResponse {
    pub success: bool,
    // Version of the running binary (CARGO_PKG_VERSION).
    pub current_version: String,
    // Channel the overview was computed for.
    pub channel: UpdateChannel,
    // Newest version published on `channel`.
    pub latest_version: String,
    // True when latest_version is strictly greater than current_version.
    pub upgrade_available: bool,
    // Set to latest_version only when an upgrade is available.
    pub target_version: Option<String>,
    // Notes for versions in (current, latest], sorted oldest first.
    pub notes_between: Vec<ReleaseNotesItem>,
}
/// Request body for starting an upgrade.
///
/// Exactly one of `channel` (upgrade to that channel's latest version) or
/// `target_version` (upgrade to a specific version) must be set;
/// `UpdateService::start_upgrade` rejects the request otherwise.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpgradeRequest {
    pub channel: Option<UpdateChannel>,
    pub target_version: Option<String>,
}
/// Lifecycle of an upgrade attempt; serialized as lowercase strings.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum UpdatePhase {
    Idle,
    Checking,
    Downloading,
    Verifying,
    Installing,
    Restarting,
    Success,
    Failed,
}
/// Response body for the update-status API endpoint; also the service's
/// internally tracked progress record.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateStatusResponse {
    // False only when phase == Failed (kept in sync by set_status).
    pub success: bool,
    pub phase: UpdatePhase,
    // Coarse progress on a 0-100 scale.
    pub progress: u8,
    pub current_version: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub target_version: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub last_error: Option<String>,
}
/// Online self-update service: fetches version manifests, downloads and
/// verifies a new binary, swaps it in place of the running executable, and
/// restarts the process.
pub struct UpdateService {
    client: reqwest::Client,
    // Base URL for manifests and relative artifact paths.
    base_url: String,
    // Staging directory for in-progress downloads.
    work_dir: PathBuf,
    // Last reported status, served to clients via status().
    status: RwLock<UpdateStatusResponse>,
    // Single permit: at most one upgrade may run at a time.
    upgrade_permit: Arc<Semaphore>,
}
impl UpdateService {
    /// Create a new update service that stages downloads under `work_dir`.
    ///
    /// The manifest base URL defaults to `DEFAULT_UPDATE_BASE_URL` and can be
    /// overridden with the `ONE_KVM_UPDATE_BASE_URL` environment variable;
    /// blank/whitespace-only values are ignored.
    pub fn new(work_dir: PathBuf) -> Self {
        let base_url = std::env::var("ONE_KVM_UPDATE_BASE_URL")
            .ok()
            .filter(|url| !url.trim().is_empty())
            .unwrap_or_else(|| DEFAULT_UPDATE_BASE_URL.to_string());
        Self {
            client: reqwest::Client::new(),
            base_url,
            work_dir,
            status: RwLock::new(UpdateStatusResponse {
                success: true,
                phase: UpdatePhase::Idle,
                progress: 0,
                current_version: env!("CARGO_PKG_VERSION").to_string(),
                target_version: None,
                message: None,
                last_error: None,
            }),
            // One permit: at most one upgrade may run at a time.
            upgrade_permit: Arc::new(Semaphore::new(1)),
        }
    }

    /// Snapshot of the most recently reported upgrade status.
    pub async fn status(&self) -> UpdateStatusResponse {
        self.status.read().await.clone()
    }

    /// Fetch both manifests and report whether an upgrade is available on
    /// `channel`, including release notes for every version strictly greater
    /// than the current one and up to (and including) the channel's latest.
    pub async fn overview(&self, channel: UpdateChannel) -> Result<UpdateOverviewResponse> {
        let channels: ChannelsManifest = self.fetch_json("/v1/channels.json").await?;
        let releases: ReleasesManifest = self.fetch_json("/v1/releases.json").await?;
        let current_version = parse_version(env!("CARGO_PKG_VERSION"))?;
        let latest_version_str = match channel {
            UpdateChannel::Stable => channels.stable,
            UpdateChannel::Beta => channels.beta,
        };
        let latest_version = parse_version(&latest_version_str)?;
        let current_parts = parse_version_parts(&current_version)?;
        let latest_parts = parse_version_parts(&latest_version)?;

        // Collect notes for same-channel releases in (current, latest].
        // Releases whose version strings don't parse are skipped silently.
        let mut notes_between = Vec::new();
        for release in &releases.releases {
            if release.channel != channel {
                continue;
            }
            let version = match parse_version(&release.version) {
                Ok(v) => v,
                Err(_) => continue,
            };
            let version_parts = match parse_version_parts(&version) {
                Ok(parts) => parts,
                Err(_) => continue,
            };
            if compare_version_parts(&version_parts, &current_parts) == std::cmp::Ordering::Greater
                && compare_version_parts(&version_parts, &latest_parts)
                    != std::cmp::Ordering::Greater
            {
                // Keep the parsed parts alongside the item so we can sort
                // numerically (not lexicographically) below.
                notes_between.push((
                    version_parts,
                    ReleaseNotesItem {
                        version: release.version.clone(),
                        published_at: release.published_at.clone(),
                        notes: release.notes.clone(),
                    },
                ));
            }
        }
        notes_between.sort_by(|a, b| compare_version_parts(&a.0, &b.0));
        let notes_between = notes_between.into_iter().map(|(_, item)| item).collect();

        let upgrade_available =
            compare_versions(&latest_version, &current_version)? == std::cmp::Ordering::Greater;
        Ok(UpdateOverviewResponse {
            success: true,
            current_version: current_version.to_string(),
            channel,
            latest_version: latest_version.clone(),
            upgrade_available,
            target_version: if upgrade_available {
                Some(latest_version)
            } else {
                None
            },
            notes_between,
        })
    }

    /// Kick off an upgrade in a background task and return immediately.
    ///
    /// Exactly one of `req.channel` / `req.target_version` must be provided.
    /// A single-permit semaphore guarantees only one upgrade runs at a time;
    /// a second request while one is running returns a BadRequest error.
    /// Failures inside the background task are recorded via `set_status`
    /// (phase = Failed) rather than surfaced to this caller.
    pub fn start_upgrade(
        self: &Arc<Self>,
        req: UpgradeRequest,
        shutdown_tx: broadcast::Sender<()>,
    ) -> Result<()> {
        // is_none() == is_none() is true when both or neither are set —
        // both cases are invalid.
        if req.channel.is_none() == req.target_version.is_none() {
            return Err(AppError::BadRequest(
                "Provide exactly one of channel or target_version".to_string(),
            ));
        }
        let permit = self
            .upgrade_permit
            .clone()
            .try_acquire_owned()
            .map_err(|_| AppError::BadRequest("Upgrade is already running".to_string()))?;
        let service = self.clone();
        tokio::spawn(async move {
            // Hold the permit for the full duration of the upgrade.
            let _permit = permit;
            if let Err(e) = service.execute_upgrade(req, shutdown_tx).await {
                service
                    .set_status(
                        UpdatePhase::Failed,
                        0,
                        None,
                        Some(e.to_string()),
                        Some(e.to_string()),
                    )
                    .await;
            }
        });
        Ok(())
    }

    /// Full upgrade pipeline: check manifests, resolve the target release and
    /// artifact for this platform, download + verify, install, then signal
    /// shutdown and restart the process.
    ///
    /// On Unix the final `restart_current_process()` exec()s the new binary,
    /// so on success this function never actually returns.
    async fn execute_upgrade(
        &self,
        req: UpgradeRequest,
        shutdown_tx: broadcast::Sender<()>,
    ) -> Result<()> {
        self.set_status(
            UpdatePhase::Checking,
            5,
            None,
            Some("Checking for updates".to_string()),
            None,
        )
        .await;
        let channels: ChannelsManifest = self.fetch_json("/v1/channels.json").await?;
        let releases: ReleasesManifest = self.fetch_json("/v1/releases.json").await?;
        let current_version = parse_version(env!("CARGO_PKG_VERSION"))?;
        // Resolve the target: either the channel's latest, or the explicit
        // version from the request (start_upgrade guarantees exactly one).
        let target_version = if let Some(channel) = req.channel {
            let version_str = match channel {
                UpdateChannel::Stable => channels.stable,
                UpdateChannel::Beta => channels.beta,
            };
            parse_version(&version_str)?
        } else {
            parse_version(req.target_version.as_deref().unwrap_or_default())?
        };
        // Downgrades and same-version reinstalls are rejected.
        if compare_versions(&target_version, &current_version)? != std::cmp::Ordering::Greater {
            return Err(AppError::BadRequest(format!(
                "Target version {} must be greater than current version {}",
                target_version, current_version
            )));
        }
        let target_release = releases
            .releases
            .iter()
            .find(|r| r.version == target_version)
            .ok_or_else(|| AppError::NotFound(format!("Release {} not found", target_version)))?;
        let target_triple = current_target_triple()?;
        let artifact = target_release
            .artifacts
            .get(&target_triple)
            .ok_or_else(|| {
                AppError::NotFound(format!(
                    "No binary for target {} in version {}",
                    target_triple, target_version
                ))
            })?
            .clone();
        self.set_status(
            UpdatePhase::Downloading,
            10,
            Some(target_version.clone()),
            Some("Downloading binary".to_string()),
            None,
        )
        .await;
        tokio::fs::create_dir_all(&self.work_dir).await?;
        let staging_path = self
            .work_dir
            .join(format!("one-kvm-{}-download", target_version));
        let artifact_url = self.resolve_url(&artifact.url);
        self.download_and_verify(&artifact_url, &staging_path, &artifact)
            .await?;
        self.set_status(
            UpdatePhase::Installing,
            80,
            Some(target_version.clone()),
            Some("Replacing binary".to_string()),
            None,
        )
        .await;
        self.install_binary(&staging_path).await?;
        self.set_status(
            UpdatePhase::Restarting,
            95,
            Some(target_version),
            Some("Restarting service".to_string()),
            None,
        )
        .await;
        // Best-effort shutdown broadcast; send() fails only when there are
        // no receivers, which we deliberately ignore.
        let _ = shutdown_tx.send(());
        // Grace period so subsystems can observe the shutdown signal.
        tokio::time::sleep(std::time::Duration::from_secs(2)).await;
        restart_current_process()?;
        Ok(())
    }

    /// Stream `url` into `output_path`, updating download progress
    /// (10-70% range), then verify the byte count (when `artifact.size > 0`)
    /// and the SHA-256 digest against the manifest.
    async fn download_and_verify(
        &self,
        url: &str,
        output_path: &Path,
        artifact: &ArtifactInfo,
    ) -> Result<()> {
        let response = self
            .client
            .get(url)
            .send()
            .await
            .map_err(|e| AppError::Internal(format!("Failed to download {}: {}", url, e)))?
            .error_for_status()
            .map_err(|e| AppError::Internal(format!("Download request failed: {}", e)))?;
        let mut file = tokio::fs::File::create(output_path).await?;
        let mut stream = response.bytes_stream();
        let mut downloaded: u64 = 0;
        while let Some(chunk) = stream.next().await {
            let chunk = chunk
                .map_err(|e| AppError::Internal(format!("Read download stream failed: {}", e)))?;
            file.write_all(&chunk).await?;
            downloaded += chunk.len() as u64;
            // Progress is only meaningful when the manifest declares a size.
            if artifact.size > 0 {
                let ratio = (downloaded as f64 / artifact.size as f64).clamp(0.0, 1.0);
                // Map download completion onto the 10..=70 progress band.
                let progress = 10 + (ratio * 60.0) as u8;
                self.set_status(
                    UpdatePhase::Downloading,
                    progress,
                    None,
                    Some(format!(
                        "Downloading binary ({} / {} bytes)",
                        downloaded, artifact.size
                    )),
                    None,
                )
                .await;
            }
        }
        file.flush().await?;
        if artifact.size > 0 && downloaded != artifact.size {
            return Err(AppError::Internal(format!(
                "Downloaded size mismatch: expected {}, got {}",
                artifact.size, downloaded
            )));
        }
        self.set_status(
            UpdatePhase::Verifying,
            72,
            None,
            Some("Verifying sha256".to_string()),
            None,
        )
        .await;
        let actual_sha256 = compute_file_sha256(output_path).await?;
        let expected_sha256 = normalize_sha256(&artifact.sha256).ok_or_else(|| {
            AppError::Internal(format!(
                "Invalid sha256 format in manifest: {}",
                artifact.sha256
            ))
        })?;
        if actual_sha256 != expected_sha256 {
            return Err(AppError::Internal(format!(
                "SHA256 mismatch: expected {}, got {}",
                expected_sha256, actual_sha256
            )));
        }
        Ok(())
    }

    /// Replace the running executable with the verified staging binary.
    ///
    /// The staged file is first copied next to the current executable and
    /// only then renamed over it — the copy keeps the final rename on the
    /// same filesystem (a cross-filesystem rename would fail).
    async fn install_binary(&self, staging_path: &Path) -> Result<()> {
        let current_exe = std::env::current_exe()
            .map_err(|e| AppError::Internal(format!("Failed to get current exe path: {}", e)))?;
        let exe_dir = current_exe.parent().ok_or_else(|| {
            AppError::Internal("Failed to determine executable directory".to_string())
        })?;
        let install_path = exe_dir.join("one-kvm.upgrade.new");
        tokio::fs::copy(staging_path, &install_path)
            .await
            .map_err(|e| {
                AppError::Internal(format!("Failed to stage binary into install path: {}", e))
            })?;
        #[cfg(unix)]
        {
            // Ensure the new binary is executable (rwxr-xr-x).
            use std::os::unix::fs::PermissionsExt;
            let mut perms = tokio::fs::metadata(&install_path).await?.permissions();
            perms.set_mode(0o755);
            tokio::fs::set_permissions(&install_path, perms).await?;
        }
        tokio::fs::rename(&install_path, &current_exe)
            .await
            .map_err(|e| AppError::Internal(format!("Failed to replace executable {}", e)))?;
        Ok(())
    }

    /// GET `{base_url}{path}` and deserialize the JSON response body.
    async fn fetch_json<T: for<'de> Deserialize<'de>>(&self, path: &str) -> Result<T> {
        let url = format!("{}{}", self.base_url.trim_end_matches('/'), path);
        let response = self
            .client
            .get(&url)
            .send()
            .await
            .map_err(|e| AppError::Internal(format!("Failed to fetch {}: {}", url, e)))?
            .error_for_status()
            .map_err(|e| AppError::Internal(format!("Request failed {}: {}", url, e)))?;
        response
            .json::<T>()
            .await
            .map_err(|e| AppError::Internal(format!("Invalid update response {}: {}", url, e)))
    }

    /// Pass absolute URLs through unchanged; resolve relative paths against
    /// the service base URL.
    fn resolve_url(&self, url: &str) -> String {
        if url.starts_with("http://") || url.starts_with("https://") {
            url.to_string()
        } else {
            format!(
                "{}/{}",
                self.base_url.trim_end_matches('/'),
                url.trim_start_matches('/')
            )
        }
    }

    /// Record the current upgrade phase/progress.
    ///
    /// `target_version` is only overwritten when `Some`, so later phases keep
    /// the previously recorded target; `message`/`last_error` are replaced
    /// unconditionally. `success` mirrors `phase != Failed`.
    async fn set_status(
        &self,
        phase: UpdatePhase,
        progress: u8,
        target_version: Option<String>,
        message: Option<String>,
        last_error: Option<String>,
    ) {
        let mut status = self.status.write().await;
        status.phase = phase;
        status.progress = progress;
        if target_version.is_some() {
            status.target_version = target_version;
        }
        status.message = message;
        status.last_error = last_error;
        status.success = status.phase != UpdatePhase::Failed;
        status.current_version = env!("CARGO_PKG_VERSION").to_string();
    }
}
fn parse_version(input: &str) -> Result<String> {
let parts: Vec<&str> = input.split('.').collect();
if parts.len() != 3 {
return Err(AppError::Internal(format!(
"Invalid version {}, expected x.x.x",
input
)));
}
if parts
.iter()
.any(|p| p.is_empty() || !p.chars().all(|c| c.is_ascii_digit()))
{
return Err(AppError::Internal(format!(
"Invalid version {}, expected numeric x.x.x",
input
)));
}
Ok(input.to_string())
}
/// Compare two `x.y.z` version strings numerically (major, then minor,
/// then patch). Errors when either string fails to parse.
fn compare_versions(a: &str, b: &str) -> Result<std::cmp::Ordering> {
    let lhs = parse_version_parts(a)?;
    let rhs = parse_version_parts(b)?;
    Ok(compare_version_parts(&lhs, &rhs))
}
/// Split an `x.y.z` version string into its `[major, minor, patch]`
/// numeric components.
fn parse_version_parts(input: &str) -> Result<[u64; 3]> {
    let pieces: Vec<&str> = input.split('.').collect();
    if pieces.len() != 3 {
        return Err(AppError::Internal(format!(
            "Invalid version {}, expected x.x.x",
            input
        )));
    }
    // Shared parser so all three components produce uniform error messages.
    let parse_component = |value: &str, label: &str| -> Result<u64> {
        value.parse::<u64>().map_err(|e| {
            AppError::Internal(format!("Invalid {} version {}: {}", label, value, e))
        })
    };
    Ok([
        parse_component(pieces[0], "major")?,
        parse_component(pieces[1], "minor")?,
        parse_component(pieces[2], "patch")?,
    ])
}
/// Order two `[major, minor, patch]` triples.
fn compare_version_parts(a: &[u64; 3], b: &[u64; 3]) -> std::cmp::Ordering {
    // Fixed-size arrays already compare element-by-element in order, which
    // is exactly major-then-minor-then-patch precedence.
    a.cmp(b)
}
/// Stream the file at `path` through SHA-256 in 8 KiB chunks and return the
/// lowercase hex digest.
async fn compute_file_sha256(path: &Path) -> Result<String> {
    let mut file = tokio::fs::File::open(path).await?;
    let mut digest = Sha256::new();
    let mut chunk = [0u8; 8192];
    loop {
        match file.read(&mut chunk).await? {
            // A zero-byte read signals end of file.
            0 => break,
            n => digest.update(&chunk[..n]),
        }
    }
    Ok(format!("{:x}", digest.finalize()))
}
/// Extract a canonical (lowercase, 64-hex-character) SHA-256 digest from
/// `input`. Accepts `sha256sum`-style lines ("<digest>  <filename>") by
/// taking only the first whitespace-separated token; returns `None` when
/// that token is not a valid digest.
fn normalize_sha256(input: &str) -> Option<String> {
    let candidate = input.split_whitespace().next()?.trim().to_lowercase();
    let is_digest = candidate.len() == 64 && candidate.bytes().all(|b| b.is_ascii_hexdigit());
    if is_digest {
        Some(candidate)
    } else {
        None
    }
}
/// Map the compile-time OS/architecture pair onto the target triple used as
/// the artifact key in the releases manifest. Only Linux targets are
/// supported; anything else is a BadRequest error.
fn current_target_triple() -> Result<String> {
    use std::env::consts::{ARCH, OS};
    match (OS, ARCH) {
        ("linux", "x86_64") => Ok("x86_64-unknown-linux-gnu".to_string()),
        ("linux", "aarch64") => Ok("aarch64-unknown-linux-gnu".to_string()),
        ("linux", "arm") => Ok("armv7-unknown-linux-gnueabihf".to_string()),
        (os, arch) => Err(AppError::BadRequest(format!(
            "Unsupported platform {}-{}",
            os, arch
        ))),
    }
}
/// Relaunch the current executable (which install_binary has just replaced
/// on disk) with the same command-line arguments.
///
/// On Unix this uses exec(), which replaces the process image in place — on
/// success this function never returns, and the `Err` branch is reached only
/// when exec() itself fails. On other platforms a child process is spawned
/// and the current process exits immediately with status 0.
fn restart_current_process() -> Result<()> {
    let exe = std::env::current_exe()
        .map_err(|e| AppError::Internal(format!("Failed to get current exe: {}", e)))?;
    // Skip argv[0]; Command derives it from `exe`.
    let args: Vec<String> = std::env::args().skip(1).collect();
    #[cfg(unix)]
    {
        use std::os::unix::process::CommandExt;
        // exec() only returns on failure, yielding the io::Error below.
        let err = std::process::Command::new(&exe).args(&args).exec();
        Err(AppError::Internal(format!("Failed to restart: {}", err)))
    }
    #[cfg(not(unix))]
    {
        std::process::Command::new(&exe)
            .args(&args)
            .spawn()
            .map_err(|e| AppError::Internal(format!("Failed to spawn restart process: {}", e)))?;
        std::process::exit(0);
    }
}

View File

@@ -2,8 +2,8 @@
//!
//! This module contains common utilities used across the codebase.
pub mod throttle;
pub mod net;
pub mod throttle;
pub use throttle::LogThrottler;
pub use net::{bind_tcp_listener, bind_udp_socket};
pub use throttle::LogThrottler;

View File

@@ -2,24 +2,21 @@
//!
//! Provides async video capture using memory-mapped buffers.
use bytes::Bytes;
use std::collections::HashMap;
use std::io;
use std::path::{Path, PathBuf};
use bytes::Bytes;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::{watch, Mutex};
use tracing::{debug, error, info, warn};
use v4l::buffer::Type as BufferType;
use v4l::io::traits::CaptureStream;
use v4l::prelude::*;
use v4l::video::capture::Parameters;
use v4l::video::Capture;
use v4l::Format;
use super::format::{PixelFormat, Resolution};
use super::frame::VideoFrame;
use crate::error::{AppError, Result};
use crate::utils::LogThrottler;
use crate::video::v4l2r_capture::V4l2rCaptureStream;
/// Default number of capture buffers (reduced from 4 to 2 for lower latency)
const DEFAULT_BUFFER_COUNT: u32 = 2;
@@ -280,9 +277,15 @@ fn run_capture(
return Ok(());
}
// Open device
let device = match Device::with_path(&config.device_path) {
Ok(d) => d,
let stream = match V4l2rCaptureStream::open(
&config.device_path,
config.resolution,
config.format,
config.fps,
config.buffer_count,
config.timeout,
) {
Ok(stream) => stream,
Err(e) => {
let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") {
@@ -306,34 +309,7 @@ fn run_capture(
}
};
// Set format
let format = Format::new(
config.resolution.width,
config.resolution.height,
config.format.to_fourcc(),
);
let actual_format = match device.set_format(&format) {
Ok(f) => f,
Err(e) => {
let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") {
warn!(
"Device busy on set_format attempt {}/{}, retrying in {}ms...",
attempt + 1,
MAX_RETRIES,
RETRY_DELAY_MS
);
std::thread::sleep(Duration::from_millis(RETRY_DELAY_MS));
last_error = Some(AppError::VideoError(format!("Failed to set format: {}", e)));
continue;
}
return Err(AppError::VideoError(format!("Failed to set format: {}", e)));
}
};
// Device opened and format set successfully - proceed with capture
return run_capture_inner(config, state, stats, stop_flag, device, actual_format);
return run_capture_inner(config, state, stats, stop_flag, stream);
}
// All retries exhausted
@@ -348,48 +324,16 @@ fn run_capture_inner(
state: &watch::Sender<CaptureState>,
stats: &Arc<Mutex<CaptureStats>>,
stop_flag: &AtomicBool,
device: Device,
actual_format: Format,
mut stream: V4l2rCaptureStream,
) -> Result<()> {
let resolution = stream.resolution();
let pixel_format = stream.format();
let stride = stream.stride();
info!(
"Capture format: {}x{} {:?} stride={}",
actual_format.width, actual_format.height, actual_format.fourcc, actual_format.stride
resolution.width, resolution.height, pixel_format, stride
);
// Try to set hardware FPS (V4L2 VIDIOC_S_PARM)
if config.fps > 0 {
match device.set_params(&Parameters::with_fps(config.fps)) {
Ok(actual_params) => {
// Extract actual FPS from returned interval (numerator/denominator)
let actual_hw_fps = if actual_params.interval.numerator > 0 {
actual_params.interval.denominator / actual_params.interval.numerator
} else {
0
};
if actual_hw_fps == config.fps {
info!("Hardware FPS set successfully: {} fps", actual_hw_fps);
} else if actual_hw_fps > 0 {
info!(
"Hardware FPS coerced: requested {} fps, got {} fps",
config.fps, actual_hw_fps
);
} else {
warn!("Hardware FPS setting returned invalid interval");
}
}
Err(e) => {
warn!("Failed to set hardware FPS: {}", e);
}
}
}
// Create stream with mmap buffers
let mut stream =
MmapStream::with_buffers(&device, BufferType::VideoCapture, config.buffer_count)
.map_err(|e| AppError::VideoError(format!("Failed to create stream: {}", e)))?;
let _ = state.send(CaptureState::Running);
info!("Capture started");
@@ -397,12 +341,25 @@ fn run_capture_inner(
let mut fps_frame_count = 0u64;
let mut fps_window_start = Instant::now();
let fps_window_duration = Duration::from_secs(1);
let mut scratch = Vec::new();
let capture_error_throttler = LogThrottler::with_secs(5);
let mut suppressed_capture_errors: HashMap<String, u64> = HashMap::new();
let classify_capture_error = |err: &std::io::Error| -> String {
let message = err.to_string();
if message.contains("dqbuf failed") && message.contains("EINVAL") {
"capture_dqbuf_einval".to_string()
} else if message.contains("dqbuf failed") {
"capture_dqbuf".to_string()
} else {
format!("capture_{:?}", err.kind())
}
};
// Main capture loop
while !stop_flag.load(Ordering::Relaxed) {
// Try to capture a frame
let (_buf, meta) = match stream.next() {
Ok(frame_data) => frame_data,
let meta = match stream.next_into(&mut scratch) {
Ok(meta) => meta,
Err(e) => {
if e.kind() == io::ErrorKind::TimedOut {
warn!("Capture timeout - no signal?");
@@ -432,19 +389,30 @@ fn run_capture_inner(
});
}
error!("Capture error: {}", e);
let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) {
let suppressed = suppressed_capture_errors.remove(&key).unwrap_or(0);
if suppressed > 0 {
error!("Capture error: {} (suppressed {} repeats)", e, suppressed);
} else {
error!("Capture error: {}", e);
}
} else {
let counter = suppressed_capture_errors.entry(key).or_insert(0);
*counter = counter.saturating_add(1);
}
continue;
}
};
// Use actual bytes used, not buffer size
let frame_size = meta.bytesused as usize;
let frame_size = meta.bytes_used;
// Validate frame
if frame_size < MIN_FRAME_SIZE {
debug!(
"Dropping small frame: {} bytes (bytesused={})",
frame_size, meta.bytesused
frame_size, meta.bytes_used
);
continue;
}
@@ -470,6 +438,10 @@ fn run_capture_inner(
s.current_fps = (fps_frame_count as f32 / elapsed.as_secs_f32()).max(0.0);
}
}
if *state.borrow() == CaptureState::NoSignal {
let _ = state.send(CaptureState::Running);
}
}
info!("Capture stopped");
@@ -525,38 +497,37 @@ fn grab_single_frame(
resolution: Resolution,
format: PixelFormat,
) -> Result<VideoFrame> {
let device = Device::with_path(device_path)
.map_err(|e| AppError::VideoError(format!("Failed to open device: {}", e)))?;
let fmt = Format::new(resolution.width, resolution.height, format.to_fourcc());
let actual = device
.set_format(&fmt)
.map_err(|e| AppError::VideoError(format!("Failed to set format: {}", e)))?;
let mut stream = MmapStream::with_buffers(&device, BufferType::VideoCapture, 2)
.map_err(|e| AppError::VideoError(format!("Failed to create stream: {}", e)))?;
let mut stream = V4l2rCaptureStream::open(
device_path,
resolution,
format,
0,
2,
Duration::from_secs(DEFAULT_TIMEOUT),
)?;
let actual_resolution = stream.resolution();
let actual_format = stream.format();
let actual_stride = stream.stride();
let mut scratch = Vec::new();
// Try to get a valid frame (skip first few which might be bad)
for attempt in 0..5 {
match stream.next() {
Ok((buf, _meta)) => {
if buf.len() >= MIN_FRAME_SIZE {
let actual_format = PixelFormat::from_fourcc(actual.fourcc).unwrap_or(format);
match stream.next_into(&mut scratch) {
Ok(meta) => {
if meta.bytes_used >= MIN_FRAME_SIZE {
return Ok(VideoFrame::new(
Bytes::copy_from_slice(buf),
Resolution::new(actual.width, actual.height),
Bytes::copy_from_slice(&scratch[..meta.bytes_used]),
actual_resolution,
actual_format,
actual.stride,
actual_stride,
0,
));
}
}
Err(e) => {
if attempt == 4 {
return Err(AppError::VideoError(format!("Failed to grab frame: {}", e)));
}
Err(e) if attempt == 4 => {
return Err(AppError::VideoError(format!("Failed to grab frame: {}", e)));
}
Err(_) => {}
}
}

View File

@@ -0,0 +1,193 @@
use crate::config::{AppConfig, RtspCodec, StreamMode};
use crate::error::Result;
use crate::video::encoder::registry::VideoEncoderType;
use crate::video::encoder::VideoCodecType;
use crate::video::VideoStreamManager;
use std::sync::Arc;
/// Restrictions on which video codecs and stream modes may be used,
/// derived from the application configuration (RTSP / RustDesk settings).
#[derive(Debug, Clone)]
pub struct StreamCodecConstraints {
    /// Whether the RustDesk integration is enabled in the config.
    pub rustdesk_enabled: bool,
    /// Whether the RTSP server is enabled in the config.
    pub rtsp_enabled: bool,
    /// WebRTC video codecs that remain selectable under these constraints.
    pub allowed_webrtc_codecs: Vec<VideoCodecType>,
    /// Whether the MJPEG stream mode is still permitted.
    pub allow_mjpeg: bool,
    /// Codec every stream is pinned to when a hard lock is active (RTSP),
    /// `None` when only a soft restriction (or none) applies.
    pub locked_codec: Option<VideoCodecType>,
    /// Human-readable explanation of why these constraints are in effect.
    pub reason: String,
}
/// Outcome of enforcing `StreamCodecConstraints` against the stream manager.
#[derive(Debug, Clone)]
pub struct ConstraintEnforcementResult {
    /// True when the stream mode or codec was changed to satisfy constraints.
    pub changed: bool,
    /// Human-readable description of the change, `None` when nothing changed.
    pub message: Option<String>,
}
impl StreamCodecConstraints {
    /// Constraints that permit every WebRTC codec plus MJPEG (no lock active).
    pub fn unrestricted() -> Self {
        Self {
            rustdesk_enabled: false,
            rtsp_enabled: false,
            allowed_webrtc_codecs: vec![
                VideoCodecType::H264,
                VideoCodecType::H265,
                VideoCodecType::VP8,
                VideoCodecType::VP9,
            ],
            allow_mjpeg: true,
            locked_codec: None,
            reason: "No codec lock active".to_string(),
        }
    }

    /// Derive constraints from the current application configuration.
    ///
    /// RTSP pins everything to its configured codec and forbids MJPEG;
    /// RustDesk alone merely forbids MJPEG; otherwise no restriction applies.
    pub fn from_config(config: &AppConfig) -> Self {
        let rustdesk_enabled = config.rustdesk.enabled;
        let rtsp_enabled = config.rtsp.enabled;

        if rtsp_enabled {
            // RTSP hard-locks the codec to the one configured for its server.
            let locked_codec = match config.rtsp.codec {
                RtspCodec::H264 => VideoCodecType::H264,
                RtspCodec::H265 => VideoCodecType::H265,
            };
            let reason = if rustdesk_enabled {
                format!(
                    "RTSP enabled with codec lock ({:?}) and RustDesk enabled",
                    locked_codec
                )
            } else {
                format!("RTSP enabled with codec lock ({:?})", locked_codec)
            };
            Self {
                rustdesk_enabled,
                rtsp_enabled,
                allowed_webrtc_codecs: vec![locked_codec],
                allow_mjpeg: false,
                locked_codec: Some(locked_codec),
                reason,
            }
        } else if rustdesk_enabled {
            // RustDesk keeps all WebRTC codecs available but rules out MJPEG.
            Self {
                rustdesk_enabled,
                rtsp_enabled,
                allowed_webrtc_codecs: vec![
                    VideoCodecType::H264,
                    VideoCodecType::H265,
                    VideoCodecType::VP8,
                    VideoCodecType::VP9,
                ],
                allow_mjpeg: false,
                locked_codec: None,
                reason: "RustDesk enabled, MJPEG disabled".to_string(),
            }
        } else {
            Self::unrestricted()
        }
    }

    /// Whether the MJPEG stream mode is permitted.
    pub fn is_mjpeg_allowed(&self) -> bool {
        self.allow_mjpeg
    }

    /// Whether `codec` may be used for WebRTC under these constraints.
    pub fn is_webrtc_codec_allowed(&self, codec: VideoCodecType) -> bool {
        self.allowed_webrtc_codecs.iter().any(|&allowed| allowed == codec)
    }

    /// Codec that should be selected: the locked codec when one is active,
    /// otherwise the first allowed codec (falling back to H.264).
    pub fn preferred_webrtc_codec(&self) -> VideoCodecType {
        self.locked_codec.unwrap_or_else(|| {
            self.allowed_webrtc_codecs
                .first()
                .copied()
                .unwrap_or(VideoCodecType::H264)
        })
    }

    /// String identifiers of every allowed codec, for the HTTP API
    /// ("mjpeg" first when permitted, then the WebRTC codecs in order).
    pub fn allowed_codecs_for_api(&self) -> Vec<&'static str> {
        let webrtc_ids = self.allowed_webrtc_codecs.iter().map(|&c| codec_to_id(c));
        if self.allow_mjpeg {
            std::iter::once("mjpeg").chain(webrtc_ids).collect()
        } else {
            webrtc_ids.collect()
        }
    }
}
/// Bring the running stream manager into compliance with `constraints`.
///
/// When the current mode is MJPEG but MJPEG is forbidden, switches to WebRTC
/// with the preferred codec; when the current WebRTC codec is not allowed,
/// switches to the preferred codec. Errors from the underlying codec/mode
/// switch calls are propagated. Returns whether anything changed, plus a
/// human-readable summary of the change.
pub async fn enforce_constraints_with_stream_manager(
    stream_manager: &Arc<VideoStreamManager>,
    constraints: &StreamCodecConstraints,
) -> Result<ConstraintEnforcementResult> {
    let current_mode = stream_manager.current_mode().await;

    match current_mode {
        // MJPEG is forbidden: move to WebRTC using the preferred codec.
        StreamMode::Mjpeg if !constraints.allow_mjpeg => {
            let target_codec = constraints.preferred_webrtc_codec();
            stream_manager.set_video_codec(target_codec).await?;
            // The transaction handle is intentionally discarded; only the
            // error (via `?`) matters here.
            let _ = stream_manager
                .switch_mode_transaction(StreamMode::WebRTC)
                .await?;
            Ok(ConstraintEnforcementResult {
                changed: true,
                message: Some(format!(
                    "Auto-switched from MJPEG to {} due to codec lock",
                    codec_to_id(target_codec)
                )),
            })
        }
        // WebRTC: verify the active codec is still allowed.
        StreamMode::WebRTC => {
            let current_codec = stream_manager.webrtc_streamer().current_video_codec().await;
            if constraints.is_webrtc_codec_allowed(current_codec) {
                Ok(ConstraintEnforcementResult {
                    changed: false,
                    message: None,
                })
            } else {
                let target_codec = constraints.preferred_webrtc_codec();
                stream_manager.set_video_codec(target_codec).await?;
                Ok(ConstraintEnforcementResult {
                    changed: true,
                    message: Some(format!(
                        "Auto-switched codec from {} to {} due to codec lock",
                        codec_to_id(current_codec),
                        codec_to_id(target_codec)
                    )),
                })
            }
        }
        // Any other mode (including MJPEG while allowed) needs no action.
        _ => Ok(ConstraintEnforcementResult {
            changed: false,
            message: None,
        }),
    }
}
/// Map a `VideoCodecType` to the lowercase string identifier used by the API.
pub fn codec_to_id(codec: VideoCodecType) -> &'static str {
    match codec {
        VideoCodecType::VP8 => "vp8",
        VideoCodecType::VP9 => "vp9",
        VideoCodecType::H264 => "h264",
        VideoCodecType::H265 => "h265",
    }
}
/// Map a `VideoEncoderType` to the lowercase string identifier used by the API.
pub fn encoder_codec_to_id(codec: VideoEncoderType) -> &'static str {
    match codec {
        VideoEncoderType::VP8 => "vp8",
        VideoEncoderType::VP9 => "vp9",
        VideoEncoderType::H264 => "h264",
        VideoEncoderType::H265 => "h265",
    }
}
/// Translate a WebRTC-facing `VideoCodecType` into the matching encoder type.
pub fn video_codec_to_encoder_codec(codec: VideoCodecType) -> VideoEncoderType {
    match codec {
        VideoCodecType::VP8 => VideoEncoderType::VP8,
        VideoCodecType::VP9 => VideoEncoderType::VP9,
        VideoCodecType::H264 => VideoEncoderType::H264,
        VideoCodecType::H265 => VideoEncoderType::H265,
    }
}
/// Translate an encoder-side `VideoEncoderType` into the matching codec type.
pub fn encoder_codec_to_video_codec(codec: VideoEncoderType) -> VideoCodecType {
    match codec {
        VideoEncoderType::VP8 => VideoCodecType::VP8,
        VideoEncoderType::VP9 => VideoCodecType::VP9,
        VideoEncoderType::H264 => VideoCodecType::H264,
        VideoEncoderType::H265 => VideoCodecType::H265,
    }
}

View File

@@ -1,15 +1,17 @@
//! V4L2 device enumeration and capability query
use serde::{Deserialize, Serialize};
use std::fs::File;
use std::path::{Path, PathBuf};
use std::sync::mpsc;
use std::time::Duration;
use tracing::{debug, info, warn};
use v4l::capability::Flags;
use v4l::prelude::*;
use v4l::video::Capture;
use v4l::Format;
use v4l::FourCC;
use v4l2r::bindings::{v4l2_frmivalenum, v4l2_frmsizeenum};
use v4l2r::ioctl::{
self, Capabilities, Capability as V4l2rCapability, FormatIterator, FrmIvalTypes, FrmSizeTypes,
};
use v4l2r::nix::errno::Errno;
use v4l2r::{Format as V4l2rFormat, QueueType};
use super::format::{PixelFormat, Resolution};
use crate::error::{AppError, Result};
@@ -81,7 +83,7 @@ pub struct DeviceCapabilities {
/// Wrapper around a V4L2 video device
pub struct VideoDevice {
pub path: PathBuf,
device: Device,
fd: File,
}
impl VideoDevice {
@@ -90,42 +92,55 @@ impl VideoDevice {
let path = path.as_ref().to_path_buf();
debug!("Opening video device: {:?}", path);
let device = Device::with_path(&path).map_err(|e| {
let fd = File::options()
.read(true)
.write(true)
.open(&path)
.map_err(|e| {
AppError::VideoError(format!("Failed to open device {:?}: {}", path, e))
})?;
Ok(Self { path, fd })
}
/// Open a video device read-only (for probing/enumeration)
pub fn open_readonly(path: impl AsRef<Path>) -> Result<Self> {
let path = path.as_ref().to_path_buf();
debug!("Opening video device (read-only): {:?}", path);
let fd = File::options().read(true).open(&path).map_err(|e| {
AppError::VideoError(format!("Failed to open device {:?}: {}", path, e))
})?;
Ok(Self { path, device })
Ok(Self { path, fd })
}
/// Get device capabilities
pub fn capabilities(&self) -> Result<DeviceCapabilities> {
let caps = self
.device
.query_caps()
let caps: V4l2rCapability = ioctl::querycap(&self.fd)
.map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?;
let flags = caps.device_caps();
Ok(DeviceCapabilities {
video_capture: caps.capabilities.contains(Flags::VIDEO_CAPTURE),
video_capture_mplane: caps.capabilities.contains(Flags::VIDEO_CAPTURE_MPLANE),
video_output: caps.capabilities.contains(Flags::VIDEO_OUTPUT),
streaming: caps.capabilities.contains(Flags::STREAMING),
read_write: caps.capabilities.contains(Flags::READ_WRITE),
video_capture: flags.contains(Capabilities::VIDEO_CAPTURE),
video_capture_mplane: flags.contains(Capabilities::VIDEO_CAPTURE_MPLANE),
video_output: flags.contains(Capabilities::VIDEO_OUTPUT),
streaming: flags.contains(Capabilities::STREAMING),
read_write: flags.contains(Capabilities::READWRITE),
})
}
/// Get detailed device information
pub fn info(&self) -> Result<VideoDeviceInfo> {
let caps = self
.device
.query_caps()
let caps: V4l2rCapability = ioctl::querycap(&self.fd)
.map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?;
let flags = caps.device_caps();
let capabilities = DeviceCapabilities {
video_capture: caps.capabilities.contains(Flags::VIDEO_CAPTURE),
video_capture_mplane: caps.capabilities.contains(Flags::VIDEO_CAPTURE_MPLANE),
video_output: caps.capabilities.contains(Flags::VIDEO_OUTPUT),
streaming: caps.capabilities.contains(Flags::STREAMING),
read_write: caps.capabilities.contains(Flags::READ_WRITE),
video_capture: flags.contains(Capabilities::VIDEO_CAPTURE),
video_capture_mplane: flags.contains(Capabilities::VIDEO_CAPTURE_MPLANE),
video_output: flags.contains(Capabilities::VIDEO_OUTPUT),
streaming: flags.contains(Capabilities::STREAMING),
read_write: flags.contains(Capabilities::READWRITE),
};
let formats = self.enumerate_formats()?;
@@ -141,7 +156,7 @@ impl VideoDevice {
path: self.path.clone(),
name: caps.card.clone(),
driver: caps.driver.clone(),
bus_info: caps.bus.clone(),
bus_info: caps.bus_info.clone(),
card: caps.card,
formats,
capabilities,
@@ -154,16 +169,13 @@ impl VideoDevice {
pub fn enumerate_formats(&self) -> Result<Vec<FormatInfo>> {
let mut formats = Vec::new();
// Get supported formats
let format_descs = self
.device
.enum_formats()
.map_err(|e| AppError::VideoError(format!("Failed to enumerate formats: {}", e)))?;
let queue = self.capture_queue_type()?;
let format_descs = FormatIterator::new(&self.fd, queue);
for desc in format_descs {
// Try to convert FourCC to our PixelFormat
if let Some(format) = PixelFormat::from_fourcc(desc.fourcc) {
let resolutions = self.enumerate_resolutions(desc.fourcc)?;
if let Some(format) = PixelFormat::from_v4l2r(desc.pixelformat) {
let resolutions = self.enumerate_resolutions(desc.pixelformat)?;
formats.push(FormatInfo {
format,
@@ -173,7 +185,7 @@ impl VideoDevice {
} else {
debug!(
"Skipping unsupported format: {:?} ({})",
desc.fourcc, desc.description
desc.pixelformat, desc.description
);
}
}
@@ -185,46 +197,55 @@ impl VideoDevice {
}
/// Enumerate resolutions for a specific format
fn enumerate_resolutions(&self, fourcc: FourCC) -> Result<Vec<ResolutionInfo>> {
fn enumerate_resolutions(&self, fourcc: v4l2r::PixelFormat) -> Result<Vec<ResolutionInfo>> {
let mut resolutions = Vec::new();
// Try to enumerate frame sizes
match self.device.enum_framesizes(fourcc) {
Ok(sizes) => {
for size in sizes {
match size.size {
v4l::framesize::FrameSizeEnum::Discrete(d) => {
let fps = self
.enumerate_fps(fourcc, d.width, d.height)
.unwrap_or_default();
resolutions.push(ResolutionInfo::new(d.width, d.height, fps));
}
v4l::framesize::FrameSizeEnum::Stepwise(s) => {
// For stepwise, add some common resolutions
for res in [
Resolution::VGA,
Resolution::HD720,
Resolution::HD1080,
Resolution::UHD4K,
] {
if res.width >= s.min_width
&& res.width <= s.max_width
&& res.height >= s.min_height
&& res.height <= s.max_height
{
let fps = self
.enumerate_fps(fourcc, res.width, res.height)
.unwrap_or_default();
resolutions
.push(ResolutionInfo::new(res.width, res.height, fps));
let mut index = 0u32;
loop {
match ioctl::enum_frame_sizes::<v4l2_frmsizeenum>(&self.fd, index, fourcc) {
Ok(size) => {
if let Some(size) = size.size() {
match size {
FrmSizeTypes::Discrete(d) => {
let fps = self
.enumerate_fps(fourcc, d.width, d.height)
.unwrap_or_default();
resolutions.push(ResolutionInfo::new(d.width, d.height, fps));
}
FrmSizeTypes::StepWise(s) => {
for res in [
Resolution::VGA,
Resolution::HD720,
Resolution::HD1080,
Resolution::UHD4K,
] {
if res.width >= s.min_width
&& res.width <= s.max_width
&& res.height >= s.min_height
&& res.height <= s.max_height
{
let fps = self
.enumerate_fps(fourcc, res.width, res.height)
.unwrap_or_default();
resolutions
.push(ResolutionInfo::new(res.width, res.height, fps));
}
}
}
}
}
index += 1;
}
Err(e) => {
let is_einval = matches!(
e,
v4l2r::ioctl::FrameSizeError::IoctlError(err) if err == Errno::EINVAL
);
if !is_einval {
debug!("Failed to enumerate frame sizes for {:?}: {}", fourcc, e);
}
break;
}
}
Err(e) => {
debug!("Failed to enumerate frame sizes for {:?}: {}", fourcc, e);
}
}
@@ -236,36 +257,55 @@ impl VideoDevice {
}
/// Enumerate FPS for a specific resolution
fn enumerate_fps(&self, fourcc: FourCC, width: u32, height: u32) -> Result<Vec<u32>> {
fn enumerate_fps(
&self,
fourcc: v4l2r::PixelFormat,
width: u32,
height: u32,
) -> Result<Vec<u32>> {
let mut fps_list = Vec::new();
match self.device.enum_frameintervals(fourcc, width, height) {
Ok(intervals) => {
for interval in intervals {
match interval.interval {
v4l::frameinterval::FrameIntervalEnum::Discrete(fraction) => {
if fraction.numerator > 0 {
let fps = fraction.denominator / fraction.numerator;
fps_list.push(fps);
let mut index = 0u32;
loop {
match ioctl::enum_frame_intervals::<v4l2_frmivalenum>(
&self.fd, index, fourcc, width, height,
) {
Ok(interval) => {
if let Some(interval) = interval.intervals() {
match interval {
FrmIvalTypes::Discrete(fraction) => {
if fraction.numerator > 0 {
let fps = fraction.denominator / fraction.numerator;
fps_list.push(fps);
}
}
}
v4l::frameinterval::FrameIntervalEnum::Stepwise(step) => {
// Just pick max/min/step
if step.max.numerator > 0 {
let min_fps = step.max.denominator / step.max.numerator;
let max_fps = step.min.denominator / step.min.numerator;
fps_list.push(min_fps);
if max_fps != min_fps {
fps_list.push(max_fps);
FrmIvalTypes::StepWise(step) => {
if step.max.numerator > 0 {
let min_fps = step.max.denominator / step.max.numerator;
let max_fps = step.min.denominator / step.min.numerator;
fps_list.push(min_fps);
if max_fps != min_fps {
fps_list.push(max_fps);
}
}
}
}
}
index += 1;
}
Err(e) => {
let is_einval = matches!(
e,
v4l2r::ioctl::FrameIntervalsError::IoctlError(err) if err == Errno::EINVAL
);
if !is_einval {
debug!(
"Failed to enumerate frame intervals for {:?} {}x{}: {}",
fourcc, width, height, e
);
}
break;
}
}
Err(_) => {
// If enumeration fails, assume 30fps
fps_list.push(30);
}
}
@@ -275,20 +315,26 @@ impl VideoDevice {
}
/// Get current format
pub fn get_format(&self) -> Result<Format> {
self.device
.format()
pub fn get_format(&self) -> Result<V4l2rFormat> {
let queue = self.capture_queue_type()?;
ioctl::g_fmt(&self.fd, queue)
.map_err(|e| AppError::VideoError(format!("Failed to get format: {}", e)))
}
/// Set capture format
pub fn set_format(&self, width: u32, height: u32, format: PixelFormat) -> Result<Format> {
let fmt = Format::new(width, height, format.to_fourcc());
pub fn set_format(&self, width: u32, height: u32, format: PixelFormat) -> Result<V4l2rFormat> {
let queue = self.capture_queue_type()?;
let mut fmt: V4l2rFormat = ioctl::g_fmt(&self.fd, queue)
.map_err(|e| AppError::VideoError(format!("Failed to get format: {}", e)))?;
fmt.width = width;
fmt.height = height;
fmt.pixelformat = format.to_v4l2r();
// Request the format
let actual = self
.device
.set_format(&fmt)
let mut fd = self
.fd
.try_clone()
.map_err(|e| AppError::VideoError(format!("Failed to clone device fd: {}", e)))?;
let actual: V4l2rFormat = ioctl::s_fmt(&mut fd, (queue, &fmt))
.map_err(|e| AppError::VideoError(format!("Failed to set format: {}", e)))?;
if actual.width != width || actual.height != height {
@@ -364,7 +410,7 @@ impl VideoDevice {
.max()
.unwrap_or(0);
priority += (max_resolution / 100000) as u32;
priority += max_resolution / 100000;
// Known good drivers get bonus
let good_drivers = ["uvcvideo", "tc358743"];
@@ -376,8 +422,21 @@ impl VideoDevice {
}
/// Get the inner device reference (for advanced usage)
pub fn inner(&self) -> &Device {
&self.device
pub fn inner(&self) -> &File {
&self.fd
}
fn capture_queue_type(&self) -> Result<QueueType> {
let caps = self.capabilities()?;
if caps.video_capture {
Ok(QueueType::VideoCapture)
} else if caps.video_capture_mplane {
Ok(QueueType::VideoCaptureMplane)
} else {
Err(AppError::VideoError(
"Device does not expose a capture queue".to_string(),
))
}
}
}
@@ -446,7 +505,7 @@ fn probe_device_with_timeout(path: &Path, timeout: Duration) -> Option<VideoDevi
std::thread::spawn(move || {
let result = (|| -> Result<VideoDeviceInfo> {
let device = VideoDevice::open(&path_for_thread)?;
let device = VideoDevice::open_readonly(&path_for_thread)?;
device.info()
})();
let _ = tx.send(result);
@@ -503,15 +562,7 @@ fn sysfs_maybe_capture(path: &Path) -> bool {
}
let skip_hints = [
"codec",
"decoder",
"encoder",
"isp",
"mem2mem",
"m2m",
"vbi",
"radio",
"metadata",
"codec", "decoder", "encoder", "isp", "mem2mem", "m2m", "vbi", "radio", "metadata",
"output",
];
if skip_hints.iter().any(|hint| sysfs_name.contains(hint)) && !maybe_capture {

View File

@@ -32,7 +32,7 @@ fn init_hwcodec_logging() {
}
/// H.264 encoder type (detected from hwcodec)
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub enum H264EncoderType {
/// NVIDIA NVENC
Nvenc,
@@ -49,6 +49,7 @@ pub enum H264EncoderType {
/// Software encoding (libx264/openh264)
Software,
/// No encoder available
#[default]
None,
}
@@ -67,12 +68,6 @@ impl std::fmt::Display for H264EncoderType {
}
}
impl Default for H264EncoderType {
fn default() -> Self {
Self::None
}
}
/// Map codec name to encoder type
fn codec_name_to_type(name: &str) -> H264EncoderType {
if name.contains("nvenc") {
@@ -93,11 +88,12 @@ fn codec_name_to_type(name: &str) -> H264EncoderType {
}
/// Input pixel format for H264 encoder
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum H264InputFormat {
/// YUV420P (I420) - planar Y, U, V
Yuv420p,
/// NV12 - Y plane + interleaved UV plane (optimal for VAAPI)
#[default]
Nv12,
/// NV21 - Y plane + interleaved VU plane
Nv21,
@@ -113,12 +109,6 @@ pub enum H264InputFormat {
Bgr24,
}
impl Default for H264InputFormat {
fn default() -> Self {
Self::Nv12 // Default to NV12 for VAAPI compatibility
}
}
/// H.264 encoder configuration
#[derive(Debug, Clone)]
pub struct H264Config {

View File

@@ -30,7 +30,7 @@ fn init_hwcodec_logging() {
}
/// H.265 encoder type (detected from hwcodec)
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub enum H265EncoderType {
/// NVIDIA NVENC
Nvenc,
@@ -47,6 +47,7 @@ pub enum H265EncoderType {
/// Software encoder (libx265)
Software,
/// No encoder available
#[default]
None,
}
@@ -65,12 +66,6 @@ impl std::fmt::Display for H265EncoderType {
}
}
impl Default for H265EncoderType {
fn default() -> Self {
Self::None
}
}
impl From<EncoderBackend> for H265EncoderType {
fn from(backend: EncoderBackend) -> Self {
match backend {
@@ -86,11 +81,12 @@ impl From<EncoderBackend> for H265EncoderType {
}
/// Input pixel format for H265 encoder
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum H265InputFormat {
/// YUV420P (I420) - planar Y, U, V
Yuv420p,
/// NV12 - Y plane + interleaved UV plane (optimal for hardware encoders)
#[default]
Nv12,
/// NV21 - Y plane + interleaved VU plane
Nv21,
@@ -106,12 +102,6 @@ pub enum H265InputFormat {
Bgr24,
}
impl Default for H265InputFormat {
fn default() -> Self {
Self::Nv12 // Default to NV12 for hardware encoder compatibility
}
}
/// H.265 encoder configuration
#[derive(Debug, Clone)]
pub struct H265Config {
@@ -256,8 +246,6 @@ pub fn detect_best_h265_encoder(width: u32, height: u32) -> (H265EncoderType, Op
H265EncoderType::Rkmpp
} else if codec.name.contains("v4l2m2m") {
H265EncoderType::V4l2M2m
} else if codec.name.contains("libx265") {
H265EncoderType::Software
} else {
H265EncoderType::Software // Default to software for unknown
};

View File

@@ -145,6 +145,7 @@ impl EncoderBackend {
}
/// Parse from string (case-insensitive)
#[allow(clippy::should_implement_trait)]
pub fn from_str(s: &str) -> Option<Self> {
match s.to_lowercase().as_str() {
"vaapi" => Some(EncoderBackend::Vaapi),

View File

@@ -15,12 +15,14 @@ use crate::video::format::{PixelFormat, Resolution};
#[typeshare]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type", content = "value")]
#[derive(Default)]
pub enum BitratePreset {
/// Speed priority: 1 Mbps, lowest latency, smaller GOP
/// Best for: slow networks, remote management, low-bandwidth scenarios
Speed,
/// Balanced: 4 Mbps, good quality/latency tradeoff
/// Best for: typical usage, recommended default
#[default]
Balanced,
/// Quality priority: 8 Mbps, best visual quality
/// Best for: local network, high-bandwidth scenarios, detailed work
@@ -74,12 +76,6 @@ impl BitratePreset {
}
}
impl Default for BitratePreset {
fn default() -> Self {
Self::Balanced
}
}
impl std::fmt::Display for BitratePreset {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {

View File

@@ -30,13 +30,14 @@ fn init_hwcodec_logging() {
}
/// VP8 encoder type (detected from hwcodec)
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub enum VP8EncoderType {
/// VAAPI (Intel on Linux)
Vaapi,
/// Software encoder (libvpx)
Software,
/// No encoder available
#[default]
None,
}
@@ -50,12 +51,6 @@ impl std::fmt::Display for VP8EncoderType {
}
}
impl Default for VP8EncoderType {
fn default() -> Self {
Self::None
}
}
impl From<EncoderBackend> for VP8EncoderType {
fn from(backend: EncoderBackend) -> Self {
match backend {
@@ -67,20 +62,15 @@ impl From<EncoderBackend> for VP8EncoderType {
}
/// Input pixel format for VP8 encoder
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum VP8InputFormat {
/// YUV420P (I420) - planar Y, U, V
Yuv420p,
/// NV12 - Y plane + interleaved UV plane
#[default]
Nv12,
}
impl Default for VP8InputFormat {
fn default() -> Self {
Self::Nv12 // Default to NV12 for VAAPI compatibility
}
}
/// VP8 encoder configuration
#[derive(Debug, Clone)]
pub struct VP8Config {
@@ -180,8 +170,6 @@ pub fn detect_best_vp8_encoder(width: u32, height: u32) -> (VP8EncoderType, Opti
let encoder_type = if codec.name.contains("vaapi") {
VP8EncoderType::Vaapi
} else if codec.name.contains("libvpx") {
VP8EncoderType::Software
} else {
VP8EncoderType::Software // Default to software for unknown
};

View File

@@ -30,13 +30,14 @@ fn init_hwcodec_logging() {
}
/// VP9 encoder type (detected from hwcodec)
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub enum VP9EncoderType {
/// VAAPI (Intel on Linux)
Vaapi,
/// Software encoder (libvpx-vp9)
Software,
/// No encoder available
#[default]
None,
}
@@ -50,12 +51,6 @@ impl std::fmt::Display for VP9EncoderType {
}
}
impl Default for VP9EncoderType {
fn default() -> Self {
Self::None
}
}
impl From<EncoderBackend> for VP9EncoderType {
fn from(backend: EncoderBackend) -> Self {
match backend {
@@ -67,20 +62,15 @@ impl From<EncoderBackend> for VP9EncoderType {
}
/// Input pixel format for VP9 encoder
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum VP9InputFormat {
/// YUV420P (I420) - planar Y, U, V
Yuv420p,
/// NV12 - Y plane + interleaved UV plane
#[default]
Nv12,
}
impl Default for VP9InputFormat {
fn default() -> Self {
Self::Nv12 // Default to NV12 for VAAPI compatibility
}
}
/// VP9 encoder configuration
#[derive(Debug, Clone)]
pub struct VP9Config {
@@ -180,8 +170,6 @@ pub fn detect_best_vp9_encoder(width: u32, height: u32) -> (VP9EncoderType, Opti
let encoder_type = if codec.name.contains("vaapi") {
VP9EncoderType::Vaapi
} else if codec.name.contains("libvpx") {
VP9EncoderType::Software
} else {
VP9EncoderType::Software // Default to software for unknown
};

View File

@@ -2,7 +2,7 @@
use serde::{Deserialize, Serialize};
use std::fmt;
use v4l::format::fourcc;
use v4l2r::PixelFormat as V4l2rPixelFormat;
/// Supported pixel formats
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
@@ -41,30 +41,29 @@ pub enum PixelFormat {
}
impl PixelFormat {
/// Convert to V4L2 FourCC
pub fn to_fourcc(&self) -> fourcc::FourCC {
/// Convert to V4L2 FourCC bytes
pub fn to_fourcc(&self) -> [u8; 4] {
match self {
PixelFormat::Mjpeg => fourcc::FourCC::new(b"MJPG"),
PixelFormat::Jpeg => fourcc::FourCC::new(b"JPEG"),
PixelFormat::Yuyv => fourcc::FourCC::new(b"YUYV"),
PixelFormat::Yvyu => fourcc::FourCC::new(b"YVYU"),
PixelFormat::Uyvy => fourcc::FourCC::new(b"UYVY"),
PixelFormat::Nv12 => fourcc::FourCC::new(b"NV12"),
PixelFormat::Nv21 => fourcc::FourCC::new(b"NV21"),
PixelFormat::Nv16 => fourcc::FourCC::new(b"NV16"),
PixelFormat::Nv24 => fourcc::FourCC::new(b"NV24"),
PixelFormat::Yuv420 => fourcc::FourCC::new(b"YU12"),
PixelFormat::Yvu420 => fourcc::FourCC::new(b"YV12"),
PixelFormat::Rgb565 => fourcc::FourCC::new(b"RGBP"),
PixelFormat::Rgb24 => fourcc::FourCC::new(b"RGB3"),
PixelFormat::Bgr24 => fourcc::FourCC::new(b"BGR3"),
PixelFormat::Grey => fourcc::FourCC::new(b"GREY"),
PixelFormat::Mjpeg => *b"MJPG",
PixelFormat::Jpeg => *b"JPEG",
PixelFormat::Yuyv => *b"YUYV",
PixelFormat::Yvyu => *b"YVYU",
PixelFormat::Uyvy => *b"UYVY",
PixelFormat::Nv12 => *b"NV12",
PixelFormat::Nv21 => *b"NV21",
PixelFormat::Nv16 => *b"NV16",
PixelFormat::Nv24 => *b"NV24",
PixelFormat::Yuv420 => *b"YU12",
PixelFormat::Yvu420 => *b"YV12",
PixelFormat::Rgb565 => *b"RGBP",
PixelFormat::Rgb24 => *b"RGB3",
PixelFormat::Bgr24 => *b"BGR3",
PixelFormat::Grey => *b"GREY",
}
}
/// Try to convert from V4L2 FourCC
pub fn from_fourcc(fourcc: fourcc::FourCC) -> Option<Self> {
let repr = fourcc.repr;
pub fn from_fourcc(repr: [u8; 4]) -> Option<Self> {
match &repr {
b"MJPG" => Some(PixelFormat::Mjpeg),
b"JPEG" => Some(PixelFormat::Jpeg),
@@ -85,6 +84,17 @@ impl PixelFormat {
}
}
/// Convert to v4l2r PixelFormat
pub fn to_v4l2r(&self) -> V4l2rPixelFormat {
V4l2rPixelFormat::from(&self.to_fourcc())
}
/// Convert from v4l2r PixelFormat
pub fn from_v4l2r(format: V4l2rPixelFormat) -> Option<Self> {
let repr: [u8; 4] = format.into();
Self::from_fourcc(repr)
}
/// Check if format is compressed (JPEG/MJPEG)
pub fn is_compressed(&self) -> bool {
matches!(self, PixelFormat::Mjpeg | PixelFormat::Jpeg)

View File

@@ -81,6 +81,11 @@ impl FrameBuffer {
pub fn len(&self) -> usize {
self.data.len()
}
/// Check if the frame buffer has no data
pub fn is_empty(&self) -> bool {
self.data.is_empty()
}
}
impl std::fmt::Debug for FrameBuffer {

View File

@@ -3,6 +3,7 @@
//! This module provides V4L2 video capture, encoding, and streaming functionality.
pub mod capture;
pub mod codec_constraints;
pub mod convert;
pub mod decoder;
pub mod device;
@@ -13,6 +14,7 @@ pub mod h264_pipeline;
pub mod shared_video_pipeline;
pub mod stream_manager;
pub mod streamer;
pub mod v4l2r_capture;
pub mod video_session;
pub use capture::VideoCapturer;

View File

@@ -18,6 +18,7 @@
use bytes::Bytes;
use parking_lot::RwLock as ParkingRwLock;
use std::collections::HashMap;
use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering};
use std::sync::Arc;
use std::time::{Duration, Instant};
@@ -26,22 +27,17 @@ use tracing::{debug, error, info, trace, warn};
/// Grace period before auto-stopping pipeline when no subscribers (in seconds)
const AUTO_STOP_GRACE_PERIOD_SECS: u64 = 3;
/// Restart capture stream after this many consecutive timeouts.
const CAPTURE_TIMEOUT_RESTART_THRESHOLD: u32 = 5;
/// Minimum valid frame size for capture
const MIN_CAPTURE_FRAME_SIZE: usize = 128;
/// Validate JPEG header every N frames to reduce overhead
const JPEG_VALIDATE_INTERVAL: u64 = 30;
use crate::error::{AppError, Result};
use crate::utils::LogThrottler;
use crate::video::convert::{Nv12Converter, PixelConverter};
use crate::video::decoder::MjpegTurboDecoder;
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
use hwcodec::ffmpeg_hw::{last_error_message as ffmpeg_hw_last_error, HwMjpegH26xConfig, HwMjpegH26xPipeline};
use v4l::buffer::Type as BufferType;
use v4l::io::traits::CaptureStream;
use v4l::prelude::*;
use v4l::video::Capture;
use v4l::video::capture::Parameters;
use v4l::Format;
use crate::video::encoder::h264::{detect_best_encoder, H264Config, H264Encoder, H264InputFormat};
use crate::video::encoder::h265::{
detect_best_h265_encoder, H265Config, H265Encoder, H265InputFormat,
@@ -52,6 +48,11 @@ use crate::video::encoder::vp8::{detect_best_vp8_encoder, VP8Config, VP8Encoder}
use crate::video::encoder::vp9::{detect_best_vp9_encoder, VP9Config, VP9Encoder};
use crate::video::format::{PixelFormat, Resolution};
use crate::video::frame::{FrameBuffer, FrameBufferPool, VideoFrame};
use crate::video::v4l2r_capture::V4l2rCaptureStream;
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
use hwcodec::ffmpeg_hw::{
last_error_message as ffmpeg_hw_last_error, HwMjpegH26xConfig, HwMjpegH26xPipeline,
};
/// Encoded video frame for distribution
#[derive(Debug, Clone)]
@@ -511,7 +512,10 @@ impl SharedVideoPipeline {
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
if needs_mjpeg_decode
&& is_rkmpp_encoder
&& matches!(config.output_codec, VideoEncoderType::H264 | VideoEncoderType::H265)
&& matches!(
config.output_codec,
VideoEncoderType::H264 | VideoEncoderType::H265
)
{
info!(
"Initializing FFmpeg HW MJPEG->{} pipeline (no fallback)",
@@ -528,7 +532,11 @@ impl SharedVideoPipeline {
thread_count: 1,
};
let pipeline = HwMjpegH26xPipeline::new(hw_config).map_err(|e| {
let detail = if e.is_empty() { ffmpeg_hw_last_error() } else { e };
let detail = if e.is_empty() {
ffmpeg_hw_last_error()
} else {
e
};
AppError::VideoError(format!(
"FFmpeg HW MJPEG->{} init failed: {}",
config.output_codec, detail
@@ -902,7 +910,11 @@ impl SharedVideoPipeline {
/// Get subscriber count
pub fn subscriber_count(&self) -> usize {
self.subscribers.read().iter().filter(|tx| !tx.is_closed()).count()
self.subscribers
.read()
.iter()
.filter(|tx| !tx.is_closed())
.count()
}
/// Report that a receiver has lagged behind
@@ -951,7 +963,11 @@ impl SharedVideoPipeline {
pipeline
.reconfigure(bitrate_kbps as i32, gop as i32)
.map_err(|e| {
let detail = if e.is_empty() { ffmpeg_hw_last_error() } else { e };
let detail = if e.is_empty() {
ffmpeg_hw_last_error()
} else {
e
};
AppError::VideoError(format!(
"FFmpeg HW reconfigure failed: {}",
detail
@@ -1279,53 +1295,17 @@ impl SharedVideoPipeline {
let frame_seq_tx = frame_seq_tx.clone();
let buffer_pool = buffer_pool.clone();
std::thread::spawn(move || {
let device = match Device::with_path(&device_path) {
Ok(d) => d,
Err(e) => {
error!("Failed to open device {:?}: {}", device_path, e);
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(1);
return;
}
};
let requested_format = Format::new(
config.resolution.width,
config.resolution.height,
config.input_format.to_fourcc(),
);
let actual_format = match device.set_format(&requested_format) {
Ok(f) => f,
Err(e) => {
error!("Failed to set capture format: {}", e);
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(1);
return;
}
};
let resolution = Resolution::new(actual_format.width, actual_format.height);
let pixel_format =
PixelFormat::from_fourcc(actual_format.fourcc).unwrap_or(config.input_format);
let stride = actual_format.stride;
if config.fps > 0 {
if let Err(e) = device.set_params(&Parameters::with_fps(config.fps)) {
warn!("Failed to set hardware FPS: {}", e);
}
}
let mut stream = match MmapStream::with_buffers(
&device,
BufferType::VideoCapture,
let mut stream = match V4l2rCaptureStream::open(
&device_path,
config.resolution,
config.input_format,
config.fps,
buffer_count.max(1),
Duration::from_secs(2),
) {
Ok(s) => s,
Ok(stream) => stream,
Err(e) => {
error!("Failed to create capture stream: {}", e);
error!("Failed to open capture stream: {}", e);
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(1);
@@ -1333,10 +1313,28 @@ impl SharedVideoPipeline {
}
};
let resolution = stream.resolution();
let pixel_format = stream.format();
let stride = stream.stride();
let mut no_subscribers_since: Option<Instant> = None;
let grace_period = Duration::from_secs(AUTO_STOP_GRACE_PERIOD_SECS);
let mut sequence: u64 = 0;
let mut validate_counter: u64 = 0;
let mut consecutive_timeouts: u32 = 0;
let capture_error_throttler = LogThrottler::with_secs(5);
let mut suppressed_capture_errors: HashMap<String, u64> = HashMap::new();
let classify_capture_error = |err: &std::io::Error| -> String {
let message = err.to_string();
if message.contains("dqbuf failed") && message.contains("EINVAL") {
"capture_dqbuf_einval".to_string()
} else if message.contains("dqbuf failed") {
"capture_dqbuf".to_string()
} else {
format!("capture_{:?}", err.kind())
}
};
while pipeline.running_flag.load(Ordering::Acquire) {
let subscriber_count = pipeline.subscriber_count();
@@ -1366,49 +1364,78 @@ impl SharedVideoPipeline {
no_subscribers_since = None;
}
let (buf, meta) = match stream.next() {
Ok(frame_data) => frame_data,
let mut owned = buffer_pool.take(MIN_CAPTURE_FRAME_SIZE);
let meta = match stream.next_into(&mut owned) {
Ok(meta) => {
consecutive_timeouts = 0;
meta
}
Err(e) => {
if e.kind() == std::io::ErrorKind::TimedOut {
consecutive_timeouts = consecutive_timeouts.saturating_add(1);
warn!("Capture timeout - no signal?");
if consecutive_timeouts >= CAPTURE_TIMEOUT_RESTART_THRESHOLD {
warn!(
"Capture timed out {} consecutive times, restarting video pipeline",
consecutive_timeouts
);
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(sequence.wrapping_add(1));
break;
}
} else {
error!("Capture error: {}", e);
consecutive_timeouts = 0;
let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) {
let suppressed =
suppressed_capture_errors.remove(&key).unwrap_or(0);
if suppressed > 0 {
error!(
"Capture error: {} (suppressed {} repeats)",
e, suppressed
);
} else {
error!("Capture error: {}", e);
}
} else {
let counter = suppressed_capture_errors.entry(key).or_insert(0);
*counter = counter.saturating_add(1);
}
}
continue;
}
};
let frame_size = meta.bytesused as usize;
let frame_size = meta.bytes_used;
if frame_size < MIN_CAPTURE_FRAME_SIZE {
continue;
}
validate_counter = validate_counter.wrapping_add(1);
if pixel_format.is_compressed()
&& validate_counter % JPEG_VALIDATE_INTERVAL == 0
&& !VideoFrame::is_valid_jpeg_bytes(&buf[..frame_size])
&& validate_counter.is_multiple_of(JPEG_VALIDATE_INTERVAL)
&& !VideoFrame::is_valid_jpeg_bytes(&owned[..frame_size])
{
continue;
}
let mut owned = buffer_pool.take(frame_size);
owned.resize(frame_size, 0);
owned[..frame_size].copy_from_slice(&buf[..frame_size]);
owned.truncate(frame_size);
let frame = Arc::new(VideoFrame::from_pooled(
Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))),
resolution,
pixel_format,
stride,
sequence,
meta.sequence,
));
sequence = sequence.wrapping_add(1);
sequence = meta.sequence.wrapping_add(1);
{
let mut guard = latest_frame.write();
*guard = Some(frame);
}
let _ = frame_seq_tx.send(sequence);
}
pipeline.running_flag.store(false, Ordering::Release);
@@ -1473,7 +1500,11 @@ impl SharedVideoPipeline {
}
let packet = pipeline.encode(raw_frame, pts_ms).map_err(|e| {
let detail = if e.is_empty() { ffmpeg_hw_last_error() } else { e };
let detail = if e.is_empty() {
ffmpeg_hw_last_error()
} else {
e
};
AppError::VideoError(format!("FFmpeg HW encode failed: {}", detail))
})?;
@@ -1493,9 +1524,10 @@ impl SharedVideoPipeline {
}
let decoded_buf = if input_format.is_compressed() {
let decoder = state.mjpeg_decoder.as_mut().ok_or_else(|| {
AppError::VideoError("MJPEG decoder not initialized".to_string())
})?;
let decoder = state
.mjpeg_decoder
.as_mut()
.ok_or_else(|| AppError::VideoError("MJPEG decoder not initialized".to_string()))?;
let decoded = decoder.decode(raw_frame)?;
Some(decoded)
} else {
@@ -1525,16 +1557,18 @@ impl SharedVideoPipeline {
debug!("[Pipeline] Keyframe will be generated for this frame");
}
let encode_result = if needs_yuv420p && state.yuv420p_converter.is_some() {
let encode_result = if needs_yuv420p {
// Software encoder with direct input conversion to YUV420P
let conv = state.yuv420p_converter.as_mut().unwrap();
let yuv420p_data = conv
.convert(raw_frame)
.map_err(|e| AppError::VideoError(format!("YUV420P conversion failed: {}", e)))?;
encoder.encode_raw(yuv420p_data, pts_ms)
} else if state.nv12_converter.is_some() {
if let Some(conv) = state.yuv420p_converter.as_mut() {
let yuv420p_data = conv.convert(raw_frame).map_err(|e| {
AppError::VideoError(format!("YUV420P conversion failed: {}", e))
})?;
encoder.encode_raw(yuv420p_data, pts_ms)
} else {
encoder.encode_raw(raw_frame, pts_ms)
}
} else if let Some(conv) = state.nv12_converter.as_mut() {
// Hardware encoder with input conversion to NV12
let conv = state.nv12_converter.as_mut().unwrap();
let nv12_data = conv
.convert(raw_frame)
.map_err(|e| AppError::VideoError(format!("NV12 conversion failed: {}", e)))?;

View File

@@ -37,6 +37,7 @@ use crate::error::Result;
use crate::events::{EventBus, SystemEvent, VideoDeviceInfo};
use crate::hid::HidController;
use crate::stream::MjpegStreamHandler;
use crate::video::codec_constraints::StreamCodecConstraints;
use crate::video::format::{PixelFormat, Resolution};
use crate::video::streamer::{Streamer, StreamerState};
use crate::webrtc::WebRtcStreamer;
@@ -144,6 +145,16 @@ impl VideoStreamManager {
*self.config_store.write().await = Some(config);
}
/// Get current stream codec constraints derived from global configuration.
///
/// Reads the attached config store (if any) and derives the constraints from
/// the current config snapshot. When no config store has been attached yet
/// (early startup), falls back to unrestricted constraints.
pub async fn codec_constraints(&self) -> StreamCodecConstraints {
    if let Some(ref config_store) = *self.config_store.read().await {
        let config = config_store.get();
        StreamCodecConstraints::from_config(&config)
    } else {
        // No store attached: nothing to constrain against.
        StreamCodecConstraints::unrestricted()
    }
}
/// Get current streaming mode
pub async fn current_mode(&self) -> StreamMode {
self.mode.read().await.clone()
@@ -718,9 +729,11 @@ impl VideoStreamManager {
/// Returns None if video capture cannot be started or pipeline creation fails.
pub async fn subscribe_encoded_frames(
&self,
) -> Option<tokio::sync::mpsc::Receiver<std::sync::Arc<
crate::video::shared_video_pipeline::EncodedVideoFrame,
>>> {
) -> Option<
tokio::sync::mpsc::Receiver<
std::sync::Arc<crate::video::shared_video_pipeline::EncodedVideoFrame>,
>,
> {
// 1. Ensure video capture is initialized (for config discovery)
if self.streamer.state().await == StreamerState::Uninitialized {
tracing::info!("Initializing video capture for encoded frame subscription");
@@ -756,7 +769,11 @@ impl VideoStreamManager {
}
// 3. Use WebRtcStreamer to ensure the shared video pipeline is running
match self.webrtc_streamer.ensure_video_pipeline_for_external().await {
match self
.webrtc_streamer
.ensure_video_pipeline_for_external()
.await
{
Ok(pipeline) => Some(pipeline.subscribe()),
Err(e) => {
tracing::error!("Failed to start shared video pipeline: {}", e);

View File

@@ -3,9 +3,11 @@
//! This module provides a high-level interface for video capture and streaming,
//! managing the lifecycle of the capture thread and MJPEG/WebRTC distribution.
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, AtomicU32, Ordering};
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::RwLock;
use tracing::{debug, error, info, trace, warn};
@@ -15,12 +17,8 @@ use super::frame::{FrameBuffer, FrameBufferPool, VideoFrame};
use crate::error::{AppError, Result};
use crate::events::{EventBus, SystemEvent};
use crate::stream::MjpegStreamHandler;
use v4l::buffer::Type as BufferType;
use v4l::io::traits::CaptureStream;
use v4l::prelude::*;
use v4l::video::capture::Parameters;
use v4l::video::Capture;
use v4l::Format;
use crate::utils::LogThrottler;
use crate::video::v4l2r_capture::V4l2rCaptureStream;
/// Minimum valid frame size for capture
const MIN_CAPTURE_FRAME_SIZE: usize = 128;
@@ -573,11 +571,9 @@ impl Streamer {
break;
}
}
} else {
if zero_since.is_some() {
info!("Clients reconnected, canceling auto-pause");
zero_since = None;
}
} else if zero_since.is_some() {
info!("Clients reconnected, canceling auto-pause");
zero_since = None;
}
}
});
@@ -632,8 +628,7 @@ impl Streamer {
}
};
let mut device_opt: Option<Device> = None;
let mut format_opt: Option<Format> = None;
let mut stream_opt: Option<V4l2rCaptureStream> = None;
let mut last_error: Option<String> = None;
for attempt in 0..MAX_RETRIES {
@@ -642,8 +637,18 @@ impl Streamer {
return;
}
let device = match Device::with_path(&device_path) {
Ok(d) => d,
match V4l2rCaptureStream::open(
&device_path,
config.resolution,
config.format,
config.fps,
BUFFER_COUNT,
Duration::from_secs(2),
) {
Ok(stream) => {
stream_opt = Some(stream);
break;
}
Err(e) => {
let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") {
@@ -660,42 +665,12 @@ impl Streamer {
last_error = Some(err_str);
break;
}
};
let requested = Format::new(
config.resolution.width,
config.resolution.height,
config.format.to_fourcc(),
);
match device.set_format(&requested) {
Ok(actual) => {
device_opt = Some(device);
format_opt = Some(actual);
break;
}
Err(e) => {
let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") {
warn!(
"Device busy on set_format attempt {}/{}, retrying in {}ms...",
attempt + 1,
MAX_RETRIES,
RETRY_DELAY_MS
);
std::thread::sleep(std::time::Duration::from_millis(RETRY_DELAY_MS));
last_error = Some(err_str);
continue;
}
last_error = Some(err_str);
break;
}
}
}
let (device, actual_format) = match (device_opt, format_opt) {
(Some(d), Some(f)) => (d, f),
_ => {
let mut stream = match stream_opt {
Some(stream) => stream,
None => {
error!(
"Failed to open device {:?}: {}",
device_path,
@@ -709,42 +684,35 @@ impl Streamer {
}
};
let resolution = stream.resolution();
let pixel_format = stream.format();
let stride = stream.stride();
info!(
"Capture format: {}x{} {:?} stride={}",
actual_format.width, actual_format.height, actual_format.fourcc, actual_format.stride
resolution.width, resolution.height, pixel_format, stride
);
let resolution = Resolution::new(actual_format.width, actual_format.height);
let pixel_format =
PixelFormat::from_fourcc(actual_format.fourcc).unwrap_or(config.format);
if config.fps > 0 {
if let Err(e) = device.set_params(&Parameters::with_fps(config.fps)) {
warn!("Failed to set hardware FPS: {}", e);
}
}
let mut stream =
match MmapStream::with_buffers(&device, BufferType::VideoCapture, BUFFER_COUNT) {
Ok(s) => s,
Err(e) => {
error!("Failed to create capture stream: {}", e);
self.mjpeg_handler.set_offline();
set_state(StreamerState::Error);
self.direct_active.store(false, Ordering::SeqCst);
self.current_fps.store(0, Ordering::Relaxed);
return;
}
};
let buffer_pool = Arc::new(FrameBufferPool::new(BUFFER_COUNT.max(4) as usize));
let mut signal_present = true;
let mut sequence: u64 = 0;
let mut validate_counter: u64 = 0;
let mut idle_since: Option<std::time::Instant> = None;
let mut fps_frame_count: u64 = 0;
let mut last_fps_time = std::time::Instant::now();
let capture_error_throttler = LogThrottler::with_secs(5);
let mut suppressed_capture_errors: HashMap<String, u64> = HashMap::new();
let classify_capture_error = |err: &std::io::Error| -> String {
let message = err.to_string();
if message.contains("dqbuf failed") && message.contains("EINVAL") {
"capture_dqbuf_einval".to_string()
} else if message.contains("dqbuf failed") {
"capture_dqbuf".to_string()
} else {
format!("capture_{:?}", err.kind())
}
};
while !self.direct_stop.load(Ordering::Relaxed) {
let mjpeg_clients = self.mjpeg_handler.client_count();
@@ -768,8 +736,9 @@ impl Streamer {
idle_since = None;
}
let (buf, meta) = match stream.next() {
Ok(frame_data) => frame_data,
let mut owned = buffer_pool.take(MIN_CAPTURE_FRAME_SIZE);
let meta = match stream.next_into(&mut owned) {
Ok(meta) => meta,
Err(e) => {
if e.kind() == std::io::ErrorKind::TimedOut {
if signal_present {
@@ -811,35 +780,43 @@ impl Streamer {
break;
}
error!("Capture error: {}", e);
let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) {
let suppressed = suppressed_capture_errors.remove(&key).unwrap_or(0);
if suppressed > 0 {
error!("Capture error: {} (suppressed {} repeats)", e, suppressed);
} else {
error!("Capture error: {}", e);
}
} else {
let counter = suppressed_capture_errors.entry(key).or_insert(0);
*counter = counter.saturating_add(1);
}
continue;
}
};
let frame_size = meta.bytesused as usize;
let frame_size = meta.bytes_used;
if frame_size < MIN_CAPTURE_FRAME_SIZE {
continue;
}
validate_counter = validate_counter.wrapping_add(1);
if pixel_format.is_compressed()
&& validate_counter % JPEG_VALIDATE_INTERVAL == 0
&& !VideoFrame::is_valid_jpeg_bytes(&buf[..frame_size])
&& validate_counter.is_multiple_of(JPEG_VALIDATE_INTERVAL)
&& !VideoFrame::is_valid_jpeg_bytes(&owned[..frame_size])
{
continue;
}
let mut owned = buffer_pool.take(frame_size);
owned.resize(frame_size, 0);
owned[..frame_size].copy_from_slice(&buf[..frame_size]);
owned.truncate(frame_size);
let frame = VideoFrame::from_pooled(
Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))),
resolution,
pixel_format,
actual_format.stride,
sequence,
stride,
meta.sequence,
);
sequence = sequence.wrapping_add(1);
if !signal_present {
signal_present = true;
@@ -985,7 +962,7 @@ impl Streamer {
*streamer.state.write().await = StreamerState::Recovering;
// Publish reconnecting event (every 5 attempts to avoid spam)
if attempt == 1 || attempt % 5 == 0 {
if attempt == 1 || attempt.is_multiple_of(5) {
streamer
.publish_event(SystemEvent::StreamReconnecting {
device: device_path.clone(),

277
src/video/v4l2r_capture.rs Normal file
View File

@@ -0,0 +1,277 @@
//! V4L2 capture implementation using v4l2r (ioctl layer).
use std::fs::File;
use std::io;
use std::os::fd::AsFd;
use std::path::Path;
use std::time::Duration;
use nix::poll::{poll, PollFd, PollFlags, PollTimeout};
use tracing::{debug, warn};
use v4l2r::bindings::{v4l2_requestbuffers, v4l2_streamparm, v4l2_streamparm__bindgen_ty_1};
use v4l2r::ioctl::{
self, Capabilities, Capability as V4l2rCapability, MemoryConsistency, PlaneMapping, QBufPlane,
QBuffer, QueryBuffer, V4l2Buffer,
};
use v4l2r::memory::{MemoryType, MmapHandle};
use v4l2r::{Format as V4l2rFormat, PixelFormat as V4l2rPixelFormat, QueueType};
use crate::error::{AppError, Result};
use crate::video::format::{PixelFormat, Resolution};
/// Metadata for a captured frame.
#[derive(Debug, Clone, Copy)]
pub struct CaptureMeta {
    /// Total number of payload bytes copied into the destination buffer
    /// (sum over all planes of the dequeued V4L2 buffer).
    pub bytes_used: usize,
    /// Driver-reported V4L2 buffer sequence number for this frame.
    pub sequence: u64,
}
/// V4L2 capture stream backed by v4l2r ioctl.
pub struct V4l2rCaptureStream {
    // Open handle to the V4L2 device node; closed on drop.
    fd: File,
    // Capture queue negotiated at open time (single- or multi-planar).
    queue: QueueType,
    // Resolution actually accepted by the driver (may differ from requested).
    resolution: Resolution,
    // Pixel format actually accepted by the driver.
    format: PixelFormat,
    // Bytes per line of the first plane as reported by the driver, or a
    // width-based estimate when the driver reports no plane info.
    stride: u32,
    // Maximum time next_into() waits for a frame; zero disables the poll wait.
    timeout: Duration,
    // mappings[buffer_index][plane_index]: mmap'd plane memory, kept alive
    // for the lifetime of the stream.
    mappings: Vec<Vec<PlaneMapping>>,
}
impl V4l2rCaptureStream {
    /// Open `device_path`, negotiate `resolution`/`format`, allocate and mmap
    /// `buffer_count` MMAP buffers, queue them all and start streaming.
    ///
    /// The driver is free to adjust resolution, pixel format and stride; the
    /// values it actually chose are recorded on the returned stream (see
    /// `resolution()`, `format()`, `stride()`). A non-zero `fps` is requested
    /// via VIDIOC_S_PARM; failure to set it is logged but not fatal.
    /// `timeout` bounds each `next_into()` poll wait (zero disables waiting).
    pub fn open(
        device_path: impl AsRef<Path>,
        resolution: Resolution,
        format: PixelFormat,
        fps: u32,
        buffer_count: u32,
        timeout: Duration,
    ) -> Result<Self> {
        let mut fd = File::options()
            .read(true)
            .write(true)
            .open(device_path.as_ref())
            .map_err(|e| AppError::VideoError(format!("Failed to open device: {}", e)))?;
        let caps: V4l2rCapability = ioctl::querycap(&fd)
            .map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?;
        let caps_flags = caps.device_caps();
        // Prefer multi-planar capture when available, as it is required for some
        // devices/pixel formats (e.g. NV12 via VIDEO_CAPTURE_MPLANE).
        let queue = if caps_flags.contains(Capabilities::VIDEO_CAPTURE_MPLANE) {
            QueueType::VideoCaptureMplane
        } else if caps_flags.contains(Capabilities::VIDEO_CAPTURE) {
            QueueType::VideoCapture
        } else {
            return Err(AppError::VideoError(
                "Device does not support capture queues".to_string(),
            ));
        };
        // Start from the driver's current format so any fields we do not set
        // keep driver-provided defaults, then override geometry and fourcc.
        let mut fmt: V4l2rFormat = ioctl::g_fmt(&fd, queue)
            .map_err(|e| AppError::VideoError(format!("Failed to get device format: {}", e)))?;
        fmt.width = resolution.width;
        fmt.height = resolution.height;
        fmt.pixelformat = V4l2rPixelFormat::from(&format.to_fourcc());
        let actual_fmt: V4l2rFormat = ioctl::s_fmt(&mut fd, (queue, &fmt))
            .map_err(|e| AppError::VideoError(format!("Failed to set device format: {}", e)))?;
        // S_FMT may have adjusted any of these; record what was actually chosen.
        let actual_resolution = Resolution::new(actual_fmt.width, actual_fmt.height);
        let actual_format = PixelFormat::from_v4l2r(actual_fmt.pixelformat).unwrap_or(format);
        // Stride = bytesperline of the first plane; when the driver reports no
        // plane info, estimate from width (x bytes-per-pixel when known).
        let stride = actual_fmt
            .plane_fmt
            .first()
            .map(|p| p.bytesperline)
            .unwrap_or_else(|| match actual_format.bytes_per_pixel() {
                Some(bpp) => actual_resolution.width * bpp as u32,
                None => actual_resolution.width,
            });
        if fps > 0 {
            // Best effort: some drivers ignore or reject S_PARM.
            if let Err(e) = set_fps(&fd, queue, fps) {
                warn!("Failed to set hardware FPS: {}", e);
            }
        }
        // Request MMAP buffers; the driver may grant a different count than asked.
        let req: v4l2_requestbuffers = ioctl::reqbufs(
            &fd,
            queue,
            MemoryType::Mmap,
            buffer_count,
            MemoryConsistency::empty(),
        )
        .map_err(|e| AppError::VideoError(format!("Failed to request buffers: {}", e)))?;
        let allocated = req.count as usize;
        if allocated == 0 {
            return Err(AppError::VideoError(
                "Driver returned zero capture buffers".to_string(),
            ));
        }
        // mmap every plane of every buffer up front; the mappings stay alive
        // for the lifetime of the stream and back the copies in next_into().
        let mut mappings = Vec::with_capacity(allocated);
        for index in 0..allocated as u32 {
            let query: QueryBuffer = ioctl::querybuf(&fd, queue, index as usize).map_err(|e| {
                AppError::VideoError(format!("Failed to query buffer {}: {}", index, e))
            })?;
            if query.planes.is_empty() {
                return Err(AppError::VideoError(format!(
                    "Driver returned zero planes for buffer {}",
                    index
                )));
            }
            let mut plane_maps = Vec::with_capacity(query.planes.len());
            for plane in &query.planes {
                let mapping = ioctl::mmap(&fd, plane.mem_offset, plane.length).map_err(|e| {
                    AppError::VideoError(format!("Failed to mmap buffer {}: {}", index, e))
                })?;
                plane_maps.push(mapping);
            }
            mappings.push(plane_maps);
        }
        let mut stream = Self {
            fd,
            queue,
            resolution: actual_resolution,
            format: actual_format,
            stride,
            timeout,
            mappings,
        };
        // Hand all buffers to the driver, then start the capture stream.
        stream.queue_all_buffers()?;
        ioctl::streamon(&stream.fd, stream.queue)
            .map_err(|e| AppError::VideoError(format!("Failed to start capture stream: {}", e)))?;
        Ok(stream)
    }

    /// Resolution actually negotiated with the driver.
    pub fn resolution(&self) -> Resolution {
        self.resolution
    }

    /// Pixel format actually negotiated with the driver.
    pub fn format(&self) -> PixelFormat {
        self.format
    }

    /// Line stride (bytes per line of the first plane).
    pub fn stride(&self) -> u32 {
        self.stride
    }

    /// Dequeue the next captured frame into `dst` and return its metadata.
    ///
    /// `dst` is resized to the exact total payload size and filled with the
    /// plane payloads concatenated in order (honouring each plane's
    /// `data_offset`). The dequeued buffer is re-queued to the driver before
    /// returning. A poll timeout is surfaced as `ErrorKind::TimedOut` so
    /// callers can distinguish "no signal" from hard failures.
    pub fn next_into(&mut self, dst: &mut Vec<u8>) -> io::Result<CaptureMeta> {
        self.wait_ready()?;
        let dqbuf: V4l2Buffer = ioctl::dqbuf(&self.fd, self.queue)
            .map_err(|e| io::Error::other(format!("dqbuf failed: {}", e)))?;
        let index = dqbuf.as_v4l2_buffer().index as usize;
        let sequence = dqbuf.as_v4l2_buffer().sequence as u64;
        // First pass: compute the total payload so dst is resized exactly once.
        // Offsets/lengths are clamped to the mapping size to guard against
        // out-of-range values from the driver.
        let mut total = 0usize;
        for (plane_idx, plane) in dqbuf.planes_iter().enumerate() {
            let bytes_used = *plane.bytesused as usize;
            let data_offset = plane.data_offset.copied().unwrap_or(0) as usize;
            if bytes_used == 0 {
                continue;
            }
            let mapping = &self.mappings[index][plane_idx];
            let start = data_offset.min(mapping.len());
            let end = (data_offset + bytes_used).min(mapping.len());
            total += end.saturating_sub(start);
        }
        dst.resize(total, 0);
        // Second pass: copy each plane's payload back-to-back into dst.
        let mut cursor = 0usize;
        for (plane_idx, plane) in dqbuf.planes_iter().enumerate() {
            let bytes_used = *plane.bytesused as usize;
            let data_offset = plane.data_offset.copied().unwrap_or(0) as usize;
            if bytes_used == 0 {
                continue;
            }
            let mapping = &self.mappings[index][plane_idx];
            let start = data_offset.min(mapping.len());
            let end = (data_offset + bytes_used).min(mapping.len());
            let len = end.saturating_sub(start);
            if len == 0 {
                continue;
            }
            dst[cursor..cursor + len].copy_from_slice(&mapping[start..end]);
            cursor += len;
        }
        // Return the buffer to the driver so capture keeps flowing.
        self.queue_buffer(index as u32)
            .map_err(|e| io::Error::other(e.to_string()))?;
        Ok(CaptureMeta {
            bytes_used: total,
            sequence,
        })
    }

    /// Block until the capture fd is readable or the timeout elapses.
    ///
    /// A zero timeout skips polling entirely (dqbuf then proceeds directly;
    /// the fd is opened in blocking mode, so dqbuf is expected to block).
    fn wait_ready(&self) -> io::Result<()> {
        if self.timeout.is_zero() {
            return Ok(());
        }
        let mut fds = [PollFd::new(self.fd.as_fd(), PollFlags::POLLIN)];
        // PollTimeout is u16-millisecond bounded; clamp (~65s max).
        let timeout_ms = self.timeout.as_millis().min(u16::MAX as u128) as u16;
        let ready = poll(&mut fds, PollTimeout::from(timeout_ms))?;
        if ready == 0 {
            return Err(io::Error::new(io::ErrorKind::TimedOut, "capture timeout"));
        }
        Ok(())
    }

    /// Queue every allocated buffer to the driver (used once at startup).
    fn queue_all_buffers(&mut self) -> Result<()> {
        for index in 0..self.mappings.len() as u32 {
            self.queue_buffer(index)?;
        }
        Ok(())
    }

    /// Queue buffer `index` (all of its planes) back to the driver.
    fn queue_buffer(&mut self, index: u32) -> Result<()> {
        let handle = MmapHandle;
        let planes = self.mappings[index as usize]
            .iter()
            .map(|mapping| {
                let mut plane = QBufPlane::new_from_handle(&handle, 0);
                // Plane length is taken from the mmap size established at open.
                plane.0.length = mapping.len() as u32;
                plane
            })
            .collect();
        let mut qbuf: QBuffer<MmapHandle> = QBuffer::new(self.queue, index);
        qbuf.planes = planes;
        ioctl::qbuf::<_, ()>(&self.fd, qbuf)
            .map_err(|e| AppError::VideoError(format!("Failed to queue buffer: {}", e)))?;
        Ok(())
    }
}
impl Drop for V4l2rCaptureStream {
    fn drop(&mut self) {
        // Best-effort STREAMOFF; a failure during teardown only merits a
        // debug-level log, since the fd is about to be closed anyway.
        match ioctl::streamoff(&self.fd, self.queue) {
            Ok(()) => {}
            Err(e) => debug!("Failed to stop capture stream: {}", e),
        }
    }
}
/// Request a hardware capture rate of `fps` frames per second via VIDIOC_S_PARM.
///
/// V4L2 expresses the rate as a time-per-frame fraction, so `fps` becomes
/// `1 / fps`. A zero `fps` would yield the invalid fraction `1/0` and is
/// rejected up front (callers already guard `fps > 0`, so this is defensive).
/// Drivers may silently pick the closest supported rate; the value they
/// actually chose is ignored here.
fn set_fps(fd: &File, queue: QueueType, fps: u32) -> Result<()> {
    if fps == 0 {
        return Err(AppError::VideoError(
            "Refusing to set FPS of 0 (invalid time-per-frame fraction)".to_string(),
        ));
    }
    // v4l2_streamparm is a C struct containing a union; zero-initialise it and
    // fill only the capture arm, which applies to (multi-)planar capture queues.
    let mut params = unsafe { std::mem::zeroed::<v4l2_streamparm>() };
    params.type_ = queue as u32;
    params.parm = v4l2_streamparm__bindgen_ty_1 {
        capture: v4l2r::bindings::v4l2_captureparm {
            timeperframe: v4l2r::bindings::v4l2_fract {
                numerator: 1,
                denominator: fps,
            },
            ..unsafe { std::mem::zeroed() }
        },
    };
    let _actual: v4l2_streamparm = ioctl::s_parm(fd, params)
        .map_err(|e| AppError::VideoError(format!("Failed to set FPS: {}", e)))?;
    Ok(())
}

View File

@@ -326,7 +326,6 @@ impl VideoSessionManager {
bitrate_preset: self.config.bitrate_preset,
fps: self.config.fps,
encoder_backend: self.config.encoder_backend,
..Default::default()
};
// Create new pipeline

View File

@@ -7,7 +7,11 @@ use std::sync::Arc;
use crate::config::*;
use crate::error::{AppError, Result};
use crate::events::SystemEvent;
use crate::rtsp::RtspService;
use crate::state::AppState;
use crate::video::codec_constraints::{
enforce_constraints_with_stream_manager, StreamCodecConstraints,
};
/// 应用 Video 配置变更
pub async fn apply_video_config(
@@ -191,9 +195,7 @@ pub async fn apply_hid_config(
// Low-endpoint UDCs (e.g., musb) cannot handle consumer control endpoints reliably
if new_config.backend == HidBackend::Otg {
if let Some(udc) =
crate::otg::configfs::resolve_udc_name(new_config.otg_udc.as_deref())
{
if let Some(udc) = crate::otg::configfs::resolve_udc_name(new_config.otg_udc.as_deref()) {
if crate::otg::configfs::is_low_endpoint_udc(&udc) && new_hid_functions.consumer {
tracing::warn!(
"UDC {} has low endpoint resources, disabling consumer control",
@@ -446,6 +448,15 @@ pub async fn apply_audio_config(
Ok(())
}
/// Apply stream codec constraints derived from global config.
///
/// Derives the constraints from the current config snapshot, asks the stream
/// manager to enforce them, and returns any human-readable enforcement
/// message for logging by the caller.
pub async fn enforce_stream_codec_constraints(state: &Arc<AppState>) -> Result<Option<String>> {
    let constraints = StreamCodecConstraints::from_config(&state.config.get());
    let enforcement =
        enforce_constraints_with_stream_manager(&state.stream_manager, &constraints).await?;
    Ok(enforcement.message)
}
/// 应用 RustDesk 配置变更
pub async fn apply_rustdesk_config(
state: &Arc<AppState>,
@@ -455,6 +466,7 @@ pub async fn apply_rustdesk_config(
tracing::info!("Applying RustDesk config changes...");
let mut rustdesk_guard = state.rustdesk.write().await;
let mut credentials_to_save = None;
// Check if service needs to be stopped
if old_config.enabled && !new_config.enabled {
@@ -466,7 +478,6 @@ pub async fn apply_rustdesk_config(
tracing::info!("RustDesk service stopped");
}
*rustdesk_guard = None;
return Ok(());
}
// Check if service needs to be started or restarted
@@ -475,8 +486,6 @@ pub async fn apply_rustdesk_config(
|| old_config.device_id != new_config.device_id
|| old_config.device_password != new_config.device_password;
let mut credentials_to_save = None;
if rustdesk_guard.is_none() {
// Create new service
tracing::info!("Initializing RustDesk service...");
@@ -509,28 +518,82 @@ pub async fn apply_rustdesk_config(
}
}
}
}
// Save credentials to persistent config store (outside the lock)
drop(rustdesk_guard);
if let Some(updated_config) = credentials_to_save {
tracing::info!("Saving RustDesk credentials to config store...");
if let Err(e) = state
.config
.update(|cfg| {
cfg.rustdesk.public_key = updated_config.public_key.clone();
cfg.rustdesk.private_key = updated_config.private_key.clone();
cfg.rustdesk.signing_public_key = updated_config.signing_public_key.clone();
cfg.rustdesk.signing_private_key = updated_config.signing_private_key.clone();
cfg.rustdesk.uuid = updated_config.uuid.clone();
})
.await
{
tracing::warn!("Failed to save RustDesk credentials: {}", e);
} else {
tracing::info!("RustDesk credentials saved successfully");
}
// Save credentials to persistent config store (outside the lock)
drop(rustdesk_guard);
if let Some(updated_config) = credentials_to_save {
tracing::info!("Saving RustDesk credentials to config store...");
if let Err(e) = state
.config
.update(|cfg| {
cfg.rustdesk.public_key = updated_config.public_key.clone();
cfg.rustdesk.private_key = updated_config.private_key.clone();
cfg.rustdesk.signing_public_key = updated_config.signing_public_key.clone();
cfg.rustdesk.signing_private_key = updated_config.signing_private_key.clone();
cfg.rustdesk.uuid = updated_config.uuid.clone();
})
.await
{
tracing::warn!("Failed to save RustDesk credentials: {}", e);
} else {
tracing::info!("RustDesk credentials saved successfully");
}
}
if let Some(message) = enforce_stream_codec_constraints(state).await? {
tracing::info!("{}", message);
}
Ok(())
}
/// Apply RTSP configuration changes.
///
/// Stops the service when it has been disabled, starts it when enabled and not
/// yet running, and restarts it when a connection-affecting setting changed.
/// Afterwards the global stream codec constraints are re-enforced.
pub async fn apply_rtsp_config(
    state: &Arc<AppState>,
    old_config: &RtspConfig,
    new_config: &RtspConfig,
) -> Result<()> {
    tracing::info!("Applying RTSP config changes...");
    let mut rtsp_guard = state.rtsp.write().await;

    // enabled -> disabled: stop and drop the service instance.
    if old_config.enabled && !new_config.enabled {
        if let Some(ref service) = *rtsp_guard {
            if let Err(e) = service.stop().await {
                // Log and continue; the instance is dropped regardless.
                tracing::error!("Failed to stop RTSP service: {}", e);
            }
        }
        *rtsp_guard = None;
    }

    if new_config.enabled {
        // Settings whose change requires re-creating/restarting the listener.
        let need_restart = old_config.bind != new_config.bind
            || old_config.port != new_config.port
            || old_config.path != new_config.path
            || old_config.codec != new_config.codec
            || old_config.username != new_config.username
            || old_config.password != new_config.password
            || old_config.allow_one_client != new_config.allow_one_client;
        if rtsp_guard.is_none() {
            // Not running yet (was disabled, or just stopped above): start fresh.
            let service = RtspService::new(new_config.clone(), state.stream_manager.clone());
            service.start().await?;
            tracing::info!("RTSP service started");
            *rtsp_guard = Some(Arc::new(service));
        } else if need_restart {
            if let Some(ref service) = *rtsp_guard {
                service.restart(new_config.clone()).await?;
                tracing::info!("RTSP service restarted");
            }
        }
    }

    // Release the service lock before enforcing codec constraints.
    drop(rtsp_guard);
    if let Some(message) = enforce_stream_codec_constraints(state).await? {
        tracing::info!("{}", message);
    }
    Ok(())
}

View File

@@ -24,6 +24,7 @@ mod audio;
mod auth;
mod hid;
mod msd;
mod rtsp;
mod rustdesk;
mod stream;
pub(crate) mod video;
@@ -35,6 +36,7 @@ pub use audio::{get_audio_config, update_audio_config};
pub use auth::{get_auth_config, update_auth_config};
pub use hid::{get_hid_config, update_hid_config};
pub use msd::{get_msd_config, update_msd_config};
pub use rtsp::{get_rtsp_config, get_rtsp_status, update_rtsp_config};
pub use rustdesk::{
get_device_password, get_rustdesk_config, get_rustdesk_status, regenerate_device_id,
regenerate_device_password, update_rustdesk_config,
@@ -50,10 +52,29 @@ use std::sync::Arc;
use crate::config::AppConfig;
use crate::state::AppState;
/// Strip secrets from a config snapshot before returning it over the HTTP API.
///
/// Mutates `config` in place; callers pass a clone of the live config so the
/// stored configuration is untouched.
fn sanitize_config_for_api(config: &mut AppConfig) {
    // Auth secrets
    config.auth.totp_secret = None;
    // Stream secrets
    config.stream.turn_password = None;
    // RustDesk secrets
    config.rustdesk.device_password.clear();
    config.rustdesk.relay_key = None;
    config.rustdesk.public_key = None;
    config.rustdesk.private_key = None;
    config.rustdesk.signing_public_key = None;
    config.rustdesk.signing_private_key = None;
    // RTSP secrets
    config.rtsp.password = None;
}
/// 获取完整配置
pub async fn get_all_config(State(state): State<Arc<AppState>>) -> Json<AppConfig> {
let mut config = (*state.config.get()).clone();
// 不暴露敏感信息
config.auth.totp_secret = None;
sanitize_config_for_api(&mut config);
Json(config)
}

View File

@@ -0,0 +1,70 @@
use axum::{extract::State, Json};
use std::sync::Arc;
use crate::error::{AppError, Result};
use crate::state::AppState;
use super::apply::apply_rtsp_config;
use super::types::{RtspConfigResponse, RtspConfigUpdate, RtspStatusResponse};
/// Get RTSP config
///
/// Returns a read-only snapshot of the RTSP section of the global config.
pub async fn get_rtsp_config(State(state): State<Arc<AppState>>) -> Json<RtspConfigResponse> {
    Json(RtspConfigResponse::from(&state.config.get().rtsp))
}
/// Get RTSP status (config + service status)
pub async fn get_rtsp_status(State(state): State<Arc<AppState>>) -> Json<RtspStatusResponse> {
let config = state.config.get().rtsp.clone();
let status = {
let guard = state.rtsp.read().await;
if let Some(ref service) = *guard {
service.status().await
} else {
crate::rtsp::RtspServiceStatus::Stopped
}
};
Json(RtspStatusResponse::new(&config, status))
}
/// Update RTSP config.
///
/// Flow: validate the request -> persist the merged config -> apply it to the
/// running service. If applying fails, the persisted config is rolled back to
/// the previous value so stored config and service state stay consistent.
pub async fn update_rtsp_config(
    State(state): State<Arc<AppState>>,
    Json(req): Json<RtspConfigUpdate>,
) -> Result<Json<RtspConfigResponse>> {
    // Reject structurally invalid updates before touching stored config.
    req.validate()?;

    // Snapshot the current RTSP section so we can roll back on apply failure.
    let old_config = state.config.get().rtsp.clone();

    state
        .config
        .update(|config| {
            req.apply_to(&mut config.rtsp);
        })
        .await?;

    let new_config = state.config.get().rtsp.clone();

    if let Err(err) = apply_rtsp_config(&state, &old_config, &new_config).await {
        tracing::error!("Failed to apply RTSP config: {}", err);

        // Best-effort rollback of the *persisted* config. NOTE(review): the
        // service itself may be left partially applied — confirm that
        // apply_rtsp_config leaves the service in a consistent state on error.
        if let Err(rollback_err) = state
            .config
            .update(|config| {
                config.rtsp = old_config.clone();
            })
            .await
        {
            // Both apply and rollback failed: surface both errors to the caller.
            tracing::error!(
                "Failed to rollback RTSP config after apply failure: {}",
                rollback_err
            );
            return Err(AppError::ServiceUnavailable(format!(
                "RTSP apply failed: {}; rollback failed: {}",
                err, rollback_err
            )));
        }
        // Rollback succeeded; propagate the original apply error.
        return Err(err);
    }

    Ok(Json(RtspConfigResponse::from(&new_config)))
}

View File

@@ -106,6 +106,15 @@ pub async fn update_rustdesk_config(
tracing::error!("Failed to apply RustDesk config: {}", e);
}
// Share a non-sensitive summary for frontend UX
let constraints = state.stream_manager.codec_constraints().await;
if constraints.rustdesk_enabled || constraints.rtsp_enabled {
tracing::info!(
"Stream codec constraints active after RustDesk update: {}",
constraints.reason
);
}
Ok(Json(RustDeskConfigResponse::from(&new_config)))
}
@@ -139,7 +148,7 @@ pub async fn regenerate_device_password(
Ok(Json(RustDeskConfigResponse::from(&new_config)))
}
/// 获取设备密码(管理员专用
/// 获取设备密码(已认证用户
pub async fn get_device_password(State(state): State<Arc<AppState>>) -> Json<serde_json::Value> {
let config = state.config.get().rustdesk.clone();
Json(serde_json::json!({

View File

@@ -42,5 +42,10 @@ pub async fn update_stream_config(
tracing::error!("Failed to apply stream config: {}", e);
}
// 6. Enforce codec constraints after any stream config update
if let Err(e) = super::apply::enforce_stream_codec_constraints(&state).await {
tracing::error!("Failed to enforce stream codec constraints: {}", e);
}
Ok(Json(StreamConfigResponse::from(&new_stream_config)))
}

View File

@@ -1,5 +1,6 @@
use crate::config::*;
use crate::error::AppError;
use crate::rtsp::RtspServiceStatus;
use crate::rustdesk::config::RustDeskConfig;
use crate::video::encoder::BitratePreset;
use serde::Deserialize;
@@ -604,6 +605,124 @@ impl RustDeskConfigUpdate {
}
}
// ===== RTSP Config =====

/// API-safe view of the RTSP configuration.
///
/// Mirrors `RtspConfig`, but replaces the stored password with a
/// `has_password` flag so the secret itself is never serialized.
#[typeshare]
#[derive(Debug, serde::Serialize)]
pub struct RtspConfigResponse {
    pub enabled: bool,
    // Bind address of the RTSP listener.
    pub bind: String,
    pub port: u16,
    // Stream mount path; normalized without surrounding slashes when set
    // through the update API.
    pub path: String,
    // When true, only one RTSP client may be connected at a time.
    pub allow_one_client: bool,
    pub codec: RtspCodec,
    // Optional RTSP auth username; `None` means no username configured.
    pub username: Option<String>,
    // Whether a password is configured (the password itself is redacted).
    pub has_password: bool,
}

impl From<&RtspConfig> for RtspConfigResponse {
    fn from(config: &RtspConfig) -> Self {
        Self {
            enabled: config.enabled,
            bind: config.bind.clone(),
            port: config.port,
            path: config.path.clone(),
            allow_one_client: config.allow_one_client,
            codec: config.codec.clone(),
            username: config.username.clone(),
            // Expose only presence of the secret, never its value.
            has_password: config.password.is_some(),
        }
    }
}
/// Combined RTSP configuration and runtime service status, returned by the
/// status endpoint.
#[typeshare]
#[derive(Debug, serde::Serialize)]
pub struct RtspStatusResponse {
    pub config: RtspConfigResponse,
    // Human-readable service state (the `Display` form of `RtspServiceStatus`).
    pub service_status: String,
}

impl RtspStatusResponse {
    /// Build a response from the stored config and the current service status.
    pub fn new(config: &RtspConfig, status: RtspServiceStatus) -> Self {
        Self {
            config: RtspConfigResponse::from(config),
            service_status: status.to_string(),
        }
    }
}
/// Partial update payload for the RTSP configuration.
///
/// Every field is optional; only fields present in the request are applied.
/// Empty `username`/`password` strings clear the stored credential.
#[typeshare]
#[derive(Debug, Deserialize)]
pub struct RtspConfigUpdate {
    pub enabled: Option<bool>,
    pub bind: Option<String>,
    pub port: Option<u16>,
    pub path: Option<String>,
    pub allow_one_client: Option<bool>,
    pub codec: Option<RtspCodec>,
    pub username: Option<String>,
    pub password: Option<String>,
}
impl RtspConfigUpdate {
pub fn validate(&self) -> crate::error::Result<()> {
if let Some(port) = self.port {
if port == 0 {
return Err(AppError::BadRequest("RTSP port cannot be 0".into()));
}
}
if let Some(ref bind) = self.bind {
if bind.parse::<std::net::IpAddr>().is_err() {
return Err(AppError::BadRequest("RTSP bind must be a valid IP".into()));
}
}
if let Some(ref path) = self.path {
let normalized = path.trim_matches('/');
if normalized.is_empty() {
return Err(AppError::BadRequest("RTSP path cannot be empty".into()));
}
}
Ok(())
}
pub fn apply_to(&self, config: &mut RtspConfig) {
if let Some(enabled) = self.enabled {
config.enabled = enabled;
}
if let Some(ref bind) = self.bind {
config.bind = bind.clone();
}
if let Some(port) = self.port {
config.port = port;
}
if let Some(ref path) = self.path {
config.path = path.trim_matches('/').to_string();
}
if let Some(allow_one_client) = self.allow_one_client {
config.allow_one_client = allow_one_client;
}
if let Some(codec) = self.codec.clone() {
config.codec = codec;
}
if let Some(ref username) = self.username {
config.username = if username.is_empty() {
None
} else {
Some(username.clone())
};
}
if let Some(ref password) = self.password {
config.password = if password.is_empty() {
None
} else {
Some(password.clone())
};
}
}
}
// ===== Web Config =====
#[typeshare]
#[derive(Debug, Deserialize)]

View File

@@ -86,7 +86,7 @@ pub async fn start_extension(
// Start the extension
mgr.start(ext_id, &config.extensions)
.await
.map_err(|e| AppError::Internal(e))?;
.map_err(AppError::Internal)?;
// Return updated status
Ok(Json(ExtensionInfo {
@@ -108,7 +108,7 @@ pub async fn stop_extension(
let mgr = &state.extensions;
// Stop the extension
mgr.stop(ext_id).await.map_err(|e| AppError::Internal(e))?;
mgr.stop(ext_id).await.map_err(AppError::Internal)?;
// Return updated status
Ok(Json(ExtensionInfo {
@@ -156,7 +156,6 @@ pub struct TtydConfigUpdate {
pub enabled: Option<bool>,
pub port: Option<u16>,
pub shell: Option<String>,
pub credential: Option<String>,
}
/// Update gostc config
@@ -203,9 +202,6 @@ pub async fn update_ttyd_config(
if let Some(ref shell) = req.shell {
ttyd.shell = shell.clone();
}
if req.credential.is_some() {
ttyd.credential = req.credential.clone();
}
})
.await?;
@@ -263,14 +259,16 @@ pub async fn update_gostc_config(
if was_enabled && !is_enabled {
state.extensions.stop(ExtensionId::Gostc).await.ok();
} else if !was_enabled && is_enabled && has_key {
if state.extensions.check_available(ExtensionId::Gostc) {
state
.extensions
.start(ExtensionId::Gostc, &new_config.extensions)
.await
.ok();
}
} else if !was_enabled
&& is_enabled
&& has_key
&& state.extensions.check_available(ExtensionId::Gostc)
{
state
.extensions
.start(ExtensionId::Gostc, &new_config.extensions)
.await
.ok();
}
Ok(Json(new_config.extensions.gostc.clone()))
@@ -312,14 +310,16 @@ pub async fn update_easytier_config(
if was_enabled && !is_enabled {
state.extensions.stop(ExtensionId::Easytier).await.ok();
} else if !was_enabled && is_enabled && has_name {
if state.extensions.check_available(ExtensionId::Easytier) {
state
.extensions
.start(ExtensionId::Easytier, &new_config.extensions)
.await
.ok();
}
} else if !was_enabled
&& is_enabled
&& has_name
&& state.extensions.check_available(ExtensionId::Easytier)
{
state
.extensions
.start(ExtensionId::Easytier, &new_config.extensions)
.await
.ok();
}
Ok(Json(new_config.extensions.easytier.clone()))

File diff suppressed because it is too large Load Diff

View File

@@ -50,6 +50,7 @@ pub fn create_router(state: Arc<AppState>) -> Router {
.route("/stream/mode", post(handlers::stream_mode_set))
.route("/stream/bitrate", post(handlers::stream_set_bitrate))
.route("/stream/codecs", get(handlers::stream_codecs_list))
.route("/stream/constraints", get(handlers::stream_constraints_get))
// WebRTC endpoints
.route("/webrtc/session", post(handlers::webrtc_create_session))
.route("/webrtc/offer", post(handlers::webrtc_offer))
@@ -59,6 +60,7 @@ pub fn create_router(state: Arc<AppState>) -> Router {
.route("/webrtc/close", post(handlers::webrtc_close_session))
// HID endpoints
.route("/hid/status", get(handlers::hid_status))
.route("/hid/otg/self-check", get(handlers::hid_otg_self_check))
.route("/hid/reset", post(handlers::hid_reset))
// WebSocket HID endpoint (for MJPEG mode)
.route("/ws/hid", any(ws_hid_handler))
@@ -120,6 +122,13 @@ pub fn create_router(state: Arc<AppState>) -> Router {
"/config/rustdesk/regenerate-password",
post(handlers::config::regenerate_device_password),
)
// RTSP configuration endpoints
.route("/config/rtsp", get(handlers::config::get_rtsp_config))
.route("/config/rtsp", patch(handlers::config::update_rtsp_config))
.route(
"/config/rtsp/status",
get(handlers::config::get_rtsp_status),
)
// Web server configuration
.route("/config/web", get(handlers::config::get_web_config))
.route("/config/web", patch(handlers::config::update_web_config))
@@ -128,6 +137,9 @@ pub fn create_router(state: Arc<AppState>) -> Router {
.route("/config/auth", patch(handlers::config::update_auth_config))
// System control
.route("/system/restart", post(handlers::system_restart))
.route("/update/overview", get(handlers::update_overview))
.route("/update/upgrade", post(handlers::update_upgrade))
.route("/update/status", get(handlers::update_status))
// MSD (Mass Storage Device) endpoints
.route("/msd/status", get(handlers::msd_status))
.route("/msd/images", get(handlers::msd_images_list))
@@ -158,6 +170,7 @@ pub fn create_router(state: Arc<AppState>) -> Router {
.route("/atx/status", get(handlers::atx_status))
.route("/atx/power", post(handlers::atx_power))
.route("/atx/wol", post(handlers::atx_wol))
.route("/atx/wol/history", get(handlers::atx_wol_history))
// Device discovery endpoints
.route("/devices/atx", get(handlers::devices::list_atx_devices))
// Extension management endpoints

View File

@@ -127,14 +127,14 @@ fn try_serve_file(path: &str) -> Option<Response<Body>> {
.first_or_octet_stream()
.to_string();
return Some(
Some(
Response::builder()
.status(StatusCode::OK)
.header(header::CONTENT_TYPE, mime)
.header(header::CACHE_CONTROL, "public, max-age=86400")
.body(Body::from(data))
.unwrap(),
);
)
}
Err(e) => {
tracing::debug!(
@@ -143,7 +143,7 @@ fn try_serve_file(path: &str) -> Option<Response<Body>> {
file_path.display(),
e
);
return None;
None
}
}
}

View File

@@ -108,19 +108,15 @@ impl TurnServer {
/// Video codec preference
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum VideoCodec {
#[default]
H264,
VP8,
VP9,
AV1,
}
impl Default for VideoCodec {
fn default() -> Self {
Self::H264
}
}
impl std::fmt::Display for VideoCodec {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {

View File

@@ -18,7 +18,9 @@ pub fn mdns_mode_from_env() -> Option<MulticastDnsMode> {
}
pub fn mdns_mode() -> MulticastDnsMode {
mdns_mode_from_env().unwrap_or(MulticastDnsMode::QueryAndGather)
// Default to QueryOnly to avoid gathering .local host candidates by default.
// This is generally more stable for LAN first-connection while preserving mDNS queries.
mdns_mode_from_env().unwrap_or(MulticastDnsMode::QueryOnly)
}
pub fn mdns_mode_label(mode: MulticastDnsMode) -> &'static str {

View File

@@ -93,7 +93,6 @@ impl PeerConnection {
urls: turn.urls.clone(),
username: turn.username.clone(),
credential: turn.credential.clone(),
..Default::default()
});
}
@@ -318,14 +317,26 @@ impl PeerConnection {
.await
.map_err(|e| AppError::VideoError(format!("Failed to create answer: {}", e)))?;
// Wait for ICE gathering complete (or timeout) after setting local description.
// This improves first-connection robustness by returning a fuller initial candidate set.
let mut gather_complete = self.pc.gathering_complete_promise().await;
// Set local description
self.pc
.set_local_description(answer.clone())
.await
.map_err(|e| AppError::VideoError(format!("Failed to set local description: {}", e)))?;
// Wait a bit for ICE candidates to gather
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
const ICE_GATHER_TIMEOUT: tokio::time::Duration = tokio::time::Duration::from_millis(2500);
if tokio::time::timeout(ICE_GATHER_TIMEOUT, gather_complete.recv())
.await
.is_err()
{
debug!(
"ICE gathering timeout after {:?} for session {}",
ICE_GATHER_TIMEOUT, self.session_id
);
}
// Get gathered ICE candidates
let candidates = self.ice_candidates.lock().await.clone();

View File

@@ -330,9 +330,7 @@ impl OpusAudioTrack {
stream_id.to_string(),
));
Self {
track,
}
Self { track }
}
/// Get the underlying WebRTC track
@@ -365,13 +363,10 @@ impl OpusAudioTrack {
..Default::default()
};
self.track
.write_sample(&sample)
.await
.map_err(|e| {
error!("Failed to write Opus sample: {}", e);
AppError::WebRtcError(format!("Failed to write audio sample: {}", e))
})
self.track.write_sample(&sample).await.map_err(|e| {
error!("Failed to write Opus sample: {}", e);
AppError::WebRtcError(format!("Failed to write audio sample: {}", e))
})
}
}

View File

@@ -199,7 +199,7 @@ impl VideoTrack {
let data = frame.data();
let max_payload_size = 1200; // MTU - headers
let packet_count = (data.len() + max_payload_size - 1) / max_payload_size;
let packet_count = data.len().div_ceil(max_payload_size);
let mut bytes_sent = 0u64;
for i in 0..packet_count {

View File

@@ -292,7 +292,6 @@ impl UniversalSession {
urls: turn.urls.clone(),
username: turn.username.clone(),
credential: turn.credential.clone(),
..Default::default()
});
}
@@ -430,7 +429,9 @@ impl UniversalSession {
let candidate = IceCandidate {
candidate: candidate_str,
sdp_mid: candidate_json.as_ref().and_then(|j| j.sdp_mid.clone()),
sdp_mline_index: candidate_json.as_ref().and_then(|j| j.sdp_mline_index),
sdp_mline_index: candidate_json
.as_ref()
.and_then(|j| j.sdp_mline_index),
username_fragment: candidate_json
.as_ref()
.and_then(|j| j.username_fragment.clone()),
@@ -615,20 +616,15 @@ impl UniversalSession {
};
// Verify codec matches
let frame_codec = match encoded_frame.codec {
VideoEncoderType::H264 => VideoEncoderType::H264,
VideoEncoderType::H265 => VideoEncoderType::H265,
VideoEncoderType::VP8 => VideoEncoderType::VP8,
VideoEncoderType::VP9 => VideoEncoderType::VP9,
};
let frame_codec = encoded_frame.codec;
if frame_codec != expected_codec {
continue;
}
// Debug log for H265 frames
if expected_codec == VideoEncoderType::H265 {
if encoded_frame.is_keyframe || frames_sent % 30 == 0 {
if expected_codec == VideoEncoderType::H265
&& (encoded_frame.is_keyframe || frames_sent.is_multiple_of(30)) {
debug!(
"[Session-H265] Received frame #{}: size={}, keyframe={}, seq={}",
frames_sent,
@@ -637,7 +633,6 @@ impl UniversalSession {
encoded_frame.sequence
);
}
}
// Ensure decoder starts from a keyframe and recover on gaps.
let mut gap_detected = false;
@@ -768,7 +763,7 @@ impl UniversalSession {
// 20ms at 48kHz = 960 samples
let samples = 960u32;
if let Err(e) = audio_track.write_packet(&opus_frame.data, samples).await {
if packets_sent % 100 == 0 {
if packets_sent.is_multiple_of(100) {
debug!("Failed to write audio packet: {}", e);
}
} else {
@@ -838,13 +833,24 @@ impl UniversalSession {
}
}
let mut gather_complete = self.pc.gathering_complete_promise().await;
self.pc
.set_local_description(answer.clone())
.await
.map_err(|e| AppError::VideoError(format!("Failed to set local description: {}", e)))?;
// Wait for ICE candidates
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
// Wait for ICE gathering complete (or timeout) to return a fuller initial candidate set.
const ICE_GATHER_TIMEOUT: Duration = Duration::from_millis(2500);
if tokio::time::timeout(ICE_GATHER_TIMEOUT, gather_complete.recv())
.await
.is_err()
{
debug!(
"ICE gathering timeout after {:?} for session {}",
ICE_GATHER_TIMEOUT, self.session_id
);
}
let candidates = self.ice_candidates.lock().await.clone();
Ok(SdpAnswer::with_candidates(answer.sdp, candidates))

View File

@@ -285,7 +285,7 @@ impl UniversalVideoTrack {
}
/// Get current statistics
///
/// Write an encoded frame to the track
///
/// Handles codec-specific processing:
@@ -464,7 +464,6 @@ impl UniversalVideoTrack {
if let Err(e) = rtp_track.write_rtp(&packet).await {
trace!("H265 write_rtp failed: {}", e);
}
}
Ok(())

View File

@@ -35,8 +35,8 @@ use tokio::sync::RwLock;
use tracing::{debug, info, trace, warn};
use crate::audio::{AudioController, OpusFrame};
use crate::events::EventBus;
use crate::error::{AppError, Result};
use crate::events::EventBus;
use crate::hid::HidController;
use crate::video::encoder::registry::EncoderBackend;
use crate::video::encoder::registry::VideoEncoderType;
@@ -250,6 +250,33 @@ impl WebRtcStreamer {
}
}
fn should_stop_pipeline(session_count: usize, subscriber_count: usize) -> bool {
session_count == 0 && subscriber_count == 0
}
    /// Stop the shared video pipeline if it is no longer in use.
    ///
    /// `reason` is a short label included in the log line (e.g. which caller
    /// triggered the check). The pipeline is stopped only when there are zero
    /// sessions and zero subscribers; otherwise it is left running.
    async fn stop_pipeline_if_idle(&self, reason: &str) {
        let session_count = self.sessions.read().await.len();
        // Clone the Arc so the pipeline lock is not held across the stop call.
        let pipeline = self.video_pipeline.read().await.clone();
        let Some(pipeline) = pipeline else {
            // No pipeline exists; nothing to stop.
            return;
        };
        let subscriber_count = pipeline.subscriber_count();
        if Self::should_stop_pipeline(session_count, subscriber_count) {
            info!(
                "{} stopping video pipeline (sessions={}, subscribers={})",
                reason, session_count, subscriber_count
            );
            pipeline.stop();
        } else {
            debug!(
                "Keeping video pipeline alive (reason={}, sessions={}, subscribers={})",
                reason, session_count, subscriber_count
            );
        }
    }
/// Ensure video pipeline is initialized and running
async fn ensure_video_pipeline(self: &Arc<Self>) -> Result<Arc<SharedVideoPipeline>> {
let mut pipeline_guard = self.video_pipeline.write().await;
@@ -270,7 +297,6 @@ impl WebRtcStreamer {
bitrate_preset: config.bitrate_preset,
fps: config.fps,
encoder_backend: config.encoder_backend,
..Default::default()
};
info!("Creating shared video pipeline for {:?}", codec);
@@ -311,7 +337,9 @@ impl WebRtcStreamer {
}
drop(pipeline_guard);
info!("Video pipeline stopped, but keeping capture config for new sessions");
info!(
"Video pipeline stopped, but keeping capture config for new sessions"
);
}
break;
}
@@ -739,13 +767,7 @@ impl WebRtcStreamer {
session.close().await?;
}
// Stop pipeline if no more sessions
if self.sessions.read().await.is_empty() {
if let Some(ref pipeline) = *self.video_pipeline.read().await {
info!("No more sessions, stopping video pipeline");
pipeline.stop();
}
}
self.stop_pipeline_if_idle("After close_session").await;
Ok(())
}
@@ -762,11 +784,8 @@ impl WebRtcStreamer {
}
}
// Stop pipeline
drop(sessions);
if let Some(ref pipeline) = *self.video_pipeline.read().await {
pipeline.stop();
}
self.stop_pipeline_if_idle("After close_all_sessions").await;
count
}
@@ -825,14 +844,9 @@ impl WebRtcStreamer {
sessions.remove(id);
}
// Stop pipeline if no more sessions
if sessions.is_empty() {
drop(sessions);
if let Some(ref pipeline) = *self.video_pipeline.read().await {
info!("No more sessions after cleanup, stopping video pipeline");
pipeline.stop();
}
}
drop(sessions);
self.stop_pipeline_if_idle("After cleanup_closed_sessions")
.await;
}
}
@@ -926,10 +940,7 @@ impl WebRtcStreamer {
let pipeline = pipeline_for_callback.clone();
let sid = sid.clone();
tokio::spawn(async move {
info!(
"Requesting keyframe for session {} after reconnect",
sid
);
info!("Requesting keyframe for session {} after reconnect", sid);
pipeline.request_keyframe().await;
});
});
@@ -992,4 +1003,12 @@ mod tests {
let codecs = streamer.supported_video_codecs();
assert!(codecs.contains(&VideoCodecType::H264));
}
#[test]
fn stop_pipeline_requires_no_sessions_and_no_subscribers() {
assert!(WebRtcStreamer::should_stop_pipeline(0, 0));
assert!(!WebRtcStreamer::should_stop_pipeline(1, 0));
assert!(!WebRtcStreamer::should_stop_pipeline(0, 1));
assert!(!WebRtcStreamer::should_stop_pipeline(2, 3));
}
}

View File

@@ -136,6 +136,15 @@ export const msdConfigApi = {
// ===== ATX 配置 API =====
import type { AtxDevices } from '@/types/generated'
/** One Wake-on-LAN history record persisted on the server. */
export interface WolHistoryEntry {
  mac_address: string
  // NOTE(review): presumably a Unix timestamp of the last use — confirm
  // against the backend serializer.
  updated_at: number
}

/** Response shape of GET /atx/wol/history. */
export interface WolHistoryResponse {
  history: WolHistoryEntry[]
}
export const atxConfigApi = {
/**
* 获取 ATX 配置
@@ -166,6 +175,13 @@ export const atxConfigApi = {
method: 'POST',
body: JSON.stringify({ mac_address: macAddress }),
}),
/**
* 获取 WOL 历史记录(服务端持久化)
* @param limit 返回条数1-50
*/
getWolHistory: (limit = 5) =>
request<WolHistoryResponse>(`/atx/wol/history?limit=${Math.max(1, Math.min(50, limit))}`),
}
// ===== Audio 配置 API =====
@@ -330,6 +346,49 @@ export const rustdeskConfigApi = {
}),
}
// ===== RTSP Config API =====

/** Video codec used for the RTSP stream. */
export type RtspCodec = 'h264' | 'h265'

/** RTSP configuration as returned by the server (password redacted). */
export interface RtspConfigResponse {
  enabled: boolean
  bind: string
  port: number
  path: string
  allow_one_client: boolean
  codec: RtspCodec
  username?: string | null
  /** True when a password is configured; the secret itself is never sent. */
  has_password: boolean
}

/** Partial update payload; omitted fields are left unchanged server-side. */
export interface RtspConfigUpdate {
  enabled?: boolean
  bind?: string
  port?: number
  path?: string
  allow_one_client?: boolean
  codec?: RtspCodec
  username?: string
  password?: string
}

/** Config plus the live service status string. */
export interface RtspStatusResponse {
  config: RtspConfigResponse
  service_status: string
}
/** REST client for the RTSP configuration endpoints. */
export const rtspConfigApi = {
  /** Fetch the current RTSP configuration. */
  get: () => request<RtspConfigResponse>('/config/rtsp'),

  /** Partially update the RTSP configuration. */
  update: (config: RtspConfigUpdate) => {
    const body = JSON.stringify(config)
    return request<RtspConfigResponse>('/config/rtsp', { method: 'PATCH', body })
  },

  /** Fetch the configuration together with the live service status. */
  getStatus: () => request<RtspStatusResponse>('/config/rtsp/status'),
}
// ===== Web 服务器配置 API =====
/** Web 服务器配置 */

View File

@@ -101,6 +101,46 @@ export const systemApi = {
}),
}
/** Release channel to check/upgrade against. */
export type UpdateChannel = 'stable' | 'beta'

/** Response of GET /update/overview. */
export interface UpdateOverviewResponse {
  success: boolean
  current_version: string
  channel: UpdateChannel
  latest_version: string
  /** True when latest_version is newer than current_version. */
  upgrade_available: boolean
  target_version?: string
  /** Release notes for every version between current and latest. */
  notes_between: Array<{
    version: string
    published_at: string
    notes: string[]
  }>
}

/** Response of GET /update/status (upgrade progress polling). */
export interface UpdateStatusResponse {
  success: boolean
  phase: 'idle' | 'checking' | 'downloading' | 'verifying' | 'installing' | 'restarting' | 'success' | 'failed'
  /** NOTE(review): presumably 0-100 percent — confirm with backend. */
  progress: number
  current_version: string
  target_version?: string
  message?: string
  last_error?: string
}
/** REST client for the self-update endpoints. */
export const updateApi = {
  /** Check current/latest versions and release notes for a channel. */
  overview(channel: UpdateChannel = 'stable') {
    const query = encodeURIComponent(channel)
    return request<UpdateOverviewResponse>(`/update/overview?channel=${query}`)
  },

  /** Start an upgrade, optionally pinning channel and/or target version. */
  upgrade(payload: { channel?: UpdateChannel; target_version?: string }) {
    return request<{ success: boolean; message?: string }>('/update/upgrade', {
      method: 'POST',
      body: JSON.stringify(payload),
    })
  },

  /** Poll the progress of an in-flight upgrade. */
  status() {
    return request<UpdateStatusResponse>('/update/status')
  },
}
// Stream API
export interface VideoCodecInfo {
id: string
@@ -124,6 +164,19 @@ export interface AvailableCodecsResponse {
codecs: VideoCodecInfo[]
}
/**
 * Response of GET /stream/constraints: which codecs are currently allowed
 * and which services (RustDesk / RTSP) are imposing the constraint.
 */
export interface StreamConstraintsResponse {
  success: boolean
  allowed_codecs: string[]
  /** Codec the stream is pinned to, or null when not locked. */
  locked_codec: string | null
  disallow_mjpeg: boolean
  /** Which services contribute to the active constraint. */
  sources: {
    rustdesk: boolean
    rtsp: boolean
  }
  /** Human-readable explanation of the active constraint. */
  reason: string
  current_mode: string
}
export const streamApi = {
status: () =>
request<{
@@ -161,6 +214,9 @@ export const streamApi = {
getCodecs: () =>
request<AvailableCodecsResponse>('/stream/codecs'),
getConstraints: () =>
request<StreamConstraintsResponse>('/stream/constraints'),
setBitratePreset: (bitrate_preset: import('@/types/generated').BitratePreset) =>
request<{ success: boolean; message?: string }>('/stream/bitrate', {
method: 'POST',
@@ -186,10 +242,10 @@ export const webrtcApi = {
createSession: () =>
request<{ session_id: string }>('/webrtc/session', { method: 'POST' }),
offer: (sdp: string, clientId?: string) =>
offer: (sdp: string) =>
request<{ sdp: string; session_id: string; ice_candidates: IceCandidate[] }>('/webrtc/offer', {
method: 'POST',
body: JSON.stringify({ sdp, client_id: clientId }),
body: JSON.stringify({ sdp }),
}),
addIceCandidate: (sessionId: string, candidate: IceCandidate) =>
@@ -247,17 +303,34 @@ export const hidApi = {
screen_resolution: [number, number] | null
}>('/hid/status'),
keyboard: async (type: 'down' | 'up', key: number, modifiers?: {
ctrl?: boolean
shift?: boolean
alt?: boolean
meta?: boolean
}) => {
otgSelfCheck: () =>
request<{
overall_ok: boolean
error_count: number
warning_count: number
hid_backend: string
selected_udc: string | null
bound_udc: string | null
udc_state: string | null
udc_speed: string | null
available_udcs: string[]
other_gadgets: string[]
checks: Array<{
id: string
ok: boolean
level: 'info' | 'warn' | 'error'
message: string
hint?: string
path?: string
}>
}>('/hid/otg/self-check'),
keyboard: async (type: 'down' | 'up', key: number, modifier?: number) => {
await ensureHidConnection()
const event: HidKeyboardEvent = {
type: type === 'down' ? 'keydown' : 'keyup',
key,
modifiers,
modifier: (modifier ?? 0) & 0xff,
}
await hidWs.sendKeyboard(event)
return { success: true }
@@ -481,6 +554,25 @@ export const msdApi = {
}),
}
interface SerialDeviceOption {
  path: string
  name: string
}

// Ordering rank for serial device paths: USB serial adapters first, generic
// devices next, on-board UART-style ports (/dev/ttyS*, /dev/S*) last.
const USB_SERIAL_RE = /^\/dev\/ttyUSB/i
const ONBOARD_SERIAL_RE = /^\/dev\/(ttyS|S)/i

function getSerialDevicePriority(path: string): number {
  if (USB_SERIAL_RE.test(path)) return 0
  return ONBOARD_SERIAL_RE.test(path) ? 2 : 1
}

// Return a new array sorted by priority, then by natural path order
// (numeric-aware, case-insensitive). The input array is not mutated.
function sortSerialDevices(serialDevices: SerialDeviceOption[]): SerialDeviceOption[] {
  return serialDevices.slice().sort((a, b) => {
    const rank = getSerialDevicePriority(a.path) - getSerialDevicePriority(b.path)
    return rank !== 0
      ? rank
      : a.path.localeCompare(b.path, undefined, { numeric: true, sensitivity: 'base' })
  })
}
// Config API
/** @deprecated 使用域特定 APIvideoConfigApi, hidConfigApi 等)替代 */
export const configApi = {
@@ -493,8 +585,8 @@ export const configApi = {
body: JSON.stringify(updates),
}),
listDevices: () =>
request<{
listDevices: async () => {
const result = await request<{
video: Array<{
path: string
name: string
@@ -522,7 +614,13 @@ export const configApi = {
ttyd_available: boolean
rustdesk_available: boolean
}
}>('/devices'),
}>('/devices')
return {
...result,
serial: sortSerialDevices(result.serial),
}
},
}
// 导出新的域分离配置 API
@@ -536,11 +634,15 @@ export {
audioConfigApi,
extensionsApi,
rustdeskConfigApi,
rtspConfigApi,
webConfigApi,
type RustDeskConfigResponse,
type RustDeskStatusResponse,
type RustDeskConfigUpdate,
type RustDeskPasswordResponse,
type RtspConfigResponse,
type RtspConfigUpdate,
type RtspStatusResponse,
type WebConfig,
} from './config'

View File

@@ -52,7 +52,7 @@ async function handleLogout() {
</script>
<template>
<div class="h-screen flex flex-col bg-background overflow-hidden">
<div class="h-screen h-dvh flex flex-col bg-background overflow-hidden">
<!-- Header -->
<header class="shrink-0 z-50 w-full border-b bg-background/95 backdrop-blur supports-[backdrop-filter]:bg-background/60">
<div class="flex h-14 items-center px-4 max-w-full">
@@ -86,14 +86,14 @@ async function handleLogout() {
</span>
<!-- Theme Toggle -->
<Button variant="ghost" size="icon" @click="toggleTheme">
<Button variant="ghost" size="icon" :aria-label="t('common.toggleTheme')" @click="toggleTheme">
<Sun class="h-4 w-4 rotate-0 scale-100 transition-all dark:-rotate-90 dark:scale-0" />
<Moon class="absolute h-4 w-4 rotate-90 scale-0 transition-all dark:rotate-0 dark:scale-100" />
<span class="sr-only">{{ t('common.toggleTheme') }}</span>
</Button>
<!-- Language Toggle -->
<Button variant="ghost" size="icon" @click="toggleLanguage">
<Button variant="ghost" size="icon" :aria-label="t('common.toggleLanguage')" @click="toggleLanguage">
<Languages class="h-4 w-4" />
<span class="sr-only">{{ t('common.toggleLanguage') }}</span>
</Button>
@@ -101,7 +101,7 @@ async function handleLogout() {
<!-- Mobile Menu -->
<DropdownMenu>
<DropdownMenuTrigger as-child class="md:hidden">
<Button variant="ghost" size="icon">
<Button variant="ghost" size="icon" :aria-label="t('common.menu')">
<Menu class="h-4 w-4" />
</Button>
</DropdownMenuTrigger>
@@ -119,7 +119,7 @@ async function handleLogout() {
</DropdownMenu>
<!-- Logout Button (Desktop) -->
<Button variant="ghost" size="icon" class="hidden md:flex" @click="handleLogout">
<Button variant="ghost" size="icon" class="hidden md:flex" :aria-label="t('nav.logout')" @click="handleLogout">
<LogOut class="h-4 w-4" />
<span class="sr-only">{{ t('nav.logout') }}</span>
</Button>

View File

@@ -1,5 +1,5 @@
<script setup lang="ts">
import { ref, computed } from 'vue'
import { ref, computed, watch } from 'vue'
import { useI18n } from 'vue-i18n'
import { Button } from '@/components/ui/button'
import { Badge } from '@/components/ui/badge'
@@ -18,6 +18,7 @@ import {
AlertDialogTitle,
} from '@/components/ui/alert-dialog'
import { Power, RotateCcw, CircleDot, Wifi, Send } from 'lucide-vue-next'
import { atxConfigApi } from '@/api/config'
const emit = defineEmits<{
(e: 'close'): void
@@ -41,6 +42,7 @@ const confirmDialogOpen = ref(false)
const wolMacAddress = ref('')
const wolHistory = ref<string[]>([])
const wolSending = ref(false)
const wolLoadingHistory = ref(false)
const powerStateColor = computed(() => {
switch (powerState.value) {
@@ -110,16 +112,11 @@ function sendWol() {
emit('wol', mac)
// Add to history if not exists
if (!wolHistory.value.includes(mac)) {
wolHistory.value.unshift(mac)
// Keep only last 5
if (wolHistory.value.length > 5) {
wolHistory.value.pop()
}
// Save to localStorage
localStorage.setItem('wol_history', JSON.stringify(wolHistory.value))
}
// Optimistic update, then sync from server after request likely completes
wolHistory.value = [mac, ...wolHistory.value.filter(item => item !== mac)].slice(0, 5)
setTimeout(() => {
loadWolHistory().catch(() => {})
}, 1200)
setTimeout(() => {
wolSending.value = false
@@ -130,15 +127,27 @@ function selectFromHistory(mac: string) {
wolMacAddress.value = mac
}
// Load WOL history on mount
const savedHistory = localStorage.getItem('wol_history')
if (savedHistory) {
async function loadWolHistory() {
wolLoadingHistory.value = true
try {
wolHistory.value = JSON.parse(savedHistory)
} catch (e) {
const response = await atxConfigApi.getWolHistory(5)
wolHistory.value = response.history.map(item => item.mac_address)
} catch {
wolHistory.value = []
} finally {
wolLoadingHistory.value = false
}
}
watch(
() => activeTab.value,
(tab) => {
if (tab === 'wol') {
loadWolHistory().catch(() => {})
}
},
{ immediate: true },
)
</script>
<template>
@@ -234,6 +243,10 @@ if (savedHistory) {
</p>
</div>
<p v-if="wolLoadingHistory" class="text-xs text-muted-foreground">
{{ t('common.loading') }}
</p>
<!-- History -->
<div v-if="wolHistory.length > 0" class="space-y-2">
<Separator />

View File

@@ -69,24 +69,24 @@ async function typeChar(char: string, signal: AbortSignal): Promise<boolean> {
return true
}
const { keyCode, shift } = mapping
const modifiers = shift ? { shift: true } : undefined
const { hidCode, shift } = mapping
const modifier = shift ? 0x02 : 0
try {
// Send keydown
await hidApi.keyboard('down', keyCode, modifiers)
await hidApi.keyboard('down', hidCode, modifier)
// Small delay between down and up to ensure key is registered
await sleep(5)
if (signal.aborted) {
// Even if aborted, still send keyup to release the key
await hidApi.keyboard('up', keyCode, modifiers)
await hidApi.keyboard('up', hidCode, modifier)
return false
}
// Send keyup
await hidApi.keyboard('up', keyCode, modifiers)
await hidApi.keyboard('up', hidCode, modifier)
// Additional small delay after keyup to ensure it's processed
await sleep(2)
@@ -96,7 +96,7 @@ async function typeChar(char: string, signal: AbortSignal): Promise<boolean> {
console.error('[Paste] Failed to type character:', char, error)
// Try to release the key even on error
try {
await hidApi.keyboard('up', keyCode, modifiers)
await hidApi.keyboard('up', hidCode, modifier)
} catch {
// Ignore cleanup errors
}

View File

@@ -442,7 +442,7 @@ onUnmounted(() => {
<Sheet :open="props.open" @update:open="emit('update:open', $event)">
<SheetContent
side="right"
class="w-[400px] sm:w-[440px] p-0 border-l border-slate-200 dark:border-slate-800 bg-white dark:bg-slate-950"
class="w-[90vw] max-w-[440px] p-0 border-l border-slate-200 dark:border-slate-800 bg-white dark:bg-slate-950"
>
<!-- Header -->
<SheetHeader class="px-6 py-3 border-b border-slate-200 dark:border-slate-800">
@@ -454,7 +454,7 @@ onUnmounted(() => {
</div>
</SheetHeader>
<ScrollArea class="h-[calc(100vh-60px)]">
<ScrollArea class="h-[calc(100dvh-60px)]">
<div class="px-6 py-4 space-y-6">
<!-- Video Section Header -->
<div>

View File

@@ -129,9 +129,11 @@ const statusBadgeText = computed(() => {
<HoverCard v-if="!prefersPopover" :open-delay="200" :close-delay="100">
<HoverCardTrigger as-child>
<!-- New layout: vertical with title on top, status+quickInfo on bottom -->
<div
<button
type="button"
:aria-label="`${title}: ${quickInfo || subtitle || statusText}`"
:class="cn(
'flex flex-col gap-0.5 rounded-md border cursor-pointer transition-colors',
'flex flex-col gap-0.5 rounded-md border cursor-pointer transition-colors text-left focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2',
compact ? 'px-2 py-1 text-xs min-w-[80px]' : 'px-3 py-1.5 text-sm min-w-[100px]',
'bg-white dark:bg-slate-800 hover:bg-slate-50 dark:hover:bg-slate-700',
'border-slate-200 dark:border-slate-700',
@@ -147,7 +149,7 @@ const statusBadgeText = computed(() => {
{{ quickInfo || subtitle || statusText }}
</span>
</div>
</div>
</button>
</HoverCardTrigger>
<HoverCardContent class="w-80" :align="hoverAlign">
@@ -228,9 +230,11 @@ const statusBadgeText = computed(() => {
<Popover v-else>
<PopoverTrigger as-child>
<!-- New layout: vertical with title on top, status+quickInfo on bottom -->
<div
<button
type="button"
:aria-label="`${title}: ${quickInfo || subtitle || statusText}`"
:class="cn(
'flex flex-col gap-0.5 rounded-md border cursor-pointer transition-colors',
'flex flex-col gap-0.5 rounded-md border cursor-pointer transition-colors text-left focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2',
compact ? 'px-2 py-1 text-xs min-w-[80px]' : 'px-3 py-1.5 text-sm min-w-[100px]',
'bg-white dark:bg-slate-800 hover:bg-slate-50 dark:hover:bg-slate-700',
'border-slate-200 dark:border-slate-700',
@@ -246,7 +250,7 @@ const statusBadgeText = computed(() => {
{{ quickInfo || subtitle || statusText }}
</span>
</div>
</div>
</button>
</PopoverTrigger>
<PopoverContent class="w-80" :align="hoverAlign">

View File

@@ -17,9 +17,16 @@ import {
SelectTrigger,
SelectValue,
} from '@/components/ui/select'
import { Monitor, RefreshCw, Loader2, Settings, Zap, Scale, Image } from 'lucide-vue-next'
import { Monitor, RefreshCw, Loader2, Settings, Zap, Scale, Image, AlertTriangle } from 'lucide-vue-next'
import HelpTooltip from '@/components/HelpTooltip.vue'
import { configApi, streamApi, type VideoCodecInfo, type EncoderBackendInfo, type BitratePreset } from '@/api'
import {
configApi,
streamApi,
type VideoCodecInfo,
type EncoderBackendInfo,
type BitratePreset,
type StreamConstraintsResponse,
} from '@/api'
import { useConfigStore } from '@/stores/config'
import { useRouter } from 'vue-router'
@@ -64,7 +71,50 @@ const loadingCodecs = ref(false)
// Backend list
const backends = ref<EncoderBackendInfo[]>([])
const constraints = ref<StreamConstraintsResponse | null>(null)
const currentEncoderBackend = computed(() => configStore.stream?.encoder || 'auto')
const isRtspEnabled = computed(() => {
if (typeof configStore.rtspStatus?.config?.enabled === 'boolean') {
return configStore.rtspStatus.config.enabled
}
return !!configStore.rtspConfig?.enabled
})
const isRustdeskEnabled = computed(() => {
if (typeof configStore.rustdeskStatus?.config?.enabled === 'boolean') {
return configStore.rustdeskStatus.config.enabled
}
return !!configStore.rustdeskConfig?.enabled
})
const isRtspCodecLocked = computed(() => isRtspEnabled.value)
const isRustdeskWebrtcLocked = computed(() => !isRtspEnabled.value && isRustdeskEnabled.value)
const codecLockSources = computed(() => {
if (isRtspCodecLocked.value) {
return isRustdeskEnabled.value ? 'RTSP/RustDesk' : 'RTSP'
}
if (isRustdeskWebrtcLocked.value) return 'RustDesk'
return ''
})
const codecLockMessage = computed(() => {
if (!codecLockSources.value) return ''
return t('actionbar.multiSourceCodecLocked', { sources: codecLockSources.value })
})
const videoParamWarningSources = computed(() => {
if (isRtspEnabled.value && isRustdeskEnabled.value) return 'RTSP/RustDesk'
if (isRtspEnabled.value) return 'RTSP'
if (isRustdeskEnabled.value) return 'RustDesk'
return ''
})
const videoParamWarningMessage = computed(() => {
if (!videoParamWarningSources.value) return ''
return t('actionbar.multiSourceVideoParamsWarning', { sources: videoParamWarningSources.value })
})
const isCodecLocked = computed(() => !!codecLockMessage.value)
const isCodecOptionDisabled = (codecId: string): boolean => {
if (!isBrowserSupported(codecId)) return true
if (isRustdeskWebrtcLocked.value && codecId === 'mjpeg') return true
return false
}
// Browser supported codecs (WebRTC receive capabilities)
const browserSupportedCodecs = ref<Set<string>>(new Set())
@@ -220,7 +270,7 @@ const availableCodecs = computed(() => {
const backend = backends.value.find(b => b.id === currentEncoderBackend.value)
if (!backend) return allAvailable
return allAvailable
const backendFiltered = allAvailable
.filter(codec => {
// MJPEG is always available (doesn't require encoder)
if (codec.id === 'mjpeg') return true
@@ -238,6 +288,13 @@ const availableCodecs = computed(() => {
backend: backend.name,
}
})
const allowed = constraints.value?.allowed_codecs
if (!allowed || allowed.length === 0) {
return backendFiltered
}
return backendFiltered.filter(codec => allowed.includes(codec.id))
})
// Cascading filters
@@ -303,6 +360,14 @@ async function loadCodecs() {
}
}
async function loadConstraints() {
try {
constraints.value = await streamApi.getConstraints()
} catch {
constraints.value = null
}
}
// Navigate to settings page (video tab)
function goToSettings() {
router.push('/settings?tab=video')
@@ -339,6 +404,22 @@ function syncFromCurrentIfChanged() {
// Handle video mode change
function handleVideoModeChange(mode: unknown) {
if (typeof mode !== 'string') return
if (isRtspCodecLocked.value) {
toast.warning(codecLockMessage.value)
return
}
if (isRustdeskWebrtcLocked.value && mode === 'mjpeg') {
toast.warning(codecLockMessage.value)
return
}
if (constraints.value?.allowed_codecs?.length && !constraints.value.allowed_codecs.includes(mode)) {
toast.error(constraints.value.reason || t('actionbar.selectMode'))
return
}
emit('update:videoMode', mode as VideoMode)
}
@@ -466,9 +547,13 @@ watch(() => props.open, (isOpen) => {
loadCodecs()
}
loadConstraints()
Promise.all([
configStore.refreshVideo(),
configStore.refreshStream(),
configStore.refreshRtspStatus(),
configStore.refreshRustdeskStatus(),
]).then(() => {
initializeFromCurrent()
}).catch(() => {
@@ -508,7 +593,7 @@ watch(currentConfig, () => {
<Select
:model-value="props.videoMode"
@update:model-value="handleVideoModeChange"
:disabled="loadingCodecs || availableCodecs.length === 0"
:disabled="loadingCodecs || availableCodecs.length === 0 || isRtspCodecLocked"
>
<SelectTrigger class="h-8 text-xs">
<div v-if="selectedCodecInfo" class="flex items-center gap-1.5 truncate">
@@ -530,8 +615,8 @@ watch(currentConfig, () => {
v-for="codec in availableCodecs"
:key="codec.id"
:value="codec.id"
:disabled="!isBrowserSupported(codec.id)"
:class="['text-xs', { 'opacity-50': !isBrowserSupported(codec.id) }]"
:disabled="isCodecOptionDisabled(codec.id)"
:class="['text-xs', { 'opacity-50': isCodecOptionDisabled(codec.id) }]"
>
<div class="flex items-center gap-2">
<span>{{ codec.name }}</span>
@@ -558,6 +643,9 @@ watch(currentConfig, () => {
<p v-if="props.videoMode !== 'mjpeg'" class="text-xs text-muted-foreground">
{{ t('actionbar.webrtcHint') }}
</p>
<p v-if="isCodecLocked" class="text-xs text-amber-600 dark:text-amber-400">
{{ codecLockMessage }}
</p>
</div>
<!-- Bitrate Preset - Only shown for WebRTC modes -->
@@ -624,6 +712,16 @@ watch(currentConfig, () => {
<Separator />
<div class="space-y-3">
<div
v-if="videoParamWarningMessage"
class="rounded-md border border-amber-500/30 bg-amber-500/10 px-2.5 py-2"
>
<p class="flex items-start gap-1.5 text-xs text-amber-700 dark:text-amber-300">
<AlertTriangle class="h-3.5 w-3.5 mt-0.5 shrink-0" />
<span>{{ videoParamWarningMessage }}</span>
</p>
</div>
<div class="flex items-center justify-between">
<h5 class="text-xs font-medium text-muted-foreground">{{ t('actionbar.deviceSettings') }}</h5>
<Button
@@ -655,7 +753,7 @@ watch(currentConfig, () => {
:value="device.path"
class="text-xs"
>
{{ device.name }}
{{ device.name }} ({{ device.path }})
</SelectItem>
</SelectContent>
</Select>

View File

@@ -9,6 +9,7 @@ import {
consumerKeys,
latchingKeys,
modifiers,
updateModifierMaskForHidKey,
type KeyName,
type ConsumerKeyName,
} from '@/lib/keyboardMappings'
@@ -304,9 +305,10 @@ async function onKeyDown(key: string) {
// Handle latching keys (Caps Lock, etc.)
if ((latchingKeys as readonly string[]).includes(cleanKey)) {
emit('keyDown', cleanKey)
await sendKeyPress(keyCode, true)
const currentMask = pressedModifiers.value & 0xff
await sendKeyPress(keyCode, true, currentMask)
setTimeout(() => {
sendKeyPress(keyCode, false)
sendKeyPress(keyCode, false, currentMask)
emit('keyUp', cleanKey)
}, 100)
return
@@ -318,12 +320,14 @@ async function onKeyDown(key: string) {
const isCurrentlyDown = (pressedModifiers.value & mask) !== 0
if (isCurrentlyDown) {
pressedModifiers.value &= ~mask
await sendKeyPress(keyCode, false)
const nextMask = pressedModifiers.value & ~mask
pressedModifiers.value = nextMask
await sendKeyPress(keyCode, false, nextMask)
emit('keyUp', cleanKey)
} else {
pressedModifiers.value |= mask
await sendKeyPress(keyCode, true)
const nextMask = pressedModifiers.value | mask
pressedModifiers.value = nextMask
await sendKeyPress(keyCode, true, nextMask)
emit('keyDown', cleanKey)
}
updateKeyboardButtonTheme()
@@ -333,11 +337,12 @@ async function onKeyDown(key: string) {
// Regular key: press and release
keysDown.value.push(cleanKey)
emit('keyDown', cleanKey)
await sendKeyPress(keyCode, true)
const currentMask = pressedModifiers.value & 0xff
await sendKeyPress(keyCode, true, currentMask)
updateKeyboardButtonTheme()
setTimeout(async () => {
keysDown.value = keysDown.value.filter(k => k !== cleanKey)
await sendKeyPress(keyCode, false)
await sendKeyPress(keyCode, false, currentMask)
emit('keyUp', cleanKey)
updateKeyboardButtonTheme()
}, 50)
@@ -347,16 +352,9 @@ async function onKeyUp() {
// Not used for now - we handle up in onKeyDown with setTimeout
}
async function sendKeyPress(keyCode: number, press: boolean) {
async function sendKeyPress(keyCode: number, press: boolean, modifierMask: number) {
try {
const mods = {
ctrl: (pressedModifiers.value & 0x11) !== 0,
shift: (pressedModifiers.value & 0x22) !== 0,
alt: (pressedModifiers.value & 0x44) !== 0,
meta: (pressedModifiers.value & 0x88) !== 0,
}
await hidApi.keyboard(press ? 'down' : 'up', keyCode, mods)
await hidApi.keyboard(press ? 'down' : 'up', keyCode, modifierMask & 0xff)
} catch (err) {
console.error('[VirtualKeyboard] Key send failed:', err)
}
@@ -368,16 +366,20 @@ interface MacroStep {
}
async function executeMacro(steps: MacroStep[]) {
let macroModifierMask = pressedModifiers.value & 0xff
for (const step of steps) {
for (const mod of step.modifiers) {
if (mod in keys) {
await sendKeyPress(keys[mod as KeyName], true)
const modHid = keys[mod as KeyName]
macroModifierMask = updateModifierMaskForHidKey(macroModifierMask, modHid, true)
await sendKeyPress(modHid, true, macroModifierMask)
}
}
for (const key of step.keys) {
if (key in keys) {
await sendKeyPress(keys[key as KeyName], true)
await sendKeyPress(keys[key as KeyName], true, macroModifierMask)
}
}
@@ -385,13 +387,15 @@ async function executeMacro(steps: MacroStep[]) {
for (const key of step.keys) {
if (key in keys) {
await sendKeyPress(keys[key as KeyName], false)
await sendKeyPress(keys[key as KeyName], false, macroModifierMask)
}
}
for (const mod of step.modifiers) {
if (mod in keys) {
await sendKeyPress(keys[mod as KeyName], false)
const modHid = keys[mod as KeyName]
macroModifierMask = updateModifierMaskForHidKey(macroModifierMask, modHid, false)
await sendKeyPress(modHid, false, macroModifierMask)
}
}
}

View File

@@ -5,6 +5,7 @@ import { ref, type Ref } from 'vue'
import { useI18n } from 'vue-i18n'
import { toast } from 'vue-sonner'
import { hidApi } from '@/api'
import { keyboardEventToHidCode, updateModifierMaskForHidKey } from '@/lib/keyboardMappings'
export interface HidInputState {
mouseMode: Ref<'absolute' | 'relative'>
@@ -32,6 +33,7 @@ export function useHidInput(options: UseHidInputOptions) {
numLock: false,
scrollLock: false,
})
const activeModifierMask = ref(0)
const mousePosition = ref({ x: 0, y: 0 })
const lastMousePosition = ref({ x: 0, y: 0 })
const isPointerLocked = ref(false)
@@ -83,14 +85,14 @@ export function useHidInput(options: UseHidInputOptions) {
keyboardLed.value.numLock = e.getModifierState('NumLock')
keyboardLed.value.scrollLock = e.getModifierState('ScrollLock')
const modifiers = {
ctrl: e.ctrlKey,
shift: e.shiftKey,
alt: e.altKey,
meta: e.metaKey,
const hidKey = keyboardEventToHidCode(e.code, e.key)
if (hidKey === undefined) {
return
}
hidApi.keyboard('down', e.keyCode, modifiers).catch(err => handleHidError(err, 'keyboard down'))
const modifierMask = updateModifierMaskForHidKey(activeModifierMask.value, hidKey, true)
activeModifierMask.value = modifierMask
hidApi.keyboard('down', hidKey, modifierMask).catch(err => handleHidError(err, 'keyboard down'))
}
function handleKeyUp(e: KeyboardEvent) {
@@ -107,7 +109,14 @@ export function useHidInput(options: UseHidInputOptions) {
const keyName = e.key === ' ' ? 'Space' : e.key
pressedKeys.value = pressedKeys.value.filter(k => k !== keyName)
hidApi.keyboard('up', e.keyCode).catch(err => handleHidError(err, 'keyboard up'))
const hidKey = keyboardEventToHidCode(e.code, e.key)
if (hidKey === undefined) {
return
}
const modifierMask = updateModifierMaskForHidKey(activeModifierMask.value, hidKey, false)
activeModifierMask.value = modifierMask
hidApi.keyboard('up', hidKey, modifierMask).catch(err => handleHidError(err, 'keyboard up'))
}
// Mouse handlers
@@ -233,6 +242,7 @@ export function useHidInput(options: UseHidInputOptions) {
function handleBlur() {
pressedKeys.value = []
activeModifierMask.value = 0
if (pressedMouseButton.value !== null) {
const button = pressedMouseButton.value
pressedMouseButton.value = null

View File

@@ -3,7 +3,6 @@
import { ref, onUnmounted, computed, type Ref } from 'vue'
import { webrtcApi, type IceCandidate } from '@/api'
import { generateUUID } from '@/lib/utils'
import {
type HidKeyboardEvent,
type HidMouseEvent,
@@ -15,6 +14,19 @@ import { useWebSocket } from '@/composables/useWebSocket'
export type { HidKeyboardEvent, HidMouseEvent }
export type WebRTCState = 'disconnected' | 'connecting' | 'connected' | 'failed'
export type WebRTCConnectStage =
| 'idle'
| 'fetching_ice_servers'
| 'creating_peer_connection'
| 'creating_data_channel'
| 'creating_offer'
| 'waiting_server_answer'
| 'setting_remote_description'
| 'applying_ice_candidates'
| 'waiting_connection'
| 'connected'
| 'disconnected'
| 'failed'
// ICE candidate type: host=P2P local, srflx=P2P STUN, relay=TURN relay
export type IceCandidateType = 'host' | 'srflx' | 'prflx' | 'relay' | 'unknown'
@@ -99,6 +111,7 @@ let dataChannel: RTCDataChannel | null = null
let sessionId: string | null = null
let statsInterval: number | null = null
let isConnecting = false // Lock to prevent concurrent connect calls
let connectInFlight: Promise<boolean> | null = null
let pendingIceCandidates: RTCIceCandidate[] = [] // Queue for ICE candidates before sessionId is set
let pendingRemoteCandidates: WebRTCIceCandidateEvent[] = [] // Queue for server ICE candidates
let pendingRemoteIceComplete = new Set<string>() // Session IDs waiting for end-of-candidates
@@ -131,6 +144,7 @@ const stats = ref<WebRTCStats>({
})
const error = ref<string | null>(null)
const dataChannelReady = ref(false)
const connectStage = ref<WebRTCConnectStage>('idle')
// Create RTCPeerConnection with configuration
function createPeerConnection(iceServers: RTCIceServer[]): RTCPeerConnection {
@@ -149,16 +163,19 @@ function createPeerConnection(iceServers: RTCIceServer[]): RTCPeerConnection {
break
case 'connected':
state.value = 'connected'
connectStage.value = 'connected'
error.value = null
startStatsCollection()
break
case 'disconnected':
case 'closed':
state.value = 'disconnected'
connectStage.value = 'disconnected'
stopStatsCollection()
break
case 'failed':
state.value = 'failed'
connectStage.value = 'failed'
error.value = 'Connection failed'
stopStatsCollection()
break
@@ -450,100 +467,123 @@ async function flushPendingIceCandidates() {
// Connect to WebRTC server
async function connect(): Promise<boolean> {
registerWebSocketHandlers()
// Prevent concurrent connection attempts
if (isConnecting) {
return false
if (connectInFlight) {
return connectInFlight
}
if (peerConnection && state.value === 'connected') {
return true
}
connectInFlight = (async () => {
registerWebSocketHandlers()
isConnecting = true
// Prevent concurrent connection attempts
if (isConnecting) {
return state.value === 'connected'
}
// Clean up any existing connection first
if (peerConnection || sessionId) {
await disconnect()
}
if (peerConnection && state.value === 'connected') {
return true
}
// Clear pending ICE candidates from previous attempt
pendingIceCandidates = []
isConnecting = true
// Clean up any existing connection first
if (peerConnection || sessionId) {
await disconnect()
}
// Clear pending ICE candidates from previous attempt
pendingIceCandidates = []
try {
state.value = 'connecting'
error.value = null
connectStage.value = 'fetching_ice_servers'
// Fetch ICE servers from backend API
const iceServers = await fetchIceServers()
connectStage.value = 'creating_peer_connection'
// Create peer connection with fetched ICE servers
peerConnection = createPeerConnection(iceServers)
connectStage.value = 'creating_data_channel'
// Create data channel before offer (for HID)
createDataChannel(peerConnection)
// Add transceiver for receiving video
peerConnection.addTransceiver('video', { direction: 'recvonly' })
peerConnection.addTransceiver('audio', { direction: 'recvonly' })
connectStage.value = 'creating_offer'
// Create offer
const offer = await peerConnection.createOffer()
await peerConnection.setLocalDescription(offer)
connectStage.value = 'waiting_server_answer'
// Send offer to server and get answer
// Do not pass client_id here: each connect creates a fresh session.
const response = await webrtcApi.offer(offer.sdp!)
sessionId = response.session_id
// Send any ICE candidates that were queued while waiting for sessionId
await flushPendingIceCandidates()
// Set remote description (answer)
const answer: RTCSessionDescriptionInit = {
type: 'answer',
sdp: response.sdp,
}
connectStage.value = 'setting_remote_description'
await peerConnection.setRemoteDescription(answer)
// Flush any pending server ICE candidates once remote description is set
connectStage.value = 'applying_ice_candidates'
await flushPendingRemoteIce()
// Add any ICE candidates from the response
if (response.ice_candidates && response.ice_candidates.length > 0) {
for (const candidateObj of response.ice_candidates) {
await addRemoteIceCandidate(candidateObj)
}
}
// 等待连接真正建立(最多等待 15 秒)
// 直接检查 peerConnection.connectionState 而不是 reactive state
// 因为 TypeScript 不知道 state 会被 onconnectionstatechange 回调异步修改
const connectionTimeout = 15000
const pollInterval = 100
let waited = 0
connectStage.value = 'waiting_connection'
while (waited < connectionTimeout && peerConnection) {
const pcState = peerConnection.connectionState
if (pcState === 'connected') {
connectStage.value = 'connected'
isConnecting = false
return true
}
if (pcState === 'failed' || pcState === 'closed') {
throw new Error('Connection failed during ICE negotiation')
}
await new Promise(resolve => setTimeout(resolve, pollInterval))
waited += pollInterval
}
// 超时
throw new Error('Connection timeout waiting for ICE negotiation')
} catch (err) {
state.value = 'failed'
connectStage.value = 'failed'
error.value = err instanceof Error ? err.message : 'Connection failed'
isConnecting = false
await disconnect()
return false
}
})()
try {
state.value = 'connecting'
error.value = null
// Fetch ICE servers from backend API
const iceServers = await fetchIceServers()
// Create peer connection with fetched ICE servers
peerConnection = createPeerConnection(iceServers)
// Create data channel before offer (for HID)
createDataChannel(peerConnection)
// Add transceiver for receiving video
peerConnection.addTransceiver('video', { direction: 'recvonly' })
peerConnection.addTransceiver('audio', { direction: 'recvonly' })
// Create offer
const offer = await peerConnection.createOffer()
await peerConnection.setLocalDescription(offer)
// Send offer to server and get answer
const response = await webrtcApi.offer(offer.sdp!, generateUUID())
sessionId = response.session_id
// Send any ICE candidates that were queued while waiting for sessionId
await flushPendingIceCandidates()
// Set remote description (answer)
const answer: RTCSessionDescriptionInit = {
type: 'answer',
sdp: response.sdp,
}
await peerConnection.setRemoteDescription(answer)
// Flush any pending server ICE candidates once remote description is set
await flushPendingRemoteIce()
// Add any ICE candidates from the response
if (response.ice_candidates && response.ice_candidates.length > 0) {
for (const candidateObj of response.ice_candidates) {
await addRemoteIceCandidate(candidateObj)
}
}
// 等待连接真正建立(最多等待 15 秒)
// 直接检查 peerConnection.connectionState 而不是 reactive state
// 因为 TypeScript 不知道 state 会被 onconnectionstatechange 回调异步修改
const connectionTimeout = 15000
const pollInterval = 100
let waited = 0
while (waited < connectionTimeout && peerConnection) {
const pcState = peerConnection.connectionState
if (pcState === 'connected') {
isConnecting = false
return true
}
if (pcState === 'failed' || pcState === 'closed') {
throw new Error('Connection failed during ICE negotiation')
}
await new Promise(resolve => setTimeout(resolve, pollInterval))
waited += pollInterval
}
// 超时
throw new Error('Connection timeout waiting for ICE negotiation')
} catch (err) {
state.value = 'failed'
error.value = err instanceof Error ? err.message : 'Connection failed'
isConnecting = false
disconnect()
return false
return await connectInFlight
} finally {
connectInFlight = null
}
}
@@ -583,6 +623,7 @@ async function disconnect() {
audioTrack.value = null
cachedMediaStream = null // Clear cached stream on disconnect
state.value = 'disconnected'
connectStage.value = 'disconnected'
error.value = null
// Reset stats
@@ -694,6 +735,7 @@ export function useWebRTC() {
stats,
error,
dataChannelReady,
connectStage,
sessionId: computed(() => sessionId),
// Methods

Some files were not shown because too many files have changed in this diff Show More