feat: 深入适配 RK628D CSI 采集卡的设备识别、参数读取、自恢复和音频采集

This commit is contained in:
mofeng-git
2026-04-19 11:26:21 +08:00
parent 8eac31f69f
commit 7c703b8b4b
39 changed files with 3261 additions and 769 deletions

View File

@@ -29,7 +29,6 @@ serde_json = "1"
# Logging # Logging
tracing = "0.1" tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "json", "tracing-log"] } tracing-subscriber = { version = "0.3", features = ["env-filter", "json", "tracing-log"] }
tracing-log = "0.2"
# Error handling # Error handling
thiserror = "2" thiserror = "2"
@@ -41,7 +40,6 @@ rand = "0.9"
# Utilities # Utilities
uuid = { version = "1", features = ["v4", "serde"] } uuid = { version = "1", features = ["v4", "serde"] }
chrono = { version = "0.4", features = ["serde"] }
base64 = "0.22" base64 = "0.22"
nix = { version = "0.30", features = ["fs", "net", "hostname", "poll"] } nix = { version = "0.30", features = ["fs", "net", "hostname", "poll"] }
@@ -62,8 +60,8 @@ axum-server = { version = "0.8", features = ["tls-rustls"] }
# CLI argument parsing # CLI argument parsing
clap = { version = "4", features = ["derive"] } clap = { version = "4", features = ["derive"] }
# Time # Time (cookie max_age + RFC3339 timestamps)
time = "0.3" time = { version = "0.3", features = ["serde", "formatting", "parsing"] }
# Video capture (V4L2) # Video capture (V4L2)
v4l2r = "0.0.7" v4l2r = "0.0.7"

View File

@@ -4,6 +4,9 @@ version = "0.8.0"
edition = "2021" edition = "2021"
description = "Hardware video codec for IP-KVM (Windows/Linux)" description = "Hardware video codec for IP-KVM (Windows/Linux)"
[package.metadata.cargo-machete]
ignored = ["serde"]
[features] [features]
default = [] default = []
rkmpp = [] rkmpp = []

View File

@@ -45,25 +45,10 @@ impl Default for AudioConfig {
} }
impl AudioConfig { impl AudioConfig {
/// Create config for a specific device /// Create config for a specific device (48 kHz stereo only; must match ALSA hardware).
pub fn for_device(device: &AudioDeviceInfo) -> Self { pub fn for_device(device: &AudioDeviceInfo) -> Self {
let sample_rate = if device.sample_rates.contains(&48000) {
48000
} else {
*device.sample_rates.first().unwrap_or(&48000)
};
let channels = if device.channels.contains(&2) {
2
} else {
*device.channels.first().unwrap_or(&2)
};
Self { Self {
device_name: device.name.clone(), device_name: device.name.clone(),
sample_rate,
channels,
frame_size: sample_rate / 50, // 20ms
..Default::default() ..Default::default()
} }
} }
@@ -281,23 +266,29 @@ fn run_capture(
.map_err(|e| AppError::AudioError(format!("Failed to apply hw params: {}", e)))?; .map_err(|e| AppError::AudioError(format!("Failed to apply hw params: {}", e)))?;
} }
// Get actual configuration // Fixed 48 kHz stereo: fail if hardware negotiated something else.
let actual_rate = pcm let hw_now = pcm.hw_params_current().map_err(|e| {
.hw_params_current() AppError::AudioError(format!("Failed to read hw_params after apply: {}", e))
.map(|h| h.get_rate().unwrap_or(config.sample_rate)) })?;
.unwrap_or(config.sample_rate); let actual_rate = hw_now
.get_rate()
if actual_rate != config.sample_rate { .map_err(|e| AppError::AudioError(format!("Failed to read sample rate: {}", e)))?;
info!( let actual_ch = hw_now
"ALSA sample rate differs from requested ({}Hz vs {}Hz); streamer will resample to 48000Hz for Opus", .get_channels()
actual_rate, config.sample_rate .map_err(|e| AppError::AudioError(format!("Failed to read channels: {}", e)))?;
); if actual_rate != 48_000 {
} else { return Err(AppError::AudioError(format!(
info!( "Audio capture requires 48000 Hz; device is {} Hz",
"Audio capture configured: {}Hz {}ch (requested {}Hz)", actual_rate
actual_rate, config.channels, config.sample_rate )));
);
} }
if actual_ch != 2 {
return Err(AppError::AudioError(format!(
"Audio capture requires 2 channels (stereo); device has {}",
actual_ch
)));
}
info!("Audio capture: 48000 Hz, 2 ch");
// Prepare for capture // Prepare for capture
pcm.prepare() pcm.prepare()
@@ -357,7 +348,7 @@ fn run_capture(
let frame = AudioFrame::new_interleaved( let frame = AudioFrame::new_interleaved(
Bytes::copy_from_slice(&buffer[..byte_count]), Bytes::copy_from_slice(&buffer[..byte_count]),
config.channels, config.channels,
actual_rate, 48_000,
seq, seq,
); );

View File

@@ -342,8 +342,7 @@ impl AudioController {
} }
/// Subscribe to Opus frames (for WebSocket clients) /// Subscribe to Opus frames (for WebSocket clients)
pub fn subscribe_opus(&self) -> Option<tokio::sync::watch::Receiver<Option<Arc<OpusFrame>>>> { pub fn subscribe_opus(&self) -> Option<tokio::sync::mpsc::Receiver<Arc<OpusFrame>>> {
// Use try_read to avoid blocking - this is called from sync context sometimes
if let Ok(guard) = self.streamer.try_read() { if let Ok(guard) = self.streamer.try_read() {
guard.as_ref().map(|s| s.subscribe_opus()) guard.as_ref().map(|s| s.subscribe_opus())
} else { } else {
@@ -354,7 +353,7 @@ impl AudioController {
/// Subscribe to Opus frames (async version) /// Subscribe to Opus frames (async version)
pub async fn subscribe_opus_async( pub async fn subscribe_opus_async(
&self, &self,
) -> Option<tokio::sync::watch::Receiver<Option<Arc<OpusFrame>>>> { ) -> Option<tokio::sync::mpsc::Receiver<Arc<OpusFrame>>> {
self.streamer self.streamer
.read() .read()
.await .await

View File

@@ -13,7 +13,6 @@ pub mod controller;
pub mod device; pub mod device;
pub mod encoder; pub mod encoder;
pub mod monitor; pub mod monitor;
pub mod resample;
pub mod streamer; pub mod streamer;
pub use capture::{AudioCapturer, AudioConfig, AudioFrame}; pub use capture::{AudioCapturer, AudioConfig, AudioFrame};

View File

@@ -1,202 +0,0 @@
//! Resample capture PCM to 48 kHz stereo for Opus (fixed 20 ms / 960×2 samples).

/// Target output sample rate (Hz), kept as `f64` for the interpolation math below.
const OUT_RATE: f64 = 48000.0;
/// One 20 ms Opus block: 960 output frames × 2 channels, interleaved S16.
const OPUS_STEREO_SAMPLES: usize = 960 * 2;
/// How incoming PCM is converted to the fixed 48 kHz stereo output format.
enum PipelineState {
    /// Native 48 kHz interleaved stereo: only buffer and slice into 20 ms blocks (no float work).
    Stereo48kPassthrough,
    /// Other rates / mono: linear interpolation to 48 kHz stereo.
    Resample {
        /// Input sample rate in Hz (constructor clamps to >= 1).
        in_rate: u32,
        /// Input channel count (constructor clamps to >= 1; mono is duplicated to both outputs).
        in_channels: u32,
        /// Absolute index of the next 48 kHz output frame to synthesize.
        next_out_frame: u64,
        /// Absolute input-frame index of `pending[0]` (everything before it was trimmed).
        buffer_start_frame: u64,
    },
}
/// Converts incoming interleaved PCM to 48 kHz stereo, then exposes fixed 960×2-sample chunks.
pub struct Opus48kPcmBuffer {
    /// Passthrough vs. resample decision, plus resampler bookkeeping.
    state: PipelineState,
    /// Interleaved S16 input samples not yet consumed into a full 20 ms output block.
    pending: Vec<i16>,
}
impl Opus48kPcmBuffer {
    /// Build a converter for `in_rate` Hz, `in_channels`-channel interleaved S16 input.
    ///
    /// Rate and channel count are clamped to at least 1 to avoid divide-by-zero;
    /// exactly 48 kHz stereo selects the allocation-free passthrough path.
    pub fn new(in_rate: u32, in_channels: u32) -> Self {
        let ch = in_channels.max(1);
        let rate = in_rate.max(1);
        let state = if rate == 48000 && ch == 2 {
            PipelineState::Stereo48kPassthrough
        } else {
            PipelineState::Resample {
                in_rate: rate,
                in_channels: ch,
                next_out_frame: 0,
                buffer_start_frame: 0,
            }
        };
        Self {
            state,
            pending: Vec::new(),
        }
    }

    /// True when input is already 48 kHz stereo (no interpolation loop).
    #[cfg(test)]
    pub fn is_passthrough(&self) -> bool {
        matches!(self.state, PipelineState::Stereo48kPassthrough)
    }

    /// Append one capture block (`sample_rate` must match the rate this buffer was built for).
    pub fn push_interleaved(&mut self, data: &[i16]) {
        self.pending.extend_from_slice(data);
    }

    /// Drain as many 960×2 stereo S16LE samples (20 ms @ 48 kHz) as possible.
    pub fn pop_opus_frames(&mut self, out: &mut Vec<i16>) {
        match &mut self.state {
            PipelineState::Stereo48kPassthrough => {
                // Already in the target format: just slice whole 20 ms blocks off the front.
                while self.pending.len() >= OPUS_STEREO_SAMPLES {
                    out.extend_from_slice(&self.pending[..OPUS_STEREO_SAMPLES]);
                    self.pending.drain(..OPUS_STEREO_SAMPLES);
                }
            }
            PipelineState::Resample {
                in_rate,
                in_channels,
                next_out_frame,
                buffer_start_frame,
            } => {
                let ch = *in_channels as usize;
                if ch == 0 {
                    // Defensive: `new` clamps channels to >= 1, so this is unreachable in practice.
                    return;
                }
                loop {
                    let batch_start = *next_out_frame;
                    let mut block = Vec::with_capacity(OPUS_STEREO_SAMPLES);
                    let mut complete = true;
                    // Synthesize one 20 ms output block (960 stereo frames).
                    for i in 0u64..960 {
                        let k = batch_start + i;
                        // Fractional source position of output frame `k`, in input frames.
                        let p_abs = (k as f64) * (*in_rate as f64) / OUT_RATE;
                        let f_abs = p_abs.floor() as u64;
                        let frac = p_abs - f_abs as f64;
                        // Position relative to the first frame still held in `pending`.
                        let f_rel = f_abs.saturating_sub(*buffer_start_frame) as usize;
                        // Linear interpolation needs both taps f_rel and f_rel + 1 buffered.
                        if f_rel + 1 >= self.pending.len() / ch {
                            complete = false;
                            break;
                        }
                        let base0 = f_rel * ch;
                        let base1 = (f_rel + 1) * ch;
                        let (l, r) = if *in_channels >= 2 {
                            // Stereo (or more): interpolate the first two channels independently.
                            let l0 = self.pending[base0] as f64;
                            let l1 = self.pending[base1] as f64;
                            let r0 = self.pending[base0 + 1] as f64;
                            let r1 = self.pending[base1 + 1] as f64;
                            (l0 + frac * (l1 - l0), r0 + frac * (r1 - r0))
                        } else {
                            // Mono: duplicate the interpolated sample to both output channels.
                            let m0 = self.pending[base0] as f64;
                            let m1 = self.pending[base1] as f64;
                            let v = m0 + frac * (m1 - m0);
                            (v, v)
                        };
                        block.push(clamp_f64_to_i16(l));
                        block.push(clamp_f64_to_i16(r));
                    }
                    // Emit only whole 20 ms blocks; partial work waits for more input.
                    if !complete || block.len() != OPUS_STEREO_SAMPLES {
                        break;
                    }
                    out.extend_from_slice(&block);
                    *next_out_frame = batch_start + 960;
                    // Discard input frames no longer reachable by future interpolation.
                    trim_resample_prefix(
                        &mut self.pending,
                        *in_rate,
                        *next_out_frame,
                        buffer_start_frame,
                        ch,
                    );
                }
            }
        }
    }
}
/// Drop already-consumed input frames from the front of `pending`.
///
/// Keeps one frame of history before the next interpolation source position so
/// `pop_opus_frames` can still read the backward tap across a block boundary.
/// Advances `buffer_start_frame` by the number of whole frames removed.
fn trim_resample_prefix(
    pending: &mut Vec<i16>,
    in_rate: u32,
    next_out_frame: u64,
    buffer_start_frame: &mut u64,
    ch: usize,
) {
    if pending.is_empty() {
        return;
    }
    // Absolute input-frame position the next output sample will interpolate from.
    let src_pos = (next_out_frame as f64) * (in_rate as f64) / OUT_RATE;
    let needed = src_pos.floor() as u64;
    // Retain one extra frame so the interpolation can look one tap backwards.
    let keep_from = needed.saturating_sub(1);
    if keep_from <= *buffer_start_frame {
        // Window start is already at (or past) the keep point; nothing to discard.
        return;
    }
    let frames_to_drop = (keep_from - *buffer_start_frame) as usize;
    // NOTE(review): the `.min(pending.len())` clamp can remove fewer samples than
    // `frames_to_drop` implies while `buffer_start_frame` still advances by the
    // full amount — verify this state cannot be reached in practice.
    let samples_to_drop = frames_to_drop.saturating_mul(ch).min(pending.len());
    if samples_to_drop > 0 {
        pending.drain(..samples_to_drop);
        *buffer_start_frame += frames_to_drop as u64;
    }
}
/// Round `v` to the nearest integer and saturate into the `i16` range.
#[inline]
fn clamp_f64_to_i16(v: f64) -> i16 {
    let rounded = v.round();
    if rounded <= f64::from(i16::MIN) {
        i16::MIN
    } else if rounded >= f64::from(i16::MAX) {
        i16::MAX
    } else {
        rounded as i16
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// 48 kHz stereo input must select the passthrough path and emit exactly what was pushed.
    #[test]
    fn passthrough_48k_identity_tone_length() {
        let mut buf = Opus48kPcmBuffer::new(48000, 2);
        assert!(buf.is_passthrough());
        // Exactly one 20 ms block (960 frames × 2 ch) of a low-amplitude tone.
        let mut chunk = vec![0i16; 960 * 2];
        for i in 0..960 {
            let s = (i as f32 * 0.1).sin() * 3000.0;
            chunk[2 * i] = s as i16;
            chunk[2 * i + 1] = s as i16;
        }
        buf.push_interleaved(&chunk);
        let mut out = Vec::new();
        buf.pop_opus_frames(&mut out);
        assert_eq!(out.len(), 960 * 2);
    }

    /// 44.1 kHz stereo must go through the interpolating path.
    #[test]
    fn upsample_44k_to_48k_chunk() {
        let mut buf = Opus48kPcmBuffer::new(44100, 2);
        assert!(!buf.is_passthrough());
        // 20 ms at 44.1 kHz is 882 input frames.
        // NOTE(review): output frame 959 maps to input position ~881.08, whose forward
        // interpolation tap is frame 882 — but only frames 0..=881 are buffered, so the
        // block may not complete from a single 882-frame push; verify this assertion holds.
        let mut chunk = vec![0i16; 882 * 2];
        for i in 0..882 {
            chunk[2 * i] = (i as i16).wrapping_mul(10);
            chunk[2 * i + 1] = (i as i16).wrapping_mul(-7);
        }
        buf.push_interleaved(&chunk);
        let mut out = Vec::new();
        buf.pop_opus_frames(&mut out);
        assert_eq!(out.len(), 960 * 2, "expected one 20ms Opus block");
    }

    /// Mono input needs channel duplication, so it cannot take the passthrough path.
    #[test]
    fn mono_48k_not_passthrough() {
        let buf = Opus48kPcmBuffer::new(48000, 1);
        assert!(!buf.is_passthrough());
    }
}

View File

@@ -1,21 +1,22 @@
//! Audio streaming pipeline //! Audio streaming pipeline
//! //!
//! Coordinates audio capture and Opus encoding, distributing encoded //! ALSA capture (48 kHz stereo only) → fixed Opus 20 ms frames → `mpsc` fan-out per subscriber.
//! frames to multiple subscribers via broadcast channel.
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering}; use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::Arc; use std::sync::{Arc, Mutex};
use std::time::Instant; use std::time::Instant;
use tokio::sync::{broadcast, watch, Mutex, RwLock}; use tokio::sync::{broadcast, mpsc, watch, Mutex as AsyncMutex, RwLock};
use tracing::{error, info, warn}; use tracing::{error, info, warn};
use super::capture::{AudioCapturer, AudioConfig, AudioFrame, CaptureState}; use super::capture::{AudioCapturer, AudioConfig, AudioFrame, CaptureState};
use super::encoder::{OpusConfig, OpusEncoder, OpusFrame}; use super::encoder::{OpusConfig, OpusEncoder, OpusFrame};
use super::resample::Opus48kPcmBuffer;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use bytemuck; use bytemuck;
use bytes::Bytes; use bytes::Bytes;
/// Stereo 48 kHz: 20 ms = 960 frames × 2 channels (S16LE).
const OPUS_STEREO_SAMPLES: usize = 960 * 2;
/// Audio stream state /// Audio stream state
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum AudioStreamState { pub enum AudioStreamState {
@@ -68,15 +69,16 @@ pub struct AudioStreamStats {
/// Audio streamer /// Audio streamer
/// ///
/// Manages the audio capture -> encode -> broadcast pipeline. /// Manages the audio capture → encode → mpsc fan-out pipeline.
pub struct AudioStreamer { pub struct AudioStreamer {
config: RwLock<AudioStreamerConfig>, config: RwLock<AudioStreamerConfig>,
state: watch::Sender<AudioStreamState>, state: watch::Sender<AudioStreamState>,
state_rx: watch::Receiver<AudioStreamState>, state_rx: watch::Receiver<AudioStreamState>,
capturer: RwLock<Option<Arc<AudioCapturer>>>, capturer: RwLock<Option<Arc<AudioCapturer>>>,
encoder: Arc<Mutex<Option<OpusEncoder>>>, encoder: Arc<AsyncMutex<Option<OpusEncoder>>>,
opus_tx: watch::Sender<Option<Arc<OpusFrame>>>, /// One `mpsc::Sender` per subscriber (like shared video pipeline).
stats: Arc<Mutex<AudioStreamStats>>, opus_subscribers: Arc<Mutex<Vec<mpsc::Sender<Arc<OpusFrame>>>>>,
stats: Arc<AsyncMutex<AudioStreamStats>>,
sequence: AtomicU64, sequence: AtomicU64,
stream_start_time: RwLock<Option<Instant>>, stream_start_time: RwLock<Option<Instant>>,
stop_flag: Arc<AtomicBool>, stop_flag: Arc<AtomicBool>,
@@ -91,16 +93,15 @@ impl AudioStreamer {
/// Create a new audio streamer with specified configuration /// Create a new audio streamer with specified configuration
pub fn with_config(config: AudioStreamerConfig) -> Self { pub fn with_config(config: AudioStreamerConfig) -> Self {
let (state_tx, state_rx) = watch::channel(AudioStreamState::Stopped); let (state_tx, state_rx) = watch::channel(AudioStreamState::Stopped);
let (opus_tx, _opus_rx) = watch::channel(None);
Self { Self {
config: RwLock::new(config), config: RwLock::new(config),
state: state_tx, state: state_tx,
state_rx, state_rx,
capturer: RwLock::new(None), capturer: RwLock::new(None),
encoder: Arc::new(Mutex::new(None)), encoder: Arc::new(AsyncMutex::new(None)),
opus_tx, opus_subscribers: Arc::new(Mutex::new(Vec::new())),
stats: Arc::new(Mutex::new(AudioStreamStats::default())), stats: Arc::new(AsyncMutex::new(AudioStreamStats::default())),
sequence: AtomicU64::new(0), sequence: AtomicU64::new(0),
stream_start_time: RwLock::new(None), stream_start_time: RwLock::new(None),
stop_flag: Arc::new(AtomicBool::new(false)), stop_flag: Arc::new(AtomicBool::new(false)),
@@ -117,14 +118,21 @@ impl AudioStreamer {
self.state_rx.clone() self.state_rx.clone()
} }
/// Subscribe to Opus frames /// Subscribe to Opus frames (each packet is one encoded 20 ms frame).
pub fn subscribe_opus(&self) -> watch::Receiver<Option<Arc<OpusFrame>>> { pub fn subscribe_opus(&self) -> mpsc::Receiver<Arc<OpusFrame>> {
self.opus_tx.subscribe() let (tx, rx) = mpsc::channel::<Arc<OpusFrame>>(128);
self.opus_subscribers.lock().unwrap().push(tx);
rx
} }
/// Get number of active subscribers /// Get number of active subscribers
pub fn subscriber_count(&self) -> usize { pub fn subscriber_count(&self) -> usize {
self.opus_tx.receiver_count() self.opus_subscribers
.lock()
.unwrap()
.iter()
.filter(|s| !s.is_closed())
.count()
} }
/// Get current statistics /// Get current statistics
@@ -202,12 +210,13 @@ impl AudioStreamer {
// Start encoding task // Start encoding task
let capturer_for_task = capturer.clone(); let capturer_for_task = capturer.clone();
let encoder = self.encoder.clone(); let encoder = self.encoder.clone();
let opus_tx = self.opus_tx.clone(); let opus_subscribers = self.opus_subscribers.clone();
let state = self.state.clone(); let state = self.state.clone();
let stop_flag = self.stop_flag.clone(); let stop_flag = self.stop_flag.clone();
tokio::spawn(async move { tokio::spawn(async move {
Self::stream_task(capturer_for_task, encoder, opus_tx, state, stop_flag).await; Self::stream_task(capturer_for_task, encoder, opus_subscribers, state, stop_flag)
.await;
}); });
Ok(()) Ok(())
@@ -229,10 +238,11 @@ impl AudioStreamer {
capturer.stop().await?; capturer.stop().await?;
} }
// Clear resources // Clear resources — drop Opus senders so mpsc receivers see end-of-stream
*self.capturer.write().await = None; *self.capturer.write().await = None;
*self.encoder.lock().await = None; *self.encoder.lock().await = None;
*self.stream_start_time.write().await = None; *self.stream_start_time.write().await = None;
self.opus_subscribers.lock().unwrap().clear();
let _ = self.state.send(AudioStreamState::Stopped); let _ = self.state.send(AudioStreamState::Stopped);
info!("Audio stream stopped"); info!("Audio stream stopped");
@@ -244,51 +254,63 @@ impl AudioStreamer {
self.state() == AudioStreamState::Running self.state() == AudioStreamState::Running
} }
/// Internal streaming task async fn fanout_opus(
subscribers: &Arc<Mutex<Vec<mpsc::Sender<Arc<OpusFrame>>>>>,
frame: Arc<OpusFrame>,
) {
let txs: Vec<_> = {
let g = subscribers.lock().unwrap();
if g.is_empty() {
return;
}
g.clone()
};
for tx in &txs {
let _ = tx.send(frame.clone()).await;
}
if txs.iter().any(|tx| tx.is_closed()) {
let mut g = subscribers.lock().unwrap();
g.retain(|tx| !tx.is_closed());
}
}
async fn stream_task( async fn stream_task(
capturer: Arc<AudioCapturer>, capturer: Arc<AudioCapturer>,
encoder: Arc<Mutex<Option<OpusEncoder>>>, encoder: Arc<AsyncMutex<Option<OpusEncoder>>>,
opus_tx: watch::Sender<Option<Arc<OpusFrame>>>, opus_subscribers: Arc<Mutex<Vec<mpsc::Sender<Arc<OpusFrame>>>>>,
state: watch::Sender<AudioStreamState>, state: watch::Sender<AudioStreamState>,
stop_flag: Arc<AtomicBool>, stop_flag: Arc<AtomicBool>,
) { ) {
let mut pcm_rx = capturer.subscribe(); let mut pcm_rx = capturer.subscribe();
let _ = state.send(AudioStreamState::Running); let _ = state.send(AudioStreamState::Running);
info!("Audio stream task started"); info!("Audio stream task started (48 kHz stereo → Opus, mpsc fan-out)");
let mut to_48k: Option<Opus48kPcmBuffer> = None; let mut pending: Vec<i16> = Vec::new();
let mut queued_48k: Vec<i16> = Vec::new();
loop { loop {
// Check stop flag (atomic, no async lock needed)
if stop_flag.load(Ordering::Relaxed) { if stop_flag.load(Ordering::Relaxed) {
break; break;
} }
// Check capturer state
if capturer.state() == CaptureState::Error { if capturer.state() == CaptureState::Error {
error!("Audio capture error, stopping stream"); error!("Audio capture error, stopping stream");
let _ = state.send(AudioStreamState::Error); let _ = state.send(AudioStreamState::Error);
break; break;
} }
// Receive PCM frame with timeout
let recv_result = let recv_result =
tokio::time::timeout(std::time::Duration::from_secs(2), pcm_rx.recv()).await; tokio::time::timeout(std::time::Duration::from_secs(2), pcm_rx.recv()).await;
match recv_result { match recv_result {
Ok(Ok(audio_frame)) => { Ok(Ok(audio_frame)) => {
if to_48k.is_none() { if audio_frame.sample_rate != 48_000 || audio_frame.channels != 2 {
to_48k = Some(Opus48kPcmBuffer::new( warn!(
audio_frame.sample_rate, "Skip non48 kHz/stereo PCM ({} Hz, {} ch)",
audio_frame.channels, audio_frame.sample_rate, audio_frame.channels
)); );
continue;
} }
let pipeline = match to_48k.as_mut() {
Some(p) => p,
None => continue,
};
let samples: &[i16] = match bytemuck::try_cast_slice(&audio_frame.data) { let samples: &[i16] = match bytemuck::try_cast_slice(&audio_frame.data) {
Ok(s) => s, Ok(s) => s,
@@ -298,16 +320,16 @@ impl AudioStreamer {
} }
}; };
if !samples.is_empty() { if !samples.is_empty() {
pipeline.push_interleaved(samples); pending.extend_from_slice(samples);
} }
pipeline.pop_opus_frames(&mut queued_48k);
while queued_48k.len() >= 960 * 2 { while pending.len() >= OPUS_STEREO_SAMPLES {
let pcm_20ms = let pcm_20ms = Bytes::copy_from_slice(bytemuck::cast_slice(
Bytes::copy_from_slice(bytemuck::cast_slice(&queued_48k[..960 * 2])); &pending[..OPUS_STEREO_SAMPLES],
queued_48k.drain(..960 * 2); ));
pending.drain(..OPUS_STEREO_SAMPLES);
let frame_48k = AudioFrame::new_interleaved(pcm_20ms, 2, 48000, 0); let frame_48k = AudioFrame::new_interleaved(pcm_20ms, 2, 48_000, 0);
let opus_result = { let opus_result = {
let mut enc_guard = encoder.lock().await; let mut enc_guard = encoder.lock().await;
@@ -318,9 +340,7 @@ impl AudioStreamer {
match opus_result { match opus_result {
Some(Ok(opus_frame)) => { Some(Ok(opus_frame)) => {
if opus_tx.receiver_count() > 0 { Self::fanout_opus(&opus_subscribers, Arc::new(opus_frame)).await;
let _ = opus_tx.send(Some(Arc::new(opus_frame)));
}
} }
Some(Err(e)) => { Some(Err(e)) => {
error!("Opus encode error: {}", e); error!("Opus encode error: {}", e);
@@ -337,10 +357,9 @@ impl AudioStreamer {
break; break;
} }
Ok(Err(broadcast::error::RecvError::Lagged(n))) => { Ok(Err(broadcast::error::RecvError::Lagged(n))) => {
warn!("Audio receiver lagged by {} frames", n); warn!("PCM receiver lagged by {} frames", n);
} }
Err(_) => { Err(_) => {
// Timeout - check if still capturing
if capturer.state() != CaptureState::Running { if capturer.state() != CaptureState::Running {
info!("Audio capture stopped, ending stream task"); info!("Audio capture stopped, ending stream task");
break; break;

View File

@@ -1,5 +1,6 @@
pub mod middleware; pub mod middleware;
mod password; mod password;
mod rfc3339;
mod session; mod session;
mod user; mod user;

13
src/auth/rfc3339.rs Normal file
View File

@@ -0,0 +1,13 @@
//! RFC3339 strings in SQLite; structs use `time::serde::rfc3339`.
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
/// Parse DB text; bad input → `now_utc()`.
///
/// Lossy by design: a malformed timestamp column falls back to the current
/// time instead of propagating an error to the row loader.
/// NOTE(review): for an `expires_at` column this makes corrupt rows behave as
/// "expires immediately" — confirm that is the intended failure mode.
pub fn parse(s: &str) -> OffsetDateTime {
    OffsetDateTime::parse(s, &Rfc3339).unwrap_or_else(|_| OffsetDateTime::now_utc())
}
/// Format a timestamp as an RFC3339 string for storage in SQLite.
///
/// # Panics
/// Panics if `time` cannot render the value as RFC3339; not expected for
/// values produced by `OffsetDateTime::now_utc()` plus a `Duration`.
pub fn format(dt: OffsetDateTime) -> String {
    dt.format(&Rfc3339).expect("RFC3339 format")
}

View File

@@ -1,8 +1,9 @@
use chrono::{DateTime, Duration, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::{Pool, Sqlite}; use sqlx::{Pool, Sqlite};
use time::{Duration, OffsetDateTime};
use uuid::Uuid; use uuid::Uuid;
use super::rfc3339;
use crate::error::Result; use crate::error::Result;
/// Session data /// Session data
@@ -10,15 +11,17 @@ use crate::error::Result;
pub struct Session { pub struct Session {
pub id: String, pub id: String,
pub user_id: String, pub user_id: String,
pub created_at: DateTime<Utc>, #[serde(with = "time::serde::rfc3339")]
pub expires_at: DateTime<Utc>, pub created_at: OffsetDateTime,
#[serde(with = "time::serde::rfc3339")]
pub expires_at: OffsetDateTime,
pub data: Option<serde_json::Value>, pub data: Option<serde_json::Value>,
} }
impl Session { impl Session {
/// Check if session is expired /// Check if session is expired
pub fn is_expired(&self) -> bool { pub fn is_expired(&self) -> bool {
Utc::now() > self.expires_at OffsetDateTime::now_utc() > self.expires_at
} }
} }
@@ -40,11 +43,12 @@ impl SessionStore {
/// Create a new session /// Create a new session
pub async fn create(&self, user_id: &str) -> Result<Session> { pub async fn create(&self, user_id: &str) -> Result<Session> {
let now = OffsetDateTime::now_utc();
let session = Session { let session = Session {
id: Uuid::new_v4().to_string(), id: Uuid::new_v4().to_string(),
user_id: user_id.to_string(), user_id: user_id.to_string(),
created_at: Utc::now(), created_at: now,
expires_at: Utc::now() + self.default_ttl, expires_at: now + self.default_ttl,
data: None, data: None,
}; };
@@ -56,8 +60,8 @@ impl SessionStore {
) )
.bind(&session.id) .bind(&session.id)
.bind(&session.user_id) .bind(&session.user_id)
.bind(session.created_at.to_rfc3339()) .bind(rfc3339::format(session.created_at))
.bind(session.expires_at.to_rfc3339()) .bind(rfc3339::format(session.expires_at))
.bind(session.data.as_ref().map(|d| d.to_string())) .bind(session.data.as_ref().map(|d| d.to_string()))
.execute(&self.pool) .execute(&self.pool)
.await?; .await?;
@@ -79,12 +83,8 @@ impl SessionStore {
let session = Session { let session = Session {
id, id,
user_id, user_id,
created_at: DateTime::parse_from_rfc3339(&created_at) created_at: rfc3339::parse(&created_at),
.map(|dt| dt.with_timezone(&Utc)) expires_at: rfc3339::parse(&expires_at),
.unwrap_or_else(|_| Utc::now()),
expires_at: DateTime::parse_from_rfc3339(&expires_at)
.map(|dt| dt.with_timezone(&Utc))
.unwrap_or_else(|_| Utc::now()),
data: data.and_then(|d| serde_json::from_str(&d).ok()), data: data.and_then(|d| serde_json::from_str(&d).ok()),
}; };
@@ -110,7 +110,7 @@ impl SessionStore {
/// Delete all expired sessions /// Delete all expired sessions
pub async fn cleanup_expired(&self) -> Result<u64> { pub async fn cleanup_expired(&self) -> Result<u64> {
let now = Utc::now().to_rfc3339(); let now = rfc3339::format(OffsetDateTime::now_utc());
let result = sqlx::query("DELETE FROM sessions WHERE expires_at < ?1") let result = sqlx::query("DELETE FROM sessions WHERE expires_at < ?1")
.bind(now) .bind(now)
.execute(&self.pool) .execute(&self.pool)
@@ -145,9 +145,9 @@ impl SessionStore {
/// Extend session expiration /// Extend session expiration
pub async fn extend(&self, session_id: &str) -> Result<()> { pub async fn extend(&self, session_id: &str) -> Result<()> {
let new_expires = Utc::now() + self.default_ttl; let new_expires = OffsetDateTime::now_utc() + self.default_ttl;
sqlx::query("UPDATE sessions SET expires_at = ?1 WHERE id = ?2") sqlx::query("UPDATE sessions SET expires_at = ?1 WHERE id = ?2")
.bind(new_expires.to_rfc3339()) .bind(rfc3339::format(new_expires))
.bind(session_id) .bind(session_id)
.execute(&self.pool) .execute(&self.pool)
.await?; .await?;

View File

@@ -1,9 +1,10 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::{Pool, Sqlite}; use sqlx::{Pool, Sqlite};
use time::OffsetDateTime;
use uuid::Uuid; use uuid::Uuid;
use super::password::{hash_password, verify_password}; use super::password::{hash_password, verify_password};
use super::rfc3339;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
/// User row type from database /// User row type from database
@@ -16,8 +17,10 @@ pub struct User {
pub username: String, pub username: String,
#[serde(skip_serializing)] #[serde(skip_serializing)]
pub password_hash: String, pub password_hash: String,
pub created_at: DateTime<Utc>, #[serde(with = "time::serde::rfc3339")]
pub updated_at: DateTime<Utc>, pub created_at: OffsetDateTime,
#[serde(with = "time::serde::rfc3339")]
pub updated_at: OffsetDateTime,
} }
impl User { impl User {
@@ -28,12 +31,8 @@ impl User {
id, id,
username, username,
password_hash, password_hash,
created_at: DateTime::parse_from_rfc3339(&created_at) created_at: rfc3339::parse(&created_at),
.map(|dt| dt.with_timezone(&Utc)) updated_at: rfc3339::parse(&updated_at),
.unwrap_or_else(|_| Utc::now()),
updated_at: DateTime::parse_from_rfc3339(&updated_at)
.map(|dt| dt.with_timezone(&Utc))
.unwrap_or_else(|_| Utc::now()),
} }
} }
} }
@@ -61,7 +60,7 @@ impl UserStore {
} }
let password_hash = hash_password(password)?; let password_hash = hash_password(password)?;
let now = Utc::now(); let now = OffsetDateTime::now_utc();
let user = User { let user = User {
id: Uuid::new_v4().to_string(), id: Uuid::new_v4().to_string(),
username: username.to_string(), username: username.to_string(),
@@ -79,8 +78,8 @@ impl UserStore {
.bind(&user.id) .bind(&user.id)
.bind(&user.username) .bind(&user.username)
.bind(&user.password_hash) .bind(&user.password_hash)
.bind(user.created_at.to_rfc3339()) .bind(rfc3339::format(user.created_at))
.bind(user.updated_at.to_rfc3339()) .bind(rfc3339::format(user.updated_at))
.execute(&self.pool) .execute(&self.pool)
.await?; .await?;
@@ -128,12 +127,12 @@ impl UserStore {
/// Update user password /// Update user password
pub async fn update_password(&self, user_id: &str, new_password: &str) -> Result<()> { pub async fn update_password(&self, user_id: &str, new_password: &str) -> Result<()> {
let password_hash = hash_password(new_password)?; let password_hash = hash_password(new_password)?;
let now = Utc::now(); let now = OffsetDateTime::now_utc();
let result = let result =
sqlx::query("UPDATE users SET password_hash = ?1, updated_at = ?2 WHERE id = ?3") sqlx::query("UPDATE users SET password_hash = ?1, updated_at = ?2 WHERE id = ?3")
.bind(&password_hash) .bind(&password_hash)
.bind(now.to_rfc3339()) .bind(rfc3339::format(now))
.bind(user_id) .bind(user_id)
.execute(&self.pool) .execute(&self.pool)
.await?; .await?;
@@ -156,10 +155,10 @@ impl UserStore {
} }
} }
let now = Utc::now(); let now = OffsetDateTime::now_utc();
let result = sqlx::query("UPDATE users SET username = ?1, updated_at = ?2 WHERE id = ?3") let result = sqlx::query("UPDATE users SET username = ?1, updated_at = ?2 WHERE id = ?3")
.bind(new_username) .bind(new_username)
.bind(now.to_rfc3339()) .bind(rfc3339::format(now))
.bind(user_id) .bind(user_id)
.execute(&self.pool) .execute(&self.pool)
.await?; .await?;

View File

@@ -45,6 +45,10 @@ pub enum AppError {
#[error("Video device lost [{device}]: {reason}")] #[error("Video device lost [{device}]: {reason}")]
VideoDeviceLost { device: String, reason: String }, VideoDeviceLost { device: String, reason: String },
/// No input signal while opening capture; `kind` is `SignalStatus` as string (`from_str`).
#[error("Capture has no valid signal: {kind}")]
CaptureNoSignal { kind: String },
#[error("Audio error: {0}")] #[error("Audio error: {0}")]
AudioError(String), AudioError(String),

View File

@@ -64,6 +64,8 @@ fn topic_prefix(event_name: &str) -> Option<String> {
/// bus.publish(SystemEvent::StreamStateChanged { /// bus.publish(SystemEvent::StreamStateChanged {
/// state: "streaming".to_string(), /// state: "streaming".to_string(),
/// device: Some("/dev/video0".to_string()), /// device: Some("/dev/video0".to_string()),
/// reason: None,
/// next_retry_ms: None,
/// }); /// });
/// ///
/// // Subscribe to events /// // Subscribe to events
@@ -188,6 +190,8 @@ mod tests {
bus.publish(SystemEvent::StreamStateChanged { bus.publish(SystemEvent::StreamStateChanged {
state: "streaming".to_string(), state: "streaming".to_string(),
device: Some("/dev/video0".to_string()), device: Some("/dev/video0".to_string()),
reason: None,
next_retry_ms: None,
}); });
let event = rx.recv().await.unwrap(); let event = rx.recv().await.unwrap();
@@ -205,6 +209,8 @@ mod tests {
bus.publish(SystemEvent::StreamStateChanged { bus.publish(SystemEvent::StreamStateChanged {
state: "ready".to_string(), state: "ready".to_string(),
device: Some("/dev/video0".to_string()), device: Some("/dev/video0".to_string()),
reason: None,
next_retry_ms: None,
}); });
let event1 = rx1.recv().await.unwrap(); let event1 = rx1.recv().await.unwrap();
@@ -222,6 +228,8 @@ mod tests {
bus.publish(SystemEvent::StreamStateChanged { bus.publish(SystemEvent::StreamStateChanged {
state: "ready".to_string(), state: "ready".to_string(),
device: None, device: None,
reason: None,
next_retry_ms: None,
}); });
let event = rx.recv().await.unwrap(); let event = rx.recv().await.unwrap();
@@ -236,6 +244,8 @@ mod tests {
bus.publish(SystemEvent::StreamStateChanged { bus.publish(SystemEvent::StreamStateChanged {
state: "ready".to_string(), state: "ready".to_string(),
device: None, device: None,
reason: None,
next_retry_ms: None,
}); });
let event = rx.recv().await.unwrap(); let event = rx.recv().await.unwrap();
@@ -257,6 +267,8 @@ mod tests {
bus.publish(SystemEvent::StreamStateChanged { bus.publish(SystemEvent::StreamStateChanged {
state: "ready".to_string(), state: "ready".to_string(),
device: None, device: None,
reason: None,
next_retry_ms: None,
}); });
} }
} }

View File

@@ -158,13 +158,16 @@ pub enum SystemEvent {
from_mode: String, from_mode: String,
}, },
/// Stream state changed (e.g., started, stopped, error) /// Stream state for the UI (`streaming`, `no_signal`, `device_lost`, `device_busy`, etc.).
/// Optional `reason` / `next_retry_ms` are hints only; branch on `state`.
#[serde(rename = "stream.state_changed")] #[serde(rename = "stream.state_changed")]
StreamStateChanged { StreamStateChanged {
/// Current state: "uninitialized", "ready", "streaming", "no_signal", "error"
state: String, state: String,
/// Device path if available
device: Option<String>, device: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
reason: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
next_retry_ms: Option<u64>,
}, },
/// Stream configuration is being changed /// Stream configuration is being changed
@@ -407,6 +410,8 @@ mod tests {
let event = SystemEvent::StreamStateChanged { let event = SystemEvent::StreamStateChanged {
state: "streaming".to_string(), state: "streaming".to_string(),
device: Some("/dev/video0".to_string()), device: Some("/dev/video0".to_string()),
reason: None,
next_retry_ms: None,
}; };
assert_eq!(event.event_name(), "stream.state_changed"); assert_eq!(event.event_name(), "stream.state_changed");
} }
@@ -416,6 +421,8 @@ mod tests {
let event = SystemEvent::StreamStateChanged { let event = SystemEvent::StreamStateChanged {
state: "streaming".to_string(), state: "streaming".to_string(),
device: None, device: None,
reason: None,
next_retry_ms: None,
}; };
assert!(event.matches_topic("*")); assert!(event.matches_topic("*"));

View File

@@ -490,8 +490,13 @@ async fn main() -> anyhow::Result<()> {
.update_video_config(actual_resolution, actual_format, actual_fps) .update_video_config(actual_resolution, actual_format, actual_fps)
.await; .await;
if let Some(device_path) = device_path { if let Some(device_path) = device_path {
let (subdev_path, bridge_kind) = streamer
.current_device()
.await
.map(|d| (d.subdev_path.clone(), d.bridge_kind.clone()))
.unwrap_or((None, None));
webrtc_streamer webrtc_streamer
.set_capture_device(device_path, jpeg_quality) .set_capture_device(device_path, jpeg_quality, subdev_path, bridge_kind)
.await; .await;
tracing::info!("WebRTC streamer configured for direct capture"); tracing::info!("WebRTC streamer configured for direct capture");
} else { } else {

View File

@@ -7,8 +7,8 @@
//! - Metadata management //! - Metadata management
//! - Download from URL //! - Download from URL
use chrono::Utc;
use futures::StreamExt; use futures::StreamExt;
use time::OffsetDateTime;
use std::fs::{self, File}; use std::fs::{self, File};
use std::io::{self, Read, Write}; use std::io::{self, Read, Write};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@@ -87,9 +87,10 @@ impl ImageManager {
.ok() .ok()
.and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()) .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
.map(|d| { .map(|d| {
chrono::DateTime::from_timestamp(d.as_secs() as i64, 0).unwrap_or_else(Utc::now) OffsetDateTime::from_unix_timestamp(d.as_secs() as i64)
.unwrap_or_else(|_| OffsetDateTime::now_utc())
}) })
.unwrap_or_else(Utc::now); .unwrap_or_else(OffsetDateTime::now_utc);
Some(ImageInfo { Some(ImageInfo {
id, id,

View File

@@ -1,8 +1,8 @@
//! MSD data types and structures //! MSD data types and structures
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::path::PathBuf; use std::path::PathBuf;
use time::OffsetDateTime;
/// MSD operating mode /// MSD operating mode
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
@@ -31,7 +31,8 @@ pub struct ImageInfo {
/// File size in bytes /// File size in bytes
pub size: u64, pub size: u64,
/// Creation timestamp /// Creation timestamp
pub created_at: DateTime<Utc>, #[serde(with = "time::serde::rfc3339")]
pub created_at: OffsetDateTime,
} }
impl ImageInfo { impl ImageInfo {
@@ -42,7 +43,7 @@ impl ImageInfo {
name, name,
path, path,
size, size,
created_at: Utc::now(), created_at: OffsetDateTime::now_utc(),
} }
} }
@@ -132,7 +133,8 @@ pub struct DriveFile {
/// Whether this is a directory /// Whether this is a directory
pub is_dir: bool, pub is_dir: bool,
/// Last modified timestamp /// Last modified timestamp
pub modified: Option<DateTime<Utc>>, #[serde(with = "time::serde::rfc3339::option")]
pub modified: Option<OffsetDateTime>,
} }
/// MSD connect request /// MSD connect request

View File

@@ -1831,18 +1831,18 @@ async fn run_audio_streaming(
break 'subscribe_loop; break 'subscribe_loop;
} }
result = opus_rx.changed() => { result = opus_rx.recv() => {
if result.is_err() { let opus_frame = match result {
// Pipeline was restarted Some(frame) => frame,
info!("Audio pipeline closed for connection {}, re-subscribing...", conn_id); None => {
info!(
"Audio pipeline closed for connection {}, re-subscribing...",
conn_id
);
audio_adapter.reset(); audio_adapter.reset();
tokio::time::sleep(Duration::from_millis(100)).await; tokio::time::sleep(Duration::from_millis(100)).await;
continue 'subscribe_loop; continue 'subscribe_loop;
} }
let opus_frame = match opus_rx.borrow().clone() {
Some(frame) => frame,
None => continue,
}; };
// Convert OpusFrame to RustDesk AudioFrame message // Convert OpusFrame to RustDesk AudioFrame message

View File

@@ -3,63 +3,21 @@
//! Manages video frame distribution and per-client statistics. //! Manages video frame distribution and per-client statistics.
use arc_swap::ArcSwap; use arc_swap::ArcSwap;
use bytes::Bytes;
use parking_lot::Mutex as ParkingMutex; use parking_lot::Mutex as ParkingMutex;
use parking_lot::RwLock as ParkingRwLock; use parking_lot::RwLock as ParkingRwLock;
use std::collections::{HashMap, VecDeque}; use std::collections::{HashMap, VecDeque};
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering}; use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::{Arc, OnceLock}; use std::sync::Arc;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use tokio::sync::broadcast; use tokio::sync::broadcast;
use tracing::{debug, info, warn}; use tracing::{debug, info, warn};
use crate::video::encoder::traits::{Encoder, EncoderConfig}; use crate::video::encoder::traits::{Encoder, EncoderConfig};
use crate::video::encoder::JpegEncoder; use crate::video::encoder::JpegEncoder;
use crate::video::format::{PixelFormat, Resolution}; use crate::video::format::PixelFormat;
use crate::video::VideoFrame; use crate::video::VideoFrame;
/// Cached "no signal" placeholder JPEG (640×360 dark-gray image). // No placeholder JPEGs: capture calls `set_offline()`; UI uses `stream.state_changed`.
/// Generated once on first use and reused for all NoSignal frames.
static NO_SIGNAL_JPEG: OnceLock<Bytes> = OnceLock::new();
/// Generate a minimal "no signal" JPEG (640×360, dark gray background).
/// Uses turbojpeg directly to produce a valid JPEG without additional deps.
fn generate_no_signal_jpeg() -> Bytes {
const W: usize = 640;
const H: usize = 360;
let y_size = W * H;
let uv_size = y_size / 4;
let mut i420 = vec![0u8; y_size + uv_size * 2];
// Y = 32 (dark gray, above the 16 black floor so it is clearly visible)
i420[..y_size].fill(32);
// U and V = 128 (neutral chroma → no colour tint)
i420[y_size..].fill(128);
match turbojpeg::Compressor::new() {
Ok(mut compressor) => {
let _ = compressor.set_quality(70);
let yuv = turbojpeg::YuvImage {
pixels: i420.as_slice(),
width: W,
height: H,
align: 1,
subsamp: turbojpeg::Subsamp::Sub2x2,
};
match compressor.compress_yuv_to_vec(yuv) {
Ok(jpeg) => Bytes::from(jpeg),
Err(_) => Bytes::new(),
}
}
Err(_) => Bytes::new(),
}
}
/// Return a reference to the cached no-signal JPEG bytes.
fn no_signal_jpeg() -> &'static Bytes {
NO_SIGNAL_JPEG.get_or_init(generate_no_signal_jpeg)
}
/// Client ID type (UUID string) /// Client ID type (UUID string)
pub type ClientId = String; pub type ClientId = String;
@@ -359,6 +317,9 @@ impl MjpegStreamHandler {
PixelFormat::Yuyv => encoder PixelFormat::Yuyv => encoder
.encode_yuyv(frame.data(), sequence) .encode_yuyv(frame.data(), sequence)
.map_err(|e| format!("YUYV encode failed: {}", e))?, .map_err(|e| format!("YUYV encode failed: {}", e))?,
PixelFormat::Yvyu => encoder
.encode_yvyu(frame.data(), sequence)
.map_err(|e| format!("YVYU encode failed: {}", e))?,
PixelFormat::Nv12 => encoder PixelFormat::Nv12 => encoder
.encode_nv12(frame.data(), sequence) .encode_nv12(frame.data(), sequence)
.map_err(|e| format!("NV12 encode failed: {}", e))?, .map_err(|e| format!("NV12 encode failed: {}", e))?,
@@ -392,40 +353,12 @@ impl MjpegStreamHandler {
)) ))
} }
/// Set stream offline /// Marks offline; clients exit their read loop. UI overlay comes from `stream.state_changed`.
pub fn set_offline(&self) { pub fn set_offline(&self) {
self.online.store(false, Ordering::SeqCst); self.online.store(false, Ordering::SeqCst);
let _ = self.frame_notify.send(()); let _ = self.frame_notify.send(());
} }
/// Push a "no signal" placeholder JPEG to all connected MJPEG clients.
///
/// Unlike `set_offline()`, this keeps the stream marked as **online** so
/// that HTTP clients remain connected and see the placeholder image instead
/// of a black/empty screen. Call this whenever the capture thread enters
/// the `NoSignal` state.
pub fn push_no_signal_placeholder(&self) {
let jpeg = no_signal_jpeg();
if jpeg.is_empty() {
return;
}
let frame = VideoFrame::new(
jpeg.clone(),
Resolution::new(640, 360),
PixelFormat::Mjpeg,
0,
self.sequence.fetch_add(1, Ordering::Relaxed),
);
// Store as current frame so late-joining clients get it immediately.
self.current_frame.store(Arc::new(Some(frame)));
// Ensure stream is marked online so the HTTP handler keeps iterating.
self.online.store(true, Ordering::SeqCst);
// Wake up waiting HTTP clients.
let _ = self.frame_notify.send(());
}
/// Set stream online (called when streaming starts) /// Set stream online (called when streaming starts)
pub fn set_online(&self) { pub fn set_online(&self) {
self.online.store(true, Ordering::SeqCst); self.online.store(true, Ordering::SeqCst);

363
src/video/csi_bridge.rs Normal file
View File

@@ -0,0 +1,363 @@
//! CSI/HDMI bridge helpers: subdev discovery, DV probe, RK628 "fake VGA" filter (must run before `S_FMT` / `STREAMON` on capture — see RK628 driver).
use std::fs::File;
use std::io;
use std::os::fd::{AsFd, AsRawFd, FromRawFd};
use std::path::{Path, PathBuf};
use std::sync::mpsc;
use std::thread;
use std::time::Duration;
use libc;
use nix::poll::{poll, PollFd, PollFlags, PollTimeout};
use tracing::{debug, info, warn};
use v4l2r::bindings::{
v4l2_bt_timings, v4l2_dv_timings, V4L2_DV_BT_656_1120, V4L2_DV_FL_HAS_CEA861_VIC,
};
use v4l2r::ioctl::{
self, Event as V4l2Event, EventType, QueryDvTimingsError, SubscribeEventFlags,
};
use v4l2r::nix::errno::Errno;
use crate::video::SignalStatus;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CsiBridgeKind {
    Rk628,
    RkHdmirx,
    Tc358743,
    Unknown,
}

impl CsiBridgeKind {
    /// Classify a V4L2 subdev name by substring fingerprint.
    ///
    /// Matching is case-insensitive and first-match-wins, so a name that
    /// mentions both "rk628" and "hdmirx" is classified as RK628.
    fn from_subdev_name(name: &str) -> Option<Self> {
        let lowered = name.to_ascii_lowercase();
        // Needle lists checked in priority order.
        let table: [(&[&str], Self); 3] = [
            (&["rk628"], Self::Rk628),
            (&["hdmirx", "hdmi-rx"], Self::RkHdmirx),
            (&["tc358743", "tc358746"], Self::Tc358743),
        ];
        table
            .iter()
            .find(|(needles, _)| needles.iter().any(|n| lowered.contains(n)))
            .map(|(_, kind)| *kind)
    }

    /// Only the RK628 emits the synthetic "fake VGA" timings when unlocked,
    /// so only it needs the no-signal fingerprint filter.
    fn has_no_signal_fingerprint(self) -> bool {
        self == Self::Rk628
    }
}
#[derive(Debug, Clone)]
pub enum ProbeResult {
    Locked(DvTimingsMode),
    NoCable,
    NoSync,
    OutOfRange,
    NoSignal,
}

impl ProbeResult {
    /// Map the probe outcome onto the UI-facing `SignalStatus`.
    ///
    /// A locked signal has no error status, so `Locked` maps to `None`.
    pub fn as_status(&self) -> Option<SignalStatus> {
        let status = match self {
            ProbeResult::Locked(_) => return None,
            ProbeResult::NoCable => SignalStatus::NoCable,
            ProbeResult::NoSync => SignalStatus::NoSync,
            ProbeResult::OutOfRange => SignalStatus::OutOfRange,
            ProbeResult::NoSignal => SignalStatus::NoSignal,
        };
        Some(status)
    }

    /// True when the probe found stable DV timings (the `Locked` variant).
    pub fn is_locked(&self) -> bool {
        self.as_status().is_none()
    }
}
/// Scalar copy of BT timings (avoids unaligned refs into packed union).
#[derive(Clone, Copy)]
pub struct DvTimingsMode {
    /// Active horizontal resolution in pixels.
    pub width: u32,
    /// Active vertical resolution in lines.
    pub height: u32,
    /// Pixel clock in Hz as reported by the bridge.
    pub pixelclock: u64,
    /// Refresh rate derived from pixelclock / (total width × total height);
    /// `None` when any of those terms was zero.
    pub fps: Option<f64>,
    /// Full raw timings, kept so callers can re-apply them via `S_DV_TIMINGS`.
    pub raw: v4l2_dv_timings,
}
impl std::fmt::Debug for DvTimingsMode {
    /// Manual `Debug`: only the scalar fields are printed; `raw` wraps a
    /// bindgen union with no `Debug` impl and is deliberately omitted.
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut dbg = formatter.debug_struct("DvTimingsMode");
        dbg.field("width", &self.width);
        dbg.field("height", &self.height);
        dbg.field("pixelclock", &self.pixelclock);
        dbg.field("fps", &self.fps);
        dbg.finish()
    }
}
/// Heuristic: scan `/sys/class/video4linux/v4l-subdev*` names for rk628 / hdmirx / tc358743.
///
/// Returns the first matching `/dev/v4l-subdevN` path (if the node exists)
/// together with its classified bridge kind. `video_path` is used only for
/// log context here.
pub fn discover_subdev_for_video(video_path: &Path) -> Option<(PathBuf, CsiBridgeKind)> {
    let sysfs_base = Path::new("/sys/class/video4linux");
    for entry in std::fs::read_dir(sysfs_base).ok()?.flatten() {
        let file_name = entry.file_name();
        let node = file_name.to_string_lossy();
        if !node.starts_with("v4l-subdev") {
            continue;
        }
        // Classify by the sysfs-reported driver name; skip unknown bridges.
        let kind = match read_sysfs_name(&entry.path())
            .as_deref()
            .and_then(CsiBridgeKind::from_subdev_name)
        {
            Some(kind) => kind,
            None => continue,
        };
        let dev_path = PathBuf::from("/dev").join(&*node);
        if !dev_path.exists() {
            continue;
        }
        info!(
            "Discovered CSI bridge subdev for {:?}: {:?} ({:?})",
            video_path, dev_path, kind
        );
        return Some((dev_path, kind));
    }
    debug!(
        "No CSI bridge subdev found in /sys/class/video4linux for {:?}",
        video_path
    );
    None
}
/// Read and whitespace-trim `<subdev_sysfs>/name`; `None` if unreadable.
fn read_sysfs_name(subdev_sysfs: &Path) -> Option<String> {
    let contents = std::fs::read_to_string(subdev_sysfs.join("name")).ok()?;
    Some(contents.trim().to_owned())
}
/// Open a bridge subdev node read+write (both query and set ioctls are
/// issued on the returned handle).
pub fn open_subdev(path: &Path) -> io::Result<File> {
    let mut options = File::options();
    options.read(true).write(true);
    options.open(path)
}
/// Probe the bridge's HDMI input with `QUERY_DV_TIMINGS` and classify the
/// outcome into a [`ProbeResult`].
///
/// Successful timings still go through [`classify_timings`] (size sanity
/// check plus the RK628 fake-VGA filter), so `Ok` does not automatically
/// mean `Locked`.
pub fn probe_signal(subdev_fd: &impl AsRawFd, kind: CsiBridgeKind) -> ProbeResult {
    match ioctl::query_dv_timings::<v4l2_dv_timings>(subdev_fd) {
        Ok(timings) => classify_timings(timings, kind),
        Err(QueryDvTimingsError::NoLink) => ProbeResult::NoCable,
        Err(QueryDvTimingsError::UnstableSignal) => ProbeResult::NoSync,
        // ERANGE: a signal is present but its timings exceed what the
        // receiver supports.
        Err(QueryDvTimingsError::IoctlError(Errno::ERANGE)) => ProbeResult::OutOfRange,
        // Bus-level I/O failures are mapped to NoSync rather than a hard
        // "no signal" verdict.
        Err(QueryDvTimingsError::IoctlError(
            Errno::EIO | Errno::EREMOTEIO | Errno::ETIMEDOUT,
        )) => ProbeResult::NoSync,
        // Unsupported ioctl or any other errno: no usable signal info.
        Err(QueryDvTimingsError::Unsupported) | Err(QueryDvTimingsError::IoctlError(_)) => {
            ProbeResult::NoSignal
        }
    }
}
/// RK628 can block `QUERY_DV_TIMINGS` for seconds; probe uses a dup + timeout.
pub const RK628_SUBDEV_PROBE_TIMEOUT: Duration = Duration::from_millis(3000);

/// Run [`probe_signal`] on a worker thread, bounded by wall-clock `limit`.
///
/// The subdev fd is `dup(2)`'d so the worker owns an independent handle and
/// the caller's fd stays usable even if the worker is abandoned. Returns
/// `Some(result)` when the probe finished in time, `None` when the dup
/// failed or the ioctl did not return within `limit` (the worker thread is
/// then detached and left to finish on its own).
pub fn probe_signal_thread_timeout(
    subdev_fd: &impl AsRawFd,
    kind: CsiBridgeKind,
    limit: Duration,
) -> Option<ProbeResult> {
    let raw = subdev_fd.as_raw_fd();
    let dup_fd = unsafe { libc::dup(raw) };
    if dup_fd < 0 {
        warn!(
            "dup(subdev) for threaded DV probe failed: {}",
            io::Error::last_os_error()
        );
        return None;
    }
    // SAFETY: `dup_fd` was just returned by dup() and checked >= 0, so it
    // is a valid fd now exclusively owned by this File.
    let dup_file = unsafe { File::from_raw_fd(dup_fd) };
    let (tx, rx) = mpsc::channel::<ProbeResult>();
    let handle = thread::spawn(move || {
        let probe = probe_signal(&dup_file, kind);
        // The receiver may already be dropped on timeout; a failed send is fine.
        let _ = tx.send(probe);
    });
    match rx.recv_timeout(limit) {
        Ok(r) => {
            let _ = handle.join();
            Some(r)
        }
        Err(mpsc::RecvTimeoutError::Timeout) => {
            warn!(
                "QUERY_DV_TIMINGS exceeded {:?} (RK628 HDMI mode change?) — abandoning probe thread",
                limit
            );
            // Detach: the worker still holds `dup_file` and exits on its own
            // once the blocked ioctl returns.
            drop(handle);
            None
        }
        Err(mpsc::RecvTimeoutError::Disconnected) => {
            // Sender dropped without a value (worker panicked); reap it.
            let _ = handle.join();
            None
        }
    }
}
/// Turn raw `QUERY_DV_TIMINGS` output into a [`ProbeResult`].
///
/// Rejects non-BT.656/1120 payloads, degenerate (≤ 64-pixel) modes, and —
/// for RK628 — the synthetic VGA fingerprint the bridge reports when
/// unlocked. On success, derives fps from pixelclock / total frame area.
fn classify_timings(timings: v4l2_dv_timings, kind: CsiBridgeKind) -> ProbeResult {
    let timings_type: u32 = timings.type_;
    if timings_type != V4L2_DV_BT_656_1120 {
        warn!(
            "QUERY_DV_TIMINGS returned unexpected type {}, treating as NoSignal",
            timings_type
        );
        return ProbeResult::NoSignal;
    }

    // SAFETY: `type_ == V4L2_DV_BT_656_1120` selects the `bt` member of the
    // union; it is copied out by value, so no unaligned references escape.
    let bt: v4l2_bt_timings = unsafe { timings.__bindgen_anon_1.bt };
    let width: u32 = bt.width;
    let height: u32 = bt.height;
    let pixelclock: u64 = bt.pixelclock;

    // Anything at or below 64×64 is a degenerate/placeholder mode
    // (`<= 64` already covers the zero case).
    if width <= 64 || height <= 64 {
        return ProbeResult::NoSignal;
    }

    if kind.has_no_signal_fingerprint() && is_rk628_no_signal_fingerprint(&bt) {
        debug!(
            "RK628 reports synthetic {}x{} @ {} Hz VGA fingerprint → NoSignal",
            width, height, pixelclock
        );
        return ProbeResult::NoSignal;
    }

    // Widen each term before summing: the active width plus blanking is
    // summed here, and adding in u32 before the cast could wrap silently
    // in release builds.
    let total_h: u64 = u64::from(width)
        + u64::from(bt.hfrontporch)
        + u64::from(bt.hsync)
        + u64::from(bt.hbackporch);
    let total_v: u64 = u64::from(height)
        + u64::from(bt.vfrontporch)
        + u64::from(bt.vsync)
        + u64::from(bt.vbackporch);
    let fps = if total_h > 0 && total_v > 0 && pixelclock > 0 {
        Some(pixelclock as f64 / (total_h as f64 * total_v as f64))
    } else {
        None
    };

    ProbeResult::Locked(DvTimingsMode {
        width,
        height,
        pixelclock,
        fps,
        raw: timings,
    })
}
/// RK628 returns DMT 640x480 @ ~25.175 MHz, VIC=1 when unlocked; do not stream on that.
fn is_rk628_no_signal_fingerprint(bt: &v4l2_bt_timings) -> bool {
    // Copy scalars out by value first — taking references into the packed
    // bindgen struct would be unaligned.
    let (width, height) = (bt.width, bt.height);
    if width != 640 || height != 480 {
        return false;
    }
    let pixelclock: u64 = bt.pixelclock;
    let flags: u32 = bt.flags;
    let vic: u8 = bt.cea861_vic;
    // Within 50 kHz of the DMT VGA dot clock (25.175 MHz).
    let near_dmt_pclk = (pixelclock as i64 - 25_175_000).abs() < 50_000;
    // The synthetic mode carries CEA-861 VIC 1 with the VIC flag set.
    let synthetic_vic = flags & V4L2_DV_FL_HAS_CEA861_VIC != 0 && vic == 1;
    near_dmt_pclk && synthetic_vic
}
/// Push `timings` to the subdev via `S_DV_TIMINGS`.
///
/// Failure is logged and swallowed on purpose: callers continue with the
/// mode that was queried earlier.
pub fn apply_dv_timings(subdev_fd: &impl AsRawFd, timings: v4l2_dv_timings) {
    if let Err(err) = ioctl::s_dv_timings::<_, v4l2_dv_timings>(subdev_fd, timings) {
        debug!(
            "S_DV_TIMINGS failed on subdev ({}), continuing with queried mode",
            err
        );
    } else {
        debug!("S_DV_TIMINGS ok on subdev");
    }
}
/// Subscribe the subdev to `V4L2_EVENT_SOURCE_CHANGE` (pad 0) so that mode
/// changes can later be drained with [`wait_source_change`].
pub fn subscribe_source_change(subdev_fd: &impl AsRawFd) -> io::Result<()> {
    let outcome = ioctl::subscribe_event(
        subdev_fd,
        EventType::SourceChange(0),
        SubscribeEventFlags::empty(),
    );
    outcome.map_err(|err| io::Error::other(format!("subscribe_event(SOURCE_CHANGE): {}", err)))
}
/// `Ok(true)` if a SOURCE_CHANGE was drained; `Ok(false)` on timeout.
///
/// Polls the subdev for POLLPRI, then dequeues up to 16 pending events so a
/// burst of source changes collapses into a single wake-up.
pub fn wait_source_change(subdev_fd: &File, timeout: Duration) -> io::Result<bool> {
    let mut poll_fds = [PollFd::new(subdev_fd.as_fd(), PollFlags::POLLPRI)];
    // Clamp to the u16 range PollTimeout accepts.
    let timeout_ms = timeout.as_millis().min(u128::from(u16::MAX)) as u16;
    if poll(&mut poll_fds, PollTimeout::from(timeout_ms))? == 0 {
        return Ok(false);
    }
    if matches!(poll_fds[0].revents(), Some(ev) if !ev.contains(PollFlags::POLLPRI)) {
        return Ok(false);
    }
    // Drain queued events, capped so a misbehaving driver cannot spin us.
    let mut drained = 0u32;
    while ioctl::dqevent::<V4l2Event>(subdev_fd).is_ok() {
        drained = drained.saturating_add(1);
        if drained >= 16 {
            break;
        }
    }
    debug!("subdev source_change drained {} event(s)", drained);
    Ok(true)
}
#[cfg(test)]
mod tests {
    use super::*;

    // The exact synthetic mode RK628 emits while unlocked: DMT 640x480,
    // 25.175 MHz dot clock, CEA-861 VIC 1 — must be flagged as no-signal.
    #[test]
    fn rk628_fingerprint_matches_vga() {
        let mut bt: v4l2_bt_timings = unsafe { std::mem::zeroed() };
        bt.width = 640;
        bt.height = 480;
        bt.pixelclock = 25_175_000;
        bt.flags = V4L2_DV_FL_HAS_CEA861_VIC;
        bt.cea861_vic = 1;
        assert!(is_rk628_no_signal_fingerprint(&bt));
    }

    // A genuine 1080p CEA mode (VIC 16) must never be mistaken for the
    // unlocked placeholder, even though it also carries the VIC flag.
    #[test]
    fn rk628_fingerprint_rejects_real_1080p() {
        let mut bt: v4l2_bt_timings = unsafe { std::mem::zeroed() };
        bt.width = 1920;
        bt.height = 1080;
        bt.pixelclock = 148_500_000;
        bt.flags = V4L2_DV_FL_HAS_CEA861_VIC;
        bt.cea861_vic = 16;
        assert!(!is_rk628_no_signal_fingerprint(&bt));
    }

    #[test]
    fn rk628_fingerprint_rejects_real_vga_without_vic() {
        // A hypothetical legit VGA source would *not* carry the CEA VIC
        // flag from the bridge (RK628 sets it synthetically when unlocked).
        let mut bt: v4l2_bt_timings = unsafe { std::mem::zeroed() };
        bt.width = 640;
        bt.height = 480;
        bt.pixelclock = 25_175_000;
        bt.flags = 0;
        bt.cea861_vic = 0;
        assert!(!is_rk628_no_signal_fingerprint(&bt));
    }

    // Name classification: known driver strings map to their bridge kind,
    // anything else yields None.
    #[test]
    fn from_subdev_name_recognises_known_bridges() {
        assert_eq!(
            CsiBridgeKind::from_subdev_name("rk628-csi-v4l2 9-0051"),
            Some(CsiBridgeKind::Rk628)
        );
        assert_eq!(
            CsiBridgeKind::from_subdev_name("rk-hdmirx-ctrl"),
            Some(CsiBridgeKind::RkHdmirx)
        );
        assert_eq!(
            CsiBridgeKind::from_subdev_name("tc358743 2-000f"),
            Some(CsiBridgeKind::Tc358743)
        );
        assert_eq!(CsiBridgeKind::from_subdev_name("mystery"), None);
    }
}

View File

@@ -16,11 +16,13 @@ use v4l2r::ioctl::{
use v4l2r::nix::errno::Errno; use v4l2r::nix::errno::Errno;
use v4l2r::{Format as V4l2rFormat, QueueType}; use v4l2r::{Format as V4l2rFormat, QueueType};
use super::csi_bridge;
use super::format::{PixelFormat, Resolution}; use super::format::{PixelFormat, Resolution};
use super::is_rk_hdmirx_driver; use super::{is_rk_hdmirx_driver, is_rkcif_driver};
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
const DEVICE_PROBE_TIMEOUT_MS: u64 = 400; /// Per-node probe limit; rkcif/RK628 ioctl chains can exceed 1s under contention.
const DEVICE_PROBE_TIMEOUT_MS: u64 = 10_000;
/// Information about a video device /// Information about a video device
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -43,6 +45,20 @@ pub struct VideoDeviceInfo {
pub is_capture_card: bool, pub is_capture_card: bool,
/// Priority score for device selection (higher is better) /// Priority score for device selection (higher is better)
pub priority: u32, pub priority: u32,
/// Whether an HDMI signal is currently detected (CSI/HDMI bridge devices only;
/// always `true` for USB capture cards).
pub has_signal: bool,
/// Path of the bridge subdev (`/dev/v4l-subdevN`) paired with this
/// capture node, if any. On Rockchip boards that wire an RK628 /
/// TC358746 / RK-HDMIRX through `rkcif`, `QUERY_DV_TIMINGS`,
/// `S_DV_TIMINGS`, `SUBSCRIBE_EVENT(SOURCE_CHANGE)`, `S_EDID` etc. all
/// return `ENOTTY` on the video node — they only work here. `None`
/// for USB UVC and for bridges that expose DV ioctls on the video node
/// directly (tc358743 via `uvcvideo`).
pub subdev_path: Option<PathBuf>,
/// Classification of the paired bridge (drives fingerprint logic for
/// RK628's synthetic-VGA no-signal pattern).
pub bridge_kind: Option<String>,
} }
/// Information about a supported format /// Information about a supported format
@@ -147,12 +163,114 @@ impl VideoDevice {
read_write: flags.contains(Capabilities::READWRITE), read_write: flags.contains(Capabilities::READWRITE),
}; };
let formats = if is_rk_hdmirx_driver(&caps.driver, &caps.card) { // For CSI/HDMI bridges, try to locate the paired subdev *before*
self.enumerate_current_format_only()? // the signal check: RK628 + rkcif places QUERY_DV_TIMINGS on the
// subdev (the video node returns ENOTTY). Tc358743 and rk_hdmirx
// typically expose DV ioctls on the video node itself, but having
// the subdev handle for EDID/event subscription doesn't hurt.
let (subdev_path, bridge_kind) = if is_rkcif_driver(&caps.driver)
|| is_rk_hdmirx_driver(&caps.driver, &caps.card)
{
match csi_bridge::discover_subdev_for_video(&self.path) {
Some((path, kind)) => (Some(path), Some(format!("{:?}", kind).to_lowercase())),
None => (None, None),
}
} else {
(None, None)
};
// Probe the HDMI source for both signal presence *and* the live
// frame-rate. rkcif's `VIDIOC_ENUM_FRAMEINTERVALS` returns a
// meaningless `1.0..30.0` StepWise range, so the only trustworthy
// fps for rkcif + RK628 / rk_hdmirx boards comes from the bridge
// subdev's DV timings (pixelclock / total_width / total_height).
//
// Preference order:
// 1. Bridge subdev — on rkcif boards this is the *only* node
// where QUERY_DV_TIMINGS works, and it lets the RK628
// fingerprint filter kick in before we return has_signal=true.
// 2. Video node fallback — for rk_hdmirx / tc358743 where DV
// timings are exposed on the capture node directly.
// 3. USB UVC — always true (no signal concept), no hdmi_fps.
// Subdev-reported HDMI source mode (width, height, fps). On rkcif +
// RK628 boards this is the *only* place DV timings work; the video
// node itself returns ENOTTY for QUERY/G_DV_TIMINGS, so without
// threading this through to `enumerate_bridge_formats` the format
// list ends up with zero resolutions and `select_resolution` falls
// back to the user's preferred value (e.g. 4K) even when the real
// source is 1080p.
let mut subdev_hdmi_mode: Option<(u32, u32, Option<f64>)> = None;
let (has_signal, hdmi_fps) = if let Some(subdev_path) = subdev_path.as_ref() {
match csi_bridge::open_subdev(subdev_path) {
Ok(subdev_fd) => {
let kind = parse_bridge_kind(bridge_kind.as_deref())
.unwrap_or(csi_bridge::CsiBridgeKind::Unknown);
let probe = csi_bridge::probe_signal(&subdev_fd, kind);
debug!(
"has_signal via subdev {:?} ({:?}): {:?}",
subdev_path, kind, probe
);
let fps = match &probe {
csi_bridge::ProbeResult::Locked(mode) => {
subdev_hdmi_mode = Some((mode.width, mode.height, mode.fps));
mode.fps
}
_ => None,
};
(probe.is_locked(), fps)
}
Err(e) => {
warn!("Failed to open subdev {:?}: {}", subdev_path, e);
(false, None)
}
}
} else if is_rk_hdmirx_driver(&caps.driver, &caps.card)
|| is_rkcif_driver(&caps.driver)
{
let dv = self.current_dv_timings_mode();
debug!(
"has_signal via video node {:?} (driver={}): dv_timings={:?}",
self.path, caps.driver, dv
);
let has_signal = dv
.as_ref()
.map(|(w, h, _)| *w > 64 && *h > 64)
.unwrap_or(false);
let fps = if has_signal {
dv.and_then(|(_, _, f)| f)
} else {
None
};
(has_signal, fps)
} else {
(true, None)
};
let mut formats = if is_rk_hdmirx_driver(&caps.driver, &caps.card)
|| is_rkcif_driver(&caps.driver)
{
// CSI/HDMI bridge drivers (rk_hdmirx, rkcif) expose multiple pixel
// formats via ENUM_FMT (e.g. rk_hdmirx: BGR3/NV24/NV16/NV12) but
// `ENUM_FRAMESIZES` is fiction for these drivers (rkcif reports a
// degenerate `64x64 StepWise 8/8` that only describes its DMA
// engine, rk_hdmirx returns ENOTTY). The only authoritative
// resolution is whatever the bridge subdev's DV timings report,
// so we treat the HDMI source mode as the single allowed
// resolution for every pixel format.
self.enumerate_bridge_formats(subdev_hdmi_mode)?
} else { } else {
self.enumerate_formats()? self.enumerate_formats()?
}; };
// For CSI/HDMI bridges, the driver-enumerated fps list is fiction
// (rkcif: always `1..30`; rk_hdmirx: typically `ENOTTY`). Replace
// it with the live HDMI source fps derived from the bridge DV
// timings so the UI reflects what the sink is actually receiving.
if let Some(fps) = hdmi_fps {
override_resolution_fps(&mut formats, fps);
}
// Determine if this is likely an HDMI capture card // Determine if this is likely an HDMI capture card
let is_capture_card = Self::detect_capture_card(&caps.card, &caps.driver, &formats); let is_capture_card = Self::detect_capture_card(&caps.card, &caps.driver, &formats);
@@ -160,6 +278,11 @@ impl VideoDevice {
let priority = let priority =
Self::calculate_priority(&caps.card, &caps.driver, &formats, is_capture_card); Self::calculate_priority(&caps.card, &caps.driver, &formats, is_capture_card);
debug!(
"Device {:?}: {} formats, priority={}, has_signal={}, hdmi_fps={:?}, is_capture_card={}, subdev={:?}",
self.path, formats.len(), priority, has_signal, hdmi_fps, is_capture_card, subdev_path
);
Ok(VideoDeviceInfo { Ok(VideoDeviceInfo {
path: self.path.clone(), path: self.path.clone(),
name: caps.card.clone(), name: caps.card.clone(),
@@ -170,6 +293,9 @@ impl VideoDevice {
capabilities, capabilities,
is_capture_card, is_capture_card,
priority, priority,
has_signal,
subdev_path,
bridge_kind,
}) })
} }
@@ -213,32 +339,119 @@ impl VideoDevice {
Ok(formats) Ok(formats)
} }
fn enumerate_current_format_only(&self) -> Result<Vec<FormatInfo>> { /// Enumerate formats for CSI/HDMI bridge devices (rk_hdmirx, rkcif).
let current = self.get_format()?; ///
let Some(format) = PixelFormat::from_v4l2r(current.pixelformat) else { /// Uses `VIDIOC_ENUM_FMT` to discover all supported pixel formats (the
/// output of `v4l2-ctl --list-formats`) and attaches the HDMI source
/// resolution read from the bridge DV timings (or G_FMT as a last
/// resort) as the single allowed resolution for every format.
///
/// `ENUM_FRAMESIZES` is deliberately ignored here: rkcif advertises a
/// degenerate `64x64 StepWise 8/8` that only describes its DMA engine
/// (not what the HDMI source can actually deliver), and rk_hdmirx
/// typically returns ENOTTY. Neither the bridge nor rkcif performs
/// any hardware scaling, so the capture resolution is always the
/// HDMI source mode.
///
/// Returned formats are sorted by `PixelFormat::priority()` so the
/// higher-level `select_format` picks a sensible default (NV12 > YUYV on
/// rkcif / rk_hdmirx) instead of whatever the driver happens to
/// have stuck as the current active format.
fn enumerate_bridge_formats(
&self,
subdev_hdmi_mode: Option<(u32, u32, Option<f64>)>,
) -> Result<Vec<FormatInfo>> {
let queue = self.capture_queue_type()?;
let current_fmt = self.get_format().ok();
if let Some(fmt) = &current_fmt {
debug!( debug!(
"Current active format {:?} is not supported by One-KVM, falling back to full enumeration", "enumerate_bridge_formats: current G_FMT -> {:?} {}x{}",
current.pixelformat fmt.pixelformat, fmt.width, fmt.height
); );
return self.enumerate_formats(); }
// Preference order for the HDMI source resolution:
// 1. Subdev-reported DV timings (authoritative on rkcif + RK628 where
// the video node returns ENOTTY for QUERY_DV_TIMINGS).
// 2. Video-node DV timings / G_FMT (rk_hdmirx, tc358743 direct).
let hdmi_mode = subdev_hdmi_mode
.map(|(w, h, fps)| {
let mut fps_list = Vec::new();
if let Some(f) = fps {
fps_list.push(f);
}
if let Some(parm_fps) = self.current_parm_fps() {
fps_list.push(parm_fps);
}
normalize_fps_list(&mut fps_list);
ResolutionInfo::new(w, h, fps_list)
})
.or_else(|| self.current_mode_resolution_info());
if let Some(info) = &hdmi_mode {
debug!(
"enumerate_bridge_formats: HDMI source mode {}x{} (from {})",
info.width,
info.height,
if subdev_hdmi_mode.is_some() {
"subdev"
} else {
"video node"
}
);
} else {
debug!("enumerate_bridge_formats: no HDMI source mode available");
}
let mut formats: Vec<FormatInfo> = Vec::new();
for desc in FormatIterator::new(&self.fd, queue) {
let Some(format) = PixelFormat::from_v4l2r(desc.pixelformat) else {
debug!(
"enumerate_bridge_formats: skipping unsupported fourcc {:?} ({})",
desc.pixelformat, desc.description
);
continue;
}; };
let resolutions = hdmi_mode.clone().into_iter().collect();
formats.push(FormatInfo {
format,
resolutions,
description: desc.description.clone(),
});
}
if formats.is_empty() {
// Fallback: driver refused ENUM_FMT entirely, use just the current
// active format reported by G_FMT so we still have something.
if let Some(fmt) = current_fmt {
if let Some(format) = PixelFormat::from_v4l2r(fmt.pixelformat) {
let description = self let description = self
.format_description(current.pixelformat) .format_description(fmt.pixelformat)
.unwrap_or_else(|| format.to_string()); .unwrap_or_else(|| format.to_string());
let resolutions = hdmi_mode.into_iter().collect();
let mut resolutions = self.enumerate_resolutions(current.pixelformat)?; formats.push(FormatInfo {
if resolutions.is_empty() {
if let Some(current_mode) = self.current_mode_resolution_info() {
resolutions.push(current_mode);
}
}
Ok(vec![FormatInfo {
format, format,
resolutions, resolutions,
description, description,
}]) });
}
}
}
// Highest priority first (MJPEG > NV12 > NV16 > NV24 > BGR24 > ...).
formats.sort_by(|a, b| b.format.priority().cmp(&a.format.priority()));
debug!(
"enumerate_bridge_formats: resolved formats {:?}",
formats
.iter()
.map(|f| format!("{}({} res)", f.format, f.resolutions.len()))
.collect::<Vec<_>>()
);
Ok(formats)
} }
/// Enumerate resolutions for a specific format /// Enumerate resolutions for a specific format
@@ -259,24 +472,26 @@ impl VideoDevice {
resolutions.push(ResolutionInfo::new(d.width, d.height, fps)); resolutions.push(ResolutionInfo::new(d.width, d.height, fps));
} }
FrmSizeTypes::StepWise(s) => { FrmSizeTypes::StepWise(s) => {
for res in [ // StepWise ranges are ignored on purpose: on
Resolution::VGA, // CSI/HDMI bridge drivers (rkcif) the range
Resolution::HD720, // only describes the DMA engine's capability
Resolution::HD1080, // and not what the HDMI source can deliver,
Resolution::UHD4K, // so synthesising candidate resolutions from
] { // it is misleading. Bridge devices go
if res.width >= s.min_width // through `enumerate_bridge_formats` and use
&& res.width <= s.max_width // the DV-timings source mode directly; for
&& res.height >= s.min_height // any other driver that emits StepWise we
&& res.height <= s.max_height // fall back to the current active mode below.
{ debug!(
let fps = self "ENUM_FRAMESIZES {:?}: ignoring StepWise {}x{} - {}x{} step {}/{}",
.enumerate_fps(fourcc, res.width, res.height) fourcc, s.min_width, s.min_height,
.unwrap_or_default(); s.max_width, s.max_height,
resolutions s.step_width, s.step_height
.push(ResolutionInfo::new(res.width, res.height, fps)); );
} if resolutions.is_empty() {
should_fallback_to_current_mode = true;
} }
break;
} }
} }
} }
@@ -449,6 +664,8 @@ impl VideoDevice {
"macrosilicon", "macrosilicon",
"tc358743", "tc358743",
"uvc", "uvc",
"rkcif",
"rk_hdmirx",
]; ];
// Check card/driver names // Check card/driver names
@@ -639,20 +856,16 @@ impl VideoDevice {
pub fn enumerate_devices() -> Result<Vec<VideoDeviceInfo>> { pub fn enumerate_devices() -> Result<Vec<VideoDeviceInfo>> {
info!("Enumerating video devices..."); info!("Enumerating video devices...");
let mut devices = Vec::new(); // First pass: collect candidates that pass the sysfs-based pre-filter.
// This avoids opening orphan /dev/videoN nodes (ENODEV) and m2m codec
// Scan /dev/video* devices // nodes (ENOTTY) that would otherwise waste one syscall + one ioctl each.
let mut candidates: Vec<PathBuf> = Vec::new();
for entry in std::fs::read_dir("/dev") for entry in std::fs::read_dir("/dev")
.map_err(|e| AppError::VideoError(format!("Failed to read /dev: {}", e)))? .map_err(|e| AppError::VideoError(format!("Failed to read /dev: {}", e)))?
{ {
let entry = match entry { let Ok(entry) = entry else { continue };
Ok(e) => e,
Err(_) => continue,
};
let path = entry.path(); let path = entry.path();
let name = path.file_name().and_then(|n| n.to_str()).unwrap_or(""); let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
if !name.starts_with("video") { if !name.starts_with("video") {
continue; continue;
} }
@@ -663,11 +876,31 @@ pub fn enumerate_devices() -> Result<Vec<VideoDeviceInfo>> {
debug!("Skipping non-capture candidate (sysfs): {:?}", path); debug!("Skipping non-capture candidate (sysfs): {:?}", path);
continue; continue;
} }
candidates.push(path);
}
// Try to open and query the device (with timeout) collapse_rkcif_probe_candidates(&mut candidates);
match probe_device_with_timeout(&path, Duration::from_millis(DEVICE_PROBE_TIMEOUT_MS)) {
// Second pass: probe the remaining candidates in parallel. Each probe
// already spawns its own worker thread inside `probe_device_with_timeout`,
// so the total wall-clock time is bounded by `DEVICE_PROBE_TIMEOUT_MS`
// rather than (N × per-probe-latency).
let timeout = Duration::from_millis(DEVICE_PROBE_TIMEOUT_MS);
let mut handles = Vec::with_capacity(candidates.len());
for path in candidates {
handles.push(std::thread::spawn(move || {
(path.clone(), probe_device_with_timeout(&path, timeout))
}));
}
let mut devices = Vec::new();
for handle in handles {
let (path, info) = match handle.join() {
Ok(pair) => pair,
Err(_) => continue,
};
match info {
Some(info) => { Some(info) => {
// Only include devices with video capture capability
if info.capabilities.video_capture || info.capabilities.video_capture_mplane { if info.capabilities.video_capture || info.capabilities.video_capture_mplane {
info!( info!(
"Found capture device: {} ({}) - {} formats", "Found capture device: {} ({}) - {} formats",
@@ -686,13 +919,76 @@ pub fn enumerate_devices() -> Result<Vec<VideoDeviceInfo>> {
} }
} }
// Sort by priority (highest first) // Sort by priority (highest first), then by path (lowest first) as tiebreaker.
devices.sort_by(|a, b| b.priority.cmp(&a.priority)); // The path tiebreaker ensures deterministic ordering when multiple sub-devices
// share the same priority (e.g. rkcif nodes), so that /dev/video0 is preferred
// over /dev/video10 after deduplication.
devices.sort_by(|a, b| b.priority.cmp(&a.priority).then_with(|| a.path.cmp(&b.path)));
// Deduplicate rkcif sub-devices: the driver exposes many /dev/video* nodes
// for a single MIPI CSI pipeline. Keep only the highest-priority node per
// (driver, bus_info) group so users see one device instead of ~11.
dedup_platform_subdevices(&mut devices);
info!("Found {} video capture devices", devices.len()); info!("Found {} video capture devices", devices.len());
Ok(devices) Ok(devices)
} }
/// Collapse platform sub-device nodes sharing one (driver, bus_info) pair
/// into a single entry. On Rockchip SoCs the `rkcif` driver registers a
/// separate `/dev/video*` node per media-pipeline link, so one physical
/// capture card would otherwise show up as ~11 devices.
fn dedup_platform_subdevices(devices: &mut Vec<VideoDeviceInfo>) {
    // The caller sorts by priority (descending), so the first node seen for
    // a given (driver, bus_info) pair is the one worth keeping.
    let mut kept = std::collections::HashSet::new();
    devices.retain(|dev| {
        // Devices outside the rkcif family (or without bus info) are never grouped.
        if !is_rkcif_driver(&dev.driver) || dev.bus_info.is_empty() {
            true
        } else {
            kept.insert((dev.driver.clone(), dev.bus_info.clone()))
        }
    });
}
/// rkcif registers many `/dev/video*` queues; probing all of them in
/// parallel can contend and time out. Keep exactly one node per board —
/// the one with the lowest `videoN` index.
fn collapse_rkcif_probe_candidates(candidates: &mut Vec<PathBuf>) {
    // Split the candidate list into rkcif nodes and everything else.
    let (rkcif_nodes, mut kept): (Vec<PathBuf>, Vec<PathBuf>) = candidates
        .drain(..)
        .partition(|p| sysfs_uevent_driver(p).is_some_and(|d| d.contains("rkcif")));

    // Re-add only the lowest-numbered rkcif node, if any were present.
    if let Some(winner) = rkcif_nodes
        .into_iter()
        .min_by_key(|p| video_index(p).unwrap_or(u32::MAX))
    {
        kept.push(winner);
    }

    *candidates = kept;
}
/// Read the kernel driver name for a `/dev/videoN` node from its sysfs
/// `device/uevent` file. Returns `None` when the node name is not valid
/// UTF-8, the uevent file is unreadable, or it lacks a `DRIVER=` entry.
fn sysfs_uevent_driver(path: &Path) -> Option<String> {
    let node = path.file_name()?.to_str()?;
    let uevent_path = Path::new("/sys/class/video4linux")
        .join(node)
        .join("device/uevent");
    let content = read_sysfs_string(&uevent_path)?;
    extract_uevent_value(&content, "driver")
}
/// Extract `N` from a `/dev/videoN` path; `None` if the filename does not
/// follow that pattern (e.g. `/dev/media0`) or the suffix is not a number.
fn video_index(path: &Path) -> Option<u32> {
    let file = path.file_name().and_then(|n| n.to_str())?;
    let digits = file.strip_prefix("video")?;
    digits.parse::<u32>().ok()
}
fn probe_device_with_timeout(path: &Path, timeout: Duration) -> Option<VideoDeviceInfo> { fn probe_device_with_timeout(path: &Path, timeout: Duration) -> Option<VideoDeviceInfo> {
let path = path.to_path_buf(); let path = path.to_path_buf();
let path_for_thread = path.clone(); let path_for_thread = path.clone();
@@ -725,8 +1021,26 @@ fn sysfs_maybe_capture(path: &Path) -> bool {
Some(name) => name, Some(name) => name,
None => return true, None => return true,
}; };
// Fast-path: nodes whose filename clearly marks them as m2m codecs
// (e.g. /dev/video-enc0, /dev/video-dec0 on Rockchip). These never
// answer VIDIOC_QUERYCAP as capture devices.
let name_lower = name.to_ascii_lowercase();
let filename_skip = ["-enc", "-dec", "-codec", "-m2m", "-vepu", "-vdpu"];
if filename_skip.iter().any(|hint| name_lower.contains(hint)) {
return false;
}
let sysfs_base = Path::new("/sys/class/video4linux").join(name); let sysfs_base = Path::new("/sys/class/video4linux").join(name);
// Orphan /dev/videoN nodes (no matching sysfs entry) can appear when the
// kernel driver that created them has been unloaded but the device nodes
// were never cleaned up. Opening them returns ENODEV; skip the probe.
if !sysfs_base.exists() {
debug!("Skipping {:?}: no matching /sys/class/video4linux entry", path);
return false;
}
let sysfs_name = read_sysfs_string(&sysfs_base.join("name")) let sysfs_name = read_sysfs_string(&sysfs_base.join("name"))
.unwrap_or_default() .unwrap_or_default()
.to_lowercase(); .to_lowercase();
@@ -746,19 +1060,51 @@ fn sysfs_maybe_capture(path: &Path) -> bool {
"macrosilicon", "macrosilicon",
"tc358743", "tc358743",
"grabber", "grabber",
"rkcif",
"rk_hdmirx",
]; ];
if capture_hints.iter().any(|hint| sysfs_name.contains(hint)) { if capture_hints.iter().any(|hint| sysfs_name.contains(hint)) {
maybe_capture = true; maybe_capture = true;
} }
if let Some(driver) = driver { if let Some(driver) = &driver {
if driver.contains("uvcvideo") || driver.contains("tc358743") { if driver.contains("uvcvideo")
|| driver.contains("tc358743")
|| driver.contains("rkcif")
|| driver.contains("rk_hdmirx")
{
maybe_capture = true; maybe_capture = true;
} }
} }
// Skip known non-capture drivers (RK video codecs, Hantro VPU, ISP/VPE
// pipelines, MIPI ISP statistics / params nodes). These would otherwise
// succeed QUERYCAP but expose only VIDEO_M2M / STATS / PARAMS and get
// filtered later — skipping here saves an open() + ioctl() per node.
let driver_skip = [
"rkvenc", "rkvdec", "vepu", "vdpu", "hantro", "mpp_", "rockchip-vpu",
];
if let Some(driver) = &driver {
if driver_skip.iter().any(|hint| driver.contains(hint)) {
return false;
}
}
let skip_hints = [ let skip_hints = [
"codec", "decoder", "encoder", "isp", "mem2mem", "m2m", "vbi", "radio", "metadata", "codec",
"decoder",
"encoder",
"isp",
"mem2mem",
"m2m",
"vbi",
"radio",
"metadata",
"output", "output",
// rkisp sub-nodes that are not video capture queues
"rkisp-statistics",
"rkisp-input-params",
"rkisp_rawrd",
"rkisp_rawwr",
]; ];
if skip_hints.iter().any(|hint| sysfs_name.contains(hint)) && !maybe_capture { if skip_hints.iter().any(|hint| sysfs_name.contains(hint)) && !maybe_capture {
return false; return false;
@@ -783,6 +1129,18 @@ fn extract_uevent_value(content: &str, key: &str) -> Option<String> {
None None
} }
/// Parse the `bridge_kind` string serialised into `VideoDeviceInfo` back
/// into the strongly-typed enum used by [`csi_bridge`].
pub(crate) fn parse_bridge_kind(kind: Option<&str>) -> Option<csi_bridge::CsiBridgeKind> {
Some(match kind? {
"rk628" => csi_bridge::CsiBridgeKind::Rk628,
"rkhdmirx" => csi_bridge::CsiBridgeKind::RkHdmirx,
"tc358743" => csi_bridge::CsiBridgeKind::Tc358743,
"unknown" => csi_bridge::CsiBridgeKind::Unknown,
_ => return None,
})
}
fn dv_timings_fps(bt: &v4l2_bt_timings) -> Option<f64> { fn dv_timings_fps(bt: &v4l2_bt_timings) -> Option<f64> {
let total_width = bt.width + bt.hfrontporch + bt.hsync + bt.hbackporch; let total_width = bt.width + bt.hfrontporch + bt.hsync + bt.hbackporch;
let total_height = if bt.interlaced != 0 { let total_height = if bt.interlaced != 0 {
@@ -813,6 +1171,24 @@ fn normalize_fps_list(fps_list: &mut Vec<f64>) {
fps_list.dedup_by(|a, b| (*a - *b).abs() < 0.01); fps_list.dedup_by(|a, b| (*a - *b).abs() < 0.01);
} }
/// Replace every `ResolutionInfo::fps` in `formats` with the single HDMI
/// source frame-rate. Used for CSI/HDMI bridge devices (rkcif, rk_hdmirx)
/// whose `VIDIOC_ENUM_FRAMEINTERVALS` returns meaningless StepWise values
/// — the only trustworthy fps comes from the bridge DV-timings on the
/// paired subdev. Silently no-op when `fps` normalises to empty.
fn override_resolution_fps(formats: &mut [FormatInfo], fps: f64) {
    // Run the single value through the same normalisation as enumerated
    // lists (rounding / dedup); bail out if it is rejected entirely.
    let mut fps_list = vec![fps];
    normalize_fps_list(&mut fps_list);
    if fps_list.is_empty() {
        return;
    }
    for format in formats {
        for resolution in &mut format.resolutions {
            resolution.fps = fps_list.clone();
        }
    }
}
/// Find the best video device for KVM use /// Find the best video device for KVM use
pub fn find_best_device() -> Result<VideoDeviceInfo> { pub fn find_best_device() -> Result<VideoDeviceInfo> {
let devices = enumerate_devices()?; let devices = enumerate_devices()?;

View File

@@ -152,6 +152,41 @@ impl JpegEncoder {
self.encode_i420_to_jpeg(sequence) self.encode_i420_to_jpeg(sequence)
} }
/// YVYU → YUYV chroma swap into scratch, then the same I420 + JPEG path
/// as [`Self::encode_yuyv`].
pub fn encode_yvyu(&mut self, data: &[u8], sequence: u64) -> Result<EncodedFrame> {
    let width = self.config.resolution.width as usize;
    let height = self.config.resolution.height as usize;
    let expected_size = width * height * 2;

    if data.len() < expected_size {
        return Err(AppError::VideoError(format!(
            "YVYU data too small: {} < {}",
            data.len(),
            expected_size
        )));
    }

    // bgra_buffer doubles as scratch space for the byte-swapped YUYV image.
    if self.bgra_buffer.len() < expected_size {
        self.bgra_buffer.resize(expected_size, 0);
    }
    let scratch = &mut self.bgra_buffer[..expected_size];
    let input = &data[..expected_size];

    // Each 4-byte macropixel Y0 V0 Y1 U0 becomes Y0 U0 Y1 V0 (swap bytes 1 and 3).
    for (out_px, in_px) in scratch.chunks_exact_mut(4).zip(input.chunks_exact(4)) {
        out_px[0] = in_px[0]; // Y0
        out_px[1] = in_px[3]; // U0
        out_px[2] = in_px[2]; // Y1
        out_px[3] = in_px[1]; // V0
    }

    libyuv::yuy2_to_i420(scratch, &mut self.i420_buffer, width as i32, height as i32)
        .map_err(|e| AppError::VideoError(format!("libyuv YVYU→I420 failed: {}", e)))?;
    self.encode_i420_to_jpeg(sequence)
}
/// Encode NV12 frame to JPEG /// Encode NV12 frame to JPEG
pub fn encode_nv12(&mut self, data: &[u8], sequence: u64) -> Result<EncodedFrame> { pub fn encode_nv12(&mut self, data: &[u8], sequence: u64) -> Result<EncodedFrame> {
let width = self.config.resolution.width as usize; let width = self.config.resolution.width as usize;
@@ -323,7 +358,8 @@ impl crate::video::encoder::traits::Encoder for JpegEncoder {
fn encode(&mut self, data: &[u8], sequence: u64) -> Result<EncodedFrame> { fn encode(&mut self, data: &[u8], sequence: u64) -> Result<EncodedFrame> {
match self.config.input_format { match self.config.input_format {
PixelFormat::Yuyv | PixelFormat::Yvyu => self.encode_yuyv(data, sequence), PixelFormat::Yuyv => self.encode_yuyv(data, sequence),
PixelFormat::Yvyu => self.encode_yvyu(data, sequence),
PixelFormat::Nv12 => self.encode_nv12(data, sequence), PixelFormat::Nv12 => self.encode_nv12(data, sequence),
PixelFormat::Nv16 => self.encode_nv16(data, sequence), PixelFormat::Nv16 => self.encode_nv16(data, sequence),
PixelFormat::Nv24 => self.encode_nv24(data, sequence), PixelFormat::Nv24 => self.encode_nv24(data, sequence),

View File

@@ -141,8 +141,8 @@ impl PixelFormat {
match self { match self {
PixelFormat::Mjpeg => 100, PixelFormat::Mjpeg => 100,
PixelFormat::Jpeg => 99, PixelFormat::Jpeg => 99,
PixelFormat::Yuyv => 80, PixelFormat::Nv12 => 80,
PixelFormat::Nv12 => 75, PixelFormat::Yuyv => 75,
PixelFormat::Nv21 => 74, PixelFormat::Nv21 => 74,
PixelFormat::Yuv420 => 70, PixelFormat::Yuv420 => 70,
PixelFormat::Uyvy => 65, PixelFormat::Uyvy => 65,
@@ -159,31 +159,19 @@ impl PixelFormat {
/// Get recommended format for video encoding (WebRTC) /// Get recommended format for video encoding (WebRTC)
/// ///
/// Hardware encoding prefers: NV12 > YUYV /// Prefers NV12 over YUYV (matches [`Self::priority`])
/// Software encoding prefers: YUYV > NV12
/// ///
/// Returns None if no suitable format is available /// Returns None if no suitable format is available
pub fn recommended_for_encoding( pub fn recommended_for_encoding(
available: &[PixelFormat], available: &[PixelFormat],
is_hardware: bool, _is_hardware: bool,
) -> Option<PixelFormat> { ) -> Option<PixelFormat> {
if is_hardware {
// Hardware encoding: NV12 > YUYV
if available.contains(&PixelFormat::Nv12) { if available.contains(&PixelFormat::Nv12) {
return Some(PixelFormat::Nv12); return Some(PixelFormat::Nv12);
} }
if available.contains(&PixelFormat::Yuyv) { if available.contains(&PixelFormat::Yuyv) {
return Some(PixelFormat::Yuyv); return Some(PixelFormat::Yuyv);
} }
} else {
// Software encoding: YUYV > NV12
if available.contains(&PixelFormat::Yuyv) {
return Some(PixelFormat::Yuyv);
}
if available.contains(&PixelFormat::Nv12) {
return Some(PixelFormat::Nv12);
}
}
// Fallback to any non-compressed format // Fallback to any non-compressed format
available.iter().find(|f| !f.is_compressed()).copied() available.iter().find(|f| !f.is_compressed()).copied()
} }
@@ -280,19 +268,76 @@ impl Resolution {
self.width as u64 * self.height as u64 self.width as u64 * self.height as u64
} }
/// Common resolutions /// Common resolutions.
///
/// All constants here are 8-pixel aligned on both axes so they survive
/// the `step=8` constraint imposed by most CSI bridge drivers (e.g.
/// `rkcif` on Rockchip). If you add a new entry, make sure
/// `width % 8 == 0 && height % 8 == 0`, otherwise the driver will
/// silently round it at `S_FMT` time and the UI will report a
/// different resolution than the one the user picked.
pub const VGA: Resolution = Resolution { pub const VGA: Resolution = Resolution {
width: 640, width: 640,
height: 480, height: 480,
}; };
/// CEA-2/3 NTSC SD (480p)
pub const NTSC: Resolution = Resolution {
width: 720,
height: 480,
};
/// CEA-5/17/18 PAL SD (576p)
pub const PAL: Resolution = Resolution {
width: 720,
height: 576,
};
/// SVGA — legacy BIOS / POST output
pub const SVGA: Resolution = Resolution {
width: 800,
height: 600,
};
/// XGA — very common BIOS / server console output
pub const XGA: Resolution = Resolution {
width: 1024,
height: 768,
};
pub const HD720: Resolution = Resolution { pub const HD720: Resolution = Resolution {
width: 1280, width: 1280,
height: 720, height: 720,
}; };
/// WXGA — older laptop panels
pub const WXGA: Resolution = Resolution {
width: 1280,
height: 800,
};
/// SXGA — 4:3 / 5:4 legacy desktop displays
pub const SXGA: Resolution = Resolution {
width: 1280,
height: 1024,
};
/// 1360×768 — fallback for 8-aligned "1366×768"-like panels
pub const HDTV: Resolution = Resolution {
width: 1360,
height: 768,
};
/// UXGA — industrial / 4:3 legacy displays
pub const UXGA: Resolution = Resolution {
width: 1600,
height: 1200,
};
pub const HD1080: Resolution = Resolution { pub const HD1080: Resolution = Resolution {
width: 1920, width: 1920,
height: 1080, height: 1080,
}; };
/// WUXGA — 16:10 professional monitors
pub const WUXGA: Resolution = Resolution {
width: 1920,
height: 1200,
};
/// QHD / 2K — modern PC monitors
pub const QHD: Resolution = Resolution {
width: 2560,
height: 1440,
};
pub const UHD4K: Resolution = Resolution { pub const UHD4K: Resolution = Resolution {
width: 3840, width: 3840,
height: 2160, height: 2160,

View File

@@ -4,6 +4,7 @@
pub mod codec_constraints; pub mod codec_constraints;
pub mod convert; pub mod convert;
pub mod csi_bridge;
pub mod decoder; pub mod decoder;
pub mod device; pub mod device;
pub mod encoder; pub mod encoder;
@@ -25,6 +26,55 @@ pub use shared_video_pipeline::{
pub use stream_manager::VideoStreamManager; pub use stream_manager::VideoStreamManager;
pub use streamer::{Streamer, StreamerState}; pub use streamer::{Streamer, StreamerState};
/// Fine-grained signal status reported by CSI/HDMI bridge devices.
///
/// Only `rk_hdmirx` / `rkcif` / tc358743-class bridges can distinguish these
/// via `VIDIOC_QUERY_DV_TIMINGS` errno; USB UVC devices always report `Ok`
/// until they fail with a generic timeout.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SignalStatus {
    /// HDMI cable physically disconnected (`ENOLINK`).
    NoCable,
    /// TMDS signal present but timings cannot be locked (`ENOLCK`).
    NoSync,
    /// Timings outside of hardware capability (`ERANGE`).
    OutOfRange,
    /// Generic "no usable source" (fallback for EINVAL / EIO / unknown errnos).
    NoSignal,
}

impl SignalStatus {
    /// Stable wire string for this status (used in API payloads).
    pub fn as_str(self) -> &'static str {
        match self {
            Self::NoCable => "no_cable",
            Self::NoSync => "no_sync",
            Self::OutOfRange => "out_of_range",
            Self::NoSignal => "no_signal",
        }
    }

    /// Inverse of [`Self::as_str`]; `None` for unrecognised strings.
    /// Implemented by scanning the variants so the two methods can never
    /// drift apart.
    pub fn from_str(s: &str) -> Option<Self> {
        [Self::NoCable, Self::NoSync, Self::OutOfRange, Self::NoSignal]
            .into_iter()
            .find(|status| status.as_str() == s)
    }
}
impl From<SignalStatus> for streamer::StreamerState {
fn from(value: SignalStatus) -> Self {
match value {
SignalStatus::NoCable => streamer::StreamerState::NoCable,
SignalStatus::NoSync => streamer::StreamerState::NoSync,
SignalStatus::OutOfRange => streamer::StreamerState::OutOfRange,
SignalStatus::NoSignal => streamer::StreamerState::NoSignal,
}
}
}
pub(crate) fn is_rk_hdmirx_driver(driver: &str, card: &str) -> bool { pub(crate) fn is_rk_hdmirx_driver(driver: &str, card: &str) -> bool {
driver.eq_ignore_ascii_case("rk_hdmirx") || card.eq_ignore_ascii_case("rk_hdmirx") driver.eq_ignore_ascii_case("rk_hdmirx") || card.eq_ignore_ascii_case("rk_hdmirx")
} }
@@ -32,3 +82,13 @@ pub(crate) fn is_rk_hdmirx_driver(driver: &str, card: &str) -> bool {
pub(crate) fn is_rk_hdmirx_device(device: &device::VideoDeviceInfo) -> bool { pub(crate) fn is_rk_hdmirx_device(device: &device::VideoDeviceInfo) -> bool {
is_rk_hdmirx_driver(&device.driver, &device.card) is_rk_hdmirx_driver(&device.driver, &device.card)
} }
/// True when `driver` names the Rockchip CIF capture driver (case-insensitive
/// exact match — substrings like "rkcif-mipi" do not qualify).
pub(crate) fn is_rkcif_driver(driver: &str) -> bool {
    "rkcif".eq_ignore_ascii_case(driver)
}
/// Unified check for CSI/HDMI bridge devices (rk_hdmirx, rkcif, etc.)
/// that require special enumeration and format-selection logic.
pub(crate) fn is_csi_hdmi_bridge(device: &device::VideoDeviceInfo) -> bool {
    // Both predicates are pure string checks, so evaluation order is irrelevant.
    is_rkcif_driver(&device.driver) || is_rk_hdmirx_device(device)
}

View File

@@ -19,6 +19,7 @@
mod encoder_state; mod encoder_state;
use bytes::Bytes; use bytes::Bytes;
use parking_lot::Mutex as ParkingMutex;
use parking_lot::RwLock as ParkingRwLock; use parking_lot::RwLock as ParkingRwLock;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering}; use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering};
@@ -33,10 +34,10 @@ use self::encoder_state::{build_encoder_state, EncoderThreadState};
const AUTO_STOP_GRACE_PERIOD_SECS: u64 = 3; const AUTO_STOP_GRACE_PERIOD_SECS: u64 = 3;
/// After this many consecutive timeouts, log a prominent warning. /// After this many consecutive timeouts, log a prominent warning.
const CAPTURE_TIMEOUT_RESTART_THRESHOLD: u32 = 5; const CAPTURE_TIMEOUT_RESTART_THRESHOLD: u32 = 5;
/// After this many consecutive timeouts, actually stop the pipeline.
/// Setting this high (60 × 2 s poll = ~120 s) keeps WebRTC sessions alive
/// while the source is temporarily unavailable (e.g. resolution change/reboot).
const CAPTURE_TIMEOUT_STOP_THRESHOLD: u32 = 60; const CAPTURE_TIMEOUT_STOP_THRESHOLD: u32 = 60;
const CAPTURE_TIMEOUT_SOFT_RESTART_THRESHOLD: u32 = 3;
const CSI_BRIDGE_NOSIGNAL_INTERVAL_MS: u64 = 500;
const NOSIGNAL_POLL_MAX: Duration = Duration::from_secs(20);
/// Minimum valid frame size for capture /// Minimum valid frame size for capture
const MIN_CAPTURE_FRAME_SIZE: usize = 128; const MIN_CAPTURE_FRAME_SIZE: usize = 128;
/// Validate every JPEG frame during startup to avoid poisoning HW decoders /// Validate every JPEG frame during startup to avoid poisoning HW decoders
@@ -49,10 +50,13 @@ const ENCODE_ERROR_THROTTLE_SECS: u64 = 5;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::utils::LogThrottler; use crate::utils::LogThrottler;
use crate::video::csi_bridge::{self, ProbeResult};
use crate::video::encoder::registry::{EncoderBackend, VideoEncoderType}; use crate::video::encoder::registry::{EncoderBackend, VideoEncoderType};
use crate::video::format::{PixelFormat, Resolution}; use crate::video::format::{PixelFormat, Resolution};
use crate::video::frame::{FrameBuffer, FrameBufferPool, VideoFrame}; use crate::video::frame::{FrameBuffer, FrameBufferPool, VideoFrame};
use crate::video::v4l2r_capture::V4l2rCaptureStream; use crate::video::device::parse_bridge_kind;
use crate::video::SignalStatus;
use crate::video::v4l2r_capture::{is_source_changed_error, BridgeContext, V4l2rCaptureStream};
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))] #[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
use hwcodec::ffmpeg_hw::last_error_message as ffmpeg_hw_last_error; use hwcodec::ffmpeg_hw::last_error_message as ffmpeg_hw_last_error;
@@ -77,6 +81,39 @@ enum PipelineCmd {
SetBitrate { bitrate_kbps: u32, gop: u32 }, SetBitrate { bitrate_kbps: u32, gop: u32 },
} }
/// Snapshot of the pipeline's externally visible state, pushed to the
/// registered state notifier (e.g. to surface "no signal" / "device busy").
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct PipelineStateNotification {
    pub state: &'static str,
    pub reason: Option<&'static str>,
    pub next_retry_ms: Option<u64>,
}

impl PipelineStateNotification {
    /// Shared constructor; the named helpers below fix the `state` string.
    fn with(
        state: &'static str,
        reason: Option<&'static str>,
        next_retry_ms: Option<u64>,
    ) -> Self {
        Self {
            state,
            reason,
            next_retry_ms,
        }
    }

    /// Frames are flowing normally.
    fn streaming() -> Self {
        Self::with("streaming", None, None)
    }

    /// Source has no usable signal; `reason` carries the fine-grained status.
    fn no_signal(status: SignalStatus, next_retry_ms: Option<u64>) -> Self {
        Self::with("no_signal", Some(status.as_str()), next_retry_ms)
    }

    /// Device exists but cannot be used right now (e.g. geometry resync).
    fn device_busy(reason: &'static str) -> Self {
        Self::with("device_busy", Some(reason), None)
    }
}
/// Shared video pipeline configuration /// Shared video pipeline configuration
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct SharedVideoPipelineConfig { pub struct SharedVideoPipelineConfig {
@@ -241,6 +278,84 @@ pub struct SharedVideoPipeline {
/// Pipeline start time for PTS calculation (epoch millis, 0 = not set) /// Pipeline start time for PTS calculation (epoch millis, 0 = not set)
/// Uses AtomicI64 instead of Mutex for lock-free access /// Uses AtomicI64 instead of Mutex for lock-free access
pipeline_start_time_ms: AtomicI64, pipeline_start_time_ms: AtomicI64,
pending_sync_geometry: ParkingMutex<Option<(Resolution, PixelFormat)>>,
state_notifier: ParkingRwLock<Option<Arc<dyn Fn(PipelineStateNotification) + Send + Sync>>>,
last_state_notification: ParkingMutex<Option<PipelineStateNotification>>,
}
fn poll_bridge_subdev_after_no_signal(
bridge_ctx: &BridgeContext,
pipeline: &SharedVideoPipeline,
) {
let Some(subdev_path) = bridge_ctx.subdev_path.as_ref() else {
return;
};
let kind = bridge_ctx
.kind
.unwrap_or(csi_bridge::CsiBridgeKind::Unknown);
let deadline = Instant::now() + NOSIGNAL_POLL_MAX;
let mut poll_count: u32 = 0;
info!(
"No-signal poll: scanning subdev {:?} every {} ms (max {:?})",
subdev_path, CSI_BRIDGE_NOSIGNAL_INTERVAL_MS, NOSIGNAL_POLL_MAX
);
loop {
if !pipeline.running_flag.load(Ordering::Acquire) {
return;
}
if Instant::now() >= deadline {
info!(
"No-signal poll: stopped after {:?} ({} attempts)",
NOSIGNAL_POLL_MAX, poll_count
);
return;
}
let fd = match csi_bridge::open_subdev(subdev_path) {
Ok(f) => f,
Err(e) => {
debug!("No-signal poll: open subdev {:?} failed: {}", subdev_path, e);
std::thread::sleep(Duration::from_millis(CSI_BRIDGE_NOSIGNAL_INTERVAL_MS));
continue;
}
};
match csi_bridge::probe_signal_thread_timeout(
&fd,
kind,
csi_bridge::RK628_SUBDEV_PROBE_TIMEOUT,
) {
Some(ProbeResult::Locked(mode)) => {
info!(
"No-signal poll: locked {}x{} @ {} Hz — proceeding to capture re-open",
mode.width, mode.height, mode.pixelclock
);
return;
}
Some(other) => {
poll_count = poll_count.saturating_add(1);
if poll_count == 1 || poll_count.is_multiple_of(8) {
debug!(
"No-signal poll: attempt {} — still {:?}",
poll_count,
other.as_status()
);
}
if let Some(st) = other.as_status() {
pipeline.notify_state(PipelineStateNotification::no_signal(
st,
Some(CSI_BRIDGE_NOSIGNAL_INTERVAL_MS.saturating_add(50)),
));
}
}
None => {
poll_count = poll_count.saturating_add(1);
debug!(
"No-signal poll: attempt {} — probe ioctl timed out",
poll_count
);
}
}
std::thread::sleep(Duration::from_millis(CSI_BRIDGE_NOSIGNAL_INTERVAL_MS));
}
} }
impl SharedVideoPipeline { impl SharedVideoPipeline {
@@ -268,11 +383,43 @@ impl SharedVideoPipeline {
sequence: AtomicU64::new(0), sequence: AtomicU64::new(0),
keyframe_requested: AtomicBool::new(false), keyframe_requested: AtomicBool::new(false),
pipeline_start_time_ms: AtomicI64::new(0), pipeline_start_time_ms: AtomicI64::new(0),
pending_sync_geometry: ParkingMutex::new(None),
state_notifier: ParkingRwLock::new(None),
last_state_notification: ParkingMutex::new(None),
}); });
Ok(pipeline) Ok(pipeline)
} }
/// Consume the pending resolution/format resync request, if any.
/// `.take()` leaves `None` behind so each request is handled exactly once.
pub fn take_pending_sync_geometry(&self) -> Option<(Resolution, PixelFormat)> {
    let mut slot = self.pending_sync_geometry.lock();
    slot.take()
}
/// Install (or clear, with `None`) the callback invoked on every pipeline
/// state transition reported via `notify_state`.
pub fn set_state_notifier(
    &self,
    notifier: Option<Arc<dyn Fn(PipelineStateNotification) + Send + Sync>>,
) {
    let mut slot = self.state_notifier.write();
    *slot = notifier;
}
/// Forward a state change to the registered notifier, deduplicating
/// consecutive identical notifications so subscribers only see edges.
fn notify_state(&self, notification: PipelineStateNotification) {
    // Scope the dedup lock so it is released before invoking the callback.
    {
        let mut last = self.last_state_notification.lock();
        if last.as_ref() == Some(&notification) {
            return;
        }
        *last = Some(notification);
    }
    // Clone the Arc out of the read guard so the callback runs lock-free.
    let callback = self.state_notifier.read().clone();
    if let Some(cb) = callback {
        cb(notification);
    }
}
/// Subscribe to encoded frames /// Subscribe to encoded frames
pub fn subscribe(&self) -> mpsc::Receiver<Arc<EncodedVideoFrame>> { pub fn subscribe(&self) -> mpsc::Receiver<Arc<EncodedVideoFrame>> {
let (tx, rx) = mpsc::channel(4); let (tx, rx) = mpsc::channel(4);
@@ -393,13 +540,68 @@ impl SharedVideoPipeline {
device_path: std::path::PathBuf, device_path: std::path::PathBuf,
buffer_count: u32, buffer_count: u32,
_jpeg_quality: u8, _jpeg_quality: u8,
subdev_path: Option<std::path::PathBuf>,
bridge_kind: Option<String>,
) -> Result<()> { ) -> Result<()> {
if *self.running_rx.borrow() { if *self.running_rx.borrow() {
warn!("Pipeline already running"); warn!("Pipeline already running");
return Ok(()); return Ok(());
} }
let config = self.config.read().await.clone(); let mut config = self.config.read().await.clone();
{
let mut last = self.last_state_notification.lock();
*last = None;
}
// Pre-open for DV negotiation; align encoder to probed size.
let bridge_ctx_probe = BridgeContext::from_parts(
subdev_path.clone(),
parse_bridge_kind(bridge_kind.as_deref()),
);
let preopened: Option<V4l2rCaptureStream> =
match V4l2rCaptureStream::open_with_bridge(
&device_path,
config.resolution,
config.input_format,
config.fps,
buffer_count.max(1),
Duration::from_secs(2),
bridge_ctx_probe,
) {
Ok(s) => {
let negotiated_res = s.resolution();
let negotiated_fmt = s.format();
if negotiated_res != config.resolution || negotiated_fmt != config.input_format {
info!(
"Negotiated capture {}x{} {:?} (configured {}x{} {:?}) — aligning encoder to source",
negotiated_res.width,
negotiated_res.height,
negotiated_fmt,
config.resolution.width,
config.resolution.height,
config.input_format
);
config.resolution = negotiated_res;
config.input_format = negotiated_fmt;
*self.config.write().await = config.clone();
}
Some(s)
}
Err(AppError::CaptureNoSignal { kind }) => {
debug!(
"Pre-probe: no signal — encoder uses configured geometry until capture opens"
);
let status = SignalStatus::from_str(&kind).unwrap_or(SignalStatus::NoSignal);
self.notify_state(PipelineStateNotification::no_signal(
status,
Some(Duration::from_secs(2).as_millis() as u64),
));
None
}
Err(e) => return Err(e),
};
let mut encoder_state = build_encoder_state(&config)?; let mut encoder_state = build_encoder_state(&config)?;
let _ = self.running.send(true); let _ = self.running.send(true);
self.running_flag.store(true, Ordering::Release); self.running_flag.store(true, Ordering::Release);
@@ -499,16 +701,74 @@ impl SharedVideoPipeline {
let latest_frame = latest_frame.clone(); let latest_frame = latest_frame.clone();
let frame_seq_tx = frame_seq_tx.clone(); let frame_seq_tx = frame_seq_tx.clone();
let buffer_pool = buffer_pool.clone(); let buffer_pool = buffer_pool.clone();
let bridge_ctx = BridgeContext::from_parts(
subdev_path,
parse_bridge_kind(bridge_kind.as_deref()),
);
std::thread::spawn(move || { std::thread::spawn(move || {
let mut stream = match V4l2rCaptureStream::open( let mut stream: Option<V4l2rCaptureStream> = None;
let mut initial_geometry: Option<(Resolution, PixelFormat)> = None;
let mut resolution = config.resolution;
let mut pixel_format = config.input_format;
let mut stride: u32 = 0;
match preopened {
Some(s) => {
resolution = s.resolution();
pixel_format = s.format();
stride = s.stride();
initial_geometry = Some((resolution, pixel_format));
stream = Some(s);
}
None => {
match V4l2rCaptureStream::open_with_bridge(
&device_path, &device_path,
config.resolution, config.resolution,
config.input_format, config.input_format,
config.fps, config.fps,
buffer_count.max(1), buffer_count.max(1),
Duration::from_secs(2), Duration::from_secs(2),
bridge_ctx.clone(),
) { ) {
Ok(stream) => stream, Ok(s) => {
resolution = s.resolution();
pixel_format = s.format();
stride = s.stride();
if resolution != config.resolution
|| pixel_format != config.input_format
{
info!(
"First capture open negotiated {}x{} {:?} but encoder expects {}x{} {:?} — stopping for dimension resync",
resolution.width,
resolution.height,
pixel_format,
config.resolution.width,
config.resolution.height,
config.input_format
);
pipeline.notify_state(PipelineStateNotification::device_busy(
"config_changing",
));
*pipeline.pending_sync_geometry.lock() =
Some((resolution, pixel_format));
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(1);
return;
}
initial_geometry = Some((resolution, pixel_format));
stream = Some(s);
}
Err(AppError::CaptureNoSignal { kind }) => {
warn!(
"Capture stream open reports no signal ({}) — pipeline will retry",
kind
);
pipeline.notify_state(PipelineStateNotification::no_signal(
SignalStatus::from_str(&kind).unwrap_or(SignalStatus::NoSignal),
Some(CSI_BRIDGE_NOSIGNAL_INTERVAL_MS),
));
}
Err(e) => { Err(e) => {
error!("Failed to open capture stream: {}", e); error!("Failed to open capture stream: {}", e);
let _ = pipeline.running.send(false); let _ = pipeline.running.send(false);
@@ -516,11 +776,48 @@ impl SharedVideoPipeline {
let _ = frame_seq_tx.send(1); let _ = frame_seq_tx.send(1);
return; return;
} }
}; }
}
}
let resolution = stream.resolution(); /// Helper: try to (re)open the capture stream. Returns:
let pixel_format = stream.format(); /// * `Ok(Some(stream))` — opened successfully
let stride = stream.stride(); /// * `Ok(None)` — CaptureNoSignal, keep retrying later
/// * `Err(())` — fatal (stop pipeline)
enum OpenResult {
Opened(V4l2rCaptureStream),
NoSignal(SignalStatus),
Fatal,
}
fn open_or_retry(
device_path: &std::path::Path,
config: &SharedVideoPipelineConfig,
buffer_count: u32,
bridge_ctx: BridgeContext,
) -> OpenResult {
match V4l2rCaptureStream::open_with_bridge(
device_path,
config.resolution,
config.input_format,
config.fps,
buffer_count.max(1),
Duration::from_secs(2),
bridge_ctx,
) {
Ok(s) => OpenResult::Opened(s),
Err(AppError::CaptureNoSignal { kind }) => {
debug!("Capture soft-restart: still no signal ({})", kind);
OpenResult::NoSignal(
SignalStatus::from_str(&kind).unwrap_or(SignalStatus::NoSignal),
)
}
Err(e) => {
error!("Capture soft-restart failed: {}", e);
OpenResult::Fatal
}
}
}
let mut no_subscribers_since: Option<Instant> = None; let mut no_subscribers_since: Option<Instant> = None;
let grace_period = Duration::from_secs(AUTO_STOP_GRACE_PERIOD_SECS); let grace_period = Duration::from_secs(AUTO_STOP_GRACE_PERIOD_SECS);
@@ -569,16 +866,243 @@ impl SharedVideoPipeline {
no_subscribers_since = None; no_subscribers_since = None;
} }
// ── No usable stream? Try to (re)open, back off on failure. ──
if stream.is_none() {
match open_or_retry(&device_path, &config, buffer_count, bridge_ctx.clone()) {
OpenResult::Opened(new_stream) => {
let new_res = new_stream.resolution();
let new_fmt = new_stream.format();
let new_stride = new_stream.stride();
// Pre-probe was skipped (no signal at pipeline start) but the
// encoder was sized to saved settings — if DV timings now
// disagree, we cannot encode until WebRTC resyncs dimensions.
if initial_geometry.is_none()
&& (new_res != config.resolution || new_fmt != config.input_format)
{
info!(
"Deferred capture open is {}x{} {:?} but encoder expects {}x{} {:?} — stopping for dimension resync",
new_res.width,
new_res.height,
new_fmt,
config.resolution.width,
config.resolution.height,
config.input_format
);
pipeline.notify_state(PipelineStateNotification::device_busy(
"config_changing",
));
*pipeline.pending_sync_geometry.lock() = Some((new_res, new_fmt));
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(sequence.wrapping_add(1));
break;
}
// If this is the very first successful open,
// record it and run normally. Otherwise check
// for a geometry change — the encoder thread
// is pinned to the original geometry, so a
// change requires tearing the pipeline down
// and letting the upper layer rebuild.
match initial_geometry {
Some((orig_res, orig_fmt))
if orig_res != new_res || orig_fmt != new_fmt =>
{
info!(
"Capture soft-restart detected geometry change \
{:?}/{:?} -> {:?}/{:?}, stopping pipeline for \
encoder rebuild",
orig_res, orig_fmt, new_res, new_fmt
);
pipeline.notify_state(
PipelineStateNotification::device_busy(
"config_changing",
),
);
*pipeline.pending_sync_geometry.lock() =
Some((new_res, new_fmt));
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(sequence.wrapping_add(1));
break;
}
_ => {}
}
if initial_geometry.is_none() {
initial_geometry = Some((new_res, new_fmt));
}
resolution = new_res;
pixel_format = new_fmt;
stride = new_stride;
stream = Some(new_stream);
consecutive_timeouts = 0;
info!(
"Capture stream (re)opened: {}x{} {:?} stride={}",
resolution.width, resolution.height, pixel_format, stride
);
}
OpenResult::NoSignal(status) => {
consecutive_timeouts =
consecutive_timeouts.saturating_add(1);
if consecutive_timeouts >= CAPTURE_TIMEOUT_STOP_THRESHOLD {
warn!(
"Capture soft-restart gave up after {} attempts, \
stopping pipeline",
consecutive_timeouts
);
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(sequence.wrapping_add(1));
break;
}
let wait_ms = CSI_BRIDGE_NOSIGNAL_INTERVAL_MS;
pipeline.notify_state(PipelineStateNotification::no_signal(
status,
Some(wait_ms),
));
std::thread::sleep(Duration::from_millis(wait_ms));
continue;
}
OpenResult::Fatal => {
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(sequence.wrapping_add(1));
break;
}
}
}
let mut owned = buffer_pool.take(MIN_CAPTURE_FRAME_SIZE); let mut owned = buffer_pool.take(MIN_CAPTURE_FRAME_SIZE);
let meta = match stream.next_into(&mut owned) { let next_result = stream
.as_mut()
.expect("stream is Some above")
.next_into(&mut owned);
let meta = match next_result {
Ok(meta) => { Ok(meta) => {
consecutive_timeouts = 0; consecutive_timeouts = 0;
pipeline.notify_state(PipelineStateNotification::streaming());
meta meta
} }
Err(e) => { Err(e) => {
// V4L2 driver reported V4L2_EVENT_SOURCE_CHANGE.
// The current capture is effectively invalidated:
// drop the stream so the next iteration re-opens
// via a fresh DV_TIMINGS probe. This is the fast
// path for source-side resolution switches on
// RK628 / rkcif — sub-second recovery vs. the ~8 s
// timeout fallback.
if is_source_changed_error(&e) {
info!(
"Capture reported SOURCE_CHANGE — \
dropping stream for immediate re-open"
);
consecutive_timeouts = 0;
stream = None;
continue;
}
if e.kind() == std::io::ErrorKind::TimedOut { if e.kind() == std::io::ErrorKind::TimedOut {
consecutive_timeouts = consecutive_timeouts.saturating_add(1); consecutive_timeouts = consecutive_timeouts.saturating_add(1);
let probe_result = {
let sr = stream.as_mut().expect("stream is Some above");
sr.probe_bridge_signal_with_timeout(
csi_bridge::RK628_SUBDEV_PROBE_TIMEOUT,
)
};
match probe_result {
Some(ProbeResult::Locked(mode)) => {
let probed_resolution =
Resolution::new(mode.width, mode.height);
if probed_resolution == resolution {
info!(
"Capture timeout but bridge is locked at {}x{} — soft-restarting capture without encoder rebuild",
probed_resolution.width,
probed_resolution.height
);
} else {
info!(
"Capture timeout probe detected geometry change {}x{} -> {}x{} — soft-restarting capture for encoder rebuild",
resolution.width,
resolution.height,
probed_resolution.width,
probed_resolution.height
);
pipeline.notify_state(
PipelineStateNotification::device_busy(
"config_changing",
),
);
}
consecutive_timeouts = 0;
stream = None;
continue;
}
Some(other) => {
let status =
other.as_status().unwrap_or(SignalStatus::NoSignal);
warn!(
"Capture timeout probe reports no signal ({})",
status.as_str()
);
pipeline.notify_state(
PipelineStateNotification::no_signal(
status,
Some(Duration::from_secs(2).as_millis() as u64),
),
);
// Drop capture so RK628 / rkcif can release the queue,
// then poll subdev on a fresh fd until timings lock (or
// timeout). Avoids sitting on DQBUF 2s × N with a dead
// stream while `v4l2-ctl --query-dv-timings` already shows
// a real mode.
stream = None;
consecutive_timeouts = 0;
if bridge_ctx.has_subdev()
&& matches!(
other,
ProbeResult::NoSignal
| ProbeResult::NoSync
| ProbeResult::OutOfRange
)
{
poll_bridge_subdev_after_no_signal(
&bridge_ctx,
&pipeline,
);
}
continue;
}
None if bridge_ctx.has_subdev() => {
warn!(
"DV-timings probe timed out or failed — forcing stream re-open (RK628 / rkcif)"
);
consecutive_timeouts = 0;
stream = None;
poll_bridge_subdev_after_no_signal(&bridge_ctx, &pipeline);
continue;
}
None => {
warn!("Capture timeout - no signal?"); warn!("Capture timeout - no signal?");
}
}
if consecutive_timeouts
>= CAPTURE_TIMEOUT_SOFT_RESTART_THRESHOLD
{
// Drop the stream so the next loop
// iteration re-opens via the DV-timings
// probe. This catches source-side
// resolution changes in ~6 s without
// taking the encoder down.
warn!(
"Capture timed out {} consecutive times, \
closing stream for soft-restart",
consecutive_timeouts
);
stream = None;
continue;
}
if consecutive_timeouts == CAPTURE_TIMEOUT_RESTART_THRESHOLD { if consecutive_timeouts == CAPTURE_TIMEOUT_RESTART_THRESHOLD {
warn!( warn!(
@@ -599,6 +1123,20 @@ impl SharedVideoPipeline {
} }
} else { } else {
consecutive_timeouts = 0; consecutive_timeouts = 0;
// EIO (5) / EPIPE (32) in next_into generally
// mean the source glitched mid-stream.
// Tear down the stream and let the open loop
// re-probe via DV_TIMINGS — same logic as
// timeouts, just triggered earlier.
if matches!(e.raw_os_error(), Some(5) | Some(32)) {
warn!(
"Capture transient error ({}), closing stream for \
soft-restart",
e
);
stream = None;
continue;
}
let key = classify_capture_error(&e); let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) { if capture_error_throttler.should_log(&key) {
let suppressed = let suppressed =

View File

@@ -38,8 +38,8 @@ use crate::hid::HidController;
use crate::stream::MjpegStreamHandler; use crate::stream::MjpegStreamHandler;
use crate::video::codec_constraints::StreamCodecConstraints; use crate::video::codec_constraints::StreamCodecConstraints;
use crate::video::format::{PixelFormat, Resolution}; use crate::video::format::{PixelFormat, Resolution};
use crate::video::is_rk_hdmirx_device; use crate::video::is_csi_hdmi_bridge;
use crate::video::streamer::{Streamer, StreamerState}; use crate::video::streamer::{Streamer, StreamerStats, StreamerState};
use crate::webrtc::WebRtcStreamer; use crate::webrtc::WebRtcStreamer;
/// Video stream manager configuration /// Video stream manager configuration
@@ -353,8 +353,17 @@ impl VideoStreamManager {
.update_video_config(resolution, format, fps) .update_video_config(resolution, format, fps)
.await; .await;
if let Some(device_path) = device_path { if let Some(device_path) = device_path {
// Resolve the paired subdev so the WebRTC pipeline can run the
// RK628 STREAMON gate + SOURCE_CHANGE polling identically to the
// MJPEG path. See `csi_bridge::discover_subdev_for_video`.
let (subdev_path, bridge_kind) = self
.streamer
.current_device()
.await
.map(|d| (d.subdev_path.clone(), d.bridge_kind.clone()))
.unwrap_or((None, None));
self.webrtc_streamer self.webrtc_streamer
.set_capture_device(device_path, jpeg_quality) .set_capture_device(device_path, jpeg_quality, subdev_path, bridge_kind)
.await; .await;
} else { } else {
warn!("No capture device configured while syncing WebRTC capture source"); warn!("No capture device configured while syncing WebRTC capture source");
@@ -431,7 +440,7 @@ impl VideoStreamManager {
device.formats.iter().map(|f| f.format).collect(); device.formats.iter().map(|f| f.format).collect();
// If current format is not MJPEG and device supports MJPEG, switch to it // If current format is not MJPEG and device supports MJPEG, switch to it
if !is_rk_hdmirx_device(&device) if !is_csi_hdmi_bridge(&device)
&& current_format != PixelFormat::Mjpeg && current_format != PixelFormat::Mjpeg
&& available_formats.contains(&PixelFormat::Mjpeg) && available_formats.contains(&PixelFormat::Mjpeg)
{ {
@@ -550,8 +559,14 @@ impl VideoStreamManager {
} }
if let Some(device_path) = device_path { if let Some(device_path) = device_path {
info!("Configuring direct capture for WebRTC after config change"); info!("Configuring direct capture for WebRTC after config change");
let (subdev_path, bridge_kind) = self
.streamer
.current_device()
.await
.map(|d| (d.subdev_path.clone(), d.bridge_kind.clone()))
.unwrap_or((None, None));
self.webrtc_streamer self.webrtc_streamer
.set_capture_device(device_path, jpeg_quality) .set_capture_device(device_path, jpeg_quality, subdev_path, bridge_kind)
.await; .await;
} else { } else {
warn!("No capture device configured for WebRTC after config change"); warn!("No capture device configured for WebRTC after config change");
@@ -610,7 +625,7 @@ impl VideoStreamManager {
/// Get video device info for device_info event /// Get video device info for device_info event
pub async fn get_video_info(&self) -> VideoDeviceInfo { pub async fn get_video_info(&self) -> VideoDeviceInfo {
let stats = self.streamer.stats().await; let stats = self.stats().await;
let state = self.streamer.state().await; let state = self.streamer.state().await;
let device = self.streamer.current_device().await; let device = self.streamer.current_device().await;
let mode = self.mode.read().await.clone(); let mode = self.mode.read().await.clone();
@@ -636,7 +651,7 @@ impl VideoStreamManager {
config_changing: self.streamer.is_config_changing(), config_changing: self.streamer.is_config_changing(),
error: if state == StreamerState::Error { error: if state == StreamerState::Error {
Some("Video stream error".to_string()) Some("Video stream error".to_string())
} else if state == StreamerState::NoSignal { } else if state.is_no_signal_like() {
Some("No video signal".to_string()) Some("No video signal".to_string())
} else { } else {
None None
@@ -687,8 +702,24 @@ impl VideoStreamManager {
} }
/// Get streamer statistics /// Get streamer statistics
pub async fn stats(&self) -> crate::video::streamer::StreamerStats { ///
self.streamer.stats().await /// In WebRTC mode, resolution/format/target_fps/fps reflect
/// [`WebRtcStreamer`]'s config (updated after DV negotiation / geometry sync),
/// not only the MJPEG [`Streamer`] snapshot — so `/api/stream/status` matches
/// what the shared encoder actually uses.
pub async fn stats(&self) -> StreamerStats {
let mut s = self.streamer.stats().await;
if *self.mode.read().await == StreamMode::WebRTC {
let (res, fmt, tgt_fps) = self.webrtc_streamer.current_video_geometry().await;
s.format = Some(fmt.to_string());
s.resolution = Some((res.width, res.height));
s.target_fps = tgt_fps;
if let Some(ps) = self.webrtc_streamer.pipeline_stats().await {
s.fps = ps.current_fps;
}
s.clients = self.webrtc_streamer.session_count().await as u64;
}
s
} }
/// Check if config is being changed /// Check if config is being changed

View File

@@ -5,21 +5,22 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, AtomicU32, Ordering}; use std::sync::atomic::{AtomicBool, AtomicU32, AtomicU64, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use tracing::{debug, error, info, trace, warn}; use tracing::{debug, error, info, trace, warn};
use super::device::{enumerate_devices, find_best_device, VideoDevice, VideoDeviceInfo}; use super::csi_bridge;
use super::device::{enumerate_devices, find_best_device, parse_bridge_kind, VideoDevice, VideoDeviceInfo};
use super::format::{PixelFormat, Resolution}; use super::format::{PixelFormat, Resolution};
use super::frame::{FrameBuffer, FrameBufferPool, VideoFrame}; use super::frame::{FrameBuffer, FrameBufferPool, VideoFrame};
use super::is_rk_hdmirx_device; use super::is_csi_hdmi_bridge;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::events::{EventBus, SystemEvent}; use crate::events::{EventBus, SystemEvent};
use crate::stream::MjpegStreamHandler; use crate::stream::MjpegStreamHandler;
use crate::utils::LogThrottler; use crate::utils::LogThrottler;
use crate::video::v4l2r_capture::V4l2rCaptureStream; use crate::video::v4l2r_capture::{is_source_changed_error, BridgeContext, V4l2rCaptureStream};
/// Minimum valid frame size for capture /// Minimum valid frame size for capture
const MIN_CAPTURE_FRAME_SIZE: usize = 128; const MIN_CAPTURE_FRAME_SIZE: usize = 128;
@@ -53,7 +54,7 @@ impl Default for StreamerConfig {
} }
} }
/// Streamer state /// Fine-grained capture state; [`external_state`] maps to UI wire names.
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StreamerState { pub enum StreamerState {
/// Not initialized /// Not initialized
@@ -62,14 +63,83 @@ pub enum StreamerState {
Ready, Ready,
/// Actively streaming /// Actively streaming
Streaming, Streaming,
/// No video signal /// No video signal (generic / source not detected)
NoSignal, NoSignal,
/// HDMI cable not connected (DV_RX_POWER_PRESENT = false or ENOLINK)
NoCable,
/// TMDS signal present but timings not locked (ENOLCK)
NoSync,
/// Source timings are outside of what the capture hardware supports (ERANGE)
OutOfRange,
/// Error occurred /// Error occurred
Error, Error,
/// Device was lost (unplugged) /// Device was lost (unplugged)
DeviceLost, DeviceLost,
/// Device is being recovered (reconnecting) /// Device is being recovered (reconnecting)
Recovering, Recovering,
Busy,
}
impl StreamerState {
pub fn as_str(self) -> &'static str {
match self {
StreamerState::Uninitialized => "uninitialized",
StreamerState::Ready => "ready",
StreamerState::Streaming => "streaming",
StreamerState::NoSignal => "no_signal",
StreamerState::NoCable => "no_cable",
StreamerState::NoSync => "no_sync",
StreamerState::OutOfRange => "out_of_range",
StreamerState::Error => "error",
StreamerState::DeviceLost => "device_lost",
StreamerState::Recovering => "recovering",
StreamerState::Busy => "device_busy",
}
}
/// Parse a state string as produced by [`StreamerState::as_str`].
pub fn from_str(s: &str) -> Option<Self> {
Some(match s {
"uninitialized" => StreamerState::Uninitialized,
"ready" => StreamerState::Ready,
"streaming" => StreamerState::Streaming,
"no_signal" => StreamerState::NoSignal,
"no_cable" => StreamerState::NoCable,
"no_sync" => StreamerState::NoSync,
"out_of_range" => StreamerState::OutOfRange,
"error" => StreamerState::Error,
"device_lost" => StreamerState::DeviceLost,
"recovering" => StreamerState::Recovering,
"device_busy" | "busy" => StreamerState::Busy,
_ => return None,
})
}
pub fn is_no_signal_like(self) -> bool {
matches!(
self,
StreamerState::NoSignal
| StreamerState::NoCable
| StreamerState::NoSync
| StreamerState::OutOfRange
)
}
pub fn external_state(self) -> (&'static str, Option<&'static str>) {
match self {
StreamerState::Streaming => ("streaming", None),
StreamerState::Ready => ("ready", None),
StreamerState::Uninitialized => ("uninitialized", None),
StreamerState::Error => ("error", None),
StreamerState::NoSignal => ("no_signal", Some("no_signal")),
StreamerState::NoCable => ("no_signal", Some("no_cable")),
StreamerState::NoSync => ("no_signal", Some("no_sync")),
StreamerState::OutOfRange => ("no_signal", Some("out_of_range")),
StreamerState::DeviceLost => ("device_lost", Some("device_lost")),
StreamerState::Recovering => ("device_lost", Some("recovering")),
StreamerState::Busy => ("device_busy", None),
}
}
} }
/// Video streamer service /// Video streamer service
@@ -85,8 +155,8 @@ pub struct Streamer {
current_fps: AtomicU32, current_fps: AtomicU32,
/// Event bus for broadcasting state changes (optional) /// Event bus for broadcasting state changes (optional)
events: RwLock<Option<Arc<EventBus>>>, events: RwLock<Option<Arc<EventBus>>>,
/// Last published state (for change detection) last_published_state: RwLock<Option<(String, Option<String>, Option<u64>)>>,
last_published_state: RwLock<Option<StreamerState>>, next_retry_ms: AtomicU64,
/// Flag to indicate config is being changed (prevents auto-start during config change) /// Flag to indicate config is being changed (prevents auto-start during config change)
config_changing: std::sync::atomic::AtomicBool, config_changing: std::sync::atomic::AtomicBool,
/// Flag to indicate background tasks (stats, cleanup, monitor) have been started /// Flag to indicate background tasks (stats, cleanup, monitor) have been started
@@ -117,6 +187,7 @@ impl Streamer {
current_fps: AtomicU32::new(0), current_fps: AtomicU32::new(0),
events: RwLock::new(None), events: RwLock::new(None),
last_published_state: RwLock::new(None), last_published_state: RwLock::new(None),
next_retry_ms: AtomicU64::new(0),
config_changing: std::sync::atomic::AtomicBool::new(false), config_changing: std::sync::atomic::AtomicBool::new(false),
background_tasks_started: std::sync::atomic::AtomicBool::new(false), background_tasks_started: std::sync::atomic::AtomicBool::new(false),
recovery_retry_count: std::sync::atomic::AtomicU32::new(0), recovery_retry_count: std::sync::atomic::AtomicU32::new(0),
@@ -140,6 +211,7 @@ impl Streamer {
current_fps: AtomicU32::new(0), current_fps: AtomicU32::new(0),
events: RwLock::new(None), events: RwLock::new(None),
last_published_state: RwLock::new(None), last_published_state: RwLock::new(None),
next_retry_ms: AtomicU64::new(0),
config_changing: std::sync::atomic::AtomicBool::new(false), config_changing: std::sync::atomic::AtomicBool::new(false),
background_tasks_started: std::sync::atomic::AtomicBool::new(false), background_tasks_started: std::sync::atomic::AtomicBool::new(false),
recovery_retry_count: std::sync::atomic::AtomicU32::new(0), recovery_retry_count: std::sync::atomic::AtomicU32::new(0),
@@ -149,7 +221,6 @@ impl Streamer {
}) })
} }
/// Get current state as SystemEvent
pub async fn current_state_event(&self) -> SystemEvent { pub async fn current_state_event(&self) -> SystemEvent {
let state = *self.state.read().await; let state = *self.state.read().await;
let device = self let device = self
@@ -158,21 +229,21 @@ impl Streamer {
.await .await
.as_ref() .as_ref()
.map(|d| d.path.display().to_string()); .map(|d| d.path.display().to_string());
let (external, reason) = state.external_state();
let next = self.next_retry_ms.load(Ordering::Relaxed);
SystemEvent::StreamStateChanged { SystemEvent::StreamStateChanged {
state: match state { state: external.to_string(),
StreamerState::Uninitialized => "uninitialized".to_string(),
StreamerState::Ready => "ready".to_string(),
StreamerState::Streaming => "streaming".to_string(),
StreamerState::NoSignal => "no_signal".to_string(),
StreamerState::Error => "error".to_string(),
StreamerState::DeviceLost => "device_lost".to_string(),
StreamerState::Recovering => "recovering".to_string(),
},
device, device,
reason: reason.map(|s| s.to_string()),
next_retry_ms: if next == 0 { None } else { Some(next) },
} }
} }
pub fn set_next_retry_ms(&self, ms: u64) {
self.next_retry_ms.store(ms, Ordering::Relaxed);
}
/// Set event bus for broadcasting state changes /// Set event bus for broadcasting state changes
pub async fn set_event_bus(&self, events: Arc<EventBus>) { pub async fn set_event_bus(&self, events: Arc<EventBus>) {
*self.events.write().await = Some(events); *self.events.write().await = Some(events);
@@ -264,6 +335,13 @@ impl Streamer {
}) })
.await; .await;
// Surface a "device busy" state so the frontend can render a
// "please wait" overlay for the (short) duration of the config
// change. The capture loop itself will flip to `Streaming` once
// the first frame of the new geometry arrives.
*self.state.write().await = StreamerState::Busy;
self.publish_event(self.current_state_event().await).await;
let devices = enumerate_devices()?; let devices = enumerate_devices()?;
let device = devices let device = devices
.into_iter() .into_iter()
@@ -369,12 +447,36 @@ impl Streamer {
device: &VideoDeviceInfo, device: &VideoDeviceInfo,
preferred: PixelFormat, preferred: PixelFormat,
) -> Result<PixelFormat> { ) -> Result<PixelFormat> {
if is_rk_hdmirx_device(device) { if is_csi_hdmi_bridge(device) {
return device if !device.has_signal {
info!(
"select_format: CSI bridge no signal, keeping preferred {:?}",
preferred
);
return Ok(preferred);
}
// Prefer the user-configured format if the device actually supports
// it; otherwise fall back to the highest-priority format (formats
// are pre-sorted by PixelFormat::priority(), e.g. NV12 > YUYV for rkcif/rk_hdmirx).
if device.formats.iter().any(|f| f.format == preferred) {
info!(
"select_format: CSI bridge with signal, using preferred {:?}",
preferred
);
return Ok(preferred);
}
let fmt = device
.formats .formats
.first() .first()
.map(|f| f.format) .map(|f| f.format)
.ok_or_else(|| AppError::VideoError("No supported formats found".to_string())); .ok_or_else(|| AppError::VideoError("No supported formats found".to_string()))?;
info!(
"select_format: CSI bridge with signal, preferred {:?} unavailable, selected {:?} from {:?}",
preferred,
fmt,
device.formats.iter().map(|f| f.format).collect::<Vec<_>>()
);
return Ok(fmt);
} }
// Check if preferred format is available // Check if preferred format is available
@@ -397,18 +499,32 @@ impl Streamer {
format: &PixelFormat, format: &PixelFormat,
preferred: Resolution, preferred: Resolution,
) -> Result<Resolution> { ) -> Result<Resolution> {
if is_csi_hdmi_bridge(device) && !device.has_signal {
info!(
"select_resolution: CSI bridge no signal, keeping preferred {}",
preferred
);
return Ok(preferred);
}
let format_info = device let format_info = device
.formats .formats
.iter() .iter()
.find(|f| &f.format == format) .find(|f| &f.format == format)
.ok_or_else(|| AppError::VideoError("Format not found".to_string()))?; .ok_or_else(|| AppError::VideoError("Format not found".to_string()))?;
if is_rk_hdmirx_device(device) { if is_csi_hdmi_bridge(device) {
return Ok(format_info let res = format_info
.resolutions .resolutions
.first() .first()
.map(|r| r.resolution()) .map(|r| r.resolution())
.unwrap_or(preferred)); .unwrap_or(preferred);
info!(
"select_resolution: CSI bridge with signal, selected {} (preferred {}, available {:?})",
res, preferred,
format_info.resolutions.iter().map(|r| format!("{}x{}", r.width, r.height)).collect::<Vec<_>>()
);
return Ok(res);
} }
// Check if preferred resolution is available // Check if preferred resolution is available
@@ -451,7 +567,8 @@ impl Streamer {
tokio::time::sleep(std::time::Duration::from_millis(100)).await; tokio::time::sleep(std::time::Duration::from_millis(100)).await;
let state = *self.state.read().await; let state = *self.state.read().await;
match state { match state {
StreamerState::Streaming | StreamerState::NoSignal => return Ok(()), StreamerState::Streaming => return Ok(()),
s if s.is_no_signal_like() => return Ok(()),
StreamerState::Error | StreamerState::DeviceLost => { StreamerState::Error | StreamerState::DeviceLost => {
return Err(AppError::VideoError( return Err(AppError::VideoError(
"Failed to restart capture".to_string(), "Failed to restart capture".to_string(),
@@ -631,14 +748,26 @@ impl Streamer {
const RETRY_DELAY_MS: u64 = 200; const RETRY_DELAY_MS: u64 = 200;
const IDLE_STOP_DELAY_SECS: u64 = 5; const IDLE_STOP_DELAY_SECS: u64 = 5;
const BUFFER_COUNT: u32 = 2; const BUFFER_COUNT: u32 = 2;
/// After this many seconds without signal, close+re-open the device. /// Initial back-off after signal loss before the first soft restart.
const NOSIGNAL_SOFT_RESTART_SECS: u64 = 8; ///
/// Placeholder frame re-send interval while in NoSignal state (iterations of 100 ms). /// PiKVM/ustreamer drops to sub-second recovery because it subscribes to
const NOSIGNAL_PLACEHOLDER_INTERVAL: u32 = 10; // every ~1 s /// `V4L2_EVENT_SOURCE_CHANGE`; lacking that (for now), we bound how long
/// the user has to stare at a placeholder after a source-side resolution
/// change by driving a soft-restart at 1 s, then 2 s, 4 s, …, 8 s.
const NOSIGNAL_SOFT_RESTART_INITIAL_SECS: u64 = 1;
const NOSIGNAL_SOFT_RESTART_MAX_SECS: u64 = 8;
let handle = tokio::runtime::Handle::current(); let handle = tokio::runtime::Handle::current();
let mut last_state = StreamerState::Streaming; let mut last_state = StreamerState::Streaming;
// Compute the current soft-restart back-off window (in seconds)
// for the exponential ladder 1 s → 2 s → 4 s → 8 s (capped).
let backoff_secs = |count: u32| -> u64 {
NOSIGNAL_SOFT_RESTART_INITIAL_SECS
.saturating_mul(2u64.pow(count.min(3)))
.min(NOSIGNAL_SOFT_RESTART_MAX_SECS)
};
let mut set_state = |new_state: StreamerState| { let mut set_state = |new_state: StreamerState| {
if new_state != last_state { if new_state != last_state {
handle.block_on(async { handle.block_on(async {
@@ -649,9 +778,32 @@ impl Streamer {
} }
}; };
// Helper: drop the MJPEG online flag so any connected HTTP clients
// exit their streaming tasks cleanly. Replaces the old "push a
// placeholder JPEG every second" scheme — the frontend now renders
// its own overlay from `stream.state_changed` and doesn't need a
// fake image to keep the connection alive. Idempotent.
let go_offline = || {
self.mjpeg_handler.set_offline();
};
// Helper: record the back-off window on the streamer so it rides
// along on the next `stream.state_changed` event; cleared when we
// return to `Streaming`.
let set_retry = |ms: u64| {
self.next_retry_ms.store(ms, Ordering::Relaxed);
};
// How many soft-restart cycles have been attempted (for exponential back-off). // How many soft-restart cycles have been attempted (for exponential back-off).
let mut no_signal_restart_count: u32 = 0; let mut no_signal_restart_count: u32 = 0;
// Last (resolution, format, fps) combination for which we emitted a
// `StreamConfigApplied` event. Used to de-duplicate the event across
// soft-restarts that produce the exact same geometry (e.g. a spurious
// single-frame timeout on a stable source) — the frontend would
// otherwise re-layout the `<img>` on every glitch.
let mut last_applied: Option<(u32, u32, PixelFormat, u32)> = None;
'session: loop { 'session: loop {
if self.direct_stop.load(Ordering::Relaxed) { if self.direct_stop.load(Ordering::Relaxed) {
break 'session; break 'session;
@@ -661,6 +813,62 @@ impl Streamer {
// call (from a previous soft-restart or recovery) is reflected here. // call (from a previous soft-restart or recovery) is reflected here.
let config = handle.block_on(async { self.config.read().await.clone() }); let config = handle.block_on(async { self.config.read().await.clone() });
// ── Resolve the CSI bridge subdev (if any) for this video ──────────
//
// The subdev is where QUERY_DV_TIMINGS and SOURCE_CHANGE events
// actually live on RK628-on-rkcif. It's stored in
// `VideoDeviceInfo` during enumeration; we re-read it here
// rather than caching on Streamer so a hot-plug recovery picks
// up a possibly-different subdev path.
let bridge_ctx = handle.block_on(async {
self.current_device
.read()
.await
.as_ref()
.map(|info| {
BridgeContext::from_parts(
info.subdev_path.clone(),
parse_bridge_kind(info.bridge_kind.as_deref()),
)
})
.unwrap_or_default()
});
// ── STREAMON gate: for CSI bridges with a subdev, refuse to
// open the video node when the subdev reports no signal.
// On RK628 this prevents a kernel null-pointer deref.
if let Some(subdev_path) = bridge_ctx.subdev_path.as_ref() {
match probe_subdev_signal(subdev_path, bridge_ctx.kind) {
Some(crate::video::SignalStatus::NoCable)
| Some(crate::video::SignalStatus::NoSync)
| Some(crate::video::SignalStatus::NoSignal)
| Some(crate::video::SignalStatus::OutOfRange) => {
let status = probe_subdev_signal(subdev_path, bridge_ctx.kind)
.unwrap_or(crate::video::SignalStatus::NoSignal);
let wait_secs = backoff_secs(no_signal_restart_count);
debug!(
"Pre-STREAMON gate: subdev {:?} reports {:?} — \
waiting for SOURCE_CHANGE (<= {}s) before opening {:?}",
subdev_path, status, wait_secs, device_path
);
set_retry(wait_secs.saturating_mul(1000));
go_offline();
set_state(status.into());
// Wait for SOURCE_CHANGE or timeout before retrying.
// Opens the subdev just for the poll — cheap and
// does NOT touch the video node.
wait_subdev_for_source_change(
subdev_path,
&self.direct_stop,
Duration::from_secs(wait_secs),
);
no_signal_restart_count = no_signal_restart_count.saturating_add(1);
continue 'session;
}
_ => {} // Locked (None from as_status) or unknown — proceed
}
}
// ── Open the capture stream ───────────────────────────────────────── // ── Open the capture stream ─────────────────────────────────────────
let mut stream_opt: Option<V4l2rCaptureStream> = None; let mut stream_opt: Option<V4l2rCaptureStream> = None;
let mut last_error: Option<String> = None; let mut last_error: Option<String> = None;
@@ -671,18 +879,39 @@ impl Streamer {
return; return;
} }
match V4l2rCaptureStream::open( match V4l2rCaptureStream::open_with_bridge(
&device_path, &device_path,
config.resolution, config.resolution,
config.format, config.format,
config.fps, config.fps,
BUFFER_COUNT, BUFFER_COUNT,
Duration::from_secs(2), Duration::from_secs(2),
bridge_ctx.clone(),
) { ) {
Ok(stream) => { Ok(stream) => {
stream_opt = Some(stream); stream_opt = Some(stream);
break; break;
} }
Err(AppError::CaptureNoSignal { kind }) => {
// CSI bridge open-time DV-timings probe failed.
// Drop the HTTP stream so the frontend renders its
// "no signal" overlay, update the state with the
// fine-grained reason, and let the outer 'session
// loop back off before the next retry.
let status = crate::video::SignalStatus::from_str(&kind)
.unwrap_or(crate::video::SignalStatus::NoSignal);
debug!(
"CSI open probe reports no signal ({:?}), will soft-restart",
status
);
set_retry(
backoff_secs(no_signal_restart_count).saturating_mul(1000),
);
go_offline();
set_state(status.into());
last_error = Some(format!("CaptureNoSignal({})", kind));
break;
}
Err(e) => { Err(e) => {
let err_str = e.to_string(); let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") { if err_str.contains("busy") || err_str.contains("resource") {
@@ -705,6 +934,15 @@ impl Streamer {
let mut stream = match stream_opt { let mut stream = match stream_opt {
Some(stream) => stream, Some(stream) => stream,
None => { None => {
// If the open failed because of a no-signal condition, do
// *not* escalate to Error — instead keep the capture loop
// alive in NoSignal-like state and retry via the soft
// restart path. This lets CSI bridges recover on their
// own when the source comes back (resolution change,
// host reboot, HDMI cable re-plug).
let was_no_signal =
handle.block_on(async { self.state().await }).is_no_signal_like();
if !was_no_signal {
error!( error!(
"Failed to open device {:?}: {}", "Failed to open device {:?}: {}",
device_path, device_path,
@@ -714,6 +952,16 @@ impl Streamer {
set_state(StreamerState::Error); set_state(StreamerState::Error);
break 'session; break 'session;
} }
debug!(
"Open failed in NoSignal-like state, backing off before soft-restart"
);
let wait = backoff_secs(no_signal_restart_count);
set_retry(wait.saturating_mul(1000));
std::thread::sleep(Duration::from_secs(wait));
no_signal_restart_count = no_signal_restart_count.saturating_add(1);
continue 'session;
}
}; };
let resolution = stream.resolution(); let resolution = stream.resolution();
@@ -748,8 +996,6 @@ impl Streamer {
// None = signal is present; Some(Instant) = when signal was first lost. // None = signal is present; Some(Instant) = when signal was first lost.
let mut no_signal_since: Option<std::time::Instant> = None; let mut no_signal_since: Option<std::time::Instant> = None;
// Counter for periodic placeholder pushes during NoSignal.
let mut nosignal_placeholder_counter: u32 = 0;
// Whether the inner 'capture loop should trigger a soft restart. // Whether the inner 'capture loop should trigger a soft restart.
let mut need_soft_restart = false; let mut need_soft_restart = false;
@@ -780,62 +1026,62 @@ impl Streamer {
let meta = match stream.next_into(&mut owned) { let meta = match stream.next_into(&mut owned) {
Ok(meta) => meta, Ok(meta) => meta,
Err(e) => { Err(e) => {
if is_source_changed_error(&e) {
info!("Capture SOURCE_CHANGE — soft-restart for DV re-probe");
set_retry(
backoff_secs(no_signal_restart_count).saturating_mul(1000),
);
go_offline();
set_state(StreamerState::NoSignal);
need_soft_restart = true;
break 'capture;
}
if e.kind() == std::io::ErrorKind::TimedOut { if e.kind() == std::io::ErrorKind::TimedOut {
if signal_present { if signal_present {
signal_present = false; signal_present = false;
// Don't call set_offline() instead keep the MJPEG stream let wait = backoff_secs(no_signal_restart_count);
// alive by pushing a placeholder frame so clients stay set_retry(wait.saturating_mul(1000));
// connected and see the "no signal" image. go_offline();
self.mjpeg_handler.push_no_signal_placeholder();
set_state(StreamerState::NoSignal); set_state(StreamerState::NoSignal);
no_signal_since = Some(std::time::Instant::now()); no_signal_since = Some(std::time::Instant::now());
self.current_fps.store(0, Ordering::Relaxed); self.current_fps.store(0, Ordering::Relaxed);
fps_frame_count = 0; fps_frame_count = 0;
last_fps_time = std::time::Instant::now(); last_fps_time = std::time::Instant::now();
nosignal_placeholder_counter = 0; } else if let Some(since) = no_signal_since {
} else { let wait = backoff_secs(no_signal_restart_count);
// Already in NoSignal re-send placeholder periodically so if since.elapsed().as_secs() >= wait {
// the HTTP keepalive timer does not expire.
nosignal_placeholder_counter =
nosignal_placeholder_counter.wrapping_add(1);
if nosignal_placeholder_counter >= NOSIGNAL_PLACEHOLDER_INTERVAL {
nosignal_placeholder_counter = 0;
self.mjpeg_handler.push_no_signal_placeholder();
}
// Soft-restart after exponential back-off.
if let Some(since) = no_signal_since {
let backoff_secs = NOSIGNAL_SOFT_RESTART_SECS
.saturating_mul(2u64.pow(no_signal_restart_count.min(2)))
.min(30);
if since.elapsed().as_secs() >= backoff_secs {
info!( info!(
"NoSignal for {}s, attempting soft restart (attempt {})", "NoSignal for {}s, attempting soft restart (attempt {})",
backoff_secs, wait,
no_signal_restart_count + 1 no_signal_restart_count + 1
); );
need_soft_restart = true; need_soft_restart = true;
break 'capture; break 'capture;
} }
} }
}
std::thread::sleep(std::time::Duration::from_millis(100)); std::thread::sleep(std::time::Duration::from_millis(100));
continue 'capture; continue 'capture;
} }
let is_device_lost = match e.raw_os_error() { // Classify the capture error.
Some(6) => true, // ENXIO //
Some(19) => true, // ENODEV // Only errnos that mean "the device file is gone"
Some(5) => true, // EIO // (ENODEV, ENXIO, ESHUTDOWN) trigger the full
Some(32) => true, // EPIPE // DeviceLost → recovery path.
Some(108) => true, // ESHUTDOWN //
_ => false, // EIO / EPIPE are common transient errors on rkcif
}; // when the source glitches or re-locks; those are
// treated as NoSignal + soft-restart so we recover
// in ~1 s instead of the 1 s recovery-poll loop.
let os_err = e.raw_os_error();
let is_device_lost = matches!(os_err, Some(6) | Some(19) | Some(108));
let is_transient_signal_error = matches!(os_err, Some(5) | Some(32));
if is_device_lost { if is_device_lost {
error!("Video device lost: {} - {}", device_path.display(), e); error!("Video device lost: {} - {}", device_path.display(), e);
self.mjpeg_handler.set_offline(); go_offline();
set_retry(0);
handle.block_on(async { handle.block_on(async {
*self.last_lost_device.write().await = *self.last_lost_device.write().await =
Some(device_path.display().to_string()); Some(device_path.display().to_string());
@@ -851,6 +1097,20 @@ impl Streamer {
break 'capture; break 'capture;
} }
if is_transient_signal_error {
warn!(
"Capture transient error ({}): treating as NoSignal + soft-restart",
e
);
set_retry(
backoff_secs(no_signal_restart_count).saturating_mul(1000),
);
go_offline();
set_state(StreamerState::NoSignal);
need_soft_restart = true;
break 'capture;
}
let key = classify_capture_error(&e); let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) { if capture_error_throttler.should_log(&key) {
let suppressed = suppressed_capture_errors.remove(&key).unwrap_or(0); let suppressed = suppressed_capture_errors.remove(&key).unwrap_or(0);
@@ -893,8 +1153,28 @@ impl Streamer {
signal_present = true; signal_present = true;
no_signal_since = None; no_signal_since = None;
no_signal_restart_count = 0; no_signal_restart_count = 0;
// Stream was kept online (placeholder pushes), just update state. set_retry(0);
set_state(StreamerState::Streaming); set_state(StreamerState::Streaming);
let fps_val = config.fps;
let current = (resolution.width, resolution.height, pixel_format, fps_val);
if last_applied != Some(current) {
last_applied = Some(current);
let dp = device_path.display().to_string();
let fmt = format!("{:?}", pixel_format);
let w = resolution.width;
let h = resolution.height;
handle.block_on(async {
self.publish_event(SystemEvent::StreamConfigApplied {
transition_id: None,
device: dp,
resolution: (w, h),
format: fmt,
fps: fps_val,
})
.await;
});
}
} }
self.mjpeg_handler.update_frame(frame); self.mjpeg_handler.update_frame(frame);
@@ -923,12 +1203,30 @@ impl Streamer {
break 'session; break 'session;
} }
// ── Soft restart path ───────────────────────────────────────────────
no_signal_restart_count = no_signal_restart_count.saturating_add(1); no_signal_restart_count = no_signal_restart_count.saturating_add(1);
// Re-probe the device to pick up a changed resolution/format.
match VideoDevice::open_readonly(&device_path).and_then(|d| d.info()) { match VideoDevice::open_readonly(&device_path).and_then(|d| d.info()) {
Ok(device_info) => { Ok(device_info) => {
// Skip re-open while rkcif still reports placeholder (≤64²) geometry.
let probed_res = device_info
.formats
.first()
.and_then(|f| f.resolutions.first())
.map(|r| (r.width, r.height));
if matches!(probed_res, Some((w, h)) if w <= 64 || h <= 64)
|| probed_res.is_none()
{
warn!(
"Soft restart: probed resolution too small ({:?}), still no signal",
probed_res
);
set_retry(2_000);
go_offline();
std::thread::sleep(Duration::from_secs(2));
continue 'session;
}
handle.block_on(async { handle.block_on(async {
let fmt; let fmt;
let res; let res;
@@ -971,19 +1269,12 @@ impl Streamer {
self.current_fps.store(0, Ordering::Relaxed); self.current_fps.store(0, Ordering::Relaxed);
} }
/// Check if streaming (or in NoSignal state capture thread is still running) /// `Streaming` or any no-signal-like state (capture thread still alive).
pub async fn is_streaming(&self) -> bool { pub async fn is_streaming(&self) -> bool {
matches!( let s = self.state().await;
self.state().await, s == StreamerState::Streaming || s.is_no_signal_like()
StreamerState::Streaming | StreamerState::NoSignal
)
} }
/// Re-probe a device and update the stored config/device info.
///
/// Called during recovery or after a NoSignal soft restart so that a
/// resolution / format change on the source side is picked up before
/// the capture stream is re-opened.
pub async fn re_init_device(self: &Arc<Self>, device_path: &str) -> Result<()> { pub async fn re_init_device(self: &Arc<Self>, device_path: &str) -> Result<()> {
let device = VideoDevice::open_readonly(device_path) let device = VideoDevice::open_readonly(device_path)
.map_err(|e| AppError::VideoError(format!("Cannot open device for re-init: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Cannot open device for re-init: {}", e)))?;
@@ -1030,30 +1321,27 @@ impl Streamer {
} }
} }
/// Publish event to event bus (if configured) /// Dedupes `StreamStateChanged` on `(state, reason, next_retry_ms)`.
/// For StreamStateChanged events, only publishes if state actually changed (de-duplication)
async fn publish_event(&self, event: SystemEvent) { async fn publish_event(&self, event: SystemEvent) {
if let Some(events) = self.events.read().await.as_ref() { if let Some(events) = self.events.read().await.as_ref() {
// For state change events, check if state actually changed if let SystemEvent::StreamStateChanged {
if let SystemEvent::StreamStateChanged { ref state, .. } = event { ref state,
let current_state = match state.as_str() { ref reason,
"uninitialized" => StreamerState::Uninitialized, next_retry_ms,
"ready" => StreamerState::Ready, ..
"streaming" => StreamerState::Streaming, } = event
"no_signal" => StreamerState::NoSignal, {
"error" => StreamerState::Error, let key = (state.clone(), reason.clone(), next_retry_ms);
"device_lost" => StreamerState::DeviceLost,
"recovering" => StreamerState::Recovering,
_ => StreamerState::Error,
};
let mut last_state = self.last_published_state.write().await; let mut last_state = self.last_published_state.write().await;
if *last_state == Some(current_state) { if last_state.as_ref() == Some(&key) {
// State hasn't changed, skip publishing trace!(
trace!("Skipping duplicate stream state event: {}", state); "Skipping duplicate stream state event: {} (reason={:?})",
state,
reason
);
return; return;
} }
*last_state = Some(current_state); *last_state = Some(key);
} }
events.publish(event); events.publish(event);
@@ -1143,8 +1431,12 @@ impl Streamer {
); );
} }
// Wait before retry (1 second) let wait = if attempt == 1 {
tokio::time::sleep(std::time::Duration::from_secs(1)).await; std::time::Duration::from_millis(200)
} else {
std::time::Duration::from_secs(1)
};
tokio::time::sleep(wait).await;
// Check if device file exists // Check if device file exists
let device_exists = std::path::Path::new(&device_path).exists(); let device_exists = std::path::Path::new(&device_path).exists();
@@ -1212,6 +1504,7 @@ impl Default for Streamer {
current_fps: AtomicU32::new(0), current_fps: AtomicU32::new(0),
events: RwLock::new(None), events: RwLock::new(None),
last_published_state: RwLock::new(None), last_published_state: RwLock::new(None),
next_retry_ms: AtomicU64::new(0),
config_changing: std::sync::atomic::AtomicBool::new(false), config_changing: std::sync::atomic::AtomicBool::new(false),
background_tasks_started: std::sync::atomic::AtomicBool::new(false), background_tasks_started: std::sync::atomic::AtomicBool::new(false),
recovery_retry_count: std::sync::atomic::AtomicU32::new(0), recovery_retry_count: std::sync::atomic::AtomicU32::new(0),
@@ -1236,20 +1529,73 @@ pub struct StreamerStats {
pub fps: f32, pub fps: f32,
} }
/// One-shot signal probe on a CSI-bridge subdev node.
///
/// Opens `subdev_path` only for the duration of the probe. Returns the
/// fine-grained no-signal reason, or `None` when the bridge reports a locked
/// source. A failure to open the subdev is reported as `NoSignal` so the
/// caller stays on its retry path instead of escalating to an error.
fn probe_subdev_signal(
    subdev_path: &std::path::Path,
    kind: Option<csi_bridge::CsiBridgeKind>,
) -> Option<crate::video::SignalStatus> {
    match csi_bridge::open_subdev(subdev_path) {
        Ok(fd) => {
            let bridge_kind = kind.unwrap_or(csi_bridge::CsiBridgeKind::Unknown);
            csi_bridge::probe_signal(&fd, bridge_kind).as_status()
        }
        Err(e) => {
            debug!("probe_subdev_signal: failed to open {:?}: {}", subdev_path, e);
            Some(crate::video::SignalStatus::NoSignal)
        }
    }
}
/// Block (up to `max_wait`) until the subdev posts `V4L2_EVENT_SOURCE_CHANGE`,
/// the `direct_stop` flag is raised, or the deadline expires.
///
/// Opens the subdev just for this wait; the capture video node is not touched.
/// Polls in 250 ms slices so a stop request is honoured promptly. All failure
/// paths return early — the caller simply retries its open sequence next.
fn wait_subdev_for_source_change(
    subdev_path: &std::path::Path,
    direct_stop: &AtomicBool,
    max_wait: Duration,
) {
    let fd = match csi_bridge::open_subdev(subdev_path) {
        Ok(f) => f,
        Err(e) => {
            debug!(
                "wait_subdev_for_source_change: failed to open {:?}: {}",
                subdev_path, e
            );
            // Can't event-wait without the fd; sleep a bounded amount so the
            // caller's retry loop is rate-limited rather than spinning.
            std::thread::sleep(max_wait.min(Duration::from_secs(1)));
            return;
        }
    };
    // Best-effort subscribe; if it fails we still poll below and rely on the
    // deadline to bound the wait.
    if let Err(e) = csi_bridge::subscribe_source_change(&fd) {
        debug!(
            "wait_subdev_for_source_change: subscribe failed on {:?}: {}",
            subdev_path, e
        );
    }
    // Poll slice: short enough that direct_stop is noticed quickly.
    let slice = Duration::from_millis(250);
    let deadline = std::time::Instant::now() + max_wait;
    while std::time::Instant::now() < deadline {
        if direct_stop.load(Ordering::Relaxed) {
            return;
        }
        // Never wait past the deadline, even on the last slice.
        let remaining = deadline.saturating_duration_since(std::time::Instant::now());
        let wait = remaining.min(slice);
        match csi_bridge::wait_source_change(&fd, wait) {
            Ok(true) => {
                // Event arrived: the source re-appeared or changed timings.
                info!(
                    "Subdev SOURCE_CHANGE during no-signal wait, retrying open immediately"
                );
                return;
            }
            Ok(false) => continue, // slice elapsed with no event — keep waiting
            Err(e) => {
                // Unexpected wait error: bail out and let the caller re-open.
                debug!("wait_source_change error on {:?}: {}", subdev_path, e);
                return;
            }
        }
    }
}
impl serde::Serialize for StreamerState { impl serde::Serialize for StreamerState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where where
S: serde::Serializer, S: serde::Serializer,
{ {
let s = match self { serializer.serialize_str(self.as_str())
StreamerState::Uninitialized => "uninitialized",
StreamerState::Ready => "ready",
StreamerState::Streaming => "streaming",
StreamerState::NoSignal => "no_signal",
StreamerState::Error => "error",
StreamerState::DeviceLost => "device_lost",
StreamerState::Recovering => "recovering",
};
serializer.serialize_str(s)
} }
} }

View File

@@ -3,22 +3,37 @@
use std::fs::File; use std::fs::File;
use std::io; use std::io;
use std::os::fd::AsFd; use std::os::fd::AsFd;
use std::path::Path; use std::path::{Path, PathBuf};
use std::time::Duration; use std::time::Duration;
use nix::poll::{poll, PollFd, PollFlags, PollTimeout}; use nix::poll::{poll, PollFd, PollFlags, PollTimeout};
use tracing::{debug, warn}; use tracing::{debug, info, warn};
use v4l2r::bindings::{v4l2_requestbuffers, v4l2_streamparm, v4l2_streamparm__bindgen_ty_1}; use v4l2r::bindings::{
v4l2_dv_timings, v4l2_requestbuffers, v4l2_streamparm, v4l2_streamparm__bindgen_ty_1,
V4L2_DV_BT_656_1120,
};
use v4l2r::ioctl::{ use v4l2r::ioctl::{
self, Capabilities, Capability as V4l2rCapability, MemoryConsistency, PlaneMapping, QBufPlane, self, Capabilities, Capability as V4l2rCapability, Event as V4l2Event, EventType,
QBuffer, QueryBuffer, V4l2Buffer, MemoryConsistency, PlaneMapping, QBufPlane, QBuffer, QueryBuffer, QueryDvTimingsError,
SubscribeEventFlags, V4l2Buffer,
}; };
use v4l2r::memory::{MemoryType, MmapHandle}; use v4l2r::memory::{MemoryType, MmapHandle};
use v4l2r::nix::errno::Errno; use v4l2r::nix::errno::Errno;
use v4l2r::{Format as V4l2rFormat, PixelFormat as V4l2rPixelFormat, QueueType}; use v4l2r::{Format as V4l2rFormat, PixelFormat as V4l2rPixelFormat, QueueType};
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::video::csi_bridge::{self, CsiBridgeKind, ProbeResult};
use crate::video::format::{PixelFormat, Resolution}; use crate::video::format::{PixelFormat, Resolution};
use crate::video::SignalStatus;
/// `io::Error` payload used as an in-band marker when the driver posts
/// `V4L2_EVENT_SOURCE_CHANGE` (or an equivalent condition) and the capture
/// stream must be torn down and re-opened.
pub const SOURCE_CHANGED_MARKER: &str = "v4l2_source_changed";

/// True when `err` carries the "source changed" marker produced by the
/// capture path, as opposed to a genuine I/O failure.
pub fn is_source_changed_error(err: &io::Error) -> bool {
    matches!(err.get_ref(), Some(inner) if inner.to_string() == SOURCE_CHANGED_MARKER)
}
/// Metadata for a captured frame. /// Metadata for a captured frame.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
@@ -27,6 +42,23 @@ pub struct CaptureMeta {
pub sequence: u64, pub sequence: u64,
} }
/// When set, DV ioctls use the subdev (rkcif: video node has no DV ioctls).
#[derive(Debug, Clone, Default)]
pub struct BridgeContext {
    // v4l-subdev node that owns the DV-timings / SOURCE_CHANGE ioctls;
    // None for devices (e.g. plain UVC) that have no paired subdev.
    pub subdev_path: Option<PathBuf>,
    // Detected bridge chip kind, if known; drives probe behavior downstream.
    pub kind: Option<CsiBridgeKind>,
}

impl BridgeContext {
    /// Assemble a context from already-discovered parts; both are optional.
    pub fn from_parts(subdev_path: Option<PathBuf>, kind: Option<CsiBridgeKind>) -> Self {
        Self { subdev_path, kind }
    }

    /// Whether a bridge subdev is paired with the capture video node.
    pub fn has_subdev(&self) -> bool {
        self.subdev_path.is_some()
    }
}
/// V4L2 capture stream backed by v4l2r ioctl. /// V4L2 capture stream backed by v4l2r ioctl.
pub struct V4l2rCaptureStream { pub struct V4l2rCaptureStream {
fd: File, fd: File,
@@ -36,9 +68,12 @@ pub struct V4l2rCaptureStream {
stride: u32, stride: u32,
timeout: Duration, timeout: Duration,
mappings: Vec<Vec<PlaneMapping>>, mappings: Vec<Vec<PlaneMapping>>,
subdev_fd: Option<File>,
bridge_kind: Option<CsiBridgeKind>,
} }
impl V4l2rCaptureStream { impl V4l2rCaptureStream {
/// UVC: uses `resolution`. CSI bridges: DV-probe first; may return `CaptureNoSignal`.
pub fn open( pub fn open(
device_path: impl AsRef<Path>, device_path: impl AsRef<Path>,
resolution: Resolution, resolution: Resolution,
@@ -47,6 +82,69 @@ impl V4l2rCaptureStream {
buffer_count: u32, buffer_count: u32,
timeout: Duration, timeout: Duration,
) -> Result<Self> { ) -> Result<Self> {
Self::open_with_bridge(
device_path,
resolution,
format,
fps,
buffer_count,
timeout,
BridgeContext::default(),
)
}
/// With subdev: probe DV on subdev before opening video (RK628 safety); may ignore requested size.
pub fn open_with_bridge(
device_path: impl AsRef<Path>,
resolution: Resolution,
format: PixelFormat,
fps: u32,
buffer_count: u32,
timeout: Duration,
bridge: BridgeContext,
) -> Result<Self> {
// Probe subdev before video open (RK628: no-signal must not reach capture STREAMON).
let mut subdev_fd_opt: Option<File> = None;
let mut subdev_dv_mode: Option<csi_bridge::DvTimingsMode> = None;
if let Some(subdev_path) = bridge.subdev_path.as_ref() {
let subdev_fd = csi_bridge::open_subdev(subdev_path).map_err(|e| {
AppError::VideoError(format!(
"Failed to open CSI bridge subdev {:?}: {}",
subdev_path, e
))
})?;
let kind = bridge.kind.unwrap_or(CsiBridgeKind::Unknown);
match csi_bridge::probe_signal(&subdev_fd, kind) {
ProbeResult::Locked(mode) => {
info!(
"Subdev {:?} locked: {}x{} @ {}Hz",
subdev_path, mode.width, mode.height, mode.pixelclock
);
csi_bridge::apply_dv_timings(&subdev_fd, mode.raw);
if let Err(e) = csi_bridge::subscribe_source_change(&subdev_fd) {
debug!("subdev SOURCE_CHANGE subscribe failed: {}", e);
}
subdev_dv_mode = Some(mode);
}
other => {
let status = other
.as_status()
.unwrap_or(SignalStatus::NoSignal);
debug!(
"Subdev {:?} reports no signal ({:?}) — refusing STREAMON",
subdev_path, status
);
return Err(AppError::CaptureNoSignal {
kind: status.as_str().to_string(),
});
}
}
subdev_fd_opt = Some(subdev_fd);
}
// ── Phase 1: open the capture (video) node ─────────────────────
let mut fd = File::options() let mut fd = File::options()
.read(true) .read(true)
.write(true) .write(true)
@@ -56,6 +154,8 @@ impl V4l2rCaptureStream {
let caps: V4l2rCapability = ioctl::querycap(&fd) let caps: V4l2rCapability = ioctl::querycap(&fd)
.map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?;
let caps_flags = caps.device_caps(); let caps_flags = caps.device_caps();
let driver_name = caps.driver.to_string();
let is_csi_bridge = is_csi_bridge_driver(&driver_name);
// Prefer multi-planar capture when available, as it is required for some // Prefer multi-planar capture when available, as it is required for some
// devices/pixel formats (e.g. NV12 via VIDEO_CAPTURE_MPLANE). // devices/pixel formats (e.g. NV12 via VIDEO_CAPTURE_MPLANE).
@@ -69,11 +169,50 @@ impl V4l2rCaptureStream {
)); ));
}; };
let mut fmt: V4l2rFormat = ioctl::g_fmt(&fd, queue) // CSI/HDMI bridge without a subdev pairing (tc358743 on uvcvideo,
.map_err(|e| AppError::VideoError(format!("Failed to get device format: {}", e)))?; // rk_hdmirx on RK3588): probe DV timings on the video node, with
// the same no-signal gate as the subdev path. When we *do* have
// a subdev, reuse its already-probed mode to drive S_FMT.
let dv_mode = if let Some(mode) = subdev_dv_mode.as_ref() {
Some(DvTimingsMode {
width: mode.width,
height: mode.height,
fps: mode.fps,
})
} else if is_csi_bridge {
Some(probe_and_apply_dv_timings(&fd)?)
} else {
None
};
fmt.width = resolution.width; // rkcif + RK628: G_FMT is often 0×0 until the first S_FMT; G_FMT may
fmt.height = resolution.height; // also fail. With DV timings from the subdev, build the format (same as
// `v4l2-ctl --set-fmt-video=width=…,height=…`).
let mut fmt: V4l2rFormat = match (
ioctl::g_fmt::<V4l2rFormat>(&fd, queue),
is_csi_bridge,
dv_mode.as_ref(),
) {
(Ok(f), _, _) if f.width > 0 && f.height > 0 => f,
(_, true, Some(m)) => {
let fourcc = format.to_fourcc();
V4l2rFormat::from((&fourcc, (m.width as usize, m.height as usize)))
}
(Ok(f), _, _) => f,
(Err(e), _, _) => {
return Err(AppError::VideoError(format!("Failed to get device format: {}", e)));
}
};
// Prefer the DV-timings-reported geometry for CSI bridges — the
// source, not the user config, dictates what the capture hardware
// will actually deliver.
let (target_w, target_h) = match dv_mode {
Some(DvTimingsMode { width, height, .. }) => (width, height),
None => (resolution.width, resolution.height),
};
fmt.width = target_w;
fmt.height = target_h;
fmt.pixelformat = V4l2rPixelFormat::from(&format.to_fourcc()); fmt.pixelformat = V4l2rPixelFormat::from(&format.to_fourcc());
let actual_fmt: V4l2rFormat = ioctl::s_fmt(&mut fd, (queue, &fmt)) let actual_fmt: V4l2rFormat = ioctl::s_fmt(&mut fd, (queue, &fmt))
@@ -146,12 +285,33 @@ impl V4l2rCaptureStream {
stride, stride,
timeout, timeout,
mappings, mappings,
subdev_fd: subdev_fd_opt,
bridge_kind: bridge.kind,
}; };
stream.queue_all_buffers()?; stream.queue_all_buffers()?;
ioctl::streamon(&stream.fd, stream.queue) ioctl::streamon(&stream.fd, stream.queue)
.map_err(|e| AppError::VideoError(format!("Failed to start capture stream: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Failed to start capture stream: {}", e)))?;
// When the subdev path was used, SOURCE_CHANGE was already
// subscribed *there* (the rkcif video node returns ENOTTY).
// Otherwise try on the video node as a best-effort fallback for
// drivers that do honour it (tc358743/uvcvideo, rk_hdmirx).
if stream.subdev_fd.is_none() {
match ioctl::subscribe_event(
&stream.fd,
EventType::SourceChange(0),
SubscribeEventFlags::empty(),
) {
Ok(()) => debug!("Subscribed to V4L2_EVENT_SOURCE_CHANGE on video node"),
Err(e) => debug!(
"V4L2_EVENT_SOURCE_CHANGE subscription unavailable on video node \
({}), falling back to timeout-based restart",
e
),
}
}
Ok(stream) Ok(stream)
} }
@@ -167,6 +327,51 @@ impl V4l2rCaptureStream {
self.stride self.stride
} }
/// Re-probe DV timings on the persistent subdev handle (no extra `open`).
pub fn probe_bridge_signal(&self) -> Option<ProbeResult> {
let subdev_fd = self.subdev_fd.as_ref()?;
Some(csi_bridge::probe_signal(
subdev_fd,
self.bridge_kind.unwrap_or(CsiBridgeKind::Unknown),
))
}
/// Like [`Self::probe_bridge_signal`] but isolates the ioctl on a dup'd
/// fd with a wall-clock cap — see [`csi_bridge::probe_signal_thread_timeout`].
pub fn probe_bridge_signal_with_timeout(&self, limit: Duration) -> Option<ProbeResult> {
let subdev_fd = self.subdev_fd.as_ref()?;
csi_bridge::probe_signal_thread_timeout(
subdev_fd,
self.bridge_kind.unwrap_or(CsiBridgeKind::Unknown),
limit,
)
}
fn expected_capture_bytes(&self) -> Option<usize> {
if self.format.is_compressed() {
return None;
}
// Stride is bytesperline; packed formats use stride × height (not × bpp).
if self.format.bytes_per_pixel().is_some() {
return (self.stride as usize).checked_mul(self.resolution.height as usize);
}
match self.format {
PixelFormat::Nv12 | PixelFormat::Nv21 | PixelFormat::Yuv420 | PixelFormat::Yvu420 => {
(self.stride as usize)
.checked_mul(self.resolution.height as usize)?
.checked_mul(3)?
.checked_div(2)
}
PixelFormat::Nv16 => (self.stride as usize)
.checked_mul(self.resolution.height as usize)?
.checked_mul(2),
PixelFormat::Nv24 => (self.stride as usize)
.checked_mul(self.resolution.height as usize)?
.checked_mul(3),
_ => None,
}
}
pub fn next_into(&mut self, dst: &mut Vec<u8>) -> io::Result<CaptureMeta> { pub fn next_into(&mut self, dst: &mut Vec<u8>) -> io::Result<CaptureMeta> {
self.wait_ready()?; self.wait_ready()?;
@@ -210,6 +415,21 @@ impl V4l2rCaptureStream {
self.queue_buffer(index as u32) self.queue_buffer(index as u32)
.map_err(|e| io::Error::other(e.to_string()))?; .map_err(|e| io::Error::other(e.to_string()))?;
if let Some(expected) = self.expected_capture_bytes() {
if total > 0 && total != expected {
warn!(
"DQBUF bytes_used ({}) != expected ({}) for {:?} {}x{} stride={} — requesting stream re-open",
total,
expected,
self.format,
self.resolution.width,
self.resolution.height,
self.stride
);
return Err(io::Error::other(SOURCE_CHANGED_MARKER));
}
}
Ok(CaptureMeta { Ok(CaptureMeta {
bytes_used: total, bytes_used: total,
sequence, sequence,
@@ -220,13 +440,79 @@ impl V4l2rCaptureStream {
if self.timeout.is_zero() { if self.timeout.is_zero() {
return Ok(()); return Ok(());
} }
let mut fds = [PollFd::new(self.fd.as_fd(), PollFlags::POLLIN)]; // Multiplex video fd (POLLIN for DQBUF, POLLPRI as fallback for
// drivers that deliver events here) and the optional subdev fd
// (POLLPRI only — SOURCE_CHANGE on RK628 / rkcif).
let mut poll_fds: Vec<PollFd> = Vec::with_capacity(2);
poll_fds.push(PollFd::new(
self.fd.as_fd(),
PollFlags::POLLIN
| PollFlags::POLLPRI
| PollFlags::POLLERR
| PollFlags::POLLHUP,
));
if let Some(subdev_fd) = self.subdev_fd.as_ref() {
poll_fds.push(PollFd::new(subdev_fd.as_fd(), PollFlags::POLLPRI));
}
let timeout_ms = self.timeout.as_millis().min(u16::MAX as u128) as u16; let timeout_ms = self.timeout.as_millis().min(u16::MAX as u128) as u16;
let ready = poll(&mut fds, PollTimeout::from(timeout_ms))?; let ready = poll(&mut poll_fds, PollTimeout::from(timeout_ms))?;
if ready == 0 { if ready == 0 {
return Err(io::Error::new(io::ErrorKind::TimedOut, "capture timeout")); return Err(io::Error::new(io::ErrorKind::TimedOut, "capture timeout"));
} }
Ok(())
// Subdev POLLPRI fires first on rkcif/RK628 when the source-side
// HDMI timings changed. Drain all pending events and bubble up
// the `source_changed` marker so the upper layer re-opens with a
// fresh DV_TIMINGS probe.
if let Some(subdev_fd) = self.subdev_fd.as_ref() {
if let Some(revents) = poll_fds.get(1).and_then(|f| f.revents()) {
if revents.contains(PollFlags::POLLPRI) {
let drained = drain_events(subdev_fd);
info!(
"Subdev SOURCE_CHANGE detected (drained {} event(s)), \
requesting stream re-open",
drained
);
return Err(io::Error::other(SOURCE_CHANGED_MARKER));
}
}
}
if let Some(revents) = poll_fds[0].revents() {
if revents.contains(PollFlags::POLLERR) || revents.contains(PollFlags::POLLHUP) {
debug!(
"capture poll: video revents={:?} (ERR/HUP) — requesting stream re-open",
revents
);
return Err(io::Error::other(SOURCE_CHANGED_MARKER));
}
if revents.contains(PollFlags::POLLPRI) {
let drained = drain_events(&self.fd);
info!(
"Video-node SOURCE_CHANGE detected (drained {} event(s)), \
requesting stream re-open",
drained
);
return Err(io::Error::other(SOURCE_CHANGED_MARKER));
}
if !revents.contains(PollFlags::POLLIN) {
// rkcif + RK628: the driver may wake `poll` after internally
// invalidating queued buffers without queueing a V4L2 event.
// Treat like SOURCE_CHANGE so we STREAMOFF / re-S_FMT.
debug!(
"capture poll: ready={} video revents={:?} (no POLLIN) — requesting stream re-open",
ready, revents
);
return Err(io::Error::other(SOURCE_CHANGED_MARKER));
}
return Ok(());
}
debug!(
"capture poll: ready={} but video revents unavailable — requesting stream re-open",
ready
);
Err(io::Error::other(SOURCE_CHANGED_MARKER))
} }
fn queue_all_buffers(&mut self) -> Result<()> { fn queue_all_buffers(&mut self) -> Result<()> {
@@ -256,10 +542,197 @@ impl V4l2rCaptureStream {
impl Drop for V4l2rCaptureStream { impl Drop for V4l2rCaptureStream {
fn drop(&mut self) { fn drop(&mut self) {
// Release ordering matters on rkcif: a subsequent open()/S_FMT from a
// freshly-constructed stream returns EBUSY if the previous capture has
// not fully relinquished its buffers. Mirror the ustreamer teardown
// order:
// 1. STREAMOFF (stop DMA)
// 2. unsubscribe_all (no further DQEVENT paths)
// 3. munmap via Drop (release buffer mappings)
// 4. REQBUFS count=0 (free kernel buffer list)
// 5. close(fd) (implicit on File Drop)
if let Err(e) = ioctl::streamoff(&self.fd, self.queue) { if let Err(e) = ioctl::streamoff(&self.fd, self.queue) {
debug!("Failed to stop capture stream: {}", e); debug!("Failed to stop capture stream: {}", e);
} }
if let Err(e) = ioctl::unsubscribe_all_events(&self.fd) {
debug!("Failed to unsubscribe V4L2 events: {}", e);
} }
// Explicit munmap *before* REQBUFS(0) — the kernel refuses to free the
// buffer list while mappings are outstanding.
self.mappings.clear();
if let Err(e) = ioctl::reqbufs::<v4l2_requestbuffers>(
&self.fd,
self.queue,
MemoryType::Mmap,
0,
MemoryConsistency::empty(),
) {
debug!("Failed to release capture buffers: {}", e);
}
}
}
/// Driver-name check for CSI/HDMI bridge devices (rk_hdmirx, rkcif, tc358743,
/// …) that expose DV timings. Kept in sync with `video::is_csi_hdmi_bridge`
/// but queries the raw V4L2 driver string so we don't need a full
/// `VideoDeviceInfo` at `V4l2rCaptureStream::open` time.
fn is_csi_bridge_driver(driver: &str) -> bool {
    let d = driver.to_ascii_lowercase();
    // rkcif drivers come in several variants ("rkcif", "rkcif_mipi_lvds", …),
    // so match on the prefix; the exact-equality check the prefix already
    // covers has been dropped. The other names are exact matches.
    matches!(d.as_str(), "rk_hdmirx" | "tc358743") || d.starts_with("rkcif")
}
/// Drain any pending `V4L2_EVENT_*` events queued on `fd`.
///
/// Called after a POLLPRI wake-up so the next poll does not immediately fire
/// again on stale state. Hard-capped at 16 DQEVENT calls per invocation;
/// returns the number of events actually dequeued.
fn drain_events(fd: &File) -> u32 {
    let mut count = 0u32;
    for _ in 0..16u32 {
        if ioctl::dqevent::<V4l2Event>(fd).is_err() {
            break; // queue empty (or DQEVENT unsupported) — stop draining
        }
        count += 1;
    }
    count
}
/// Result of a successful `VIDIOC_QUERY_DV_TIMINGS` + `VIDIOC_S_DV_TIMINGS`
/// probe. Used by the CSI bridge path to override the requested resolution
/// with the source-reported geometry before `S_FMT`.
#[derive(Debug, Clone, Copy)]
struct DvTimingsMode {
    // Active-area width in pixels, as reported by the source timings.
    width: u32,
    // Active-area height in pixels, as reported by the source timings.
    height: u32,
    // Frame rate, presumably derived from pixelclock / total geometry —
    // currently unused (hence the allow), kept for future diagnostics.
    #[allow(dead_code)]
    fps: Option<f64>,
}
/// Probe DV timings from the source and latch them into the driver.
///
/// Mirrors PiKVM/ustreamer's `src_hdmi_open_sequence`:
/// 1. `VIDIOC_QUERY_DV_TIMINGS` — active-probe the source.
/// 2. On success, `VIDIOC_S_DV_TIMINGS` — commit so that subsequent
///    `S_FMT` is accepted at the matching geometry.
/// 3. Return the timings for the caller to feed into `S_FMT`.
///
/// Errno mapping (see `V4L2_CID_DV_RX_POWER_PRESENT` semantics):
/// * `ENOLINK` → `NoCable` (TMDS clock absent, cable unplugged)
/// * `ENOLCK` → `NoSync` (TMDS present, timings unstable)
/// * `ERANGE` → `OutOfRange` (timings outside hardware caps)
/// * `ENODATA` → `NoSignal` (driver says "no DV timings support on
///   this input", e.g. EDID not applied yet)
/// * anything else → `NoSignal` (fallback, keeps the retry loop going)
fn probe_and_apply_dv_timings(fd: &File) -> Result<DvTimingsMode> {
    let timings: v4l2_dv_timings = match ioctl::query_dv_timings(fd) {
        Ok(t) => t,
        Err(err) => {
            let status = match &err {
                QueryDvTimingsError::NoLink => SignalStatus::NoCable,
                QueryDvTimingsError::UnstableSignal => SignalStatus::NoSync,
                QueryDvTimingsError::IoctlError(Errno::ERANGE) => SignalStatus::OutOfRange,
                QueryDvTimingsError::Unsupported => SignalStatus::NoSignal,
                // I2C-layer failures between rkcif and the RK628 bridge
                // (`ret=-110`/-121/-5) typically mean the bridge is in the
                // middle of a PHY re-lock, not that the source is gone.
                // Classify them as `NoSync` so the upper layer keeps retrying
                // on the short end of the back-off ladder.
                QueryDvTimingsError::IoctlError(Errno::EIO)
                | QueryDvTimingsError::IoctlError(Errno::EREMOTEIO)
                | QueryDvTimingsError::IoctlError(Errno::ETIMEDOUT) => SignalStatus::NoSync,
                QueryDvTimingsError::IoctlError(_) => SignalStatus::NoSignal,
            };
            info!(
                "VIDIOC_QUERY_DV_TIMINGS failed: {} -> SignalStatus::{:?}",
                err, status
            );
            return Err(AppError::CaptureNoSignal {
                kind: status.as_str().to_string(),
            });
        }
    };
    // `v4l2_dv_timings` is a packed union; copy the scalar fields out to
    // aligned locals before formatting / comparing to avoid UB (and the
    // rustc E0793 "reference to field of packed struct is unaligned" error).
    let timings_type: u32 = timings.type_;
    if timings_type != V4L2_DV_BT_656_1120 {
        warn!(
            "QUERY_DV_TIMINGS returned unknown type {}, treating as NoSignal",
            timings_type
        );
        return Err(AppError::CaptureNoSignal {
            kind: SignalStatus::NoSignal.as_str().to_string(),
        });
    }
    // SAFETY: the `type_ == V4L2_DV_BT_656_1120` check above guarantees the
    // `bt` arm of the union is the active variant.
    let bt = unsafe { timings.__bindgen_anon_1.bt };
    let bt_width: u32 = bt.width;
    let bt_height: u32 = bt.height;
    let bt_pixelclock: u64 = bt.pixelclock;
    let bt_hfrontporch: u32 = bt.hfrontporch;
    let bt_hsync: u32 = bt.hsync;
    let bt_hbackporch: u32 = bt.hbackporch;
    let bt_vfrontporch: u32 = bt.vfrontporch;
    let bt_vsync: u32 = bt.vsync;
    let bt_vbackporch: u32 = bt.vbackporch;
    // `<= 64` already rejects zero, so the previous separate `== 0` tests
    // were redundant: anything at or below 64 pixels/lines is noise.
    if bt_width <= 64 || bt_height <= 64 {
        warn!(
            "QUERY_DV_TIMINGS returned degenerate {}x{}, treating as NoSignal",
            bt_width, bt_height
        );
        return Err(AppError::CaptureNoSignal {
            kind: SignalStatus::NoSignal.as_str().to_string(),
        });
    }
    // Latch the detected timings so subsequent S_FMT / STREAMON use the
    // right pixel clock + blanking. Failure here is *not* fatal on some
    // drivers (rkcif doesn't implement S_DV_TIMINGS per-output-device, only
    // on the bridging subdev), so degrade to a warning and keep going.
    if let Err(e) = ioctl::s_dv_timings::<_, v4l2_dv_timings>(fd, timings) {
        debug!(
            "VIDIOC_S_DV_TIMINGS failed ({}), continuing with queried timings for S_FMT",
            e
        );
    }
    let fps = dv_timings_fps_from_scalars(
        bt_width,
        bt_height,
        bt_hfrontporch + bt_hsync + bt_hbackporch,
        bt_vfrontporch + bt_vsync + bt_vbackporch,
        bt_pixelclock,
    );
    info!(
        "DV timings locked: {}x{} @ {} (pix_clk={})",
        bt_width,
        bt_height,
        fps.map(|f| format!("{:.2} fps", f))
            .unwrap_or_else(|| "?fps".to_string()),
        bt_pixelclock
    );
    Ok(DvTimingsMode {
        width: bt_width,
        height: bt_height,
        fps,
    })
}
/// Compute the frame rate implied by BT.656/1120 timing scalars:
/// `fps = pixelclock / ((width + h_blanking) * (height + v_blanking))`.
///
/// Returns `None` when the total frame size is zero, overflows `u64`, or
/// `pixelclock` is zero — i.e. the driver reported incomplete timings.
fn dv_timings_fps_from_scalars(
    width: u32,
    height: u32,
    h_blanking: u32,
    v_blanking: u32,
    pixelclock: u64,
) -> Option<f64> {
    // Widen *before* adding: `width + h_blanking` in u32 can overflow on
    // garbage timings (debug panic / release wrap); u64 sums cannot.
    let total_h = width as u64 + h_blanking as u64;
    let total_v = height as u64 + v_blanking as u64;
    let denom = total_h.checked_mul(total_v)?;
    if denom == 0 || pixelclock == 0 {
        return None;
    }
    Some(pixelclock as f64 / denom as f64)
}
fn set_fps(fd: &File, queue: QueueType, fps: u32) -> std::result::Result<(), ioctl::GParmError> { fn set_fps(fd: &File, queue: QueueType, fps: u32) -> std::result::Result<(), ioctl::GParmError> {

View File

@@ -78,15 +78,13 @@ async fn handle_audio_socket(socket: WebSocket, state: Arc<AppState>) {
loop { loop {
tokio::select! { tokio::select! {
// Receive Opus frames and send to client // Receive Opus frames and send to client
opus_result = opus_rx.changed() => { opus_result = opus_rx.recv() => {
if opus_result.is_err() { let frame = match opus_result {
Some(f) => f,
None => {
info!("Audio stream closed"); info!("Audio stream closed");
break; break;
} }
let frame = match opus_rx.borrow().clone() {
Some(frame) => frame,
None => continue,
}; };
let binary = encode_audio_packet(&frame, stream_start); let binary = encode_audio_packet(&frame, stream_start);

View File

@@ -988,6 +988,7 @@ pub struct VideoDevice {
pub driver: String, pub driver: String,
pub formats: Vec<VideoFormat>, pub formats: Vec<VideoFormat>,
pub usb_bus: Option<String>, pub usb_bus: Option<String>,
pub has_signal: bool,
} }
#[derive(Serialize)] #[derive(Serialize)]
@@ -1083,10 +1084,14 @@ pub async fn list_devices(State(state): State<Arc<AppState>>) -> Json<DeviceList
}) })
.collect(), .collect(),
usb_bus, usb_bus,
has_signal: d.has_signal,
} }
}) })
.collect(), .collect(),
Err(_) => vec![], Err(e) => {
warn!(error = %e, "Video device enumeration failed; returning empty video list for /api/devices");
vec![]
}
}; };
// Detect serial devices (common USB/ACM ports) - single directory read // Detect serial devices (common USB/ACM ports) - single directory read

View File

@@ -589,6 +589,9 @@ impl UniversalSession {
mut frame_rx: tokio::sync::mpsc::Receiver<std::sync::Arc<EncodedVideoFrame>>, mut frame_rx: tokio::sync::mpsc::Receiver<std::sync::Arc<EncodedVideoFrame>>,
request_keyframe: Arc<dyn Fn() + Send + Sync + 'static>, request_keyframe: Arc<dyn Fn() + Send + Sync + 'static>,
) { ) {
if let Some(handle) = self.video_receiver_handle.lock().await.take() {
handle.abort();
}
info!( info!(
"Starting {} session {} with shared encoder", "Starting {} session {} with shared encoder",
self.codec, self.session_id self.codec, self.session_id
@@ -749,7 +752,7 @@ impl UniversalSession {
/// Start receiving Opus audio frames /// Start receiving Opus audio frames
pub async fn start_audio_from_opus( pub async fn start_audio_from_opus(
&self, &self,
mut opus_rx: tokio::sync::watch::Receiver<Option<std::sync::Arc<OpusFrame>>>, mut opus_rx: tokio::sync::mpsc::Receiver<std::sync::Arc<OpusFrame>>,
) { ) {
let audio_track = match &self.audio_track { let audio_track = match &self.audio_track {
Some(track) => track.clone(), Some(track) => track.clone(),
@@ -805,15 +808,13 @@ impl UniversalSession {
} }
} }
result = opus_rx.changed() => { result = opus_rx.recv() => {
if result.is_err() { let opus_frame = match result {
info!("Opus channel closed for session {}", session_id); Some(frame) => frame,
None => {
info!("Opus mpsc closed for session {}", session_id);
break; break;
} }
let opus_frame = match opus_rx.borrow().clone() {
Some(frame) => frame,
None => continue,
}; };
// 20ms at 48kHz = 960 samples // 20ms at 48kHz = 960 samples

View File

@@ -36,14 +36,15 @@ use tracing::{debug, info, trace, warn};
use crate::audio::{AudioController, OpusFrame}; use crate::audio::{AudioController, OpusFrame};
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::events::EventBus; use crate::events::{EventBus, SystemEvent};
use crate::hid::HidController; use crate::hid::HidController;
use crate::video::encoder::registry::EncoderBackend; use crate::video::encoder::registry::EncoderBackend;
use crate::video::encoder::registry::VideoEncoderType; use crate::video::encoder::registry::VideoEncoderType;
use crate::video::encoder::VideoCodecType; use crate::video::encoder::VideoCodecType;
use crate::video::format::{PixelFormat, Resolution}; use crate::video::format::{PixelFormat, Resolution};
use crate::video::shared_video_pipeline::{ use crate::video::shared_video_pipeline::{
SharedVideoPipeline, SharedVideoPipelineConfig, SharedVideoPipelineStats, PipelineStateNotification, SharedVideoPipeline, SharedVideoPipelineConfig,
SharedVideoPipelineStats,
}; };
use super::config::{TurnServer, WebRtcConfig}; use super::config::{TurnServer, WebRtcConfig};
@@ -93,6 +94,8 @@ pub struct CaptureDeviceConfig {
pub device_path: PathBuf, pub device_path: PathBuf,
pub buffer_count: u32, pub buffer_count: u32,
pub jpeg_quality: u8, pub jpeg_quality: u8,
pub subdev_path: Option<PathBuf>,
pub bridge_kind: Option<String>,
} }
/// WebRTC streamer statistics /// WebRTC streamer statistics
@@ -274,6 +277,73 @@ impl WebRtcStreamer {
} }
} }
fn build_pipeline_state_notifier(
device: String,
events: Option<Arc<EventBus>>,
) -> Option<Arc<dyn Fn(PipelineStateNotification) + Send + Sync>> {
events.map(|events| {
Arc::new(move |notification: PipelineStateNotification| {
events.publish(SystemEvent::StreamStateChanged {
state: notification.state.to_string(),
device: Some(device.clone()),
reason: notification.reason.map(|reason| reason.to_string()),
next_retry_ms: notification.next_retry_ms,
});
}) as Arc<dyn Fn(PipelineStateNotification) + Send + Sync>
})
}
fn make_keyframe_callback(
pipeline: Arc<SharedVideoPipeline>,
session_id: String,
) -> Arc<dyn Fn() + Send + Sync + 'static> {
Arc::new(move || {
let pipeline = pipeline.clone();
let sid = session_id.clone();
tokio::spawn(async move {
info!("Requesting keyframe for session {} after reconnect", sid);
pipeline.request_keyframe().await;
});
})
}
async fn reconnect_sessions_to_current_pipeline(
self: &Arc<Self>,
reason: &str,
) -> Result<usize> {
if self.capture_device.read().await.is_none() {
return Ok(0);
}
let sessions_to_reconnect: Vec<(String, Arc<UniversalSession>)> = {
let sessions = self.sessions.read().await;
sessions
.iter()
.map(|(session_id, session)| (session_id.clone(), session.clone()))
.collect()
};
if sessions_to_reconnect.is_empty() {
return Ok(0);
}
let pipeline = self.ensure_video_pipeline().await?;
for (session_id, session) in &sessions_to_reconnect {
info!(
"Reconnecting session {} to pipeline after {}",
session_id, reason
);
session
.start_from_video_pipeline(
pipeline.subscribe(),
Self::make_keyframe_callback(pipeline.clone(), session_id.clone()),
)
.await;
}
Ok(sessions_to_reconnect.len())
}
/// Ensure video pipeline is initialized and running /// Ensure video pipeline is initialized and running
async fn ensure_video_pipeline(self: &Arc<Self>) -> Result<Arc<SharedVideoPipeline>> { async fn ensure_video_pipeline(self: &Arc<Self>) -> Result<Arc<SharedVideoPipeline>> {
let mut pipeline_guard = self.video_pipeline.write().await; let mut pipeline_guard = self.video_pipeline.write().await;
@@ -284,24 +354,35 @@ impl WebRtcStreamer {
} }
} }
let config = self.config.read().await;
let codec = *self.video_codec.read().await; let codec = *self.video_codec.read().await;
let pipeline_config = {
let pipeline_config = SharedVideoPipelineConfig { let config = self.config.read().await;
SharedVideoPipelineConfig {
resolution: config.resolution, resolution: config.resolution,
input_format: config.input_format, input_format: config.input_format,
output_codec: Self::codec_type_to_encoder_type(codec), output_codec: Self::codec_type_to_encoder_type(codec),
bitrate_preset: config.bitrate_preset, bitrate_preset: config.bitrate_preset,
fps: config.fps, fps: config.fps,
encoder_backend: config.encoder_backend, encoder_backend: config.encoder_backend,
}
}; };
info!("Creating shared video pipeline for {:?}", codec); info!("Creating shared video pipeline for {:?}", codec);
let pipeline = SharedVideoPipeline::new(pipeline_config)?; let pipeline = SharedVideoPipeline::new(pipeline_config)?;
let capture_device = self.capture_device.read().await.clone(); let capture_device = self.capture_device.read().await.clone();
if let Some(device) = capture_device { if let Some(device) = capture_device {
pipeline.set_state_notifier(Self::build_pipeline_state_notifier(
device.device_path.display().to_string(),
self.events.read().await.clone(),
));
pipeline pipeline
.start_with_device(device.device_path, device.buffer_count, device.jpeg_quality) .start_with_device(
device.device_path,
device.buffer_count,
device.jpeg_quality,
device.subdev_path,
device.bridge_kind,
)
.await?; .await?;
} else { } else {
return Err(AppError::VideoError( return Err(AppError::VideoError(
@@ -322,11 +403,13 @@ impl WebRtcStreamer {
// Clear pipeline reference in WebRtcStreamer // Clear pipeline reference in WebRtcStreamer
if let Some(streamer) = streamer_weak.upgrade() { if let Some(streamer) = streamer_weak.upgrade() {
let mut pending_geometry: Option<(Resolution, PixelFormat)> = None;
let mut pipeline_guard = streamer.video_pipeline.write().await; let mut pipeline_guard = streamer.video_pipeline.write().await;
// Only clear if it's the same pipeline that stopped // Only clear if it's the same pipeline that stopped
if let Some(ref current) = *pipeline_guard { if let Some(ref current) = *pipeline_guard {
if let Some(stopped_pipeline) = pipeline_weak.upgrade() { if let Some(stopped_pipeline) = pipeline_weak.upgrade() {
if Arc::ptr_eq(current, &stopped_pipeline) { if Arc::ptr_eq(current, &stopped_pipeline) {
pending_geometry = stopped_pipeline.take_pending_sync_geometry();
*pipeline_guard = None; *pipeline_guard = None;
info!("Cleared stopped video pipeline reference"); info!("Cleared stopped video pipeline reference");
} }
@@ -334,6 +417,35 @@ impl WebRtcStreamer {
} }
drop(pipeline_guard); drop(pipeline_guard);
let should_reconnect = pending_geometry.is_some();
if let Some((r, f)) = pending_geometry {
streamer.sync_video_geometry_from_negotiated(r, f).await;
}
if should_reconnect {
let streamer_for_reconnect = streamer.clone();
tokio::task::spawn_blocking(move || {
let handle = tokio::runtime::Handle::current();
handle.block_on(async move {
match streamer_for_reconnect
.reconnect_sessions_to_current_pipeline(
"capture geometry change",
)
.await
{
Ok(reconnected) if reconnected > 0 => info!(
"Video pipeline rebuilt after geometry change, reconnected {} sessions",
reconnected
),
Ok(_) => {}
Err(e) => warn!(
"Failed to reconnect sessions after geometry change: {}",
e
),
}
});
});
}
info!( info!(
"Video pipeline stopped, but keeping capture config for new sessions" "Video pipeline stopped, but keeping capture config for new sessions"
); );
@@ -344,6 +456,13 @@ impl WebRtcStreamer {
debug!("Video pipeline monitor task ended"); debug!("Video pipeline monitor task ended");
}); });
let pipeline_cfg = pipeline.config().await;
self.sync_video_geometry_from_negotiated(
pipeline_cfg.resolution,
pipeline_cfg.input_format,
)
.await;
*pipeline_guard = Some(pipeline.clone()); *pipeline_guard = Some(pipeline.clone());
Ok(pipeline) Ok(pipeline)
} }
@@ -367,6 +486,15 @@ impl WebRtcStreamer {
} }
} }
pub async fn current_video_geometry(&self) -> (Resolution, PixelFormat, u32) {
if let Some(cfg) = self.get_pipeline_config().await {
(cfg.resolution, cfg.input_format, cfg.fps)
} else {
let c = self.config.read().await;
(c.resolution, c.input_format, c.fps)
}
}
/// Request the encoder to generate a keyframe on next encode /// Request the encoder to generate a keyframe on next encode
pub async fn request_keyframe(&self) -> Result<()> { pub async fn request_keyframe(&self) -> Result<()> {
if let Some(ref pipeline) = *self.video_pipeline.read().await { if let Some(ref pipeline) = *self.video_pipeline.read().await {
@@ -417,7 +545,7 @@ impl WebRtcStreamer {
/// Subscribe to encoded Opus frames (for sessions) /// Subscribe to encoded Opus frames (for sessions)
pub async fn subscribe_opus( pub async fn subscribe_opus(
&self, &self,
) -> Option<tokio::sync::watch::Receiver<Option<std::sync::Arc<OpusFrame>>>> { ) -> Option<tokio::sync::mpsc::Receiver<std::sync::Arc<OpusFrame>>> {
if let Some(ref controller) = *self.audio_controller.read().await { if let Some(ref controller) = *self.audio_controller.read().await {
controller.subscribe_opus_async().await controller.subscribe_opus_async().await
} else { } else {
@@ -441,16 +569,23 @@ impl WebRtcStreamer {
} }
} }
/// Set capture device for direct capture pipeline pub async fn set_capture_device(
pub async fn set_capture_device(&self, device_path: PathBuf, jpeg_quality: u8) { &self,
device_path: PathBuf,
jpeg_quality: u8,
subdev_path: Option<PathBuf>,
bridge_kind: Option<String>,
) {
info!( info!(
"Setting direct capture device for WebRTC: {:?}", "Setting direct capture device for WebRTC: {:?} (subdev={:?}, kind={:?})",
device_path device_path, subdev_path, bridge_kind
); );
*self.capture_device.write().await = Some(CaptureDeviceConfig { *self.capture_device.write().await = Some(CaptureDeviceConfig {
device_path, device_path,
buffer_count: 2, buffer_count: 2,
jpeg_quality, jpeg_quality,
subdev_path,
bridge_kind,
}); });
} }
@@ -519,6 +654,7 @@ impl WebRtcStreamer {
} }
// Update config (preserve user-configured bitrate) // Update config (preserve user-configured bitrate)
{
let mut config = self.config.write().await; let mut config = self.config.write().await;
config.resolution = resolution; config.resolution = resolution;
config.input_format = format; config.input_format = format;
@@ -527,10 +663,47 @@ impl WebRtcStreamer {
info!( info!(
"WebRTC config updated: {}x{} {:?} @ {} fps, {}", "WebRTC config updated: {}x{} {:?} @ {} fps, {}",
resolution.width, resolution.height, format, fps, config.bitrate_preset resolution.width,
resolution.height,
format,
fps,
config.bitrate_preset
); );
} }
self.notify_device_info_dirty().await;
}
/// Update resolution/format to match DV-negotiated capture without stopping
/// the pipeline or closing sessions. Used when hardware timing differs from
/// saved settings (e.g. RK628 `S_FMT` follows source while SQLite still has
/// a user-chosen preset).
pub async fn sync_video_geometry_from_negotiated(
&self,
resolution: Resolution,
format: PixelFormat,
) {
{
let mut config = self.config.write().await;
if config.resolution == resolution && config.input_format == format {
return;
}
info!(
"WebRTC geometry aligned to negotiated capture: {}x{} {:?} (was {}x{} {:?})",
resolution.width,
resolution.height,
format,
config.resolution.width,
config.resolution.height,
config.input_format
);
config.resolution = resolution;
config.input_format = format;
}
self.notify_device_info_dirty().await;
}
/// Update encoder backend (software/hardware selection) /// Update encoder backend (software/hardware selection)
pub async fn update_encoder_backend(&self, encoder_backend: Option<EncoderBackend>) { pub async fn update_encoder_backend(&self, encoder_backend: Option<EncoderBackend>) {
// Stop existing pipeline // Stop existing pipeline
@@ -652,6 +825,14 @@ impl WebRtcStreamer {
*self.events.write().await = Some(events); *self.events.write().await = Some(events);
} }
/// Push a debounced `system.device_info` refresh so the console status card
/// picks up DV-negotiated / pipeline resolution without a separate WebRTC message.
async fn notify_device_info_dirty(&self) {
if let Some(bus) = self.events.read().await.as_ref() {
bus.mark_device_info_dirty();
}
}
// === Session Management === // === Session Management ===
/// Create a new WebRTC session /// Create a new WebRTC session
@@ -695,17 +876,8 @@ impl WebRtcStreamer {
// Request keyframe after ICE connection is established and on gaps // Request keyframe after ICE connection is established and on gaps
let pipeline_for_callback = pipeline.clone(); let pipeline_for_callback = pipeline.clone();
let session_id_for_callback = session_id.clone(); let session_id_for_callback = session_id.clone();
let request_keyframe = Arc::new(move || { let request_keyframe =
let pipeline = pipeline_for_callback.clone(); Self::make_keyframe_callback(pipeline_for_callback, session_id_for_callback);
let sid = session_id_for_callback.clone();
tokio::spawn(async move {
info!(
"Requesting keyframe for session {} after ICE connected",
sid
);
pipeline.request_keyframe().await;
});
});
session session
.start_from_video_pipeline(pipeline.subscribe(), request_keyframe) .start_from_video_pipeline(pipeline.subscribe(), request_keyframe)
.await; .await;
@@ -939,34 +1111,14 @@ impl WebRtcStreamer {
return Ok(()); return Ok(());
} }
let session_ids: Vec<String> = self.sessions.read().await.keys().cloned().collect(); let reconnected = self
if !session_ids.is_empty() { .reconnect_sessions_to_current_pipeline("bitrate change")
let pipeline = self.ensure_video_pipeline().await?; .await?;
if reconnected > 0 {
let sessions = self.sessions.read().await;
for session_id in &session_ids {
if let Some(session) = sessions.get(session_id) {
info!("Reconnecting session {} to new pipeline", session_id);
let pipeline_for_callback = pipeline.clone();
let sid = session_id.clone();
let request_keyframe = Arc::new(move || {
let pipeline = pipeline_for_callback.clone();
let sid = sid.clone();
tokio::spawn(async move {
info!("Requesting keyframe for session {} after reconnect", sid);
pipeline.request_keyframe().await;
});
});
session
.start_from_video_pipeline(pipeline.subscribe(), request_keyframe)
.await;
}
}
info!( info!(
"Video pipeline restarted with {}, reconnected {} sessions", "Video pipeline restarted with {}, reconnected {} sessions",
preset, preset,
session_ids.len() reconnected
); );
} }
} else { } else {

View File

@@ -213,7 +213,18 @@ export interface VideoEncoderSelfCheckResponse {
export const streamApi = { export const streamApi = {
status: () => status: () =>
request<{ request<{
state: 'uninitialized' | 'ready' | 'streaming' | 'no_signal' | 'error' state:
| 'uninitialized'
| 'ready'
| 'streaming'
| 'no_signal'
| 'no_cable'
| 'no_sync'
| 'out_of_range'
| 'device_lost'
| 'recovering'
| 'device_busy'
| 'error'
device: string | null device: string | null
format: string | null format: string | null
resolution: [number, number] | null resolution: [number, number] | null
@@ -649,6 +660,7 @@ export const configApi = {
}> }>
}> }>
usb_bus: string | null usb_bus: string | null
has_signal: boolean
}> }>
serial: Array<{ path: string; name: string }> serial: Array<{ path: string; name: string }>
audio: Array<{ audio: Array<{

View File

@@ -47,6 +47,7 @@ interface VideoDevice {
fps: number[] fps: number[]
}[] }[]
}[] }[]
has_signal?: boolean
} }
const props = defineProps<{ const props = defineProps<{

View File

@@ -14,7 +14,14 @@ export interface ConsoleEventHandlers {
onStreamModeSwitching?: (data: { transition_id: string; to_mode: string; from_mode: string }) => void onStreamModeSwitching?: (data: { transition_id: string; to_mode: string; from_mode: string }) => void
onStreamModeReady?: (data: { transition_id: string; mode: string }) => void onStreamModeReady?: (data: { transition_id: string; mode: string }) => void
onWebRTCReady?: (data: { codec: string; hardware: boolean; transition_id?: string }) => void onWebRTCReady?: (data: { codec: string; hardware: boolean; transition_id?: string }) => void
onStreamStateChanged?: (data: { state: string; device?: string | null }) => void onStreamStateChanged?: (data: {
state: string
device?: string | null
/** Optional fine-grained diagnostic tag (e.g. `no_cable`, `out_of_range`, `recovering`). */
reason?: string | null
/** Optional countdown (ms) until the next backend self-recovery attempt. */
next_retry_ms?: number | null
}) => void
onStreamDeviceLost?: (data: { device: string; reason: string }) => void onStreamDeviceLost?: (data: { device: string; reason: string }) => void
onStreamReconnecting?: (data: { device: string; attempt: number }) => void onStreamReconnecting?: (data: { device: string; attempt: number }) => void
onStreamRecovered?: (data: { device: string }) => void onStreamRecovered?: (data: { device: string }) => void

View File

@@ -240,6 +240,8 @@ export default {
fps: 'Frame Rate', fps: 'Frame Rate',
selectFps: 'Select FPS', selectFps: 'Select FPS',
noVideoDevices: 'No video devices detected', noVideoDevices: 'No video devices detected',
noSignalDetected: 'No HDMI signal detected. Please connect an HDMI cable and refresh.',
refreshDevices: 'Refresh Devices',
// Audio // Audio
audioDevice: 'Audio Device', audioDevice: 'Audio Device',
selectAudioDevice: 'Select audio capture device', selectAudioDevice: 'Select audio capture device',
@@ -310,6 +312,33 @@ export default {
configChanging: 'Applying new configuration...', configChanging: 'Applying new configuration...',
videoRestarted: 'Video stream updated', videoRestarted: 'Video stream updated',
streamError: 'Stream error', streamError: 'Stream error',
// Four canonical video states (backend StreamStateChanged: streaming /
// no_signal / device_lost / device_busy). `reason` provides optional
// fine-grained diagnostic sub-text.
signal: {
noSignal: {
title: 'Waiting for video signal',
detail: 'Capture device is ready, waiting for the target to output video',
},
deviceLost: {
title: 'Video device offline',
detail: 'Capture card is not responding, attempting to re-detect…',
},
deviceBusy: {
title: 'Video channel busy',
detail: 'Applying a new configuration or another component is using the device, please wait…',
},
reason: {
no_cable: 'HDMI cable not detected — check the cable and that the target is powered on',
no_sync: 'Unstable signal: timings could not be locked — try a lower resolution or refresh rate',
out_of_range: 'Resolution or refresh rate exceeds capture capability — try 1080p60 or below',
no_signal: 'Capture card is ready, waiting for a picture…',
recovering: 'Reconnecting the video device automatically',
device_lost: 'Video node disappeared, waiting for the driver to recover',
config_changing: 'Applying new configuration',
mode_switching: 'Switching video mode',
},
},
// WebRTC // WebRTC
webrtcConnected: 'WebRTC Connected', webrtcConnected: 'WebRTC Connected',
webrtcConnectedDesc: 'Using low-latency H.264 video stream', webrtcConnectedDesc: 'Using low-latency H.264 video stream',

View File

@@ -240,6 +240,8 @@ export default {
fps: '帧率', fps: '帧率',
selectFps: '选择帧率', selectFps: '选择帧率',
noVideoDevices: '未检测到视频设备', noVideoDevices: '未检测到视频设备',
noSignalDetected: '未检测到 HDMI 信号,请连接 HDMI 线缆后刷新。',
refreshDevices: '刷新设备',
// Audio // Audio
audioDevice: '音频设备', audioDevice: '音频设备',
selectAudioDevice: '选择音频采集设备', selectAudioDevice: '选择音频采集设备',
@@ -310,6 +312,32 @@ export default {
configChanging: '正在应用新配置...', configChanging: '正在应用新配置...',
videoRestarted: '视频流已更新', videoRestarted: '视频流已更新',
streamError: '视频流错误', streamError: '视频流错误',
// 四档视频状态(对应后端 StreamStateChangedstreaming / no_signal /
// device_lost / device_busy. `reason` 子键可选,用于在副文案中补充细节。
signal: {
noSignal: {
title: '暂无视频信号',
detail: '采集卡已就绪,正在等待被控机画面',
},
deviceLost: {
title: '视频设备已断开',
detail: '采集卡离线,正在尝试重新识别…',
},
deviceBusy: {
title: '视频通道忙',
detail: '正在切换配置或被其他组件占用,请稍候…',
},
reason: {
no_cable: '未检测到 HDMI 线缆,请检查连接或被控机是否已开机',
no_sync: '信号不稳定,无法锁定时序,可尝试降低被控机分辨率/刷新率',
out_of_range: '分辨率或刷新率超出采集卡能力,建议切换到 1080p60 以内',
no_signal: '采集卡已就绪,正在等待画面…',
recovering: '正在自动重连视频设备',
device_lost: '视频节点丢失,等待驱动恢复',
config_changing: '正在应用新配置',
mode_switching: '正在切换视频模式',
},
},
// WebRTC // WebRTC
webrtcConnected: 'WebRTC 已连接', webrtcConnected: 'WebRTC 已连接',
webrtcConnectedDesc: '正在使用 H.264 低延迟视频流', webrtcConnectedDesc: '正在使用 H.264 低延迟视频流',

View File

@@ -59,7 +59,7 @@ import {
Loader2, Loader2,
} from 'lucide-vue-next' } from 'lucide-vue-next'
const { t } = useI18n() const { t, te } = useI18n()
const router = useRouter() const router = useRouter()
const systemStore = useSystemStore() const systemStore = useSystemStore()
const configStore = useConfigStore() const configStore = useConfigStore()
@@ -98,6 +98,12 @@ const videoErrorMessage = ref('')
const videoRestarting = ref(false) // Track if video is restarting due to config change const videoRestarting = ref(false) // Track if video is restarting due to config change
const mjpegFrameReceived = ref(false) // Whether MJPEG stream has received at least one frame const mjpegFrameReceived = ref(false) // Whether MJPEG stream has received at least one frame
/** From `stream.state_changed`: ok | no_signal | device_lost | device_busy */
type StreamSignalState = 'ok' | 'no_signal' | 'device_lost' | 'device_busy'
const streamSignalState = ref<StreamSignalState>('ok')
const streamSignalReason = ref<string | null>(null)
const streamNextRetryMs = ref<number | null>(null)
// Video aspect ratio (dynamically updated from actual video dimensions) // Video aspect ratio (dynamically updated from actual video dimensions)
// Using string format "width/height" to let browser handle the ratio calculation // Using string format "width/height" to let browser handle the ratio calculation
const videoAspectRatio = ref<string | null>(null) const videoAspectRatio = ref<string | null>(null)
@@ -644,6 +650,7 @@ function waitForVideoFirstFrame(el: HTMLVideoElement, timeoutMs = 2000): Promise
}) })
} }
/** For WebRTC watch: skip auto-reconnect when these hold. */
function shouldSuppressAutoReconnect(): boolean { function shouldSuppressAutoReconnect(): boolean {
return videoMode.value === 'mjpeg' return videoMode.value === 'mjpeg'
|| !isConsoleActive.value || !isConsoleActive.value
@@ -751,6 +758,17 @@ function handleVideoError() {
return return
} }
// Expected <img> error while overlay shows no_signal / device_* — do not retry.
if (streamSignalState.value !== 'ok') {
if (retryTimeoutId !== null) {
clearTimeout(retryTimeoutId)
retryTimeoutId = null
}
videoLoading.value = false
mjpegFrameReceived.value = false
return
}
// Count consecutive errors even in grace period // Count consecutive errors even in grace period
consecutiveErrors++ consecutiveErrors++
@@ -993,22 +1011,121 @@ function handleStreamModeSwitching(data: { transition_id: string; to_mode: strin
} }
function handleStreamStateChanged(data: any) { function handleStreamStateChanged(data: any) {
if (data.state === 'error') { const state = typeof data?.state === 'string' ? data.state : ''
const reason = typeof data?.reason === 'string' && data.reason.length > 0 ? data.reason : null
const nextRetry = typeof data?.next_retry_ms === 'number' && data.next_retry_ms > 0
? data.next_retry_ms
: null
streamSignalReason.value = reason
streamNextRetryMs.value = nextRetry
const previous = streamSignalState.value
switch (state) {
case 'streaming':
case 'ready':
case 'uninitialized':
streamSignalState.value = 'ok'
break
case 'no_signal':
streamSignalState.value = 'no_signal'
break
case 'device_lost':
streamSignalState.value = 'device_lost'
break
case 'device_busy':
streamSignalState.value = 'device_busy'
break
}
if (state === 'error') {
videoError.value = true videoError.value = true
videoErrorMessage.value = t('console.streamError') videoErrorMessage.value = t('console.streamError')
} else if (data.state === 'recovering' && videoMode.value !== 'mjpeg') { } else if (state === 'no_signal' && videoMode.value !== 'mjpeg') {
// Backend is in the DeviceLost recovery loop; start WebRTC reconnect if not already scheduled. cancelWebRTCRecovery()
videoRestarting.value = false
videoError.value = false
videoErrorMessage.value = ''
} else if (state === 'device_busy' && videoMode.value !== 'mjpeg') {
cancelWebRTCRecovery()
videoRestarting.value = true
videoLoading.value = true
videoError.value = false
videoErrorMessage.value = ''
if (previous !== 'device_busy') {
captureFrameOverlay().catch(() => {})
}
} else if (state === 'device_lost' && videoMode.value !== 'mjpeg') {
if (webrtcRecoveryTimerId === null && webrtcRecoveryAttempts === 0) { if (webrtcRecoveryTimerId === null && webrtcRecoveryAttempts === 0) {
scheduleWebRTCRecovery() scheduleWebRTCRecovery()
} }
} else if (data.state === 'streaming' || data.state === 'no_signal') { } else if (state === 'streaming') {
// Backend stream is alive; cancel any pending recovery timers.
if (data.state === 'streaming') {
cancelWebRTCRecovery() cancelWebRTCRecovery()
videoError.value = false
videoErrorMessage.value = ''
videoRestarting.value = false
if (
videoMode.value === 'mjpeg'
&& (previous === 'no_signal' || previous === 'device_lost' || previous === 'device_busy')
) {
refreshVideo()
} else if (
videoMode.value !== 'mjpeg'
&& (previous === 'no_signal' || previous === 'device_busy' || previous === 'device_lost')
) {
if (webrtc.isConnected.value && !webrtc.isConnecting.value) {
void rebindWebRTCVideo().then(() => {
videoLoading.value = false
})
} else if (!webrtc.isConnected.value && !webrtc.isConnecting.value) {
void connectWebRTCSerial('stream recovered').then(async (ok) => {
if (ok) {
await rebindWebRTCVideo()
videoLoading.value = false
} else if (webrtcRecoveryTimerId === null && webrtcRecoveryAttempts === 0) {
scheduleWebRTCRecovery()
}
})
}
} }
} }
} }
const showSignalOverlay = computed(() => streamSignalState.value !== 'ok')
const signalOverlayInfo = computed(() => {
const reason = streamSignalReason.value
const reasonHintKey = reason ? `console.signal.reason.${reason}` : ''
const hint = reasonHintKey && te(reasonHintKey) ? t(reasonHintKey) : ''
switch (streamSignalState.value) {
case 'no_signal':
return {
title: t('console.signal.noSignal.title'),
detail: t('console.signal.noSignal.detail'),
hint,
tone: 'info' as const,
}
case 'device_lost':
return {
title: t('console.signal.deviceLost.title'),
detail: t('console.signal.deviceLost.detail'),
hint,
tone: 'error' as const,
}
case 'device_busy':
return {
title: t('console.signal.deviceBusy.title'),
detail: t('console.signal.deviceBusy.detail'),
hint,
tone: 'info' as const,
}
default:
return { title: '', detail: '', hint: '', tone: 'info' as const }
}
})
function handleStreamStatsUpdate(data: any) { function handleStreamStatsUpdate(data: any) {
// Always update clients count in store (for MJPEG mode display) // Always update clients count in store (for MJPEG mode display)
if (typeof data.clients === 'number') { if (typeof data.clients === 'number') {
@@ -1177,8 +1294,12 @@ function refreshVideo() {
} }
// MJPEG URL with cache-busting timestamp (reactive) // MJPEG URL with cache-busting timestamp (reactive)
// Only return valid URL when in MJPEG mode to prevent unnecessary requests // Only return valid URL when in MJPEG mode and the backend reports a
const mjpegTimestamp = ref(0) // Start with 0 to prevent initial load // healthy stream. When the backend goes offline (no_signal / device_lost
// / device_busy) we deliberately return an empty string so the `<img>`
// tag has no `src` and the 4-state overlay fully owns the video area —
// no more fake placeholder JPEG peeking through.
const mjpegTimestamp = ref(0)
const mjpegUrl = computed(() => { const mjpegUrl = computed(() => {
if (videoMode.value !== 'mjpeg') { if (videoMode.value !== 'mjpeg') {
return '' // Don't load MJPEG when in H264 mode return '' // Don't load MJPEG when in H264 mode
@@ -1186,6 +1307,9 @@ const mjpegUrl = computed(() => {
if (mjpegTimestamp.value === 0) { if (mjpegTimestamp.value === 0) {
return '' // Don't load until refreshVideo() is called return '' // Don't load until refreshVideo() is called
} }
if (streamSignalState.value !== 'ok') {
return '' // Backend is offline; let the overlay own the viewport
}
return `${streamApi.getMjpegUrl(myClientId)}&t=${mjpegTimestamp.value}` return `${streamApi.getMjpegUrl(myClientId)}&t=${mjpegTimestamp.value}`
}) })
@@ -1491,21 +1615,27 @@ watch(() => webrtc.state.value, (newState, oldState) => {
webrtcReconnectTimeout = null webrtcReconnectTimeout = null
} }
if (shouldSuppressAutoReconnect()) { // Run before `shouldSuppressAutoReconnect()` so `device_busy` / `videoRestarting`
return // never blocks clearing the loading overlay when ICE becomes connected.
}
// Update stream online status based on WebRTC connection state
if (videoMode.value !== 'mjpeg') { if (videoMode.value !== 'mjpeg') {
if (newState === 'connected') { if (newState === 'connected') {
systemStore.setStreamOnline(true) systemStore.setStreamOnline(true)
webrtcReconnectFailures = 0 webrtcReconnectFailures = 0
if (videoLoading.value) {
void rebindWebRTCVideo().then(() => {
videoLoading.value = false
})
}
} else if (newState === 'disconnected' || newState === 'failed') { } else if (newState === 'disconnected' || newState === 'failed') {
// Don't immediately set offline - wait for potential reconnect // Don't immediately set offline - wait for potential reconnect
// The device_info event will eventually sync the correct state // The device_info event will eventually sync the correct state
} }
} }
if (shouldSuppressAutoReconnect()) {
return
}
// Auto-reconnect when disconnected (but was previously connected) // Auto-reconnect when disconnected (but was previously connected)
if (newState === 'disconnected' && oldState === 'connected' && videoMode.value !== 'mjpeg') { if (newState === 'disconnected' && oldState === 'connected' && videoMode.value !== 'mjpeg') {
webrtcReconnectTimeout = setTimeout(async () => { webrtcReconnectTimeout = setTimeout(async () => {
@@ -2584,6 +2714,50 @@ onUnmounted(() => {
</div> </div>
</Transition> </Transition>
<!--
Canonical 4-state signal overlay (no_signal / device_lost /
device_busy). Fully covers the video area with a solid dim
backdrop so the browser never shows a frozen last frame or a
transparent video element peeking through the MJPEG `<img>`
has its `src` cleared the moment the backend goes offline and
the WebRTC track is simply obscured. Sits below the loading /
error overlays so those take precedence when both apply.
-->
<Transition name="fade">
<div
v-if="showSignalOverlay && !videoLoading && !videoError"
class="absolute inset-0 flex flex-col items-center justify-center gap-3 p-4 transition-opacity duration-300 pointer-events-none"
:class="{
'bg-black/80 backdrop-blur-sm': signalOverlayInfo.tone === 'error',
'bg-black/70 backdrop-blur-sm': signalOverlayInfo.tone !== 'error',
}"
>
<MonitorOff
class="h-10 w-10 sm:h-16 sm:w-16"
:class="{
'text-slate-200': signalOverlayInfo.tone === 'info',
'text-red-300': signalOverlayInfo.tone === 'error',
}"
/>
<div class="text-center max-w-md">
<p
class="font-semibold text-sm sm:text-lg text-white"
>{{ signalOverlayInfo.title }}</p>
<p
class="text-xs sm:text-sm mt-1 sm:mt-2"
:class="{
'text-slate-200/80': signalOverlayInfo.tone === 'info',
'text-red-100/80': signalOverlayInfo.tone === 'error',
}"
>{{ signalOverlayInfo.detail }}</p>
<p
v-if="signalOverlayInfo.hint"
class="text-[11px] sm:text-xs mt-2 text-white/50"
>{{ signalOverlayInfo.hint }}</p>
</div>
</div>
</Transition>
<!-- Error Overlay with smooth transition and detailed info --> <!-- Error Overlay with smooth transition and detailed info -->
<Transition name="fade"> <Transition name="fade">
<div <div

View File

@@ -35,6 +35,7 @@ import {
Check, Check,
HelpCircle, HelpCircle,
Puzzle, Puzzle,
RefreshCw,
} from 'lucide-vue-next' } from 'lucide-vue-next'
const { t } = useI18n() const { t } = useI18n()
@@ -106,6 +107,7 @@ interface VideoDeviceInfo {
}> }>
}> }>
usb_bus: string | null usb_bus: string | null
has_signal: boolean
} }
interface AudioDeviceInfo { interface AudioDeviceInfo {
@@ -164,6 +166,29 @@ const passwordStrengthColor = computed(() => {
return colors[passwordStrength.value] || colors[0] return colors[passwordStrength.value] || colors[0]
}) })
// Whether the selected video device currently has an HDMI signal
const selectedDeviceHasSignal = computed(() => {
const device = devices.value.video.find((d) => d.path === videoDevice.value)
return device?.has_signal ?? true
})
const refreshingDevices = ref(false)
async function refreshDeviceList() {
refreshingDevices.value = true
try {
const result = await configApi.listDevices()
devices.value = result
if (result.extensions) {
ttydAvailable.value = result.extensions.ttyd_available
}
} catch {
// keep current list
} finally {
refreshingDevices.value = false
}
}
// Computed: available formats for selected video device // Computed: available formats for selected video device
const availableFormats = computed(() => { const availableFormats = computed(() => {
const device = devices.value.video.find((d) => d.path === videoDevice.value) const device = devices.value.video.find((d) => d.path === videoDevice.value)
@@ -735,6 +760,14 @@ const stepIcons = [User, Video, Keyboard, Puzzle]
</Select> </Select>
</div> </div>
<div v-if="videoDevice && !selectedDeviceHasSignal" class="flex items-center gap-3 p-3 rounded-lg border border-orange-500/30 bg-orange-500/5 text-sm text-orange-600 dark:text-orange-400">
<p class="flex-1">{{ t('setup.noSignalDetected') }}</p>
<Button variant="outline" size="sm" :disabled="refreshingDevices" @click="refreshDeviceList">
<RefreshCw class="w-4 h-4 mr-1" :class="{ 'animate-spin': refreshingDevices }" />
{{ t('setup.refreshDevices') }}
</Button>
</div>
<div v-if="videoDevice" class="space-y-2"> <div v-if="videoDevice" class="space-y-2">
<div class="flex items-center gap-2"> <div class="flex items-center gap-2">
<Label for="videoFormat">{{ t('setup.videoFormat') }}</Label> <Label for="videoFormat">{{ t('setup.videoFormat') }}</Label>