diff --git a/Cargo.toml b/Cargo.toml
index 04cd0e65..5071fb65 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -29,7 +29,6 @@ serde_json = "1"
 # Logging
 tracing = "0.1"
 tracing-subscriber = { version = "0.3", features = ["env-filter", "json", "tracing-log"] }
-tracing-log = "0.2"
 
 # Error handling
 thiserror = "2"
@@ -41,7 +40,6 @@ rand = "0.9"
 
 # Utilities
 uuid = { version = "1", features = ["v4", "serde"] }
-chrono = { version = "0.4", features = ["serde"] }
 base64 = "0.22"
 nix = { version = "0.30", features = ["fs", "net", "hostname", "poll"] }
 
@@ -62,8 +60,8 @@ axum-server = { version = "0.8", features = ["tls-rustls"] }
 # CLI argument parsing
 clap = { version = "4", features = ["derive"] }
 
-# Time
-time = "0.3"
+# Time (cookie max_age + RFC3339 timestamps)
+time = { version = "0.3", features = ["serde", "formatting", "parsing"] }
 
 # Video capture (V4L2)
 v4l2r = "0.0.7"
diff --git a/libs/hwcodec/Cargo.toml b/libs/hwcodec/Cargo.toml
index a4ae0fac..b84d3355 100644
--- a/libs/hwcodec/Cargo.toml
+++ b/libs/hwcodec/Cargo.toml
@@ -4,6 +4,9 @@ version = "0.8.0"
 edition = "2021"
 description = "Hardware video codec for IP-KVM (Windows/Linux)"
 
+[package.metadata.cargo-machete]
+ignored = ["serde"]
+
 [features]
 default = []
 rkmpp = []
diff --git a/src/audio/capture.rs b/src/audio/capture.rs
index b8479991..b0f1d64a 100644
--- a/src/audio/capture.rs
+++ b/src/audio/capture.rs
@@ -45,25 +45,10 @@ impl Default for AudioConfig {
 }
 
 impl AudioConfig {
-    /// Create config for a specific device
+    /// Create config for a specific device (48 kHz stereo only; must match ALSA hardware).
     pub fn for_device(device: &AudioDeviceInfo) -> Self {
-        let sample_rate = if device.sample_rates.contains(&48000) {
-            48000
-        } else {
-            *device.sample_rates.first().unwrap_or(&48000)
-        };
-
-        let channels = if device.channels.contains(&2) {
-            2
-        } else {
-            *device.channels.first().unwrap_or(&2)
-        };
-
         Self {
             device_name: device.name.clone(),
-            sample_rate,
-            channels,
-            frame_size: sample_rate / 50, // 20ms
             ..Default::default()
         }
     }
@@ -281,23 +266,29 @@ fn run_capture(
             .map_err(|e| AppError::AudioError(format!("Failed to apply hw params: {}", e)))?;
     }
 
-    // Get actual configuration
-    let actual_rate = pcm
-        .hw_params_current()
-        .map(|h| h.get_rate().unwrap_or(config.sample_rate))
-        .unwrap_or(config.sample_rate);
-
-    if actual_rate != config.sample_rate {
-        info!(
-            "ALSA sample rate differs from requested ({}Hz vs {}Hz); streamer will resample to 48000Hz for Opus",
-            actual_rate, config.sample_rate
-        );
-    } else {
-        info!(
-            "Audio capture configured: {}Hz {}ch (requested {}Hz)",
-            actual_rate, config.channels, config.sample_rate
-        );
+    // Fixed 48 kHz stereo: fail if hardware negotiated something else.
+    let hw_now = pcm.hw_params_current().map_err(|e| {
+        AppError::AudioError(format!("Failed to read hw_params after apply: {}", e))
+    })?;
+    let actual_rate = hw_now
+        .get_rate()
+        .map_err(|e| AppError::AudioError(format!("Failed to read sample rate: {}", e)))?;
+    let actual_ch = hw_now
+        .get_channels()
+        .map_err(|e| AppError::AudioError(format!("Failed to read channels: {}", e)))?;
+    if actual_rate != 48_000 {
+        return Err(AppError::AudioError(format!(
+            "Audio capture requires 48000 Hz; device is {} Hz",
+            actual_rate
+        )));
     }
+    if actual_ch != 2 {
+        return Err(AppError::AudioError(format!(
+            "Audio capture requires 2 channels (stereo); device has {}",
+            actual_ch
+        )));
+    }
+    info!("Audio capture: 48000 Hz, 2 ch");
 
     // Prepare for capture
     pcm.prepare()
@@ -357,7 +348,7 @@ fn run_capture(
                 let frame = AudioFrame::new_interleaved(
                     Bytes::copy_from_slice(&buffer[..byte_count]),
                     config.channels,
-                    actual_rate,
+                    48_000,
                     seq,
                 );
 
diff --git a/src/audio/controller.rs b/src/audio/controller.rs
index f6f76570..9539b7dc 100644
--- a/src/audio/controller.rs
+++ b/src/audio/controller.rs
@@ -342,8 +342,7 @@ impl AudioController {
     }
 
     /// Subscribe to Opus frames (for WebSocket clients)
-    pub fn subscribe_opus(&self) -> Option<watch::Receiver<Option<Arc<OpusFrame>>>> {
-        // Use try_read to avoid blocking - this is called from sync context sometimes
+    pub fn subscribe_opus(&self) -> Option<mpsc::Receiver<Arc<OpusFrame>>> {
         if let Ok(guard) = self.streamer.try_read() {
             guard.as_ref().map(|s| s.subscribe_opus())
         } else {
@@ -354,7 +353,7 @@ impl AudioController {
     /// Subscribe to Opus frames (async version)
     pub async fn subscribe_opus_async(
         &self,
-    ) -> Option<watch::Receiver<Option<Arc<OpusFrame>>>> {
+    ) -> Option<mpsc::Receiver<Arc<OpusFrame>>> {
         self.streamer
             .read()
             .await
diff --git a/src/audio/mod.rs b/src/audio/mod.rs
index dfd203e1..829bef91 100644
--- a/src/audio/mod.rs
+++ b/src/audio/mod.rs
@@ -13,7 +13,6 @@ pub mod controller;
 pub mod device;
 pub mod encoder;
 pub mod monitor;
-pub mod resample;
 pub mod streamer;
 
 pub use capture::{AudioCapturer, AudioConfig, AudioFrame};
diff --git a/src/audio/resample.rs b/src/audio/resample.rs
deleted file mode 100644
index 02135bdc..00000000
--- a/src/audio/resample.rs
+++ /dev/null
@@ -1,202 +0,0 @@
-//! Resample capture PCM to 48 kHz stereo for Opus (fixed 20 ms / 960×2 samples).
-
-const OUT_RATE: f64 = 48000.0;
-const OPUS_STEREO_SAMPLES: usize = 960 * 2;
-
-enum PipelineState {
-    /// Native 48 kHz interleaved stereo: only buffer and slice into 20 ms blocks (no float work).
-    Stereo48kPassthrough,
-    /// Other rates / mono: linear interpolation to 48 kHz stereo.
-    Resample {
-        in_rate: u32,
-        in_channels: u32,
-        next_out_frame: u64,
-        buffer_start_frame: u64,
-    },
-}
-
-/// Converts incoming interleaved PCM to 48 kHz stereo, then exposes fixed 960×2-sample chunks.
-pub struct Opus48kPcmBuffer {
-    state: PipelineState,
-    pending: Vec<i16>,
-}
-
-impl Opus48kPcmBuffer {
-    pub fn new(in_rate: u32, in_channels: u32) -> Self {
-        let ch = in_channels.max(1);
-        let rate = in_rate.max(1);
-        let state = if rate == 48000 && ch == 2 {
-            PipelineState::Stereo48kPassthrough
-        } else {
-            PipelineState::Resample {
-                in_rate: rate,
-                in_channels: ch,
-                next_out_frame: 0,
-                buffer_start_frame: 0,
-            }
-        };
-        Self {
-            state,
-            pending: Vec::new(),
-        }
-    }
-
-    /// True when input is already 48 kHz stereo (no interpolation loop).
-    #[cfg(test)]
-    pub fn is_passthrough(&self) -> bool {
-        matches!(self.state, PipelineState::Stereo48kPassthrough)
-    }
-
-    /// Append one capture block (`sample_rate` must match the rate this buffer was built for).
-    pub fn push_interleaved(&mut self, data: &[i16]) {
-        self.pending.extend_from_slice(data);
-    }
-
-    /// Drain as many 960×2 stereo S16LE samples (20 ms @ 48 kHz) as possible.
-    pub fn pop_opus_frames(&mut self, out: &mut Vec<i16>) {
-        match &mut self.state {
-            PipelineState::Stereo48kPassthrough => {
-                while self.pending.len() >= OPUS_STEREO_SAMPLES {
-                    out.extend_from_slice(&self.pending[..OPUS_STEREO_SAMPLES]);
-                    self.pending.drain(..OPUS_STEREO_SAMPLES);
-                }
-            }
-            PipelineState::Resample {
-                in_rate,
-                in_channels,
-                next_out_frame,
-                buffer_start_frame,
-            } => {
-                let ch = *in_channels as usize;
-                if ch == 0 {
-                    return;
-                }
-
-                loop {
-                    let batch_start = *next_out_frame;
-                    let mut block = Vec::with_capacity(OPUS_STEREO_SAMPLES);
-                    let mut complete = true;
-
-                    for i in 0u64..960 {
-                        let k = batch_start + i;
-                        let p_abs = (k as f64) * (*in_rate as f64) / OUT_RATE;
-                        let f_abs = p_abs.floor() as u64;
-                        let frac = p_abs - f_abs as f64;
-
-                        let f_rel = f_abs.saturating_sub(*buffer_start_frame) as usize;
-                        if f_rel + 1 >= self.pending.len() / ch {
-                            complete = false;
-                            break;
-                        }
-
-                        let base0 = f_rel * ch;
-                        let base1 = (f_rel + 1) * ch;
-
-                        let (l, r) = if *in_channels >= 2 {
-                            let l0 = self.pending[base0] as f64;
-                            let l1 = self.pending[base1] as f64;
-                            let r0 = self.pending[base0 + 1] as f64;
-                            let r1 = self.pending[base1 + 1] as f64;
-                            (l0 + frac * (l1 - l0), r0 + frac * (r1 - r0))
-                        } else {
-                            let m0 = self.pending[base0] as f64;
-                            let m1 = self.pending[base1] as f64;
-                            let v = m0 + frac * (m1 - m0);
-                            (v, v)
-                        };
-
-                        block.push(clamp_f64_to_i16(l));
-                        block.push(clamp_f64_to_i16(r));
-                    }
-
-                    if !complete || block.len() != OPUS_STEREO_SAMPLES {
-                        break;
-                    }
-
-                    out.extend_from_slice(&block);
-                    *next_out_frame = batch_start + 960;
-                    trim_resample_prefix(
-                        &mut self.pending,
-                        *in_rate,
-                        *next_out_frame,
-                        buffer_start_frame,
-                        ch,
-                    );
-                }
-            }
-        }
-    }
-}
-
-fn trim_resample_prefix(
-    pending: &mut Vec<i16>,
-    in_rate: u32,
-    next_out_frame: u64,
-    buffer_start_frame: &mut u64,
-    ch: usize,
-) {
-    if pending.is_empty() {
-        return;
-    }
-
-    let p_next = (next_out_frame as f64) * (in_rate as f64) / OUT_RATE;
-    let need_abs = p_next.floor() as u64;
-    let keep_from_abs = need_abs.saturating_sub(1);
-    if keep_from_abs <= *buffer_start_frame {
-        return;
-    }
-
-    let drop_frames = (keep_from_abs - *buffer_start_frame) as usize;
-    let drop_samples = drop_frames.saturating_mul(ch).min(pending.len());
-    if drop_samples > 0 {
-        pending.drain(0..drop_samples);
-        *buffer_start_frame += drop_frames as u64;
-    }
-}
-
-#[inline]
-fn clamp_f64_to_i16(v: f64) -> i16 {
-    v.round().clamp(i16::MIN as f64, i16::MAX as f64) as i16
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn passthrough_48k_identity_tone_length() {
-        let mut buf = Opus48kPcmBuffer::new(48000, 2);
-        assert!(buf.is_passthrough());
-        let mut chunk = vec![0i16; 960 * 2];
-        for i in 0..960 {
-            let s = (i as f32 * 0.1).sin() * 3000.0;
-            chunk[2 * i] = s as i16;
-            chunk[2 * i + 1] = s as i16;
-        }
-        buf.push_interleaved(&chunk);
-        let mut out = Vec::new();
-        buf.pop_opus_frames(&mut out);
-        assert_eq!(out.len(), 960 * 2);
-    }
-
-    #[test]
-    fn upsample_44k_to_48k_chunk() {
-        let mut buf = Opus48kPcmBuffer::new(44100, 2);
-        assert!(!buf.is_passthrough());
-        let mut chunk = vec![0i16; 882 * 2];
-        for i in 0..882 {
-            chunk[2 * i] = (i as i16).wrapping_mul(10);
-            chunk[2 * i + 1] = (i as i16).wrapping_mul(-7);
-        }
-        buf.push_interleaved(&chunk);
-        let mut out = Vec::new();
-        buf.pop_opus_frames(&mut out);
-        assert_eq!(out.len(), 960 * 2, "expected one 20ms Opus block");
-    }
-
-    #[test]
-    fn mono_48k_not_passthrough() {
-        let buf = Opus48kPcmBuffer::new(48000, 1);
-        assert!(!buf.is_passthrough());
-    }
-}
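With the resampler gone, everything downstream leans on one piece of arithmetic: at 48 kHz, one 20 ms Opus frame is exactly `48_000 / 50 = 960` frames per channel. A standalone sanity check of the constants the new pipeline hard-codes (illustrative, not part of the patch):

```rust
// 20 ms framing at the fixed 48 kHz stereo capture format. These are the
// numbers behind OPUS_STEREO_SAMPLES and `frame_size = sample_rate / 50`.
fn main() {
    let sample_rate = 48_000usize;
    let channels = 2usize;
    let frames_per_20ms = sample_rate / 50; // 960 frames per channel
    let interleaved_i16 = frames_per_20ms * channels; // 1920 samples = OPUS_STEREO_SAMPLES
    let bytes_s16le = interleaved_i16 * 2; // 3840 bytes per encoder input block
    assert_eq!((frames_per_20ms, interleaved_i16, bytes_s16le), (960, 1920, 3840));
}
```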
"expected one 20ms Opus block"); - } - - #[test] - fn mono_48k_not_passthrough() { - let buf = Opus48kPcmBuffer::new(48000, 1); - assert!(!buf.is_passthrough()); - } -} diff --git a/src/audio/streamer.rs b/src/audio/streamer.rs index 6f6db04e..9b32e27f 100644 --- a/src/audio/streamer.rs +++ b/src/audio/streamer.rs @@ -1,21 +1,22 @@ //! Audio streaming pipeline //! -//! Coordinates audio capture and Opus encoding, distributing encoded -//! frames to multiple subscribers via broadcast channel. +//! ALSA capture (48 kHz stereo only) → fixed Opus 20 ms frames → `mpsc` fan-out per subscriber. use std::sync::atomic::{AtomicBool, AtomicU64, Ordering}; -use std::sync::Arc; +use std::sync::{Arc, Mutex}; use std::time::Instant; -use tokio::sync::{broadcast, watch, Mutex, RwLock}; +use tokio::sync::{broadcast, mpsc, watch, Mutex as AsyncMutex, RwLock}; use tracing::{error, info, warn}; use super::capture::{AudioCapturer, AudioConfig, AudioFrame, CaptureState}; use super::encoder::{OpusConfig, OpusEncoder, OpusFrame}; -use super::resample::Opus48kPcmBuffer; use crate::error::{AppError, Result}; use bytemuck; use bytes::Bytes; +/// Stereo 48 kHz: 20 ms = 960 frames × 2 channels (S16LE). +const OPUS_STEREO_SAMPLES: usize = 960 * 2; + /// Audio stream state #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] pub enum AudioStreamState { @@ -68,15 +69,16 @@ pub struct AudioStreamStats { /// Audio streamer /// -/// Manages the audio capture -> encode -> broadcast pipeline. +/// Manages the audio capture → encode → mpsc fan-out pipeline. pub struct AudioStreamer { config: RwLock, state: watch::Sender, state_rx: watch::Receiver, capturer: RwLock>>, - encoder: Arc>>, - opus_tx: watch::Sender>>, - stats: Arc>, + encoder: Arc>>, + /// One `mpsc::Sender` per subscriber (like shared video pipeline). + opus_subscribers: Arc>>>>, + stats: Arc>, sequence: AtomicU64, stream_start_time: RwLock>, stop_flag: Arc, @@ -91,16 +93,15 @@ impl AudioStreamer { /// Create a new audio streamer with specified configuration pub fn with_config(config: AudioStreamerConfig) -> Self { let (state_tx, state_rx) = watch::channel(AudioStreamState::Stopped); - let (opus_tx, _opus_rx) = watch::channel(None); Self { config: RwLock::new(config), state: state_tx, state_rx, capturer: RwLock::new(None), - encoder: Arc::new(Mutex::new(None)), - opus_tx, - stats: Arc::new(Mutex::new(AudioStreamStats::default())), + encoder: Arc::new(AsyncMutex::new(None)), + opus_subscribers: Arc::new(Mutex::new(Vec::new())), + stats: Arc::new(AsyncMutex::new(AudioStreamStats::default())), sequence: AtomicU64::new(0), stream_start_time: RwLock::new(None), stop_flag: Arc::new(AtomicBool::new(false)), @@ -117,14 +118,21 @@ impl AudioStreamer { self.state_rx.clone() } - /// Subscribe to Opus frames - pub fn subscribe_opus(&self) -> watch::Receiver>> { - self.opus_tx.subscribe() + /// Subscribe to Opus frames (each packet is one encoded 20 ms frame). 
@@ -244,51 +254,63 @@ impl AudioStreamer {
         self.state() == AudioStreamState::Running
     }
 
-    /// Internal streaming task
+    async fn fanout_opus(
+        subscribers: &Arc<Mutex<Vec<mpsc::Sender<Arc<OpusFrame>>>>>,
+        frame: Arc<OpusFrame>,
+    ) {
+        let txs: Vec<_> = {
+            let g = subscribers.lock().unwrap();
+            if g.is_empty() {
+                return;
+            }
+            g.clone()
+        };
+        for tx in &txs {
+            let _ = tx.send(frame.clone()).await;
+        }
+        if txs.iter().any(|tx| tx.is_closed()) {
+            let mut g = subscribers.lock().unwrap();
+            g.retain(|tx| !tx.is_closed());
+        }
+    }
+
     async fn stream_task(
         capturer: Arc<AudioCapturer>,
-        encoder: Arc<Mutex<Option<OpusEncoder>>>,
-        opus_tx: watch::Sender<Option<Arc<OpusFrame>>>,
+        encoder: Arc<AsyncMutex<Option<OpusEncoder>>>,
+        opus_subscribers: Arc<Mutex<Vec<mpsc::Sender<Arc<OpusFrame>>>>>,
         state: watch::Sender<AudioStreamState>,
         stop_flag: Arc<AtomicBool>,
     ) {
         let mut pcm_rx = capturer.subscribe();
         let _ = state.send(AudioStreamState::Running);
-        info!("Audio stream task started");
+        info!("Audio stream task started (48 kHz stereo → Opus, mpsc fan-out)");
 
-        let mut to_48k: Option<Opus48kPcmBuffer> = None;
-        let mut queued_48k: Vec<i16> = Vec::new();
+        let mut pending: Vec<i16> = Vec::new();
 
         loop {
-            // Check stop flag (atomic, no async lock needed)
             if stop_flag.load(Ordering::Relaxed) {
                 break;
             }
 
-            // Check capturer state
             if capturer.state() == CaptureState::Error {
                 error!("Audio capture error, stopping stream");
                 let _ = state.send(AudioStreamState::Error);
                 break;
             }
 
-            // Receive PCM frame with timeout
             let recv_result =
                 tokio::time::timeout(std::time::Duration::from_secs(2), pcm_rx.recv()).await;
 
             match recv_result {
                 Ok(Ok(audio_frame)) => {
-                    if to_48k.is_none() {
-                        to_48k = Some(Opus48kPcmBuffer::new(
-                            audio_frame.sample_rate,
-                            audio_frame.channels,
-                        ));
+                    if audio_frame.sample_rate != 48_000 || audio_frame.channels != 2 {
+                        warn!(
+                            "Skip non–48 kHz/stereo PCM ({} Hz, {} ch)",
+                            audio_frame.sample_rate, audio_frame.channels
+                        );
+                        continue;
                     }
-                    let pipeline = match to_48k.as_mut() {
-                        Some(p) => p,
-                        None => continue,
-                    };
 
                     let samples: &[i16] = match bytemuck::try_cast_slice(&audio_frame.data) {
                         Ok(s) => s,
@@ -298,16 +320,16 @@ impl AudioStreamer {
                        }
                     };
                     if !samples.is_empty() {
-                        pipeline.push_interleaved(samples);
+                        pending.extend_from_slice(samples);
                     }
 
-                    pipeline.pop_opus_frames(&mut queued_48k);
-                    while queued_48k.len() >= 960 * 2 {
-                        let pcm_20ms =
-                            Bytes::copy_from_slice(bytemuck::cast_slice(&queued_48k[..960 * 2]));
-                        queued_48k.drain(..960 * 2);
+                    while pending.len() >= OPUS_STEREO_SAMPLES {
+                        let pcm_20ms = Bytes::copy_from_slice(bytemuck::cast_slice(
+                            &pending[..OPUS_STEREO_SAMPLES],
+                        ));
+                        pending.drain(..OPUS_STEREO_SAMPLES);
 
-                        let frame_48k = AudioFrame::new_interleaved(pcm_20ms, 2, 48000, 0);
+                        let frame_48k = AudioFrame::new_interleaved(pcm_20ms, 2, 48_000, 0);
 
                         let opus_result = {
                             let mut enc_guard = encoder.lock().await;
@@ -318,9 +340,7 @@ impl AudioStreamer {
                         match opus_result {
                             Some(Ok(opus_frame)) => {
-                                if opus_tx.receiver_count() > 0 {
-                                    let _ = opus_tx.send(Some(Arc::new(opus_frame)));
-                                }
+                                Self::fanout_opus(&opus_subscribers, Arc::new(opus_frame)).await;
                             }
                             Some(Err(e)) => {
                                 error!("Opus encode error: {}", e);
@@ -337,10 +357,9 @@ impl AudioStreamer {
                     break;
                 }
                 Ok(Err(broadcast::error::RecvError::Lagged(n))) => {
-                    warn!("Audio receiver lagged by {} frames", n);
+                    warn!("PCM receiver lagged by {} frames", n);
                 }
                 Err(_) => {
-                    // Timeout - check if still capturing
                     if capturer.state() != CaptureState::Running {
                         info!("Audio capture stopped, ending stream task");
                         break;
diff --git a/src/auth/mod.rs b/src/auth/mod.rs
index 8d9ba479..fc38a389 100644
--- a/src/auth/mod.rs
+++ b/src/auth/mod.rs
@@ -1,5 +1,6 @@
 pub mod middleware;
 mod password;
+mod rfc3339;
 mod session;
 mod user;
 
diff --git a/src/auth/rfc3339.rs b/src/auth/rfc3339.rs
new file mode 100644
index 00000000..de964988
--- /dev/null
+++ b/src/auth/rfc3339.rs
@@ -0,0 +1,13 @@
+//! RFC3339 strings in SQLite; structs use `time::serde::rfc3339`.
+
+use time::format_description::well_known::Rfc3339;
+use time::OffsetDateTime;
+
+/// Parse DB text; bad input → `now_utc()`.
+pub fn parse(s: &str) -> OffsetDateTime {
+    OffsetDateTime::parse(s, &Rfc3339).unwrap_or_else(|_| OffsetDateTime::now_utc())
+}
+
+pub fn format(dt: OffsetDateTime) -> String {
+    dt.format(&Rfc3339).expect("RFC3339 format")
+}
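A quick round-trip with the same `time` API (this relies on the `formatting`/`parsing` features enabled in Cargo.toml above; the timestamp value is arbitrary):

```rust
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;

fn main() {
    let dt = OffsetDateTime::from_unix_timestamp(1_700_000_000).expect("valid timestamp");
    let text = dt.format(&Rfc3339).expect("RFC3339 format"); // "2023-11-14T22:13:20Z"
    let back = OffsetDateTime::parse(&text, &Rfc3339).expect("RFC3339 parse");
    assert_eq!(back, dt); // lossless for the whole-second UTC values stored in SQLite
}
```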
diff --git a/src/auth/session.rs b/src/auth/session.rs
index 5ad92baa..3288a751 100644
--- a/src/auth/session.rs
+++ b/src/auth/session.rs
@@ -1,8 +1,9 @@
-use chrono::{DateTime, Duration, Utc};
 use serde::{Deserialize, Serialize};
 use sqlx::{Pool, Sqlite};
+use time::{Duration, OffsetDateTime};
 use uuid::Uuid;
 
+use super::rfc3339;
 use crate::error::Result;
 
 /// Session data
@@ -10,15 +11,17 @@ use crate::error::Result;
 pub struct Session {
     pub id: String,
     pub user_id: String,
-    pub created_at: DateTime<Utc>,
-    pub expires_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub created_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub expires_at: OffsetDateTime,
     pub data: Option<serde_json::Value>,
 }
 
 impl Session {
     /// Check if session is expired
     pub fn is_expired(&self) -> bool {
-        Utc::now() > self.expires_at
+        OffsetDateTime::now_utc() > self.expires_at
     }
 }
 
@@ -40,11 +43,12 @@ impl SessionStore {
     /// Create a new session
     pub async fn create(&self, user_id: &str) -> Result<Session> {
+        let now = OffsetDateTime::now_utc();
         let session = Session {
             id: Uuid::new_v4().to_string(),
             user_id: user_id.to_string(),
-            created_at: Utc::now(),
-            expires_at: Utc::now() + self.default_ttl,
+            created_at: now,
+            expires_at: now + self.default_ttl,
             data: None,
         };
 
@@ -56,8 +60,8 @@ impl SessionStore {
         )
         .bind(&session.id)
         .bind(&session.user_id)
-        .bind(session.created_at.to_rfc3339())
-        .bind(session.expires_at.to_rfc3339())
+        .bind(rfc3339::format(session.created_at))
+        .bind(rfc3339::format(session.expires_at))
         .bind(session.data.as_ref().map(|d| d.to_string()))
         .execute(&self.pool)
         .await?;
@@ -79,12 +83,8 @@ impl SessionStore {
         let session = Session {
             id,
             user_id,
-            created_at: DateTime::parse_from_rfc3339(&created_at)
-                .map(|dt| dt.with_timezone(&Utc))
-                .unwrap_or_else(|_| Utc::now()),
-            expires_at: DateTime::parse_from_rfc3339(&expires_at)
-                .map(|dt| dt.with_timezone(&Utc))
-                .unwrap_or_else(|_| Utc::now()),
+            created_at: rfc3339::parse(&created_at),
+            expires_at: rfc3339::parse(&expires_at),
             data: data.and_then(|d| serde_json::from_str(&d).ok()),
         };
 
@@ -110,7 +110,7 @@ impl SessionStore {
     /// Delete all expired sessions
     pub async fn cleanup_expired(&self) -> Result<u64> {
-        let now = Utc::now().to_rfc3339();
+        let now = rfc3339::format(OffsetDateTime::now_utc());
         let result = sqlx::query("DELETE FROM sessions WHERE expires_at < ?1")
             .bind(now)
             .execute(&self.pool)
             .await?;
@@ -145,9 +145,9 @@ impl SessionStore {
     /// Extend session expiration
     pub async fn extend(&self, session_id: &str) -> Result<()> {
-        let new_expires = Utc::now() + self.default_ttl;
+        let new_expires = OffsetDateTime::now_utc() + self.default_ttl;
         sqlx::query("UPDATE sessions SET expires_at = ?1 WHERE id = ?2")
-            .bind(new_expires.to_rfc3339())
+            .bind(rfc3339::format(new_expires))
             .bind(session_id)
             .execute(&self.pool)
             .await?;
diff --git a/src/auth/user.rs b/src/auth/user.rs
index 986fc41b..b749b6ee 100644
--- a/src/auth/user.rs
+++ b/src/auth/user.rs
@@ -1,9 +1,10 @@
-use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 use sqlx::{Pool, Sqlite};
+use time::OffsetDateTime;
 use uuid::Uuid;
 
 use super::password::{hash_password, verify_password};
+use super::rfc3339;
 use crate::error::{AppError, Result};
 
 /// User row type from database
@@ -16,8 +17,10 @@ pub struct User {
     pub username: String,
     #[serde(skip_serializing)]
     pub password_hash: String,
-    pub created_at: DateTime<Utc>,
-    pub updated_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub created_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub updated_at: OffsetDateTime,
 }
 
 impl User {
@@ -28,12 +31,8 @@ impl User {
             id,
             username,
             password_hash,
-            created_at: DateTime::parse_from_rfc3339(&created_at)
-                .map(|dt| dt.with_timezone(&Utc))
-                .unwrap_or_else(|_| Utc::now()),
-            updated_at: DateTime::parse_from_rfc3339(&updated_at)
-                .map(|dt| dt.with_timezone(&Utc))
-                .unwrap_or_else(|_| Utc::now()),
+            created_at: rfc3339::parse(&created_at),
+            updated_at: rfc3339::parse(&updated_at),
         }
     }
 }
@@ -61,7 +60,7 @@ impl UserStore {
         }
 
         let password_hash = hash_password(password)?;
-        let now = Utc::now();
+        let now = OffsetDateTime::now_utc();
         let user = User {
             id: Uuid::new_v4().to_string(),
             username: username.to_string(),
@@ -79,8 +78,8 @@ impl UserStore {
         .bind(&user.id)
         .bind(&user.username)
         .bind(&user.password_hash)
-        .bind(user.created_at.to_rfc3339())
-        .bind(user.updated_at.to_rfc3339())
+        .bind(rfc3339::format(user.created_at))
+        .bind(rfc3339::format(user.updated_at))
         .execute(&self.pool)
         .await?;
 
@@ -128,12 +127,12 @@ impl UserStore {
     /// Update user password
     pub async fn update_password(&self, user_id: &str, new_password: &str) -> Result<()> {
         let password_hash = hash_password(new_password)?;
-        let now = Utc::now();
+        let now = OffsetDateTime::now_utc();
 
         let result =
             sqlx::query("UPDATE users SET password_hash = ?1, updated_at = ?2 WHERE id = ?3")
                 .bind(&password_hash)
-                .bind(now.to_rfc3339())
+                .bind(rfc3339::format(now))
                 .bind(user_id)
                 .execute(&self.pool)
                 .await?;
@@ -156,10 +155,10 @@ impl UserStore {
             }
         }
 
-        let now = Utc::now();
+        let now = OffsetDateTime::now_utc();
         let result = sqlx::query("UPDATE users SET username = ?1, updated_at = ?2 WHERE id = ?3")
             .bind(new_username)
-            .bind(now.to_rfc3339())
+            .bind(rfc3339::format(now))
             .bind(user_id)
             .execute(&self.pool)
             .await?;
diff --git a/src/error.rs b/src/error.rs
index f3e321b5..aaffdac4 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -45,6 +45,10 @@ pub enum AppError {
     #[error("Video device lost [{device}]: {reason}")]
     VideoDeviceLost { device: String, reason: String },
 
+    /// No valid input signal while opening capture; `kind` is the `SignalStatus`
+    /// rendered as a string (round-trips via `from_str`).
+    #[error("Capture has no valid signal: {kind}")]
+    CaptureNoSignal { kind: String },
+
     #[error("Audio error: {0}")]
     AudioError(String),
 
diff --git a/src/events/mod.rs b/src/events/mod.rs
index 907ad1cf..7dfa4539 100644
--- a/src/events/mod.rs
+++ b/src/events/mod.rs
@@ -64,6 +64,8 @@ fn topic_prefix(event_name: &str) -> Option<String> {
 /// bus.publish(SystemEvent::StreamStateChanged {
 ///     state: "streaming".to_string(),
 ///     device: Some("/dev/video0".to_string()),
+///     reason: None,
+///     next_retry_ms: None,
 /// });
 ///
 /// // Subscribe to events
@@ -188,6 +190,8 @@ mod tests {
         bus.publish(SystemEvent::StreamStateChanged {
             state: "streaming".to_string(),
             device: Some("/dev/video0".to_string()),
+            reason: None,
+            next_retry_ms: None,
         });
 
         let event = rx.recv().await.unwrap();
@@ -205,6 +209,8 @@ mod tests {
         bus.publish(SystemEvent::StreamStateChanged {
             state: "ready".to_string(),
             device: Some("/dev/video0".to_string()),
+            reason: None,
+            next_retry_ms: None,
         });
 
         let event1 = rx1.recv().await.unwrap();
@@ -222,6 +228,8 @@ mod tests {
         bus.publish(SystemEvent::StreamStateChanged {
             state: "ready".to_string(),
             device: None,
+            reason: None,
+            next_retry_ms: None,
         });
 
         let event = rx.recv().await.unwrap();
@@ -236,6 +244,8 @@ mod tests {
         bus.publish(SystemEvent::StreamStateChanged {
             state: "ready".to_string(),
             device: None,
+            reason: None,
+            next_retry_ms: None,
         });
 
         let event = rx.recv().await.unwrap();
@@ -257,6 +267,8 @@ mod tests {
         bus.publish(SystemEvent::StreamStateChanged {
             state: "ready".to_string(),
             device: None,
+            reason: None,
+            next_retry_ms: None,
         });
     }
 }
diff --git a/src/events/types.rs b/src/events/types.rs
index 148021e1..d1bf9e9b 100644
--- a/src/events/types.rs
+++ b/src/events/types.rs
@@ -158,13 +158,16 @@ pub enum SystemEvent {
         from_mode: String,
     },
 
-    /// Stream state changed (e.g., started, stopped, error)
+    /// Stream state for the UI (`streaming`, `no_signal`, `device_lost`, `device_busy`, etc.).
+    /// Optional `reason` / `next_retry_ms` are hints only; branch on `state`.
     #[serde(rename = "stream.state_changed")]
     StreamStateChanged {
-        /// Current state: "uninitialized", "ready", "streaming", "no_signal", "error"
         state: String,
-        /// Device path if available
         device: Option<String>,
+        #[serde(skip_serializing_if = "Option::is_none")]
+        reason: Option<String>,
+        #[serde(skip_serializing_if = "Option::is_none")]
+        next_retry_ms: Option<u64>,
     },
 
     /// Stream configuration is being changed
@@ -407,6 +410,8 @@ mod tests {
         let event = SystemEvent::StreamStateChanged {
             state: "streaming".to_string(),
             device: Some("/dev/video0".to_string()),
+            reason: None,
+            next_retry_ms: None,
         };
         assert_eq!(event.event_name(), "stream.state_changed");
     }
@@ -416,6 +421,8 @@ mod tests {
         let event = SystemEvent::StreamStateChanged {
             state: "streaming".to_string(),
             device: None,
+            reason: None,
+            next_retry_ms: None,
         };
 
         assert!(event.matches_topic("*"));
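A hypothetical publisher showing how the new hints are meant to be used (names illustrative; only `state` is load-bearing, exactly as the doc comment above says):

```rust
// Hypothetical call site: a capture retry loop telling the UI why it is
// waiting and when it will probe again. Consumers that only branch on
// `state` keep working; `None` fields are omitted from the serialised JSON.
bus.publish(SystemEvent::StreamStateChanged {
    state: "no_signal".to_string(),
    device: Some("/dev/video0".to_string()),
    reason: Some("no_sync".to_string()),
    next_retry_ms: Some(1_500),
});
```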
diff --git a/src/main.rs b/src/main.rs
index 27cf27d4..34fc6a74 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -490,8 +490,13 @@ async fn main() -> anyhow::Result<()> {
         .update_video_config(actual_resolution, actual_format, actual_fps)
         .await;
     if let Some(device_path) = device_path {
+        let (subdev_path, bridge_kind) = streamer
+            .current_device()
+            .await
+            .map(|d| (d.subdev_path.clone(), d.bridge_kind.clone()))
+            .unwrap_or((None, None));
         webrtc_streamer
-            .set_capture_device(device_path, jpeg_quality)
+            .set_capture_device(device_path, jpeg_quality, subdev_path, bridge_kind)
             .await;
         tracing::info!("WebRTC streamer configured for direct capture");
     } else {
diff --git a/src/msd/image.rs b/src/msd/image.rs
index e7a066f4..88c04e45 100644
--- a/src/msd/image.rs
+++ b/src/msd/image.rs
@@ -7,8 +7,8 @@
 //! - Metadata management
 //! - Download from URL
 
-use chrono::Utc;
 use futures::StreamExt;
+use time::OffsetDateTime;
 use std::fs::{self, File};
 use std::io::{self, Read, Write};
 use std::path::{Path, PathBuf};
@@ -87,9 +87,10 @@ impl ImageManager {
             .ok()
             .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
             .map(|d| {
-                chrono::DateTime::from_timestamp(d.as_secs() as i64, 0).unwrap_or_else(Utc::now)
+                OffsetDateTime::from_unix_timestamp(d.as_secs() as i64)
+                    .unwrap_or_else(|_| OffsetDateTime::now_utc())
             })
-            .unwrap_or_else(Utc::now);
+            .unwrap_or_else(OffsetDateTime::now_utc);
 
         Some(ImageInfo {
             id,
diff --git a/src/msd/types.rs b/src/msd/types.rs
index 8f1e68cf..321222b6 100644
--- a/src/msd/types.rs
+++ b/src/msd/types.rs
@@ -1,8 +1,8 @@
 //! MSD data types and structures
 
-use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 use std::path::PathBuf;
+use time::OffsetDateTime;
 
 /// MSD operating mode
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
@@ -31,7 +31,8 @@ pub struct ImageInfo {
     /// File size in bytes
     pub size: u64,
     /// Creation timestamp
-    pub created_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub created_at: OffsetDateTime,
 }
 
 impl ImageInfo {
@@ -42,7 +43,7 @@ impl ImageInfo {
             name,
             path,
             size,
-            created_at: Utc::now(),
+            created_at: OffsetDateTime::now_utc(),
         }
     }
 
@@ -132,7 +133,8 @@ pub struct DriveFile {
     /// Whether this is a directory
     pub is_dir: bool,
     /// Last modified timestamp
-    pub modified: Option<DateTime<Utc>>,
+    #[serde(with = "time::serde::rfc3339::option")]
+    pub modified: Option<OffsetDateTime>,
 }
 
 /// MSD connect request
diff --git a/src/rustdesk/connection.rs b/src/rustdesk/connection.rs
index 2c9c5e1b..02fd293c 100644
--- a/src/rustdesk/connection.rs
+++ b/src/rustdesk/connection.rs
@@ -1831,18 +1831,18 @@ async fn run_audio_streaming(
                     break 'subscribe_loop;
                 }
 
-                result = opus_rx.changed() => {
-                    if result.is_err() {
-                        // Pipeline was restarted
-                        info!("Audio pipeline closed for connection {}, re-subscribing...", conn_id);
-                        audio_adapter.reset();
-                        tokio::time::sleep(Duration::from_millis(100)).await;
-                        continue 'subscribe_loop;
-                    }
-
-                    let opus_frame = match opus_rx.borrow().clone() {
+                result = opus_rx.recv() => {
+                    let opus_frame = match result {
                         Some(frame) => frame,
-                        None => continue,
+                        None => {
+                            info!(
+                                "Audio pipeline closed for connection {}, re-subscribing...",
+                                conn_id
+                            );
+                            audio_adapter.reset();
+                            tokio::time::sleep(Duration::from_millis(100)).await;
+                            continue 'subscribe_loop;
+                        }
                     };
 
                     // Convert OpusFrame to RustDesk AudioFrame message
diff --git a/src/stream/mjpeg.rs b/src/stream/mjpeg.rs
index 26c2ec61..dcb6fc55 100644
--- a/src/stream/mjpeg.rs
+++ b/src/stream/mjpeg.rs
@@ -3,63 +3,21 @@
 //! Manages video frame distribution and per-client statistics.
 
 use arc_swap::ArcSwap;
-use bytes::Bytes;
 use parking_lot::Mutex as ParkingMutex;
 use parking_lot::RwLock as ParkingRwLock;
 use std::collections::{HashMap, VecDeque};
 use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
-use std::sync::{Arc, OnceLock};
+use std::sync::Arc;
 use std::time::{Duration, Instant};
 use tokio::sync::broadcast;
 use tracing::{debug, info, warn};
 
 use crate::video::encoder::traits::{Encoder, EncoderConfig};
 use crate::video::encoder::JpegEncoder;
-use crate::video::format::{PixelFormat, Resolution};
+use crate::video::format::PixelFormat;
 use crate::video::VideoFrame;
 
-/// Cached "no signal" placeholder JPEG (640×360 dark-gray image).
-/// Generated once on first use and reused for all NoSignal frames.
-static NO_SIGNAL_JPEG: OnceLock<Bytes> = OnceLock::new();
-
-/// Generate a minimal "no signal" JPEG (640×360, dark gray background).
-/// Uses turbojpeg directly to produce a valid JPEG without additional deps.
-fn generate_no_signal_jpeg() -> Bytes {
-    const W: usize = 640;
-    const H: usize = 360;
-
-    let y_size = W * H;
-    let uv_size = y_size / 4;
-    let mut i420 = vec![0u8; y_size + uv_size * 2];
-
-    // Y = 32 (dark gray, above the 16 black floor so it is clearly visible)
-    i420[..y_size].fill(32);
-    // U and V = 128 (neutral chroma → no colour tint)
-    i420[y_size..].fill(128);
-
-    match turbojpeg::Compressor::new() {
-        Ok(mut compressor) => {
-            let _ = compressor.set_quality(70);
-            let yuv = turbojpeg::YuvImage {
-                pixels: i420.as_slice(),
-                width: W,
-                height: H,
-                align: 1,
-                subsamp: turbojpeg::Subsamp::Sub2x2,
-            };
-            match compressor.compress_yuv_to_vec(yuv) {
-                Ok(jpeg) => Bytes::from(jpeg),
-                Err(_) => Bytes::new(),
-            }
-        }
-        Err(_) => Bytes::new(),
-    }
-}
-
-/// Return a reference to the cached no-signal JPEG bytes.
-fn no_signal_jpeg() -> &'static Bytes {
-    NO_SIGNAL_JPEG.get_or_init(generate_no_signal_jpeg)
-}
+// No placeholder JPEGs: capture calls `set_offline()`; UI uses `stream.state_changed`.
 
 /// Client ID type (UUID string)
 pub type ClientId = String;
@@ -359,6 +317,9 @@ impl MjpegStreamHandler {
             PixelFormat::Yuyv => encoder
                 .encode_yuyv(frame.data(), sequence)
                 .map_err(|e| format!("YUYV encode failed: {}", e))?,
+            PixelFormat::Yvyu => encoder
+                .encode_yvyu(frame.data(), sequence)
+                .map_err(|e| format!("YVYU encode failed: {}", e))?,
             PixelFormat::Nv12 => encoder
                 .encode_nv12(frame.data(), sequence)
                 .map_err(|e| format!("NV12 encode failed: {}", e))?,
@@ -392,40 +353,12 @@ impl MjpegStreamHandler {
         ))
     }
 
-    /// Set stream offline
+    /// Marks offline; clients exit their read loop. UI overlay comes from `stream.state_changed`.
     pub fn set_offline(&self) {
         self.online.store(false, Ordering::SeqCst);
         let _ = self.frame_notify.send(());
     }
 
-    /// Push a "no signal" placeholder JPEG to all connected MJPEG clients.
-    ///
-    /// Unlike `set_offline()`, this keeps the stream marked as **online** so
-    /// that HTTP clients remain connected and see the placeholder image instead
-    /// of a black/empty screen. Call this whenever the capture thread enters
-    /// the `NoSignal` state.
-    pub fn push_no_signal_placeholder(&self) {
-        let jpeg = no_signal_jpeg();
-        if jpeg.is_empty() {
-            return;
-        }
-
-        let frame = VideoFrame::new(
-            jpeg.clone(),
-            Resolution::new(640, 360),
-            PixelFormat::Mjpeg,
-            0,
-            self.sequence.fetch_add(1, Ordering::Relaxed),
-        );
-
-        // Store as current frame so late-joining clients get it immediately.
-        self.current_frame.store(Arc::new(Some(frame)));
-        // Ensure stream is marked online so the HTTP handler keeps iterating.
-        self.online.store(true, Ordering::SeqCst);
-        // Wake up waiting HTTP clients.
-        let _ = self.frame_notify.send(());
-    }
-
     /// Set stream online (called when streaming starts)
     pub fn set_online(&self) {
         self.online.store(true, Ordering::SeqCst);
diff --git a/src/video/csi_bridge.rs b/src/video/csi_bridge.rs
new file mode 100644
index 00000000..8888aa62
--- /dev/null
+++ b/src/video/csi_bridge.rs
@@ -0,0 +1,363 @@
+//! CSI/HDMI bridge helpers: subdev discovery, DV probe, RK628 "fake VGA" filter (must run before `S_FMT` / `STREAMON` on capture — see RK628 driver).
+use std::fs::File;
+use std::io;
+use std::os::fd::{AsFd, AsRawFd, FromRawFd};
+use std::path::{Path, PathBuf};
+use std::sync::mpsc;
+use std::thread;
+use std::time::Duration;
+
+use libc;
+use nix::poll::{poll, PollFd, PollFlags, PollTimeout};
+use tracing::{debug, info, warn};
+use v4l2r::bindings::{
+    v4l2_bt_timings, v4l2_dv_timings, V4L2_DV_BT_656_1120, V4L2_DV_FL_HAS_CEA861_VIC,
+};
+use v4l2r::ioctl::{
+    self, Event as V4l2Event, EventType, QueryDvTimingsError, SubscribeEventFlags,
+};
+use v4l2r::nix::errno::Errno;
+
+use crate::video::SignalStatus;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum CsiBridgeKind {
+    Rk628,
+    RkHdmirx,
+    Tc358743,
+    Unknown,
+}
+
+impl CsiBridgeKind {
+    fn from_subdev_name(name: &str) -> Option<Self> {
+        let lower = name.to_ascii_lowercase();
+        if lower.contains("rk628") {
+            Some(Self::Rk628)
+        } else if lower.contains("hdmirx") || lower.contains("hdmi-rx") {
+            Some(Self::RkHdmirx)
+        } else if lower.contains("tc358743") || lower.contains("tc358746") {
+            Some(Self::Tc358743)
+        } else {
+            None
+        }
+    }
+
+    fn has_no_signal_fingerprint(self) -> bool {
+        matches!(self, Self::Rk628)
+    }
+}
+
+#[derive(Debug, Clone)]
+pub enum ProbeResult {
+    Locked(DvTimingsMode),
+    NoCable,
+    NoSync,
+    OutOfRange,
+    NoSignal,
+}
+
+impl ProbeResult {
+    pub fn as_status(&self) -> Option<SignalStatus> {
+        match self {
+            ProbeResult::Locked(_) => None,
+            ProbeResult::NoCable => Some(SignalStatus::NoCable),
+            ProbeResult::NoSync => Some(SignalStatus::NoSync),
+            ProbeResult::OutOfRange => Some(SignalStatus::OutOfRange),
+            ProbeResult::NoSignal => Some(SignalStatus::NoSignal),
+        }
+    }
+
+    pub fn is_locked(&self) -> bool {
+        matches!(self, ProbeResult::Locked(_))
+    }
+}
+
+/// Scalar copy of BT timings (avoids unaligned refs into packed union).
+#[derive(Clone, Copy)]
+pub struct DvTimingsMode {
+    pub width: u32,
+    pub height: u32,
+    pub pixelclock: u64,
+    pub fps: Option<f64>,
+    pub raw: v4l2_dv_timings,
+}
+
+impl std::fmt::Debug for DvTimingsMode {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("DvTimingsMode")
+            .field("width", &self.width)
+            .field("height", &self.height)
+            .field("pixelclock", &self.pixelclock)
+            .field("fps", &self.fps)
+            .finish()
+    }
+}
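`fps` is derived rather than reported: the refresh rate is `pixelclock / (total_width × total_height)` over the full raster including porches. A standalone check with the CEA-861 1080p60 raster — the same arithmetic `classify_timings` performs below (illustrative, not part of the patch):

```rust
// CEA-861 1080p60: 1920x1080 active inside a 2200x1125 total raster
// clocked at 148.5 MHz; porch/sync values are the standard ones.
fn main() {
    let pixelclock: u64 = 148_500_000;
    let total_h: u64 = 1920 + 88 + 44 + 148; // active + hfp + hsync + hbp = 2200
    let total_v: u64 = 1080 + 4 + 5 + 36; // active + vfp + vsync + vbp = 1125
    let fps = pixelclock as f64 / (total_h as f64 * total_v as f64);
    assert!((fps - 60.0).abs() < 1e-9); // 148_500_000 / 2_475_000 = 60.0
}
```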
+/// Heuristic: scan `/sys/class/video4linux/v4l-subdev*` names for rk628 / hdmirx / tc358743.
+pub fn discover_subdev_for_video(video_path: &Path) -> Option<(PathBuf, CsiBridgeKind)> {
+    let sysfs_base = Path::new("/sys/class/video4linux");
+    let entries = std::fs::read_dir(sysfs_base).ok()?;
+
+    for entry in entries.flatten() {
+        let name = entry.file_name();
+        let name_str = name.to_string_lossy();
+        if !name_str.starts_with("v4l-subdev") {
+            continue;
+        }
+        let Some(kind) = read_sysfs_name(&entry.path())
+            .as_deref()
+            .and_then(CsiBridgeKind::from_subdev_name)
+        else {
+            continue;
+        };
+        let dev_path = PathBuf::from("/dev").join(&*name_str);
+        if dev_path.exists() {
+            info!(
+                "Discovered CSI bridge subdev for {:?}: {:?} ({:?})",
+                video_path, dev_path, kind
+            );
+            return Some((dev_path, kind));
+        }
+    }
+    debug!(
+        "No CSI bridge subdev found in /sys/class/video4linux for {:?}",
+        video_path
+    );
+    None
+}
+
+fn read_sysfs_name(subdev_sysfs: &Path) -> Option<String> {
+    std::fs::read_to_string(subdev_sysfs.join("name"))
+        .ok()
+        .map(|s| s.trim().to_string())
+}
+
+pub fn open_subdev(path: &Path) -> io::Result<File> {
+    File::options().read(true).write(true).open(path)
+}
+
+pub fn probe_signal(subdev_fd: &impl AsRawFd, kind: CsiBridgeKind) -> ProbeResult {
+    match ioctl::query_dv_timings::<v4l2_dv_timings>(subdev_fd) {
+        Ok(timings) => classify_timings(timings, kind),
+        Err(QueryDvTimingsError::NoLink) => ProbeResult::NoCable,
+        Err(QueryDvTimingsError::UnstableSignal) => ProbeResult::NoSync,
+        Err(QueryDvTimingsError::IoctlError(Errno::ERANGE)) => ProbeResult::OutOfRange,
+        Err(QueryDvTimingsError::IoctlError(
+            Errno::EIO | Errno::EREMOTEIO | Errno::ETIMEDOUT,
+        )) => ProbeResult::NoSync,
+        Err(QueryDvTimingsError::Unsupported) | Err(QueryDvTimingsError::IoctlError(_)) => {
+            ProbeResult::NoSignal
+        }
+    }
+}
+
+/// RK628 can block `QUERY_DV_TIMINGS` for seconds; probe uses a dup + timeout.
+pub const RK628_SUBDEV_PROBE_TIMEOUT: Duration = Duration::from_millis(3000);
+
+pub fn probe_signal_thread_timeout(
+    subdev_fd: &impl AsRawFd,
+    kind: CsiBridgeKind,
+    limit: Duration,
+) -> Option<ProbeResult> {
+    let raw = subdev_fd.as_raw_fd();
+    let dup_fd = unsafe { libc::dup(raw) };
+    if dup_fd < 0 {
+        warn!(
+            "dup(subdev) for threaded DV probe failed: {}",
+            io::Error::last_os_error()
+        );
+        return None;
+    }
+    let dup_file = unsafe { File::from_raw_fd(dup_fd) };
+    let (tx, rx) = mpsc::channel::<ProbeResult>();
+    let handle = thread::spawn(move || {
+        let probe = probe_signal(&dup_file, kind);
+        let _ = tx.send(probe);
+    });
+    match rx.recv_timeout(limit) {
+        Ok(r) => {
+            let _ = handle.join();
+            Some(r)
+        }
+        Err(mpsc::RecvTimeoutError::Timeout) => {
+            warn!(
+                "QUERY_DV_TIMINGS exceeded {:?} (RK628 HDMI mode change?) — abandoning probe thread",
+                limit
+            );
+            drop(handle);
+            None
+        }
+        Err(mpsc::RecvTimeoutError::Disconnected) => {
+            let _ = handle.join();
+            None
+        }
+    }
+}
+
+fn classify_timings(timings: v4l2_dv_timings, kind: CsiBridgeKind) -> ProbeResult {
+    let timings_type: u32 = timings.type_;
+    if timings_type != V4L2_DV_BT_656_1120 {
+        warn!(
+            "QUERY_DV_TIMINGS returned unexpected type {}, treating as NoSignal",
+            timings_type
+        );
+        return ProbeResult::NoSignal;
+    }
+
+    let bt: v4l2_bt_timings = unsafe { timings.__bindgen_anon_1.bt };
+    let width: u32 = bt.width;
+    let height: u32 = bt.height;
+    let pixelclock: u64 = bt.pixelclock;
+
+    if width == 0 || height == 0 || width <= 64 || height <= 64 {
+        return ProbeResult::NoSignal;
+    }
+
+    if kind.has_no_signal_fingerprint() && is_rk628_no_signal_fingerprint(&bt) {
+        debug!(
+            "RK628 reports synthetic {}x{} @ {} Hz VGA fingerprint → NoSignal",
+            width, height, pixelclock
+        );
+        return ProbeResult::NoSignal;
+    }
+
+    let total_h: u64 = (width + bt.hfrontporch + bt.hsync + bt.hbackporch) as u64;
+    let total_v: u64 = (height + bt.vfrontporch + bt.vsync + bt.vbackporch) as u64;
+    let fps = if total_h > 0 && total_v > 0 && pixelclock > 0 {
+        Some(pixelclock as f64 / (total_h as f64 * total_v as f64))
+    } else {
+        None
+    };
+
+    ProbeResult::Locked(DvTimingsMode {
+        width,
+        height,
+        pixelclock,
+        fps,
+        raw: timings,
+    })
+}
+
+/// RK628 returns DMT 640x480 @ ~25.175 MHz, VIC=1 when unlocked; do not stream on that.
+fn is_rk628_no_signal_fingerprint(bt: &v4l2_bt_timings) -> bool {
+    let width: u32 = bt.width;
+    let height: u32 = bt.height;
+    let pixelclock: u64 = bt.pixelclock;
+    let flags: u32 = bt.flags;
+    let vic: u8 = bt.cea861_vic;
+
+    if width != 640 || height != 480 {
+        return false;
+    }
+    let pclk_matches = (pixelclock as i64 - 25_175_000).abs() < 50_000;
+    let has_vic_flag = flags & V4L2_DV_FL_HAS_CEA861_VIC != 0;
+    pclk_matches && has_vic_flag && vic == 1
+}
+
+pub fn apply_dv_timings(subdev_fd: &impl AsRawFd, timings: v4l2_dv_timings) {
+    match ioctl::s_dv_timings::<_, v4l2_dv_timings>(subdev_fd, timings) {
+        Ok(_) => debug!("S_DV_TIMINGS ok on subdev"),
+        Err(e) => debug!(
+            "S_DV_TIMINGS failed on subdev ({}), continuing with queried mode",
+            e
+        ),
+    }
+}
+
+pub fn subscribe_source_change(subdev_fd: &impl AsRawFd) -> io::Result<()> {
+    ioctl::subscribe_event(
+        subdev_fd,
+        EventType::SourceChange(0),
+        SubscribeEventFlags::empty(),
+    )
+    .map_err(|e| io::Error::other(format!("subscribe_event(SOURCE_CHANGE): {}", e)))
+}
+
+/// `Ok(true)` if a SOURCE_CHANGE was drained; `Ok(false)` on timeout.
+pub fn wait_source_change(subdev_fd: &File, timeout: Duration) -> io::Result<bool> {
+    let mut fds = [PollFd::new(subdev_fd.as_fd(), PollFlags::POLLPRI)];
+    let timeout_ms = timeout.as_millis().min(u16::MAX as u128) as u16;
+    let ready = poll(&mut fds, PollTimeout::from(timeout_ms))?;
+    if ready == 0 {
+        return Ok(false);
+    }
+    if let Some(revents) = fds[0].revents() {
+        if !revents.contains(PollFlags::POLLPRI) {
+            return Ok(false);
+        }
+    }
+
+    let mut drained = 0u32;
+    while let Ok(_ev) = ioctl::dqevent::<V4l2Event>(subdev_fd) {
+        drained = drained.saturating_add(1);
+        if drained >= 16 {
+            break;
+        }
+    }
+    debug!("subdev source_change drained {} event(s)", drained);
+    Ok(true)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn rk628_fingerprint_matches_vga() {
+        let mut bt: v4l2_bt_timings = unsafe { std::mem::zeroed() };
+        bt.width = 640;
+        bt.height = 480;
+        bt.pixelclock = 25_175_000;
+        bt.flags = V4L2_DV_FL_HAS_CEA861_VIC;
+        bt.cea861_vic = 1;
+        assert!(is_rk628_no_signal_fingerprint(&bt));
+    }
+
+    #[test]
+    fn rk628_fingerprint_rejects_real_1080p() {
+        let mut bt: v4l2_bt_timings = unsafe { std::mem::zeroed() };
+        bt.width = 1920;
+        bt.height = 1080;
+        bt.pixelclock = 148_500_000;
+        bt.flags = V4L2_DV_FL_HAS_CEA861_VIC;
+        bt.cea861_vic = 16;
+        assert!(!is_rk628_no_signal_fingerprint(&bt));
+    }
+
+    #[test]
+    fn rk628_fingerprint_rejects_real_vga_without_vic() {
+        // A hypothetical legit VGA source would *not* carry the CEA VIC
+        // flag from the bridge (RK628 sets it synthetically when unlocked).
+        let mut bt: v4l2_bt_timings = unsafe { std::mem::zeroed() };
+        bt.width = 640;
+        bt.height = 480;
+        bt.pixelclock = 25_175_000;
+        bt.flags = 0;
+        bt.cea861_vic = 0;
+        assert!(!is_rk628_no_signal_fingerprint(&bt));
+    }
+
+    #[test]
+    fn from_subdev_name_recognises_known_bridges() {
+        assert_eq!(
+            CsiBridgeKind::from_subdev_name("rk628-csi-v4l2 9-0051"),
+            Some(CsiBridgeKind::Rk628)
+        );
+        assert_eq!(
+            CsiBridgeKind::from_subdev_name("rk-hdmirx-ctrl"),
+            Some(CsiBridgeKind::RkHdmirx)
+        );
+        assert_eq!(
+            CsiBridgeKind::from_subdev_name("tc358743 2-000f"),
+            Some(CsiBridgeKind::Tc358743)
+        );
+        assert_eq!(CsiBridgeKind::from_subdev_name("mystery"), None);
+    }
+}
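Taken together, the intended open-time flow on a bridge board is roughly the following sketch (hedged: the error plumbing and the mapping onto `AppError::CaptureNoSignal` live in the capture code, which is not part of this diff):

```rust
// Sketch of how capture is expected to drive this module: discover the
// paired subdev, probe it with the RK628-safe timeout, then either adopt
// the locked timings or surface the classified no-signal status.
fn probe_bridge(video: &std::path::Path) -> std::io::Result<Option<DvTimingsMode>> {
    let Some((subdev, kind)) = discover_subdev_for_video(video) else {
        return Ok(None); // no bridge subdev: plain USB UVC path
    };
    let fd = open_subdev(&subdev)?;
    let probe = probe_signal_thread_timeout(&fd, kind, RK628_SUBDEV_PROBE_TIMEOUT)
        .unwrap_or(ProbeResult::NoSync); // a timed-out probe reads as an unstable signal
    match probe {
        ProbeResult::Locked(mode) => {
            apply_dv_timings(&fd, mode.raw); // best-effort S_DV_TIMINGS before S_FMT
            Ok(Some(mode))
        }
        other => {
            let _status = other.as_status(); // caller maps this into AppError::CaptureNoSignal
            Ok(None)
        }
    }
}
```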
diff --git a/src/video/device.rs b/src/video/device.rs
index 2c517cf3..94b8957b 100644
--- a/src/video/device.rs
+++ b/src/video/device.rs
@@ -16,11 +16,13 @@ use v4l2r::ioctl::{
 use v4l2r::nix::errno::Errno;
 use v4l2r::{Format as V4l2rFormat, QueueType};
 
+use super::csi_bridge;
 use super::format::{PixelFormat, Resolution};
-use super::is_rk_hdmirx_driver;
+use super::{is_rk_hdmirx_driver, is_rkcif_driver};
 use crate::error::{AppError, Result};
 
-const DEVICE_PROBE_TIMEOUT_MS: u64 = 400;
+/// Per-node probe limit; rkcif/RK628 ioctl chains can exceed 1s under contention.
+const DEVICE_PROBE_TIMEOUT_MS: u64 = 10_000;
 
 /// Information about a video device
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -43,6 +45,20 @@ pub struct VideoDeviceInfo {
     pub is_capture_card: bool,
     /// Priority score for device selection (higher is better)
     pub priority: u32,
+    /// Whether an HDMI signal is currently detected (CSI/HDMI bridge devices only;
+    /// always `true` for USB capture cards).
+    pub has_signal: bool,
+    /// Path of the bridge subdev (`/dev/v4l-subdevN`) paired with this
+    /// capture node, if any. On Rockchip boards that wire an RK628 /
+    /// TC358746 / RK-HDMIRX through `rkcif`, `QUERY_DV_TIMINGS`,
+    /// `S_DV_TIMINGS`, `SUBSCRIBE_EVENT(SOURCE_CHANGE)`, `S_EDID` etc. all
+    /// return `ENOTTY` on the video node — they only work here. `None`
+    /// for USB UVC and for bridges that expose DV ioctls on the video node
+    /// directly (tc358743 via `uvcvideo`).
+    pub subdev_path: Option<PathBuf>,
+    /// Classification of the paired bridge (drives fingerprint logic for
+    /// RK628's synthetic-VGA no-signal pattern).
+    pub bridge_kind: Option<String>,
 }
 
 /// Information about a supported format
@@ -147,12 +163,114 @@ impl VideoDevice {
             read_write: flags.contains(Capabilities::READWRITE),
         };
 
-        let formats = if is_rk_hdmirx_driver(&caps.driver, &caps.card) {
-            self.enumerate_current_format_only()?
+        // For CSI/HDMI bridges, try to locate the paired subdev *before*
+        // the signal check: RK628 + rkcif places QUERY_DV_TIMINGS on the
+        // subdev (the video node returns ENOTTY). Tc358743 and rk_hdmirx
+        // typically expose DV ioctls on the video node itself, but having
+        // the subdev handle for EDID/event subscription doesn't hurt.
+        let (subdev_path, bridge_kind) = if is_rkcif_driver(&caps.driver)
+            || is_rk_hdmirx_driver(&caps.driver, &caps.card)
+        {
+            match csi_bridge::discover_subdev_for_video(&self.path) {
+                Some((path, kind)) => (Some(path), Some(format!("{:?}", kind).to_lowercase())),
+                None => (None, None),
+            }
+        } else {
+            (None, None)
+        };
+
+        // Probe the HDMI source for both signal presence *and* the live
+        // frame-rate. rkcif's `VIDIOC_ENUM_FRAMEINTERVALS` returns a
+        // meaningless `1.0..30.0` StepWise range, so the only trustworthy
+        // fps for rkcif + RK628 / rk_hdmirx boards comes from the bridge
+        // subdev's DV timings (pixelclock / total_width / total_height).
+        //
+        // Preference order:
+        //   1. Bridge subdev — on rkcif boards this is the *only* node
+        //      where QUERY_DV_TIMINGS works, and it lets the RK628
+        //      fingerprint filter kick in before we return has_signal=true.
+        //   2. Video node fallback — for rk_hdmirx / tc358743 where DV
+        //      timings are exposed on the capture node directly.
+        //   3. USB UVC — always true (no signal concept), no hdmi_fps.
+        //
+        // Subdev-reported HDMI source mode (width, height, fps). On rkcif +
+        // RK628 boards this is the *only* place DV timings work; the video
+        // node itself returns ENOTTY for QUERY/G_DV_TIMINGS, so without
+        // threading this through to `enumerate_bridge_formats` the format
+        // list ends up with zero resolutions and `select_resolution` falls
+        // back to the user's preferred value (e.g. 4K) even when the real
+        // source is 1080p.
+        let mut subdev_hdmi_mode: Option<(u32, u32, Option<f64>)> = None;
+
+        let (has_signal, hdmi_fps) = if let Some(subdev_path) = subdev_path.as_ref() {
+            match csi_bridge::open_subdev(subdev_path) {
+                Ok(subdev_fd) => {
+                    let kind = parse_bridge_kind(bridge_kind.as_deref())
+                        .unwrap_or(csi_bridge::CsiBridgeKind::Unknown);
+                    let probe = csi_bridge::probe_signal(&subdev_fd, kind);
+                    debug!(
+                        "has_signal via subdev {:?} ({:?}): {:?}",
+                        subdev_path, kind, probe
+                    );
+                    let fps = match &probe {
+                        csi_bridge::ProbeResult::Locked(mode) => {
+                            subdev_hdmi_mode = Some((mode.width, mode.height, mode.fps));
+                            mode.fps
+                        }
+                        _ => None,
+                    };
+                    (probe.is_locked(), fps)
+                }
+                Err(e) => {
+                    warn!("Failed to open subdev {:?}: {}", subdev_path, e);
+                    (false, None)
+                }
+            }
+        } else if is_rk_hdmirx_driver(&caps.driver, &caps.card)
+            || is_rkcif_driver(&caps.driver)
+        {
+            let dv = self.current_dv_timings_mode();
+            debug!(
+                "has_signal via video node {:?} (driver={}): dv_timings={:?}",
+                self.path, caps.driver, dv
+            );
+            let has_signal = dv
+                .as_ref()
+                .map(|(w, h, _)| *w > 64 && *h > 64)
+                .unwrap_or(false);
+            let fps = if has_signal {
+                dv.and_then(|(_, _, f)| f)
+            } else {
+                None
+            };
+            (has_signal, fps)
+        } else {
+            (true, None)
+        };
+
+        let mut formats = if is_rk_hdmirx_driver(&caps.driver, &caps.card)
+            || is_rkcif_driver(&caps.driver)
+        {
+            // CSI/HDMI bridge drivers (rk_hdmirx, rkcif) expose multiple pixel
+            // formats via ENUM_FMT (e.g. rk_hdmirx: BGR3/NV24/NV16/NV12) but
+            // `ENUM_FRAMESIZES` is fiction for these drivers (rkcif reports a
+            // degenerate `64x64 StepWise 8/8` that only describes its DMA
+            // engine, rk_hdmirx returns ENOTTY). The only authoritative
+            // resolution is whatever the bridge subdev's DV timings report,
+            // so we treat the HDMI source mode as the single allowed
+            // resolution for every pixel format.
+            self.enumerate_bridge_formats(subdev_hdmi_mode)?
         } else {
             self.enumerate_formats()?
         };
 
+        // For CSI/HDMI bridges, the driver-enumerated fps list is fiction
+        // (rkcif: always `1..30`; rk_hdmirx: typically `ENOTTY`). Replace
+        // it with the live HDMI source fps derived from the bridge DV
+        // timings so the UI reflects what the sink is actually receiving.
+        if let Some(fps) = hdmi_fps {
+            override_resolution_fps(&mut formats, fps);
+        }
+
         // Determine if this is likely an HDMI capture card
         let is_capture_card = Self::detect_capture_card(&caps.card, &caps.driver, &formats);
 
@@ -160,6 +278,11 @@ impl VideoDevice {
         let priority =
             Self::calculate_priority(&caps.card, &caps.driver, &formats, is_capture_card);
 
+        debug!(
+            "Device {:?}: {} formats, priority={}, has_signal={}, hdmi_fps={:?}, is_capture_card={}, subdev={:?}",
+            self.path, formats.len(), priority, has_signal, hdmi_fps, is_capture_card, subdev_path
+        );
+
         Ok(VideoDeviceInfo {
             path: self.path.clone(),
             name: caps.card.clone(),
@@ -170,6 +293,9 @@ impl VideoDevice {
             capabilities,
             is_capture_card,
             priority,
+            has_signal,
+            subdev_path,
+            bridge_kind,
         })
     }
 
@@ -213,32 +339,119 @@ impl VideoDevice {
-    fn enumerate_current_format_only(&self) -> Result<Vec<FormatInfo>> {
-        let current = self.get_format()?;
-        let Some(format) = PixelFormat::from_v4l2r(current.pixelformat) else {
+    /// Enumerate formats for CSI/HDMI bridge devices (rk_hdmirx, rkcif).
+    ///
+    /// Uses `VIDIOC_ENUM_FMT` to discover all supported pixel formats (the
+    /// output of `v4l2-ctl --list-formats`) and attaches the HDMI source
+    /// resolution read from the bridge DV timings (or G_FMT as a last
+    /// resort) as the single allowed resolution for every format.
+    ///
+    /// `ENUM_FRAMESIZES` is deliberately ignored here: rkcif advertises a
+    /// degenerate `64x64 StepWise 8/8` that only describes its DMA engine
+    /// (not what the HDMI source can actually deliver), and rk_hdmirx
+    /// typically returns ENOTTY. Neither the bridge nor rkcif performs
+    /// any hardware scaling, so the capture resolution is always the
+    /// HDMI source mode.
+    ///
+    /// Returned formats are sorted by `PixelFormat::priority()` so the
+    /// higher-level `select_format` picks a sensible default (NV12 > YUYV on
+    /// rkcif / rk_hdmirx) instead of whatever the driver happens to
+    /// have stuck as the current active format.
+    fn enumerate_bridge_formats(
+        &self,
+        subdev_hdmi_mode: Option<(u32, u32, Option<f64>)>,
+    ) -> Result<Vec<FormatInfo>> {
+        let queue = self.capture_queue_type()?;
+        let current_fmt = self.get_format().ok();
+
+        if let Some(fmt) = &current_fmt {
             debug!(
-                "Current active format {:?} is not supported by One-KVM, falling back to full enumeration",
-                current.pixelformat
+                "enumerate_bridge_formats: current G_FMT -> {:?} {}x{}",
+                fmt.pixelformat, fmt.width, fmt.height
             );
-            return self.enumerate_formats();
-        };
+        }
 
-        let description = self
-            .format_description(current.pixelformat)
-            .unwrap_or_else(|| format.to_string());
+        // Preference order for the HDMI source resolution:
+        //   1. Subdev-reported DV timings (authoritative on rkcif + RK628 where
+        //      the video node returns ENOTTY for QUERY_DV_TIMINGS).
+        //   2. Video-node DV timings / G_FMT (rk_hdmirx, tc358743 direct).
+        let hdmi_mode = subdev_hdmi_mode
+            .map(|(w, h, fps)| {
+                let mut fps_list = Vec::new();
+                if let Some(f) = fps {
+                    fps_list.push(f);
+                }
+                if let Some(parm_fps) = self.current_parm_fps() {
+                    fps_list.push(parm_fps);
+                }
+                normalize_fps_list(&mut fps_list);
+                ResolutionInfo::new(w, h, fps_list)
+            })
+            .or_else(|| self.current_mode_resolution_info());
+        if let Some(info) = &hdmi_mode {
+            debug!(
+                "enumerate_bridge_formats: HDMI source mode {}x{} (from {})",
+                info.width,
+                info.height,
+                if subdev_hdmi_mode.is_some() {
+                    "subdev"
+                } else {
+                    "video node"
+                }
+            );
+        } else {
+            debug!("enumerate_bridge_formats: no HDMI source mode available");
+        }
 
-        let mut resolutions = self.enumerate_resolutions(current.pixelformat)?;
-        if resolutions.is_empty() {
-            if let Some(current_mode) = self.current_mode_resolution_info() {
-                resolutions.push(current_mode);
+        let mut formats: Vec<FormatInfo> = Vec::new();
+        for desc in FormatIterator::new(&self.fd, queue) {
+            let Some(format) = PixelFormat::from_v4l2r(desc.pixelformat) else {
+                debug!(
+                    "enumerate_bridge_formats: skipping unsupported fourcc {:?} ({})",
+                    desc.pixelformat, desc.description
+                );
+                continue;
+            };
+
+            let resolutions = hdmi_mode.clone().into_iter().collect();
+
+            formats.push(FormatInfo {
+                format,
+                resolutions,
+                description: desc.description.clone(),
+            });
+        }
+
+        if formats.is_empty() {
+            // Fallback: driver refused ENUM_FMT entirely, use just the current
+            // active format reported by G_FMT so we still have something.
+            if let Some(fmt) = current_fmt {
+                if let Some(format) = PixelFormat::from_v4l2r(fmt.pixelformat) {
+                    let description = self
+                        .format_description(fmt.pixelformat)
+                        .unwrap_or_else(|| format.to_string());
+                    let resolutions = hdmi_mode.into_iter().collect();
+                    formats.push(FormatInfo {
+                        format,
+                        resolutions,
+                        description,
+                    });
+                }
             }
         }
 
-        Ok(vec![FormatInfo {
-            format,
-            resolutions,
-            description,
-        }])
+        // Highest priority first (MJPEG > NV12 > NV16 > NV24 > BGR24 > ...).
+        formats.sort_by(|a, b| b.format.priority().cmp(&a.format.priority()));
+
+        debug!(
+            "enumerate_bridge_formats: resolved formats {:?}",
+            formats
+                .iter()
+                .map(|f| format!("{}({} res)", f.format, f.resolutions.len()))
+                .collect::<Vec<_>>()
+        );
+
+        Ok(formats)
     }
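`override_resolution_fps`, called from `probe_device_info` above, sits outside the visible hunks; under the struct shapes used here a minimal version would just swap every advertised fps list for the live one. This is an assumed sketch — the real helper and the `ResolutionInfo` field name may differ:

```rust
// Assumed sketch of the helper used after format enumeration: replace the
// driver-enumerated fps lists (fiction on rkcif/rk_hdmirx) with the single
// fps derived from the bridge DV timings. Field name `fps` is hypothetical.
fn override_resolution_fps(formats: &mut [FormatInfo], fps: f64) {
    for format_info in formats.iter_mut() {
        for res in format_info.resolutions.iter_mut() {
            res.fps = vec![fps];
        }
    }
}
```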
 
     /// Enumerate resolutions for a specific format
@@ -259,24 +472,26 @@ impl VideoDevice {
                         resolutions.push(ResolutionInfo::new(d.width, d.height, fps));
                     }
                     FrmSizeTypes::StepWise(s) => {
-                        for res in [
-                            Resolution::VGA,
-                            Resolution::HD720,
-                            Resolution::HD1080,
-                            Resolution::UHD4K,
-                        ] {
-                            if res.width >= s.min_width
-                                && res.width <= s.max_width
-                                && res.height >= s.min_height
-                                && res.height <= s.max_height
-                            {
-                                let fps = self
-                                    .enumerate_fps(fourcc, res.width, res.height)
-                                    .unwrap_or_default();
-                                resolutions
-                                    .push(ResolutionInfo::new(res.width, res.height, fps));
-                            }
+                        // StepWise ranges are ignored on purpose: on
+                        // CSI/HDMI bridge drivers (rkcif) the range
+                        // only describes the DMA engine's capability
+                        // and not what the HDMI source can deliver,
+                        // so synthesising candidate resolutions from
+                        // it is misleading. Bridge devices go
+                        // through `enumerate_bridge_formats` and use
+                        // the DV-timings source mode directly; for
+                        // any other driver that emits StepWise we
+                        // fall back to the current active mode below.
+                        debug!(
+                            "ENUM_FRAMESIZES {:?}: ignoring StepWise {}x{} - {}x{} step {}/{}",
+                            fourcc, s.min_width, s.min_height,
+                            s.max_width, s.max_height,
+                            s.step_width, s.step_height
+                        );
+                        if resolutions.is_empty() {
+                            should_fallback_to_current_mode = true;
                         }
+                        break;
                     }
                 }
             }
@@ -449,6 +664,8 @@ impl VideoDevice {
             "macrosilicon",
             "tc358743",
             "uvc",
+            "rkcif",
+            "rk_hdmirx",
         ];
 
         // Check card/driver names
@@ -639,20 +856,16 @@ impl VideoDevice {
 pub fn enumerate_devices() -> Result<Vec<VideoDeviceInfo>> {
     info!("Enumerating video devices...");
 
-    let mut devices = Vec::new();
-
-    // Scan /dev/video* devices
+    // First pass: collect candidates that pass the sysfs-based pre-filter.
+    // This avoids opening orphan /dev/videoN nodes (ENODEV) and m2m codec
+    // nodes (ENOTTY) that would otherwise waste one syscall + one ioctl each.
+    let mut candidates: Vec<PathBuf> = Vec::new();
     for entry in std::fs::read_dir("/dev")
         .map_err(|e| AppError::VideoError(format!("Failed to read /dev: {}", e)))?
     {
-        let entry = match entry {
-            Ok(e) => e,
-            Err(_) => continue,
-        };
-
+        let Ok(entry) = entry else { continue };
         let path = entry.path();
         let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
-
         if !name.starts_with("video") {
             continue;
         }
@@ -663,11 +876,31 @@ pub fn enumerate_devices() -> Result<Vec<VideoDeviceInfo>> {
             debug!("Skipping non-capture candidate (sysfs): {:?}", path);
             continue;
         }
+        candidates.push(path);
+    }
 
-        // Try to open and query the device (with timeout)
-        match probe_device_with_timeout(&path, Duration::from_millis(DEVICE_PROBE_TIMEOUT_MS)) {
+    collapse_rkcif_probe_candidates(&mut candidates);
+
+    // Second pass: probe the remaining candidates in parallel. Each probe
+    // already spawns its own worker thread inside `probe_device_with_timeout`,
+    // so the total wall-clock time is bounded by `DEVICE_PROBE_TIMEOUT_MS`
+    // rather than (N × per-probe-latency).
+ let timeout = Duration::from_millis(DEVICE_PROBE_TIMEOUT_MS); + let mut handles = Vec::with_capacity(candidates.len()); + for path in candidates { + handles.push(std::thread::spawn(move || { + (path.clone(), probe_device_with_timeout(&path, timeout)) + })); + } + + let mut devices = Vec::new(); + for handle in handles { + let (path, info) = match handle.join() { + Ok(pair) => pair, + Err(_) => continue, + }; + match info { Some(info) => { - // Only include devices with video capture capability if info.capabilities.video_capture || info.capabilities.video_capture_mplane { info!( "Found capture device: {} ({}) - {} formats", @@ -686,13 +919,76 @@ pub fn enumerate_devices() -> Result> { } } - // Sort by priority (highest first) - devices.sort_by(|a, b| b.priority.cmp(&a.priority)); + // Sort by priority (highest first), then by path (lowest first) as tiebreaker. + // The path tiebreaker ensures deterministic ordering when multiple sub-devices + // share the same priority (e.g. rkcif nodes), so that /dev/video0 is preferred + // over /dev/video10 after deduplication. + devices.sort_by(|a, b| b.priority.cmp(&a.priority).then_with(|| a.path.cmp(&b.path))); + + // Deduplicate rkcif sub-devices: the driver exposes many /dev/video* nodes + // for a single MIPI CSI pipeline. Keep only the highest-priority node per + // (driver, bus_info) group so users see one device instead of ~11. + dedup_platform_subdevices(&mut devices); info!("Found {} video capture devices", devices.len()); Ok(devices) } +/// Collapse platform sub-device nodes that share the same driver + bus_info +/// into a single entry (the one with the highest priority / most formats). +/// Currently applies to the `rkcif` driver on Rockchip SoCs where each +/// media-pipeline link creates its own `/dev/video*` node. +fn dedup_platform_subdevices(devices: &mut Vec) { + // devices is already sorted by priority (descending). + // Walk the list and keep only the first (highest-priority) representative + // of each (driver, bus_info) group that needs deduplication. + let mut seen = std::collections::HashSet::new(); + devices.retain(|d| { + if !is_rkcif_driver(&d.driver) || d.bus_info.is_empty() { + return true; + } + let key = (d.driver.clone(), d.bus_info.clone()); + seen.insert(key) + }); +} + +/// rkcif registers many `/dev/video*` queues; probing all in parallel can +/// contend and time out. Keep one node per board (lowest `videoN`). +fn collapse_rkcif_probe_candidates(candidates: &mut Vec) { + let mut rkcif: Vec = Vec::new(); + let mut rest: Vec = Vec::new(); + for p in candidates.drain(..) { + if sysfs_uevent_driver(&p).is_some_and(|d| d.contains("rkcif")) { + rkcif.push(p); + } else { + rest.push(p); + } + } + if let Some(one) = rkcif + .iter() + .min_by_key(|p| video_index(p).unwrap_or(u32::MAX)) + .cloned() + { + rest.push(one); + } + *candidates = rest; +} + +fn sysfs_uevent_driver(path: &Path) -> Option { + let name = path.file_name()?.to_str()?; + let uevent = + read_sysfs_string(&Path::new("/sys/class/video4linux").join(name).join("device/uevent"))?; + extract_uevent_value(&uevent, "driver") +} + +fn video_index(path: &Path) -> Option { + path.file_name()? + .to_str()? + .strip_prefix("video")? 
+ .parse() + .ok() +} + fn probe_device_with_timeout(path: &Path, timeout: Duration) -> Option { let path = path.to_path_buf(); let path_for_thread = path.clone(); @@ -725,8 +1021,26 @@ fn sysfs_maybe_capture(path: &Path) -> bool { Some(name) => name, None => return true, }; + + // Fast-path: nodes whose filename clearly marks them as m2m codecs + // (e.g. /dev/video-enc0, /dev/video-dec0 on Rockchip). These never + // answer VIDIOC_QUERYCAP as capture devices. + let name_lower = name.to_ascii_lowercase(); + let filename_skip = ["-enc", "-dec", "-codec", "-m2m", "-vepu", "-vdpu"]; + if filename_skip.iter().any(|hint| name_lower.contains(hint)) { + return false; + } + let sysfs_base = Path::new("/sys/class/video4linux").join(name); + // Orphan /dev/videoN nodes (no matching sysfs entry) can appear when the + // kernel driver that created them has been unloaded but the device nodes + // were never cleaned up. Opening them returns ENODEV; skip the probe. + if !sysfs_base.exists() { + debug!("Skipping {:?}: no matching /sys/class/video4linux entry", path); + return false; + } + let sysfs_name = read_sysfs_string(&sysfs_base.join("name")) .unwrap_or_default() .to_lowercase(); @@ -746,19 +1060,51 @@ fn sysfs_maybe_capture(path: &Path) -> bool { "macrosilicon", "tc358743", "grabber", + "rkcif", + "rk_hdmirx", ]; if capture_hints.iter().any(|hint| sysfs_name.contains(hint)) { maybe_capture = true; } - if let Some(driver) = driver { - if driver.contains("uvcvideo") || driver.contains("tc358743") { + if let Some(driver) = &driver { + if driver.contains("uvcvideo") + || driver.contains("tc358743") + || driver.contains("rkcif") + || driver.contains("rk_hdmirx") + { maybe_capture = true; } } + // Skip known non-capture drivers (RK video codecs, Hantro VPU, ISP/VPE + // pipelines, MIPI ISP statistics / params nodes). These would otherwise + // succeed QUERYCAP but expose only VIDEO_M2M / STATS / PARAMS and get + // filtered later — skipping here saves an open() + ioctl() per node. + let driver_skip = [ + "rkvenc", "rkvdec", "vepu", "vdpu", "hantro", "mpp_", "rockchip-vpu", + ]; + if let Some(driver) = &driver { + if driver_skip.iter().any(|hint| driver.contains(hint)) { + return false; + } + } + let skip_hints = [ - "codec", "decoder", "encoder", "isp", "mem2mem", "m2m", "vbi", "radio", "metadata", + "codec", + "decoder", + "encoder", + "isp", + "mem2mem", + "m2m", + "vbi", + "radio", + "metadata", "output", + // rkisp sub-nodes that are not video capture queues + "rkisp-statistics", + "rkisp-input-params", + "rkisp_rawrd", + "rkisp_rawwr", ]; if skip_hints.iter().any(|hint| sysfs_name.contains(hint)) && !maybe_capture { return false; @@ -783,6 +1129,18 @@ fn extract_uevent_value(content: &str, key: &str) -> Option { None } +/// Parse the `bridge_kind` string serialised into `VideoDeviceInfo` back +/// into the strongly-typed enum used by [`csi_bridge`]. +pub(crate) fn parse_bridge_kind(kind: Option<&str>) -> Option { + Some(match kind? 
{ + "rk628" => csi_bridge::CsiBridgeKind::Rk628, + "rkhdmirx" => csi_bridge::CsiBridgeKind::RkHdmirx, + "tc358743" => csi_bridge::CsiBridgeKind::Tc358743, + "unknown" => csi_bridge::CsiBridgeKind::Unknown, + _ => return None, + }) +} + fn dv_timings_fps(bt: &v4l2_bt_timings) -> Option { let total_width = bt.width + bt.hfrontporch + bt.hsync + bt.hbackporch; let total_height = if bt.interlaced != 0 { @@ -813,6 +1171,24 @@ fn normalize_fps_list(fps_list: &mut Vec) { fps_list.dedup_by(|a, b| (*a - *b).abs() < 0.01); } +/// Replace every `ResolutionInfo::fps` in `formats` with the single HDMI +/// source frame-rate. Used for CSI/HDMI bridge devices (rkcif, rk_hdmirx) +/// whose `VIDIOC_ENUM_FRAMEINTERVALS` returns meaningless StepWise values +/// — the only trustworthy fps comes from the bridge DV-timings on the +/// paired subdev. Silently no-op when `fps` normalises to empty. +fn override_resolution_fps(formats: &mut [FormatInfo], fps: f64) { + let mut normalized = vec![fps]; + normalize_fps_list(&mut normalized); + if normalized.is_empty() { + return; + } + for fi in formats.iter_mut() { + for res in fi.resolutions.iter_mut() { + res.fps = normalized.clone(); + } + } +} + /// Find the best video device for KVM use pub fn find_best_device() -> Result { let devices = enumerate_devices()?; diff --git a/src/video/encoder/jpeg.rs b/src/video/encoder/jpeg.rs index 5f72b195..f7a448ef 100644 --- a/src/video/encoder/jpeg.rs +++ b/src/video/encoder/jpeg.rs @@ -152,6 +152,41 @@ impl JpegEncoder { self.encode_i420_to_jpeg(sequence) } + /// YVYU → swap chroma to YUYV in scratch, then same as [`Self::encode_yuyv`]. + pub fn encode_yvyu(&mut self, data: &[u8], sequence: u64) -> Result { + let width = self.config.resolution.width as usize; + let height = self.config.resolution.height as usize; + let expected_size = width * height * 2; + + if data.len() < expected_size { + return Err(AppError::VideoError(format!( + "YVYU data too small: {} < {}", + data.len(), + expected_size + ))); + } + + // Reuse bgra_buffer as scratch for the swapped YUYV data. 
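The in-place swap using `bgra_buffer` as scratch follows this sketch; as a standalone property, the transform the comment above describes looks like this (hypothetical helper, same byte layout as the loop below):

```rust
// Hypothetical standalone version of the swap done in place below:
// YVYU macropixel [Y0 V0 Y1 U0] becomes YUYV [Y0 U0 Y1 V0].
fn yvyu_to_yuyv(src: &[u8], dst: &mut [u8]) {
    for (d, s) in dst.chunks_exact_mut(4).zip(src.chunks_exact(4)) {
        d[0] = s[0]; // Y0
        d[1] = s[3]; // U0
        d[2] = s[2]; // Y1
        d[3] = s[1]; // V0
    }
}

fn main() {
    let src = [10u8, 200, 20, 100]; // Y0=10, V0=200, Y1=20, U0=100
    let mut dst = [0u8; 4];
    yvyu_to_yuyv(&src, &mut dst);
    assert_eq!(dst, [10, 100, 20, 200]); // Y0, U0, Y1, V0
}
```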
+ if self.bgra_buffer.len() < expected_size { + self.bgra_buffer.resize(expected_size, 0); + } + let dst = &mut self.bgra_buffer[..expected_size]; + let src = &data[..expected_size]; + + // Swap bytes [1] and [3] in every 4-byte macropixel: Y0 V0 Y1 U0 → Y0 U0 Y1 V0 + for (chunk_dst, chunk_src) in dst.chunks_exact_mut(4).zip(src.chunks_exact(4)) { + chunk_dst[0] = chunk_src[0]; // Y0 + chunk_dst[1] = chunk_src[3]; // U0 + chunk_dst[2] = chunk_src[2]; // Y1 + chunk_dst[3] = chunk_src[1]; // V0 + } + + libyuv::yuy2_to_i420(dst, &mut self.i420_buffer, width as i32, height as i32) + .map_err(|e| AppError::VideoError(format!("libyuv YVYU→I420 failed: {}", e)))?; + + self.encode_i420_to_jpeg(sequence) + } + /// Encode NV12 frame to JPEG pub fn encode_nv12(&mut self, data: &[u8], sequence: u64) -> Result { let width = self.config.resolution.width as usize; @@ -323,7 +358,8 @@ impl crate::video::encoder::traits::Encoder for JpegEncoder { fn encode(&mut self, data: &[u8], sequence: u64) -> Result { match self.config.input_format { - PixelFormat::Yuyv | PixelFormat::Yvyu => self.encode_yuyv(data, sequence), + PixelFormat::Yuyv => self.encode_yuyv(data, sequence), + PixelFormat::Yvyu => self.encode_yvyu(data, sequence), PixelFormat::Nv12 => self.encode_nv12(data, sequence), PixelFormat::Nv16 => self.encode_nv16(data, sequence), PixelFormat::Nv24 => self.encode_nv24(data, sequence), diff --git a/src/video/format.rs b/src/video/format.rs index f794dcfe..ae55b27e 100644 --- a/src/video/format.rs +++ b/src/video/format.rs @@ -141,8 +141,8 @@ impl PixelFormat { match self { PixelFormat::Mjpeg => 100, PixelFormat::Jpeg => 99, - PixelFormat::Yuyv => 80, - PixelFormat::Nv12 => 75, + PixelFormat::Nv12 => 80, + PixelFormat::Yuyv => 75, PixelFormat::Nv21 => 74, PixelFormat::Yuv420 => 70, PixelFormat::Uyvy => 65, @@ -159,30 +159,18 @@ impl PixelFormat { /// Get recommended format for video encoding (WebRTC) /// - /// Hardware encoding prefers: NV12 > YUYV - /// Software encoding prefers: YUYV > NV12 + /// Prefers NV12 over YUYV (matches [`Self::priority`]) /// /// Returns None if no suitable format is available pub fn recommended_for_encoding( available: &[PixelFormat], - is_hardware: bool, + _is_hardware: bool, ) -> Option { - if is_hardware { - // Hardware encoding: NV12 > YUYV - if available.contains(&PixelFormat::Nv12) { - return Some(PixelFormat::Nv12); - } - if available.contains(&PixelFormat::Yuyv) { - return Some(PixelFormat::Yuyv); - } - } else { - // Software encoding: YUYV > NV12 - if available.contains(&PixelFormat::Yuyv) { - return Some(PixelFormat::Yuyv); - } - if available.contains(&PixelFormat::Nv12) { - return Some(PixelFormat::Nv12); - } + if available.contains(&PixelFormat::Nv12) { + return Some(PixelFormat::Nv12); + } + if available.contains(&PixelFormat::Yuyv) { + return Some(PixelFormat::Yuyv); } // Fallback to any non-compressed format available.iter().find(|f| !f.is_compressed()).copied() @@ -280,19 +268,76 @@ impl Resolution { self.width as u64 * self.height as u64 } - /// Common resolutions + /// Common resolutions. + /// + /// All constants here are 8-pixel aligned on both axes so they survive + /// the `step=8` constraint imposed by most CSI bridge drivers (e.g. + /// `rkcif` on Rockchip). If you add a new entry, make sure + /// `width % 8 == 0 && height % 8 == 0`, otherwise the driver will + /// silently round it at `S_FMT` time and the UI will report a + /// different resolution than the one the user picked. 
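A small guard test pinning down the `width % 8 == 0 && height % 8 == 0` invariant stated above could look like this (a sketch with an abbreviated, illustrative constant list); the actual constants resume right after:

```rust
// Sketch of a CI-time guard for the 8-pixel alignment invariant; the
// constant list here is abbreviated and illustrative.
struct Resolution {
    width: u32,
    height: u32,
}

const COMMON: &[Resolution] = &[
    Resolution { width: 640, height: 480 },   // VGA
    Resolution { width: 1280, height: 720 },  // HD720
    Resolution { width: 1360, height: 768 },  // 8-aligned "1366x768" stand-in
    Resolution { width: 1920, height: 1080 }, // HD1080
];

fn main() {
    for r in COMMON {
        assert!(
            r.width % 8 == 0 && r.height % 8 == 0,
            "{}x{} is not 8-pixel aligned",
            r.width,
            r.height
        );
    }
}
```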
pub const VGA: Resolution = Resolution { width: 640, height: 480, }; + /// CEA-2/3 NTSC SD (480p) + pub const NTSC: Resolution = Resolution { + width: 720, + height: 480, + }; + /// CEA-5/17/18 PAL SD (576p) + pub const PAL: Resolution = Resolution { + width: 720, + height: 576, + }; + /// SVGA — legacy BIOS / POST output + pub const SVGA: Resolution = Resolution { + width: 800, + height: 600, + }; + /// XGA — very common BIOS / server console output + pub const XGA: Resolution = Resolution { + width: 1024, + height: 768, + }; pub const HD720: Resolution = Resolution { width: 1280, height: 720, }; + /// WXGA — older laptop panels + pub const WXGA: Resolution = Resolution { + width: 1280, + height: 800, + }; + /// SXGA — 4:3 / 5:4 legacy desktop displays + pub const SXGA: Resolution = Resolution { + width: 1280, + height: 1024, + }; + /// 1360×768 — fallback for 8-aligned "1366×768"-like panels + pub const HDTV: Resolution = Resolution { + width: 1360, + height: 768, + }; + /// UXGA — industrial / 4:3 legacy displays + pub const UXGA: Resolution = Resolution { + width: 1600, + height: 1200, + }; pub const HD1080: Resolution = Resolution { width: 1920, height: 1080, }; + /// WUXGA — 16:10 professional monitors + pub const WUXGA: Resolution = Resolution { + width: 1920, + height: 1200, + }; + /// QHD / 2K — modern PC monitors + pub const QHD: Resolution = Resolution { + width: 2560, + height: 1440, + }; pub const UHD4K: Resolution = Resolution { width: 3840, height: 2160, diff --git a/src/video/mod.rs b/src/video/mod.rs index a98030f4..5d2db7ad 100644 --- a/src/video/mod.rs +++ b/src/video/mod.rs @@ -4,6 +4,7 @@ pub mod codec_constraints; pub mod convert; +pub mod csi_bridge; pub mod decoder; pub mod device; pub mod encoder; @@ -25,6 +26,55 @@ pub use shared_video_pipeline::{ pub use stream_manager::VideoStreamManager; pub use streamer::{Streamer, StreamerState}; +/// Fine-grained signal status reported by CSI/HDMI bridge devices. +/// +/// Only `rk_hdmirx` / `rkcif` / tc358743-class bridges can distinguish these +/// via `VIDIOC_QUERY_DV_TIMINGS` errno; USB UVC devices always report `Ok` +/// until they fail with a generic timeout. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum SignalStatus { + /// HDMI cable physically disconnected (`ENOLINK`). + NoCable, + /// TMDS signal present but timings cannot be locked (`ENOLCK`). + NoSync, + /// Timings outside of hardware capability (`ERANGE`). + OutOfRange, + /// Generic "no usable source" (fallback for EINVAL / EIO / unknown errnos). 
+ NoSignal, +} + +impl SignalStatus { + pub fn as_str(self) -> &'static str { + match self { + SignalStatus::NoCable => "no_cable", + SignalStatus::NoSync => "no_sync", + SignalStatus::OutOfRange => "out_of_range", + SignalStatus::NoSignal => "no_signal", + } + } + + pub fn from_str(s: &str) -> Option { + Some(match s { + "no_cable" => SignalStatus::NoCable, + "no_sync" => SignalStatus::NoSync, + "out_of_range" => SignalStatus::OutOfRange, + "no_signal" => SignalStatus::NoSignal, + _ => return None, + }) + } +} + +impl From for streamer::StreamerState { + fn from(value: SignalStatus) -> Self { + match value { + SignalStatus::NoCable => streamer::StreamerState::NoCable, + SignalStatus::NoSync => streamer::StreamerState::NoSync, + SignalStatus::OutOfRange => streamer::StreamerState::OutOfRange, + SignalStatus::NoSignal => streamer::StreamerState::NoSignal, + } + } +} + pub(crate) fn is_rk_hdmirx_driver(driver: &str, card: &str) -> bool { driver.eq_ignore_ascii_case("rk_hdmirx") || card.eq_ignore_ascii_case("rk_hdmirx") } @@ -32,3 +82,13 @@ pub(crate) fn is_rk_hdmirx_driver(driver: &str, card: &str) -> bool { pub(crate) fn is_rk_hdmirx_device(device: &device::VideoDeviceInfo) -> bool { is_rk_hdmirx_driver(&device.driver, &device.card) } + +pub(crate) fn is_rkcif_driver(driver: &str) -> bool { + driver.eq_ignore_ascii_case("rkcif") +} + +/// Unified check for CSI/HDMI bridge devices (rk_hdmirx, rkcif, etc.) +/// that require special enumeration and format-selection logic. +pub(crate) fn is_csi_hdmi_bridge(device: &device::VideoDeviceInfo) -> bool { + is_rk_hdmirx_device(device) || is_rkcif_driver(&device.driver) +} diff --git a/src/video/shared_video_pipeline.rs b/src/video/shared_video_pipeline.rs index 23f8ed8b..c2e61803 100644 --- a/src/video/shared_video_pipeline.rs +++ b/src/video/shared_video_pipeline.rs @@ -19,6 +19,7 @@ mod encoder_state; use bytes::Bytes; +use parking_lot::Mutex as ParkingMutex; use parking_lot::RwLock as ParkingRwLock; use std::collections::HashMap; use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering}; @@ -33,10 +34,10 @@ use self::encoder_state::{build_encoder_state, EncoderThreadState}; const AUTO_STOP_GRACE_PERIOD_SECS: u64 = 3; /// After this many consecutive timeouts, log a prominent warning. const CAPTURE_TIMEOUT_RESTART_THRESHOLD: u32 = 5; -/// After this many consecutive timeouts, actually stop the pipeline. -/// Setting this high (60 × 2 s poll = ~120 s) keeps WebRTC sessions alive -/// while the source is temporarily unavailable (e.g. resolution change/reboot). 
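Before the timeout constants resume below, note that `as_str`/`from_str` above form an exact round trip, which a self-contained property test can pin down (the enum is restated locally so the sketch compiles on its own):

```rust
// Round-trip property for the wire-name mapping; local restatement of
// the enum so the sketch is self-contained.
#[derive(Debug, Clone, Copy, PartialEq)]
enum SignalStatus {
    NoCable,
    NoSync,
    OutOfRange,
    NoSignal,
}

impl SignalStatus {
    fn as_str(self) -> &'static str {
        match self {
            SignalStatus::NoCable => "no_cable",
            SignalStatus::NoSync => "no_sync",
            SignalStatus::OutOfRange => "out_of_range",
            SignalStatus::NoSignal => "no_signal",
        }
    }
    fn from_str(s: &str) -> Option<Self> {
        Some(match s {
            "no_cable" => SignalStatus::NoCable,
            "no_sync" => SignalStatus::NoSync,
            "out_of_range" => SignalStatus::OutOfRange,
            "no_signal" => SignalStatus::NoSignal,
            _ => return None,
        })
    }
}

fn main() {
    for st in [
        SignalStatus::NoCable,
        SignalStatus::NoSync,
        SignalStatus::OutOfRange,
        SignalStatus::NoSignal,
    ] {
        assert_eq!(SignalStatus::from_str(st.as_str()), Some(st));
    }
    assert_eq!(SignalStatus::from_str("bogus"), None);
}
```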
const CAPTURE_TIMEOUT_STOP_THRESHOLD: u32 = 60; +const CAPTURE_TIMEOUT_SOFT_RESTART_THRESHOLD: u32 = 3; +const CSI_BRIDGE_NOSIGNAL_INTERVAL_MS: u64 = 500; +const NOSIGNAL_POLL_MAX: Duration = Duration::from_secs(20); /// Minimum valid frame size for capture const MIN_CAPTURE_FRAME_SIZE: usize = 128; /// Validate every JPEG frame during startup to avoid poisoning HW decoders @@ -49,10 +50,13 @@ const ENCODE_ERROR_THROTTLE_SECS: u64 = 5; use crate::error::{AppError, Result}; use crate::utils::LogThrottler; +use crate::video::csi_bridge::{self, ProbeResult}; use crate::video::encoder::registry::{EncoderBackend, VideoEncoderType}; use crate::video::format::{PixelFormat, Resolution}; use crate::video::frame::{FrameBuffer, FrameBufferPool, VideoFrame}; -use crate::video::v4l2r_capture::V4l2rCaptureStream; +use crate::video::device::parse_bridge_kind; +use crate::video::SignalStatus; +use crate::video::v4l2r_capture::{is_source_changed_error, BridgeContext, V4l2rCaptureStream}; #[cfg(any(target_arch = "aarch64", target_arch = "arm"))] use hwcodec::ffmpeg_hw::last_error_message as ffmpeg_hw_last_error; @@ -77,6 +81,39 @@ enum PipelineCmd { SetBitrate { bitrate_kbps: u32, gop: u32 }, } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct PipelineStateNotification { + pub state: &'static str, + pub reason: Option<&'static str>, + pub next_retry_ms: Option, +} + +impl PipelineStateNotification { + fn streaming() -> Self { + Self { + state: "streaming", + reason: None, + next_retry_ms: None, + } + } + + fn no_signal(status: SignalStatus, next_retry_ms: Option) -> Self { + Self { + state: "no_signal", + reason: Some(status.as_str()), + next_retry_ms, + } + } + + fn device_busy(reason: &'static str) -> Self { + Self { + state: "device_busy", + reason: Some(reason), + next_retry_ms: None, + } + } +} + /// Shared video pipeline configuration #[derive(Debug, Clone)] pub struct SharedVideoPipelineConfig { @@ -241,6 +278,84 @@ pub struct SharedVideoPipeline { /// Pipeline start time for PTS calculation (epoch millis, 0 = not set) /// Uses AtomicI64 instead of Mutex for lock-free access pipeline_start_time_ms: AtomicI64, + pending_sync_geometry: ParkingMutex>, + state_notifier: ParkingRwLock>>, + last_state_notification: ParkingMutex>, +} + +fn poll_bridge_subdev_after_no_signal( + bridge_ctx: &BridgeContext, + pipeline: &SharedVideoPipeline, +) { + let Some(subdev_path) = bridge_ctx.subdev_path.as_ref() else { + return; + }; + let kind = bridge_ctx + .kind + .unwrap_or(csi_bridge::CsiBridgeKind::Unknown); + let deadline = Instant::now() + NOSIGNAL_POLL_MAX; + let mut poll_count: u32 = 0; + info!( + "No-signal poll: scanning subdev {:?} every {} ms (max {:?})", + subdev_path, CSI_BRIDGE_NOSIGNAL_INTERVAL_MS, NOSIGNAL_POLL_MAX + ); + loop { + if !pipeline.running_flag.load(Ordering::Acquire) { + return; + } + if Instant::now() >= deadline { + info!( + "No-signal poll: stopped after {:?} ({} attempts)", + NOSIGNAL_POLL_MAX, poll_count + ); + return; + } + let fd = match csi_bridge::open_subdev(subdev_path) { + Ok(f) => f, + Err(e) => { + debug!("No-signal poll: open subdev {:?} failed: {}", subdev_path, e); + std::thread::sleep(Duration::from_millis(CSI_BRIDGE_NOSIGNAL_INTERVAL_MS)); + continue; + } + }; + match csi_bridge::probe_signal_thread_timeout( + &fd, + kind, + csi_bridge::RK628_SUBDEV_PROBE_TIMEOUT, + ) { + Some(ProbeResult::Locked(mode)) => { + info!( + "No-signal poll: locked {}x{} @ {} Hz — proceeding to capture re-open", + mode.width, mode.height, mode.pixelclock + ); + return; + } + 
Some(other) => { + poll_count = poll_count.saturating_add(1); + if poll_count == 1 || poll_count.is_multiple_of(8) { + debug!( + "No-signal poll: attempt {} — still {:?}", + poll_count, + other.as_status() + ); + } + if let Some(st) = other.as_status() { + pipeline.notify_state(PipelineStateNotification::no_signal( + st, + Some(CSI_BRIDGE_NOSIGNAL_INTERVAL_MS.saturating_add(50)), + )); + } + } + None => { + poll_count = poll_count.saturating_add(1); + debug!( + "No-signal poll: attempt {} — probe ioctl timed out", + poll_count + ); + } + } + std::thread::sleep(Duration::from_millis(CSI_BRIDGE_NOSIGNAL_INTERVAL_MS)); + } } impl SharedVideoPipeline { @@ -268,11 +383,43 @@ impl SharedVideoPipeline { sequence: AtomicU64::new(0), keyframe_requested: AtomicBool::new(false), pipeline_start_time_ms: AtomicI64::new(0), + pending_sync_geometry: ParkingMutex::new(None), + state_notifier: ParkingRwLock::new(None), + last_state_notification: ParkingMutex::new(None), }); Ok(pipeline) } + pub fn take_pending_sync_geometry(&self) -> Option<(Resolution, PixelFormat)> { + self.pending_sync_geometry.lock().take() + } + + pub fn set_state_notifier( + &self, + notifier: Option>, + ) { + *self.state_notifier.write() = notifier; + } + + fn notify_state(&self, notification: PipelineStateNotification) { + let should_emit = { + let mut last = self.last_state_notification.lock(); + if last.as_ref() == Some(¬ification) { + false + } else { + *last = Some(notification); + true + } + }; + if !should_emit { + return; + } + if let Some(notifier) = self.state_notifier.read().clone() { + notifier(notification); + } + } + /// Subscribe to encoded frames pub fn subscribe(&self) -> mpsc::Receiver> { let (tx, rx) = mpsc::channel(4); @@ -393,13 +540,68 @@ impl SharedVideoPipeline { device_path: std::path::PathBuf, buffer_count: u32, _jpeg_quality: u8, + subdev_path: Option, + bridge_kind: Option, ) -> Result<()> { if *self.running_rx.borrow() { warn!("Pipeline already running"); return Ok(()); } - let config = self.config.read().await.clone(); + let mut config = self.config.read().await.clone(); + { + let mut last = self.last_state_notification.lock(); + *last = None; + } + + // Pre-open for DV negotiation; align encoder to probed size. 
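The `notify_state` dedup above is edge-triggered: the callback fires only when the notification actually changes. Distilled into a dependency-free sketch (hypothetical names, `std::sync::Mutex` in place of `parking_lot`); the pre-open step introduced by the last comment continues right below:

```rust
use std::sync::Mutex;

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Notification {
    state: &'static str,
}

struct Notifier {
    last: Mutex<Option<Notification>>,
}

impl Notifier {
    // Returns true only on an actual edge; repeats are swallowed,
    // mirroring the last_state_notification check above.
    fn notify(&self, n: Notification) -> bool {
        let mut last = self.last.lock().unwrap();
        if last.as_ref() == Some(&n) {
            return false;
        }
        *last = Some(n);
        true
    }
}

fn main() {
    let nt = Notifier { last: Mutex::new(None) };
    assert!(nt.notify(Notification { state: "no_signal" }));
    assert!(!nt.notify(Notification { state: "no_signal" })); // deduped
    assert!(nt.notify(Notification { state: "streaming" })); // edge
}
```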
+ let bridge_ctx_probe = BridgeContext::from_parts( + subdev_path.clone(), + parse_bridge_kind(bridge_kind.as_deref()), + ); + let preopened: Option = + match V4l2rCaptureStream::open_with_bridge( + &device_path, + config.resolution, + config.input_format, + config.fps, + buffer_count.max(1), + Duration::from_secs(2), + bridge_ctx_probe, + ) { + Ok(s) => { + let negotiated_res = s.resolution(); + let negotiated_fmt = s.format(); + if negotiated_res != config.resolution || negotiated_fmt != config.input_format { + info!( + "Negotiated capture {}x{} {:?} (configured {}x{} {:?}) — aligning encoder to source", + negotiated_res.width, + negotiated_res.height, + negotiated_fmt, + config.resolution.width, + config.resolution.height, + config.input_format + ); + config.resolution = negotiated_res; + config.input_format = negotiated_fmt; + *self.config.write().await = config.clone(); + } + Some(s) + } + Err(AppError::CaptureNoSignal { kind }) => { + debug!( + "Pre-probe: no signal — encoder uses configured geometry until capture opens" + ); + let status = SignalStatus::from_str(&kind).unwrap_or(SignalStatus::NoSignal); + self.notify_state(PipelineStateNotification::no_signal( + status, + Some(Duration::from_secs(2).as_millis() as u64), + )); + None + } + Err(e) => return Err(e), + }; + let mut encoder_state = build_encoder_state(&config)?; let _ = self.running.send(true); self.running_flag.store(true, Ordering::Release); @@ -499,28 +701,123 @@ impl SharedVideoPipeline { let latest_frame = latest_frame.clone(); let frame_seq_tx = frame_seq_tx.clone(); let buffer_pool = buffer_pool.clone(); + let bridge_ctx = BridgeContext::from_parts( + subdev_path, + parse_bridge_kind(bridge_kind.as_deref()), + ); std::thread::spawn(move || { - let mut stream = match V4l2rCaptureStream::open( - &device_path, - config.resolution, - config.input_format, - config.fps, - buffer_count.max(1), - Duration::from_secs(2), - ) { - Ok(stream) => stream, - Err(e) => { - error!("Failed to open capture stream: {}", e); - let _ = pipeline.running.send(false); - pipeline.running_flag.store(false, Ordering::Release); - let _ = frame_seq_tx.send(1); - return; - } - }; + let mut stream: Option = None; + let mut initial_geometry: Option<(Resolution, PixelFormat)> = None; + let mut resolution = config.resolution; + let mut pixel_format = config.input_format; + let mut stride: u32 = 0; - let resolution = stream.resolution(); - let pixel_format = stream.format(); - let stride = stream.stride(); + match preopened { + Some(s) => { + resolution = s.resolution(); + pixel_format = s.format(); + stride = s.stride(); + initial_geometry = Some((resolution, pixel_format)); + stream = Some(s); + } + None => { + match V4l2rCaptureStream::open_with_bridge( + &device_path, + config.resolution, + config.input_format, + config.fps, + buffer_count.max(1), + Duration::from_secs(2), + bridge_ctx.clone(), + ) { + Ok(s) => { + resolution = s.resolution(); + pixel_format = s.format(); + stride = s.stride(); + if resolution != config.resolution + || pixel_format != config.input_format + { + info!( + "First capture open negotiated {}x{} {:?} but encoder expects {}x{} {:?} — stopping for dimension resync", + resolution.width, + resolution.height, + pixel_format, + config.resolution.width, + config.resolution.height, + config.input_format + ); + pipeline.notify_state(PipelineStateNotification::device_busy( + "config_changing", + )); + *pipeline.pending_sync_geometry.lock() = + Some((resolution, pixel_format)); + let _ = pipeline.running.send(false); + 
+                            pipeline.running_flag.store(false, Ordering::Release);
+                            let _ = frame_seq_tx.send(1);
+                            return;
+                        }
+                        initial_geometry = Some((resolution, pixel_format));
+                        stream = Some(s);
+                    }
+                    Err(AppError::CaptureNoSignal { kind }) => {
+                        warn!(
+                            "Capture stream open reports no signal ({}) — pipeline will retry",
+                            kind
+                        );
+                        pipeline.notify_state(PipelineStateNotification::no_signal(
+                            SignalStatus::from_str(&kind).unwrap_or(SignalStatus::NoSignal),
+                            Some(CSI_BRIDGE_NOSIGNAL_INTERVAL_MS),
+                        ));
+                    }
+                    Err(e) => {
+                        error!("Failed to open capture stream: {}", e);
+                        let _ = pipeline.running.send(false);
+                        pipeline.running_flag.store(false, Ordering::Release);
+                        let _ = frame_seq_tx.send(1);
+                        return;
+                    }
+                }
+            }
+        }
+
+        /// Outcome of a single capture-stream (re)open attempt:
+        /// * `Opened(stream)`: opened successfully
+        /// * `NoSignal(status)`: `CaptureNoSignal`, keep retrying later
+        /// * `Fatal`: unrecoverable error, stop the pipeline
+        enum OpenResult {
+            Opened(V4l2rCaptureStream),
+            NoSignal(SignalStatus),
+            Fatal,
+        }
+
+        /// Helper: try to (re)open the capture stream.
+        fn open_or_retry(
+            device_path: &std::path::Path,
+            config: &SharedVideoPipelineConfig,
+            buffer_count: u32,
+            bridge_ctx: BridgeContext,
+        ) -> OpenResult {
+            match V4l2rCaptureStream::open_with_bridge(
+                device_path,
+                config.resolution,
+                config.input_format,
+                config.fps,
+                buffer_count.max(1),
+                Duration::from_secs(2),
+                bridge_ctx,
+            ) {
+                Ok(s) => OpenResult::Opened(s),
+                Err(AppError::CaptureNoSignal { kind }) => {
+                    debug!("Capture soft-restart: still no signal ({})", kind);
+                    OpenResult::NoSignal(
+                        SignalStatus::from_str(&kind).unwrap_or(SignalStatus::NoSignal),
+                    )
+                }
+                Err(e) => {
+                    error!("Capture soft-restart failed: {}", e);
+                    OpenResult::Fatal
+                }
+            }
+        }
 
         let mut no_subscribers_since: Option<Instant> = None;
         let grace_period = Duration::from_secs(AUTO_STOP_GRACE_PERIOD_SECS);
@@ -569,16 +866,243 @@ impl SharedVideoPipeline {
                     no_subscribers_since = None;
                 }
 
+                // ── No usable stream? Try to (re)open, back off on failure. ──
+                if stream.is_none() {
+                    match open_or_retry(&device_path, &config, buffer_count, bridge_ctx.clone()) {
+                        OpenResult::Opened(new_stream) => {
+                            let new_res = new_stream.resolution();
+                            let new_fmt = new_stream.format();
+                            let new_stride = new_stream.stride();
+
+                            // Pre-probe was skipped (no signal at pipeline start) but the
+                            // encoder was sized to saved settings — if DV timings now
+                            // disagree, we cannot encode until WebRTC resyncs dimensions.
+                            if initial_geometry.is_none()
+                                && (new_res != config.resolution || new_fmt != config.input_format)
+                            {
+                                info!(
+                                    "Deferred capture open is {}x{} {:?} but encoder expects {}x{} {:?} — stopping for dimension resync",
+                                    new_res.width,
+                                    new_res.height,
+                                    new_fmt,
+                                    config.resolution.width,
+                                    config.resolution.height,
+                                    config.input_format
+                                );
+                                pipeline.notify_state(PipelineStateNotification::device_busy(
+                                    "config_changing",
+                                ));
+                                *pipeline.pending_sync_geometry.lock() =
+                                    Some((new_res, new_fmt));
+                                let _ = pipeline.running.send(false);
+                                pipeline.running_flag.store(false, Ordering::Release);
+                                let _ = frame_seq_tx.send(sequence.wrapping_add(1));
+                                break;
+                            }
+
+                            // If this is the very first successful open,
+                            // record it and run normally. Otherwise check
+                            // for a geometry change — the encoder thread
+                            // is pinned to the original geometry, so a
+                            // change requires tearing the pipeline down
+                            // and letting the upper layer rebuild.
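The `match initial_geometry` that follows handles the geometry-change case; the overall contract of `open_or_retry` drives a retry loop shaped like this sketch (stand-in types, no V4L2, thresholds illustrative):

```rust
// Shape of the reopen loop that the three `OpenResult` outcomes drive;
// `Stream` and `open` are stand-ins, and the thresholds are illustrative.
struct Stream;

enum OpenResult {
    Opened(Stream),
    NoSignal,
    Fatal,
}

// Pretend the source comes back on the third attempt.
fn open(attempt: u32) -> OpenResult {
    if attempt < 3 {
        OpenResult::NoSignal
    } else {
        OpenResult::Opened(Stream)
    }
}

fn main() {
    let mut attempt: u32 = 0;
    let stream = loop {
        match open(attempt) {
            OpenResult::Opened(s) => break Some(s), // run the capture loop
            OpenResult::NoSignal => {
                attempt += 1; // back off, then retry
                if attempt > 60 {
                    break None; // give-up threshold
                }
            }
            OpenResult::Fatal => break None, // tear the pipeline down
        }
    };
    assert!(stream.is_some());
}
```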
+ match initial_geometry { + Some((orig_res, orig_fmt)) + if orig_res != new_res || orig_fmt != new_fmt => + { + info!( + "Capture soft-restart detected geometry change \ + {:?}/{:?} -> {:?}/{:?}, stopping pipeline for \ + encoder rebuild", + orig_res, orig_fmt, new_res, new_fmt + ); + pipeline.notify_state( + PipelineStateNotification::device_busy( + "config_changing", + ), + ); + *pipeline.pending_sync_geometry.lock() = + Some((new_res, new_fmt)); + let _ = pipeline.running.send(false); + pipeline.running_flag.store(false, Ordering::Release); + let _ = frame_seq_tx.send(sequence.wrapping_add(1)); + break; + } + _ => {} + } + + if initial_geometry.is_none() { + initial_geometry = Some((new_res, new_fmt)); + } + resolution = new_res; + pixel_format = new_fmt; + stride = new_stride; + stream = Some(new_stream); + consecutive_timeouts = 0; + info!( + "Capture stream (re)opened: {}x{} {:?} stride={}", + resolution.width, resolution.height, pixel_format, stride + ); + } + OpenResult::NoSignal(status) => { + consecutive_timeouts = + consecutive_timeouts.saturating_add(1); + if consecutive_timeouts >= CAPTURE_TIMEOUT_STOP_THRESHOLD { + warn!( + "Capture soft-restart gave up after {} attempts, \ + stopping pipeline", + consecutive_timeouts + ); + let _ = pipeline.running.send(false); + pipeline.running_flag.store(false, Ordering::Release); + let _ = frame_seq_tx.send(sequence.wrapping_add(1)); + break; + } + let wait_ms = CSI_BRIDGE_NOSIGNAL_INTERVAL_MS; + pipeline.notify_state(PipelineStateNotification::no_signal( + status, + Some(wait_ms), + )); + std::thread::sleep(Duration::from_millis(wait_ms)); + continue; + } + OpenResult::Fatal => { + let _ = pipeline.running.send(false); + pipeline.running_flag.store(false, Ordering::Release); + let _ = frame_seq_tx.send(sequence.wrapping_add(1)); + break; + } + } + } + let mut owned = buffer_pool.take(MIN_CAPTURE_FRAME_SIZE); - let meta = match stream.next_into(&mut owned) { + let next_result = stream + .as_mut() + .expect("stream is Some above") + .next_into(&mut owned); + let meta = match next_result { Ok(meta) => { consecutive_timeouts = 0; + pipeline.notify_state(PipelineStateNotification::streaming()); meta } Err(e) => { + // V4L2 driver reported V4L2_EVENT_SOURCE_CHANGE. + // The current capture is effectively invalidated: + // drop the stream so the next iteration re-opens + // via a fresh DV_TIMINGS probe. This is the fast + // path for source-side resolution switches on + // RK628 / rkcif — sub-second recovery vs. the ~8 s + // timeout fallback. 
+ if is_source_changed_error(&e) { + info!( + "Capture reported SOURCE_CHANGE — \ + dropping stream for immediate re-open" + ); + consecutive_timeouts = 0; + stream = None; + continue; + } if e.kind() == std::io::ErrorKind::TimedOut { consecutive_timeouts = consecutive_timeouts.saturating_add(1); - warn!("Capture timeout - no signal?"); + let probe_result = { + let sr = stream.as_mut().expect("stream is Some above"); + sr.probe_bridge_signal_with_timeout( + csi_bridge::RK628_SUBDEV_PROBE_TIMEOUT, + ) + }; + match probe_result { + Some(ProbeResult::Locked(mode)) => { + let probed_resolution = + Resolution::new(mode.width, mode.height); + if probed_resolution == resolution { + info!( + "Capture timeout but bridge is locked at {}x{} — soft-restarting capture without encoder rebuild", + probed_resolution.width, + probed_resolution.height + ); + } else { + info!( + "Capture timeout probe detected geometry change {}x{} -> {}x{} — soft-restarting capture for encoder rebuild", + resolution.width, + resolution.height, + probed_resolution.width, + probed_resolution.height + ); + pipeline.notify_state( + PipelineStateNotification::device_busy( + "config_changing", + ), + ); + } + consecutive_timeouts = 0; + stream = None; + continue; + } + Some(other) => { + let status = + other.as_status().unwrap_or(SignalStatus::NoSignal); + warn!( + "Capture timeout probe reports no signal ({})", + status.as_str() + ); + pipeline.notify_state( + PipelineStateNotification::no_signal( + status, + Some(Duration::from_secs(2).as_millis() as u64), + ), + ); + // Drop capture so RK628 / rkcif can release the queue, + // then poll subdev on a fresh fd until timings lock (or + // timeout). Avoids sitting on DQBUF 2s × N with a dead + // stream while `v4l2-ctl --query-dv-timings` already shows + // a real mode. + stream = None; + consecutive_timeouts = 0; + if bridge_ctx.has_subdev() + && matches!( + other, + ProbeResult::NoSignal + | ProbeResult::NoSync + | ProbeResult::OutOfRange + ) + { + poll_bridge_subdev_after_no_signal( + &bridge_ctx, + &pipeline, + ); + } + continue; + } + None if bridge_ctx.has_subdev() => { + warn!( + "DV-timings probe timed out or failed — forcing stream re-open (RK628 / rkcif)" + ); + consecutive_timeouts = 0; + stream = None; + poll_bridge_subdev_after_no_signal(&bridge_ctx, &pipeline); + continue; + } + None => { + warn!("Capture timeout - no signal?"); + } + } + + if consecutive_timeouts + >= CAPTURE_TIMEOUT_SOFT_RESTART_THRESHOLD + { + // Drop the stream so the next loop + // iteration re-opens via the DV-timings + // probe. This catches source-side + // resolution changes in ~6 s without + // taking the encoder down. + warn!( + "Capture timed out {} consecutive times, \ + closing stream for soft-restart", + consecutive_timeouts + ); + stream = None; + continue; + } if consecutive_timeouts == CAPTURE_TIMEOUT_RESTART_THRESHOLD { warn!( @@ -599,6 +1123,20 @@ impl SharedVideoPipeline { } } else { consecutive_timeouts = 0; + // EIO (5) / EPIPE (32) in next_into generally + // mean the source glitched mid-stream. + // Tear down the stream and let the open loop + // re-probe via DV_TIMINGS — same logic as + // timeouts, just triggered earlier. 
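The `matches!` on the next line keys off raw errno values; the split this diff applies in both capture loops (device gone vs. transient source glitch) can be stated as a small classifier (a sketch; errno numbers per errno(3)):

```rust
// Sketch of the errno split used in the capture loops: ENXIO/ENODEV/
// ESHUTDOWN mean the device node is gone, while EIO/EPIPE are treated
// as a transient source glitch worth a soft-restart.
#[derive(Debug, PartialEq)]
enum CaptureErrorClass {
    DeviceLost,
    TransientSignal,
    Other,
}

fn classify(raw_os_error: Option<i32>) -> CaptureErrorClass {
    match raw_os_error {
        // ENXIO (6), ENODEV (19), ESHUTDOWN (108)
        Some(6) | Some(19) | Some(108) => CaptureErrorClass::DeviceLost,
        // EIO (5), EPIPE (32)
        Some(5) | Some(32) => CaptureErrorClass::TransientSignal,
        _ => CaptureErrorClass::Other,
    }
}

fn main() {
    assert_eq!(classify(Some(19)), CaptureErrorClass::DeviceLost);
    assert_eq!(classify(Some(5)), CaptureErrorClass::TransientSignal);
    assert_eq!(classify(None), CaptureErrorClass::Other);
}
```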
+ if matches!(e.raw_os_error(), Some(5) | Some(32)) { + warn!( + "Capture transient error ({}), closing stream for \ + soft-restart", + e + ); + stream = None; + continue; + } let key = classify_capture_error(&e); if capture_error_throttler.should_log(&key) { let suppressed = diff --git a/src/video/stream_manager.rs b/src/video/stream_manager.rs index 7ffe755e..4bfc8bdd 100644 --- a/src/video/stream_manager.rs +++ b/src/video/stream_manager.rs @@ -38,8 +38,8 @@ use crate::hid::HidController; use crate::stream::MjpegStreamHandler; use crate::video::codec_constraints::StreamCodecConstraints; use crate::video::format::{PixelFormat, Resolution}; -use crate::video::is_rk_hdmirx_device; -use crate::video::streamer::{Streamer, StreamerState}; +use crate::video::is_csi_hdmi_bridge; +use crate::video::streamer::{Streamer, StreamerStats, StreamerState}; use crate::webrtc::WebRtcStreamer; /// Video stream manager configuration @@ -353,8 +353,17 @@ impl VideoStreamManager { .update_video_config(resolution, format, fps) .await; if let Some(device_path) = device_path { + // Resolve the paired subdev so the WebRTC pipeline can run the + // RK628 STREAMON gate + SOURCE_CHANGE polling identically to the + // MJPEG path. See `csi_bridge::discover_subdev_for_video`. + let (subdev_path, bridge_kind) = self + .streamer + .current_device() + .await + .map(|d| (d.subdev_path.clone(), d.bridge_kind.clone())) + .unwrap_or((None, None)); self.webrtc_streamer - .set_capture_device(device_path, jpeg_quality) + .set_capture_device(device_path, jpeg_quality, subdev_path, bridge_kind) .await; } else { warn!("No capture device configured while syncing WebRTC capture source"); @@ -431,7 +440,7 @@ impl VideoStreamManager { device.formats.iter().map(|f| f.format).collect(); // If current format is not MJPEG and device supports MJPEG, switch to it - if !is_rk_hdmirx_device(&device) + if !is_csi_hdmi_bridge(&device) && current_format != PixelFormat::Mjpeg && available_formats.contains(&PixelFormat::Mjpeg) { @@ -550,8 +559,14 @@ impl VideoStreamManager { } if let Some(device_path) = device_path { info!("Configuring direct capture for WebRTC after config change"); + let (subdev_path, bridge_kind) = self + .streamer + .current_device() + .await + .map(|d| (d.subdev_path.clone(), d.bridge_kind.clone())) + .unwrap_or((None, None)); self.webrtc_streamer - .set_capture_device(device_path, jpeg_quality) + .set_capture_device(device_path, jpeg_quality, subdev_path, bridge_kind) .await; } else { warn!("No capture device configured for WebRTC after config change"); @@ -610,7 +625,7 @@ impl VideoStreamManager { /// Get video device info for device_info event pub async fn get_video_info(&self) -> VideoDeviceInfo { - let stats = self.streamer.stats().await; + let stats = self.stats().await; let state = self.streamer.state().await; let device = self.streamer.current_device().await; let mode = self.mode.read().await.clone(); @@ -636,7 +651,7 @@ impl VideoStreamManager { config_changing: self.streamer.is_config_changing(), error: if state == StreamerState::Error { Some("Video stream error".to_string()) - } else if state == StreamerState::NoSignal { + } else if state.is_no_signal_like() { Some("No video signal".to_string()) } else { None @@ -687,8 +702,24 @@ impl VideoStreamManager { } /// Get streamer statistics - pub async fn stats(&self) -> crate::video::streamer::StreamerStats { - self.streamer.stats().await + /// + /// In WebRTC mode, resolution/format/target_fps/fps reflect + /// [`WebRtcStreamer`]'s config (updated after DV negotiation 
/ geometry sync), + /// not only the MJPEG [`Streamer`] snapshot — so `/api/stream/status` matches + /// what the shared encoder actually uses. + pub async fn stats(&self) -> StreamerStats { + let mut s = self.streamer.stats().await; + if *self.mode.read().await == StreamMode::WebRTC { + let (res, fmt, tgt_fps) = self.webrtc_streamer.current_video_geometry().await; + s.format = Some(fmt.to_string()); + s.resolution = Some((res.width, res.height)); + s.target_fps = tgt_fps; + if let Some(ps) = self.webrtc_streamer.pipeline_stats().await { + s.fps = ps.current_fps; + } + s.clients = self.webrtc_streamer.session_count().await as u64; + } + s } /// Check if config is being changed diff --git a/src/video/streamer.rs b/src/video/streamer.rs index 572d8907..182a4a77 100644 --- a/src/video/streamer.rs +++ b/src/video/streamer.rs @@ -5,21 +5,22 @@ use std::collections::HashMap; use std::path::PathBuf; -use std::sync::atomic::{AtomicBool, AtomicU32, Ordering}; +use std::sync::atomic::{AtomicBool, AtomicU32, AtomicU64, Ordering}; use std::sync::Arc; use std::time::Duration; use tokio::sync::RwLock; use tracing::{debug, error, info, trace, warn}; -use super::device::{enumerate_devices, find_best_device, VideoDevice, VideoDeviceInfo}; +use super::csi_bridge; +use super::device::{enumerate_devices, find_best_device, parse_bridge_kind, VideoDevice, VideoDeviceInfo}; use super::format::{PixelFormat, Resolution}; use super::frame::{FrameBuffer, FrameBufferPool, VideoFrame}; -use super::is_rk_hdmirx_device; +use super::is_csi_hdmi_bridge; use crate::error::{AppError, Result}; use crate::events::{EventBus, SystemEvent}; use crate::stream::MjpegStreamHandler; use crate::utils::LogThrottler; -use crate::video::v4l2r_capture::V4l2rCaptureStream; +use crate::video::v4l2r_capture::{is_source_changed_error, BridgeContext, V4l2rCaptureStream}; /// Minimum valid frame size for capture const MIN_CAPTURE_FRAME_SIZE: usize = 128; @@ -53,7 +54,7 @@ impl Default for StreamerConfig { } } -/// Streamer state +/// Fine-grained capture state; [`external_state`] maps to UI wire names. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum StreamerState { /// Not initialized @@ -62,14 +63,83 @@ pub enum StreamerState { Ready, /// Actively streaming Streaming, - /// No video signal + /// No video signal (generic / source not detected) NoSignal, + /// HDMI cable not connected (DV_RX_POWER_PRESENT = false or ENOLINK) + NoCable, + /// TMDS signal present but timings not locked (ENOLCK) + NoSync, + /// Source timings are outside of what the capture hardware supports (ERANGE) + OutOfRange, /// Error occurred Error, /// Device was lost (unplugged) DeviceLost, /// Device is being recovered (reconnecting) Recovering, + Busy, +} + +impl StreamerState { + pub fn as_str(self) -> &'static str { + match self { + StreamerState::Uninitialized => "uninitialized", + StreamerState::Ready => "ready", + StreamerState::Streaming => "streaming", + StreamerState::NoSignal => "no_signal", + StreamerState::NoCable => "no_cable", + StreamerState::NoSync => "no_sync", + StreamerState::OutOfRange => "out_of_range", + StreamerState::Error => "error", + StreamerState::DeviceLost => "device_lost", + StreamerState::Recovering => "recovering", + StreamerState::Busy => "device_busy", + } + } + + /// Parse a state string as produced by [`StreamerState::as_str`]. 
+ pub fn from_str(s: &str) -> Option { + Some(match s { + "uninitialized" => StreamerState::Uninitialized, + "ready" => StreamerState::Ready, + "streaming" => StreamerState::Streaming, + "no_signal" => StreamerState::NoSignal, + "no_cable" => StreamerState::NoCable, + "no_sync" => StreamerState::NoSync, + "out_of_range" => StreamerState::OutOfRange, + "error" => StreamerState::Error, + "device_lost" => StreamerState::DeviceLost, + "recovering" => StreamerState::Recovering, + "device_busy" | "busy" => StreamerState::Busy, + _ => return None, + }) + } + + pub fn is_no_signal_like(self) -> bool { + matches!( + self, + StreamerState::NoSignal + | StreamerState::NoCable + | StreamerState::NoSync + | StreamerState::OutOfRange + ) + } + + pub fn external_state(self) -> (&'static str, Option<&'static str>) { + match self { + StreamerState::Streaming => ("streaming", None), + StreamerState::Ready => ("ready", None), + StreamerState::Uninitialized => ("uninitialized", None), + StreamerState::Error => ("error", None), + StreamerState::NoSignal => ("no_signal", Some("no_signal")), + StreamerState::NoCable => ("no_signal", Some("no_cable")), + StreamerState::NoSync => ("no_signal", Some("no_sync")), + StreamerState::OutOfRange => ("no_signal", Some("out_of_range")), + StreamerState::DeviceLost => ("device_lost", Some("device_lost")), + StreamerState::Recovering => ("device_lost", Some("recovering")), + StreamerState::Busy => ("device_busy", None), + } + } } /// Video streamer service @@ -85,8 +155,8 @@ pub struct Streamer { current_fps: AtomicU32, /// Event bus for broadcasting state changes (optional) events: RwLock>>, - /// Last published state (for change detection) - last_published_state: RwLock>, + last_published_state: RwLock, Option)>>, + next_retry_ms: AtomicU64, /// Flag to indicate config is being changed (prevents auto-start during config change) config_changing: std::sync::atomic::AtomicBool, /// Flag to indicate background tasks (stats, cleanup, monitor) have been started @@ -117,6 +187,7 @@ impl Streamer { current_fps: AtomicU32::new(0), events: RwLock::new(None), last_published_state: RwLock::new(None), + next_retry_ms: AtomicU64::new(0), config_changing: std::sync::atomic::AtomicBool::new(false), background_tasks_started: std::sync::atomic::AtomicBool::new(false), recovery_retry_count: std::sync::atomic::AtomicU32::new(0), @@ -140,6 +211,7 @@ impl Streamer { current_fps: AtomicU32::new(0), events: RwLock::new(None), last_published_state: RwLock::new(None), + next_retry_ms: AtomicU64::new(0), config_changing: std::sync::atomic::AtomicBool::new(false), background_tasks_started: std::sync::atomic::AtomicBool::new(false), recovery_retry_count: std::sync::atomic::AtomicU32::new(0), @@ -149,7 +221,6 @@ impl Streamer { }) } - /// Get current state as SystemEvent pub async fn current_state_event(&self) -> SystemEvent { let state = *self.state.read().await; let device = self @@ -158,21 +229,21 @@ impl Streamer { .await .as_ref() .map(|d| d.path.display().to_string()); + let (external, reason) = state.external_state(); + let next = self.next_retry_ms.load(Ordering::Relaxed); SystemEvent::StreamStateChanged { - state: match state { - StreamerState::Uninitialized => "uninitialized".to_string(), - StreamerState::Ready => "ready".to_string(), - StreamerState::Streaming => "streaming".to_string(), - StreamerState::NoSignal => "no_signal".to_string(), - StreamerState::Error => "error".to_string(), - StreamerState::DeviceLost => "device_lost".to_string(), - StreamerState::Recovering => 
"recovering".to_string(), - }, + state: external.to_string(), device, + reason: reason.map(|s| s.to_string()), + next_retry_ms: if next == 0 { None } else { Some(next) }, } } + pub fn set_next_retry_ms(&self, ms: u64) { + self.next_retry_ms.store(ms, Ordering::Relaxed); + } + /// Set event bus for broadcasting state changes pub async fn set_event_bus(&self, events: Arc) { *self.events.write().await = Some(events); @@ -264,6 +335,13 @@ impl Streamer { }) .await; + // Surface a "device busy" state so the frontend can render a + // "please wait" overlay for the (short) duration of the config + // change. The capture loop itself will flip to `Streaming` once + // the first frame of the new geometry arrives. + *self.state.write().await = StreamerState::Busy; + self.publish_event(self.current_state_event().await).await; + let devices = enumerate_devices()?; let device = devices .into_iter() @@ -369,12 +447,36 @@ impl Streamer { device: &VideoDeviceInfo, preferred: PixelFormat, ) -> Result { - if is_rk_hdmirx_device(device) { - return device + if is_csi_hdmi_bridge(device) { + if !device.has_signal { + info!( + "select_format: CSI bridge no signal, keeping preferred {:?}", + preferred + ); + return Ok(preferred); + } + // Prefer the user-configured format if the device actually supports + // it; otherwise fall back to the highest-priority format (formats + // are pre-sorted by PixelFormat::priority(), e.g. NV12 > YUYV for rkcif/rk_hdmirx). + if device.formats.iter().any(|f| f.format == preferred) { + info!( + "select_format: CSI bridge with signal, using preferred {:?}", + preferred + ); + return Ok(preferred); + } + let fmt = device .formats .first() .map(|f| f.format) - .ok_or_else(|| AppError::VideoError("No supported formats found".to_string())); + .ok_or_else(|| AppError::VideoError("No supported formats found".to_string()))?; + info!( + "select_format: CSI bridge with signal, preferred {:?} unavailable, selected {:?} from {:?}", + preferred, + fmt, + device.formats.iter().map(|f| f.format).collect::>() + ); + return Ok(fmt); } // Check if preferred format is available @@ -397,18 +499,32 @@ impl Streamer { format: &PixelFormat, preferred: Resolution, ) -> Result { + if is_csi_hdmi_bridge(device) && !device.has_signal { + info!( + "select_resolution: CSI bridge no signal, keeping preferred {}", + preferred + ); + return Ok(preferred); + } + let format_info = device .formats .iter() .find(|f| &f.format == format) .ok_or_else(|| AppError::VideoError("Format not found".to_string()))?; - if is_rk_hdmirx_device(device) { - return Ok(format_info + if is_csi_hdmi_bridge(device) { + let res = format_info .resolutions .first() .map(|r| r.resolution()) - .unwrap_or(preferred)); + .unwrap_or(preferred); + info!( + "select_resolution: CSI bridge with signal, selected {} (preferred {}, available {:?})", + res, preferred, + format_info.resolutions.iter().map(|r| format!("{}x{}", r.width, r.height)).collect::>() + ); + return Ok(res); } // Check if preferred resolution is available @@ -451,7 +567,8 @@ impl Streamer { tokio::time::sleep(std::time::Duration::from_millis(100)).await; let state = *self.state.read().await; match state { - StreamerState::Streaming | StreamerState::NoSignal => return Ok(()), + StreamerState::Streaming => return Ok(()), + s if s.is_no_signal_like() => return Ok(()), StreamerState::Error | StreamerState::DeviceLost => { return Err(AppError::VideoError( "Failed to restart capture".to_string(), @@ -631,14 +748,26 @@ impl Streamer { const RETRY_DELAY_MS: u64 = 200; const 
IDLE_STOP_DELAY_SECS: u64 = 5; const BUFFER_COUNT: u32 = 2; - /// After this many seconds without signal, close+re-open the device. - const NOSIGNAL_SOFT_RESTART_SECS: u64 = 8; - /// Placeholder frame re-send interval while in NoSignal state (iterations of 100 ms). - const NOSIGNAL_PLACEHOLDER_INTERVAL: u32 = 10; // every ~1 s + /// Initial back-off after signal loss before the first soft restart. + /// + /// PiKVM/ustreamer drops to sub-second recovery because it subscribes to + /// `V4L2_EVENT_SOURCE_CHANGE`; lacking that (for now), we bound how long + /// the user has to stare at a placeholder after a source-side resolution + /// change by driving a soft-restart at 1 s, then 2 s, 4 s, …, 8 s. + const NOSIGNAL_SOFT_RESTART_INITIAL_SECS: u64 = 1; + const NOSIGNAL_SOFT_RESTART_MAX_SECS: u64 = 8; let handle = tokio::runtime::Handle::current(); let mut last_state = StreamerState::Streaming; + // Compute the current soft-restart back-off window (in seconds) + // for the exponential ladder 1 s → 2 s → 4 s → 8 s (capped). + let backoff_secs = |count: u32| -> u64 { + NOSIGNAL_SOFT_RESTART_INITIAL_SECS + .saturating_mul(2u64.pow(count.min(3))) + .min(NOSIGNAL_SOFT_RESTART_MAX_SECS) + }; + let mut set_state = |new_state: StreamerState| { if new_state != last_state { handle.block_on(async { @@ -649,9 +778,32 @@ impl Streamer { } }; + // Helper: drop the MJPEG online flag so any connected HTTP clients + // exit their streaming tasks cleanly. Replaces the old "push a + // placeholder JPEG every second" scheme — the frontend now renders + // its own overlay from `stream.state_changed` and doesn't need a + // fake image to keep the connection alive. Idempotent. + let go_offline = || { + self.mjpeg_handler.set_offline(); + }; + + // Helper: record the back-off window on the streamer so it rides + // along on the next `stream.state_changed` event; cleared when we + // return to `Streaming`. + let set_retry = |ms: u64| { + self.next_retry_ms.store(ms, Ordering::Relaxed); + }; + // How many soft-restart cycles have been attempted (for exponential back-off). let mut no_signal_restart_count: u32 = 0; + // Last (resolution, format, fps) combination for which we emitted a + // `StreamConfigApplied` event. Used to de-duplicate the event across + // soft-restarts that produce the exact same geometry (e.g. a spurious + // single-frame timeout on a stable source) — the frontend would + // otherwise re-layout the `` on every glitch. + let mut last_applied: Option<(u32, u32, PixelFormat, u32)> = None; + 'session: loop { if self.direct_stop.load(Ordering::Relaxed) { break 'session; @@ -661,6 +813,62 @@ impl Streamer { // call (from a previous soft-restart or recovery) is reflected here. let config = handle.block_on(async { self.config.read().await.clone() }); + // ── Resolve the CSI bridge subdev (if any) for this video ────────── + // + // The subdev is where QUERY_DV_TIMINGS and SOURCE_CHANGE events + // actually live on RK628-on-rkcif. It's stored in + // `VideoDeviceInfo` during enumeration; we re-read it here + // rather than caching on Streamer so a hot-plug recovery picks + // up a possibly-different subdev path. + let bridge_ctx = handle.block_on(async { + self.current_device + .read() + .await + .as_ref() + .map(|info| { + BridgeContext::from_parts( + info.subdev_path.clone(), + parse_bridge_kind(info.bridge_kind.as_deref()), + ) + }) + .unwrap_or_default() + }); + + // ── STREAMON gate: for CSI bridges with a subdev, refuse to + // open the video node when the subdev reports no signal. 
+ // On RK628 this prevents a kernel null-pointer deref. + if let Some(subdev_path) = bridge_ctx.subdev_path.as_ref() { + match probe_subdev_signal(subdev_path, bridge_ctx.kind) { + Some(crate::video::SignalStatus::NoCable) + | Some(crate::video::SignalStatus::NoSync) + | Some(crate::video::SignalStatus::NoSignal) + | Some(crate::video::SignalStatus::OutOfRange) => { + let status = probe_subdev_signal(subdev_path, bridge_ctx.kind) + .unwrap_or(crate::video::SignalStatus::NoSignal); + let wait_secs = backoff_secs(no_signal_restart_count); + debug!( + "Pre-STREAMON gate: subdev {:?} reports {:?} — \ + waiting for SOURCE_CHANGE (<= {}s) before opening {:?}", + subdev_path, status, wait_secs, device_path + ); + set_retry(wait_secs.saturating_mul(1000)); + go_offline(); + set_state(status.into()); + // Wait for SOURCE_CHANGE or timeout before retrying. + // Opens the subdev just for the poll — cheap and + // does NOT touch the video node. + wait_subdev_for_source_change( + subdev_path, + &self.direct_stop, + Duration::from_secs(wait_secs), + ); + no_signal_restart_count = no_signal_restart_count.saturating_add(1); + continue 'session; + } + _ => {} // Locked (None from as_status) or unknown — proceed + } + } + // ── Open the capture stream ───────────────────────────────────────── let mut stream_opt: Option = None; let mut last_error: Option = None; @@ -671,18 +879,39 @@ impl Streamer { return; } - match V4l2rCaptureStream::open( + match V4l2rCaptureStream::open_with_bridge( &device_path, config.resolution, config.format, config.fps, BUFFER_COUNT, Duration::from_secs(2), + bridge_ctx.clone(), ) { Ok(stream) => { stream_opt = Some(stream); break; } + Err(AppError::CaptureNoSignal { kind }) => { + // CSI bridge open-time DV-timings probe failed. + // Drop the HTTP stream so the frontend renders its + // "no signal" overlay, update the state with the + // fine-grained reason, and let the outer 'session + // loop back off before the next retry. + let status = crate::video::SignalStatus::from_str(&kind) + .unwrap_or(crate::video::SignalStatus::NoSignal); + debug!( + "CSI open probe reports no signal ({:?}), will soft-restart", + status + ); + set_retry( + backoff_secs(no_signal_restart_count).saturating_mul(1000), + ); + go_offline(); + set_state(status.into()); + last_error = Some(format!("CaptureNoSignal({})", kind)); + break; + } Err(e) => { let err_str = e.to_string(); if err_str.contains("busy") || err_str.contains("resource") { @@ -705,14 +934,33 @@ impl Streamer { let mut stream = match stream_opt { Some(stream) => stream, None => { - error!( - "Failed to open device {:?}: {}", - device_path, - last_error.unwrap_or_else(|| "unknown error".to_string()) + // If the open failed because of a no-signal condition, do + // *not* escalate to Error — instead keep the capture loop + // alive in NoSignal-like state and retry via the soft + // restart path. This lets CSI bridges recover on their + // own when the source comes back (resolution change, + // host reboot, HDMI cable re-plug). 
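The gate and the retries above all wait according to `backoff_secs`; in isolation, that ladder computes as follows (same arithmetic, restated as a free function), and the open-failure handling continues below:

```rust
// The soft-restart back-off ladder: 1 s, 2 s, 4 s, then 8 s forever
// after (exponent clamped, result capped), matching backoff_secs above.
fn backoff_secs(count: u32) -> u64 {
    const INITIAL: u64 = 1;
    const MAX: u64 = 8;
    INITIAL.saturating_mul(2u64.pow(count.min(3))).min(MAX)
}

fn main() {
    let ladder: Vec<u64> = (0..6).map(backoff_secs).collect();
    assert_eq!(ladder, vec![1, 2, 4, 8, 8, 8]);
}
```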
+ let was_no_signal = + handle.block_on(async { self.state().await }).is_no_signal_like(); + if !was_no_signal { + error!( + "Failed to open device {:?}: {}", + device_path, + last_error.unwrap_or_else(|| "unknown error".to_string()) + ); + self.mjpeg_handler.set_offline(); + set_state(StreamerState::Error); + break 'session; + } + + debug!( + "Open failed in NoSignal-like state, backing off before soft-restart" ); - self.mjpeg_handler.set_offline(); - set_state(StreamerState::Error); - break 'session; + let wait = backoff_secs(no_signal_restart_count); + set_retry(wait.saturating_mul(1000)); + std::thread::sleep(Duration::from_secs(wait)); + no_signal_restart_count = no_signal_restart_count.saturating_add(1); + continue 'session; } }; @@ -748,8 +996,6 @@ impl Streamer { // None = signal is present; Some(Instant) = when signal was first lost. let mut no_signal_since: Option = None; - // Counter for periodic placeholder pushes during NoSignal. - let mut nosignal_placeholder_counter: u32 = 0; // Whether the inner 'capture loop should trigger a soft restart. let mut need_soft_restart = false; @@ -780,43 +1026,37 @@ impl Streamer { let meta = match stream.next_into(&mut owned) { Ok(meta) => meta, Err(e) => { + if is_source_changed_error(&e) { + info!("Capture SOURCE_CHANGE — soft-restart for DV re-probe"); + set_retry( + backoff_secs(no_signal_restart_count).saturating_mul(1000), + ); + go_offline(); + set_state(StreamerState::NoSignal); + need_soft_restart = true; + break 'capture; + } if e.kind() == std::io::ErrorKind::TimedOut { if signal_present { signal_present = false; - // Don't call set_offline() – instead keep the MJPEG stream - // alive by pushing a placeholder frame so clients stay - // connected and see the "no signal" image. - self.mjpeg_handler.push_no_signal_placeholder(); + let wait = backoff_secs(no_signal_restart_count); + set_retry(wait.saturating_mul(1000)); + go_offline(); set_state(StreamerState::NoSignal); no_signal_since = Some(std::time::Instant::now()); self.current_fps.store(0, Ordering::Relaxed); fps_frame_count = 0; last_fps_time = std::time::Instant::now(); - nosignal_placeholder_counter = 0; - } else { - // Already in NoSignal – re-send placeholder periodically so - // the HTTP keepalive timer does not expire. - nosignal_placeholder_counter = - nosignal_placeholder_counter.wrapping_add(1); - if nosignal_placeholder_counter >= NOSIGNAL_PLACEHOLDER_INTERVAL { - nosignal_placeholder_counter = 0; - self.mjpeg_handler.push_no_signal_placeholder(); - } - - // Soft-restart after exponential back-off. 
- if let Some(since) = no_signal_since { - let backoff_secs = NOSIGNAL_SOFT_RESTART_SECS - .saturating_mul(2u64.pow(no_signal_restart_count.min(2))) - .min(30); - if since.elapsed().as_secs() >= backoff_secs { - info!( - "NoSignal for {}s, attempting soft restart (attempt {})", - backoff_secs, - no_signal_restart_count + 1 - ); - need_soft_restart = true; - break 'capture; - } + } else if let Some(since) = no_signal_since { + let wait = backoff_secs(no_signal_restart_count); + if since.elapsed().as_secs() >= wait { + info!( + "NoSignal for {}s, attempting soft restart (attempt {})", + wait, + no_signal_restart_count + 1 + ); + need_soft_restart = true; + break 'capture; } } @@ -824,18 +1064,24 @@ impl Streamer { continue 'capture; } - let is_device_lost = match e.raw_os_error() { - Some(6) => true, // ENXIO - Some(19) => true, // ENODEV - Some(5) => true, // EIO - Some(32) => true, // EPIPE - Some(108) => true, // ESHUTDOWN - _ => false, - }; + // Classify the capture error. + // + // Only errnos that mean "the device file is gone" + // (ENODEV, ENXIO, ESHUTDOWN) trigger the full + // DeviceLost → recovery path. + // + // EIO / EPIPE are common transient errors on rkcif + // when the source glitches or re-locks; those are + // treated as NoSignal + soft-restart so we recover + // in ~1 s instead of the 1 s recovery-poll loop. + let os_err = e.raw_os_error(); + let is_device_lost = matches!(os_err, Some(6) | Some(19) | Some(108)); + let is_transient_signal_error = matches!(os_err, Some(5) | Some(32)); if is_device_lost { error!("Video device lost: {} - {}", device_path.display(), e); - self.mjpeg_handler.set_offline(); + go_offline(); + set_retry(0); handle.block_on(async { *self.last_lost_device.write().await = Some(device_path.display().to_string()); @@ -851,6 +1097,20 @@ impl Streamer { break 'capture; } + if is_transient_signal_error { + warn!( + "Capture transient error ({}): treating as NoSignal + soft-restart", + e + ); + set_retry( + backoff_secs(no_signal_restart_count).saturating_mul(1000), + ); + go_offline(); + set_state(StreamerState::NoSignal); + need_soft_restart = true; + break 'capture; + } + let key = classify_capture_error(&e); if capture_error_throttler.should_log(&key) { let suppressed = suppressed_capture_errors.remove(&key).unwrap_or(0); @@ -893,8 +1153,28 @@ impl Streamer { signal_present = true; no_signal_since = None; no_signal_restart_count = 0; - // Stream was kept online (placeholder pushes), just update state. + set_retry(0); set_state(StreamerState::Streaming); + + let fps_val = config.fps; + let current = (resolution.width, resolution.height, pixel_format, fps_val); + if last_applied != Some(current) { + last_applied = Some(current); + let dp = device_path.display().to_string(); + let fmt = format!("{:?}", pixel_format); + let w = resolution.width; + let h = resolution.height; + handle.block_on(async { + self.publish_event(SystemEvent::StreamConfigApplied { + transition_id: None, + device: dp, + resolution: (w, h), + format: fmt, + fps: fps_val, + }) + .await; + }); + } } self.mjpeg_handler.update_frame(frame); @@ -923,12 +1203,30 @@ impl Streamer { break 'session; } - // ── Soft restart path ─────────────────────────────────────────────── no_signal_restart_count = no_signal_restart_count.saturating_add(1); - // Re-probe the device to pick up a changed resolution/format. match VideoDevice::open_readonly(&device_path).and_then(|d| d.info()) { Ok(device_info) => { + // Skip re-open while rkcif still reports placeholder (≤64²) geometry. 
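+                    // The gate below amounts to a tiny predicate (hypothetical helper —
+                    // the patch inlines it): anything missing or at/under 64×64, rkcif's
+                    // reset-state geometry, still counts as "no signal".
+                    //
+                    //     fn is_placeholder_geometry(res: Option<(u32, u32)>) -> bool {
+                    //         res.is_none() || matches!(res, Some((w, h)) if w <= 64 || h <= 64)
+                    //     }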
+ let probed_res = device_info + .formats + .first() + .and_then(|f| f.resolutions.first()) + .map(|r| (r.width, r.height)); + + if matches!(probed_res, Some((w, h)) if w <= 64 || h <= 64) + || probed_res.is_none() + { + warn!( + "Soft restart: probed resolution too small ({:?}), still no signal", + probed_res + ); + set_retry(2_000); + go_offline(); + std::thread::sleep(Duration::from_secs(2)); + continue 'session; + } + handle.block_on(async { let fmt; let res; @@ -971,19 +1269,12 @@ impl Streamer { self.current_fps.store(0, Ordering::Relaxed); } - /// Check if streaming (or in NoSignal state — capture thread is still running) + /// `Streaming` or any no-signal-like state (capture thread still alive). pub async fn is_streaming(&self) -> bool { - matches!( - self.state().await, - StreamerState::Streaming | StreamerState::NoSignal - ) + let s = self.state().await; + s == StreamerState::Streaming || s.is_no_signal_like() } - /// Re-probe a device and update the stored config/device info. - /// - /// Called during recovery or after a NoSignal soft restart so that a - /// resolution / format change on the source side is picked up before - /// the capture stream is re-opened. pub async fn re_init_device(self: &Arc, device_path: &str) -> Result<()> { let device = VideoDevice::open_readonly(device_path) .map_err(|e| AppError::VideoError(format!("Cannot open device for re-init: {}", e)))?; @@ -1030,30 +1321,27 @@ impl Streamer { } } - /// Publish event to event bus (if configured) - /// For StreamStateChanged events, only publishes if state actually changed (de-duplication) + /// Dedupes `StreamStateChanged` on `(state, reason, next_retry_ms)`. async fn publish_event(&self, event: SystemEvent) { if let Some(events) = self.events.read().await.as_ref() { - // For state change events, check if state actually changed - if let SystemEvent::StreamStateChanged { ref state, .. } = event { - let current_state = match state.as_str() { - "uninitialized" => StreamerState::Uninitialized, - "ready" => StreamerState::Ready, - "streaming" => StreamerState::Streaming, - "no_signal" => StreamerState::NoSignal, - "error" => StreamerState::Error, - "device_lost" => StreamerState::DeviceLost, - "recovering" => StreamerState::Recovering, - _ => StreamerState::Error, - }; - + if let SystemEvent::StreamStateChanged { + ref state, + ref reason, + next_retry_ms, + .. 
+ } = event + { + let key = (state.clone(), reason.clone(), next_retry_ms); let mut last_state = self.last_published_state.write().await; - if *last_state == Some(current_state) { - // State hasn't changed, skip publishing - trace!("Skipping duplicate stream state event: {}", state); + if last_state.as_ref() == Some(&key) { + trace!( + "Skipping duplicate stream state event: {} (reason={:?})", + state, + reason + ); return; } - *last_state = Some(current_state); + *last_state = Some(key); } events.publish(event); @@ -1143,8 +1431,12 @@ impl Streamer { ); } - // Wait before retry (1 second) - tokio::time::sleep(std::time::Duration::from_secs(1)).await; + let wait = if attempt == 1 { + std::time::Duration::from_millis(200) + } else { + std::time::Duration::from_secs(1) + }; + tokio::time::sleep(wait).await; // Check if device file exists let device_exists = std::path::Path::new(&device_path).exists(); @@ -1212,6 +1504,7 @@ impl Default for Streamer { current_fps: AtomicU32::new(0), events: RwLock::new(None), last_published_state: RwLock::new(None), + next_retry_ms: AtomicU64::new(0), config_changing: std::sync::atomic::AtomicBool::new(false), background_tasks_started: std::sync::atomic::AtomicBool::new(false), recovery_retry_count: std::sync::atomic::AtomicU32::new(0), @@ -1236,20 +1529,73 @@ pub struct StreamerStats { pub fps: f32, } +fn probe_subdev_signal( + subdev_path: &std::path::Path, + kind: Option, +) -> Option { + let fd = match csi_bridge::open_subdev(subdev_path) { + Ok(f) => f, + Err(e) => { + debug!("probe_subdev_signal: failed to open {:?}: {}", subdev_path, e); + return Some(crate::video::SignalStatus::NoSignal); + } + }; + let kind = kind.unwrap_or(csi_bridge::CsiBridgeKind::Unknown); + let probe = csi_bridge::probe_signal(&fd, kind); + probe.as_status() +} + +fn wait_subdev_for_source_change( + subdev_path: &std::path::Path, + direct_stop: &AtomicBool, + max_wait: Duration, +) { + let fd = match csi_bridge::open_subdev(subdev_path) { + Ok(f) => f, + Err(e) => { + debug!( + "wait_subdev_for_source_change: failed to open {:?}: {}", + subdev_path, e + ); + std::thread::sleep(max_wait.min(Duration::from_secs(1))); + return; + } + }; + if let Err(e) = csi_bridge::subscribe_source_change(&fd) { + debug!( + "wait_subdev_for_source_change: subscribe failed on {:?}: {}", + subdev_path, e + ); + } + let slice = Duration::from_millis(250); + let deadline = std::time::Instant::now() + max_wait; + while std::time::Instant::now() < deadline { + if direct_stop.load(Ordering::Relaxed) { + return; + } + let remaining = deadline.saturating_duration_since(std::time::Instant::now()); + let wait = remaining.min(slice); + match csi_bridge::wait_source_change(&fd, wait) { + Ok(true) => { + info!( + "Subdev SOURCE_CHANGE during no-signal wait, retrying open immediately" + ); + return; + } + Ok(false) => continue, + Err(e) => { + debug!("wait_source_change error on {:?}: {}", subdev_path, e); + return; + } + } + } +} + impl serde::Serialize for StreamerState { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, { - let s = match self { - StreamerState::Uninitialized => "uninitialized", - StreamerState::Ready => "ready", - StreamerState::Streaming => "streaming", - StreamerState::NoSignal => "no_signal", - StreamerState::Error => "error", - StreamerState::DeviceLost => "device_lost", - StreamerState::Recovering => "recovering", - }; - serializer.serialize_str(s) + serializer.serialize_str(self.as_str()) } } diff --git a/src/video/v4l2r_capture.rs 
b/src/video/v4l2r_capture.rs index 2a1b73a6..e93ceb4a 100644 --- a/src/video/v4l2r_capture.rs +++ b/src/video/v4l2r_capture.rs @@ -3,22 +3,37 @@ use std::fs::File; use std::io; use std::os::fd::AsFd; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::time::Duration; use nix::poll::{poll, PollFd, PollFlags, PollTimeout}; -use tracing::{debug, warn}; -use v4l2r::bindings::{v4l2_requestbuffers, v4l2_streamparm, v4l2_streamparm__bindgen_ty_1}; +use tracing::{debug, info, warn}; +use v4l2r::bindings::{ + v4l2_dv_timings, v4l2_requestbuffers, v4l2_streamparm, v4l2_streamparm__bindgen_ty_1, + V4L2_DV_BT_656_1120, +}; use v4l2r::ioctl::{ - self, Capabilities, Capability as V4l2rCapability, MemoryConsistency, PlaneMapping, QBufPlane, - QBuffer, QueryBuffer, V4l2Buffer, + self, Capabilities, Capability as V4l2rCapability, Event as V4l2Event, EventType, + MemoryConsistency, PlaneMapping, QBufPlane, QBuffer, QueryBuffer, QueryDvTimingsError, + SubscribeEventFlags, V4l2Buffer, }; use v4l2r::memory::{MemoryType, MmapHandle}; use v4l2r::nix::errno::Errno; use v4l2r::{Format as V4l2rFormat, PixelFormat as V4l2rPixelFormat, QueueType}; use crate::error::{AppError, Result}; +use crate::video::csi_bridge::{self, CsiBridgeKind, ProbeResult}; use crate::video::format::{PixelFormat, Resolution}; +use crate::video::SignalStatus; + +/// `io::Error` payload when the driver posts `V4L2_EVENT_SOURCE_CHANGE`. +pub const SOURCE_CHANGED_MARKER: &str = "v4l2_source_changed"; + +pub fn is_source_changed_error(err: &io::Error) -> bool { + err.get_ref() + .map(|inner| inner.to_string() == SOURCE_CHANGED_MARKER) + .unwrap_or(false) +} /// Metadata for a captured frame. #[derive(Debug, Clone, Copy)] @@ -27,6 +42,23 @@ pub struct CaptureMeta { pub sequence: u64, } +/// When set, DV ioctls use the subdev (rkcif: video node has no DV ioctls). +#[derive(Debug, Clone, Default)] +pub struct BridgeContext { + pub subdev_path: Option, + pub kind: Option, +} + +impl BridgeContext { + pub fn from_parts(subdev_path: Option, kind: Option) -> Self { + Self { subdev_path, kind } + } + + pub fn has_subdev(&self) -> bool { + self.subdev_path.is_some() + } +} + /// V4L2 capture stream backed by v4l2r ioctl. pub struct V4l2rCaptureStream { fd: File, @@ -36,9 +68,12 @@ pub struct V4l2rCaptureStream { stride: u32, timeout: Duration, mappings: Vec>, + subdev_fd: Option, + bridge_kind: Option, } impl V4l2rCaptureStream { + /// UVC: uses `resolution`. CSI bridges: DV-probe first; may return `CaptureNoSignal`. pub fn open( device_path: impl AsRef, resolution: Resolution, @@ -47,6 +82,69 @@ impl V4l2rCaptureStream { buffer_count: u32, timeout: Duration, ) -> Result { + Self::open_with_bridge( + device_path, + resolution, + format, + fps, + buffer_count, + timeout, + BridgeContext::default(), + ) + } + + /// With subdev: probe DV on subdev before opening video (RK628 safety); may ignore requested size. + pub fn open_with_bridge( + device_path: impl AsRef, + resolution: Resolution, + format: PixelFormat, + fps: u32, + buffer_count: u32, + timeout: Duration, + bridge: BridgeContext, + ) -> Result { + // Probe subdev before video open (RK628: no-signal must not reach capture STREAMON). 
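+        // Call-site sketch (device paths and buffer count are illustrative,
+        // not taken from this diff):
+        //
+        //     let bridge = BridgeContext::from_parts(
+        //         Some(PathBuf::from("/dev/v4l-subdev2")), // hypothetical subdev node
+        //         Some(CsiBridgeKind::Unknown),
+        //     );
+        //     let stream = V4l2rCaptureStream::open_with_bridge(
+        //         "/dev/video0", resolution, format, 30, 4,
+        //         Duration::from_secs(2), bridge,
+        //     )?;
+        //
+        // An empty `BridgeContext` (subdev_path = None) degrades to plain
+        // `open()` behaviour: no subdev probe, DV handling on the video node only.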
+ let mut subdev_fd_opt: Option = None; + let mut subdev_dv_mode: Option = None; + + if let Some(subdev_path) = bridge.subdev_path.as_ref() { + let subdev_fd = csi_bridge::open_subdev(subdev_path).map_err(|e| { + AppError::VideoError(format!( + "Failed to open CSI bridge subdev {:?}: {}", + subdev_path, e + )) + })?; + + let kind = bridge.kind.unwrap_or(CsiBridgeKind::Unknown); + match csi_bridge::probe_signal(&subdev_fd, kind) { + ProbeResult::Locked(mode) => { + info!( + "Subdev {:?} locked: {}x{} @ {}Hz", + subdev_path, mode.width, mode.height, mode.pixelclock + ); + csi_bridge::apply_dv_timings(&subdev_fd, mode.raw); + if let Err(e) = csi_bridge::subscribe_source_change(&subdev_fd) { + debug!("subdev SOURCE_CHANGE subscribe failed: {}", e); + } + subdev_dv_mode = Some(mode); + } + other => { + let status = other + .as_status() + .unwrap_or(SignalStatus::NoSignal); + debug!( + "Subdev {:?} reports no signal ({:?}) — refusing STREAMON", + subdev_path, status + ); + return Err(AppError::CaptureNoSignal { + kind: status.as_str().to_string(), + }); + } + } + subdev_fd_opt = Some(subdev_fd); + } + + // ── Phase 1: open the capture (video) node ───────────────────── let mut fd = File::options() .read(true) .write(true) @@ -56,6 +154,8 @@ impl V4l2rCaptureStream { let caps: V4l2rCapability = ioctl::querycap(&fd) .map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?; let caps_flags = caps.device_caps(); + let driver_name = caps.driver.to_string(); + let is_csi_bridge = is_csi_bridge_driver(&driver_name); // Prefer multi-planar capture when available, as it is required for some // devices/pixel formats (e.g. NV12 via VIDEO_CAPTURE_MPLANE). @@ -69,11 +169,50 @@ impl V4l2rCaptureStream { )); }; - let mut fmt: V4l2rFormat = ioctl::g_fmt(&fd, queue) - .map_err(|e| AppError::VideoError(format!("Failed to get device format: {}", e)))?; + // CSI/HDMI bridge without a subdev pairing (tc358743 on uvcvideo, + // rk_hdmirx on RK3588): probe DV timings on the video node, with + // the same no-signal gate as the subdev path. When we *do* have + // a subdev, reuse its already-probed mode to drive S_FMT. + let dv_mode = if let Some(mode) = subdev_dv_mode.as_ref() { + Some(DvTimingsMode { + width: mode.width, + height: mode.height, + fps: mode.fps, + }) + } else if is_csi_bridge { + Some(probe_and_apply_dv_timings(&fd)?) + } else { + None + }; - fmt.width = resolution.width; - fmt.height = resolution.height; + // rkcif + RK628: G_FMT is often 0×0 until the first S_FMT; G_FMT may + // also fail. With DV timings from the subdev, build the format (same as + // `v4l2-ctl --set-fmt-video=width=…,height=…`). + let mut fmt: V4l2rFormat = match ( + ioctl::g_fmt::(&fd, queue), + is_csi_bridge, + dv_mode.as_ref(), + ) { + (Ok(f), _, _) if f.width > 0 && f.height > 0 => f, + (_, true, Some(m)) => { + let fourcc = format.to_fourcc(); + V4l2rFormat::from((&fourcc, (m.width as usize, m.height as usize))) + } + (Ok(f), _, _) => f, + (Err(e), _, _) => { + return Err(AppError::VideoError(format!("Failed to get device format: {}", e))); + } + }; + + // Prefer the DV-timings-reported geometry for CSI bridges — the + // source, not the user config, dictates what the capture hardware + // will actually deliver. + let (target_w, target_h) = match dv_mode { + Some(DvTimingsMode { width, height, .. 
}) => (width, height), + None => (resolution.width, resolution.height), + }; + fmt.width = target_w; + fmt.height = target_h; fmt.pixelformat = V4l2rPixelFormat::from(&format.to_fourcc()); let actual_fmt: V4l2rFormat = ioctl::s_fmt(&mut fd, (queue, &fmt)) @@ -146,12 +285,33 @@ impl V4l2rCaptureStream { stride, timeout, mappings, + subdev_fd: subdev_fd_opt, + bridge_kind: bridge.kind, }; stream.queue_all_buffers()?; ioctl::streamon(&stream.fd, stream.queue) .map_err(|e| AppError::VideoError(format!("Failed to start capture stream: {}", e)))?; + // When the subdev path was used, SOURCE_CHANGE was already + // subscribed *there* (the rkcif video node returns ENOTTY). + // Otherwise try on the video node as a best-effort fallback for + // drivers that do honour it (tc358743/uvcvideo, rk_hdmirx). + if stream.subdev_fd.is_none() { + match ioctl::subscribe_event( + &stream.fd, + EventType::SourceChange(0), + SubscribeEventFlags::empty(), + ) { + Ok(()) => debug!("Subscribed to V4L2_EVENT_SOURCE_CHANGE on video node"), + Err(e) => debug!( + "V4L2_EVENT_SOURCE_CHANGE subscription unavailable on video node \ + ({}), falling back to timeout-based restart", + e + ), + } + } + Ok(stream) } @@ -167,6 +327,51 @@ impl V4l2rCaptureStream { self.stride } + /// Re-probe DV timings on the persistent subdev handle (no extra `open`). + pub fn probe_bridge_signal(&self) -> Option { + let subdev_fd = self.subdev_fd.as_ref()?; + Some(csi_bridge::probe_signal( + subdev_fd, + self.bridge_kind.unwrap_or(CsiBridgeKind::Unknown), + )) + } + + /// Like [`Self::probe_bridge_signal`] but isolates the ioctl on a dup'd + /// fd with a wall-clock cap — see [`csi_bridge::probe_signal_thread_timeout`]. + pub fn probe_bridge_signal_with_timeout(&self, limit: Duration) -> Option { + let subdev_fd = self.subdev_fd.as_ref()?; + csi_bridge::probe_signal_thread_timeout( + subdev_fd, + self.bridge_kind.unwrap_or(CsiBridgeKind::Unknown), + limit, + ) + } + + fn expected_capture_bytes(&self) -> Option { + if self.format.is_compressed() { + return None; + } + // Stride is bytesperline; packed formats use stride × height (not × bpp). + if self.format.bytes_per_pixel().is_some() { + return (self.stride as usize).checked_mul(self.resolution.height as usize); + } + match self.format { + PixelFormat::Nv12 | PixelFormat::Nv21 | PixelFormat::Yuv420 | PixelFormat::Yvu420 => { + (self.stride as usize) + .checked_mul(self.resolution.height as usize)? + .checked_mul(3)? + .checked_div(2) + } + PixelFormat::Nv16 => (self.stride as usize) + .checked_mul(self.resolution.height as usize)? + .checked_mul(2), + PixelFormat::Nv24 => (self.stride as usize) + .checked_mul(self.resolution.height as usize)? 
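+            // (Worked example for this helper: NV12 at 1920×1080 with stride = 1920
+            // yields 1920 × 1080 × 3 / 2 = 3_110_400 bytes, while a 64×64 rkcif
+            // placeholder frame is only 6_144 bytes — exactly the mismatch the
+            // bytes_used check in `next_into` below catches.)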
+ .checked_mul(3), + _ => None, + } + } + pub fn next_into(&mut self, dst: &mut Vec) -> io::Result { self.wait_ready()?; @@ -210,6 +415,21 @@ impl V4l2rCaptureStream { self.queue_buffer(index as u32) .map_err(|e| io::Error::other(e.to_string()))?; + if let Some(expected) = self.expected_capture_bytes() { + if total > 0 && total != expected { + warn!( + "DQBUF bytes_used ({}) != expected ({}) for {:?} {}x{} stride={} — requesting stream re-open", + total, + expected, + self.format, + self.resolution.width, + self.resolution.height, + self.stride + ); + return Err(io::Error::other(SOURCE_CHANGED_MARKER)); + } + } + Ok(CaptureMeta { bytes_used: total, sequence, @@ -220,13 +440,79 @@ impl V4l2rCaptureStream { if self.timeout.is_zero() { return Ok(()); } - let mut fds = [PollFd::new(self.fd.as_fd(), PollFlags::POLLIN)]; + // Multiplex video fd (POLLIN for DQBUF, POLLPRI as fallback for + // drivers that deliver events here) and the optional subdev fd + // (POLLPRI only — SOURCE_CHANGE on RK628 / rkcif). + let mut poll_fds: Vec = Vec::with_capacity(2); + poll_fds.push(PollFd::new( + self.fd.as_fd(), + PollFlags::POLLIN + | PollFlags::POLLPRI + | PollFlags::POLLERR + | PollFlags::POLLHUP, + )); + if let Some(subdev_fd) = self.subdev_fd.as_ref() { + poll_fds.push(PollFd::new(subdev_fd.as_fd(), PollFlags::POLLPRI)); + } let timeout_ms = self.timeout.as_millis().min(u16::MAX as u128) as u16; - let ready = poll(&mut fds, PollTimeout::from(timeout_ms))?; + let ready = poll(&mut poll_fds, PollTimeout::from(timeout_ms))?; if ready == 0 { return Err(io::Error::new(io::ErrorKind::TimedOut, "capture timeout")); } - Ok(()) + + // Subdev POLLPRI fires first on rkcif/RK628 when the source-side + // HDMI timings changed. Drain all pending events and bubble up + // the `source_changed` marker so the upper layer re-opens with a + // fresh DV_TIMINGS probe. + if let Some(subdev_fd) = self.subdev_fd.as_ref() { + if let Some(revents) = poll_fds.get(1).and_then(|f| f.revents()) { + if revents.contains(PollFlags::POLLPRI) { + let drained = drain_events(subdev_fd); + info!( + "Subdev SOURCE_CHANGE detected (drained {} event(s)), \ + requesting stream re-open", + drained + ); + return Err(io::Error::other(SOURCE_CHANGED_MARKER)); + } + } + } + + if let Some(revents) = poll_fds[0].revents() { + if revents.contains(PollFlags::POLLERR) || revents.contains(PollFlags::POLLHUP) { + debug!( + "capture poll: video revents={:?} (ERR/HUP) — requesting stream re-open", + revents + ); + return Err(io::Error::other(SOURCE_CHANGED_MARKER)); + } + if revents.contains(PollFlags::POLLPRI) { + let drained = drain_events(&self.fd); + info!( + "Video-node SOURCE_CHANGE detected (drained {} event(s)), \ + requesting stream re-open", + drained + ); + return Err(io::Error::other(SOURCE_CHANGED_MARKER)); + } + if !revents.contains(PollFlags::POLLIN) { + // rkcif + RK628: the driver may wake `poll` after internally + // invalidating queued buffers without queueing a V4L2 event. + // Treat like SOURCE_CHANGE so we STREAMOFF / re-S_FMT. 
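+            // In practice on rkcif + RK628 a cable re-plug can surface exactly here:
+            // poll() reports the video fd ready with empty (or ERR-only) revents and
+            // no dequeueable V4L2 event. Folding every such wake-up into the single
+            // `source_changed` marker keeps the capture loop down to one re-open path.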
+ debug!( + "capture poll: ready={} video revents={:?} (no POLLIN) — requesting stream re-open", + ready, revents + ); + return Err(io::Error::other(SOURCE_CHANGED_MARKER)); + } + return Ok(()); + } + + debug!( + "capture poll: ready={} but video revents unavailable — requesting stream re-open", + ready + ); + Err(io::Error::other(SOURCE_CHANGED_MARKER)) } fn queue_all_buffers(&mut self) -> Result<()> { @@ -256,12 +542,199 @@ impl V4l2rCaptureStream { impl Drop for V4l2rCaptureStream { fn drop(&mut self) { + // Release ordering matters on rkcif: a subsequent open()/S_FMT from a + // freshly-constructed stream returns EBUSY if the previous capture has + // not fully relinquished its buffers. Mirror the ustreamer teardown + // order: + // 1. STREAMOFF (stop DMA) + // 2. unsubscribe_all (no further DQEVENT paths) + // 3. munmap via Drop (release buffer mappings) + // 4. REQBUFS count=0 (free kernel buffer list) + // 5. close(fd) (implicit on File Drop) if let Err(e) = ioctl::streamoff(&self.fd, self.queue) { debug!("Failed to stop capture stream: {}", e); } + if let Err(e) = ioctl::unsubscribe_all_events(&self.fd) { + debug!("Failed to unsubscribe V4L2 events: {}", e); + } + // Explicit munmap *before* REQBUFS(0) — the kernel refuses to free the + // buffer list while mappings are outstanding. + self.mappings.clear(); + if let Err(e) = ioctl::reqbufs::( + &self.fd, + self.queue, + MemoryType::Mmap, + 0, + MemoryConsistency::empty(), + ) { + debug!("Failed to release capture buffers: {}", e); + } } } +/// Driver-name check for CSI/HDMI bridge devices (rk_hdmirx, rkcif, tc358743, +/// …) that expose DV timings. Kept in sync with `video::is_csi_hdmi_bridge` +/// but queries the raw V4L2 driver string so we don't need a full +/// `VideoDeviceInfo` at `V4l2rCaptureStream::open` time. +fn is_csi_bridge_driver(driver: &str) -> bool { + let d = driver.to_ascii_lowercase(); + d == "rk_hdmirx" || d == "rkcif" || d == "tc358743" || d.starts_with("rkcif") +} + +/// Drain any pending `V4L2_EVENT_*` events on `fd`. Used after POLLPRI to +/// clear the queue so the next poll doesn't immediately wake up on stale +/// state. Capped at 16 events per call. +fn drain_events(fd: &File) -> u32 { + let mut drained = 0u32; + while let Ok(_ev) = ioctl::dqevent::(fd) { + drained = drained.saturating_add(1); + if drained >= 16 { + break; + } + } + drained +} + +/// Result of a successful `VIDIOC_QUERY_DV_TIMINGS` + `VIDIOC_S_DV_TIMINGS` +/// probe. Used by the CSI bridge path to override the requested resolution +/// with the source-reported geometry before `S_FMT`. +#[derive(Debug, Clone, Copy)] +struct DvTimingsMode { + width: u32, + height: u32, + #[allow(dead_code)] + fps: Option, +} + +/// Probe DV timings from the source and latch them into the driver. +/// +/// Mirrors PiKVM/ustreamer's `src_hdmi_open_sequence`: +/// 1. `VIDIOC_QUERY_DV_TIMINGS` — active-probe the source. +/// 2. On success, `VIDIOC_S_DV_TIMINGS` — commit so that subsequent +/// `S_FMT` is accepted at the matching geometry. +/// 3. Return the timings for the caller to feed into `S_FMT`. +/// +/// Errno mapping (see `V4L2_CID_DV_RX_POWER_PRESENT` semantics): +/// * `ENOLINK` → `NoCable` (TMDS clock absent, cable unplugged) +/// * `ENOLCK` → `NoSync` (TMDS present, timings unstable) +/// * `ERANGE` → `OutOfRange` (timings outside hardware caps) +/// * `ENODATA` → `NoSignal` (driver says "no DV timings support on +/// this input", e.g. 
EDID not applied yet) +/// * anything else → `NoSignal` (fallback, keeps the retry loop going) +fn probe_and_apply_dv_timings(fd: &File) -> Result { + let timings: v4l2_dv_timings = match ioctl::query_dv_timings(fd) { + Ok(t) => t, + Err(err) => { + let status = match &err { + QueryDvTimingsError::NoLink => SignalStatus::NoCable, + QueryDvTimingsError::UnstableSignal => SignalStatus::NoSync, + QueryDvTimingsError::IoctlError(Errno::ERANGE) => SignalStatus::OutOfRange, + QueryDvTimingsError::Unsupported => SignalStatus::NoSignal, + // I2C-layer failures between rkcif and the RK628 bridge + // (`ret=-110`/-121/-5) typically mean the bridge is in the + // middle of a PHY re-lock, not that the source is gone. + // Classify them as `NoSync` so the upper layer keeps retrying + // on the short end of the back-off ladder. + QueryDvTimingsError::IoctlError(Errno::EIO) + | QueryDvTimingsError::IoctlError(Errno::EREMOTEIO) + | QueryDvTimingsError::IoctlError(Errno::ETIMEDOUT) => SignalStatus::NoSync, + QueryDvTimingsError::IoctlError(_) => SignalStatus::NoSignal, + }; + info!( + "VIDIOC_QUERY_DV_TIMINGS failed: {} -> SignalStatus::{:?}", + err, status + ); + return Err(AppError::CaptureNoSignal { + kind: status.as_str().to_string(), + }); + } + }; + + // `v4l2_dv_timings` is a packed union; copy the scalar fields out to + // aligned locals before formatting / comparing to avoid UB (and the + // rustc E0793 "reference to field of packed struct is unaligned" error). + let timings_type: u32 = timings.type_; + if timings_type != V4L2_DV_BT_656_1120 { + warn!( + "QUERY_DV_TIMINGS returned unknown type {}, treating as NoSignal", + timings_type + ); + return Err(AppError::CaptureNoSignal { + kind: SignalStatus::NoSignal.as_str().to_string(), + }); + } + + let bt = unsafe { timings.__bindgen_anon_1.bt }; + let bt_width: u32 = bt.width; + let bt_height: u32 = bt.height; + let bt_pixelclock: u64 = bt.pixelclock; + let bt_hfrontporch: u32 = bt.hfrontporch; + let bt_hsync: u32 = bt.hsync; + let bt_hbackporch: u32 = bt.hbackporch; + let bt_vfrontporch: u32 = bt.vfrontporch; + let bt_vsync: u32 = bt.vsync; + let bt_vbackporch: u32 = bt.vbackporch; + + if bt_width == 0 || bt_height == 0 || bt_width <= 64 || bt_height <= 64 { + warn!( + "QUERY_DV_TIMINGS returned degenerate {}x{}, treating as NoSignal", + bt_width, bt_height + ); + return Err(AppError::CaptureNoSignal { + kind: SignalStatus::NoSignal.as_str().to_string(), + }); + } + + // Latch the detected timings so subsequent S_FMT / STREAMON use the + // right pixel clock + blanking. Failure here is *not* fatal on some + // drivers (rkcif doesn't implement S_DV_TIMINGS per-output-device, only + // on the bridging subdev), so degrade to a warning and keep going. 
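+    // For the fps derivation further down: a CEA-861 1080p60 source reports
+    // pixelclock = 148_500_000 with 280 pixels of horizontal and 45 lines of
+    // vertical blanking, so total = (1920 + 280) × (1080 + 45) = 2_475_000 and
+    // 148_500_000 / 2_475_000 = 60.00 fps — exactly what
+    // `dv_timings_fps_from_scalars` computes.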
+ if let Err(e) = ioctl::s_dv_timings::<_, v4l2_dv_timings>(fd, timings) { + debug!( + "VIDIOC_S_DV_TIMINGS failed ({}), continuing with queried timings for S_FMT", + e + ); + } + + let fps = dv_timings_fps_from_scalars( + bt_width, + bt_height, + bt_hfrontporch + bt_hsync + bt_hbackporch, + bt_vfrontporch + bt_vsync + bt_vbackporch, + bt_pixelclock, + ); + info!( + "DV timings locked: {}x{} @ {} (pix_clk={})", + bt_width, + bt_height, + fps.map(|f| format!("{:.2} fps", f)) + .unwrap_or_else(|| "?fps".to_string()), + bt_pixelclock + ); + + Ok(DvTimingsMode { + width: bt_width, + height: bt_height, + fps, + }) +} + +fn dv_timings_fps_from_scalars( + width: u32, + height: u32, + h_blanking: u32, + v_blanking: u32, + pixelclock: u64, +) -> Option { + let total_h = (width + h_blanking) as u64; + let total_v = (height + v_blanking) as u64; + let denom = total_h.checked_mul(total_v)?; + if denom == 0 || pixelclock == 0 { + return None; + } + Some(pixelclock as f64 / denom as f64) +} + fn set_fps(fd: &File, queue: QueueType, fps: u32) -> std::result::Result<(), ioctl::GParmError> { let mut params = unsafe { std::mem::zeroed::() }; params.type_ = queue as u32; diff --git a/src/web/audio_ws.rs b/src/web/audio_ws.rs index 564c663e..ce7d6a5d 100644 --- a/src/web/audio_ws.rs +++ b/src/web/audio_ws.rs @@ -78,15 +78,13 @@ async fn handle_audio_socket(socket: WebSocket, state: Arc) { loop { tokio::select! { // Receive Opus frames and send to client - opus_result = opus_rx.changed() => { - if opus_result.is_err() { - info!("Audio stream closed"); - break; - } - - let frame = match opus_rx.borrow().clone() { - Some(frame) => frame, - None => continue, + opus_result = opus_rx.recv() => { + let frame = match opus_result { + Some(f) => f, + None => { + info!("Audio stream closed"); + break; + } }; let binary = encode_audio_packet(&frame, stream_start); diff --git a/src/web/handlers/mod.rs b/src/web/handlers/mod.rs index de8567cd..a99dc30e 100644 --- a/src/web/handlers/mod.rs +++ b/src/web/handlers/mod.rs @@ -988,6 +988,7 @@ pub struct VideoDevice { pub driver: String, pub formats: Vec, pub usb_bus: Option, + pub has_signal: bool, } #[derive(Serialize)] @@ -1083,10 +1084,14 @@ pub async fn list_devices(State(state): State>) -> Json vec![], + Err(e) => { + warn!(error = %e, "Video device enumeration failed; returning empty video list for /api/devices"); + vec![] + } }; // Detect serial devices (common USB/ACM ports) - single directory read diff --git a/src/webrtc/universal_session.rs b/src/webrtc/universal_session.rs index 4e1db442..b52203c5 100644 --- a/src/webrtc/universal_session.rs +++ b/src/webrtc/universal_session.rs @@ -589,6 +589,9 @@ impl UniversalSession { mut frame_rx: tokio::sync::mpsc::Receiver>, request_keyframe: Arc, ) { + if let Some(handle) = self.video_receiver_handle.lock().await.take() { + handle.abort(); + } info!( "Starting {} session {} with shared encoder", self.codec, self.session_id @@ -749,7 +752,7 @@ impl UniversalSession { /// Start receiving Opus audio frames pub async fn start_audio_from_opus( &self, - mut opus_rx: tokio::sync::watch::Receiver>>, + mut opus_rx: tokio::sync::mpsc::Receiver>, ) { let audio_track = match &self.audio_track { Some(track) => track.clone(), @@ -805,15 +808,13 @@ impl UniversalSession { } } - result = opus_rx.changed() => { - if result.is_err() { - info!("Opus channel closed for session {}", session_id); - break; - } - - let opus_frame = match opus_rx.borrow().clone() { + result = opus_rx.recv() => { + let opus_frame = match result { Some(frame) => frame, 
- None => continue, + None => { + info!("Opus mpsc closed for session {}", session_id); + break; + } }; // 20ms at 48kHz = 960 samples diff --git a/src/webrtc/webrtc_streamer.rs b/src/webrtc/webrtc_streamer.rs index cabab679..553be30e 100644 --- a/src/webrtc/webrtc_streamer.rs +++ b/src/webrtc/webrtc_streamer.rs @@ -36,14 +36,15 @@ use tracing::{debug, info, trace, warn}; use crate::audio::{AudioController, OpusFrame}; use crate::error::{AppError, Result}; -use crate::events::EventBus; +use crate::events::{EventBus, SystemEvent}; use crate::hid::HidController; use crate::video::encoder::registry::EncoderBackend; use crate::video::encoder::registry::VideoEncoderType; use crate::video::encoder::VideoCodecType; use crate::video::format::{PixelFormat, Resolution}; use crate::video::shared_video_pipeline::{ - SharedVideoPipeline, SharedVideoPipelineConfig, SharedVideoPipelineStats, + PipelineStateNotification, SharedVideoPipeline, SharedVideoPipelineConfig, + SharedVideoPipelineStats, }; use super::config::{TurnServer, WebRtcConfig}; @@ -93,6 +94,8 @@ pub struct CaptureDeviceConfig { pub device_path: PathBuf, pub buffer_count: u32, pub jpeg_quality: u8, + pub subdev_path: Option, + pub bridge_kind: Option, } /// WebRTC streamer statistics @@ -274,6 +277,73 @@ impl WebRtcStreamer { } } + fn build_pipeline_state_notifier( + device: String, + events: Option>, + ) -> Option> { + events.map(|events| { + Arc::new(move |notification: PipelineStateNotification| { + events.publish(SystemEvent::StreamStateChanged { + state: notification.state.to_string(), + device: Some(device.clone()), + reason: notification.reason.map(|reason| reason.to_string()), + next_retry_ms: notification.next_retry_ms, + }); + }) as Arc + }) + } + + fn make_keyframe_callback( + pipeline: Arc, + session_id: String, + ) -> Arc { + Arc::new(move || { + let pipeline = pipeline.clone(); + let sid = session_id.clone(); + tokio::spawn(async move { + info!("Requesting keyframe for session {} after reconnect", sid); + pipeline.request_keyframe().await; + }); + }) + } + + async fn reconnect_sessions_to_current_pipeline( + self: &Arc, + reason: &str, + ) -> Result { + if self.capture_device.read().await.is_none() { + return Ok(0); + } + + let sessions_to_reconnect: Vec<(String, Arc)> = { + let sessions = self.sessions.read().await; + sessions + .iter() + .map(|(session_id, session)| (session_id.clone(), session.clone())) + .collect() + }; + + if sessions_to_reconnect.is_empty() { + return Ok(0); + } + + let pipeline = self.ensure_video_pipeline().await?; + for (session_id, session) in &sessions_to_reconnect { + info!( + "Reconnecting session {} to pipeline after {}", + session_id, reason + ); + session + .start_from_video_pipeline( + pipeline.subscribe(), + Self::make_keyframe_callback(pipeline.clone(), session_id.clone()), + ) + .await; + } + + Ok(sessions_to_reconnect.len()) + } + /// Ensure video pipeline is initialized and running async fn ensure_video_pipeline(self: &Arc) -> Result> { let mut pipeline_guard = self.video_pipeline.write().await; @@ -284,24 +354,35 @@ impl WebRtcStreamer { } } - let config = self.config.read().await; let codec = *self.video_codec.read().await; - - let pipeline_config = SharedVideoPipelineConfig { - resolution: config.resolution, - input_format: config.input_format, - output_codec: Self::codec_type_to_encoder_type(codec), - bitrate_preset: config.bitrate_preset, - fps: config.fps, - encoder_backend: config.encoder_backend, + let pipeline_config = { + let config = self.config.read().await; + 
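+            // Scoped read: the `config` guard is dropped at the end of this block,
+            // before `SharedVideoPipeline::new` and the `.await`s that follow, so
+            // pipeline construction never runs while holding the config RwLock.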
SharedVideoPipelineConfig { + resolution: config.resolution, + input_format: config.input_format, + output_codec: Self::codec_type_to_encoder_type(codec), + bitrate_preset: config.bitrate_preset, + fps: config.fps, + encoder_backend: config.encoder_backend, + } }; info!("Creating shared video pipeline for {:?}", codec); let pipeline = SharedVideoPipeline::new(pipeline_config)?; let capture_device = self.capture_device.read().await.clone(); if let Some(device) = capture_device { + pipeline.set_state_notifier(Self::build_pipeline_state_notifier( + device.device_path.display().to_string(), + self.events.read().await.clone(), + )); pipeline - .start_with_device(device.device_path, device.buffer_count, device.jpeg_quality) + .start_with_device( + device.device_path, + device.buffer_count, + device.jpeg_quality, + device.subdev_path, + device.bridge_kind, + ) .await?; } else { return Err(AppError::VideoError( @@ -322,11 +403,13 @@ impl WebRtcStreamer { // Clear pipeline reference in WebRtcStreamer if let Some(streamer) = streamer_weak.upgrade() { + let mut pending_geometry: Option<(Resolution, PixelFormat)> = None; let mut pipeline_guard = streamer.video_pipeline.write().await; // Only clear if it's the same pipeline that stopped if let Some(ref current) = *pipeline_guard { if let Some(stopped_pipeline) = pipeline_weak.upgrade() { if Arc::ptr_eq(current, &stopped_pipeline) { + pending_geometry = stopped_pipeline.take_pending_sync_geometry(); *pipeline_guard = None; info!("Cleared stopped video pipeline reference"); } @@ -334,6 +417,35 @@ impl WebRtcStreamer { } drop(pipeline_guard); + let should_reconnect = pending_geometry.is_some(); + if let Some((r, f)) = pending_geometry { + streamer.sync_video_geometry_from_negotiated(r, f).await; + } + if should_reconnect { + let streamer_for_reconnect = streamer.clone(); + tokio::task::spawn_blocking(move || { + let handle = tokio::runtime::Handle::current(); + handle.block_on(async move { + match streamer_for_reconnect + .reconnect_sessions_to_current_pipeline( + "capture geometry change", + ) + .await + { + Ok(reconnected) if reconnected > 0 => info!( + "Video pipeline rebuilt after geometry change, reconnected {} sessions", + reconnected + ), + Ok(_) => {} + Err(e) => warn!( + "Failed to reconnect sessions after geometry change: {}", + e + ), + } + }); + }); + } + info!( "Video pipeline stopped, but keeping capture config for new sessions" ); @@ -344,6 +456,13 @@ impl WebRtcStreamer { debug!("Video pipeline monitor task ended"); }); + let pipeline_cfg = pipeline.config().await; + self.sync_video_geometry_from_negotiated( + pipeline_cfg.resolution, + pipeline_cfg.input_format, + ) + .await; + *pipeline_guard = Some(pipeline.clone()); Ok(pipeline) } @@ -367,6 +486,15 @@ impl WebRtcStreamer { } } + pub async fn current_video_geometry(&self) -> (Resolution, PixelFormat, u32) { + if let Some(cfg) = self.get_pipeline_config().await { + (cfg.resolution, cfg.input_format, cfg.fps) + } else { + let c = self.config.read().await; + (c.resolution, c.input_format, c.fps) + } + } + /// Request the encoder to generate a keyframe on next encode pub async fn request_keyframe(&self) -> Result<()> { if let Some(ref pipeline) = *self.video_pipeline.read().await { @@ -417,7 +545,7 @@ impl WebRtcStreamer { /// Subscribe to encoded Opus frames (for sessions) pub async fn subscribe_opus( &self, - ) -> Option>>> { + ) -> Option>> { if let Some(ref controller) = *self.audio_controller.read().await { controller.subscribe_opus_async().await } else { @@ -441,16 +569,23 @@ impl 
WebRtcStreamer { } } - /// Set capture device for direct capture pipeline - pub async fn set_capture_device(&self, device_path: PathBuf, jpeg_quality: u8) { + pub async fn set_capture_device( + &self, + device_path: PathBuf, + jpeg_quality: u8, + subdev_path: Option, + bridge_kind: Option, + ) { info!( - "Setting direct capture device for WebRTC: {:?}", - device_path + "Setting direct capture device for WebRTC: {:?} (subdev={:?}, kind={:?})", + device_path, subdev_path, bridge_kind ); *self.capture_device.write().await = Some(CaptureDeviceConfig { device_path, buffer_count: 2, jpeg_quality, + subdev_path, + bridge_kind, }); } @@ -519,16 +654,54 @@ impl WebRtcStreamer { } // Update config (preserve user-configured bitrate) - let mut config = self.config.write().await; - config.resolution = resolution; - config.input_format = format; - config.fps = fps; - // Note: bitrate is NOT auto-scaled here - use set_bitrate() or config to change it + { + let mut config = self.config.write().await; + config.resolution = resolution; + config.input_format = format; + config.fps = fps; + // Note: bitrate is NOT auto-scaled here - use set_bitrate() or config to change it - info!( - "WebRTC config updated: {}x{} {:?} @ {} fps, {}", - resolution.width, resolution.height, format, fps, config.bitrate_preset - ); + info!( + "WebRTC config updated: {}x{} {:?} @ {} fps, {}", + resolution.width, + resolution.height, + format, + fps, + config.bitrate_preset + ); + } + + self.notify_device_info_dirty().await; + } + + /// Update resolution/format to match DV-negotiated capture without stopping + /// the pipeline or closing sessions. Used when hardware timing differs from + /// saved settings (e.g. RK628 `S_FMT` follows source while SQLite still has + /// a user-chosen preset). + pub async fn sync_video_geometry_from_negotiated( + &self, + resolution: Resolution, + format: PixelFormat, + ) { + { + let mut config = self.config.write().await; + if config.resolution == resolution && config.input_format == format { + return; + } + info!( + "WebRTC geometry aligned to negotiated capture: {}x{} {:?} (was {}x{} {:?})", + resolution.width, + resolution.height, + format, + config.resolution.width, + config.resolution.height, + config.input_format + ); + config.resolution = resolution; + config.input_format = format; + } + + self.notify_device_info_dirty().await; } /// Update encoder backend (software/hardware selection) @@ -652,6 +825,14 @@ impl WebRtcStreamer { *self.events.write().await = Some(events); } + /// Push a debounced `system.device_info` refresh so the console status card + /// picks up DV-negotiated / pipeline resolution without a separate WebRTC message. 
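+    // `EventBus::mark_device_info_dirty` is not part of this diff; the assumed
+    // contract is a coalescing flag rather than an immediate broadcast — roughly:
+    //
+    //     impl EventBus {
+    //         pub fn mark_device_info_dirty(&self) {
+    //             // a background task later swaps this back to false and, if it
+    //             // was set, publishes a single `system.device_info` event
+    //             self.device_info_dirty.store(true, Ordering::Relaxed);
+    //         }
+    //     }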
+ async fn notify_device_info_dirty(&self) { + if let Some(bus) = self.events.read().await.as_ref() { + bus.mark_device_info_dirty(); + } + } + // === Session Management === /// Create a new WebRTC session @@ -695,17 +876,8 @@ impl WebRtcStreamer { // Request keyframe after ICE connection is established and on gaps let pipeline_for_callback = pipeline.clone(); let session_id_for_callback = session_id.clone(); - let request_keyframe = Arc::new(move || { - let pipeline = pipeline_for_callback.clone(); - let sid = session_id_for_callback.clone(); - tokio::spawn(async move { - info!( - "Requesting keyframe for session {} after ICE connected", - sid - ); - pipeline.request_keyframe().await; - }); - }); + let request_keyframe = + Self::make_keyframe_callback(pipeline_for_callback, session_id_for_callback); session .start_from_video_pipeline(pipeline.subscribe(), request_keyframe) .await; @@ -939,34 +1111,14 @@ impl WebRtcStreamer { return Ok(()); } - let session_ids: Vec = self.sessions.read().await.keys().cloned().collect(); - if !session_ids.is_empty() { - let pipeline = self.ensure_video_pipeline().await?; - - let sessions = self.sessions.read().await; - for session_id in &session_ids { - if let Some(session) = sessions.get(session_id) { - info!("Reconnecting session {} to new pipeline", session_id); - let pipeline_for_callback = pipeline.clone(); - let sid = session_id.clone(); - let request_keyframe = Arc::new(move || { - let pipeline = pipeline_for_callback.clone(); - let sid = sid.clone(); - tokio::spawn(async move { - info!("Requesting keyframe for session {} after reconnect", sid); - pipeline.request_keyframe().await; - }); - }); - session - .start_from_video_pipeline(pipeline.subscribe(), request_keyframe) - .await; - } - } - + let reconnected = self + .reconnect_sessions_to_current_pipeline("bitrate change") + .await?; + if reconnected > 0 { info!( "Video pipeline restarted with {}, reconnected {} sessions", preset, - session_ids.len() + reconnected ); } } else { diff --git a/web/src/api/index.ts b/web/src/api/index.ts index faaa99b8..bd8a01a9 100644 --- a/web/src/api/index.ts +++ b/web/src/api/index.ts @@ -213,7 +213,18 @@ export interface VideoEncoderSelfCheckResponse { export const streamApi = { status: () => request<{ - state: 'uninitialized' | 'ready' | 'streaming' | 'no_signal' | 'error' + state: + | 'uninitialized' + | 'ready' + | 'streaming' + | 'no_signal' + | 'no_cable' + | 'no_sync' + | 'out_of_range' + | 'device_lost' + | 'recovering' + | 'device_busy' + | 'error' device: string | null format: string | null resolution: [number, number] | null @@ -649,6 +660,7 @@ export const configApi = { }> }> usb_bus: string | null + has_signal: boolean }> serial: Array<{ path: string; name: string }> audio: Array<{ diff --git a/web/src/components/VideoConfigPopover.vue b/web/src/components/VideoConfigPopover.vue index ebaf4bcf..21abba3f 100644 --- a/web/src/components/VideoConfigPopover.vue +++ b/web/src/components/VideoConfigPopover.vue @@ -47,6 +47,7 @@ interface VideoDevice { fps: number[] }[] }[] + has_signal?: boolean } const props = defineProps<{ diff --git a/web/src/composables/useConsoleEvents.ts b/web/src/composables/useConsoleEvents.ts index b45ffc90..027c08f2 100644 --- a/web/src/composables/useConsoleEvents.ts +++ b/web/src/composables/useConsoleEvents.ts @@ -14,7 +14,14 @@ export interface ConsoleEventHandlers { onStreamModeSwitching?: (data: { transition_id: string; to_mode: string; from_mode: string }) => void onStreamModeReady?: (data: { transition_id: string; mode: 
string }) => void onWebRTCReady?: (data: { codec: string; hardware: boolean; transition_id?: string }) => void - onStreamStateChanged?: (data: { state: string; device?: string | null }) => void + onStreamStateChanged?: (data: { + state: string + device?: string | null + /** Optional fine-grained diagnostic tag (e.g. `no_cable`, `out_of_range`, `recovering`). */ + reason?: string | null + /** Optional countdown (ms) until the next backend self-recovery attempt. */ + next_retry_ms?: number | null + }) => void onStreamDeviceLost?: (data: { device: string; reason: string }) => void onStreamReconnecting?: (data: { device: string; attempt: number }) => void onStreamRecovered?: (data: { device: string }) => void diff --git a/web/src/i18n/en-US.ts b/web/src/i18n/en-US.ts index cc3e565d..b8b659ed 100644 --- a/web/src/i18n/en-US.ts +++ b/web/src/i18n/en-US.ts @@ -240,6 +240,8 @@ export default { fps: 'Frame Rate', selectFps: 'Select FPS', noVideoDevices: 'No video devices detected', + noSignalDetected: 'No HDMI signal detected. Please connect an HDMI cable and refresh.', + refreshDevices: 'Refresh Devices', // Audio audioDevice: 'Audio Device', selectAudioDevice: 'Select audio capture device', @@ -310,6 +312,33 @@ export default { configChanging: 'Applying new configuration...', videoRestarted: 'Video stream updated', streamError: 'Stream error', + // Four canonical video states (backend StreamStateChanged: streaming / + // no_signal / device_lost / device_busy). `reason` provides optional + // fine-grained diagnostic sub-text. + signal: { + noSignal: { + title: 'Waiting for video signal', + detail: 'Capture device is ready, waiting for the target to output video', + }, + deviceLost: { + title: 'Video device offline', + detail: 'Capture card is not responding, attempting to re-detect…', + }, + deviceBusy: { + title: 'Video channel busy', + detail: 'Applying a new configuration or another component is using the device, please wait…', + }, + reason: { + no_cable: 'HDMI cable not detected — check the cable and that the target is powered on', + no_sync: 'Unstable signal: timings could not be locked — try a lower resolution or refresh rate', + out_of_range: 'Resolution or refresh rate exceeds capture capability — try 1080p60 or below', + no_signal: 'Capture card is ready, waiting for a picture…', + recovering: 'Reconnecting the video device automatically', + device_lost: 'Video node disappeared, waiting for the driver to recover', + config_changing: 'Applying new configuration', + mode_switching: 'Switching video mode', + }, + }, // WebRTC webrtcConnected: 'WebRTC Connected', webrtcConnectedDesc: 'Using low-latency H.264 video stream', diff --git a/web/src/i18n/zh-CN.ts b/web/src/i18n/zh-CN.ts index de2817f4..1f15baf5 100644 --- a/web/src/i18n/zh-CN.ts +++ b/web/src/i18n/zh-CN.ts @@ -240,6 +240,8 @@ export default { fps: '帧率', selectFps: '选择帧率', noVideoDevices: '未检测到视频设备', + noSignalDetected: '未检测到 HDMI 信号,请连接 HDMI 线缆后刷新。', + refreshDevices: '刷新设备', // Audio audioDevice: '音频设备', selectAudioDevice: '选择音频采集设备', @@ -310,6 +312,32 @@ export default { configChanging: '正在应用新配置...', videoRestarted: '视频流已更新', streamError: '视频流错误', + // 四档视频状态(对应后端 StreamStateChanged:streaming / no_signal / + // device_lost / device_busy). 
`reason` 子键可选,用于在副文案中补充细节。 + signal: { + noSignal: { + title: '暂无视频信号', + detail: '采集卡已就绪,正在等待被控机画面', + }, + deviceLost: { + title: '视频设备已断开', + detail: '采集卡离线,正在尝试重新识别…', + }, + deviceBusy: { + title: '视频通道忙', + detail: '正在切换配置或被其他组件占用,请稍候…', + }, + reason: { + no_cable: '未检测到 HDMI 线缆,请检查连接或被控机是否已开机', + no_sync: '信号不稳定,无法锁定时序,可尝试降低被控机分辨率/刷新率', + out_of_range: '分辨率或刷新率超出采集卡能力,建议切换到 1080p60 以内', + no_signal: '采集卡已就绪,正在等待画面…', + recovering: '正在自动重连视频设备', + device_lost: '视频节点丢失,等待驱动恢复', + config_changing: '正在应用新配置', + mode_switching: '正在切换视频模式', + }, + }, // WebRTC webrtcConnected: 'WebRTC 已连接', webrtcConnectedDesc: '正在使用 H.264 低延迟视频流', diff --git a/web/src/views/ConsoleView.vue b/web/src/views/ConsoleView.vue index 6423d725..c4b1c718 100644 --- a/web/src/views/ConsoleView.vue +++ b/web/src/views/ConsoleView.vue @@ -59,7 +59,7 @@ import { Loader2, } from 'lucide-vue-next' -const { t } = useI18n() +const { t, te } = useI18n() const router = useRouter() const systemStore = useSystemStore() const configStore = useConfigStore() @@ -98,6 +98,12 @@ const videoErrorMessage = ref('') const videoRestarting = ref(false) // Track if video is restarting due to config change const mjpegFrameReceived = ref(false) // Whether MJPEG stream has received at least one frame +/** From `stream.state_changed`: ok | no_signal | device_lost | device_busy */ +type StreamSignalState = 'ok' | 'no_signal' | 'device_lost' | 'device_busy' +const streamSignalState = ref('ok') +const streamSignalReason = ref(null) +const streamNextRetryMs = ref(null) + // Video aspect ratio (dynamically updated from actual video dimensions) // Using string format "width/height" to let browser handle the ratio calculation const videoAspectRatio = ref(null) @@ -644,6 +650,7 @@ function waitForVideoFirstFrame(el: HTMLVideoElement, timeoutMs = 2000): Promise }) } +/** For WebRTC watch: skip auto-reconnect when these hold. */ function shouldSuppressAutoReconnect(): boolean { return videoMode.value === 'mjpeg' || !isConsoleActive.value @@ -751,6 +758,17 @@ function handleVideoError() { return } + // Expected error while overlay shows no_signal / device_* — do not retry. + if (streamSignalState.value !== 'ok') { + if (retryTimeoutId !== null) { + clearTimeout(retryTimeoutId) + retryTimeoutId = null + } + videoLoading.value = false + mjpegFrameReceived.value = false + return + } + // Count consecutive errors even in grace period consecutiveErrors++ @@ -993,22 +1011,121 @@ function handleStreamModeSwitching(data: { transition_id: string; to_mode: strin } function handleStreamStateChanged(data: any) { - if (data.state === 'error') { + const state = typeof data?.state === 'string' ? data.state : '' + const reason = typeof data?.reason === 'string' && data.reason.length > 0 ? data.reason : null + const nextRetry = typeof data?.next_retry_ms === 'number' && data.next_retry_ms > 0 + ? 
data.next_retry_ms + : null + + streamSignalReason.value = reason + streamNextRetryMs.value = nextRetry + + const previous = streamSignalState.value + + switch (state) { + case 'streaming': + case 'ready': + case 'uninitialized': + streamSignalState.value = 'ok' + break + case 'no_signal': + streamSignalState.value = 'no_signal' + break + case 'device_lost': + streamSignalState.value = 'device_lost' + break + case 'device_busy': + streamSignalState.value = 'device_busy' + break + } + + if (state === 'error') { videoError.value = true videoErrorMessage.value = t('console.streamError') - } else if (data.state === 'recovering' && videoMode.value !== 'mjpeg') { - // Backend is in the DeviceLost recovery loop; start WebRTC reconnect if not already scheduled. + } else if (state === 'no_signal' && videoMode.value !== 'mjpeg') { + cancelWebRTCRecovery() + videoRestarting.value = false + videoError.value = false + videoErrorMessage.value = '' + } else if (state === 'device_busy' && videoMode.value !== 'mjpeg') { + cancelWebRTCRecovery() + videoRestarting.value = true + videoLoading.value = true + videoError.value = false + videoErrorMessage.value = '' + if (previous !== 'device_busy') { + captureFrameOverlay().catch(() => {}) + } + } else if (state === 'device_lost' && videoMode.value !== 'mjpeg') { if (webrtcRecoveryTimerId === null && webrtcRecoveryAttempts === 0) { scheduleWebRTCRecovery() } - } else if (data.state === 'streaming' || data.state === 'no_signal') { - // Backend stream is alive; cancel any pending recovery timers. - if (data.state === 'streaming') { - cancelWebRTCRecovery() + } else if (state === 'streaming') { + cancelWebRTCRecovery() + videoError.value = false + videoErrorMessage.value = '' + videoRestarting.value = false + if ( + videoMode.value === 'mjpeg' + && (previous === 'no_signal' || previous === 'device_lost' || previous === 'device_busy') + ) { + refreshVideo() + } else if ( + videoMode.value !== 'mjpeg' + && (previous === 'no_signal' || previous === 'device_busy' || previous === 'device_lost') + ) { + if (webrtc.isConnected.value && !webrtc.isConnecting.value) { + void rebindWebRTCVideo().then(() => { + videoLoading.value = false + }) + } else if (!webrtc.isConnected.value && !webrtc.isConnecting.value) { + void connectWebRTCSerial('stream recovered').then(async (ok) => { + if (ok) { + await rebindWebRTCVideo() + videoLoading.value = false + } else if (webrtcRecoveryTimerId === null && webrtcRecoveryAttempts === 0) { + scheduleWebRTCRecovery() + } + }) + } } } } +const showSignalOverlay = computed(() => streamSignalState.value !== 'ok') + +const signalOverlayInfo = computed(() => { + const reason = streamSignalReason.value + const reasonHintKey = reason ? `console.signal.reason.${reason}` : '' + const hint = reasonHintKey && te(reasonHintKey) ? 
t(reasonHintKey) : '' + + switch (streamSignalState.value) { + case 'no_signal': + return { + title: t('console.signal.noSignal.title'), + detail: t('console.signal.noSignal.detail'), + hint, + tone: 'info' as const, + } + case 'device_lost': + return { + title: t('console.signal.deviceLost.title'), + detail: t('console.signal.deviceLost.detail'), + hint, + tone: 'error' as const, + } + case 'device_busy': + return { + title: t('console.signal.deviceBusy.title'), + detail: t('console.signal.deviceBusy.detail'), + hint, + tone: 'info' as const, + } + default: + return { title: '', detail: '', hint: '', tone: 'info' as const } + } +}) + function handleStreamStatsUpdate(data: any) { // Always update clients count in store (for MJPEG mode display) if (typeof data.clients === 'number') { @@ -1177,8 +1294,12 @@ function refreshVideo() { } // MJPEG URL with cache-busting timestamp (reactive) -// Only return valid URL when in MJPEG mode to prevent unnecessary requests -const mjpegTimestamp = ref(0) // Start with 0 to prevent initial load +// Only return valid URL when in MJPEG mode and the backend reports a +// healthy stream. When the backend goes offline (no_signal / device_lost +// / device_busy) we deliberately return an empty string so the `` +// tag has no `src` and the 4-state overlay fully owns the video area — +// no more fake placeholder JPEG peeking through. +const mjpegTimestamp = ref(0) const mjpegUrl = computed(() => { if (videoMode.value !== 'mjpeg') { return '' // Don't load MJPEG when in H264 mode @@ -1186,6 +1307,9 @@ const mjpegUrl = computed(() => { if (mjpegTimestamp.value === 0) { return '' // Don't load until refreshVideo() is called } + if (streamSignalState.value !== 'ok') { + return '' // Backend is offline; let the overlay own the viewport + } return `${streamApi.getMjpegUrl(myClientId)}&t=${mjpegTimestamp.value}` }) @@ -1491,21 +1615,27 @@ watch(() => webrtc.state.value, (newState, oldState) => { webrtcReconnectTimeout = null } - if (shouldSuppressAutoReconnect()) { - return - } - - // Update stream online status based on WebRTC connection state + // Run before `shouldSuppressAutoReconnect()` so `device_busy` / `videoRestarting` + // never blocks clearing the loading overlay when ICE becomes connected. if (videoMode.value !== 'mjpeg') { if (newState === 'connected') { systemStore.setStreamOnline(true) webrtcReconnectFailures = 0 + if (videoLoading.value) { + void rebindWebRTCVideo().then(() => { + videoLoading.value = false + }) + } } else if (newState === 'disconnected' || newState === 'failed') { // Don't immediately set offline - wait for potential reconnect // The device_info event will eventually sync the correct state } } + if (shouldSuppressAutoReconnect()) { + return + } + // Auto-reconnect when disconnected (but was previously connected) if (newState === 'disconnected' && oldState === 'connected' && videoMode.value !== 'mjpeg') { webrtcReconnectTimeout = setTimeout(async () => { @@ -2584,6 +2714,50 @@ onUnmounted(() => { + + +
+        <!-- 4-state signal overlay (no_signal / device_lost / device_busy);
+             wrapper markup reconstructed, element classes illustrative -->
+        <div v-if="showSignalOverlay" class="signal-overlay" :data-tone="signalOverlayInfo.tone">
+          <div class="signal-overlay-title">
+            {{ signalOverlayInfo.title }}
+          </div>
+          <div class="signal-overlay-detail">
+            {{ signalOverlayInfo.detail }}
+          </div>
+          <div v-if="signalOverlayInfo.hint" class="signal-overlay-hint">
+            {{ signalOverlayInfo.hint }}
+          </div>
+        </div>
}> usb_bus: string | null + has_signal: boolean } interface AudioDeviceInfo { @@ -164,6 +166,29 @@ const passwordStrengthColor = computed(() => { return colors[passwordStrength.value] || colors[0] }) +// Whether the selected video device currently has an HDMI signal +const selectedDeviceHasSignal = computed(() => { + const device = devices.value.video.find((d) => d.path === videoDevice.value) + return device?.has_signal ?? true +}) + +const refreshingDevices = ref(false) + +async function refreshDeviceList() { + refreshingDevices.value = true + try { + const result = await configApi.listDevices() + devices.value = result + if (result.extensions) { + ttydAvailable.value = result.extensions.ttyd_available + } + } catch { + // keep current list + } finally { + refreshingDevices.value = false + } +} + // Computed: available formats for selected video device const availableFormats = computed(() => { const device = devices.value.video.find((d) => d.path === videoDevice.value) @@ -735,6 +760,14 @@ const stepIcons = [User, Video, Keyboard, Puzzle]
+          <div v-if="!selectedDeviceHasSignal" class="no-signal-warning">
+            <!-- warning markup reconstructed; element classes illustrative -->
+            <span>{{ t('setup.noSignalDetected') }}</span>
+            <button type="button" :disabled="refreshingDevices" @click="refreshDeviceList">
+              {{ t('setup.refreshDevices') }}
+            </button>
+          </div>