refactor(video): restore v4l2r and remove temporary debug logs

This commit is contained in:
a15355447898a
2026-03-01 01:40:28 +08:00
parent 4f2fb534a4
commit b74659dcd4
12 changed files with 799 additions and 802 deletions

View File

@@ -66,7 +66,7 @@ clap = { version = "4", features = ["derive"] }
time = "0.3" time = "0.3"
# Video capture (V4L2) # Video capture (V4L2)
v4l = "0.14" v4l2r = "0.0.7"
# JPEG encoding (libjpeg-turbo, SIMD accelerated) # JPEG encoding (libjpeg-turbo, SIMD accelerated)
turbojpeg = "1.3" turbojpeg = "1.3"

View File

@@ -15,18 +15,16 @@
//! //!
//! Note: Audio WebSocket is handled separately by audio_ws.rs (/api/ws/audio) //! Note: Audio WebSocket is handled separately by audio_ws.rs (/api/ws/audio)
use crate::utils::LogThrottler;
use crate::video::v4l2r_capture::V4l2rCaptureStream;
use std::collections::HashMap;
use std::io; use std::io;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration;
use tokio::sync::{Mutex, RwLock}; use tokio::sync::{Mutex, RwLock};
use tracing::{error, info, warn}; use tracing::{error, info, warn};
use v4l::buffer::Type as BufferType;
use v4l::io::traits::CaptureStream;
use v4l::prelude::*;
use v4l::video::Capture;
use v4l::video::capture::Parameters;
use v4l::Format;
use crate::audio::AudioController; use crate::audio::AudioController;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
@@ -491,8 +489,7 @@ impl MjpegStreamer {
} }
}; };
let mut device_opt: Option<Device> = None; let mut stream_opt: Option<V4l2rCaptureStream> = None;
let mut format_opt: Option<Format> = None;
let mut last_error: Option<String> = None; let mut last_error: Option<String> = None;
for attempt in 0..MAX_RETRIES { for attempt in 0..MAX_RETRIES {
@@ -501,8 +498,18 @@ impl MjpegStreamer {
return; return;
} }
let device = match Device::with_path(&device_path) { match V4l2rCaptureStream::open(
Ok(d) => d, &device_path,
config.resolution,
config.format,
config.fps,
4,
Duration::from_secs(2),
) {
Ok(stream) => {
stream_opt = Some(stream);
break;
}
Err(e) => { Err(e) => {
let err_str = e.to_string(); let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") { if err_str.contains("busy") || err_str.contains("resource") {
@@ -519,42 +526,12 @@ impl MjpegStreamer {
last_error = Some(err_str); last_error = Some(err_str);
break; break;
} }
};
let requested = Format::new(
config.resolution.width,
config.resolution.height,
config.format.to_fourcc(),
);
match device.set_format(&requested) {
Ok(actual) => {
device_opt = Some(device);
format_opt = Some(actual);
break;
}
Err(e) => {
let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") {
warn!(
"Device busy on set_format attempt {}/{}, retrying in {}ms...",
attempt + 1,
MAX_RETRIES,
RETRY_DELAY_MS
);
std::thread::sleep(std::time::Duration::from_millis(RETRY_DELAY_MS));
last_error = Some(err_str);
continue;
}
last_error = Some(err_str);
break;
}
} }
} }
let (device, actual_format) = match (device_opt, format_opt) { let mut stream = match stream_opt {
(Some(d), Some(f)) => (d, f), Some(stream) => stream,
_ => { None => {
error!( error!(
"Failed to open device {:?}: {}", "Failed to open device {:?}: {}",
device_path, device_path,
@@ -567,40 +544,36 @@ impl MjpegStreamer {
} }
}; };
let resolution = stream.resolution();
let pixel_format = stream.format();
let stride = stream.stride();
info!( info!(
"Capture format: {}x{} {:?} stride={}", "Capture format: {}x{} {:?} stride={}",
actual_format.width, actual_format.height, actual_format.fourcc, actual_format.stride resolution.width, resolution.height, pixel_format, stride
); );
let resolution = Resolution::new(actual_format.width, actual_format.height);
let pixel_format =
PixelFormat::from_fourcc(actual_format.fourcc).unwrap_or(config.format);
if config.fps > 0 {
if let Err(e) = device.set_params(&Parameters::with_fps(config.fps)) {
warn!("Failed to set hardware FPS: {}", e);
}
}
let mut stream = match MmapStream::with_buffers(&device, BufferType::VideoCapture, 4) {
Ok(s) => s,
Err(e) => {
error!("Failed to create capture stream: {}", e);
set_state(MjpegStreamerState::Error);
self.mjpeg_handler.set_offline();
self.direct_active.store(false, Ordering::SeqCst);
return;
}
};
let buffer_pool = Arc::new(FrameBufferPool::new(8)); let buffer_pool = Arc::new(FrameBufferPool::new(8));
let mut signal_present = true; let mut signal_present = true;
let mut sequence: u64 = 0;
let mut validate_counter: u64 = 0; let mut validate_counter: u64 = 0;
let capture_error_throttler = LogThrottler::with_secs(5);
let mut suppressed_capture_errors: HashMap<String, u64> = HashMap::new();
let classify_capture_error = |err: &std::io::Error| -> String {
let message = err.to_string();
if message.contains("dqbuf failed") && message.contains("EINVAL") {
"capture_dqbuf_einval".to_string()
} else if message.contains("dqbuf failed") {
"capture_dqbuf".to_string()
} else {
format!("capture_{:?}", err.kind())
}
};
while !self.direct_stop.load(Ordering::Relaxed) { while !self.direct_stop.load(Ordering::Relaxed) {
let (buf, meta) = match stream.next() { let mut owned = buffer_pool.take(MIN_CAPTURE_FRAME_SIZE);
Ok(frame_data) => frame_data, let meta = match stream.next_into(&mut owned) {
Ok(meta) => meta,
Err(e) => { Err(e) => {
if e.kind() == io::ErrorKind::TimedOut { if e.kind() == io::ErrorKind::TimedOut {
if signal_present { if signal_present {
@@ -628,35 +601,43 @@ impl MjpegStreamer {
return; return;
} }
error!("Capture error: {}", e); let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) {
let suppressed = suppressed_capture_errors.remove(&key).unwrap_or(0);
if suppressed > 0 {
error!("Capture error: {} (suppressed {} repeats)", e, suppressed);
} else {
error!("Capture error: {}", e);
}
} else {
let counter = suppressed_capture_errors.entry(key).or_insert(0);
*counter = counter.saturating_add(1);
}
continue; continue;
} }
}; };
let frame_size = meta.bytesused as usize; let frame_size = meta.bytes_used;
if frame_size < MIN_CAPTURE_FRAME_SIZE { if frame_size < MIN_CAPTURE_FRAME_SIZE {
continue; continue;
} }
validate_counter = validate_counter.wrapping_add(1); validate_counter = validate_counter.wrapping_add(1);
if pixel_format.is_compressed() if pixel_format.is_compressed()
&& validate_counter % JPEG_VALIDATE_INTERVAL == 0 && validate_counter.is_multiple_of(JPEG_VALIDATE_INTERVAL)
&& !VideoFrame::is_valid_jpeg_bytes(&buf[..frame_size]) && !VideoFrame::is_valid_jpeg_bytes(&owned[..frame_size])
{ {
continue; continue;
} }
let mut owned = buffer_pool.take(frame_size); owned.truncate(frame_size);
owned.resize(frame_size, 0);
owned[..frame_size].copy_from_slice(&buf[..frame_size]);
let frame = VideoFrame::from_pooled( let frame = VideoFrame::from_pooled(
Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))), Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))),
resolution, resolution,
pixel_format, pixel_format,
actual_format.stride, stride,
sequence, meta.sequence,
); );
sequence = sequence.wrapping_add(1);
if !signal_present { if !signal_present {
signal_present = true; signal_present = true;

View File

@@ -2,24 +2,21 @@
//! //!
//! Provides async video capture using memory-mapped buffers. //! Provides async video capture using memory-mapped buffers.
use bytes::Bytes;
use std::collections::HashMap;
use std::io; use std::io;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use bytes::Bytes;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use tokio::sync::{watch, Mutex}; use tokio::sync::{watch, Mutex};
use tracing::{debug, error, info, warn}; use tracing::{debug, error, info, warn};
use v4l::buffer::Type as BufferType;
use v4l::io::traits::CaptureStream;
use v4l::prelude::*;
use v4l::video::capture::Parameters;
use v4l::video::Capture;
use v4l::Format;
use super::format::{PixelFormat, Resolution}; use super::format::{PixelFormat, Resolution};
use super::frame::VideoFrame; use super::frame::VideoFrame;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::utils::LogThrottler;
use crate::video::v4l2r_capture::V4l2rCaptureStream;
/// Default number of capture buffers (reduced from 4 to 2 for lower latency) /// Default number of capture buffers (reduced from 4 to 2 for lower latency)
const DEFAULT_BUFFER_COUNT: u32 = 2; const DEFAULT_BUFFER_COUNT: u32 = 2;
@@ -280,9 +277,15 @@ fn run_capture(
return Ok(()); return Ok(());
} }
// Open device let stream = match V4l2rCaptureStream::open(
let device = match Device::with_path(&config.device_path) { &config.device_path,
Ok(d) => d, config.resolution,
config.format,
config.fps,
config.buffer_count,
config.timeout,
) {
Ok(stream) => stream,
Err(e) => { Err(e) => {
let err_str = e.to_string(); let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") { if err_str.contains("busy") || err_str.contains("resource") {
@@ -306,34 +309,7 @@ fn run_capture(
} }
}; };
// Set format return run_capture_inner(config, state, stats, stop_flag, stream);
let format = Format::new(
config.resolution.width,
config.resolution.height,
config.format.to_fourcc(),
);
let actual_format = match device.set_format(&format) {
Ok(f) => f,
Err(e) => {
let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") {
warn!(
"Device busy on set_format attempt {}/{}, retrying in {}ms...",
attempt + 1,
MAX_RETRIES,
RETRY_DELAY_MS
);
std::thread::sleep(Duration::from_millis(RETRY_DELAY_MS));
last_error = Some(AppError::VideoError(format!("Failed to set format: {}", e)));
continue;
}
return Err(AppError::VideoError(format!("Failed to set format: {}", e)));
}
};
// Device opened and format set successfully - proceed with capture
return run_capture_inner(config, state, stats, stop_flag, device, actual_format);
} }
// All retries exhausted // All retries exhausted
@@ -348,48 +324,16 @@ fn run_capture_inner(
state: &watch::Sender<CaptureState>, state: &watch::Sender<CaptureState>,
stats: &Arc<Mutex<CaptureStats>>, stats: &Arc<Mutex<CaptureStats>>,
stop_flag: &AtomicBool, stop_flag: &AtomicBool,
device: Device, mut stream: V4l2rCaptureStream,
actual_format: Format,
) -> Result<()> { ) -> Result<()> {
let resolution = stream.resolution();
let pixel_format = stream.format();
let stride = stream.stride();
info!( info!(
"Capture format: {}x{} {:?} stride={}", "Capture format: {}x{} {:?} stride={}",
actual_format.width, actual_format.height, actual_format.fourcc, actual_format.stride resolution.width, resolution.height, pixel_format, stride
); );
// Try to set hardware FPS (V4L2 VIDIOC_S_PARM)
if config.fps > 0 {
match device.set_params(&Parameters::with_fps(config.fps)) {
Ok(actual_params) => {
// Extract actual FPS from returned interval (numerator/denominator)
let actual_hw_fps = if actual_params.interval.numerator > 0 {
actual_params.interval.denominator / actual_params.interval.numerator
} else {
0
};
if actual_hw_fps == config.fps {
info!("Hardware FPS set successfully: {} fps", actual_hw_fps);
} else if actual_hw_fps > 0 {
info!(
"Hardware FPS coerced: requested {} fps, got {} fps",
config.fps, actual_hw_fps
);
} else {
warn!("Hardware FPS setting returned invalid interval");
}
}
Err(e) => {
warn!("Failed to set hardware FPS: {}", e);
}
}
}
// Create stream with mmap buffers
let mut stream =
MmapStream::with_buffers(&device, BufferType::VideoCapture, config.buffer_count)
.map_err(|e| AppError::VideoError(format!("Failed to create stream: {}", e)))?;
let _ = state.send(CaptureState::Running); let _ = state.send(CaptureState::Running);
info!("Capture started"); info!("Capture started");
@@ -397,12 +341,25 @@ fn run_capture_inner(
let mut fps_frame_count = 0u64; let mut fps_frame_count = 0u64;
let mut fps_window_start = Instant::now(); let mut fps_window_start = Instant::now();
let fps_window_duration = Duration::from_secs(1); let fps_window_duration = Duration::from_secs(1);
let mut scratch = Vec::new();
let capture_error_throttler = LogThrottler::with_secs(5);
let mut suppressed_capture_errors: HashMap<String, u64> = HashMap::new();
let classify_capture_error = |err: &std::io::Error| -> String {
let message = err.to_string();
if message.contains("dqbuf failed") && message.contains("EINVAL") {
"capture_dqbuf_einval".to_string()
} else if message.contains("dqbuf failed") {
"capture_dqbuf".to_string()
} else {
format!("capture_{:?}", err.kind())
}
};
// Main capture loop // Main capture loop
while !stop_flag.load(Ordering::Relaxed) { while !stop_flag.load(Ordering::Relaxed) {
// Try to capture a frame let meta = match stream.next_into(&mut scratch) {
let (_buf, meta) = match stream.next() { Ok(meta) => meta,
Ok(frame_data) => frame_data,
Err(e) => { Err(e) => {
if e.kind() == io::ErrorKind::TimedOut { if e.kind() == io::ErrorKind::TimedOut {
warn!("Capture timeout - no signal?"); warn!("Capture timeout - no signal?");
@@ -432,19 +389,30 @@ fn run_capture_inner(
}); });
} }
error!("Capture error: {}", e); let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) {
let suppressed = suppressed_capture_errors.remove(&key).unwrap_or(0);
if suppressed > 0 {
error!("Capture error: {} (suppressed {} repeats)", e, suppressed);
} else {
error!("Capture error: {}", e);
}
} else {
let counter = suppressed_capture_errors.entry(key).or_insert(0);
*counter = counter.saturating_add(1);
}
continue; continue;
} }
}; };
// Use actual bytes used, not buffer size // Use actual bytes used, not buffer size
let frame_size = meta.bytesused as usize; let frame_size = meta.bytes_used;
// Validate frame // Validate frame
if frame_size < MIN_FRAME_SIZE { if frame_size < MIN_FRAME_SIZE {
debug!( debug!(
"Dropping small frame: {} bytes (bytesused={})", "Dropping small frame: {} bytes (bytesused={})",
frame_size, meta.bytesused frame_size, meta.bytes_used
); );
continue; continue;
} }
@@ -470,6 +438,10 @@ fn run_capture_inner(
s.current_fps = (fps_frame_count as f32 / elapsed.as_secs_f32()).max(0.0); s.current_fps = (fps_frame_count as f32 / elapsed.as_secs_f32()).max(0.0);
} }
} }
if *state.borrow() == CaptureState::NoSignal {
let _ = state.send(CaptureState::Running);
}
} }
info!("Capture stopped"); info!("Capture stopped");
@@ -525,38 +497,37 @@ fn grab_single_frame(
resolution: Resolution, resolution: Resolution,
format: PixelFormat, format: PixelFormat,
) -> Result<VideoFrame> { ) -> Result<VideoFrame> {
let device = Device::with_path(device_path) let mut stream = V4l2rCaptureStream::open(
.map_err(|e| AppError::VideoError(format!("Failed to open device: {}", e)))?; device_path,
resolution,
let fmt = Format::new(resolution.width, resolution.height, format.to_fourcc()); format,
let actual = device 0,
.set_format(&fmt) 2,
.map_err(|e| AppError::VideoError(format!("Failed to set format: {}", e)))?; Duration::from_secs(DEFAULT_TIMEOUT),
)?;
let mut stream = MmapStream::with_buffers(&device, BufferType::VideoCapture, 2) let actual_resolution = stream.resolution();
.map_err(|e| AppError::VideoError(format!("Failed to create stream: {}", e)))?; let actual_format = stream.format();
let actual_stride = stream.stride();
let mut scratch = Vec::new();
// Try to get a valid frame (skip first few which might be bad) // Try to get a valid frame (skip first few which might be bad)
for attempt in 0..5 { for attempt in 0..5 {
match stream.next() { match stream.next_into(&mut scratch) {
Ok((buf, _meta)) => { Ok(meta) => {
if buf.len() >= MIN_FRAME_SIZE { if meta.bytes_used >= MIN_FRAME_SIZE {
let actual_format = PixelFormat::from_fourcc(actual.fourcc).unwrap_or(format);
return Ok(VideoFrame::new( return Ok(VideoFrame::new(
Bytes::copy_from_slice(buf), Bytes::copy_from_slice(&scratch[..meta.bytes_used]),
Resolution::new(actual.width, actual.height), actual_resolution,
actual_format, actual_format,
actual.stride, actual_stride,
0, 0,
)); ));
} }
} }
Err(e) => { Err(e) if attempt == 4 => {
if attempt == 4 { return Err(AppError::VideoError(format!("Failed to grab frame: {}", e)));
return Err(AppError::VideoError(format!("Failed to grab frame: {}", e)));
}
} }
Err(_) => {}
} }
} }

View File

@@ -1,15 +1,17 @@
//! V4L2 device enumeration and capability query //! V4L2 device enumeration and capability query
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fs::File;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::mpsc; use std::sync::mpsc;
use std::time::Duration; use std::time::Duration;
use tracing::{debug, info, warn}; use tracing::{debug, info, warn};
use v4l::capability::Flags; use v4l2r::bindings::{v4l2_frmivalenum, v4l2_frmsizeenum};
use v4l::prelude::*; use v4l2r::ioctl::{
use v4l::video::Capture; self, Capabilities, Capability as V4l2rCapability, FormatIterator, FrmIvalTypes, FrmSizeTypes,
use v4l::Format; };
use v4l::FourCC; use v4l2r::nix::errno::Errno;
use v4l2r::{Format as V4l2rFormat, QueueType};
use super::format::{PixelFormat, Resolution}; use super::format::{PixelFormat, Resolution};
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
@@ -81,7 +83,7 @@ pub struct DeviceCapabilities {
/// Wrapper around a V4L2 video device /// Wrapper around a V4L2 video device
pub struct VideoDevice { pub struct VideoDevice {
pub path: PathBuf, pub path: PathBuf,
device: Device, fd: File,
} }
impl VideoDevice { impl VideoDevice {
@@ -90,42 +92,55 @@ impl VideoDevice {
let path = path.as_ref().to_path_buf(); let path = path.as_ref().to_path_buf();
debug!("Opening video device: {:?}", path); debug!("Opening video device: {:?}", path);
let device = Device::with_path(&path).map_err(|e| { let fd = File::options()
.read(true)
.write(true)
.open(&path)
.map_err(|e| {
AppError::VideoError(format!("Failed to open device {:?}: {}", path, e))
})?;
Ok(Self { path, fd })
}
/// Open a video device read-only (for probing/enumeration)
pub fn open_readonly(path: impl AsRef<Path>) -> Result<Self> {
let path = path.as_ref().to_path_buf();
debug!("Opening video device (read-only): {:?}", path);
let fd = File::options().read(true).open(&path).map_err(|e| {
AppError::VideoError(format!("Failed to open device {:?}: {}", path, e)) AppError::VideoError(format!("Failed to open device {:?}: {}", path, e))
})?; })?;
Ok(Self { path, device }) Ok(Self { path, fd })
} }
/// Get device capabilities /// Get device capabilities
pub fn capabilities(&self) -> Result<DeviceCapabilities> { pub fn capabilities(&self) -> Result<DeviceCapabilities> {
let caps = self let caps: V4l2rCapability = ioctl::querycap(&self.fd)
.device
.query_caps()
.map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?;
let flags = caps.device_caps();
Ok(DeviceCapabilities { Ok(DeviceCapabilities {
video_capture: caps.capabilities.contains(Flags::VIDEO_CAPTURE), video_capture: flags.contains(Capabilities::VIDEO_CAPTURE),
video_capture_mplane: caps.capabilities.contains(Flags::VIDEO_CAPTURE_MPLANE), video_capture_mplane: flags.contains(Capabilities::VIDEO_CAPTURE_MPLANE),
video_output: caps.capabilities.contains(Flags::VIDEO_OUTPUT), video_output: flags.contains(Capabilities::VIDEO_OUTPUT),
streaming: caps.capabilities.contains(Flags::STREAMING), streaming: flags.contains(Capabilities::STREAMING),
read_write: caps.capabilities.contains(Flags::READ_WRITE), read_write: flags.contains(Capabilities::READWRITE),
}) })
} }
/// Get detailed device information /// Get detailed device information
pub fn info(&self) -> Result<VideoDeviceInfo> { pub fn info(&self) -> Result<VideoDeviceInfo> {
let caps = self let caps: V4l2rCapability = ioctl::querycap(&self.fd)
.device
.query_caps()
.map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?;
let flags = caps.device_caps();
let capabilities = DeviceCapabilities { let capabilities = DeviceCapabilities {
video_capture: caps.capabilities.contains(Flags::VIDEO_CAPTURE), video_capture: flags.contains(Capabilities::VIDEO_CAPTURE),
video_capture_mplane: caps.capabilities.contains(Flags::VIDEO_CAPTURE_MPLANE), video_capture_mplane: flags.contains(Capabilities::VIDEO_CAPTURE_MPLANE),
video_output: caps.capabilities.contains(Flags::VIDEO_OUTPUT), video_output: flags.contains(Capabilities::VIDEO_OUTPUT),
streaming: caps.capabilities.contains(Flags::STREAMING), streaming: flags.contains(Capabilities::STREAMING),
read_write: caps.capabilities.contains(Flags::READ_WRITE), read_write: flags.contains(Capabilities::READWRITE),
}; };
let formats = self.enumerate_formats()?; let formats = self.enumerate_formats()?;
@@ -141,7 +156,7 @@ impl VideoDevice {
path: self.path.clone(), path: self.path.clone(),
name: caps.card.clone(), name: caps.card.clone(),
driver: caps.driver.clone(), driver: caps.driver.clone(),
bus_info: caps.bus.clone(), bus_info: caps.bus_info.clone(),
card: caps.card, card: caps.card,
formats, formats,
capabilities, capabilities,
@@ -154,16 +169,13 @@ impl VideoDevice {
pub fn enumerate_formats(&self) -> Result<Vec<FormatInfo>> { pub fn enumerate_formats(&self) -> Result<Vec<FormatInfo>> {
let mut formats = Vec::new(); let mut formats = Vec::new();
// Get supported formats let queue = self.capture_queue_type()?;
let format_descs = self let format_descs = FormatIterator::new(&self.fd, queue);
.device
.enum_formats()
.map_err(|e| AppError::VideoError(format!("Failed to enumerate formats: {}", e)))?;
for desc in format_descs { for desc in format_descs {
// Try to convert FourCC to our PixelFormat // Try to convert FourCC to our PixelFormat
if let Some(format) = PixelFormat::from_fourcc(desc.fourcc) { if let Some(format) = PixelFormat::from_v4l2r(desc.pixelformat) {
let resolutions = self.enumerate_resolutions(desc.fourcc)?; let resolutions = self.enumerate_resolutions(desc.pixelformat)?;
formats.push(FormatInfo { formats.push(FormatInfo {
format, format,
@@ -173,7 +185,7 @@ impl VideoDevice {
} else { } else {
debug!( debug!(
"Skipping unsupported format: {:?} ({})", "Skipping unsupported format: {:?} ({})",
desc.fourcc, desc.description desc.pixelformat, desc.description
); );
} }
} }
@@ -185,46 +197,55 @@ impl VideoDevice {
} }
/// Enumerate resolutions for a specific format /// Enumerate resolutions for a specific format
fn enumerate_resolutions(&self, fourcc: FourCC) -> Result<Vec<ResolutionInfo>> { fn enumerate_resolutions(&self, fourcc: v4l2r::PixelFormat) -> Result<Vec<ResolutionInfo>> {
let mut resolutions = Vec::new(); let mut resolutions = Vec::new();
// Try to enumerate frame sizes let mut index = 0u32;
match self.device.enum_framesizes(fourcc) { loop {
Ok(sizes) => { match ioctl::enum_frame_sizes::<v4l2_frmsizeenum>(&self.fd, index, fourcc) {
for size in sizes { Ok(size) => {
match size.size { if let Some(size) = size.size() {
v4l::framesize::FrameSizeEnum::Discrete(d) => { match size {
let fps = self FrmSizeTypes::Discrete(d) => {
.enumerate_fps(fourcc, d.width, d.height) let fps = self
.unwrap_or_default(); .enumerate_fps(fourcc, d.width, d.height)
resolutions.push(ResolutionInfo::new(d.width, d.height, fps)); .unwrap_or_default();
} resolutions.push(ResolutionInfo::new(d.width, d.height, fps));
v4l::framesize::FrameSizeEnum::Stepwise(s) => { }
// For stepwise, add some common resolutions FrmSizeTypes::StepWise(s) => {
for res in [ for res in [
Resolution::VGA, Resolution::VGA,
Resolution::HD720, Resolution::HD720,
Resolution::HD1080, Resolution::HD1080,
Resolution::UHD4K, Resolution::UHD4K,
] { ] {
if res.width >= s.min_width if res.width >= s.min_width
&& res.width <= s.max_width && res.width <= s.max_width
&& res.height >= s.min_height && res.height >= s.min_height
&& res.height <= s.max_height && res.height <= s.max_height
{ {
let fps = self let fps = self
.enumerate_fps(fourcc, res.width, res.height) .enumerate_fps(fourcc, res.width, res.height)
.unwrap_or_default(); .unwrap_or_default();
resolutions resolutions
.push(ResolutionInfo::new(res.width, res.height, fps)); .push(ResolutionInfo::new(res.width, res.height, fps));
}
} }
} }
} }
} }
index += 1;
}
Err(e) => {
let is_einval = matches!(
e,
v4l2r::ioctl::FrameSizeError::IoctlError(err) if err == Errno::EINVAL
);
if !is_einval {
debug!("Failed to enumerate frame sizes for {:?}: {}", fourcc, e);
}
break;
} }
}
Err(e) => {
debug!("Failed to enumerate frame sizes for {:?}: {}", fourcc, e);
} }
} }
@@ -236,36 +257,55 @@ impl VideoDevice {
} }
/// Enumerate FPS for a specific resolution /// Enumerate FPS for a specific resolution
fn enumerate_fps(&self, fourcc: FourCC, width: u32, height: u32) -> Result<Vec<u32>> { fn enumerate_fps(
&self,
fourcc: v4l2r::PixelFormat,
width: u32,
height: u32,
) -> Result<Vec<u32>> {
let mut fps_list = Vec::new(); let mut fps_list = Vec::new();
match self.device.enum_frameintervals(fourcc, width, height) { let mut index = 0u32;
Ok(intervals) => { loop {
for interval in intervals { match ioctl::enum_frame_intervals::<v4l2_frmivalenum>(
match interval.interval { &self.fd, index, fourcc, width, height,
v4l::frameinterval::FrameIntervalEnum::Discrete(fraction) => { ) {
if fraction.numerator > 0 { Ok(interval) => {
let fps = fraction.denominator / fraction.numerator; if let Some(interval) = interval.intervals() {
fps_list.push(fps); match interval {
FrmIvalTypes::Discrete(fraction) => {
if fraction.numerator > 0 {
let fps = fraction.denominator / fraction.numerator;
fps_list.push(fps);
}
} }
} FrmIvalTypes::StepWise(step) => {
v4l::frameinterval::FrameIntervalEnum::Stepwise(step) => { if step.max.numerator > 0 {
// Just pick max/min/step let min_fps = step.max.denominator / step.max.numerator;
if step.max.numerator > 0 { let max_fps = step.min.denominator / step.min.numerator;
let min_fps = step.max.denominator / step.max.numerator; fps_list.push(min_fps);
let max_fps = step.min.denominator / step.min.numerator; if max_fps != min_fps {
fps_list.push(min_fps); fps_list.push(max_fps);
if max_fps != min_fps { }
fps_list.push(max_fps);
} }
} }
} }
} }
index += 1;
}
Err(e) => {
let is_einval = matches!(
e,
v4l2r::ioctl::FrameIntervalsError::IoctlError(err) if err == Errno::EINVAL
);
if !is_einval {
debug!(
"Failed to enumerate frame intervals for {:?} {}x{}: {}",
fourcc, width, height, e
);
}
break;
} }
}
Err(_) => {
// If enumeration fails, assume 30fps
fps_list.push(30);
} }
} }
@@ -275,20 +315,26 @@ impl VideoDevice {
} }
/// Get current format /// Get current format
pub fn get_format(&self) -> Result<Format> { pub fn get_format(&self) -> Result<V4l2rFormat> {
self.device let queue = self.capture_queue_type()?;
.format() ioctl::g_fmt(&self.fd, queue)
.map_err(|e| AppError::VideoError(format!("Failed to get format: {}", e))) .map_err(|e| AppError::VideoError(format!("Failed to get format: {}", e)))
} }
/// Set capture format /// Set capture format
pub fn set_format(&self, width: u32, height: u32, format: PixelFormat) -> Result<Format> { pub fn set_format(&self, width: u32, height: u32, format: PixelFormat) -> Result<V4l2rFormat> {
let fmt = Format::new(width, height, format.to_fourcc()); let queue = self.capture_queue_type()?;
let mut fmt: V4l2rFormat = ioctl::g_fmt(&self.fd, queue)
.map_err(|e| AppError::VideoError(format!("Failed to get format: {}", e)))?;
fmt.width = width;
fmt.height = height;
fmt.pixelformat = format.to_v4l2r();
// Request the format let mut fd = self
let actual = self .fd
.device .try_clone()
.set_format(&fmt) .map_err(|e| AppError::VideoError(format!("Failed to clone device fd: {}", e)))?;
let actual: V4l2rFormat = ioctl::s_fmt(&mut fd, (queue, &fmt))
.map_err(|e| AppError::VideoError(format!("Failed to set format: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Failed to set format: {}", e)))?;
if actual.width != width || actual.height != height { if actual.width != width || actual.height != height {
@@ -364,7 +410,7 @@ impl VideoDevice {
.max() .max()
.unwrap_or(0); .unwrap_or(0);
priority += (max_resolution / 100000) as u32; priority += max_resolution / 100000;
// Known good drivers get bonus // Known good drivers get bonus
let good_drivers = ["uvcvideo", "tc358743"]; let good_drivers = ["uvcvideo", "tc358743"];
@@ -376,8 +422,21 @@ impl VideoDevice {
} }
/// Get the inner device reference (for advanced usage) /// Get the inner device reference (for advanced usage)
pub fn inner(&self) -> &Device { pub fn inner(&self) -> &File {
&self.device &self.fd
}
fn capture_queue_type(&self) -> Result<QueueType> {
let caps = self.capabilities()?;
if caps.video_capture {
Ok(QueueType::VideoCapture)
} else if caps.video_capture_mplane {
Ok(QueueType::VideoCaptureMplane)
} else {
Err(AppError::VideoError(
"Device does not expose a capture queue".to_string(),
))
}
} }
} }
@@ -446,7 +505,7 @@ fn probe_device_with_timeout(path: &Path, timeout: Duration) -> Option<VideoDevi
std::thread::spawn(move || { std::thread::spawn(move || {
let result = (|| -> Result<VideoDeviceInfo> { let result = (|| -> Result<VideoDeviceInfo> {
let device = VideoDevice::open(&path_for_thread)?; let device = VideoDevice::open_readonly(&path_for_thread)?;
device.info() device.info()
})(); })();
let _ = tx.send(result); let _ = tx.send(result);
@@ -503,15 +562,7 @@ fn sysfs_maybe_capture(path: &Path) -> bool {
} }
let skip_hints = [ let skip_hints = [
"codec", "codec", "decoder", "encoder", "isp", "mem2mem", "m2m", "vbi", "radio", "metadata",
"decoder",
"encoder",
"isp",
"mem2mem",
"m2m",
"vbi",
"radio",
"metadata",
"output", "output",
]; ];
if skip_hints.iter().any(|hint| sysfs_name.contains(hint)) && !maybe_capture { if skip_hints.iter().any(|hint| sysfs_name.contains(hint)) && !maybe_capture {

View File

@@ -2,7 +2,7 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt; use std::fmt;
use v4l::format::fourcc; use v4l2r::PixelFormat as V4l2rPixelFormat;
/// Supported pixel formats /// Supported pixel formats
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
@@ -41,30 +41,29 @@ pub enum PixelFormat {
} }
impl PixelFormat { impl PixelFormat {
/// Convert to V4L2 FourCC /// Convert to V4L2 FourCC bytes
pub fn to_fourcc(&self) -> fourcc::FourCC { pub fn to_fourcc(&self) -> [u8; 4] {
match self { match self {
PixelFormat::Mjpeg => fourcc::FourCC::new(b"MJPG"), PixelFormat::Mjpeg => *b"MJPG",
PixelFormat::Jpeg => fourcc::FourCC::new(b"JPEG"), PixelFormat::Jpeg => *b"JPEG",
PixelFormat::Yuyv => fourcc::FourCC::new(b"YUYV"), PixelFormat::Yuyv => *b"YUYV",
PixelFormat::Yvyu => fourcc::FourCC::new(b"YVYU"), PixelFormat::Yvyu => *b"YVYU",
PixelFormat::Uyvy => fourcc::FourCC::new(b"UYVY"), PixelFormat::Uyvy => *b"UYVY",
PixelFormat::Nv12 => fourcc::FourCC::new(b"NV12"), PixelFormat::Nv12 => *b"NV12",
PixelFormat::Nv21 => fourcc::FourCC::new(b"NV21"), PixelFormat::Nv21 => *b"NV21",
PixelFormat::Nv16 => fourcc::FourCC::new(b"NV16"), PixelFormat::Nv16 => *b"NV16",
PixelFormat::Nv24 => fourcc::FourCC::new(b"NV24"), PixelFormat::Nv24 => *b"NV24",
PixelFormat::Yuv420 => fourcc::FourCC::new(b"YU12"), PixelFormat::Yuv420 => *b"YU12",
PixelFormat::Yvu420 => fourcc::FourCC::new(b"YV12"), PixelFormat::Yvu420 => *b"YV12",
PixelFormat::Rgb565 => fourcc::FourCC::new(b"RGBP"), PixelFormat::Rgb565 => *b"RGBP",
PixelFormat::Rgb24 => fourcc::FourCC::new(b"RGB3"), PixelFormat::Rgb24 => *b"RGB3",
PixelFormat::Bgr24 => fourcc::FourCC::new(b"BGR3"), PixelFormat::Bgr24 => *b"BGR3",
PixelFormat::Grey => fourcc::FourCC::new(b"GREY"), PixelFormat::Grey => *b"GREY",
} }
} }
/// Try to convert from V4L2 FourCC /// Try to convert from V4L2 FourCC
pub fn from_fourcc(fourcc: fourcc::FourCC) -> Option<Self> { pub fn from_fourcc(repr: [u8; 4]) -> Option<Self> {
let repr = fourcc.repr;
match &repr { match &repr {
b"MJPG" => Some(PixelFormat::Mjpeg), b"MJPG" => Some(PixelFormat::Mjpeg),
b"JPEG" => Some(PixelFormat::Jpeg), b"JPEG" => Some(PixelFormat::Jpeg),
@@ -85,6 +84,17 @@ impl PixelFormat {
} }
} }
/// Convert to v4l2r PixelFormat
pub fn to_v4l2r(&self) -> V4l2rPixelFormat {
V4l2rPixelFormat::from(&self.to_fourcc())
}
/// Convert from v4l2r PixelFormat
pub fn from_v4l2r(format: V4l2rPixelFormat) -> Option<Self> {
let repr: [u8; 4] = format.into();
Self::from_fourcc(repr)
}
/// Check if format is compressed (JPEG/MJPEG) /// Check if format is compressed (JPEG/MJPEG)
pub fn is_compressed(&self) -> bool { pub fn is_compressed(&self) -> bool {
matches!(self, PixelFormat::Mjpeg | PixelFormat::Jpeg) matches!(self, PixelFormat::Mjpeg | PixelFormat::Jpeg)

View File

@@ -14,6 +14,7 @@ pub mod h264_pipeline;
pub mod shared_video_pipeline; pub mod shared_video_pipeline;
pub mod stream_manager; pub mod stream_manager;
pub mod streamer; pub mod streamer;
pub mod v4l2r_capture;
pub mod video_session; pub mod video_session;
pub use capture::VideoCapturer; pub use capture::VideoCapturer;

View File

@@ -18,6 +18,7 @@
use bytes::Bytes; use bytes::Bytes;
use parking_lot::RwLock as ParkingRwLock; use parking_lot::RwLock as ParkingRwLock;
use std::collections::HashMap;
use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering}; use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
@@ -26,28 +27,17 @@ use tracing::{debug, error, info, trace, warn};
/// Grace period before auto-stopping pipeline when no subscribers (in seconds) /// Grace period before auto-stopping pipeline when no subscribers (in seconds)
const AUTO_STOP_GRACE_PERIOD_SECS: u64 = 3; const AUTO_STOP_GRACE_PERIOD_SECS: u64 = 3;
/// Restart capture stream after this many consecutive timeouts.
const CAPTURE_TIMEOUT_RESTART_THRESHOLD: u32 = 5;
/// Minimum valid frame size for capture /// Minimum valid frame size for capture
const MIN_CAPTURE_FRAME_SIZE: usize = 128; const MIN_CAPTURE_FRAME_SIZE: usize = 128;
/// Validate JPEG header every N frames to reduce overhead /// Validate JPEG header every N frames to reduce overhead
const JPEG_VALIDATE_INTERVAL: u64 = 30; const JPEG_VALIDATE_INTERVAL: u64 = 30;
/// Retry count for capture format configuration when device is busy.
const SET_FORMAT_MAX_RETRIES: usize = 5;
/// Delay between capture format retry attempts.
const SET_FORMAT_RETRY_DELAY_MS: u64 = 100;
/// Low-frequency diagnostic logging interval (in frames).
const PIPELINE_DEBUG_LOG_INTERVAL: u64 = 120;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::utils::LogThrottler;
use crate::video::convert::{Nv12Converter, PixelConverter}; use crate::video::convert::{Nv12Converter, PixelConverter};
use crate::video::decoder::MjpegTurboDecoder; use crate::video::decoder::MjpegTurboDecoder;
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
use hwcodec::ffmpeg_hw::{last_error_message as ffmpeg_hw_last_error, HwMjpegH26xConfig, HwMjpegH26xPipeline};
use v4l::buffer::Type as BufferType;
use v4l::io::traits::CaptureStream;
use v4l::prelude::*;
use v4l::video::Capture;
use v4l::video::capture::Parameters;
use v4l::Format;
use crate::video::encoder::h264::{detect_best_encoder, H264Config, H264Encoder, H264InputFormat}; use crate::video::encoder::h264::{detect_best_encoder, H264Config, H264Encoder, H264InputFormat};
use crate::video::encoder::h265::{ use crate::video::encoder::h265::{
detect_best_h265_encoder, H265Config, H265Encoder, H265InputFormat, detect_best_h265_encoder, H265Config, H265Encoder, H265InputFormat,
@@ -58,6 +48,11 @@ use crate::video::encoder::vp8::{detect_best_vp8_encoder, VP8Config, VP8Encoder}
use crate::video::encoder::vp9::{detect_best_vp9_encoder, VP9Config, VP9Encoder}; use crate::video::encoder::vp9::{detect_best_vp9_encoder, VP9Config, VP9Encoder};
use crate::video::format::{PixelFormat, Resolution}; use crate::video::format::{PixelFormat, Resolution};
use crate::video::frame::{FrameBuffer, FrameBufferPool, VideoFrame}; use crate::video::frame::{FrameBuffer, FrameBufferPool, VideoFrame};
use crate::video::v4l2r_capture::V4l2rCaptureStream;
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
use hwcodec::ffmpeg_hw::{
last_error_message as ffmpeg_hw_last_error, HwMjpegH26xConfig, HwMjpegH26xPipeline,
};
/// Encoded video frame for distribution /// Encoded video frame for distribution
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@@ -517,7 +512,10 @@ impl SharedVideoPipeline {
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))] #[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
if needs_mjpeg_decode if needs_mjpeg_decode
&& is_rkmpp_encoder && is_rkmpp_encoder
&& matches!(config.output_codec, VideoEncoderType::H264 | VideoEncoderType::H265) && matches!(
config.output_codec,
VideoEncoderType::H264 | VideoEncoderType::H265
)
{ {
info!( info!(
"Initializing FFmpeg HW MJPEG->{} pipeline (no fallback)", "Initializing FFmpeg HW MJPEG->{} pipeline (no fallback)",
@@ -534,7 +532,11 @@ impl SharedVideoPipeline {
thread_count: 1, thread_count: 1,
}; };
let pipeline = HwMjpegH26xPipeline::new(hw_config).map_err(|e| { let pipeline = HwMjpegH26xPipeline::new(hw_config).map_err(|e| {
let detail = if e.is_empty() { ffmpeg_hw_last_error() } else { e }; let detail = if e.is_empty() {
ffmpeg_hw_last_error()
} else {
e
};
AppError::VideoError(format!( AppError::VideoError(format!(
"FFmpeg HW MJPEG->{} init failed: {}", "FFmpeg HW MJPEG->{} init failed: {}",
config.output_codec, detail config.output_codec, detail
@@ -908,7 +910,11 @@ impl SharedVideoPipeline {
/// Get subscriber count /// Get subscriber count
pub fn subscriber_count(&self) -> usize { pub fn subscriber_count(&self) -> usize {
self.subscribers.read().iter().filter(|tx| !tx.is_closed()).count() self.subscribers
.read()
.iter()
.filter(|tx| !tx.is_closed())
.count()
} }
/// Report that a receiver has lagged behind /// Report that a receiver has lagged behind
@@ -957,7 +963,11 @@ impl SharedVideoPipeline {
pipeline pipeline
.reconfigure(bitrate_kbps as i32, gop as i32) .reconfigure(bitrate_kbps as i32, gop as i32)
.map_err(|e| { .map_err(|e| {
let detail = if e.is_empty() { ffmpeg_hw_last_error() } else { e }; let detail = if e.is_empty() {
ffmpeg_hw_last_error()
} else {
e
};
AppError::VideoError(format!( AppError::VideoError(format!(
"FFmpeg HW reconfigure failed: {}", "FFmpeg HW reconfigure failed: {}",
detail detail
@@ -1215,8 +1225,6 @@ impl SharedVideoPipeline {
let mut last_fps_time = Instant::now(); let mut last_fps_time = Instant::now();
let mut fps_frame_count: u64 = 0; let mut fps_frame_count: u64 = 0;
let mut last_seq = *frame_seq_rx.borrow(); let mut last_seq = *frame_seq_rx.borrow();
let mut encode_no_output_count: u64 = 0;
let mut no_subscriber_skip_count: u64 = 0;
while pipeline.running_flag.load(Ordering::Acquire) { while pipeline.running_flag.load(Ordering::Acquire) {
if frame_seq_rx.changed().await.is_err() { if frame_seq_rx.changed().await.is_err() {
@@ -1232,24 +1240,9 @@ impl SharedVideoPipeline {
} }
last_seq = seq; last_seq = seq;
let subscriber_count = pipeline.subscriber_count(); if pipeline.subscriber_count() == 0 {
if subscriber_count == 0 {
no_subscriber_skip_count = no_subscriber_skip_count.wrapping_add(1);
if no_subscriber_skip_count % PIPELINE_DEBUG_LOG_INTERVAL == 0 {
info!(
"[Pipeline-Debug] encoder loop skipped {} times because subscriber_count=0",
no_subscriber_skip_count
);
}
continue; continue;
} }
if no_subscriber_skip_count > 0 {
info!(
"[Pipeline-Debug] encoder loop resumed with subscribers after {} empty cycles",
no_subscriber_skip_count
);
no_subscriber_skip_count = 0;
}
while let Ok(cmd) = cmd_rx.try_recv() { while let Ok(cmd) = cmd_rx.try_recv() {
if let Err(e) = pipeline.apply_cmd(&mut encoder_state, cmd) { if let Err(e) = pipeline.apply_cmd(&mut encoder_state, cmd) {
@@ -1268,39 +1261,13 @@ impl SharedVideoPipeline {
match pipeline.encode_frame_sync(&mut encoder_state, &frame, frame_count) { match pipeline.encode_frame_sync(&mut encoder_state, &frame, frame_count) {
Ok(Some(encoded_frame)) => { Ok(Some(encoded_frame)) => {
let encoded_size = encoded_frame.data.len();
let encoded_seq = encoded_frame.sequence;
let encoded_pts = encoded_frame.pts_ms;
let encoded_keyframe = encoded_frame.is_keyframe;
let encoded_arc = Arc::new(encoded_frame); let encoded_arc = Arc::new(encoded_frame);
pipeline.broadcast_encoded(encoded_arc).await; pipeline.broadcast_encoded(encoded_arc).await;
if encoded_keyframe || frame_count % PIPELINE_DEBUG_LOG_INTERVAL == 0 {
info!(
"[Pipeline-Debug] encoded+broadcast codec={} frame_idx={} seq={} size={} keyframe={} pts_ms={} subscribers={}",
encoder_state.codec,
frame_count,
encoded_seq,
encoded_size,
encoded_keyframe,
encoded_pts,
subscriber_count
);
}
frame_count += 1; frame_count += 1;
fps_frame_count += 1; fps_frame_count += 1;
} }
Ok(None) => { Ok(None) => {}
encode_no_output_count = encode_no_output_count.wrapping_add(1);
if encode_no_output_count % PIPELINE_DEBUG_LOG_INTERVAL == 0 {
info!(
"[Pipeline-Debug] encoder produced no output {} times (codec={})",
encode_no_output_count,
encoder_state.codec
);
}
}
Err(e) => { Err(e) => {
error!("Encoding failed: {}", e); error!("Encoding failed: {}", e);
} }
@@ -1328,93 +1295,17 @@ impl SharedVideoPipeline {
let frame_seq_tx = frame_seq_tx.clone(); let frame_seq_tx = frame_seq_tx.clone();
let buffer_pool = buffer_pool.clone(); let buffer_pool = buffer_pool.clone();
std::thread::spawn(move || { std::thread::spawn(move || {
let device = match Device::with_path(&device_path) { let mut stream = match V4l2rCaptureStream::open(
Ok(d) => d, &device_path,
Err(e) => { config.resolution,
error!("Failed to open device {:?}: {}", device_path, e); config.input_format,
let _ = pipeline.running.send(false); config.fps,
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(1);
return;
}
};
let requested_format = Format::new(
config.resolution.width,
config.resolution.height,
config.input_format.to_fourcc(),
);
let mut actual_format_opt = None;
let mut last_set_format_error: Option<String> = None;
for attempt in 0..SET_FORMAT_MAX_RETRIES {
match device.set_format(&requested_format) {
Ok(format) => {
actual_format_opt = Some(format);
break;
}
Err(e) => {
let err_str = e.to_string();
let is_busy = err_str.contains("busy") || err_str.contains("resource");
last_set_format_error = Some(err_str);
if is_busy && attempt + 1 < SET_FORMAT_MAX_RETRIES {
warn!(
"Capture set_format busy (attempt {}/{}), retrying in {}ms",
attempt + 1,
SET_FORMAT_MAX_RETRIES,
SET_FORMAT_RETRY_DELAY_MS
);
std::thread::sleep(Duration::from_millis(SET_FORMAT_RETRY_DELAY_MS));
continue;
}
break;
}
}
}
let actual_format = match actual_format_opt {
Some(format) => format,
None => {
error!(
"Failed to set capture format: {}",
last_set_format_error
.unwrap_or_else(|| "unknown error".to_string())
);
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(1);
return;
}
};
let resolution = Resolution::new(actual_format.width, actual_format.height);
let pixel_format =
PixelFormat::from_fourcc(actual_format.fourcc).unwrap_or(config.input_format);
let stride = actual_format.stride;
info!(
"[Pipeline-Debug] capture format applied: {}x{} fourcc={:?} pixel_format={} stride={}",
actual_format.width,
actual_format.height,
actual_format.fourcc,
pixel_format,
stride
);
if config.fps > 0 {
if let Err(e) = device.set_params(&Parameters::with_fps(config.fps)) {
warn!("Failed to set hardware FPS: {}", e);
}
}
let mut stream = match MmapStream::with_buffers(
&device,
BufferType::VideoCapture,
buffer_count.max(1), buffer_count.max(1),
Duration::from_secs(2),
) { ) {
Ok(s) => s, Ok(stream) => stream,
Err(e) => { Err(e) => {
error!("Failed to create capture stream: {}", e); error!("Failed to open capture stream: {}", e);
let _ = pipeline.running.send(false); let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release); pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(1); let _ = frame_seq_tx.send(1);
@@ -1422,11 +1313,28 @@ impl SharedVideoPipeline {
} }
}; };
let resolution = stream.resolution();
let pixel_format = stream.format();
let stride = stream.stride();
let mut no_subscribers_since: Option<Instant> = None; let mut no_subscribers_since: Option<Instant> = None;
let grace_period = Duration::from_secs(AUTO_STOP_GRACE_PERIOD_SECS); let grace_period = Duration::from_secs(AUTO_STOP_GRACE_PERIOD_SECS);
let mut sequence: u64 = 0; let mut sequence: u64 = 0;
let mut validate_counter: u64 = 0; let mut validate_counter: u64 = 0;
let mut captured_frame_count: u64 = 0; let mut consecutive_timeouts: u32 = 0;
let capture_error_throttler = LogThrottler::with_secs(5);
let mut suppressed_capture_errors: HashMap<String, u64> = HashMap::new();
let classify_capture_error = |err: &std::io::Error| -> String {
let message = err.to_string();
if message.contains("dqbuf failed") && message.contains("EINVAL") {
"capture_dqbuf_einval".to_string()
} else if message.contains("dqbuf failed") {
"capture_dqbuf".to_string()
} else {
format!("capture_{:?}", err.kind())
}
};
while pipeline.running_flag.load(Ordering::Acquire) { while pipeline.running_flag.load(Ordering::Acquire) {
let subscriber_count = pipeline.subscriber_count(); let subscriber_count = pipeline.subscriber_count();
@@ -1456,59 +1364,78 @@ impl SharedVideoPipeline {
no_subscribers_since = None; no_subscribers_since = None;
} }
let (buf, meta) = match stream.next() { let mut owned = buffer_pool.take(MIN_CAPTURE_FRAME_SIZE);
Ok(frame_data) => frame_data, let meta = match stream.next_into(&mut owned) {
Ok(meta) => {
consecutive_timeouts = 0;
meta
}
Err(e) => { Err(e) => {
if e.kind() == std::io::ErrorKind::TimedOut { if e.kind() == std::io::ErrorKind::TimedOut {
consecutive_timeouts = consecutive_timeouts.saturating_add(1);
warn!("Capture timeout - no signal?"); warn!("Capture timeout - no signal?");
if consecutive_timeouts >= CAPTURE_TIMEOUT_RESTART_THRESHOLD {
warn!(
"Capture timed out {} consecutive times, restarting video pipeline",
consecutive_timeouts
);
let _ = pipeline.running.send(false);
pipeline.running_flag.store(false, Ordering::Release);
let _ = frame_seq_tx.send(sequence.wrapping_add(1));
break;
}
} else { } else {
error!("Capture error: {}", e); consecutive_timeouts = 0;
let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) {
let suppressed =
suppressed_capture_errors.remove(&key).unwrap_or(0);
if suppressed > 0 {
error!(
"Capture error: {} (suppressed {} repeats)",
e, suppressed
);
} else {
error!("Capture error: {}", e);
}
} else {
let counter = suppressed_capture_errors.entry(key).or_insert(0);
*counter = counter.saturating_add(1);
}
} }
continue; continue;
} }
}; };
let frame_size = meta.bytesused as usize; let frame_size = meta.bytes_used;
if frame_size < MIN_CAPTURE_FRAME_SIZE { if frame_size < MIN_CAPTURE_FRAME_SIZE {
continue; continue;
} }
validate_counter = validate_counter.wrapping_add(1); validate_counter = validate_counter.wrapping_add(1);
if pixel_format.is_compressed() if pixel_format.is_compressed()
&& validate_counter % JPEG_VALIDATE_INTERVAL == 0 && validate_counter.is_multiple_of(JPEG_VALIDATE_INTERVAL)
&& !VideoFrame::is_valid_jpeg_bytes(&buf[..frame_size]) && !VideoFrame::is_valid_jpeg_bytes(&owned[..frame_size])
{ {
continue; continue;
} }
let mut owned = buffer_pool.take(frame_size); owned.truncate(frame_size);
owned.resize(frame_size, 0);
owned[..frame_size].copy_from_slice(&buf[..frame_size]);
let frame = Arc::new(VideoFrame::from_pooled( let frame = Arc::new(VideoFrame::from_pooled(
Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))), Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))),
resolution, resolution,
pixel_format, pixel_format,
stride, stride,
sequence, meta.sequence,
)); ));
captured_frame_count = captured_frame_count.wrapping_add(1); sequence = meta.sequence.wrapping_add(1);
if captured_frame_count % PIPELINE_DEBUG_LOG_INTERVAL == 0 {
info!(
"[Pipeline-Debug] captured frames={} last_seq={} last_size={} subscribers={}",
captured_frame_count,
sequence,
frame_size,
subscriber_count
);
}
sequence = sequence.wrapping_add(1);
{ {
let mut guard = latest_frame.write(); let mut guard = latest_frame.write();
*guard = Some(frame); *guard = Some(frame);
} }
let _ = frame_seq_tx.send(sequence); let _ = frame_seq_tx.send(sequence);
} }
pipeline.running_flag.store(false, Ordering::Release); pipeline.running_flag.store(false, Ordering::Release);
@@ -1573,7 +1500,11 @@ impl SharedVideoPipeline {
} }
let packet = pipeline.encode(raw_frame, pts_ms).map_err(|e| { let packet = pipeline.encode(raw_frame, pts_ms).map_err(|e| {
let detail = if e.is_empty() { ffmpeg_hw_last_error() } else { e }; let detail = if e.is_empty() {
ffmpeg_hw_last_error()
} else {
e
};
AppError::VideoError(format!("FFmpeg HW encode failed: {}", detail)) AppError::VideoError(format!("FFmpeg HW encode failed: {}", detail))
})?; })?;
@@ -1593,9 +1524,10 @@ impl SharedVideoPipeline {
} }
let decoded_buf = if input_format.is_compressed() { let decoded_buf = if input_format.is_compressed() {
let decoder = state.mjpeg_decoder.as_mut().ok_or_else(|| { let decoder = state
AppError::VideoError("MJPEG decoder not initialized".to_string()) .mjpeg_decoder
})?; .as_mut()
.ok_or_else(|| AppError::VideoError("MJPEG decoder not initialized".to_string()))?;
let decoded = decoder.decode(raw_frame)?; let decoded = decoder.decode(raw_frame)?;
Some(decoded) Some(decoded)
} else { } else {
@@ -1625,16 +1557,18 @@ impl SharedVideoPipeline {
debug!("[Pipeline] Keyframe will be generated for this frame"); debug!("[Pipeline] Keyframe will be generated for this frame");
} }
let encode_result = if needs_yuv420p && state.yuv420p_converter.is_some() { let encode_result = if needs_yuv420p {
// Software encoder with direct input conversion to YUV420P // Software encoder with direct input conversion to YUV420P
let conv = state.yuv420p_converter.as_mut().unwrap(); if let Some(conv) = state.yuv420p_converter.as_mut() {
let yuv420p_data = conv let yuv420p_data = conv.convert(raw_frame).map_err(|e| {
.convert(raw_frame) AppError::VideoError(format!("YUV420P conversion failed: {}", e))
.map_err(|e| AppError::VideoError(format!("YUV420P conversion failed: {}", e)))?; })?;
encoder.encode_raw(yuv420p_data, pts_ms) encoder.encode_raw(yuv420p_data, pts_ms)
} else if state.nv12_converter.is_some() { } else {
encoder.encode_raw(raw_frame, pts_ms)
}
} else if let Some(conv) = state.nv12_converter.as_mut() {
// Hardware encoder with input conversion to NV12 // Hardware encoder with input conversion to NV12
let conv = state.nv12_converter.as_mut().unwrap();
let nv12_data = conv let nv12_data = conv
.convert(raw_frame) .convert(raw_frame)
.map_err(|e| AppError::VideoError(format!("NV12 conversion failed: {}", e)))?; .map_err(|e| AppError::VideoError(format!("NV12 conversion failed: {}", e)))?;

View File

@@ -3,9 +3,11 @@
//! This module provides a high-level interface for video capture and streaming, //! This module provides a high-level interface for video capture and streaming,
//! managing the lifecycle of the capture thread and MJPEG/WebRTC distribution. //! managing the lifecycle of the capture thread and MJPEG/WebRTC distribution.
use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, AtomicU32, Ordering}; use std::sync::atomic::{AtomicBool, AtomicU32, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use tracing::{debug, error, info, trace, warn}; use tracing::{debug, error, info, trace, warn};
@@ -15,12 +17,8 @@ use super::frame::{FrameBuffer, FrameBufferPool, VideoFrame};
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::events::{EventBus, SystemEvent}; use crate::events::{EventBus, SystemEvent};
use crate::stream::MjpegStreamHandler; use crate::stream::MjpegStreamHandler;
use v4l::buffer::Type as BufferType; use crate::utils::LogThrottler;
use v4l::io::traits::CaptureStream; use crate::video::v4l2r_capture::V4l2rCaptureStream;
use v4l::prelude::*;
use v4l::video::capture::Parameters;
use v4l::video::Capture;
use v4l::Format;
/// Minimum valid frame size for capture /// Minimum valid frame size for capture
const MIN_CAPTURE_FRAME_SIZE: usize = 128; const MIN_CAPTURE_FRAME_SIZE: usize = 128;
@@ -573,11 +571,9 @@ impl Streamer {
break; break;
} }
} }
} else { } else if zero_since.is_some() {
if zero_since.is_some() { info!("Clients reconnected, canceling auto-pause");
info!("Clients reconnected, canceling auto-pause"); zero_since = None;
zero_since = None;
}
} }
} }
}); });
@@ -632,8 +628,7 @@ impl Streamer {
} }
}; };
let mut device_opt: Option<Device> = None; let mut stream_opt: Option<V4l2rCaptureStream> = None;
let mut format_opt: Option<Format> = None;
let mut last_error: Option<String> = None; let mut last_error: Option<String> = None;
for attempt in 0..MAX_RETRIES { for attempt in 0..MAX_RETRIES {
@@ -642,8 +637,18 @@ impl Streamer {
return; return;
} }
let device = match Device::with_path(&device_path) { match V4l2rCaptureStream::open(
Ok(d) => d, &device_path,
config.resolution,
config.format,
config.fps,
BUFFER_COUNT,
Duration::from_secs(2),
) {
Ok(stream) => {
stream_opt = Some(stream);
break;
}
Err(e) => { Err(e) => {
let err_str = e.to_string(); let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") { if err_str.contains("busy") || err_str.contains("resource") {
@@ -660,42 +665,12 @@ impl Streamer {
last_error = Some(err_str); last_error = Some(err_str);
break; break;
} }
};
let requested = Format::new(
config.resolution.width,
config.resolution.height,
config.format.to_fourcc(),
);
match device.set_format(&requested) {
Ok(actual) => {
device_opt = Some(device);
format_opt = Some(actual);
break;
}
Err(e) => {
let err_str = e.to_string();
if err_str.contains("busy") || err_str.contains("resource") {
warn!(
"Device busy on set_format attempt {}/{}, retrying in {}ms...",
attempt + 1,
MAX_RETRIES,
RETRY_DELAY_MS
);
std::thread::sleep(std::time::Duration::from_millis(RETRY_DELAY_MS));
last_error = Some(err_str);
continue;
}
last_error = Some(err_str);
break;
}
} }
} }
let (device, actual_format) = match (device_opt, format_opt) { let mut stream = match stream_opt {
(Some(d), Some(f)) => (d, f), Some(stream) => stream,
_ => { None => {
error!( error!(
"Failed to open device {:?}: {}", "Failed to open device {:?}: {}",
device_path, device_path,
@@ -709,42 +684,35 @@ impl Streamer {
} }
}; };
let resolution = stream.resolution();
let pixel_format = stream.format();
let stride = stream.stride();
info!( info!(
"Capture format: {}x{} {:?} stride={}", "Capture format: {}x{} {:?} stride={}",
actual_format.width, actual_format.height, actual_format.fourcc, actual_format.stride resolution.width, resolution.height, pixel_format, stride
); );
let resolution = Resolution::new(actual_format.width, actual_format.height);
let pixel_format =
PixelFormat::from_fourcc(actual_format.fourcc).unwrap_or(config.format);
if config.fps > 0 {
if let Err(e) = device.set_params(&Parameters::with_fps(config.fps)) {
warn!("Failed to set hardware FPS: {}", e);
}
}
let mut stream =
match MmapStream::with_buffers(&device, BufferType::VideoCapture, BUFFER_COUNT) {
Ok(s) => s,
Err(e) => {
error!("Failed to create capture stream: {}", e);
self.mjpeg_handler.set_offline();
set_state(StreamerState::Error);
self.direct_active.store(false, Ordering::SeqCst);
self.current_fps.store(0, Ordering::Relaxed);
return;
}
};
let buffer_pool = Arc::new(FrameBufferPool::new(BUFFER_COUNT.max(4) as usize)); let buffer_pool = Arc::new(FrameBufferPool::new(BUFFER_COUNT.max(4) as usize));
let mut signal_present = true; let mut signal_present = true;
let mut sequence: u64 = 0;
let mut validate_counter: u64 = 0; let mut validate_counter: u64 = 0;
let mut idle_since: Option<std::time::Instant> = None; let mut idle_since: Option<std::time::Instant> = None;
let mut fps_frame_count: u64 = 0; let mut fps_frame_count: u64 = 0;
let mut last_fps_time = std::time::Instant::now(); let mut last_fps_time = std::time::Instant::now();
let capture_error_throttler = LogThrottler::with_secs(5);
let mut suppressed_capture_errors: HashMap<String, u64> = HashMap::new();
let classify_capture_error = |err: &std::io::Error| -> String {
let message = err.to_string();
if message.contains("dqbuf failed") && message.contains("EINVAL") {
"capture_dqbuf_einval".to_string()
} else if message.contains("dqbuf failed") {
"capture_dqbuf".to_string()
} else {
format!("capture_{:?}", err.kind())
}
};
while !self.direct_stop.load(Ordering::Relaxed) { while !self.direct_stop.load(Ordering::Relaxed) {
let mjpeg_clients = self.mjpeg_handler.client_count(); let mjpeg_clients = self.mjpeg_handler.client_count();
@@ -768,8 +736,9 @@ impl Streamer {
idle_since = None; idle_since = None;
} }
let (buf, meta) = match stream.next() { let mut owned = buffer_pool.take(MIN_CAPTURE_FRAME_SIZE);
Ok(frame_data) => frame_data, let meta = match stream.next_into(&mut owned) {
Ok(meta) => meta,
Err(e) => { Err(e) => {
if e.kind() == std::io::ErrorKind::TimedOut { if e.kind() == std::io::ErrorKind::TimedOut {
if signal_present { if signal_present {
@@ -811,35 +780,43 @@ impl Streamer {
break; break;
} }
error!("Capture error: {}", e); let key = classify_capture_error(&e);
if capture_error_throttler.should_log(&key) {
let suppressed = suppressed_capture_errors.remove(&key).unwrap_or(0);
if suppressed > 0 {
error!("Capture error: {} (suppressed {} repeats)", e, suppressed);
} else {
error!("Capture error: {}", e);
}
} else {
let counter = suppressed_capture_errors.entry(key).or_insert(0);
*counter = counter.saturating_add(1);
}
continue; continue;
} }
}; };
let frame_size = meta.bytesused as usize; let frame_size = meta.bytes_used;
if frame_size < MIN_CAPTURE_FRAME_SIZE { if frame_size < MIN_CAPTURE_FRAME_SIZE {
continue; continue;
} }
validate_counter = validate_counter.wrapping_add(1); validate_counter = validate_counter.wrapping_add(1);
if pixel_format.is_compressed() if pixel_format.is_compressed()
&& validate_counter % JPEG_VALIDATE_INTERVAL == 0 && validate_counter.is_multiple_of(JPEG_VALIDATE_INTERVAL)
&& !VideoFrame::is_valid_jpeg_bytes(&buf[..frame_size]) && !VideoFrame::is_valid_jpeg_bytes(&owned[..frame_size])
{ {
continue; continue;
} }
let mut owned = buffer_pool.take(frame_size); owned.truncate(frame_size);
owned.resize(frame_size, 0);
owned[..frame_size].copy_from_slice(&buf[..frame_size]);
let frame = VideoFrame::from_pooled( let frame = VideoFrame::from_pooled(
Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))), Arc::new(FrameBuffer::new(owned, Some(buffer_pool.clone()))),
resolution, resolution,
pixel_format, pixel_format,
actual_format.stride, stride,
sequence, meta.sequence,
); );
sequence = sequence.wrapping_add(1);
if !signal_present { if !signal_present {
signal_present = true; signal_present = true;
@@ -985,7 +962,7 @@ impl Streamer {
*streamer.state.write().await = StreamerState::Recovering; *streamer.state.write().await = StreamerState::Recovering;
// Publish reconnecting event (every 5 attempts to avoid spam) // Publish reconnecting event (every 5 attempts to avoid spam)
if attempt == 1 || attempt % 5 == 0 { if attempt == 1 || attempt.is_multiple_of(5) {
streamer streamer
.publish_event(SystemEvent::StreamReconnecting { .publish_event(SystemEvent::StreamReconnecting {
device: device_path.clone(), device: device_path.clone(),

277
src/video/v4l2r_capture.rs Normal file
View File

@@ -0,0 +1,277 @@
//! V4L2 capture implementation using v4l2r (ioctl layer).
use std::fs::File;
use std::io;
use std::os::fd::AsFd;
use std::path::Path;
use std::time::Duration;
use nix::poll::{poll, PollFd, PollFlags, PollTimeout};
use tracing::{debug, warn};
use v4l2r::bindings::{v4l2_requestbuffers, v4l2_streamparm, v4l2_streamparm__bindgen_ty_1};
use v4l2r::ioctl::{
self, Capabilities, Capability as V4l2rCapability, MemoryConsistency, PlaneMapping, QBufPlane,
QBuffer, QueryBuffer, V4l2Buffer,
};
use v4l2r::memory::{MemoryType, MmapHandle};
use v4l2r::{Format as V4l2rFormat, PixelFormat as V4l2rPixelFormat, QueueType};
use crate::error::{AppError, Result};
use crate::video::format::{PixelFormat, Resolution};
/// Metadata for a captured frame, returned by `V4l2rCaptureStream::next_into`.
#[derive(Debug, Clone, Copy)]
pub struct CaptureMeta {
    /// Total payload bytes copied into the destination buffer
    /// (sum over all non-empty planes, clamped to each plane's mapping length).
    pub bytes_used: usize,
    /// Driver-reported V4L2 frame sequence number for this buffer.
    pub sequence: u64,
}
/// V4L2 capture stream backed by v4l2r ioctl.
///
/// Owns the device fd, the negotiated format, and the MMAP'd buffer planes.
/// Frames are pulled synchronously with `next_into`.
pub struct V4l2rCaptureStream {
    /// Open device node (e.g. /dev/videoN), read+write.
    fd: File,
    /// Capture queue in use: single-planar or multi-planar, chosen in `open`.
    queue: QueueType,
    /// Resolution actually negotiated by the driver (may differ from requested).
    resolution: Resolution,
    /// Pixel format actually negotiated by the driver.
    format: PixelFormat,
    /// Bytes per line of the first plane, as reported by the driver.
    stride: u32,
    /// Poll timeout for `next_into`; zero disables the readiness wait.
    timeout: Duration,
    /// Per-buffer, per-plane memory mappings: mappings[buffer_index][plane_index].
    mappings: Vec<Vec<PlaneMapping>>,
}
impl V4l2rCaptureStream {
/// Open a V4L2 capture device and start streaming.
///
/// Performs the full capture setup sequence, in the required order:
/// querycap -> choose queue type -> g_fmt/s_fmt -> optional frame-rate hint ->
/// reqbufs (MMAP) -> querybuf + mmap for every buffer/plane -> queue all
/// buffers -> streamon.
///
/// Parameters:
/// - `device_path`: device node path (e.g. /dev/video0).
/// - `resolution` / `format`: requested capture geometry and pixel format;
///   the driver may adjust both, and the negotiated values are stored.
/// - `fps`: requested frame rate; 0 skips the rate request, and a failed
///   request is only a warning (many drivers ignore it).
/// - `buffer_count`: requested number of MMAP buffers; the driver decides
///   the final count.
/// - `timeout`: readiness timeout used later by `next_into`.
///
/// Errors: returns `AppError::VideoError` if the device cannot be opened,
/// does not expose a capture queue, or any ioctl in the sequence fails.
pub fn open(
    device_path: impl AsRef<Path>,
    resolution: Resolution,
    format: PixelFormat,
    fps: u32,
    buffer_count: u32,
    timeout: Duration,
) -> Result<Self> {
    // Read+write access is required for the streaming ioctls.
    let mut fd = File::options()
        .read(true)
        .write(true)
        .open(device_path.as_ref())
        .map_err(|e| AppError::VideoError(format!("Failed to open device: {}", e)))?;
    let caps: V4l2rCapability = ioctl::querycap(&fd)
        .map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?;
    let caps_flags = caps.device_caps();
    // Prefer multi-planar capture when available, as it is required for some
    // devices/pixel formats (e.g. NV12 via VIDEO_CAPTURE_MPLANE).
    let queue = if caps_flags.contains(Capabilities::VIDEO_CAPTURE_MPLANE) {
        QueueType::VideoCaptureMplane
    } else if caps_flags.contains(Capabilities::VIDEO_CAPTURE) {
        QueueType::VideoCapture
    } else {
        return Err(AppError::VideoError(
            "Device does not support capture queues".to_string(),
        ));
    };
    // Start from the driver's current format and overwrite only what we need;
    // this keeps driver-specific fields (colorspace, etc.) intact.
    let mut fmt: V4l2rFormat = ioctl::g_fmt(&fd, queue)
        .map_err(|e| AppError::VideoError(format!("Failed to get device format: {}", e)))?;
    fmt.width = resolution.width;
    fmt.height = resolution.height;
    fmt.pixelformat = V4l2rPixelFormat::from(&format.to_fourcc());
    // The driver may adjust width/height/pixelformat; always trust the
    // returned (actual) format rather than the requested one.
    let actual_fmt: V4l2rFormat = ioctl::s_fmt(&mut fd, (queue, &fmt))
        .map_err(|e| AppError::VideoError(format!("Failed to set device format: {}", e)))?;
    let actual_resolution = Resolution::new(actual_fmt.width, actual_fmt.height);
    let actual_format = PixelFormat::from_v4l2r(actual_fmt.pixelformat).unwrap_or(format);
    // Stride: first plane's bytesperline when reported; otherwise estimate
    // from bytes-per-pixel, falling back to width for formats without a
    // fixed per-pixel size (e.g. compressed formats).
    let stride = actual_fmt
        .plane_fmt
        .first()
        .map(|p| p.bytesperline)
        .unwrap_or_else(|| match actual_format.bytes_per_pixel() {
            Some(bpp) => actual_resolution.width * bpp as u32,
            None => actual_resolution.width,
        });
    if fps > 0 {
        // Best-effort: frame-rate control is optional in V4L2.
        if let Err(e) = set_fps(&fd, queue, fps) {
            warn!("Failed to set hardware FPS: {}", e);
        }
    }
    let req: v4l2_requestbuffers = ioctl::reqbufs(
        &fd,
        queue,
        MemoryType::Mmap,
        buffer_count,
        MemoryConsistency::empty(),
    )
    .map_err(|e| AppError::VideoError(format!("Failed to request buffers: {}", e)))?;
    // The driver is free to grant fewer buffers than requested; zero means
    // capture is impossible.
    let allocated = req.count as usize;
    if allocated == 0 {
        return Err(AppError::VideoError(
            "Driver returned zero capture buffers".to_string(),
        ));
    }
    // Map every plane of every granted buffer into our address space.
    let mut mappings = Vec::with_capacity(allocated);
    for index in 0..allocated as u32 {
        let query: QueryBuffer = ioctl::querybuf(&fd, queue, index as usize).map_err(|e| {
            AppError::VideoError(format!("Failed to query buffer {}: {}", index, e))
        })?;
        if query.planes.is_empty() {
            return Err(AppError::VideoError(format!(
                "Driver returned zero planes for buffer {}",
                index
            )));
        }
        let mut plane_maps = Vec::with_capacity(query.planes.len());
        for plane in &query.planes {
            let mapping = ioctl::mmap(&fd, plane.mem_offset, plane.length).map_err(|e| {
                AppError::VideoError(format!("Failed to mmap buffer {}: {}", index, e))
            })?;
            plane_maps.push(mapping);
        }
        mappings.push(plane_maps);
    }
    let mut stream = Self {
        fd,
        queue,
        resolution: actual_resolution,
        format: actual_format,
        stride,
        timeout,
        mappings,
    };
    // All buffers must be queued before streamon so the driver has somewhere
    // to write the first frames.
    stream.queue_all_buffers()?;
    ioctl::streamon(&stream.fd, stream.queue)
        .map_err(|e| AppError::VideoError(format!("Failed to start capture stream: {}", e)))?;
    Ok(stream)
}
    /// The capture resolution the driver actually applied after `s_fmt`
    /// (may differ from the resolution originally requested).
    pub fn resolution(&self) -> Resolution {
        self.resolution
    }
    /// The negotiated pixel format. Falls back to the requested format when
    /// the driver's fourcc could not be mapped back to a `PixelFormat`.
    pub fn format(&self) -> PixelFormat {
        self.format
    }
    /// Bytes per image row for the first plane, as reported by the driver;
    /// derived from width and bytes-per-pixel when no plane info was given.
    pub fn stride(&self) -> u32 {
        self.stride
    }
pub fn next_into(&mut self, dst: &mut Vec<u8>) -> io::Result<CaptureMeta> {
self.wait_ready()?;
let dqbuf: V4l2Buffer = ioctl::dqbuf(&self.fd, self.queue)
.map_err(|e| io::Error::other(format!("dqbuf failed: {}", e)))?;
let index = dqbuf.as_v4l2_buffer().index as usize;
let sequence = dqbuf.as_v4l2_buffer().sequence as u64;
let mut total = 0usize;
for (plane_idx, plane) in dqbuf.planes_iter().enumerate() {
let bytes_used = *plane.bytesused as usize;
let data_offset = plane.data_offset.copied().unwrap_or(0) as usize;
if bytes_used == 0 {
continue;
}
let mapping = &self.mappings[index][plane_idx];
let start = data_offset.min(mapping.len());
let end = (data_offset + bytes_used).min(mapping.len());
total += end.saturating_sub(start);
}
dst.resize(total, 0);
let mut cursor = 0usize;
for (plane_idx, plane) in dqbuf.planes_iter().enumerate() {
let bytes_used = *plane.bytesused as usize;
let data_offset = plane.data_offset.copied().unwrap_or(0) as usize;
if bytes_used == 0 {
continue;
}
let mapping = &self.mappings[index][plane_idx];
let start = data_offset.min(mapping.len());
let end = (data_offset + bytes_used).min(mapping.len());
let len = end.saturating_sub(start);
if len == 0 {
continue;
}
dst[cursor..cursor + len].copy_from_slice(&mapping[start..end]);
cursor += len;
}
self.queue_buffer(index as u32)
.map_err(|e| io::Error::other(e.to_string()))?;
Ok(CaptureMeta {
bytes_used: total,
sequence,
})
}
fn wait_ready(&self) -> io::Result<()> {
if self.timeout.is_zero() {
return Ok(());
}
let mut fds = [PollFd::new(self.fd.as_fd(), PollFlags::POLLIN)];
let timeout_ms = self.timeout.as_millis().min(u16::MAX as u128) as u16;
let ready = poll(&mut fds, PollTimeout::from(timeout_ms))?;
if ready == 0 {
return Err(io::Error::new(io::ErrorKind::TimedOut, "capture timeout"));
}
Ok(())
}
fn queue_all_buffers(&mut self) -> Result<()> {
for index in 0..self.mappings.len() as u32 {
self.queue_buffer(index)?;
}
Ok(())
}
fn queue_buffer(&mut self, index: u32) -> Result<()> {
let handle = MmapHandle;
let planes = self.mappings[index as usize]
.iter()
.map(|mapping| {
let mut plane = QBufPlane::new_from_handle(&handle, 0);
plane.0.length = mapping.len() as u32;
plane
})
.collect();
let mut qbuf: QBuffer<MmapHandle> = QBuffer::new(self.queue, index);
qbuf.planes = planes;
ioctl::qbuf::<_, ()>(&self.fd, qbuf)
.map_err(|e| AppError::VideoError(format!("Failed to queue buffer: {}", e)))?;
Ok(())
}
}
impl Drop for V4l2rCaptureStream {
    fn drop(&mut self) {
        // Best-effort STREAMOFF on teardown; Drop must not panic, so a
        // failure is only logged at debug level.
        match ioctl::streamoff(&self.fd, self.queue) {
            Ok(_) => {}
            Err(e) => debug!("Failed to stop capture stream: {}", e),
        }
    }
}
/// Ask the driver for a fixed frame rate via `VIDIOC_S_PARM`.
///
/// The interval is expressed as the fraction `1 / fps` seconds per frame.
/// Drivers are free to adjust the requested rate, so the value actually
/// applied may differ from the request.
///
/// # Errors
/// Returns `AppError::VideoError` when `fps` is zero (an invalid
/// `v4l2_fract` denominator) or when the `S_PARM` ioctl fails.
fn set_fps(fd: &File, queue: QueueType, fps: u32) -> Result<()> {
    // A zero denominator would form an invalid time-per-frame fraction;
    // reject it explicitly instead of handing it to the driver.
    if fps == 0 {
        return Err(AppError::VideoError(
            "FPS must be greater than zero".to_string(),
        ));
    }
    // SAFETY: v4l2_streamparm is a plain-old-data struct generated by
    // bindgen; the all-zero bit pattern is a valid "empty" value that is
    // then filled in field by field below.
    let mut params = unsafe { std::mem::zeroed::<v4l2_streamparm>() };
    params.type_ = queue as u32;
    params.parm = v4l2_streamparm__bindgen_ty_1 {
        capture: v4l2r::bindings::v4l2_captureparm {
            timeperframe: v4l2r::bindings::v4l2_fract {
                numerator: 1,
                denominator: fps,
            },
            // SAFETY: the remaining v4l2_captureparm fields are plain
            // integers where zero means "no capability/mode requested".
            ..unsafe { std::mem::zeroed() }
        },
    };
    let _actual: v4l2_streamparm = ioctl::s_parm(fd, params)
        .map_err(|e| AppError::VideoError(format!("Failed to set FPS: {}", e)))?;
    Ok(())
}

View File

@@ -45,11 +45,9 @@ use webrtc::ice_transport::ice_gatherer_state::RTCIceGathererState;
/// H.265/HEVC MIME type (RFC 7798) /// H.265/HEVC MIME type (RFC 7798)
const MIME_TYPE_H265: &str = "video/H265"; const MIME_TYPE_H265: &str = "video/H265";
/// Low-frequency diagnostic logging interval for video receive/send loop.
const VIDEO_DEBUG_LOG_INTERVAL: u64 = 120;
fn h264_contains_parameter_sets(data: &[u8]) -> bool { fn h264_contains_parameter_sets(data: &[u8]) -> bool {
// Annex-B path (00 00 01 / 00 00 00 01) // Annex-B start code path
let mut i = 0usize; let mut i = 0usize;
while i + 4 <= data.len() { while i + 4 <= data.len() {
let sc_len = if i + 4 <= data.len() let sc_len = if i + 4 <= data.len()
@@ -95,46 +93,6 @@ fn h264_contains_parameter_sets(data: &[u8]) -> bool {
false false
} }
fn extract_video_sdp_section(sdp: &str) -> String {
let mut lines_out: Vec<&str> = Vec::new();
let mut in_video = false;
for line in sdp.lines() {
if line.starts_with("m=") {
if line.starts_with("m=video") {
in_video = true;
lines_out.push(line);
continue;
}
if in_video {
break;
}
}
if !in_video {
continue;
}
if line.starts_with("c=")
|| line.starts_with("a=mid:")
|| line.starts_with("a=rtpmap:")
|| line.starts_with("a=fmtp:")
|| line.starts_with("a=rtcp-fb:")
|| line.starts_with("a=send")
|| line.starts_with("a=recv")
|| line.starts_with("a=inactive")
{
lines_out.push(line);
}
}
if lines_out.is_empty() {
"<no video m-section>".to_string()
} else {
lines_out.join(" | ")
}
}
/// Universal WebRTC session configuration /// Universal WebRTC session configuration
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct UniversalSessionConfig { pub struct UniversalSessionConfig {
@@ -679,10 +637,6 @@ impl UniversalSession {
let mut last_keyframe_request = Instant::now() - Duration::from_secs(1); let mut last_keyframe_request = Instant::now() - Duration::from_secs(1);
let mut frames_sent: u64 = 0; let mut frames_sent: u64 = 0;
let mut frames_received: u64 = 0;
let mut codec_mismatch_count: u64 = 0;
let mut waiting_keyframe_drop_count: u64 = 0;
let mut send_fail_count: u64 = 0;
loop { loop {
tokio::select! { tokio::select! {
@@ -707,43 +661,14 @@ impl UniversalSession {
break; break;
} }
}; };
frames_received = frames_received.wrapping_add(1);
// Verify codec matches // Verify codec matches
let frame_codec = encoded_frame.codec; let frame_codec = encoded_frame.codec;
if frame_codec != expected_codec { if frame_codec != expected_codec {
codec_mismatch_count = codec_mismatch_count.wrapping_add(1);
if codec_mismatch_count <= 5
|| codec_mismatch_count % VIDEO_DEBUG_LOG_INTERVAL == 0
{
info!(
"[Session-Debug:{}] codec mismatch count={} expected={} got={} recv_seq={}",
session_id,
codec_mismatch_count,
expected_codec,
frame_codec,
encoded_frame.sequence
);
}
continue; continue;
} }
if encoded_frame.is_keyframe
|| frames_received % VIDEO_DEBUG_LOG_INTERVAL == 0
{
info!(
"[Session-Debug:{}] received frame recv_count={} sent_count={} seq={} size={} keyframe={} waiting_for_keyframe={}",
session_id,
frames_received,
frames_sent,
encoded_frame.sequence,
encoded_frame.data.len(),
encoded_frame.is_keyframe,
waiting_for_keyframe
);
}
// Debug log for H265 frames // Debug log for H265 frames
if expected_codec == VideoEncoderType::H265 if expected_codec == VideoEncoderType::H265
&& (encoded_frame.is_keyframe || frames_sent.is_multiple_of(30)) { && (encoded_frame.is_keyframe || frames_sent.is_multiple_of(30)) {
@@ -764,27 +689,16 @@ impl UniversalSession {
} }
} }
let was_waiting_for_keyframe = waiting_for_keyframe;
if waiting_for_keyframe || gap_detected { if waiting_for_keyframe || gap_detected {
if encoded_frame.is_keyframe { if encoded_frame.is_keyframe {
waiting_for_keyframe = false; waiting_for_keyframe = false;
if was_waiting_for_keyframe || gap_detected {
info!(
"[Session-Debug:{}] keyframe accepted seq={} after_wait={} gap_detected={}",
session_id,
encoded_frame.sequence,
was_waiting_for_keyframe,
gap_detected
);
}
} else { } else {
if gap_detected { if gap_detected {
waiting_for_keyframe = true; waiting_for_keyframe = true;
} }
// Some H264 encoders (notably v4l2m2m on certain drivers) emit // Some H264 encoders output SPS/PPS in a separate non-keyframe AU
// SPS/PPS in a separate non-keyframe access unit right before IDR. // before IDR. Keep this frame so browser can decode the next IDR.
// If we drop it here, browser receives IDR-only (NAL 5) and cannot decode.
let forward_h264_parameter_frame = waiting_for_keyframe let forward_h264_parameter_frame = waiting_for_keyframe
&& expected_codec == VideoEncoderType::H264 && expected_codec == VideoEncoderType::H264
&& h264_contains_parameter_sets(encoded_frame.data.as_ref()); && h264_contains_parameter_sets(encoded_frame.data.as_ref());
@@ -796,32 +710,7 @@ impl UniversalSession {
request_keyframe(); request_keyframe();
last_keyframe_request = now; last_keyframe_request = now;
} }
if !forward_h264_parameter_frame {
if forward_h264_parameter_frame {
info!(
"[Session-Debug:{}] forwarding H264 parameter frame while waiting keyframe seq={} size={}",
session_id,
encoded_frame.sequence,
encoded_frame.data.len()
);
} else {
waiting_keyframe_drop_count =
waiting_keyframe_drop_count.wrapping_add(1);
if gap_detected
|| waiting_keyframe_drop_count <= 5
|| waiting_keyframe_drop_count
% VIDEO_DEBUG_LOG_INTERVAL
== 0
{
info!(
"[Session-Debug:{}] dropping frame while waiting keyframe seq={} keyframe={} gap_detected={} drop_count={}",
session_id,
encoded_frame.sequence,
encoded_frame.is_keyframe,
gap_detected,
waiting_keyframe_drop_count
);
}
continue; continue;
} }
} }
@@ -838,33 +727,11 @@ impl UniversalSession {
.await; .await;
let _ = send_in_flight; let _ = send_in_flight;
if let Err(e) = send_result { if send_result.is_err() {
send_fail_count = send_fail_count.wrapping_add(1); // Keep quiet unless debugging send failures elsewhere
if send_fail_count <= 5 || send_fail_count % VIDEO_DEBUG_LOG_INTERVAL == 0
{
info!(
"[Session-Debug:{}] track write failed count={} err={}",
session_id,
send_fail_count,
e
);
}
} else { } else {
frames_sent += 1; frames_sent += 1;
last_sequence = Some(encoded_frame.sequence); last_sequence = Some(encoded_frame.sequence);
if encoded_frame.is_keyframe
|| frames_sent % VIDEO_DEBUG_LOG_INTERVAL == 0
{
info!(
"[Session-Debug:{}] sent frame sent_count={} recv_count={} seq={} size={} keyframe={}",
session_id,
frames_sent,
frames_received,
encoded_frame.sequence,
encoded_frame.data.len(),
encoded_frame.is_keyframe
);
}
} }
} }
} }
@@ -983,12 +850,6 @@ impl UniversalSession {
/// Handle SDP offer and create answer /// Handle SDP offer and create answer
pub async fn handle_offer(&self, offer: SdpOffer) -> Result<SdpAnswer> { pub async fn handle_offer(&self, offer: SdpOffer) -> Result<SdpAnswer> {
info!(
"[SDP-Debug:{}] offer video section: {}",
self.session_id,
extract_video_sdp_section(&offer.sdp)
);
// Log offer for debugging H.265 codec negotiation // Log offer for debugging H.265 codec negotiation
if self.codec == VideoEncoderType::H265 { if self.codec == VideoEncoderType::H265 {
let has_h265 = offer.sdp.to_lowercase().contains("h265") let has_h265 = offer.sdp.to_lowercase().contains("h265")
@@ -1015,12 +876,6 @@ impl UniversalSession {
.await .await
.map_err(|e| AppError::VideoError(format!("Failed to create answer: {}", e)))?; .map_err(|e| AppError::VideoError(format!("Failed to create answer: {}", e)))?;
info!(
"[SDP-Debug:{}] answer video section: {}",
self.session_id,
extract_video_sdp_section(&answer.sdp)
);
// Log answer for debugging // Log answer for debugging
if self.codec == VideoEncoderType::H265 { if self.codec == VideoEncoderType::H265 {
let has_h265 = answer.sdp.to_lowercase().contains("h265") let has_h265 = answer.sdp.to_lowercase().contains("h265")

View File

@@ -18,10 +18,9 @@
use bytes::Bytes; use bytes::Bytes;
use std::sync::Arc; use std::sync::Arc;
use std::sync::atomic::{AtomicU64, Ordering};
use std::time::Duration; use std::time::Duration;
use tokio::sync::Mutex; use tokio::sync::Mutex;
use tracing::{debug, info, trace, warn}; use tracing::{debug, trace, warn};
use webrtc::media::Sample; use webrtc::media::Sample;
use webrtc::rtp_transceiver::rtp_codec::RTCRtpCodecCapability; use webrtc::rtp_transceiver::rtp_codec::RTCRtpCodecCapability;
use webrtc::track::track_local::track_local_static_rtp::TrackLocalStaticRTP; use webrtc::track::track_local::track_local_static_rtp::TrackLocalStaticRTP;
@@ -39,10 +38,6 @@ use crate::video::format::Resolution;
/// Default MTU for RTP packets /// Default MTU for RTP packets
const RTP_MTU: usize = 1200; const RTP_MTU: usize = 1200;
/// Low-frequency diagnostic logging interval for H264 frame writes.
const H264_DEBUG_LOG_INTERVAL: u64 = 120;
static H264_WRITE_COUNTER: AtomicU64 = AtomicU64::new(0);
/// Video codec type for WebRTC /// Video codec type for WebRTC
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -318,20 +313,7 @@ impl UniversalVideoTrack {
/// ///
/// Sends the entire Annex B frame as a single Sample to allow the /// Sends the entire Annex B frame as a single Sample to allow the
/// H264Payloader to aggregate SPS+PPS into STAP-A packets. /// H264Payloader to aggregate SPS+PPS into STAP-A packets.
async fn write_h264_frame(&self, data: Bytes, is_keyframe: bool) -> Result<()> { async fn write_h264_frame(&self, data: Bytes, _is_keyframe: bool) -> Result<()> {
let frame_idx = H264_WRITE_COUNTER.fetch_add(1, Ordering::Relaxed) + 1;
if is_keyframe || frame_idx % H264_DEBUG_LOG_INTERVAL == 0 {
let (stream_format, nal_types) = detect_h264_stream_format_and_nals(&data);
info!(
"[H264-Track-Debug] frame_idx={} size={} keyframe={} stream_format={} nal_types={:?}",
frame_idx,
data.len(),
is_keyframe,
stream_format,
nal_types
);
}
// Send entire Annex B frame as one Sample // Send entire Annex B frame as one Sample
// The H264Payloader in rtp crate will: // The H264Payloader in rtp crate will:
// 1. Parse NAL units from Annex B format // 1. Parse NAL units from Annex B format
@@ -488,49 +470,6 @@ impl UniversalVideoTrack {
} }
} }
fn detect_h264_stream_format_and_nals(data: &[u8]) -> (&'static str, Vec<u8>) {
let mut nal_types: Vec<u8> = Vec::new();
let mut i = 0usize;
while i + 4 <= data.len() {
let sc_len = if i + 4 <= data.len()
&& data[i] == 0
&& data[i + 1] == 0
&& data[i + 2] == 0
&& data[i + 3] == 1
{
4
} else if i + 3 <= data.len() && data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1 {
3
} else {
i += 1;
continue;
};
let nal_start = i + sc_len;
if nal_start < data.len() {
nal_types.push(data[nal_start] & 0x1F);
if nal_types.len() >= 12 {
break;
}
}
i = nal_start.saturating_add(1);
}
if !nal_types.is_empty() {
return ("annex-b", nal_types);
}
if data.len() >= 5 {
let first_len = u32::from_be_bytes([data[0], data[1], data[2], data[3]]) as usize;
if first_len > 0 && first_len + 4 <= data.len() {
return ("length-prefixed", vec![data[4] & 0x1F]);
}
}
("unknown", Vec::new())
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View File

@@ -250,8 +250,8 @@ impl WebRtcStreamer {
} }
} }
fn should_stop_pipeline(session_count: usize) -> bool { fn should_stop_pipeline(session_count: usize, subscriber_count: usize) -> bool {
session_count == 0 session_count == 0 && subscriber_count == 0
} }
async fn stop_pipeline_if_idle(&self, reason: &str) { async fn stop_pipeline_if_idle(&self, reason: &str) {
@@ -263,7 +263,7 @@ impl WebRtcStreamer {
}; };
let subscriber_count = pipeline.subscriber_count(); let subscriber_count = pipeline.subscriber_count();
if Self::should_stop_pipeline(session_count) { if Self::should_stop_pipeline(session_count, subscriber_count) {
info!( info!(
"{} stopping video pipeline (sessions={}, subscribers={})", "{} stopping video pipeline (sessions={}, subscribers={})",
reason, session_count, subscriber_count reason, session_count, subscriber_count
@@ -1005,9 +1005,10 @@ mod tests {
} }
#[test] #[test]
fn stop_pipeline_requires_no_sessions() { fn stop_pipeline_requires_no_sessions_and_no_subscribers() {
assert!(WebRtcStreamer::should_stop_pipeline(0)); assert!(WebRtcStreamer::should_stop_pipeline(0, 0));
assert!(!WebRtcStreamer::should_stop_pipeline(1)); assert!(!WebRtcStreamer::should_stop_pipeline(1, 0));
assert!(!WebRtcStreamer::should_stop_pipeline(2)); assert!(!WebRtcStreamer::should_stop_pipeline(0, 1));
assert!(!WebRtcStreamer::should_stop_pipeline(2, 3));
} }
} }