feat(video): transactional mode switching and unified frontend orchestration; broader video input format support

- Backend: mode-switch transactions with transition_id; /stream/mode returns switching/transition_id and the codec actually in effect

- Events: add mode_switching/mode_ready; config/webrtc_ready/mode_changed are correlated with the transaction

- Encoding/formats: extend NV21/NV16/NV24/RGB/BGR input and conversion paths; RKMPP direct-input optimization

- Frontend: useVideoSession unifies switching; on failure it genuinely falls back to MJPEG; menu format sync fixed

- Cleanup: useVideoStream reduced to MJPEG-only
mofeng-git
2026-01-11 10:41:57 +08:00
parent 9feb74b72c
commit 206594e292
110 changed files with 3955 additions and 2251 deletions
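
The backend bullet above introduces a switch transaction. A minimal sketch of the response shape, mirroring the ModeSwitchTransaction struct added in stream_manager further down; the exact JSON field names on the wire are an assumption:

// Hypothetical /stream/mode response; field names assumed to mirror
// ModeSwitchTransaction from this commit's stream_manager changes.
#[derive(Debug, Clone, serde::Serialize)]
pub struct ModeSwitchResponse {
    pub switching: bool,               // a switch is in progress
    pub transition_id: Option<String>, // correlates mode_switching/mode_ready events
    pub codec: String,                 // the codec actually selected by the backend
}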


@@ -231,7 +231,9 @@ impl VideoCapturer {
let last_error = self.last_error.clone();
let handle = tokio::task::spawn_blocking(move || {
capture_loop(config, state, stats, frame_tx, stop_flag, sequence, last_error);
capture_loop(
config, state, stats, frame_tx, stop_flag, sequence, last_error,
);
});
*self.capture_handle.lock().await = Some(handle);
@@ -275,14 +277,7 @@ fn capture_loop(
sequence: Arc<AtomicU64>,
error_holder: Arc<parking_lot::RwLock<Option<(String, String)>>>,
) {
let result = run_capture(
&config,
&state,
&stats,
&frame_tx,
&stop_flag,
&sequence,
);
let result = run_capture(&config, &state, &stats, &frame_tx, &stop_flag, &sequence);
match result {
Ok(_) => {
@@ -503,7 +498,10 @@ fn run_capture_inner(
// Validate frame
if frame_size < MIN_FRAME_SIZE {
debug!("Dropping small frame: {} bytes (bytesused={})", frame_size, meta.bytesused);
debug!(
"Dropping small frame: {} bytes (bytesused={})",
frame_size, meta.bytesused
);
if let Ok(mut s) = stats.try_lock() {
s.frames_dropped += 1;
}
@@ -606,18 +604,12 @@ impl FrameGrabber {
}
/// Capture a single frame
pub async fn grab(
&self,
resolution: Resolution,
format: PixelFormat,
) -> Result<VideoFrame> {
pub async fn grab(&self, resolution: Resolution, format: PixelFormat) -> Result<VideoFrame> {
let device_path = self.device_path.clone();
tokio::task::spawn_blocking(move || {
grab_single_frame(&device_path, resolution, format)
})
.await
.map_err(|e| AppError::VideoError(format!("Grab task failed: {}", e)))?
tokio::task::spawn_blocking(move || grab_single_frame(&device_path, resolution, format))
.await
.map_err(|e| AppError::VideoError(format!("Grab task failed: {}", e)))?
}
}
@@ -626,14 +618,13 @@ fn grab_single_frame(
resolution: Resolution,
format: PixelFormat,
) -> Result<VideoFrame> {
let device = Device::with_path(device_path).map_err(|e| {
AppError::VideoError(format!("Failed to open device: {}", e))
})?;
let device = Device::with_path(device_path)
.map_err(|e| AppError::VideoError(format!("Failed to open device: {}", e)))?;
let fmt = Format::new(resolution.width, resolution.height, format.to_fourcc());
let actual = device.set_format(&fmt).map_err(|e| {
AppError::VideoError(format!("Failed to set format: {}", e))
})?;
let actual = device
.set_format(&fmt)
.map_err(|e| AppError::VideoError(format!("Failed to set format: {}", e)))?;
let mut stream = MmapStream::with_buffers(&device, BufferType::VideoCapture, 2)
.map_err(|e| AppError::VideoError(format!("Failed to create stream: {}", e)))?;
@@ -643,8 +634,7 @@ fn grab_single_frame(
match stream.next() {
Ok((buf, _meta)) => {
if buf.len() >= MIN_FRAME_SIZE {
let actual_format =
PixelFormat::from_fourcc(actual.fourcc).unwrap_or(format);
let actual_format = PixelFormat::from_fourcc(actual.fourcc).unwrap_or(format);
return Ok(VideoFrame::new(
Bytes::copy_from_slice(buf),
@@ -657,16 +647,15 @@ fn grab_single_frame(
}
Err(e) => {
if attempt == 4 {
return Err(AppError::VideoError(format!(
"Failed to grab frame: {}",
e
)));
return Err(AppError::VideoError(format!("Failed to grab frame: {}", e)));
}
}
}
}
Err(AppError::VideoError("Failed to capture valid frame".to_string()))
Err(AppError::VideoError(
"Failed to capture valid frame".to_string(),
))
}
#[cfg(test)]
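
The reflowed grab signature above takes resolution and format directly. A minimal call-site sketch, assuming an already-constructed FrameGrabber and a Resolution::new constructor (both assumptions, not shown in this diff):

// Hypothetical snapshot helper around the single-frame grab API.
async fn snapshot(grabber: &FrameGrabber) -> Result<VideoFrame> {
    // Request 1280x720 YUYV; grab_single_frame resolves the device's
    // actual FourCC via PixelFormat::from_fourcc if it differs.
    grabber
        .grab(Resolution::new(1280, 720), PixelFormat::Yuyv)
        .await
}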


@@ -233,6 +233,16 @@ impl PixelConverter {
}
}
/// Create a new converter for NV21 → YUV420P
pub fn nv21_to_yuv420p(resolution: Resolution) -> Self {
Self {
src_format: PixelFormat::Nv21,
dst_format: PixelFormat::Yuv420,
resolution,
output_buffer: Yuv420pBuffer::new(resolution),
}
}
/// Create a new converter for YVU420 → YUV420P (swap U and V planes)
pub fn yvu420_to_yuv420p(resolution: Resolution) -> Self {
Self {
@@ -272,23 +282,39 @@ impl PixelConverter {
match (self.src_format, self.dst_format) {
(PixelFormat::Yuyv, PixelFormat::Yuv420) => {
libyuv::yuy2_to_i420(input, self.output_buffer.as_bytes_mut(), width, height)
.map_err(|e| AppError::VideoError(format!("libyuv conversion failed: {}", e)))?;
.map_err(|e| {
AppError::VideoError(format!("libyuv conversion failed: {}", e))
})?;
}
(PixelFormat::Uyvy, PixelFormat::Yuv420) => {
libyuv::uyvy_to_i420(input, self.output_buffer.as_bytes_mut(), width, height)
.map_err(|e| AppError::VideoError(format!("libyuv conversion failed: {}", e)))?;
.map_err(|e| {
AppError::VideoError(format!("libyuv conversion failed: {}", e))
})?;
}
(PixelFormat::Nv12, PixelFormat::Yuv420) => {
libyuv::nv12_to_i420(input, self.output_buffer.as_bytes_mut(), width, height)
.map_err(|e| AppError::VideoError(format!("libyuv conversion failed: {}", e)))?;
.map_err(|e| {
AppError::VideoError(format!("libyuv conversion failed: {}", e))
})?;
}
(PixelFormat::Nv21, PixelFormat::Yuv420) => {
libyuv::nv21_to_i420(input, self.output_buffer.as_bytes_mut(), width, height)
.map_err(|e| {
AppError::VideoError(format!("libyuv conversion failed: {}", e))
})?;
}
(PixelFormat::Rgb24, PixelFormat::Yuv420) => {
libyuv::rgb24_to_i420(input, self.output_buffer.as_bytes_mut(), width, height)
.map_err(|e| AppError::VideoError(format!("libyuv conversion failed: {}", e)))?;
.map_err(|e| {
AppError::VideoError(format!("libyuv conversion failed: {}", e))
})?;
}
(PixelFormat::Bgr24, PixelFormat::Yuv420) => {
libyuv::bgr24_to_i420(input, self.output_buffer.as_bytes_mut(), width, height)
.map_err(|e| AppError::VideoError(format!("libyuv conversion failed: {}", e)))?;
.map_err(|e| {
AppError::VideoError(format!("libyuv conversion failed: {}", e))
})?;
}
(PixelFormat::Yvyu, PixelFormat::Yuv420) => {
// YVYU is not directly supported by libyuv, use software conversion
@@ -307,7 +333,9 @@ impl PixelConverter {
expected_size
)));
}
self.output_buffer.as_bytes_mut().copy_from_slice(&input[..expected_size]);
self.output_buffer
.as_bytes_mut()
.copy_from_slice(&input[..expected_size]);
}
_ => {
return Err(AppError::VideoError(format!(
@@ -426,6 +454,8 @@ pub struct Nv12Converter {
resolution: Resolution,
/// Output buffer (reused across conversions)
output_buffer: Nv12Buffer,
/// Optional I420 buffer for intermediate conversions
i420_buffer: Option<Yuv420pBuffer>,
}
impl Nv12Converter {
@@ -435,6 +465,7 @@ impl Nv12Converter {
src_format: PixelFormat::Bgr24,
resolution,
output_buffer: Nv12Buffer::new(resolution),
i420_buffer: None,
}
}
@@ -444,6 +475,7 @@ impl Nv12Converter {
src_format: PixelFormat::Rgb24,
resolution,
output_buffer: Nv12Buffer::new(resolution),
i420_buffer: None,
}
}
@@ -453,6 +485,37 @@ impl Nv12Converter {
src_format: PixelFormat::Yuyv,
resolution,
output_buffer: Nv12Buffer::new(resolution),
i420_buffer: None,
}
}
/// Create a new converter for YUV420P (I420) → NV12
pub fn yuv420_to_nv12(resolution: Resolution) -> Self {
Self {
src_format: PixelFormat::Yuv420,
resolution,
output_buffer: Nv12Buffer::new(resolution),
i420_buffer: None,
}
}
/// Create a new converter for NV21 → NV12
pub fn nv21_to_nv12(resolution: Resolution) -> Self {
Self {
src_format: PixelFormat::Nv21,
resolution,
output_buffer: Nv12Buffer::new(resolution),
i420_buffer: Some(Yuv420pBuffer::new(resolution)),
}
}
/// Create a new converter for NV16 → NV12 (downsample chroma vertically)
pub fn nv16_to_nv12(resolution: Resolution) -> Self {
Self {
src_format: PixelFormat::Nv16,
resolution,
output_buffer: Nv12Buffer::new(resolution),
i420_buffer: None,
}
}
@@ -460,12 +523,45 @@ impl Nv12Converter {
pub fn convert(&mut self, input: &[u8]) -> Result<&[u8]> {
let width = self.resolution.width as i32;
let height = self.resolution.height as i32;
let dst = self.output_buffer.as_bytes_mut();
// Handle formats that need custom conversion without holding dst borrow
match self.src_format {
PixelFormat::Nv21 => {
let mut i420 = self.i420_buffer.take().ok_or_else(|| {
AppError::VideoError("NV21 I420 buffer not initialized".to_string())
})?;
{
let dst = self.output_buffer.as_bytes_mut();
Self::convert_nv21_to_nv12_with_dims(
self.resolution.width as usize,
self.resolution.height as usize,
input,
dst,
&mut i420,
)?;
}
self.i420_buffer = Some(i420);
return Ok(self.output_buffer.as_bytes());
}
PixelFormat::Nv16 => {
let dst = self.output_buffer.as_bytes_mut();
Self::convert_nv16_to_nv12_with_dims(
self.resolution.width as usize,
self.resolution.height as usize,
input,
dst,
)?;
return Ok(self.output_buffer.as_bytes());
}
_ => {}
}
let dst = self.output_buffer.as_bytes_mut();
let result = match self.src_format {
PixelFormat::Bgr24 => libyuv::bgr24_to_nv12(input, dst, width, height),
PixelFormat::Rgb24 => libyuv::rgb24_to_nv12(input, dst, width, height),
PixelFormat::Yuyv => libyuv::yuy2_to_nv12(input, dst, width, height),
PixelFormat::Yuv420 => libyuv::i420_to_nv12(input, dst, width, height),
_ => {
return Err(AppError::VideoError(format!(
"Unsupported conversion to NV12: {}",
@@ -474,10 +570,71 @@ impl Nv12Converter {
}
};
result.map_err(|e| AppError::VideoError(format!("libyuv NV12 conversion failed: {}", e)))?;
result
.map_err(|e| AppError::VideoError(format!("libyuv NV12 conversion failed: {}", e)))?;
Ok(self.output_buffer.as_bytes())
}
fn convert_nv21_to_nv12_with_dims(
width: usize,
height: usize,
input: &[u8],
dst: &mut [u8],
yuv: &mut Yuv420pBuffer,
) -> Result<()> {
libyuv::nv21_to_i420(input, yuv.as_bytes_mut(), width as i32, height as i32)
.map_err(|e| AppError::VideoError(format!("libyuv NV21->I420 failed: {}", e)))?;
libyuv::i420_to_nv12(yuv.as_bytes(), dst, width as i32, height as i32)
.map_err(|e| AppError::VideoError(format!("libyuv I420->NV12 failed: {}", e)))?;
Ok(())
}
fn convert_nv16_to_nv12_with_dims(
width: usize,
height: usize,
input: &[u8],
dst: &mut [u8],
) -> Result<()> {
let y_size = width * height;
let uv_size_nv16 = y_size; // NV16 chroma plane is full height
let uv_size_nv12 = y_size / 2;
if input.len() < y_size + uv_size_nv16 {
return Err(AppError::VideoError(format!(
"NV16 data too small: {} < {}",
input.len(),
y_size + uv_size_nv16
)));
}
// Copy Y plane as-is
dst[..y_size].copy_from_slice(&input[..y_size]);
// Downsample chroma vertically: average pairs of rows
let src_uv = &input[y_size..y_size + uv_size_nv16];
let dst_uv = &mut dst[y_size..y_size + uv_size_nv12];
let src_row_bytes = width;
let dst_row_bytes = width;
let dst_rows = height / 2;
for row in 0..dst_rows {
let src_row0 =
&src_uv[row * 2 * src_row_bytes..row * 2 * src_row_bytes + src_row_bytes];
let src_row1 = &src_uv
[(row * 2 + 1) * src_row_bytes..(row * 2 + 1) * src_row_bytes + src_row_bytes];
let dst_row = &mut dst_uv[row * dst_row_bytes..row * dst_row_bytes + dst_row_bytes];
for i in 0..dst_row_bytes {
let sum = src_row0[i] as u16 + src_row1[i] as u16;
dst_row[i] = (sum / 2) as u8;
}
}
Ok(())
}
/// Get output buffer length
pub fn output_len(&self) -> usize {
self.output_buffer.len()
@@ -542,10 +699,8 @@ mod tests {
// Create YUYV data (4x4 = 32 bytes)
let yuyv = vec![
16, 128, 17, 129, 18, 130, 19, 131,
20, 132, 21, 133, 22, 134, 23, 135,
24, 136, 25, 137, 26, 138, 27, 139,
28, 140, 29, 141, 30, 142, 31, 143,
16, 128, 17, 129, 18, 130, 19, 131, 20, 132, 21, 133, 22, 134, 23, 135, 24, 136, 25,
137, 26, 138, 27, 139, 28, 140, 29, 141, 30, 142, 31, 143,
];
let result = converter.convert(&yuyv).unwrap();
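
To make the NV16 → NV12 chroma rule above concrete: each output UV row is the byte-wise average of two adjacent input UV rows. A self-contained sketch of that arithmetic on a 4x2 frame, in plain Rust independent of the converter types:

// NV16 carries a full-height interleaved UV plane (w*h bytes);
// NV12 halves it vertically (w*h/2 bytes) by averaging row pairs.
#[test]
fn nv16_chroma_rows_average() {
    let (w, h) = (4usize, 2usize);
    let y = w * h; // Y-plane size: 8 bytes
    let mut input = vec![0u8; w * h * 2]; // NV16: Y + full-height UV
    input[y..y + w].copy_from_slice(&[10, 20, 30, 40]); // UV row 0
    input[y + w..].copy_from_slice(&[30, 40, 50, 60]); // UV row 1
    let mut dst = vec![0u8; w * h * 3 / 2]; // NV12: Y + half-height UV
    dst[..y].copy_from_slice(&input[..y]); // Y plane copied as-is
    for i in 0..w {
        let sum = input[y + i] as u16 + input[y + w + i] as u16;
        dst[y + i] = (sum / 2) as u8;
    }
    assert_eq!(&dst[y..], &[20, 30, 40, 50]);
}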


@@ -95,9 +95,10 @@ impl VideoDevice {
/// Get device capabilities
pub fn capabilities(&self) -> Result<DeviceCapabilities> {
let caps = self.device.query_caps().map_err(|e| {
AppError::VideoError(format!("Failed to query capabilities: {}", e))
})?;
let caps = self
.device
.query_caps()
.map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?;
Ok(DeviceCapabilities {
video_capture: caps.capabilities.contains(Flags::VIDEO_CAPTURE),
@@ -110,9 +111,10 @@ impl VideoDevice {
/// Get detailed device information
pub fn info(&self) -> Result<VideoDeviceInfo> {
let caps = self.device.query_caps().map_err(|e| {
AppError::VideoError(format!("Failed to query capabilities: {}", e))
})?;
let caps = self
.device
.query_caps()
.map_err(|e| AppError::VideoError(format!("Failed to query capabilities: {}", e)))?;
let capabilities = DeviceCapabilities {
video_capture: caps.capabilities.contains(Flags::VIDEO_CAPTURE),
@@ -128,7 +130,8 @@ impl VideoDevice {
let is_capture_card = Self::detect_capture_card(&caps.card, &caps.driver, &formats);
// Calculate priority score
let priority = Self::calculate_priority(&caps.card, &caps.driver, &formats, is_capture_card);
let priority =
Self::calculate_priority(&caps.card, &caps.driver, &formats, is_capture_card);
Ok(VideoDeviceInfo {
path: self.path.clone(),
@@ -148,9 +151,10 @@ impl VideoDevice {
let mut formats = Vec::new();
// Get supported formats
let format_descs = self.device.enum_formats().map_err(|e| {
AppError::VideoError(format!("Failed to enumerate formats: {}", e))
})?;
let format_descs = self
.device
.enum_formats()
.map_err(|e| AppError::VideoError(format!("Failed to enumerate formats: {}", e)))?;
for desc in format_descs {
// Try to convert FourCC to our PixelFormat
@@ -186,7 +190,9 @@ impl VideoDevice {
for size in sizes {
match size.size {
v4l::framesize::FrameSizeEnum::Discrete(d) => {
let fps = self.enumerate_fps(fourcc, d.width, d.height).unwrap_or_default();
let fps = self
.enumerate_fps(fourcc, d.width, d.height)
.unwrap_or_default();
resolutions.push(ResolutionInfo::new(d.width, d.height, fps));
}
v4l::framesize::FrameSizeEnum::Stepwise(s) => {
@@ -202,8 +208,11 @@ impl VideoDevice {
&& res.height >= s.min_height
&& res.height <= s.max_height
{
let fps = self.enumerate_fps(fourcc, res.width, res.height).unwrap_or_default();
resolutions.push(ResolutionInfo::new(res.width, res.height, fps));
let fps = self
.enumerate_fps(fourcc, res.width, res.height)
.unwrap_or_default();
resolutions
.push(ResolutionInfo::new(res.width, res.height, fps));
}
}
}
@@ -255,7 +264,7 @@ impl VideoDevice {
fps_list.push(30);
}
}
fps_list.sort_by(|a, b| b.cmp(a));
fps_list.dedup();
Ok(fps_list)
@@ -263,9 +272,9 @@ impl VideoDevice {
/// Get current format
pub fn get_format(&self) -> Result<Format> {
self.device.format().map_err(|e| {
AppError::VideoError(format!("Failed to get format: {}", e))
})
self.device
.format()
.map_err(|e| AppError::VideoError(format!("Failed to get format: {}", e)))
}
/// Set capture format
@@ -273,9 +282,10 @@ impl VideoDevice {
let fmt = Format::new(width, height, format.to_fourcc());
// Request the format
let actual = self.device.set_format(&fmt).map_err(|e| {
AppError::VideoError(format!("Failed to set format: {}", e))
})?;
let actual = self
.device
.set_format(&fmt)
.map_err(|e| AppError::VideoError(format!("Failed to set format: {}", e)))?;
if actual.width != width || actual.height != height {
warn!(
@@ -374,9 +384,9 @@ pub fn enumerate_devices() -> Result<Vec<VideoDeviceInfo>> {
let mut devices = Vec::new();
// Scan /dev/video* devices
for entry in std::fs::read_dir("/dev").map_err(|e| {
AppError::VideoError(format!("Failed to read /dev: {}", e))
})? {
for entry in std::fs::read_dir("/dev")
.map_err(|e| AppError::VideoError(format!("Failed to read /dev: {}", e)))?
{
let entry = match entry {
Ok(e) => e,
Err(_) => continue,
@@ -432,9 +442,10 @@ pub fn enumerate_devices() -> Result<Vec<VideoDeviceInfo>> {
pub fn find_best_device() -> Result<VideoDeviceInfo> {
let devices = enumerate_devices()?;
devices.into_iter().next().ok_or_else(|| {
AppError::VideoError("No video capture devices found".to_string())
})
devices
.into_iter()
.next()
.ok_or_else(|| AppError::VideoError("No video capture devices found".to_string()))
}
#[cfg(test)]


@@ -99,8 +99,18 @@ pub enum H264InputFormat {
Yuv420p,
/// NV12 - Y plane + interleaved UV plane (optimal for VAAPI)
Nv12,
/// NV21 - Y plane + interleaved VU plane
Nv21,
/// NV16 - Y plane + interleaved UV plane (4:2:2)
Nv16,
/// NV24 - Y plane + interleaved UV plane (4:4:4)
Nv24,
/// YUYV422 - packed YUV 4:2:2 format (optimal for RKMPP direct input)
Yuyv422,
/// RGB24 - packed RGB format (RKMPP direct input)
Rgb24,
/// BGR24 - packed BGR format (RKMPP direct input)
Bgr24,
}
impl Default for H264InputFormat {
@@ -202,7 +212,7 @@ pub fn get_available_encoders(width: u32, height: u32) -> Vec<CodecInfo> {
fps: 30,
gop: 30,
rc: RateControl::RC_CBR,
quality: Quality::Quality_Low, // Use low quality preset for fastest encoding (ultrafast)
quality: Quality::Quality_Low, // Use low quality preset for fastest encoding (ultrafast)
kbs: 2000,
q: 23,
thread_count: 4,
@@ -270,9 +280,8 @@ impl H264Encoder {
// Detect best encoder
let (_encoder_type, codec_name) = detect_best_encoder(width, height);
let codec_name = codec_name.ok_or_else(|| {
AppError::VideoError("No H.264 encoder available".to_string())
})?;
let codec_name = codec_name
.ok_or_else(|| AppError::VideoError("No H.264 encoder available".to_string()))?;
Self::with_codec(config, &codec_name)
}
@@ -287,8 +296,13 @@ impl H264Encoder {
// Select pixel format based on config
let pixfmt = match config.input_format {
H264InputFormat::Nv12 => AVPixelFormat::AV_PIX_FMT_NV12,
H264InputFormat::Nv21 => AVPixelFormat::AV_PIX_FMT_NV21,
H264InputFormat::Nv16 => AVPixelFormat::AV_PIX_FMT_NV16,
H264InputFormat::Nv24 => AVPixelFormat::AV_PIX_FMT_NV24,
H264InputFormat::Yuv420p => AVPixelFormat::AV_PIX_FMT_YUV420P,
H264InputFormat::Yuyv422 => AVPixelFormat::AV_PIX_FMT_YUYV422,
H264InputFormat::Rgb24 => AVPixelFormat::AV_PIX_FMT_RGB24,
H264InputFormat::Bgr24 => AVPixelFormat::AV_PIX_FMT_BGR24,
};
info!(
@@ -306,10 +320,10 @@ impl H264Encoder {
fps: config.fps as i32,
gop: config.gop_size as i32,
rc: RateControl::RC_CBR,
quality: Quality::Quality_Low, // Use low quality preset for fastest encoding (lowest latency)
quality: Quality::Quality_Low, // Use low quality preset for fastest encoding (lowest latency)
kbs: config.bitrate_kbps as i32,
q: 23,
thread_count: 4, // Use 4 threads for better performance
thread_count: 4, // Use 4 threads for better performance
};
let inner = HwEncoder::new(ctx).map_err(|_| {
@@ -353,9 +367,9 @@ impl H264Encoder {
/// Update bitrate dynamically
pub fn set_bitrate(&mut self, bitrate_kbps: u32) -> Result<()> {
self.inner.set_bitrate(bitrate_kbps as i32).map_err(|_| {
AppError::VideoError("Failed to set bitrate".to_string())
})?;
self.inner
.set_bitrate(bitrate_kbps as i32)
.map_err(|_| AppError::VideoError("Failed to set bitrate".to_string()))?;
self.config.bitrate_kbps = bitrate_kbps;
debug!("Bitrate updated to {} kbps", bitrate_kbps);
Ok(())
@@ -394,16 +408,7 @@ impl H264Encoder {
Ok(owned_frames)
}
Err(e) => {
// For the first ~30 frames, x264 may fail due to initialization
// Log as warning instead of error to avoid alarming users
if self.frame_count <= 30 {
warn!(
"Encode failed during initialization (frame {}): {} - this is normal for x264",
self.frame_count, e
);
} else {
error!("Encode failed: {}", e);
}
error!("Encode failed: {}", e);
Err(AppError::VideoError(format!("Encode failed: {}", e)))
}
}
@@ -458,7 +463,9 @@ impl Encoder for H264Encoder {
if frames.is_empty() {
// Encoder needs more frames (shouldn't happen with our config)
warn!("Encoder returned no frames");
return Err(AppError::VideoError("Encoder returned no frames".to_string()));
return Err(AppError::VideoError(
"Encoder returned no frames".to_string(),
));
}
// Take ownership of the first frame (zero-copy)
@@ -493,8 +500,13 @@ impl Encoder for H264Encoder {
// Check if the format matches our configured input format
match self.config.input_format {
H264InputFormat::Nv12 => matches!(format, PixelFormat::Nv12),
H264InputFormat::Nv21 => matches!(format, PixelFormat::Nv21),
H264InputFormat::Nv16 => matches!(format, PixelFormat::Nv16),
H264InputFormat::Nv24 => matches!(format, PixelFormat::Nv24),
H264InputFormat::Yuv420p => matches!(format, PixelFormat::Yuv420),
H264InputFormat::Yuyv422 => matches!(format, PixelFormat::Yuyv),
H264InputFormat::Rgb24 => matches!(format, PixelFormat::Rgb24),
H264InputFormat::Bgr24 => matches!(format, PixelFormat::Bgr24),
}
}
}
@@ -538,7 +550,11 @@ mod tests {
let config = H264Config::low_latency(Resolution::HD720, 2000);
match H264Encoder::new(config) {
Ok(encoder) => {
println!("Created encoder: {} ({})", encoder.codec_name(), encoder.encoder_type());
println!(
"Created encoder: {} ({})",
encoder.codec_name(),
encoder.encoder_type()
);
}
Err(e) => {
println!("Failed to create encoder: {}", e);


@@ -92,8 +92,18 @@ pub enum H265InputFormat {
Yuv420p,
/// NV12 - Y plane + interleaved UV plane (optimal for hardware encoders)
Nv12,
/// NV21 - Y plane + interleaved VU plane
Nv21,
/// NV16 - Y plane + interleaved UV plane (4:2:2)
Nv16,
/// NV24 - Y plane + interleaved UV plane (4:4:4)
Nv24,
/// YUYV422 - packed YUV 4:2:2 format (optimal for RKMPP direct input)
Yuyv422,
/// RGB24 - packed RGB format (RKMPP direct input)
Rgb24,
/// BGR24 - packed BGR format (RKMPP direct input)
Bgr24,
}
impl Default for H265InputFormat {
@@ -252,10 +262,7 @@ pub fn detect_best_h265_encoder(width: u32, height: u32) -> (H265EncoderType, Op
H265EncoderType::Software // Default to software for unknown
};
info!(
"Selected H.265 encoder: {} ({})",
codec.name, encoder_type
);
info!("Selected H.265 encoder: {} ({})", codec.name, encoder_type);
(encoder_type, Some(codec.name.clone()))
}
@@ -304,7 +311,8 @@ impl H265Encoder {
if encoder_type == H265EncoderType::None {
return Err(AppError::VideoError(
"No H.265 encoder available. Please ensure FFmpeg is built with libx265 support.".to_string(),
"No H.265 encoder available. Please ensure FFmpeg is built with libx265 support."
.to_string(),
));
}
@@ -336,8 +344,17 @@ impl H265Encoder {
} else {
match config.input_format {
H265InputFormat::Nv12 => (AVPixelFormat::AV_PIX_FMT_NV12, H265InputFormat::Nv12),
H265InputFormat::Yuv420p => (AVPixelFormat::AV_PIX_FMT_YUV420P, H265InputFormat::Yuv420p),
H265InputFormat::Yuyv422 => (AVPixelFormat::AV_PIX_FMT_YUYV422, H265InputFormat::Yuyv422),
H265InputFormat::Nv21 => (AVPixelFormat::AV_PIX_FMT_NV21, H265InputFormat::Nv21),
H265InputFormat::Nv16 => (AVPixelFormat::AV_PIX_FMT_NV16, H265InputFormat::Nv16),
H265InputFormat::Nv24 => (AVPixelFormat::AV_PIX_FMT_NV24, H265InputFormat::Nv24),
H265InputFormat::Yuv420p => {
(AVPixelFormat::AV_PIX_FMT_YUV420P, H265InputFormat::Yuv420p)
}
H265InputFormat::Yuyv422 => {
(AVPixelFormat::AV_PIX_FMT_YUYV422, H265InputFormat::Yuyv422)
}
H265InputFormat::Rgb24 => (AVPixelFormat::AV_PIX_FMT_RGB24, H265InputFormat::Rgb24),
H265InputFormat::Bgr24 => (AVPixelFormat::AV_PIX_FMT_BGR24, H265InputFormat::Bgr24),
}
};
@@ -407,9 +424,9 @@ impl H265Encoder {
/// Update bitrate dynamically
pub fn set_bitrate(&mut self, bitrate_kbps: u32) -> Result<()> {
self.inner.set_bitrate(bitrate_kbps as i32).map_err(|_| {
AppError::VideoError("Failed to set H.265 bitrate".to_string())
})?;
self.inner
.set_bitrate(bitrate_kbps as i32)
.map_err(|_| AppError::VideoError("Failed to set H.265 bitrate".to_string()))?;
self.config.bitrate_kbps = bitrate_kbps;
debug!("H.265 bitrate updated to {} kbps", bitrate_kbps);
Ok(())
@@ -464,7 +481,10 @@ impl H265Encoder {
if keyframe || self.frame_count % 30 == 1 {
debug!(
"[H265] Encoded frame #{}: output_size={}, keyframe={}, frame_count={}",
self.frame_count, total_size, keyframe, owned_frames.len()
self.frame_count,
total_size,
keyframe,
owned_frames.len()
);
// Log first few bytes of keyframe for debugging
@@ -477,7 +497,10 @@ impl H265Encoder {
}
}
} else {
warn!("[H265] Encoder returned empty frame list for frame #{}", self.frame_count);
warn!(
"[H265] Encoder returned empty frame list for frame #{}",
self.frame_count
);
}
Ok(owned_frames)
@@ -567,8 +590,13 @@ impl Encoder for H265Encoder {
fn supports_format(&self, format: PixelFormat) -> bool {
match self.config.input_format {
H265InputFormat::Nv12 => matches!(format, PixelFormat::Nv12),
H265InputFormat::Nv21 => matches!(format, PixelFormat::Nv21),
H265InputFormat::Nv16 => matches!(format, PixelFormat::Nv16),
H265InputFormat::Nv24 => matches!(format, PixelFormat::Nv24),
H265InputFormat::Yuv420p => matches!(format, PixelFormat::Yuv420),
H265InputFormat::Yuyv422 => matches!(format, PixelFormat::Yuyv),
H265InputFormat::Rgb24 => matches!(format, PixelFormat::Rgb24),
H265InputFormat::Bgr24 => matches!(format, PixelFormat::Bgr24),
}
}
}
@@ -580,7 +608,10 @@ mod tests {
#[test]
fn test_detect_h265_encoder() {
let (encoder_type, codec_name) = detect_best_h265_encoder(1280, 720);
println!("Detected H.265 encoder: {:?} ({:?})", encoder_type, codec_name);
println!(
"Detected H.265 encoder: {:?} ({:?})",
encoder_type, codec_name
);
}
#[test]


@@ -35,10 +35,12 @@ impl JpegEncoder {
// I420: Y = width*height, U = width*height/4, V = width*height/4
let i420_size = width * height * 3 / 2;
let mut compressor = turbojpeg::Compressor::new()
.map_err(|e| AppError::VideoError(format!("Failed to create turbojpeg compressor: {}", e)))?;
let mut compressor = turbojpeg::Compressor::new().map_err(|e| {
AppError::VideoError(format!("Failed to create turbojpeg compressor: {}", e))
})?;
compressor.set_quality(config.quality.min(100) as i32)
compressor
.set_quality(config.quality.min(100) as i32)
.map_err(|e| AppError::VideoError(format!("Failed to set JPEG quality: {}", e)))?;
Ok(Self {
@@ -56,7 +58,8 @@ impl JpegEncoder {
/// Set JPEG quality (1-100)
pub fn set_quality(&mut self, quality: u32) -> Result<()> {
self.compressor.set_quality(quality.min(100) as i32)
self.compressor
.set_quality(quality.min(100) as i32)
.map_err(|e| AppError::VideoError(format!("Failed to set JPEG quality: {}", e)))?;
self.config.quality = quality;
Ok(())
@@ -73,12 +76,14 @@ impl JpegEncoder {
pixels: self.i420_buffer.as_slice(),
width,
height,
align: 1, // No padding between rows
align: 1, // No padding between rows
subsamp: turbojpeg::Subsamp::Sub2x2, // YUV 4:2:0
};
// Compress YUV directly to JPEG (skips color space conversion!)
let jpeg_data = self.compressor.compress_yuv_to_vec(yuv_image)
let jpeg_data = self
.compressor
.compress_yuv_to_vec(yuv_image)
.map_err(|e| AppError::VideoError(format!("JPEG compression failed: {}", e)))?;
Ok(EncodedFrame::jpeg(


@@ -19,7 +19,9 @@ pub mod vp8;
pub mod vp9;
// Core traits and types
pub use traits::{BitratePreset, EncodedFormat, EncodedFrame, Encoder, EncoderConfig, EncoderFactory};
pub use traits::{
BitratePreset, EncodedFormat, EncodedFrame, Encoder, EncoderConfig, EncoderFactory,
};
// WebRTC codec abstraction
pub use codec::{CodecFrame, VideoCodec, VideoCodecConfig, VideoCodecFactory, VideoCodecType};


@@ -264,10 +264,7 @@ impl EncoderRegistry {
if let Some(encoder) = AvailableEncoder::from_codec_info(codec_info) {
debug!(
"Detected encoder: {} ({}) - {} priority={}",
encoder.codec_name,
encoder.format,
encoder.backend,
encoder.priority
encoder.codec_name, encoder.format, encoder.backend, encoder.priority
);
self.encoders
@@ -336,13 +333,15 @@ impl EncoderRegistry {
format: VideoEncoderType,
hardware_only: bool,
) -> Option<&AvailableEncoder> {
self.encoders.get(&format)?.iter().find(|e| {
if hardware_only {
e.is_hardware
} else {
true
}
})
self.encoders.get(&format)?.iter().find(
|e| {
if hardware_only {
e.is_hardware
} else {
true
}
},
)
}
/// Get all encoders for a format
@@ -523,9 +522,6 @@ mod tests {
// Should have detected at least H264 (software fallback available)
println!("Available formats: {:?}", registry.available_formats(false));
println!(
"Selectable formats: {:?}",
registry.selectable_formats()
);
println!("Selectable formats: {:?}", registry.selectable_formats());
}
}


@@ -5,8 +5,8 @@ use serde::{Deserialize, Serialize};
use std::time::Instant;
use typeshare::typeshare;
use crate::video::format::{PixelFormat, Resolution};
use crate::error::Result;
use crate::video::format::{PixelFormat, Resolution};
/// Bitrate preset for video encoding
///
@@ -46,10 +46,10 @@ impl BitratePreset {
/// Quality preset uses longer GOP for better compression efficiency.
pub fn gop_size(&self, fps: u32) -> u32 {
match self {
Self::Speed => (fps / 2).max(15), // 0.5 second, minimum 15 frames
Self::Balanced => fps, // 1 second
Self::Quality => fps * 2, // 2 seconds
Self::Custom(_) => fps, // Default 1 second for custom
Self::Speed => (fps / 2).max(15), // 0.5 second, minimum 15 frames
Self::Balanced => fps, // 1 second
Self::Quality => fps * 2, // 2 seconds
Self::Custom(_) => fps, // Default 1 second for custom
}
}


@@ -186,10 +186,7 @@ pub fn detect_best_vp8_encoder(width: u32, height: u32) -> (VP8EncoderType, Opti
VP8EncoderType::Software // Default to software for unknown
};
info!(
"Selected VP8 encoder: {} ({})",
codec.name, encoder_type
);
info!("Selected VP8 encoder: {} ({})", codec.name, encoder_type);
(encoder_type, Some(codec.name.clone()))
}
@@ -238,7 +235,8 @@ impl VP8Encoder {
if encoder_type == VP8EncoderType::None {
return Err(AppError::VideoError(
"No VP8 encoder available. Please ensure FFmpeg is built with libvpx support.".to_string(),
"No VP8 encoder available. Please ensure FFmpeg is built with libvpx support."
.to_string(),
));
}
@@ -270,7 +268,9 @@ impl VP8Encoder {
} else {
match config.input_format {
VP8InputFormat::Nv12 => (AVPixelFormat::AV_PIX_FMT_NV12, VP8InputFormat::Nv12),
VP8InputFormat::Yuv420p => (AVPixelFormat::AV_PIX_FMT_YUV420P, VP8InputFormat::Yuv420p),
VP8InputFormat::Yuv420p => {
(AVPixelFormat::AV_PIX_FMT_YUV420P, VP8InputFormat::Yuv420p)
}
}
};
@@ -340,9 +340,9 @@ impl VP8Encoder {
/// Update bitrate dynamically
pub fn set_bitrate(&mut self, bitrate_kbps: u32) -> Result<()> {
self.inner.set_bitrate(bitrate_kbps as i32).map_err(|_| {
AppError::VideoError("Failed to set VP8 bitrate".to_string())
})?;
self.inner
.set_bitrate(bitrate_kbps as i32)
.map_err(|_| AppError::VideoError("Failed to set VP8 bitrate".to_string()))?;
self.config.bitrate_kbps = bitrate_kbps;
debug!("VP8 bitrate updated to {} kbps", bitrate_kbps);
Ok(())
@@ -470,7 +470,10 @@ mod tests {
#[test]
fn test_detect_vp8_encoder() {
let (encoder_type, codec_name) = detect_best_vp8_encoder(1280, 720);
println!("Detected VP8 encoder: {:?} ({:?})", encoder_type, codec_name);
println!(
"Detected VP8 encoder: {:?} ({:?})",
encoder_type, codec_name
);
}
#[test]


@@ -186,10 +186,7 @@ pub fn detect_best_vp9_encoder(width: u32, height: u32) -> (VP9EncoderType, Opti
VP9EncoderType::Software // Default to software for unknown
};
info!(
"Selected VP9 encoder: {} ({})",
codec.name, encoder_type
);
info!("Selected VP9 encoder: {} ({})", codec.name, encoder_type);
(encoder_type, Some(codec.name.clone()))
}
@@ -238,7 +235,8 @@ impl VP9Encoder {
if encoder_type == VP9EncoderType::None {
return Err(AppError::VideoError(
"No VP9 encoder available. Please ensure FFmpeg is built with libvpx support.".to_string(),
"No VP9 encoder available. Please ensure FFmpeg is built with libvpx support."
.to_string(),
));
}
@@ -270,7 +268,9 @@ impl VP9Encoder {
} else {
match config.input_format {
VP9InputFormat::Nv12 => (AVPixelFormat::AV_PIX_FMT_NV12, VP9InputFormat::Nv12),
VP9InputFormat::Yuv420p => (AVPixelFormat::AV_PIX_FMT_YUV420P, VP9InputFormat::Yuv420p),
VP9InputFormat::Yuv420p => {
(AVPixelFormat::AV_PIX_FMT_YUV420P, VP9InputFormat::Yuv420p)
}
}
};
@@ -340,9 +340,9 @@ impl VP9Encoder {
/// Update bitrate dynamically
pub fn set_bitrate(&mut self, bitrate_kbps: u32) -> Result<()> {
self.inner.set_bitrate(bitrate_kbps as i32).map_err(|_| {
AppError::VideoError("Failed to set VP9 bitrate".to_string())
})?;
self.inner
.set_bitrate(bitrate_kbps as i32)
.map_err(|_| AppError::VideoError("Failed to set VP9 bitrate".to_string()))?;
self.config.bitrate_kbps = bitrate_kbps;
debug!("VP9 bitrate updated to {} kbps", bitrate_kbps);
Ok(())
@@ -470,7 +470,10 @@ mod tests {
#[test]
fn test_detect_vp9_encoder() {
let (encoder_type, codec_name) = detect_best_vp9_encoder(1280, 720);
println!("Detected VP9 encoder: {:?} ({:?})", encoder_type, codec_name);
println!(
"Detected VP9 encoder: {:?} ({:?})",
encoder_type, codec_name
);
}
#[test]


@@ -20,6 +20,8 @@ pub enum PixelFormat {
Uyvy,
/// NV12 semi-planar format (Y plane + interleaved UV)
Nv12,
/// NV21 semi-planar format (Y plane + interleaved VU)
Nv21,
/// NV16 semi-planar format
Nv16,
/// NV24 semi-planar format
@@ -48,6 +50,7 @@ impl PixelFormat {
PixelFormat::Yvyu => fourcc::FourCC::new(b"YVYU"),
PixelFormat::Uyvy => fourcc::FourCC::new(b"UYVY"),
PixelFormat::Nv12 => fourcc::FourCC::new(b"NV12"),
PixelFormat::Nv21 => fourcc::FourCC::new(b"NV21"),
PixelFormat::Nv16 => fourcc::FourCC::new(b"NV16"),
PixelFormat::Nv24 => fourcc::FourCC::new(b"NV24"),
PixelFormat::Yuv420 => fourcc::FourCC::new(b"YU12"),
@@ -69,6 +72,7 @@ impl PixelFormat {
b"YVYU" => Some(PixelFormat::Yvyu),
b"UYVY" => Some(PixelFormat::Uyvy),
b"NV12" => Some(PixelFormat::Nv12),
b"NV21" => Some(PixelFormat::Nv21),
b"NV16" => Some(PixelFormat::Nv16),
b"NV24" => Some(PixelFormat::Nv24),
b"YU12" | b"I420" => Some(PixelFormat::Yuv420),
@@ -92,7 +96,9 @@ impl PixelFormat {
match self {
PixelFormat::Mjpeg | PixelFormat::Jpeg => None,
PixelFormat::Yuyv | PixelFormat::Yvyu | PixelFormat::Uyvy => Some(2),
PixelFormat::Nv12 | PixelFormat::Yuv420 | PixelFormat::Yvu420 => None, // Variable
PixelFormat::Nv12 | PixelFormat::Nv21 | PixelFormat::Yuv420 | PixelFormat::Yvu420 => {
None
} // Variable
PixelFormat::Nv16 => None,
PixelFormat::Nv24 => None,
PixelFormat::Rgb565 => Some(2),
@@ -108,7 +114,9 @@ impl PixelFormat {
match self {
PixelFormat::Mjpeg | PixelFormat::Jpeg => None,
PixelFormat::Yuyv | PixelFormat::Yvyu | PixelFormat::Uyvy => Some(pixels * 2),
PixelFormat::Nv12 | PixelFormat::Yuv420 | PixelFormat::Yvu420 => Some(pixels * 3 / 2),
PixelFormat::Nv12 | PixelFormat::Nv21 | PixelFormat::Yuv420 | PixelFormat::Yvu420 => {
Some(pixels * 3 / 2)
}
PixelFormat::Nv16 => Some(pixels * 2),
PixelFormat::Nv24 => Some(pixels * 3),
PixelFormat::Rgb565 => Some(pixels * 2),
@@ -125,6 +133,7 @@ impl PixelFormat {
PixelFormat::Jpeg => 99,
PixelFormat::Yuyv => 80,
PixelFormat::Nv12 => 75,
PixelFormat::Nv21 => 74,
PixelFormat::Yuv420 => 70,
PixelFormat::Uyvy => 65,
PixelFormat::Yvyu => 64,
@@ -144,7 +153,10 @@ impl PixelFormat {
/// Software encoding prefers: YUYV > NV12
///
/// Returns None if no suitable format is available
pub fn recommended_for_encoding(available: &[PixelFormat], is_hardware: bool) -> Option<PixelFormat> {
pub fn recommended_for_encoding(
available: &[PixelFormat],
is_hardware: bool,
) -> Option<PixelFormat> {
if is_hardware {
// Hardware encoding: NV12 > YUYV
if available.contains(&PixelFormat::Nv12) {
@@ -175,6 +187,7 @@ impl PixelFormat {
PixelFormat::Yvyu,
PixelFormat::Uyvy,
PixelFormat::Nv12,
PixelFormat::Nv21,
PixelFormat::Nv16,
PixelFormat::Nv24,
PixelFormat::Yuv420,
@@ -196,6 +209,7 @@ impl fmt::Display for PixelFormat {
PixelFormat::Yvyu => "YVYU",
PixelFormat::Uyvy => "UYVY",
PixelFormat::Nv12 => "NV12",
PixelFormat::Nv21 => "NV21",
PixelFormat::Nv16 => "NV16",
PixelFormat::Nv24 => "NV24",
PixelFormat::Yuv420 => "YUV420",
@@ -220,6 +234,7 @@ impl std::str::FromStr for PixelFormat {
"YVYU" => Ok(PixelFormat::Yvyu),
"UYVY" => Ok(PixelFormat::Uyvy),
"NV12" => Ok(PixelFormat::Nv12),
"NV21" => Ok(PixelFormat::Nv21),
"NV16" => Ok(PixelFormat::Nv16),
"NV24" => Ok(PixelFormat::Nv24),
"YUV420" | "I420" => Ok(PixelFormat::Yuv420),


@@ -106,9 +106,9 @@ impl VideoFrame {
/// Get hash of frame data (computed once, cached)
/// Used for fast frame deduplication comparison
pub fn get_hash(&self) -> u64 {
*self.hash.get_or_init(|| {
xxhash_rust::xxh64::xxh64(self.data.as_ref(), 0)
})
*self
.hash
.get_or_init(|| xxhash_rust::xxh64::xxh64(self.data.as_ref(), 0))
}
/// Check if format is JPEG/MJPEG


@@ -93,10 +93,7 @@ impl H264Pipeline {
pub fn new(config: H264PipelineConfig) -> Result<Self> {
info!(
"Creating H264 pipeline: {}x{} @ {} kbps, {} fps",
config.resolution.width,
config.resolution.height,
config.bitrate_kbps,
config.fps
config.resolution.width, config.resolution.height, config.bitrate_kbps, config.fps
);
// Determine encoder input format based on pipeline input
@@ -154,7 +151,7 @@ impl H264Pipeline {
// MJPEG/JPEG input - not supported (requires libjpeg for decoding)
PixelFormat::Mjpeg | PixelFormat::Jpeg => {
return Err(AppError::VideoError(
"MJPEG input format not supported in this build".to_string()
"MJPEG input format not supported in this build".to_string(),
));
}
@@ -216,7 +213,10 @@ impl H264Pipeline {
}
let _ = self.running.send(true);
info!("Starting H264 pipeline (input format: {})", self.config.input_format);
info!(
"Starting H264 pipeline (input format: {})",
self.config.input_format
);
let encoder = self.encoder.lock().await.take();
let nv12_converter = self.nv12_converter.lock().await.take();


@@ -18,11 +18,15 @@ pub mod video_session;
pub use capture::VideoCapturer;
pub use convert::{PixelConverter, Yuv420pBuffer};
pub use device::{VideoDevice, VideoDeviceInfo};
pub use encoder::{JpegEncoder, H264Encoder, H264EncoderType};
pub use encoder::{H264Encoder, H264EncoderType, JpegEncoder};
pub use format::PixelFormat;
pub use frame::VideoFrame;
pub use h264_pipeline::{H264Pipeline, H264PipelineBuilder, H264PipelineConfig};
pub use shared_video_pipeline::{EncodedVideoFrame, SharedVideoPipeline, SharedVideoPipelineConfig, SharedVideoPipelineStats};
pub use shared_video_pipeline::{
EncodedVideoFrame, SharedVideoPipeline, SharedVideoPipelineConfig, SharedVideoPipelineStats,
};
pub use stream_manager::VideoStreamManager;
pub use streamer::{Streamer, StreamerState};
pub use video_session::{VideoSessionManager, VideoSessionManagerConfig, VideoSessionInfo, VideoSessionState, CodecInfo};
pub use video_session::{
CodecInfo, VideoSessionInfo, VideoSessionManager, VideoSessionManagerConfig, VideoSessionState,
};


@@ -28,8 +28,10 @@ const AUTO_STOP_GRACE_PERIOD_SECS: u64 = 3;
use crate::error::{AppError, Result};
use crate::video::convert::{Nv12Converter, PixelConverter};
use crate::video::encoder::h264::{H264Config, H264Encoder};
use crate::video::encoder::h265::{H265Config, H265Encoder};
use crate::video::encoder::h264::{detect_best_encoder, H264Config, H264Encoder, H264InputFormat};
use crate::video::encoder::h265::{
detect_best_h265_encoder, H265Config, H265Encoder, H265InputFormat,
};
use crate::video::encoder::registry::{EncoderBackend, EncoderRegistry, VideoEncoderType};
use crate::video::encoder::traits::EncoderConfig;
use crate::video::encoder::vp8::{VP8Config, VP8Encoder};
@@ -157,7 +159,6 @@ pub struct SharedVideoPipelineStats {
pub subscribers: u64,
}
/// Universal video encoder trait object
#[allow(dead_code)]
trait VideoEncoderTrait: Send {
@@ -300,7 +301,7 @@ pub struct SharedVideoPipeline {
/// Whether the encoder needs YUV420P (true) or NV12 (false)
encoder_needs_yuv420p: AtomicBool,
/// Whether YUYV direct input is enabled (RKMPP optimization)
yuyv_direct_input: AtomicBool,
direct_input: AtomicBool,
frame_tx: broadcast::Sender<EncodedVideoFrame>,
stats: Mutex<SharedVideoPipelineStats>,
running: watch::Sender<bool>,
@@ -326,7 +327,7 @@ impl SharedVideoPipeline {
config.input_format
);
let (frame_tx, _) = broadcast::channel(16); // Reduced from 64 for lower latency
let (frame_tx, _) = broadcast::channel(16); // Reduced from 64 for lower latency
let (running_tx, running_rx) = watch::channel(false);
let pipeline = Arc::new(Self {
@@ -335,7 +336,7 @@ impl SharedVideoPipeline {
nv12_converter: Mutex::new(None),
yuv420p_converter: Mutex::new(None),
encoder_needs_yuv420p: AtomicBool::new(false),
yuyv_direct_input: AtomicBool::new(false),
direct_input: AtomicBool::new(false),
frame_tx,
stats: Mutex::new(SharedVideoPipelineStats::default()),
running: running_tx,
@@ -354,29 +355,108 @@ impl SharedVideoPipeline {
let registry = EncoderRegistry::global();
// Helper to get codec name for specific backend
let get_codec_name = |format: VideoEncoderType, backend: Option<EncoderBackend>| -> Option<String> {
match backend {
Some(b) => registry.encoder_with_backend(format, b).map(|e| e.codec_name.clone()),
None => registry.best_encoder(format, false).map(|e| e.codec_name.clone()),
}
};
let get_codec_name =
|format: VideoEncoderType, backend: Option<EncoderBackend>| -> Option<String> {
match backend {
Some(b) => registry
.encoder_with_backend(format, b)
.map(|e| e.codec_name.clone()),
None => registry
.best_encoder(format, false)
.map(|e| e.codec_name.clone()),
}
};
// Check if RKMPP backend is available for YUYV direct input optimization
let is_rkmpp_available = registry.encoder_with_backend(VideoEncoderType::H264, EncoderBackend::Rkmpp).is_some();
// Check if RKMPP backend is available for direct input optimization
let is_rkmpp_available = registry
.encoder_with_backend(VideoEncoderType::H264, EncoderBackend::Rkmpp)
.is_some();
let use_yuyv_direct = is_rkmpp_available && config.input_format == PixelFormat::Yuyv;
let use_rkmpp_direct = is_rkmpp_available
&& matches!(
config.input_format,
PixelFormat::Yuyv
| PixelFormat::Yuv420
| PixelFormat::Rgb24
| PixelFormat::Bgr24
| PixelFormat::Nv12
| PixelFormat::Nv16
| PixelFormat::Nv21
| PixelFormat::Nv24
);
if use_yuyv_direct {
info!("RKMPP backend detected with YUYV input, enabling YUYV direct input optimization");
info!(
"RKMPP backend detected with YUYV input, enabling YUYV direct input optimization"
);
} else if use_rkmpp_direct {
info!(
"RKMPP backend detected with {} input, enabling direct input optimization",
config.input_format
);
}
// Create encoder based on codec type
let encoder: Box<dyn VideoEncoderTrait + Send> = match config.output_codec {
VideoEncoderType::H264 => {
// Determine H264 input format based on backend and input format
let h264_input_format = if use_yuyv_direct {
crate::video::encoder::h264::H264InputFormat::Yuyv422
let codec_name = if use_rkmpp_direct {
// Force RKMPP backend for direct input
get_codec_name(VideoEncoderType::H264, Some(EncoderBackend::Rkmpp)).ok_or_else(
|| {
AppError::VideoError(
"RKMPP backend not available for H.264".to_string(),
)
},
)?
} else if let Some(ref backend) = config.encoder_backend {
// Specific backend requested
get_codec_name(VideoEncoderType::H264, Some(*backend)).ok_or_else(|| {
AppError::VideoError(format!(
"Backend {:?} does not support H.264",
backend
))
})?
} else {
crate::video::encoder::h264::H264InputFormat::Nv12
// Auto select best available encoder
let (_encoder_type, detected) =
detect_best_encoder(config.resolution.width, config.resolution.height);
detected.ok_or_else(|| {
AppError::VideoError("No H.264 encoder available".to_string())
})?
};
let is_rkmpp = codec_name.contains("rkmpp");
let direct_input_format = if is_rkmpp {
match config.input_format {
PixelFormat::Yuyv => Some(H264InputFormat::Yuyv422),
PixelFormat::Yuv420 => Some(H264InputFormat::Yuv420p),
PixelFormat::Rgb24 => Some(H264InputFormat::Rgb24),
PixelFormat::Bgr24 => Some(H264InputFormat::Bgr24),
PixelFormat::Nv12 => Some(H264InputFormat::Nv12),
PixelFormat::Nv16 => Some(H264InputFormat::Nv16),
PixelFormat::Nv21 => Some(H264InputFormat::Nv21),
PixelFormat::Nv24 => Some(H264InputFormat::Nv24),
_ => None,
}
} else if codec_name.contains("libx264") {
match config.input_format {
PixelFormat::Nv12 => Some(H264InputFormat::Nv12),
PixelFormat::Nv16 => Some(H264InputFormat::Nv16),
PixelFormat::Nv21 => Some(H264InputFormat::Nv21),
PixelFormat::Yuv420 => Some(H264InputFormat::Yuv420p),
_ => None,
}
} else {
None
};
// Choose input format: prefer direct input when supported
let h264_input_format = if let Some(fmt) = direct_input_format {
fmt
} else if codec_name.contains("libx264") {
H264InputFormat::Yuv420p
} else {
H264InputFormat::Nv12
};
let encoder_config = H264Config {
@@ -387,69 +467,124 @@ impl SharedVideoPipeline {
input_format: h264_input_format,
};
let encoder = if use_yuyv_direct {
// Force RKMPP backend for YUYV direct input
let codec_name = get_codec_name(VideoEncoderType::H264, Some(EncoderBackend::Rkmpp))
.ok_or_else(|| AppError::VideoError(
"RKMPP backend not available for H.264".to_string()
))?;
info!("Creating H264 encoder with RKMPP backend for YUYV direct input (codec: {})", codec_name);
H264Encoder::with_codec(encoder_config, &codec_name)?
if use_rkmpp_direct {
info!(
"Creating H264 encoder with RKMPP backend for {} direct input (codec: {})",
config.input_format, codec_name
);
} else if let Some(ref backend) = config.encoder_backend {
// Specific backend requested
let codec_name = get_codec_name(VideoEncoderType::H264, Some(*backend))
.ok_or_else(|| AppError::VideoError(format!(
"Backend {:?} does not support H.264", backend
)))?;
info!("Creating H264 encoder with backend {:?} (codec: {})", backend, codec_name);
H264Encoder::with_codec(encoder_config, &codec_name)?
} else {
// Auto select
H264Encoder::new(encoder_config)?
};
info!(
"Creating H264 encoder with backend {:?} (codec: {})",
backend, codec_name
);
}
let encoder = H264Encoder::with_codec(encoder_config, &codec_name)?;
info!("Created H264 encoder: {}", encoder.codec_name());
Box::new(H264EncoderWrapper(encoder))
}
VideoEncoderType::H265 => {
// Determine H265 input format based on backend and input format
let encoder_config = if use_yuyv_direct {
H265Config::low_latency_yuyv422(config.resolution, config.bitrate_kbps())
let codec_name = if use_rkmpp_direct {
get_codec_name(VideoEncoderType::H265, Some(EncoderBackend::Rkmpp)).ok_or_else(
|| {
AppError::VideoError(
"RKMPP backend not available for H.265".to_string(),
)
},
)?
} else if let Some(ref backend) = config.encoder_backend {
get_codec_name(VideoEncoderType::H265, Some(*backend)).ok_or_else(|| {
AppError::VideoError(format!(
"Backend {:?} does not support H.265",
backend
))
})?
} else {
H265Config::low_latency(config.resolution, config.bitrate_kbps())
let (_encoder_type, detected) =
detect_best_h265_encoder(config.resolution.width, config.resolution.height);
detected.ok_or_else(|| {
AppError::VideoError("No H.265 encoder available".to_string())
})?
};
let encoder = if use_yuyv_direct {
// Force RKMPP backend for YUYV direct input
let codec_name = get_codec_name(VideoEncoderType::H265, Some(EncoderBackend::Rkmpp))
.ok_or_else(|| AppError::VideoError(
"RKMPP backend not available for H.265".to_string()
))?;
info!("Creating H265 encoder with RKMPP backend for YUYV direct input (codec: {})", codec_name);
H265Encoder::with_codec(encoder_config, &codec_name)?
} else if let Some(ref backend) = config.encoder_backend {
let codec_name = get_codec_name(VideoEncoderType::H265, Some(*backend))
.ok_or_else(|| AppError::VideoError(format!(
"Backend {:?} does not support H.265", backend
)))?;
info!("Creating H265 encoder with backend {:?} (codec: {})", backend, codec_name);
H265Encoder::with_codec(encoder_config, &codec_name)?
let is_rkmpp = codec_name.contains("rkmpp");
let direct_input_format = if is_rkmpp {
match config.input_format {
PixelFormat::Yuyv => Some(H265InputFormat::Yuyv422),
PixelFormat::Yuv420 => Some(H265InputFormat::Yuv420p),
PixelFormat::Rgb24 => Some(H265InputFormat::Rgb24),
PixelFormat::Bgr24 => Some(H265InputFormat::Bgr24),
PixelFormat::Nv12 => Some(H265InputFormat::Nv12),
PixelFormat::Nv16 => Some(H265InputFormat::Nv16),
PixelFormat::Nv21 => Some(H265InputFormat::Nv21),
PixelFormat::Nv24 => Some(H265InputFormat::Nv24),
_ => None,
}
} else if codec_name.contains("libx265") {
match config.input_format {
PixelFormat::Yuv420 => Some(H265InputFormat::Yuv420p),
_ => None,
}
} else {
H265Encoder::new(encoder_config)?
None
};
let h265_input_format = if let Some(fmt) = direct_input_format {
fmt
} else if codec_name.contains("libx265") {
H265InputFormat::Yuv420p
} else {
H265InputFormat::Nv12
};
let encoder_config = H265Config {
base: EncoderConfig {
resolution: config.resolution,
input_format: config.input_format,
quality: config.bitrate_kbps(),
fps: config.fps,
gop_size: config.gop_size(),
},
bitrate_kbps: config.bitrate_kbps(),
gop_size: config.gop_size(),
fps: config.fps,
input_format: h265_input_format,
};
if use_rkmpp_direct {
info!(
"Creating H265 encoder with RKMPP backend for {} direct input (codec: {})",
config.input_format, codec_name
);
} else if let Some(ref backend) = config.encoder_backend {
info!(
"Creating H265 encoder with backend {:?} (codec: {})",
backend, codec_name
);
}
let encoder = H265Encoder::with_codec(encoder_config, &codec_name)?;
info!("Created H265 encoder: {}", encoder.codec_name());
Box::new(H265EncoderWrapper(encoder))
}
VideoEncoderType::VP8 => {
let encoder_config = VP8Config::low_latency(config.resolution, config.bitrate_kbps());
let encoder_config =
VP8Config::low_latency(config.resolution, config.bitrate_kbps());
let encoder = if let Some(ref backend) = config.encoder_backend {
let codec_name = get_codec_name(VideoEncoderType::VP8, Some(*backend))
.ok_or_else(|| AppError::VideoError(format!(
"Backend {:?} does not support VP8", backend
)))?;
info!("Creating VP8 encoder with backend {:?} (codec: {})", backend, codec_name);
.ok_or_else(|| {
AppError::VideoError(format!(
"Backend {:?} does not support VP8",
backend
))
})?;
info!(
"Creating VP8 encoder with backend {:?} (codec: {})",
backend, codec_name
);
VP8Encoder::with_codec(encoder_config, &codec_name)?
} else {
VP8Encoder::new(encoder_config)?
@@ -459,14 +594,21 @@ impl SharedVideoPipeline {
Box::new(VP8EncoderWrapper(encoder))
}
VideoEncoderType::VP9 => {
let encoder_config = VP9Config::low_latency(config.resolution, config.bitrate_kbps());
let encoder_config =
VP9Config::low_latency(config.resolution, config.bitrate_kbps());
let encoder = if let Some(ref backend) = config.encoder_backend {
let codec_name = get_codec_name(VideoEncoderType::VP9, Some(*backend))
.ok_or_else(|| AppError::VideoError(format!(
"Backend {:?} does not support VP9", backend
)))?;
info!("Creating VP9 encoder with backend {:?} (codec: {})", backend, codec_name);
.ok_or_else(|| {
AppError::VideoError(format!(
"Backend {:?} does not support VP9",
backend
))
})?;
info!(
"Creating VP9 encoder with backend {:?} (codec: {})",
backend, codec_name
);
VP9Encoder::with_codec(encoder_config, &codec_name)?
} else {
VP9Encoder::new(encoder_config)?
@@ -477,25 +619,71 @@ impl SharedVideoPipeline {
}
};
// Determine if encoder needs YUV420P (software encoders) or NV12 (hardware encoders)
// Determine if encoder can take direct input without conversion
let codec_name = encoder.codec_name();
let needs_yuv420p = codec_name.contains("libvpx") || codec_name.contains("libx265");
let use_direct_input = if codec_name.contains("rkmpp") {
matches!(
config.input_format,
PixelFormat::Yuyv
| PixelFormat::Yuv420
| PixelFormat::Rgb24
| PixelFormat::Bgr24
| PixelFormat::Nv12
| PixelFormat::Nv16
| PixelFormat::Nv21
| PixelFormat::Nv24
)
} else if codec_name.contains("libx264") {
matches!(
config.input_format,
PixelFormat::Nv12 | PixelFormat::Nv16 | PixelFormat::Nv21 | PixelFormat::Yuv420
)
} else {
false
};
// Determine if encoder needs YUV420P (software encoders) or NV12 (hardware encoders)
let needs_yuv420p = if codec_name.contains("libx264") {
!matches!(
config.input_format,
PixelFormat::Nv12 | PixelFormat::Nv16 | PixelFormat::Nv21 | PixelFormat::Yuv420
)
} else {
codec_name.contains("libvpx") || codec_name.contains("libx265")
};
info!(
"Encoder {} needs {} format",
codec_name,
if use_yuyv_direct { "YUYV422 (direct)" } else if needs_yuv420p { "YUV420P" } else { "NV12" }
if use_direct_input {
"direct"
} else if needs_yuv420p {
"YUV420P"
} else {
"NV12"
}
);
// Create converter or decoder based on input format and encoder needs
info!("Initializing input format handler for: {} -> {}",
config.input_format,
if use_yuyv_direct { "YUYV422 (direct)" } else if needs_yuv420p { "YUV420P" } else { "NV12" });
info!(
"Initializing input format handler for: {} -> {}",
config.input_format,
if use_direct_input {
"direct"
} else if needs_yuv420p {
"YUV420P"
} else {
"NV12"
}
);
let (nv12_converter, yuv420p_converter) = if use_yuyv_direct {
// RKMPP with YUYV direct input - skip all conversion
info!("YUYV direct input enabled for RKMPP, skipping format conversion");
(None, None)
} else if use_direct_input {
info!("Direct input enabled, skipping format conversion");
(None, None)
} else if needs_yuv420p {
// Software encoder needs YUV420P
match config.input_format {
@@ -505,19 +693,38 @@ impl SharedVideoPipeline {
}
PixelFormat::Yuyv => {
info!("Using YUYV->YUV420P converter");
(None, Some(PixelConverter::yuyv_to_yuv420p(config.resolution)))
(
None,
Some(PixelConverter::yuyv_to_yuv420p(config.resolution)),
)
}
PixelFormat::Nv12 => {
info!("Using NV12->YUV420P converter");
(None, Some(PixelConverter::nv12_to_yuv420p(config.resolution)))
(
None,
Some(PixelConverter::nv12_to_yuv420p(config.resolution)),
)
}
PixelFormat::Nv21 => {
info!("Using NV21->YUV420P converter");
(
None,
Some(PixelConverter::nv21_to_yuv420p(config.resolution)),
)
}
PixelFormat::Rgb24 => {
info!("Using RGB24->YUV420P converter");
(None, Some(PixelConverter::rgb24_to_yuv420p(config.resolution)))
(
None,
Some(PixelConverter::rgb24_to_yuv420p(config.resolution)),
)
}
PixelFormat::Bgr24 => {
info!("Using BGR24->YUV420P converter");
(None, Some(PixelConverter::bgr24_to_yuv420p(config.resolution)))
(
None,
Some(PixelConverter::bgr24_to_yuv420p(config.resolution)),
)
}
_ => {
return Err(AppError::VideoError(format!(
@@ -537,6 +744,18 @@ impl SharedVideoPipeline {
info!("Using YUYV->NV12 converter");
(Some(Nv12Converter::yuyv_to_nv12(config.resolution)), None)
}
PixelFormat::Nv21 => {
info!("Using NV21->NV12 converter");
(Some(Nv12Converter::nv21_to_nv12(config.resolution)), None)
}
PixelFormat::Nv16 => {
info!("Using NV16->NV12 converter");
(Some(Nv12Converter::nv16_to_nv12(config.resolution)), None)
}
PixelFormat::Yuv420 => {
info!("Using YUV420P->NV12 converter");
(Some(Nv12Converter::yuv420_to_nv12(config.resolution)), None)
}
PixelFormat::Rgb24 => {
info!("Using RGB24->NV12 converter");
(Some(Nv12Converter::rgb24_to_nv12(config.resolution)), None)
@@ -557,8 +776,9 @@ impl SharedVideoPipeline {
*self.encoder.lock().await = Some(encoder);
*self.nv12_converter.lock().await = nv12_converter;
*self.yuv420p_converter.lock().await = yuv420p_converter;
self.encoder_needs_yuv420p.store(needs_yuv420p, Ordering::Release);
self.yuyv_direct_input.store(use_yuyv_direct, Ordering::Release);
self.encoder_needs_yuv420p
.store(needs_yuv420p, Ordering::Release);
self.direct_input.store(use_direct_input, Ordering::Release);
Ok(())
}
@@ -646,7 +866,10 @@ impl SharedVideoPipeline {
}
/// Start the pipeline
pub async fn start(self: &Arc<Self>, mut frame_rx: broadcast::Receiver<VideoFrame>) -> Result<()> {
pub async fn start(
self: &Arc<Self>,
mut frame_rx: broadcast::Receiver<VideoFrame>,
) -> Result<()> {
if *self.running_rx.borrow() {
warn!("Pipeline already running");
return Ok(());
@@ -657,7 +880,10 @@ impl SharedVideoPipeline {
let config = self.config.read().await.clone();
let gop_size = config.gop_size();
info!("Starting {} pipeline (GOP={})", config.output_codec, gop_size);
info!(
"Starting {} pipeline (GOP={})",
config.output_codec, gop_size
);
let pipeline = self.clone();
@@ -674,7 +900,6 @@ impl SharedVideoPipeline {
let mut local_errors: u64 = 0;
let mut local_dropped: u64 = 0;
let mut local_skipped: u64 = 0;
// Track when we last had subscribers for auto-stop feature
let mut no_subscribers_since: Option<Instant> = None;
let grace_period = Duration::from_secs(AUTO_STOP_GRACE_PERIOD_SECS);
@@ -790,7 +1015,11 @@ impl SharedVideoPipeline {
}
/// Encode a single frame
async fn encode_frame(&self, frame: &VideoFrame, frame_count: u64) -> Result<Option<EncodedVideoFrame>> {
async fn encode_frame(
&self,
frame: &VideoFrame,
frame_count: u64,
) -> Result<Option<EncodedVideoFrame>> {
let config = self.config.read().await;
let raw_frame = frame.data();
let fps = config.fps;
@@ -835,9 +1064,9 @@ impl SharedVideoPipeline {
let needs_yuv420p = self.encoder_needs_yuv420p.load(Ordering::Acquire);
let mut encoder_guard = self.encoder.lock().await;
let encoder = encoder_guard.as_mut().ok_or_else(|| {
AppError::VideoError("Encoder not initialized".to_string())
})?;
let encoder = encoder_guard
.as_mut()
.ok_or_else(|| AppError::VideoError("Encoder not initialized".to_string()))?;
// Check and consume keyframe request (atomic, no lock contention)
if self.keyframe_requested.swap(false, Ordering::AcqRel) {
@@ -848,13 +1077,15 @@ impl SharedVideoPipeline {
let encode_result = if needs_yuv420p && yuv420p_converter.is_some() {
// Software encoder with direct input conversion to YUV420P
let conv = yuv420p_converter.as_mut().unwrap();
let yuv420p_data = conv.convert(raw_frame)
let yuv420p_data = conv
.convert(raw_frame)
.map_err(|e| AppError::VideoError(format!("YUV420P conversion failed: {}", e)))?;
encoder.encode_raw(yuv420p_data, pts_ms)
} else if nv12_converter.is_some() {
// Hardware encoder with input conversion to NV12
let conv = nv12_converter.as_mut().unwrap();
let nv12_data = conv.convert(raw_frame)
let nv12_data = conv
.convert(raw_frame)
.map_err(|e| AppError::VideoError(format!("NV12 conversion failed: {}", e)))?;
encoder.encode_raw(nv12_data, pts_ms)
} else {
@@ -871,7 +1102,6 @@ impl SharedVideoPipeline {
if !frames.is_empty() {
let encoded = frames.into_iter().next().unwrap();
let is_keyframe = encoded.key == 1;
let sequence = self.sequence.fetch_add(1, Ordering::Relaxed) + 1;
// Debug log for H265 encoded frame
@@ -901,17 +1131,23 @@ impl SharedVideoPipeline {
}))
} else {
if codec == VideoEncoderType::H265 {
warn!("[Pipeline-H265] Encoder returned no frames for frame #{}", frame_count);
warn!(
"[Pipeline-H265] Encoder returned no frames for frame #{}",
frame_count
);
}
Ok(None)
}
}
Err(e) => {
if codec == VideoEncoderType::H265 {
error!("[Pipeline-H265] Encode error at frame #{}: {}", frame_count, e);
error!(
"[Pipeline-H265] Encode error at frame #{}: {}",
frame_count, e
);
}
Err(e)
},
}
}
}
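The three-way branch in encode_frame above reduces to a small decision table; a sketch with simplified names (not part of the commit): software encoders take YUV420P, hardware encoders take NV12, and the direct-input path skips conversion entirely.

    enum ConvertTo {
        Yuv420p,
        Nv12,
        None,
    }

    fn pick_conversion(needs_yuv420p: bool, has_yuv420p_conv: bool, has_nv12_conv: bool) -> ConvertTo {
        if needs_yuv420p && has_yuv420p_conv {
            ConvertTo::Yuv420p // software encoder input
        } else if has_nv12_conv {
            ConvertTo::Nv12 // hardware encoder input
        } else {
            ConvertTo::None // direct input (e.g. the RKMPP direct path)
        }
    }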
@@ -924,7 +1160,10 @@ impl SharedVideoPipeline {
}
/// Set bitrate using preset
pub async fn set_bitrate_preset(&self, preset: crate::video::encoder::BitratePreset) -> Result<()> {
pub async fn set_bitrate_preset(
&self,
preset: crate::video::encoder::BitratePreset,
) -> Result<()> {
let bitrate_kbps = preset.bitrate_kbps();
if let Some(ref mut encoder) = *self.encoder.lock().await {
encoder.set_bitrate(bitrate_kbps)?;
@@ -965,11 +1204,7 @@ fn parse_h265_nal_types(data: &[u8]) -> Vec<(u8, usize)> {
&& data[i + 3] == 1
{
i + 4
} else if i + 3 <= data.len()
&& data[i] == 0
&& data[i + 1] == 0
&& data[i + 2] == 1
{
} else if i + 3 <= data.len() && data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1 {
i + 3
} else {
i += 1;
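For reference (editorial note, not part of the diff): the scanner above accepts both 4-byte (00 00 00 01) and 3-byte (00 00 01) Annex-B start codes, and the H.265 NAL unit type is the six bits following the forbidden_zero_bit in the first header byte:

    fn h265_nal_type(first_header_byte: u8) -> u8 {
        // nal_unit_type occupies bits 1..=6 of the first NAL header byte.
        (first_header_byte >> 1) & 0x3F
    }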
View File
@@ -30,6 +30,7 @@ use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::{debug, error, info, warn};
use uuid::Uuid;
use crate::config::{ConfigStore, StreamMode};
use crate::error::Result;
@@ -55,6 +56,17 @@ pub struct StreamManagerConfig {
pub fps: u32,
}
/// Result of a mode switch request.
#[derive(Debug, Clone)]
pub struct ModeSwitchTransaction {
/// Whether this request started a new switch.
pub accepted: bool,
/// Whether a switch is currently in progress after handling this request.
pub switching: bool,
/// Transition ID if a switch is/was in progress.
pub transition_id: Option<String>,
}
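A minimal sketch of how a /stream/mode handler might serialize this transaction, assuming serde_json is available; the JSON field names follow the commit message and are otherwise assumptions.

    use serde_json::json;

    fn transaction_response(tx: ModeSwitchTransaction) -> serde_json::Value {
        json!({
            "accepted": tx.accepted,
            "switching": tx.switching,
            "transition_id": tx.transition_id, // null when no switch is known
        })
    }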
impl Default for StreamManagerConfig {
fn default() -> Self {
Self {
@@ -90,6 +102,8 @@ pub struct VideoStreamManager {
config_store: RwLock<Option<ConfigStore>>,
/// Mode switching lock to prevent concurrent switch requests
switching: AtomicBool,
/// Current mode switch transaction ID (set while switching=true)
transition_id: RwLock<Option<String>>,
}
impl VideoStreamManager {
@@ -105,6 +119,7 @@ impl VideoStreamManager {
events: RwLock::new(None),
config_store: RwLock::new(None),
switching: AtomicBool::new(false),
transition_id: RwLock::new(None),
})
}
@@ -113,6 +128,11 @@ impl VideoStreamManager {
self.switching.load(Ordering::SeqCst)
}
/// Get current mode switch transition ID, if any
pub async fn current_transition_id(&self) -> Option<String> {
self.transition_id.read().await.clone()
}
/// Set event bus for notifications
pub async fn set_event_bus(&self, events: Arc<EventBus>) {
*self.events.write().await = Some(events);
@@ -188,7 +208,9 @@ impl VideoStreamManager {
"Reconnecting frame source to WebRTC after init: {}x{} {:?} @ {}fps (receiver_count={})",
resolution.width, resolution.height, format, fps, frame_tx.receiver_count()
);
self.webrtc_streamer.update_video_config(resolution, format, fps).await;
self.webrtc_streamer
.update_video_config(resolution, format, fps)
.await;
self.webrtc_streamer.set_video_source(frame_tx).await;
}
@@ -204,6 +226,18 @@ impl VideoStreamManager {
/// 4. Start the new mode (ensuring video capture runs for WebRTC)
/// 5. Update configuration
pub async fn switch_mode(self: &Arc<Self>, new_mode: StreamMode) -> Result<()> {
let _ = self.switch_mode_transaction(new_mode).await?;
Ok(())
}
/// Switch streaming mode with a transaction ID for correlating events
///
/// If a switch is already in progress, returns `accepted=false` with the
/// current `transition_id` (if known) and does not start a new switch.
pub async fn switch_mode_transaction(
self: &Arc<Self>,
new_mode: StreamMode,
) -> Result<ModeSwitchTransaction> {
let current_mode = self.mode.read().await.clone();
if current_mode == new_mode {
@@ -212,19 +246,85 @@ impl VideoStreamManager {
if new_mode == StreamMode::WebRTC {
self.ensure_video_capture_running().await?;
}
return Ok(());
return Ok(ModeSwitchTransaction {
accepted: false,
switching: false,
transition_id: None,
});
}
// Acquire switching lock - prevent concurrent switch requests
if self.switching.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst).is_err() {
if self
.switching
.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
.is_err()
{
debug!("Mode switch already in progress, ignoring duplicate request");
return Ok(());
return Ok(ModeSwitchTransaction {
accepted: false,
switching: true,
transition_id: self.transition_id.read().await.clone(),
});
}
// Use a helper to ensure we release the lock when done
let result = self.do_switch_mode(current_mode, new_mode.clone()).await;
self.switching.store(false, Ordering::SeqCst);
result
let transition_id = Uuid::new_v4().to_string();
*self.transition_id.write().await = Some(transition_id.clone());
// Publish transaction start event
let from_mode_str = self.mode_to_string(&current_mode).await;
let to_mode_str = self.mode_to_string(&new_mode).await;
self.publish_event(SystemEvent::StreamModeSwitching {
transition_id: transition_id.clone(),
to_mode: to_mode_str,
from_mode: from_mode_str,
})
.await;
// Perform the switch asynchronously so the HTTP handler can return
// immediately and clients can reliably wait for WebSocket events.
let manager = Arc::clone(self);
let transition_id_for_task = transition_id.clone();
tokio::spawn(async move {
let result = manager
.do_switch_mode(current_mode, new_mode, transition_id_for_task.clone())
.await;
if let Err(e) = result {
error!(
"Mode switch transaction {} failed: {}",
transition_id_for_task, e
);
}
// Publish transaction end marker with best-effort actual mode
let actual_mode = manager.mode.read().await.clone();
let actual_mode_str = manager.mode_to_string(&actual_mode).await;
manager
.publish_event(SystemEvent::StreamModeReady {
transition_id: transition_id_for_task.clone(),
mode: actual_mode_str,
})
.await;
*manager.transition_id.write().await = None;
manager.switching.store(false, Ordering::SeqCst);
});
Ok(ModeSwitchTransaction {
accepted: true,
switching: true,
transition_id: Some(transition_id),
})
}
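A caller-side sketch under the types above (the error type path is assumed from this module): the returned transaction distinguishes a newly started switch from one already in flight, and transition_id is what mode_switching/mode_ready events are correlated against.

    use std::sync::Arc;

    async fn request_webrtc(manager: &Arc<VideoStreamManager>) -> crate::error::Result<()> {
        let tx = manager.switch_mode_transaction(StreamMode::WebRTC).await?;
        if tx.accepted {
            // New switch started: wait for the mode_ready event carrying
            // tx.transition_id before treating the mode as active.
        } else if tx.switching {
            // A switch was already in progress; tx.transition_id (if Some)
            // identifies the transaction to wait on instead.
        }
        Ok(())
    }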
async fn mode_to_string(&self, mode: &StreamMode) -> String {
match mode {
StreamMode::Mjpeg => "mjpeg".to_string(),
StreamMode::WebRTC => {
let codec = self.webrtc_streamer.current_video_codec().await;
codec_to_string(codec)
}
}
}
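Presumably codec_to_string yields the wire labels the frontend compares against; a hedged sketch of that mapping (labels assumed, not confirmed by this hunk):

    fn codec_label(is_h265: bool) -> &'static str {
        if is_h265 { "h265" } else { "h264" }
    }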
/// Ensure video capture is running (for WebRTC mode)
@@ -257,7 +357,9 @@ impl VideoStreamManager {
"Reconnecting frame source to WebRTC: {}x{} {:?} @ {}fps",
resolution.width, resolution.height, format, fps
);
self.webrtc_streamer.update_video_config(resolution, format, fps).await;
self.webrtc_streamer
.update_video_config(resolution, format, fps)
.await;
self.webrtc_streamer.set_video_source(frame_tx).await;
}
@@ -265,7 +367,12 @@ impl VideoStreamManager {
}
/// Internal implementation of mode switching (called with lock held)
async fn do_switch_mode(self: &Arc<Self>, current_mode: StreamMode, new_mode: StreamMode) -> Result<()> {
async fn do_switch_mode(
self: &Arc<Self>,
current_mode: StreamMode,
new_mode: StreamMode,
transition_id: String,
) -> Result<()> {
info!("Switching video mode: {:?} -> {:?}", current_mode, new_mode);
// Get the actual mode strings (with codec info for WebRTC)
@@ -286,6 +393,7 @@ impl VideoStreamManager {
// 1. Publish mode change event (clients should prepare to reconnect)
self.publish_event(SystemEvent::StreamModeChanged {
transition_id: Some(transition_id.clone()),
mode: new_mode_str,
previous_mode: previous_mode_str,
})
@@ -320,15 +428,26 @@ impl VideoStreamManager {
// Auto-switch to MJPEG format if device supports it
if let Some(device) = self.streamer.current_device().await {
let (current_format, resolution, fps) = self.streamer.current_video_config().await;
let available_formats: Vec<PixelFormat> = device.formats.iter().map(|f| f.format).collect();
let (current_format, resolution, fps) =
self.streamer.current_video_config().await;
let available_formats: Vec<PixelFormat> =
device.formats.iter().map(|f| f.format).collect();
// If current format is not MJPEG and device supports MJPEG, switch to it
if current_format != PixelFormat::Mjpeg && available_formats.contains(&PixelFormat::Mjpeg) {
if current_format != PixelFormat::Mjpeg
&& available_formats.contains(&PixelFormat::Mjpeg)
{
info!("Auto-switching to MJPEG format for MJPEG mode");
let device_path = device.path.to_string_lossy().to_string();
if let Err(e) = self.streamer.apply_video_config(&device_path, PixelFormat::Mjpeg, resolution, fps).await {
warn!("Failed to auto-switch to MJPEG format: {}, keeping current format", e);
if let Err(e) = self
.streamer
.apply_video_config(&device_path, PixelFormat::Mjpeg, resolution, fps)
.await
{
warn!(
"Failed to auto-switch to MJPEG format: {}, keeping current format",
e
);
}
}
}
@@ -353,21 +472,29 @@ impl VideoStreamManager {
// Auto-switch to non-compressed format if current format is MJPEG/JPEG
if let Some(device) = self.streamer.current_device().await {
let (current_format, resolution, fps) = self.streamer.current_video_config().await;
let (current_format, resolution, fps) =
self.streamer.current_video_config().await;
if current_format.is_compressed() {
let available_formats: Vec<PixelFormat> = device.formats.iter().map(|f| f.format).collect();
let available_formats: Vec<PixelFormat> =
device.formats.iter().map(|f| f.format).collect();
// Determine if using hardware encoding
let is_hardware = self.webrtc_streamer.is_hardware_encoding().await;
if let Some(recommended) = PixelFormat::recommended_for_encoding(&available_formats, is_hardware) {
if let Some(recommended) =
PixelFormat::recommended_for_encoding(&available_formats, is_hardware)
{
info!(
"Auto-switching from {:?} to {:?} for WebRTC encoding (hardware={})",
current_format, recommended, is_hardware
);
let device_path = device.path.to_string_lossy().to_string();
if let Err(e) = self.streamer.apply_video_config(&device_path, recommended, resolution, fps).await {
if let Err(e) = self
.streamer
.apply_video_config(&device_path, recommended, resolution, fps)
.await
{
warn!("Failed to auto-switch format for WebRTC: {}, keeping current format", e);
}
}
@@ -394,33 +521,24 @@ impl VideoStreamManager {
"Connecting frame source to WebRTC pipeline: {}x{} {:?} @ {}fps",
resolution.width, resolution.height, format, fps
);
self.webrtc_streamer.update_video_config(resolution, format, fps).await;
self.webrtc_streamer
.update_video_config(resolution, format, fps)
.await;
self.webrtc_streamer.set_video_source(frame_tx).await;
// Get device path for events
let device_path = self.streamer.current_device().await
.map(|d| d.path.to_string_lossy().to_string())
.unwrap_or_default();
// Publish StreamConfigApplied event - clients can now safely connect
self.publish_event(SystemEvent::StreamConfigApplied {
device: device_path,
resolution: (resolution.width, resolution.height),
format: format!("{:?}", format).to_lowercase(),
fps,
})
.await;
// Publish WebRTCReady event - frame source is now connected
let codec = self.webrtc_streamer.current_video_codec().await;
let is_hardware = self.webrtc_streamer.is_hardware_encoding().await;
self.publish_event(SystemEvent::WebRTCReady {
transition_id: Some(transition_id.clone()),
codec: codec_to_string(codec),
hardware: is_hardware,
})
.await;
} else {
warn!("No frame source available for WebRTC - sessions may fail to receive video");
warn!(
"No frame source available for WebRTC - sessions may fail to receive video"
);
}
info!("WebRTC mode activated (sessions created on-demand)");
@@ -483,13 +601,16 @@ impl VideoStreamManager {
if let Some(frame_tx) = self.streamer.frame_sender().await {
// Note: update_video_config was already called above with the requested config,
// but verify that actual capture matches
let (actual_format, actual_resolution, actual_fps) = self.streamer.current_video_config().await;
let (actual_format, actual_resolution, actual_fps) =
self.streamer.current_video_config().await;
if actual_format != format || actual_resolution != resolution || actual_fps != fps {
info!(
"Actual capture config differs from requested, updating WebRTC: {}x{} {:?} @ {}fps",
actual_resolution.width, actual_resolution.height, actual_format, actual_fps
);
self.webrtc_streamer.update_video_config(actual_resolution, actual_format, actual_fps).await;
self.webrtc_streamer
.update_video_config(actual_resolution, actual_format, actual_fps)
.await;
}
info!("Reconnecting frame source to WebRTC after config change");
self.webrtc_streamer.set_video_source(frame_tx).await;
@@ -522,7 +643,9 @@ impl VideoStreamManager {
if let Some(frame_tx) = self.streamer.frame_sender().await {
// Synchronize WebRTC config with actual capture format
let (format, resolution, fps) = self.streamer.current_video_config().await;
self.webrtc_streamer.update_video_config(resolution, format, fps).await;
self.webrtc_streamer
.update_video_config(resolution, format, fps)
.await;
self.webrtc_streamer.set_video_source(frame_tx).await;
}
}
@@ -620,7 +743,9 @@ impl VideoStreamManager {
// =========================================================================
/// List available video devices
pub async fn list_devices(&self) -> crate::error::Result<Vec<crate::video::device::VideoDeviceInfo>> {
pub async fn list_devices(
&self,
) -> crate::error::Result<Vec<crate::video::device::VideoDeviceInfo>> {
self.streamer.list_devices().await
}
@@ -640,7 +765,9 @@ impl VideoStreamManager {
}
/// Get frame sender for video frames
pub async fn frame_sender(&self) -> Option<tokio::sync::broadcast::Sender<crate::video::frame::VideoFrame>> {
pub async fn frame_sender(
&self,
) -> Option<tokio::sync::broadcast::Sender<crate::video::frame::VideoFrame>> {
self.streamer.frame_sender().await
}
@@ -654,12 +781,17 @@ impl VideoStreamManager {
/// Returns None if video capture cannot be started or pipeline creation fails.
pub async fn subscribe_encoded_frames(
&self,
) -> Option<tokio::sync::broadcast::Receiver<crate::video::shared_video_pipeline::EncodedVideoFrame>> {
) -> Option<
tokio::sync::broadcast::Receiver<crate::video::shared_video_pipeline::EncodedVideoFrame>,
> {
// 1. Ensure video capture is initialized
if self.streamer.state().await == StreamerState::Uninitialized {
tracing::info!("Initializing video capture for encoded frame subscription");
if let Err(e) = self.streamer.init_auto().await {
tracing::error!("Failed to initialize video capture for encoded frames: {}", e);
tracing::error!(
"Failed to initialize video capture for encoded frames: {}",
e
);
return None;
}
}
@@ -688,13 +820,22 @@ impl VideoStreamManager {
let (format, resolution, fps) = self.streamer.current_video_config().await;
tracing::info!(
"Connecting encoded frame subscription: {}x{} {:?} @ {}fps",
resolution.width, resolution.height, format, fps
resolution.width,
resolution.height,
format,
fps
);
self.webrtc_streamer.update_video_config(resolution, format, fps).await;
self.webrtc_streamer
.update_video_config(resolution, format, fps)
.await;
// 5. Use WebRtcStreamer to ensure the shared video pipeline is running
// This will create the pipeline if needed
match self.webrtc_streamer.ensure_video_pipeline_for_external(frame_tx).await {
match self
.webrtc_streamer
.ensure_video_pipeline_for_external(frame_tx)
.await
{
Ok(pipeline) => Some(pipeline.subscribe()),
Err(e) => {
tracing::error!("Failed to start shared video pipeline: {}", e);
@@ -704,7 +845,9 @@ impl VideoStreamManager {
}
/// Get the current video encoding configuration from the shared pipeline
pub async fn get_encoding_config(&self) -> Option<crate::video::shared_video_pipeline::SharedVideoPipelineConfig> {
pub async fn get_encoding_config(
&self,
) -> Option<crate::video::shared_video_pipeline::SharedVideoPipelineConfig> {
self.webrtc_streamer.get_pipeline_config().await
}
@@ -712,7 +855,10 @@ impl VideoStreamManager {
///
/// This allows external consumers (like RustDesk) to set the video codec
/// before subscribing to encoded frames.
pub async fn set_video_codec(&self, codec: crate::video::encoder::VideoCodecType) -> crate::error::Result<()> {
pub async fn set_video_codec(
&self,
codec: crate::video::encoder::VideoCodecType,
) -> crate::error::Result<()> {
self.webrtc_streamer.set_video_codec(codec).await
}
@@ -720,7 +866,10 @@ impl VideoStreamManager {
///
/// This allows external consumers (like RustDesk) to adjust the video quality
/// based on client preferences.
pub async fn set_bitrate_preset(&self, preset: crate::video::encoder::BitratePreset) -> crate::error::Result<()> {
pub async fn set_bitrate_preset(
&self,
preset: crate::video::encoder::BitratePreset,
) -> crate::error::Result<()> {
self.webrtc_streamer.set_bitrate_preset(preset).await
}
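A consumer sketch for the encoded-frame API above (e.g. a RustDesk-style external consumer); shutdown and lag handling are simplified.

    use std::sync::Arc;

    async fn consume_encoded(manager: Arc<VideoStreamManager>) {
        if let Some(mut rx) = manager.subscribe_encoded_frames().await {
            // recv() yields Err(RecvError::Lagged) if this receiver falls
            // behind; the sketch simply stops consuming in that case.
            while let Ok(frame) = rx.recv().await {
                // Forward the EncodedVideoFrame to the remote peer here.
                drop(frame);
            }
        }
    }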
View File
@@ -133,7 +133,12 @@ impl Streamer {
/// Get current state as SystemEvent
pub async fn current_state_event(&self) -> SystemEvent {
let state = *self.state.read().await;
let device = self.current_device.read().await.as_ref().map(|d| d.path.display().to_string());
let device = self
.current_device
.read()
.await
.as_ref()
.map(|d| d.path.display().to_string());
SystemEvent::StreamStateChanged {
state: match state {
@@ -162,7 +167,8 @@ impl Streamer {
/// Check if config is currently being changed
/// When true, auto-start should be blocked to prevent device busy errors
pub fn is_config_changing(&self) -> bool {
self.config_changing.load(std::sync::atomic::Ordering::SeqCst)
self.config_changing
.load(std::sync::atomic::Ordering::SeqCst)
}
/// Get MJPEG handler for stream endpoints
@@ -209,13 +215,17 @@ impl Streamer {
fps: u32,
) -> Result<()> {
// Set config_changing flag to prevent frontend mode sync during config change
self.config_changing.store(true, std::sync::atomic::Ordering::SeqCst);
self.config_changing
.store(true, std::sync::atomic::Ordering::SeqCst);
let result = self.apply_video_config_inner(device_path, format, resolution, fps).await;
let result = self
.apply_video_config_inner(device_path, format, resolution, fps)
.await;
// Clear the flag after config change is complete
// The stream will be started by MJPEG client connection, not here
self.config_changing.store(false, std::sync::atomic::Ordering::SeqCst);
self.config_changing
.store(false, std::sync::atomic::Ordering::SeqCst);
result
}
@@ -230,6 +240,7 @@ impl Streamer {
) -> Result<()> {
// Publish "config changing" event
self.publish_event(SystemEvent::StreamConfigChanging {
transition_id: None,
reason: "device_switch".to_string(),
})
.await;
@@ -254,7 +265,9 @@ impl Streamer {
.iter()
.any(|r| r.width == resolution.width && r.height == resolution.height)
{
return Err(AppError::VideoError("Requested resolution not supported".to_string()));
return Err(AppError::VideoError(
"Requested resolution not supported".to_string(),
));
}
// IMPORTANT: Disconnect all MJPEG clients FIRST before stopping capture
@@ -277,7 +290,6 @@ impl Streamer {
// Explicitly drop the capturer to release V4L2 resources
drop(capturer);
}
}
// Update config
@@ -305,9 +317,12 @@ impl Streamer {
*self.state.write().await = StreamerState::Ready;
// Publish "config applied" event
info!("Publishing StreamConfigApplied event: {}x{} {:?} @ {}fps",
resolution.width, resolution.height, format, fps);
info!(
"Publishing StreamConfigApplied event: {}x{} {:?} @ {}fps",
resolution.width, resolution.height, format, fps
);
self.publish_event(SystemEvent::StreamConfigApplied {
transition_id: None,
device: device_path.to_string(),
resolution: (resolution.width, resolution.height),
format: format!("{:?}", format),
@@ -381,7 +396,11 @@ impl Streamer {
}
/// Select best format for device
fn select_format(&self, device: &VideoDeviceInfo, preferred: PixelFormat) -> Result<PixelFormat> {
fn select_format(
&self,
device: &VideoDeviceInfo,
preferred: PixelFormat,
) -> Result<PixelFormat> {
// Check if preferred format is available
if device.formats.iter().any(|f| f.format == preferred) {
return Ok(preferred);
@@ -410,9 +429,10 @@ impl Streamer {
// Check if preferred resolution is available
if format_info.resolutions.is_empty()
|| format_info.resolutions.iter().any(|r| {
r.width == preferred.width && r.height == preferred.height
})
|| format_info
.resolutions
.iter()
.any(|r| r.width == preferred.width && r.height == preferred.height)
{
return Ok(preferred);
}
@@ -528,7 +548,10 @@ impl Streamer {
// Stop the streamer
if let Some(streamer) = state_ref.upgrade() {
if let Err(e) = streamer.stop().await {
warn!("Failed to stop streamer during idle cleanup: {}", e);
warn!(
"Failed to stop streamer during idle cleanup: {}",
e
);
}
}
break;
@@ -609,8 +632,14 @@ impl Streamer {
// Start background tasks only once per Streamer instance
// Use compare_exchange to atomically check and set the flag
if self.background_tasks_started
.compare_exchange(false, true, std::sync::atomic::Ordering::SeqCst, std::sync::atomic::Ordering::SeqCst)
if self
.background_tasks_started
.compare_exchange(
false,
true,
std::sync::atomic::Ordering::SeqCst,
std::sync::atomic::Ordering::SeqCst,
)
.is_ok()
{
info!("Starting background tasks (stats, cleanup, monitor)");
@@ -626,10 +655,12 @@ impl Streamer {
let clients_stat = streamer.mjpeg_handler().get_clients_stat();
let clients = clients_stat.len() as u64;
streamer.publish_event(SystemEvent::StreamStatsUpdate {
clients,
clients_stat,
}).await;
streamer
.publish_event(SystemEvent::StreamStatsUpdate {
clients,
clients_stat,
})
.await;
} else {
break;
}
@@ -649,7 +680,9 @@ impl Streamer {
loop {
interval.tick().await;
let Some(streamer) = monitor_ref.upgrade() else { break; };
let Some(streamer) = monitor_ref.upgrade() else {
break;
};
// Check auto-pause configuration
let config = monitor_handler.auto_pause_config();
@@ -663,10 +696,16 @@ impl Streamer {
if count == 0 {
if zero_since.is_none() {
zero_since = Some(std::time::Instant::now());
info!("No clients connected, starting shutdown timer ({}s)", config.shutdown_delay_secs);
info!(
"No clients connected, starting shutdown timer ({}s)",
config.shutdown_delay_secs
);
} else if let Some(since) = zero_since {
if since.elapsed().as_secs() >= config.shutdown_delay_secs {
info!("Auto-pausing stream (no clients for {}s)", config.shutdown_delay_secs);
info!(
"Auto-pausing stream (no clients for {}s)",
config.shutdown_delay_secs
);
if let Err(e) = streamer.stop().await {
error!("Auto-pause failed: {}", e);
}
@@ -734,8 +773,14 @@ impl Streamer {
clients: self.mjpeg_handler.client_count(),
target_fps: config.fps,
fps: capture_stats.as_ref().map(|s| s.current_fps).unwrap_or(0.0),
frames_captured: capture_stats.as_ref().map(|s| s.frames_captured).unwrap_or(0),
frames_dropped: capture_stats.as_ref().map(|s| s.frames_dropped).unwrap_or(0),
frames_captured: capture_stats
.as_ref()
.map(|s| s.frames_captured)
.unwrap_or(0),
frames_dropped: capture_stats
.as_ref()
.map(|s| s.frames_dropped)
.unwrap_or(0),
}
}
@@ -776,7 +821,10 @@ impl Streamer {
/// until the device is recovered.
async fn start_device_recovery_internal(self: &Arc<Self>) {
// Check if recovery is already in progress
if self.recovery_in_progress.swap(true, std::sync::atomic::Ordering::SeqCst) {
if self
.recovery_in_progress
.swap(true, std::sync::atomic::Ordering::SeqCst)
{
debug!("Device recovery already in progress, skipping");
return;
}
@@ -786,7 +834,9 @@ impl Streamer {
let capturer = self.capturer.read().await;
if let Some(cap) = capturer.as_ref() {
cap.last_error().unwrap_or_else(|| {
let device_path = self.current_device.blocking_read()
let device_path = self
.current_device
.blocking_read()
.as_ref()
.map(|d| d.path.display().to_string())
.unwrap_or_else(|| "unknown".to_string());
@@ -800,13 +850,15 @@ impl Streamer {
// Store error info
*self.last_lost_device.write().await = Some(device.clone());
*self.last_lost_reason.write().await = Some(reason.clone());
self.recovery_retry_count.store(0, std::sync::atomic::Ordering::Relaxed);
self.recovery_retry_count
.store(0, std::sync::atomic::Ordering::Relaxed);
// Publish device lost event
self.publish_event(SystemEvent::StreamDeviceLost {
device: device.clone(),
reason: reason.clone(),
}).await;
})
.await;
// Start recovery task
let streamer = Arc::clone(self);
@@ -814,11 +866,16 @@ impl Streamer {
let device_path = device.clone();
loop {
let attempt = streamer.recovery_retry_count.fetch_add(1, std::sync::atomic::Ordering::Relaxed) + 1;
let attempt = streamer
.recovery_retry_count
.fetch_add(1, std::sync::atomic::Ordering::Relaxed)
+ 1;
// Check if still in device lost state
let current_state = *streamer.state.read().await;
if current_state != StreamerState::DeviceLost && current_state != StreamerState::Recovering {
if current_state != StreamerState::DeviceLost
&& current_state != StreamerState::Recovering
{
info!("Stream state changed during recovery, stopping recovery task");
break;
}
@@ -828,11 +885,16 @@ impl Streamer {
// Publish reconnecting event (every 5 attempts to avoid spam)
if attempt == 1 || attempt % 5 == 0 {
streamer.publish_event(SystemEvent::StreamReconnecting {
device: device_path.clone(),
attempt,
}).await;
info!("Attempting to recover video device {} (attempt {})", device_path, attempt);
streamer
.publish_event(SystemEvent::StreamReconnecting {
device: device_path.clone(),
attempt,
})
.await;
info!(
"Attempting to recover video device {} (attempt {})",
device_path, attempt
);
}
// Wait before retry (1 second)
@@ -848,13 +910,20 @@ impl Streamer {
// Try to restart capture
match streamer.restart_capturer().await {
Ok(_) => {
info!("Video device {} recovered after {} attempts", device_path, attempt);
streamer.recovery_in_progress.store(false, std::sync::atomic::Ordering::SeqCst);
info!(
"Video device {} recovered after {} attempts",
device_path, attempt
);
streamer
.recovery_in_progress
.store(false, std::sync::atomic::Ordering::SeqCst);
// Publish recovered event
streamer.publish_event(SystemEvent::StreamRecovered {
device: device_path.clone(),
}).await;
streamer
.publish_event(SystemEvent::StreamRecovered {
device: device_path.clone(),
})
.await;
// Clear error info
*streamer.last_lost_device.write().await = None;
@@ -867,7 +936,9 @@ impl Streamer {
}
}
streamer.recovery_in_progress.store(false, std::sync::atomic::Ordering::SeqCst);
streamer
.recovery_in_progress
.store(false, std::sync::atomic::Ordering::SeqCst);
});
}
}
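The recovery_in_progress.swap(true, ...) guard above is a one-shot try-lock; the same pattern in isolation (editorial sketch):

    use std::sync::atomic::{AtomicBool, Ordering};

    fn try_begin(flag: &AtomicBool) -> bool {
        // Only the caller that flips false -> true gets `true` back;
        // every concurrent caller sees the old value `true` and backs off.
        !flag.swap(true, Ordering::SeqCst)
    }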
View File
@@ -234,10 +234,7 @@ impl VideoSessionManager {
let mut sessions = self.sessions.write().await;
sessions.insert(session_id.clone(), session);
info!(
"Video session created: {} (codec: {})",
session_id, codec
);
info!("Video session created: {} (codec: {})", session_id, codec);
Ok(session_id)
}
@@ -428,8 +425,7 @@ impl VideoSessionManager {
sessions
.iter()
.filter(|(_, s)| {
(s.state == VideoSessionState::Paused
|| s.state == VideoSessionState::Created)
(s.state == VideoSessionState::Paused || s.state == VideoSessionState::Created)
&& now.duration_since(s.last_activity) > timeout
})
.map(|(id, _)| id.clone())