fix(video): v4l path + webrtc h264 startup diagnostics

This commit is contained in:
a15355447898a
2026-03-01 01:24:26 +08:00
parent bd17f8d0f8
commit 4f2fb534a4
12 changed files with 856 additions and 797 deletions

View File

@@ -45,6 +45,95 @@ use webrtc::ice_transport::ice_gatherer_state::RTCIceGathererState;
/// H.265/HEVC MIME type (RFC 7798)
const MIME_TYPE_H265: &str = "video/H265";
/// Low-frequency diagnostic logging interval for video receive/send loop.
const VIDEO_DEBUG_LOG_INTERVAL: u64 = 120;
/// Returns `true` if `data` carries an H.264 SPS (NAL type 7) or PPS
/// (NAL type 8).
///
/// The buffer is first scanned as Annex-B (start codes `00 00 01` /
/// `00 00 00 01`); if that scan finds nothing, the buffer is re-read as
/// 4-byte big-endian length-prefixed (AVCC-style) NAL units.
fn h264_contains_parameter_sets(data: &[u8]) -> bool {
    // --- Annex-B scan ---
    let mut idx = 0usize;
    while idx + 4 <= data.len() {
        // Prefer the 4-byte start code; fall back to the 3-byte form.
        let start_code_len = match &data[idx..] {
            [0, 0, 0, 1, ..] => 4,
            [0, 0, 1, ..] => 3,
            _ => {
                idx += 1;
                continue;
            }
        };
        let header = idx + start_code_len;
        if header < data.len() && matches!(data[header] & 0x1F, 7 | 8) {
            return true;
        }
        // Resume scanning just past the NAL header byte.
        idx = header.saturating_add(1);
    }

    // --- Length-prefixed (AVCC) fallback ---
    let mut cursor = 0usize;
    while let Some(len_bytes) = data.get(cursor..cursor + 4) {
        let nalu_len =
            u32::from_be_bytes([len_bytes[0], len_bytes[1], len_bytes[2], len_bytes[3]]) as usize;
        cursor += 4;
        // A zero length or a length running past the buffer means this is
        // not a well-formed length-prefixed stream — stop probing.
        if nalu_len == 0 || cursor + nalu_len > data.len() {
            break;
        }
        if matches!(data[cursor] & 0x1F, 7 | 8) {
            return true;
        }
        cursor += nalu_len;
    }
    false
}
/// Pulls the `m=video` section out of an SDP blob and flattens the lines
/// relevant for codec/direction diagnostics into one `" | "`-joined string.
///
/// Kept lines: the `m=video` line itself plus connection (`c=`), `a=mid:`,
/// `a=rtpmap:`, `a=fmtp:`, `a=rtcp-fb:` and the direction attributes
/// (`a=send…`, `a=recv…`, `a=inactive`). Any m-section following the video
/// section terminates collection. Returns a placeholder when the SDP has
/// no video m-section.
fn extract_video_sdp_section(sdp: &str) -> String {
    const KEPT_PREFIXES: [&str; 8] = [
        "c=",
        "a=mid:",
        "a=rtpmap:",
        "a=fmtp:",
        "a=rtcp-fb:",
        "a=send",
        "a=recv",
        "a=inactive",
    ];
    let mut collected: Vec<&str> = Vec::new();
    let mut inside_video = false;
    for line in sdp.lines() {
        if line.starts_with("m=") {
            if line.starts_with("m=video") {
                inside_video = true;
                collected.push(line);
                continue;
            }
            if inside_video {
                // The next non-video m-section ends the video section.
                break;
            }
        }
        if inside_video && KEPT_PREFIXES.iter().any(|p| line.starts_with(p)) {
            collected.push(line);
        }
    }
    if collected.is_empty() {
        "<no video m-section>".to_string()
    } else {
        collected.join(" | ")
    }
}
/// Universal WebRTC session configuration
#[derive(Debug, Clone)]
@@ -590,6 +679,10 @@ impl UniversalSession {
let mut last_keyframe_request = Instant::now() - Duration::from_secs(1);
let mut frames_sent: u64 = 0;
let mut frames_received: u64 = 0;
let mut codec_mismatch_count: u64 = 0;
let mut waiting_keyframe_drop_count: u64 = 0;
let mut send_fail_count: u64 = 0;
loop {
tokio::select! {
@@ -614,14 +707,43 @@ impl UniversalSession {
break;
}
};
frames_received = frames_received.wrapping_add(1);
// Verify codec matches
let frame_codec = encoded_frame.codec;
if frame_codec != expected_codec {
codec_mismatch_count = codec_mismatch_count.wrapping_add(1);
if codec_mismatch_count <= 5
|| codec_mismatch_count % VIDEO_DEBUG_LOG_INTERVAL == 0
{
info!(
"[Session-Debug:{}] codec mismatch count={} expected={} got={} recv_seq={}",
session_id,
codec_mismatch_count,
expected_codec,
frame_codec,
encoded_frame.sequence
);
}
continue;
}
if encoded_frame.is_keyframe
|| frames_received % VIDEO_DEBUG_LOG_INTERVAL == 0
{
info!(
"[Session-Debug:{}] received frame recv_count={} sent_count={} seq={} size={} keyframe={} waiting_for_keyframe={}",
session_id,
frames_received,
frames_sent,
encoded_frame.sequence,
encoded_frame.data.len(),
encoded_frame.is_keyframe,
waiting_for_keyframe
);
}
// Debug log for H265 frames
if expected_codec == VideoEncoderType::H265
&& (encoded_frame.is_keyframe || frames_sent.is_multiple_of(30)) {
@@ -642,13 +764,31 @@ impl UniversalSession {
}
}
let was_waiting_for_keyframe = waiting_for_keyframe;
if waiting_for_keyframe || gap_detected {
if encoded_frame.is_keyframe {
waiting_for_keyframe = false;
if was_waiting_for_keyframe || gap_detected {
info!(
"[Session-Debug:{}] keyframe accepted seq={} after_wait={} gap_detected={}",
session_id,
encoded_frame.sequence,
was_waiting_for_keyframe,
gap_detected
);
}
} else {
if gap_detected {
waiting_for_keyframe = true;
}
// Some H264 encoders (notably v4l2m2m on certain drivers) emit
// SPS/PPS in a separate non-keyframe access unit right before IDR.
// If we drop it here, browser receives IDR-only (NAL 5) and cannot decode.
let forward_h264_parameter_frame = waiting_for_keyframe
&& expected_codec == VideoEncoderType::H264
&& h264_contains_parameter_sets(encoded_frame.data.as_ref());
let now = Instant::now();
if now.duration_since(last_keyframe_request)
>= Duration::from_millis(200)
@@ -656,7 +796,34 @@ impl UniversalSession {
request_keyframe();
last_keyframe_request = now;
}
continue;
if forward_h264_parameter_frame {
info!(
"[Session-Debug:{}] forwarding H264 parameter frame while waiting keyframe seq={} size={}",
session_id,
encoded_frame.sequence,
encoded_frame.data.len()
);
} else {
waiting_keyframe_drop_count =
waiting_keyframe_drop_count.wrapping_add(1);
if gap_detected
|| waiting_keyframe_drop_count <= 5
|| waiting_keyframe_drop_count
% VIDEO_DEBUG_LOG_INTERVAL
== 0
{
info!(
"[Session-Debug:{}] dropping frame while waiting keyframe seq={} keyframe={} gap_detected={} drop_count={}",
session_id,
encoded_frame.sequence,
encoded_frame.is_keyframe,
gap_detected,
waiting_keyframe_drop_count
);
}
continue;
}
}
}
@@ -671,11 +838,33 @@ impl UniversalSession {
.await;
let _ = send_in_flight;
if send_result.is_err() {
// Keep quiet unless debugging send failures elsewhere
if let Err(e) = send_result {
send_fail_count = send_fail_count.wrapping_add(1);
if send_fail_count <= 5 || send_fail_count % VIDEO_DEBUG_LOG_INTERVAL == 0
{
info!(
"[Session-Debug:{}] track write failed count={} err={}",
session_id,
send_fail_count,
e
);
}
} else {
frames_sent += 1;
last_sequence = Some(encoded_frame.sequence);
if encoded_frame.is_keyframe
|| frames_sent % VIDEO_DEBUG_LOG_INTERVAL == 0
{
info!(
"[Session-Debug:{}] sent frame sent_count={} recv_count={} seq={} size={} keyframe={}",
session_id,
frames_sent,
frames_received,
encoded_frame.sequence,
encoded_frame.data.len(),
encoded_frame.is_keyframe
);
}
}
}
}
@@ -794,6 +983,12 @@ impl UniversalSession {
/// Handle SDP offer and create answer
pub async fn handle_offer(&self, offer: SdpOffer) -> Result<SdpAnswer> {
info!(
"[SDP-Debug:{}] offer video section: {}",
self.session_id,
extract_video_sdp_section(&offer.sdp)
);
// Log offer for debugging H.265 codec negotiation
if self.codec == VideoEncoderType::H265 {
let has_h265 = offer.sdp.to_lowercase().contains("h265")
@@ -820,6 +1015,12 @@ impl UniversalSession {
.await
.map_err(|e| AppError::VideoError(format!("Failed to create answer: {}", e)))?;
info!(
"[SDP-Debug:{}] answer video section: {}",
self.session_id,
extract_video_sdp_section(&answer.sdp)
);
// Log answer for debugging
if self.codec == VideoEncoderType::H265 {
let has_h265 = answer.sdp.to_lowercase().contains("h265")

View File

@@ -18,9 +18,10 @@
use bytes::Bytes;
use std::sync::Arc;
use std::sync::atomic::{AtomicU64, Ordering};
use std::time::Duration;
use tokio::sync::Mutex;
use tracing::{debug, trace, warn};
use tracing::{debug, info, trace, warn};
use webrtc::media::Sample;
use webrtc::rtp_transceiver::rtp_codec::RTCRtpCodecCapability;
use webrtc::track::track_local::track_local_static_rtp::TrackLocalStaticRTP;
@@ -38,6 +39,10 @@ use crate::video::format::Resolution;
/// Default MTU for RTP packets
const RTP_MTU: usize = 1200;
/// Low-frequency diagnostic logging interval for H264 frame writes.
const H264_DEBUG_LOG_INTERVAL: u64 = 120;
static H264_WRITE_COUNTER: AtomicU64 = AtomicU64::new(0);
/// Video codec type for WebRTC
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -313,7 +318,20 @@ impl UniversalVideoTrack {
///
/// Sends the entire Annex B frame as a single Sample to allow the
/// H264Payloader to aggregate SPS+PPS into STAP-A packets.
async fn write_h264_frame(&self, data: Bytes, _is_keyframe: bool) -> Result<()> {
async fn write_h264_frame(&self, data: Bytes, is_keyframe: bool) -> Result<()> {
let frame_idx = H264_WRITE_COUNTER.fetch_add(1, Ordering::Relaxed) + 1;
if is_keyframe || frame_idx % H264_DEBUG_LOG_INTERVAL == 0 {
let (stream_format, nal_types) = detect_h264_stream_format_and_nals(&data);
info!(
"[H264-Track-Debug] frame_idx={} size={} keyframe={} stream_format={} nal_types={:?}",
frame_idx,
data.len(),
is_keyframe,
stream_format,
nal_types
);
}
// Send entire Annex B frame as one Sample
// The H264Payloader in rtp crate will:
// 1. Parse NAL units from Annex B format
@@ -470,6 +488,49 @@ impl UniversalVideoTrack {
}
}
/// Classifies an H.264 buffer as `"annex-b"`, `"length-prefixed"`, or
/// `"unknown"`, returning up to the first 12 NAL unit types seen
/// (the 5-bit `nal_unit_type` field). Used only for diagnostic logging.
fn detect_h264_stream_format_and_nals(data: &[u8]) -> (&'static str, Vec<u8>) {
    // Collect NAL types behind Annex-B start codes (00 00 01 / 00 00 00 01).
    let mut nal_types: Vec<u8> = Vec::new();
    let mut idx = 0usize;
    while idx + 4 <= data.len() {
        let start_code_len = match &data[idx..] {
            [0, 0, 0, 1, ..] => 4,
            [0, 0, 1, ..] => 3,
            _ => {
                idx += 1;
                continue;
            }
        };
        let header = idx + start_code_len;
        if header < data.len() {
            nal_types.push(data[header] & 0x1F);
            // Cap the list so a single log line stays bounded.
            if nal_types.len() >= 12 {
                break;
            }
        }
        idx = header.saturating_add(1);
    }
    if !nal_types.is_empty() {
        return ("annex-b", nal_types);
    }
    // No start codes found: probe for a plausible 4-byte big-endian
    // length prefix (AVCC-style framing).
    if data.len() >= 5 {
        let first_len = u32::from_be_bytes([data[0], data[1], data[2], data[3]]) as usize;
        if first_len > 0 && first_len + 4 <= data.len() {
            return ("length-prefixed", vec![data[4] & 0x1F]);
        }
    }
    ("unknown", Vec::new())
}
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -250,8 +250,8 @@ impl WebRtcStreamer {
}
}
fn should_stop_pipeline(session_count: usize, subscriber_count: usize) -> bool {
session_count == 0 && subscriber_count == 0
/// Pipeline lifecycle predicate: the shared video pipeline should be
/// stopped once no WebRTC sessions remain. Per this diff, the subscriber
/// count is intentionally no longer part of the condition — sessions
/// alone drive pipeline lifetime.
fn should_stop_pipeline(session_count: usize) -> bool {
session_count == 0
}
async fn stop_pipeline_if_idle(&self, reason: &str) {
@@ -263,7 +263,7 @@ impl WebRtcStreamer {
};
let subscriber_count = pipeline.subscriber_count();
if Self::should_stop_pipeline(session_count, subscriber_count) {
if Self::should_stop_pipeline(session_count) {
info!(
"{} stopping video pipeline (sessions={}, subscribers={})",
reason, session_count, subscriber_count
@@ -1005,10 +1005,9 @@ mod tests {
}
#[test]
fn stop_pipeline_requires_no_sessions_and_no_subscribers() {
assert!(WebRtcStreamer::should_stop_pipeline(0, 0));
assert!(!WebRtcStreamer::should_stop_pipeline(1, 0));
assert!(!WebRtcStreamer::should_stop_pipeline(0, 1));
assert!(!WebRtcStreamer::should_stop_pipeline(2, 3));
// Verifies the simplified stop predicate: only the session count matters
// (the subscriber-count argument was removed in this change).
fn stop_pipeline_requires_no_sessions() {
assert!(WebRtcStreamer::should_stop_pipeline(0));
assert!(!WebRtcStreamer::should_stop_pipeline(1));
assert!(!WebRtcStreamer::should_stop_pipeline(2));
}
}