feat: 完善架构优化性能

- 调整音视频架构,提升 RKMPP 编码 MJPEG-->H264 性能,同时解决丢帧马赛克问题;
- 删除多用户逻辑,只保留单用户,支持设置 web 单会话;
- 修复删除体验不好的回退逻辑,前端页面菜单位置微调;
- 增加 OTG USB 设备动态调整功能;
- 修复 mdns 问题,webrtc 视频切换更顺畅。
This commit is contained in:
mofeng
2026-01-25 16:04:29 +08:00
parent 01e01430da
commit 1786b7689d
66 changed files with 4225 additions and 2936 deletions

34
src/webrtc/mdns.rs Normal file
View File

@@ -0,0 +1,34 @@
use webrtc::ice::mdns::MulticastDnsMode;
/// Reads the `ONE_KVM_WEBRTC_MDNS_MODE` environment variable and maps it to a
/// [`MulticastDnsMode`].
///
/// Matching is case-insensitive and surrounding whitespace is ignored.
/// Accepted spellings:
/// - `disabled`, `off`, `false`, `0` → `Disabled`
/// - `query`, `query_only`, `query-only` → `QueryOnly`
/// - `gather`, `query_and_gather`, `query-and-gather`, `on`, `true`, `1` → `QueryAndGather`
///
/// Returns `None` when the variable is unset, empty, or unrecognized.
pub fn mdns_mode_from_env() -> Option<MulticastDnsMode> {
    let normalized = std::env::var("ONE_KVM_WEBRTC_MDNS_MODE")
        .ok()?
        .trim()
        .to_ascii_lowercase();
    let mode = match normalized.as_str() {
        // Empty after trimming counts the same as unset.
        "" => return None,
        "disabled" | "off" | "false" | "0" => MulticastDnsMode::Disabled,
        "query" | "query_only" | "query-only" => MulticastDnsMode::QueryOnly,
        "gather" | "query_and_gather" | "query-and-gather" | "on" | "true" | "1" => {
            MulticastDnsMode::QueryAndGather
        }
        _ => return None,
    };
    Some(mode)
}
/// Effective mDNS mode for WebRTC ICE: the environment override when present
/// and valid, otherwise the default of [`MulticastDnsMode::QueryAndGather`].
pub fn mdns_mode() -> MulticastDnsMode {
    match mdns_mode_from_env() {
        Some(mode) => mode,
        None => MulticastDnsMode::QueryAndGather,
    }
}
/// Returns a stable, lowercase string label for `mode`, suitable for logs
/// and diagnostics (inverse of the canonical spellings accepted by
/// `mdns_mode_from_env`).
pub fn mdns_mode_label(mode: MulticastDnsMode) -> &'static str {
    match mode {
        MulticastDnsMode::QueryAndGather => "query_and_gather",
        MulticastDnsMode::QueryOnly => "query_only",
        MulticastDnsMode::Disabled => "disabled",
    }
}
/// Builds the default mDNS host name announced for a session:
/// `<session_id>.local`.
pub fn default_mdns_host_name(session_id: &str) -> String {
    const SUFFIX: &str = ".local";
    // Size the buffer once to avoid a reallocation while appending.
    let mut host = String::with_capacity(session_id.len() + SUFFIX.len());
    host.push_str(session_id);
    host.push_str(SUFFIX);
    host
}

View File

@@ -27,6 +27,7 @@
pub mod config;
pub mod h265_payloader;
pub(crate) mod mdns;
pub mod peer;
pub mod rtp;
pub mod session;
@@ -42,7 +43,5 @@ pub use rtp::{H264VideoTrack, H264VideoTrackConfig, OpusAudioTrack};
pub use session::WebRtcSessionManager;
pub use signaling::{ConnectionState, IceCandidate, SdpAnswer, SdpOffer, SignalingMessage};
pub use universal_session::{UniversalSession, UniversalSessionConfig, UniversalSessionInfo};
pub use video_track::{
UniversalVideoTrack, UniversalVideoTrackConfig, VideoCodec, VideoTrackStats,
};
pub use video_track::{UniversalVideoTrack, UniversalVideoTrackConfig, VideoCodec};
pub use webrtc_streamer::{SessionInfo, WebRtcStreamer, WebRtcStreamerConfig, WebRtcStreamerStats};

View File

@@ -5,9 +5,11 @@ use tokio::sync::{broadcast, watch, Mutex, RwLock};
use tracing::{debug, info};
use webrtc::api::interceptor_registry::register_default_interceptors;
use webrtc::api::media_engine::MediaEngine;
use webrtc::api::setting_engine::SettingEngine;
use webrtc::api::APIBuilder;
use webrtc::data_channel::data_channel_message::DataChannelMessage;
use webrtc::data_channel::RTCDataChannel;
use webrtc::ice::mdns::MulticastDnsMode;
use webrtc::ice_transport::ice_candidate::RTCIceCandidate;
use webrtc::ice_transport::ice_server::RTCIceServer;
use webrtc::interceptor::registry::Registry;
@@ -17,6 +19,7 @@ use webrtc::peer_connection::sdp::session_description::RTCSessionDescription;
use webrtc::peer_connection::RTCPeerConnection;
use super::config::WebRtcConfig;
use super::mdns::{default_mdns_host_name, mdns_mode};
use super::signaling::{ConnectionState, IceCandidate, SdpAnswer, SdpOffer};
use super::track::{VideoTrack, VideoTrackConfig};
use crate::error::{AppError, Result};
@@ -60,8 +63,17 @@ impl PeerConnection {
registry = register_default_interceptors(registry, &mut media_engine)
.map_err(|e| AppError::VideoError(format!("Failed to register interceptors: {}", e)))?;
// Create API
// Create API (with optional mDNS settings)
let mut setting_engine = SettingEngine::default();
let mode = mdns_mode();
setting_engine.set_ice_multicast_dns_mode(mode);
if mode == MulticastDnsMode::QueryAndGather {
setting_engine.set_multicast_dns_host_name(default_mdns_host_name(&session_id));
}
info!("WebRTC mDNS mode: {:?} (session {})", mode, session_id);
let api = APIBuilder::new()
.with_setting_engine(setting_engine)
.with_media_engine(media_engine)
.with_interceptor_registry(registry)
.build();
@@ -418,7 +430,7 @@ pub struct PeerConnectionManager {
impl PeerConnectionManager {
/// Create a new peer connection manager
pub fn new(config: WebRtcConfig) -> Self {
let (frame_tx, _) = broadcast::channel(16); // Buffer size 16 for low latency
let (frame_tx, _) = broadcast::channel(16);
Self {
config,
@@ -430,7 +442,7 @@ impl PeerConnectionManager {
/// Create a new peer connection manager with HID controller
pub fn with_hid(config: WebRtcConfig, hid: Arc<HidController>) -> Self {
let (frame_tx, _) = broadcast::channel(16); // Buffer size 16 for low latency
let (frame_tx, _) = broadcast::channel(16);
Self {
config,

View File

@@ -42,8 +42,6 @@ pub struct H264VideoTrack {
config: H264VideoTrackConfig,
/// H264 payloader for manual packetization (if needed)
payloader: Mutex<H264Payloader>,
/// Statistics
stats: Mutex<H264TrackStats>,
/// Cached SPS NAL unit for injection before IDR frames
/// Some hardware encoders don't repeat SPS/PPS with every keyframe
cached_sps: Mutex<Option<Bytes>>,
@@ -83,21 +81,6 @@ impl Default for H264VideoTrackConfig {
}
}
/// H264 track statistics
#[derive(Debug, Clone, Default)]
pub struct H264TrackStats {
/// Frames sent
pub frames_sent: u64,
/// Bytes sent
pub bytes_sent: u64,
/// Packets sent (RTP packets)
pub packets_sent: u64,
/// Key frames sent
pub keyframes_sent: u64,
/// Errors encountered
pub errors: u64,
}
impl H264VideoTrack {
/// Create a new H264 video track
///
@@ -134,7 +117,6 @@ impl H264VideoTrack {
track,
config,
payloader: Mutex::new(H264Payloader::default()),
stats: Mutex::new(H264TrackStats::default()),
cached_sps: Mutex::new(None),
cached_pps: Mutex::new(None),
}
@@ -150,11 +132,6 @@ impl H264VideoTrack {
self.track.clone()
}
/// Get current statistics
pub async fn stats(&self) -> H264TrackStats {
self.stats.lock().await.clone()
}
/// Write an H264 encoded frame to the track
///
/// The frame data should be H264 Annex B format (with start codes 0x00000001 or 0x000001).
@@ -288,16 +265,6 @@ impl H264VideoTrack {
nal_count += 1;
}
// Update statistics
if nal_count > 0 {
let mut stats = self.stats.lock().await;
stats.frames_sent += 1;
stats.bytes_sent += total_bytes;
if is_keyframe {
stats.keyframes_sent += 1;
}
}
trace!(
"Sent frame: {} NAL units, {} bytes, keyframe={}",
nal_count,
@@ -344,19 +311,6 @@ impl H264VideoTrack {
pub struct OpusAudioTrack {
/// The underlying WebRTC track
track: Arc<TrackLocalStaticSample>,
/// Statistics
stats: Mutex<OpusTrackStats>,
}
/// Opus track statistics
#[derive(Debug, Clone, Default)]
pub struct OpusTrackStats {
/// Packets sent
pub packets_sent: u64,
/// Bytes sent
pub bytes_sent: u64,
/// Errors
pub errors: u64,
}
impl OpusAudioTrack {
@@ -378,7 +332,6 @@ impl OpusAudioTrack {
Self {
track,
stats: Mutex::new(OpusTrackStats::default()),
}
}
@@ -392,11 +345,6 @@ impl OpusAudioTrack {
self.track.clone()
}
/// Get statistics
pub async fn stats(&self) -> OpusTrackStats {
self.stats.lock().await.clone()
}
/// Write Opus encoded audio data
///
/// # Arguments
@@ -417,23 +365,13 @@ impl OpusAudioTrack {
..Default::default()
};
match self.track.write_sample(&sample).await {
Ok(_) => {
let mut stats = self.stats.lock().await;
stats.packets_sent += 1;
stats.bytes_sent += data.len() as u64;
Ok(())
}
Err(e) => {
let mut stats = self.stats.lock().await;
stats.errors += 1;
self.track
.write_sample(&sample)
.await
.map_err(|e| {
error!("Failed to write Opus sample: {}", e);
Err(AppError::WebRtcError(format!(
"Failed to write audio sample: {}",
e
)))
}
}
AppError::WebRtcError(format!("Failed to write audio sample: {}", e))
})
}
}

View File

@@ -2,7 +2,7 @@
use std::sync::Arc;
use std::time::Instant;
use tokio::sync::{broadcast, watch, Mutex};
use tokio::sync::{broadcast, watch};
use tracing::{debug, error, info};
use webrtc::rtp_transceiver::rtp_codec::RTCRtpCodecCapability;
use webrtc::track::track_local::track_local_static_rtp::TrackLocalStaticRTP;
@@ -87,38 +87,11 @@ pub fn audio_codec_capability() -> RTCRtpCodecCapability {
}
}
/// Video track statistics
#[derive(Debug, Clone, Default)]
pub struct VideoTrackStats {
/// Frames sent
pub frames_sent: u64,
/// Bytes sent
pub bytes_sent: u64,
/// Packets sent
pub packets_sent: u64,
/// Packets lost (RTCP feedback)
pub packets_lost: u64,
/// Current bitrate (bps)
pub current_bitrate: u64,
/// Round trip time (ms)
pub rtt_ms: f64,
/// Jitter (ms)
pub jitter_ms: f64,
}
/// Video track for WebRTC streaming
pub struct VideoTrack {
config: VideoTrackConfig,
/// RTP track
track: Arc<TrackLocalStaticRTP>,
/// Statistics
stats: Arc<Mutex<VideoTrackStats>>,
/// Sequence number for RTP
sequence_number: Arc<Mutex<u16>>,
/// Timestamp for RTP
timestamp: Arc<Mutex<u32>>,
/// Last frame time
last_frame_time: Arc<Mutex<Option<Instant>>>,
/// Running flag
running: Arc<watch::Sender<bool>>,
}
@@ -139,10 +112,6 @@ impl VideoTrack {
Self {
config,
track,
stats: Arc::new(Mutex::new(VideoTrackStats::default())),
sequence_number: Arc::new(Mutex::new(0)),
timestamp: Arc::new(Mutex::new(0)),
last_frame_time: Arc::new(Mutex::new(None)),
running: Arc::new(running_tx),
}
}
@@ -152,25 +121,17 @@ impl VideoTrack {
self.track.clone()
}
/// Get current statistics
pub async fn stats(&self) -> VideoTrackStats {
self.stats.lock().await.clone()
}
/// Start sending frames from a broadcast receiver
pub async fn start_sending(&self, mut frame_rx: broadcast::Receiver<VideoFrame>) {
let _ = self.running.send(true);
let track = self.track.clone();
let stats = self.stats.clone();
let sequence_number = self.sequence_number.clone();
let timestamp = self.timestamp.clone();
let last_frame_time = self.last_frame_time.clone();
let clock_rate = self.config.clock_rate;
let mut running_rx = self.running.subscribe();
info!("Starting video track sender");
tokio::spawn(async move {
let mut state = SendState::default();
loop {
tokio::select! {
result = frame_rx.recv() => {
@@ -179,10 +140,7 @@ impl VideoTrack {
if let Err(e) = Self::send_frame(
&track,
&frame,
&stats,
&sequence_number,
&timestamp,
&last_frame_time,
&mut state,
clock_rate,
).await {
debug!("Failed to send frame: {}", e);
@@ -219,29 +177,22 @@ impl VideoTrack {
async fn send_frame(
track: &TrackLocalStaticRTP,
frame: &VideoFrame,
stats: &Mutex<VideoTrackStats>,
sequence_number: &Mutex<u16>,
timestamp: &Mutex<u32>,
last_frame_time: &Mutex<Option<Instant>>,
state: &mut SendState,
clock_rate: u32,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
// Calculate timestamp increment based on frame timing
let now = Instant::now();
let mut last_time = last_frame_time.lock().await;
let timestamp_increment = if let Some(last) = *last_time {
let timestamp_increment = if let Some(last) = state.last_frame_time {
let elapsed = now.duration_since(last);
((elapsed.as_secs_f64() * clock_rate as f64) as u32).min(clock_rate / 10)
} else {
clock_rate / 30 // Default to 30 fps
};
*last_time = Some(now);
drop(last_time);
state.last_frame_time = Some(now);
// Update timestamp
let mut ts = timestamp.lock().await;
*ts = ts.wrapping_add(timestamp_increment);
let _current_ts = *ts;
drop(ts);
state.timestamp = state.timestamp.wrapping_add(timestamp_increment);
let _current_ts = state.timestamp;
// For H.264, we need to packetize into RTP
// This is a simplified implementation - real implementation needs proper NAL unit handling
@@ -257,33 +208,34 @@ impl VideoTrack {
let _is_last = i == packet_count - 1;
// Get sequence number
let mut seq = sequence_number.lock().await;
let _seq_num = *seq;
*seq = seq.wrapping_add(1);
drop(seq);
let _seq_num = state.sequence_number;
state.sequence_number = state.sequence_number.wrapping_add(1);
// Build RTP packet payload
// For simplicity, just send raw data - real implementation needs proper RTP packetization
let payload = data[start..end].to_vec();
let payload = &data[start..end];
bytes_sent += payload.len() as u64;
// Write sample (the track handles RTP header construction)
if let Err(e) = track.write(&payload).await {
if let Err(e) = track.write(payload).await {
error!("Failed to write RTP packet: {}", e);
return Err(e.into());
}
}
// Update stats
let mut s = stats.lock().await;
s.frames_sent += 1;
s.bytes_sent += bytes_sent;
s.packets_sent += packet_count as u64;
let _ = bytes_sent;
Ok(())
}
}
#[derive(Debug, Default)]
struct SendState {
sequence_number: u16,
timestamp: u32,
last_frame_time: Option<Instant>,
}
/// Audio track configuration
#[derive(Debug, Clone)]
pub struct AudioTrackConfig {

View File

@@ -123,15 +123,6 @@ impl Default for UnifiedVideoTrackConfig {
}
}
/// Unified video track statistics
#[derive(Debug, Clone, Default)]
pub struct UnifiedVideoTrackStats {
pub frames_sent: u64,
pub bytes_sent: u64,
pub keyframes_sent: u64,
pub errors: u64,
}
/// Cached NAL parameter sets for H264
struct H264ParameterSets {
sps: Option<Bytes>,
@@ -179,8 +170,6 @@ pub struct UnifiedVideoTrack {
track: Arc<TrackLocalStaticSample>,
/// Track configuration
config: UnifiedVideoTrackConfig,
/// Statistics
stats: Mutex<UnifiedVideoTrackStats>,
/// H264 parameter set cache
h264_params: Mutex<H264ParameterSets>,
/// H265 parameter set cache
@@ -207,7 +196,6 @@ impl UnifiedVideoTrack {
Self {
track,
config,
stats: Mutex::new(UnifiedVideoTrackStats::default()),
h264_params: Mutex::new(H264ParameterSets { sps: None, pps: None }),
h265_params: Mutex::new(H265ParameterSets { vps: None, sps: None, pps: None }),
}
@@ -277,9 +265,6 @@ impl UnifiedVideoTrack {
}
/// Get statistics
pub async fn stats(&self) -> UnifiedVideoTrackStats {
self.stats.lock().await.clone()
}
/// Write an encoded frame to the track
///
@@ -504,13 +489,6 @@ impl UnifiedVideoTrack {
debug!("VP8 write_sample failed: {}", e);
}
let mut stats = self.stats.lock().await;
stats.frames_sent += 1;
stats.bytes_sent += data.len() as u64;
if is_keyframe {
stats.keyframes_sent += 1;
}
trace!("VP8 frame: {} bytes, keyframe={}", data.len(), is_keyframe);
Ok(())
}
@@ -531,13 +509,6 @@ impl UnifiedVideoTrack {
debug!("VP9 write_sample failed: {}", e);
}
let mut stats = self.stats.lock().await;
stats.frames_sent += 1;
stats.bytes_sent += data.len() as u64;
if is_keyframe {
stats.keyframes_sent += 1;
}
trace!("VP9 frame: {} bytes, keyframe={}", data.len(), is_keyframe);
Ok(())
}
@@ -572,15 +543,6 @@ impl UnifiedVideoTrack {
total_bytes += nal_data.len() as u64;
}
if nal_count > 0 {
let mut stats = self.stats.lock().await;
stats.frames_sent += 1;
stats.bytes_sent += total_bytes;
if is_keyframe {
stats.keyframes_sent += 1;
}
}
trace!("Sent {} NAL units, {} bytes, keyframe={}", nal_count, total_bytes, is_keyframe);
Ok(())
}

View File

@@ -4,13 +4,16 @@
//! Replaces the H264-only H264Session with a more flexible implementation.
use std::sync::Arc;
use tokio::sync::{broadcast, watch, Mutex, RwLock};
use tracing::{debug, info, trace, warn};
use std::time::{Duration, Instant};
use tokio::sync::{watch, Mutex, RwLock};
use tracing::{debug, info, warn};
use webrtc::api::interceptor_registry::register_default_interceptors;
use webrtc::api::media_engine::MediaEngine;
use webrtc::api::setting_engine::SettingEngine;
use webrtc::api::APIBuilder;
use webrtc::data_channel::data_channel_message::DataChannelMessage;
use webrtc::data_channel::RTCDataChannel;
use webrtc::ice::mdns::MulticastDnsMode;
use webrtc::ice_transport::ice_candidate::RTCIceCandidate;
use webrtc::ice_transport::ice_server::RTCIceServer;
use webrtc::interceptor::registry::Registry;
@@ -24,17 +27,21 @@ use webrtc::rtp_transceiver::rtp_codec::{
use webrtc::rtp_transceiver::RTCPFeedback;
use super::config::WebRtcConfig;
use super::mdns::{default_mdns_host_name, mdns_mode};
use super::rtp::OpusAudioTrack;
use super::signaling::{ConnectionState, IceCandidate, SdpAnswer, SdpOffer};
use super::video_track::{UniversalVideoTrack, UniversalVideoTrackConfig, VideoCodec};
use crate::audio::OpusFrame;
use crate::error::{AppError, Result};
use crate::events::{EventBus, SystemEvent};
use crate::hid::datachannel::{parse_hid_message, HidChannelEvent};
use crate::hid::HidController;
use crate::video::encoder::registry::VideoEncoderType;
use crate::video::encoder::BitratePreset;
use crate::video::format::{PixelFormat, Resolution};
use crate::video::shared_video_pipeline::EncodedVideoFrame;
use std::sync::atomic::AtomicBool;
use webrtc::ice_transport::ice_gatherer_state::RTCIceGathererState;
/// H.265/HEVC MIME type (RFC 7798)
const MIME_TYPE_H265: &str = "video/H265";
@@ -117,6 +124,8 @@ pub struct UniversalSession {
ice_candidates: Arc<Mutex<Vec<IceCandidate>>>,
/// HID controller reference
hid_controller: Option<Arc<HidController>>,
/// Event bus for WebRTC signaling events (optional)
event_bus: Option<Arc<EventBus>>,
/// Video frame receiver handle
video_receiver_handle: Mutex<Option<tokio::task::JoinHandle<()>>>,
/// Audio frame receiver handle
@@ -127,7 +136,11 @@ pub struct UniversalSession {
impl UniversalSession {
/// Create a new universal WebRTC session
pub async fn new(config: UniversalSessionConfig, session_id: String) -> Result<Self> {
pub async fn new(
config: UniversalSessionConfig,
session_id: String,
event_bus: Option<Arc<EventBus>>,
) -> Result<Self> {
info!(
"Creating {} session: {} @ {}x{} (audio={})",
config.codec,
@@ -243,8 +256,17 @@ impl UniversalSession {
registry = register_default_interceptors(registry, &mut media_engine)
.map_err(|e| AppError::VideoError(format!("Failed to register interceptors: {}", e)))?;
// Create API
// Create API (with optional mDNS settings)
let mut setting_engine = SettingEngine::default();
let mode = mdns_mode();
setting_engine.set_ice_multicast_dns_mode(mode);
if mode == MulticastDnsMode::QueryAndGather {
setting_engine.set_multicast_dns_host_name(default_mdns_host_name(&session_id));
}
info!("WebRTC mDNS mode: {:?} (session {})", mode, session_id);
let api = APIBuilder::new()
.with_setting_engine(setting_engine)
.with_media_engine(media_engine)
.with_interceptor_registry(registry)
.build();
@@ -321,6 +343,7 @@ impl UniversalSession {
state_rx,
ice_candidates: Arc::new(Mutex::new(vec![])),
hid_controller: None,
event_bus,
video_receiver_handle: Mutex::new(None),
audio_receiver_handle: Mutex::new(None),
fps: config.fps,
@@ -337,6 +360,7 @@ impl UniversalSession {
let state = self.state.clone();
let session_id = self.session_id.clone();
let codec = self.codec;
let event_bus = self.event_bus.clone();
// Connection state change handler
self.pc
@@ -372,32 +396,56 @@ impl UniversalSession {
// ICE gathering state handler
let session_id_gather = self.session_id.clone();
let event_bus_gather = event_bus.clone();
self.pc
.on_ice_gathering_state_change(Box::new(move |state| {
let session_id = session_id_gather.clone();
let event_bus = event_bus_gather.clone();
Box::pin(async move {
debug!("[ICE] Session {} gathering state: {:?}", session_id, state);
if matches!(state, RTCIceGathererState::Complete) {
if let Some(bus) = event_bus.as_ref() {
bus.publish(SystemEvent::WebRTCIceComplete { session_id });
}
}
})
}));
// ICE candidate handler
let ice_candidates = self.ice_candidates.clone();
let session_id_candidate = self.session_id.clone();
let event_bus_candidate = event_bus.clone();
self.pc
.on_ice_candidate(Box::new(move |candidate: Option<RTCIceCandidate>| {
let ice_candidates = ice_candidates.clone();
let session_id = session_id_candidate.clone();
let event_bus = event_bus_candidate.clone();
Box::pin(async move {
if let Some(c) = candidate {
let candidate_str = c.to_json().map(|j| j.candidate).unwrap_or_default();
debug!("ICE candidate: {}", candidate_str);
let candidate_json = c.to_json().ok();
let candidate_str = candidate_json
.as_ref()
.map(|j| j.candidate.clone())
.unwrap_or_default();
let candidate = IceCandidate {
candidate: candidate_str,
sdp_mid: candidate_json.as_ref().and_then(|j| j.sdp_mid.clone()),
sdp_mline_index: candidate_json.as_ref().and_then(|j| j.sdp_mline_index),
username_fragment: candidate_json
.as_ref()
.and_then(|j| j.username_fragment.clone()),
};
let mut candidates = ice_candidates.lock().await;
candidates.push(IceCandidate {
candidate: candidate_str,
sdp_mid: c.to_json().ok().and_then(|j| j.sdp_mid),
sdp_mline_index: c.to_json().ok().and_then(|j| j.sdp_mline_index),
username_fragment: None,
});
candidates.push(candidate.clone());
drop(candidates);
if let Some(bus) = event_bus.as_ref() {
bus.publish(SystemEvent::WebRTCIceCandidate {
session_id,
candidate,
});
}
}
})
}));
@@ -488,13 +536,11 @@ impl UniversalSession {
///
/// The `on_connected` callback is called when ICE connection is established,
/// allowing the caller to request a keyframe at the right time.
pub async fn start_from_video_pipeline<F>(
pub async fn start_from_video_pipeline(
&self,
mut frame_rx: broadcast::Receiver<EncodedVideoFrame>,
on_connected: F,
) where
F: FnOnce() + Send + 'static,
{
mut frame_rx: tokio::sync::mpsc::Receiver<std::sync::Arc<EncodedVideoFrame>>,
request_keyframe: Arc<dyn Fn() + Send + Sync + 'static>,
) {
info!(
"Starting {} session {} with shared encoder",
self.codec, self.session_id
@@ -505,6 +551,7 @@ impl UniversalSession {
let session_id = self.session_id.clone();
let _fps = self.fps;
let expected_codec = self.codec;
let send_in_flight = Arc::new(AtomicBool::new(false));
let handle = tokio::spawn(async move {
info!(
@@ -536,7 +583,10 @@ impl UniversalSession {
);
// Request keyframe now that connection is established
on_connected();
request_keyframe();
let mut waiting_for_keyframe = true;
let mut last_sequence: Option<u64> = None;
let mut last_keyframe_request = Instant::now() - Duration::from_secs(1);
let mut frames_sent: u64 = 0;
@@ -556,64 +606,81 @@ impl UniversalSession {
}
result = frame_rx.recv() => {
match result {
Ok(encoded_frame) => {
// Verify codec matches
let frame_codec = match encoded_frame.codec {
VideoEncoderType::H264 => VideoEncoderType::H264,
VideoEncoderType::H265 => VideoEncoderType::H265,
VideoEncoderType::VP8 => VideoEncoderType::VP8,
VideoEncoderType::VP9 => VideoEncoderType::VP9,
};
if frame_codec != expected_codec {
trace!("Skipping frame with codec {:?}, expected {:?}", frame_codec, expected_codec);
continue;
}
// Debug log for H265 frames
if expected_codec == VideoEncoderType::H265 {
if encoded_frame.is_keyframe || frames_sent % 30 == 0 {
debug!(
"[Session-H265] Received frame #{}: size={}, keyframe={}, seq={}",
frames_sent,
encoded_frame.data.len(),
encoded_frame.is_keyframe,
encoded_frame.sequence
);
}
}
// Send encoded frame via RTP
if let Err(e) = video_track
.write_frame_bytes(
encoded_frame.data.clone(),
encoded_frame.is_keyframe,
)
.await
{
if frames_sent % 100 == 0 {
debug!("Failed to write frame to track: {}", e);
}
} else {
frames_sent += 1;
// Log successful H265 frame send
if expected_codec == VideoEncoderType::H265 && (encoded_frame.is_keyframe || frames_sent % 30 == 0) {
debug!(
"[Session-H265] Frame #{} sent successfully",
frames_sent
);
}
}
}
Err(broadcast::error::RecvError::Lagged(n)) => {
debug!("Session {} lagged by {} frames", session_id, n);
}
Err(broadcast::error::RecvError::Closed) => {
let encoded_frame = match result {
Some(frame) => frame,
None => {
info!("Video frame channel closed for session {}", session_id);
break;
}
};
// Verify codec matches
let frame_codec = match encoded_frame.codec {
VideoEncoderType::H264 => VideoEncoderType::H264,
VideoEncoderType::H265 => VideoEncoderType::H265,
VideoEncoderType::VP8 => VideoEncoderType::VP8,
VideoEncoderType::VP9 => VideoEncoderType::VP9,
};
if frame_codec != expected_codec {
continue;
}
// Debug log for H265 frames
if expected_codec == VideoEncoderType::H265 {
if encoded_frame.is_keyframe || frames_sent % 30 == 0 {
debug!(
"[Session-H265] Received frame #{}: size={}, keyframe={}, seq={}",
frames_sent,
encoded_frame.data.len(),
encoded_frame.is_keyframe,
encoded_frame.sequence
);
}
}
// Ensure decoder starts from a keyframe and recover on gaps.
let mut gap_detected = false;
if let Some(prev) = last_sequence {
if encoded_frame.sequence > prev.saturating_add(1) {
gap_detected = true;
}
}
if waiting_for_keyframe || gap_detected {
if encoded_frame.is_keyframe {
waiting_for_keyframe = false;
} else {
if gap_detected {
waiting_for_keyframe = true;
}
let now = Instant::now();
if now.duration_since(last_keyframe_request)
>= Duration::from_millis(200)
{
request_keyframe();
last_keyframe_request = now;
}
continue;
}
}
let _ = send_in_flight;
// Send encoded frame via RTP (drop if previous send is still in flight)
let send_result = video_track
.write_frame_bytes(
encoded_frame.data.clone(),
encoded_frame.is_keyframe,
)
.await;
let _ = send_in_flight;
if send_result.is_err() {
// Keep quiet unless debugging send failures elsewhere
} else {
frames_sent += 1;
last_sequence = Some(encoded_frame.sequence);
}
}
}
@@ -629,7 +696,10 @@ impl UniversalSession {
}
/// Start receiving Opus audio frames
pub async fn start_audio_from_opus(&self, mut opus_rx: broadcast::Receiver<OpusFrame>) {
pub async fn start_audio_from_opus(
&self,
mut opus_rx: tokio::sync::watch::Receiver<Option<std::sync::Arc<OpusFrame>>>,
) {
let audio_track = match &self.audio_track {
Some(track) => track.clone(),
None => {
@@ -684,26 +754,25 @@ impl UniversalSession {
}
}
result = opus_rx.recv() => {
match result {
Ok(opus_frame) => {
// 20ms at 48kHz = 960 samples
let samples = 960u32;
if let Err(e) = audio_track.write_packet(&opus_frame.data, samples).await {
if packets_sent % 100 == 0 {
debug!("Failed to write audio packet: {}", e);
}
} else {
packets_sent += 1;
}
}
Err(broadcast::error::RecvError::Lagged(n)) => {
warn!("Session {} audio lagged by {} packets", session_id, n);
}
Err(broadcast::error::RecvError::Closed) => {
info!("Opus channel closed for session {}", session_id);
break;
result = opus_rx.changed() => {
if result.is_err() {
info!("Opus channel closed for session {}", session_id);
break;
}
let opus_frame = match opus_rx.borrow().clone() {
Some(frame) => frame,
None => continue,
};
// 20ms at 48kHz = 960 samples
let samples = 960u32;
if let Err(e) = audio_track.write_packet(&opus_frame.data, samples).await {
if packets_sent % 100 == 0 {
debug!("Failed to write audio packet: {}", e);
}
} else {
packets_sent += 1;
}
}
}

View File

@@ -186,19 +186,6 @@ impl UniversalVideoTrackConfig {
}
}
/// Track statistics
#[derive(Debug, Clone, Default)]
pub struct VideoTrackStats {
/// Frames sent
pub frames_sent: u64,
/// Bytes sent
pub bytes_sent: u64,
/// Keyframes sent
pub keyframes_sent: u64,
/// Errors
pub errors: u64,
}
/// Track type wrapper to support different underlying track implementations
enum TrackType {
/// Sample-based track with built-in payloader (H264, VP8, VP9)
@@ -227,8 +214,6 @@ pub struct UniversalVideoTrack {
codec: VideoCodec,
/// Configuration
config: UniversalVideoTrackConfig,
/// Statistics
stats: Mutex<VideoTrackStats>,
/// H265 RTP state (only used for H265)
h265_state: Option<Mutex<H265RtpState>>,
}
@@ -277,7 +262,6 @@ impl UniversalVideoTrack {
track,
codec: config.codec,
config,
stats: Mutex::new(VideoTrackStats::default()),
h265_state,
}
}
@@ -301,9 +285,6 @@ impl UniversalVideoTrack {
}
/// Get current statistics
pub async fn stats(&self) -> VideoTrackStats {
self.stats.lock().await.clone()
}
/// Write an encoded frame to the track
///
@@ -332,7 +313,7 @@ impl UniversalVideoTrack {
///
/// Sends the entire Annex B frame as a single Sample to allow the
/// H264Payloader to aggregate SPS+PPS into STAP-A packets.
async fn write_h264_frame(&self, data: Bytes, is_keyframe: bool) -> Result<()> {
async fn write_h264_frame(&self, data: Bytes, _is_keyframe: bool) -> Result<()> {
// Send entire Annex B frame as one Sample
// The H264Payloader in rtp crate will:
// 1. Parse NAL units from Annex B format
@@ -340,7 +321,6 @@ impl UniversalVideoTrack {
// 3. Aggregate SPS+PPS+IDR into STAP-A when possible
// 4. Fragment large NALs using FU-A
let frame_duration = Duration::from_micros(1_000_000 / self.config.fps.max(1) as u64);
let data_len = data.len();
let sample = Sample {
data,
duration: frame_duration,
@@ -358,14 +338,6 @@ impl UniversalVideoTrack {
}
}
// Update stats
let mut stats = self.stats.lock().await;
stats.frames_sent += 1;
stats.bytes_sent += data_len as u64;
if is_keyframe {
stats.keyframes_sent += 1;
}
Ok(())
}
@@ -379,11 +351,10 @@ impl UniversalVideoTrack {
}
/// Write VP8 frame
async fn write_vp8_frame(&self, data: Bytes, is_keyframe: bool) -> Result<()> {
async fn write_vp8_frame(&self, data: Bytes, _is_keyframe: bool) -> Result<()> {
// VP8 frames are sent directly without NAL parsing
// Calculate frame duration based on configured FPS
let frame_duration = Duration::from_micros(1_000_000 / self.config.fps.max(1) as u64);
let data_len = data.len();
let sample = Sample {
data,
duration: frame_duration,
@@ -401,23 +372,14 @@ impl UniversalVideoTrack {
}
}
// Update stats
let mut stats = self.stats.lock().await;
stats.frames_sent += 1;
stats.bytes_sent += data_len as u64;
if is_keyframe {
stats.keyframes_sent += 1;
}
Ok(())
}
/// Write VP9 frame
async fn write_vp9_frame(&self, data: Bytes, is_keyframe: bool) -> Result<()> {
async fn write_vp9_frame(&self, data: Bytes, _is_keyframe: bool) -> Result<()> {
// VP9 frames are sent directly without NAL parsing
// Calculate frame duration based on configured FPS
let frame_duration = Duration::from_micros(1_000_000 / self.config.fps.max(1) as u64);
let data_len = data.len();
let sample = Sample {
data,
duration: frame_duration,
@@ -435,19 +397,11 @@ impl UniversalVideoTrack {
}
}
// Update stats
let mut stats = self.stats.lock().await;
stats.frames_sent += 1;
stats.bytes_sent += data_len as u64;
if is_keyframe {
stats.keyframes_sent += 1;
}
Ok(())
}
/// Send H265 NAL units via custom H265Payloader
async fn send_h265_rtp(&self, payload: Bytes, is_keyframe: bool) -> Result<()> {
async fn send_h265_rtp(&self, payload: Bytes, _is_keyframe: bool) -> Result<()> {
let rtp_track = match &self.track {
TrackType::Rtp(t) => t,
TrackType::Sample(_) => {
@@ -486,8 +440,6 @@ impl UniversalVideoTrack {
(payloads, timestamp, seq_start, num_payloads)
}; // Lock released here, before network I/O
let mut total_bytes = 0u64;
// Send RTP packets without holding the lock
for (i, payload_data) in payloads.into_iter().enumerate() {
let seq = seq_start.wrapping_add(i as u16);
@@ -513,15 +465,6 @@ impl UniversalVideoTrack {
trace!("H265 write_rtp failed: {}", e);
}
total_bytes += payload_data.len() as u64;
}
// Update stats
let mut stats = self.stats.lock().await;
stats.frames_sent += 1;
stats.bytes_sent += total_bytes;
if is_keyframe {
stats.keyframes_sent += 1;
}
Ok(())

View File

@@ -15,10 +15,6 @@
//! | +-- VP8 Encoder (hardware only - VAAPI)
//! | +-- VP9 Encoder (hardware only - VAAPI)
//! |
//! +-- Audio Pipeline
//! | +-- SharedAudioPipeline
//! | +-- OpusEncoder
//! |
//! +-- UniversalSession[] (video + audio tracks + DataChannel)
//! +-- UniversalVideoTrack (H264/H265/VP8/VP9)
//! +-- Audio Track (RTP/Opus)
@@ -29,23 +25,23 @@
//!
//! - **Single encoder**: All sessions share one video encoder
//! - **Multi-codec support**: H264, H265, VP8, VP9
//! - **Audio support**: Opus audio streaming via SharedAudioPipeline
//! - **Audio support**: Opus audio streaming via AudioController
//! - **HID via DataChannel**: Keyboard/mouse events through WebRTC DataChannel
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::{broadcast, RwLock};
use tracing::{debug, error, info, trace, warn};
use tokio::sync::RwLock;
use tracing::{debug, info, trace, warn};
use crate::audio::shared_pipeline::{SharedAudioPipeline, SharedAudioPipelineConfig};
use crate::audio::{AudioController, OpusFrame};
use crate::events::EventBus;
use crate::error::{AppError, Result};
use crate::hid::HidController;
use crate::video::encoder::registry::EncoderBackend;
use crate::video::encoder::registry::VideoEncoderType;
use crate::video::encoder::VideoCodecType;
use crate::video::format::{PixelFormat, Resolution};
use crate::video::frame::VideoFrame;
use crate::video::shared_video_pipeline::{
SharedVideoPipeline, SharedVideoPipelineConfig, SharedVideoPipelineStats,
};
@@ -91,6 +87,14 @@ impl Default for WebRtcStreamerConfig {
}
}
/// Capture device configuration for direct capture pipeline
#[derive(Debug, Clone)]
pub struct CaptureDeviceConfig {
pub device_path: PathBuf,
pub buffer_count: u32,
pub jpeg_quality: u8,
}
/// WebRTC streamer statistics
#[derive(Debug, Clone, Default)]
pub struct WebRtcStreamerStats {
@@ -102,30 +106,12 @@ pub struct WebRtcStreamerStats {
pub video_pipeline: Option<VideoPipelineStats>,
/// Audio enabled
pub audio_enabled: bool,
/// Audio pipeline stats (if available)
pub audio_pipeline: Option<AudioPipelineStats>,
}
/// Video pipeline statistics
///
/// Snapshot of counters reported by the shared video pipeline;
/// embedded in `WebRtcStreamerStats`.
#[derive(Debug, Clone, Default)]
pub struct VideoPipelineStats {
    /// Total frames successfully encoded.
    pub frames_encoded: u64,
    /// Frames dropped by the pipeline.
    pub frames_dropped: u64,
    /// Total encoded output size, in bytes.
    pub bytes_encoded: u64,
    /// Number of keyframes produced.
    pub keyframes_encoded: u64,
    /// Average per-frame encode time, in milliseconds.
    pub avg_encode_time_ms: f32,
    /// Current measured output frame rate.
    pub current_fps: f32,
    /// Number of active subscribers to the pipeline output.
    pub subscribers: u64,
}
/// Audio pipeline statistics
///
/// Snapshot of counters reported by the shared audio pipeline;
/// mirrors the shape of `VideoPipelineStats` minus video-only fields.
#[derive(Debug, Clone, Default)]
pub struct AudioPipelineStats {
    /// Total audio frames successfully encoded.
    pub frames_encoded: u64,
    /// Frames dropped by the pipeline.
    pub frames_dropped: u64,
    /// Total encoded output size, in bytes.
    pub bytes_encoded: u64,
    /// Average per-frame encode time, in milliseconds.
    pub avg_encode_time_ms: f32,
    /// Number of active subscribers to the pipeline output.
    pub subscribers: u64,
}
/// Session info for listing
@@ -151,20 +137,21 @@ pub struct WebRtcStreamer {
video_pipeline: RwLock<Option<Arc<SharedVideoPipeline>>>,
/// All sessions (unified management)
sessions: Arc<RwLock<HashMap<String, Arc<UniversalSession>>>>,
/// Video frame source
video_frame_tx: RwLock<Option<broadcast::Sender<VideoFrame>>>,
/// Capture device configuration for direct capture mode
capture_device: RwLock<Option<CaptureDeviceConfig>>,
// === Audio ===
/// Audio enabled flag
audio_enabled: RwLock<bool>,
/// Shared audio pipeline for Opus encoding
audio_pipeline: RwLock<Option<Arc<SharedAudioPipeline>>>,
/// Audio controller reference
audio_controller: RwLock<Option<Arc<AudioController>>>,
// === Controllers ===
/// HID controller for DataChannel
hid_controller: RwLock<Option<Arc<HidController>>>,
/// Event bus for WebRTC signaling (optional)
events: RwLock<Option<Arc<EventBus>>>,
}
impl WebRtcStreamer {
@@ -180,11 +167,11 @@ impl WebRtcStreamer {
video_codec: RwLock::new(config.video_codec),
video_pipeline: RwLock::new(None),
sessions: Arc::new(RwLock::new(HashMap::new())),
video_frame_tx: RwLock::new(None),
capture_device: RwLock::new(None),
audio_enabled: RwLock::new(config.audio_enabled),
audio_pipeline: RwLock::new(None),
audio_controller: RwLock::new(None),
hid_controller: RwLock::new(None),
events: RwLock::new(None),
})
}
@@ -219,9 +206,10 @@ impl WebRtcStreamer {
// Update codec
*self.video_codec.write().await = codec;
// Create new pipeline with new codec
if let Some(ref tx) = *self.video_frame_tx.read().await {
self.ensure_video_pipeline(tx.clone()).await?;
// Create new pipeline with new codec if capture source is configured
let has_capture = self.capture_device.read().await.is_some();
if has_capture {
self.ensure_video_pipeline().await?;
}
info!("Video codec switched to {:?}", codec);
@@ -263,10 +251,7 @@ impl WebRtcStreamer {
}
/// Ensure video pipeline is initialized and running
async fn ensure_video_pipeline(
self: &Arc<Self>,
tx: broadcast::Sender<VideoFrame>,
) -> Result<Arc<SharedVideoPipeline>> {
async fn ensure_video_pipeline(self: &Arc<Self>) -> Result<Arc<SharedVideoPipeline>> {
let mut pipeline_guard = self.video_pipeline.write().await;
if let Some(ref pipeline) = *pipeline_guard {
@@ -290,7 +275,16 @@ impl WebRtcStreamer {
info!("Creating shared video pipeline for {:?}", codec);
let pipeline = SharedVideoPipeline::new(pipeline_config)?;
pipeline.start(tx.subscribe()).await?;
let capture_device = self.capture_device.read().await.clone();
if let Some(device) = capture_device {
pipeline
.start_with_device(device.device_path, device.buffer_count, device.jpeg_quality)
.await?;
} else {
return Err(AppError::VideoError(
"No capture device configured".to_string(),
));
}
// Start a monitor task to detect when pipeline auto-stops
let pipeline_weak = Arc::downgrade(&pipeline);
@@ -317,11 +311,7 @@ impl WebRtcStreamer {
}
drop(pipeline_guard);
// NOTE: Don't clear video_frame_tx here!
// The frame source is managed by stream_manager and should
// remain available for new sessions. Only stream_manager
// should clear it during mode switches.
info!("Video pipeline stopped, but keeping frame source for new sessions");
info!("Video pipeline stopped, but keeping capture config for new sessions");
}
break;
}
@@ -339,9 +329,8 @@ impl WebRtcStreamer {
/// components (like RustDesk) that need to share the encoded video stream.
pub async fn ensure_video_pipeline_for_external(
self: &Arc<Self>,
tx: broadcast::Sender<VideoFrame>,
) -> Result<Arc<SharedVideoPipeline>> {
self.ensure_video_pipeline(tx).await
self.ensure_video_pipeline().await
}
/// Get the current pipeline configuration (if pipeline is running)
@@ -367,13 +356,10 @@ impl WebRtcStreamer {
self.config.write().await.audio_enabled = enabled;
if enabled && !was_enabled {
// Start audio pipeline if we have an audio controller
if let Some(ref controller) = *self.audio_controller.read().await {
self.start_audio_pipeline(controller.clone()).await?;
// Reconnect audio for existing sessions if we have a controller
if let Some(ref _controller) = *self.audio_controller.read().await {
self.reconnect_audio_sources().await;
}
} else if !enabled && was_enabled {
// Stop audio pipeline
self.stop_audio_pipeline().await;
}
info!("WebRTC audio enabled: {}", enabled);
@@ -385,61 +371,16 @@ impl WebRtcStreamer {
info!("Setting audio controller for WebRTC streamer");
*self.audio_controller.write().await = Some(controller.clone());
// Start audio pipeline if audio is enabled
// Reconnect audio for existing sessions if audio is enabled
if *self.audio_enabled.read().await {
if let Err(e) = self.start_audio_pipeline(controller).await {
error!("Failed to start audio pipeline: {}", e);
}
self.reconnect_audio_sources().await;
}
}
/// Start the shared audio pipeline
async fn start_audio_pipeline(&self, controller: Arc<AudioController>) -> Result<()> {
// Check if already running
if let Some(ref pipeline) = *self.audio_pipeline.read().await {
if pipeline.is_running() {
debug!("Audio pipeline already running");
return Ok(());
}
}
// Get Opus frame receiver from audio controller
let _opus_rx = match controller.subscribe_opus_async().await {
Some(rx) => rx,
None => {
warn!("Audio controller not streaming, cannot start audio pipeline");
return Ok(());
}
};
// Create shared audio pipeline config
let config = SharedAudioPipelineConfig::default();
let pipeline = SharedAudioPipeline::new(config)?;
// Note: SharedAudioPipeline expects raw AudioFrame, but AudioController
// already provides encoded OpusFrame. We'll pass the OpusFrame directly
// to sessions instead of re-encoding.
// For now, store the pipeline reference for future use
*self.audio_pipeline.write().await = Some(pipeline);
// Reconnect audio for all existing sessions
self.reconnect_audio_sources().await;
info!("WebRTC audio pipeline started");
Ok(())
}
/// Stop the shared audio pipeline
///
/// Takes the pipeline out of its slot atomically under a single write
/// lock and stops it if one was present. The previous implementation
/// checked under a read lock, then cleared under a separate write lock,
/// leaving a window in which a pipeline installed between the two locks
/// could be dropped without ever being stopped.
async fn stop_audio_pipeline(&self) {
    // `Option::take()` clears the slot and hands us the old pipeline
    // in one step, so stop + clear cannot interleave with an install.
    if let Some(pipeline) = self.audio_pipeline.write().await.take() {
        pipeline.stop();
    }
    info!("WebRTC audio pipeline stopped");
}
/// Subscribe to encoded Opus frames (for sessions)
pub async fn subscribe_opus(&self) -> Option<broadcast::Receiver<OpusFrame>> {
pub async fn subscribe_opus(
&self,
) -> Option<tokio::sync::watch::Receiver<Option<std::sync::Arc<OpusFrame>>>> {
if let Some(ref controller) = *self.audio_controller.read().await {
controller.subscribe_opus_async().await
} else {
@@ -463,38 +404,22 @@ impl WebRtcStreamer {
}
}
// === Video Frame Source ===
/// Set video frame source
pub async fn set_video_source(&self, tx: broadcast::Sender<VideoFrame>) {
/// Set capture device for direct capture pipeline
pub async fn set_capture_device(&self, device_path: PathBuf, jpeg_quality: u8) {
info!(
"Setting video source for WebRTC streamer (receiver_count={})",
tx.receiver_count()
"Setting direct capture device for WebRTC: {:?}",
device_path
);
*self.video_frame_tx.write().await = Some(tx.clone());
*self.capture_device.write().await = Some(CaptureDeviceConfig {
device_path,
buffer_count: 2,
jpeg_quality,
});
}
// Start or restart pipeline if it exists
if let Some(ref pipeline) = *self.video_pipeline.read().await {
if !pipeline.is_running() {
info!("Starting video pipeline with new frame source");
if let Err(e) = pipeline.start(tx.subscribe()).await {
error!("Failed to start video pipeline: {}", e);
}
} else {
// Pipeline is already running but may have old frame source
// We need to restart it with the new frame source
info!("Video pipeline already running, restarting with new frame source");
pipeline.stop();
tokio::time::sleep(tokio::time::Duration::from_millis(50)).await;
if let Err(e) = pipeline.start(tx.subscribe()).await {
error!("Failed to restart video pipeline: {}", e);
}
}
} else {
info!(
"No video pipeline exists yet, frame source will be used when pipeline is created"
);
}
/// Clear direct capture device configuration
pub async fn clear_capture_device(&self) {
    // Remove any previously configured capture device; the old value
    // (if any) is dropped here.
    let _ = self.capture_device.write().await.take();
}
/// Prepare for configuration change
@@ -509,11 +434,6 @@ impl WebRtcStreamer {
self.close_all_sessions().await;
}
/// Reconnect video source after configuration change
///
/// Thin wrapper around `set_video_source`; kept as a separate entry
/// point so call sites can express the "reconnect" intent explicitly.
pub async fn reconnect_video_source(&self, tx: broadcast::Sender<VideoFrame>) {
    // All pipeline (re)start logic lives in set_video_source.
    self.set_video_source(tx).await;
}
// === Configuration ===
/// Update video configuration
@@ -690,6 +610,11 @@ impl WebRtcStreamer {
*self.hid_controller.write().await = Some(hid);
}
/// Set event bus for WebRTC signaling events
pub async fn set_event_bus(&self, events: Arc<EventBus>) {
    // Install (or replace) the shared event bus used for signaling.
    let mut slot = self.events.write().await;
    *slot = Some(events);
}
// === Session Management ===
/// Create a new WebRTC session
@@ -698,13 +623,7 @@ impl WebRtcStreamer {
let codec = *self.video_codec.read().await;
// Ensure video pipeline is running
let frame_tx = self
.video_frame_tx
.read()
.await
.clone()
.ok_or_else(|| AppError::VideoError("No video frame source".to_string()))?;
let pipeline = self.ensure_video_pipeline(frame_tx).await?;
let pipeline = self.ensure_video_pipeline().await?;
// Create session config
let config = self.config.read().await;
@@ -720,7 +639,9 @@ impl WebRtcStreamer {
drop(config);
// Create universal session
let mut session = UniversalSession::new(session_config.clone(), session_id.clone()).await?;
let event_bus = self.events.read().await.clone();
let mut session =
UniversalSession::new(session_config.clone(), session_id.clone(), event_bus).await?;
// Set HID controller if available
// Note: We DON'T create a data channel here - the frontend creates it.
@@ -734,22 +655,22 @@ impl WebRtcStreamer {
let session = Arc::new(session);
// Subscribe to video pipeline frames
// Request keyframe after ICE connection is established (via callback)
// Request keyframe after ICE connection is established and on gaps
let pipeline_for_callback = pipeline.clone();
let session_id_for_callback = session_id.clone();
let request_keyframe = Arc::new(move || {
let pipeline = pipeline_for_callback.clone();
let sid = session_id_for_callback.clone();
tokio::spawn(async move {
info!(
"Requesting keyframe for session {} after ICE connected",
sid
);
pipeline.request_keyframe().await;
});
});
session
.start_from_video_pipeline(pipeline.subscribe(), move || {
// Spawn async task to request keyframe
let pipeline = pipeline_for_callback;
let sid = session_id_for_callback;
tokio::spawn(async move {
info!(
"Requesting keyframe for session {} after ICE connected",
sid
);
pipeline.request_keyframe().await;
});
})
.start_from_video_pipeline(pipeline.subscribe(), request_keyframe)
.await;
// Start audio if enabled
@@ -913,27 +834,7 @@ impl WebRtcStreamer {
let video_pipeline = if let Some(ref pipeline) = *self.video_pipeline.read().await {
let s = pipeline.stats().await;
Some(VideoPipelineStats {
frames_encoded: s.frames_encoded,
frames_dropped: s.frames_dropped,
bytes_encoded: s.bytes_encoded,
keyframes_encoded: s.keyframes_encoded,
avg_encode_time_ms: s.avg_encode_time_ms,
current_fps: s.current_fps,
subscribers: s.subscribers,
})
} else {
None
};
// Get audio pipeline stats
let audio_pipeline = if let Some(ref pipeline) = *self.audio_pipeline.read().await {
let stats = pipeline.stats().await;
Some(AudioPipelineStats {
frames_encoded: stats.frames_encoded,
frames_dropped: stats.frames_dropped,
bytes_encoded: stats.bytes_encoded,
avg_encode_time_ms: stats.avg_encode_time_ms,
subscribers: stats.subscribers,
})
} else {
None
@@ -944,7 +845,6 @@ impl WebRtcStreamer {
video_codec: format!("{:?}", codec),
video_pipeline,
audio_enabled: *self.audio_enabled.read().await,
audio_pipeline,
}
}
@@ -984,9 +884,6 @@ impl WebRtcStreamer {
if pipeline_running {
info!("Restarting video pipeline to apply new bitrate: {}", preset);
// Save video_frame_tx BEFORE stopping pipeline (monitor task will clear it)
let saved_frame_tx = self.video_frame_tx.read().await.clone();
// Stop existing pipeline
if let Some(ref pipeline) = *self.video_pipeline.read().await {
pipeline.stop();
@@ -998,46 +895,43 @@ impl WebRtcStreamer {
// Clear pipeline reference - will be recreated
*self.video_pipeline.write().await = None;
// Recreate pipeline with new config if we have a frame source
if let Some(tx) = saved_frame_tx {
// Get existing sessions that need to be reconnected
let session_ids: Vec<String> = self.sessions.read().await.keys().cloned().collect();
let has_source = self.capture_device.read().await.is_some();
if !has_source {
return Ok(());
}
if !session_ids.is_empty() {
// Restore video_frame_tx before recreating pipeline
*self.video_frame_tx.write().await = Some(tx.clone());
let session_ids: Vec<String> = self.sessions.read().await.keys().cloned().collect();
if !session_ids.is_empty() {
let pipeline = self.ensure_video_pipeline().await?;
// Recreate pipeline
let pipeline = self.ensure_video_pipeline(tx).await?;
// Reconnect all sessions to new pipeline
let sessions = self.sessions.read().await;
for session_id in &session_ids {
if let Some(session) = sessions.get(session_id) {
info!("Reconnecting session {} to new pipeline", session_id);
let pipeline_for_callback = pipeline.clone();
let sid = session_id.clone();
session
.start_from_video_pipeline(pipeline.subscribe(), move || {
let pipeline = pipeline_for_callback;
tokio::spawn(async move {
info!(
"Requesting keyframe for session {} after reconnect",
sid
);
pipeline.request_keyframe().await;
});
})
.await;
}
let sessions = self.sessions.read().await;
for session_id in &session_ids {
if let Some(session) = sessions.get(session_id) {
info!("Reconnecting session {} to new pipeline", session_id);
let pipeline_for_callback = pipeline.clone();
let sid = session_id.clone();
let request_keyframe = Arc::new(move || {
let pipeline = pipeline_for_callback.clone();
let sid = sid.clone();
tokio::spawn(async move {
info!(
"Requesting keyframe for session {} after reconnect",
sid
);
pipeline.request_keyframe().await;
});
});
session
.start_from_video_pipeline(pipeline.subscribe(), request_keyframe)
.await;
}
info!(
"Video pipeline restarted with {}, reconnected {} sessions",
preset,
session_ids.len()
);
}
info!(
"Video pipeline restarted with {}, reconnected {} sessions",
preset,
session_ids.len()
);
}
} else {
debug!(
@@ -1057,11 +951,11 @@ impl Default for WebRtcStreamer {
video_codec: RwLock::new(VideoCodecType::H264),
video_pipeline: RwLock::new(None),
sessions: Arc::new(RwLock::new(HashMap::new())),
video_frame_tx: RwLock::new(None),
capture_device: RwLock::new(None),
audio_enabled: RwLock::new(false),
audio_pipeline: RwLock::new(None),
audio_controller: RwLock::new(None),
hid_controller: RwLock::new(None),
events: RwLock::new(None),
}
}
}