mirror of
https://github.com/mofeng-git/One-KVM.git
synced 2026-01-29 00:51:53 +08:00
init
This commit is contained in:
103
src/webrtc/config.rs
Normal file
103
src/webrtc/config.rs
Normal file
@@ -0,0 +1,103 @@
|
||||
//! WebRTC configuration
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// WebRTC configuration
///
/// Top-level settings for the WebRTC streaming stack: ICE (STUN/TURN)
/// servers, codec preference, bitrate bounds, and optional audio /
/// HID DataChannel features. Serializable for use in config files.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WebRtcConfig {
    /// Enable WebRTC
    pub enabled: bool,
    /// STUN server URLs
    pub stun_servers: Vec<String>,
    /// TURN server configuration
    pub turn_servers: Vec<TurnServer>,
    /// Enable DataChannel for HID
    pub enable_datachannel: bool,
    /// Video codec preference
    pub video_codec: VideoCodec,
    /// Target bitrate in kbps
    pub target_bitrate_kbps: u32,
    /// Maximum bitrate in kbps (intended upper bound; expected to be >= target)
    pub max_bitrate_kbps: u32,
    /// Minimum bitrate in kbps (intended lower bound; expected to be <= target)
    pub min_bitrate_kbps: u32,
    /// Enable audio track
    pub enable_audio: bool,
}
|
||||
|
||||
impl Default for WebRtcConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
enabled: true,
|
||||
// Empty STUN servers for local connections - host candidates work directly
|
||||
// For remote access, configure STUN/TURN servers via settings
|
||||
stun_servers: vec![],
|
||||
turn_servers: vec![],
|
||||
enable_datachannel: true,
|
||||
video_codec: VideoCodec::H264,
|
||||
target_bitrate_kbps: 8000,
|
||||
max_bitrate_kbps: 5000,
|
||||
min_bitrate_kbps: 500,
|
||||
enable_audio: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// TURN server configuration
///
/// Relay fallback for peers that cannot reach each other directly
/// (e.g. behind symmetric NAT). All three fields are required.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TurnServer {
    /// TURN server URL (e.g., "turn:turn.example.com:3478")
    pub url: String,
    /// Username for TURN authentication
    pub username: String,
    /// Credential for TURN authentication
    pub credential: String,
}
|
||||
|
||||
/// Video codec preference
///
/// Serialized in lowercase ("h264", "vp8", "vp9", "av1") via the
/// `rename_all` attribute, for config files and API payloads.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum VideoCodec {
    H264,
    VP8,
    VP9,
    AV1,
}
|
||||
|
||||
impl Default for VideoCodec {
|
||||
fn default() -> Self {
|
||||
Self::H264
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for VideoCodec {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
VideoCodec::H264 => write!(f, "H.264"),
|
||||
VideoCodec::VP8 => write!(f, "VP8"),
|
||||
VideoCodec::VP9 => write!(f, "VP9"),
|
||||
VideoCodec::AV1 => write!(f, "AV1"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// ICE configuration
///
/// Internal tuning knobs for ICE timing and mode; not serialized
/// (no serde derives, unlike the other config types in this file).
#[derive(Debug, Clone)]
pub struct IceConfig {
    /// ICE candidate gathering timeout (ms)
    pub gathering_timeout_ms: u64,
    /// ICE connection timeout (ms)
    pub connection_timeout_ms: u64,
    /// Enable ICE lite mode
    pub ice_lite: bool,
}
|
||||
|
||||
impl Default for IceConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
gathering_timeout_ms: 5000,
|
||||
connection_timeout_ms: 30000,
|
||||
ice_lite: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
442
src/webrtc/h265_payloader.rs
Normal file
442
src/webrtc/h265_payloader.rs
Normal file
@@ -0,0 +1,442 @@
|
||||
//! H.265/HEVC RTP Payloader
|
||||
//!
|
||||
//! Implements RFC 7798: RTP Payload Format for High Efficiency Video Coding (HEVC)
|
||||
//!
|
||||
//! H.265 NAL unit header (2 bytes):
|
||||
//! ```text
|
||||
//! +---------------+---------------+
|
||||
//! |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
|
||||
//! +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
//! |F| Type | LayerId | TID |
|
||||
//! +---------------+---------------+
|
||||
//! ```
|
||||
//!
|
||||
//! Fragmentation Unit (FU) header:
|
||||
//! ```text
|
||||
//! +---------------+---------------+---------------+
|
||||
//! |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
|
||||
//! +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
//! |F| Type(49) | LayerId | TID |S|E| FuType |
|
||||
//! +---------------+---------------+---------------+
|
||||
//! ```
|
||||
//!
|
||||
//! Aggregation Packet (AP) for VPS+SPS+PPS:
|
||||
//! ```text
|
||||
//! +---------------+---------------+---------------+---------------+
|
||||
//! | PayloadHdr (Type=48) | NALU 1 Size | NALU 1 Size |
|
||||
//! +---------------+---------------+---------------+---------------+
|
||||
//! | NALU 1 HDR | NALU 1 Data | NALU 2 Size | ... |
|
||||
//! +---------------+---------------+---------------+---------------+
|
||||
//! ```
|
||||
|
||||
use bytes::{BufMut, Bytes, BytesMut};
|
||||
|
||||
/// H.265 NAL unit types (6 bits)
const H265_NAL_VPS: u8 = 32;
const H265_NAL_SPS: u8 = 33;
const H265_NAL_PPS: u8 = 34;
// Access unit delimiter - carries no picture data; dropped by the payloader
const H265_NAL_AUD: u8 = 35;
// Filler data - padding only; dropped by the payloader
const H265_NAL_FILLER: u8 = 38;
#[allow(dead_code)]
const H265_NAL_SEI_PREFIX: u8 = 39; // PREFIX_SEI_NUT
#[allow(dead_code)]
const H265_NAL_SEI_SUFFIX: u8 = 40; // SUFFIX_SEI_NUT
#[allow(dead_code)]
const H265_NAL_AP: u8 = 48; // Aggregation Packet
const H265_NAL_FU: u8 = 49; // Fragmentation Unit

/// H.265 NAL header size
const H265_NAL_HEADER_SIZE: usize = 2;

/// FU header size (1 byte after NAL header)
const H265_FU_HEADER_SIZE: usize = 1;

/// Fixed PayloadHdr for FU packets: Type=49, LayerID=0, TID=1
/// This matches the rtp crate's FRAG_PAYLOAD_HDR
#[allow(dead_code)]
const FU_PAYLOAD_HDR: [u8; 2] = [0x62, 0x01];

/// Fixed PayloadHdr for AP packets: Type=48, LayerID=0, TID=1
/// This matches the rtp crate's AGGR_PAYLOAD_HDR
const AP_PAYLOAD_HDR: [u8; 2] = [0x60, 0x01];
|
||||
|
||||
/// H.265 RTP Payloader
///
/// Fragments H.265 NAL units for RTP transmission according to RFC 7798.
/// Parameter sets (VPS/SPS/PPS) seen in the stream are cached and emitted
/// together in one Aggregation Packet just before the next video NAL.
#[derive(Default, Debug, Clone)]
pub struct H265Payloader {
    /// Cached VPS NAL unit (cleared once sent in an AP)
    vps_nalu: Option<Bytes>,
    /// Cached SPS NAL unit (cleared once sent in an AP)
    sps_nalu: Option<Bytes>,
    /// Cached PPS NAL unit (cleared once sent in an AP)
    pps_nalu: Option<Bytes>,
}
|
||||
|
||||
impl H265Payloader {
|
||||
/// Create a new H265Payloader
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
/// Find the next Annex B start code in the NAL data
|
||||
fn next_ind(nalu: &Bytes, start: usize) -> (isize, isize) {
|
||||
let mut zero_count = 0;
|
||||
|
||||
for (i, &b) in nalu[start..].iter().enumerate() {
|
||||
if b == 0 {
|
||||
zero_count += 1;
|
||||
continue;
|
||||
} else if b == 1 && zero_count >= 2 {
|
||||
return ((start + i - zero_count) as isize, zero_count as isize + 1);
|
||||
}
|
||||
zero_count = 0;
|
||||
}
|
||||
(-1, -1)
|
||||
}
|
||||
|
||||
/// Extract NAL unit type from H.265 NAL header
|
||||
fn get_nal_type(nalu: &[u8]) -> u8 {
|
||||
if nalu.len() < 2 {
|
||||
return 0;
|
||||
}
|
||||
// Type is in bits 1-6 of the first byte
|
||||
(nalu[0] >> 1) & 0x3F
|
||||
}
|
||||
|
||||
/// Emit a single NAL unit, fragmenting if necessary
|
||||
fn emit(&mut self, nalu: &Bytes, mtu: usize, payloads: &mut Vec<Bytes>) {
|
||||
if nalu.len() < H265_NAL_HEADER_SIZE {
|
||||
return;
|
||||
}
|
||||
|
||||
let nal_type = Self::get_nal_type(nalu);
|
||||
|
||||
// Skip AUD and filler data
|
||||
if nal_type == H265_NAL_AUD || nal_type == H265_NAL_FILLER {
|
||||
return;
|
||||
}
|
||||
|
||||
// Cache parameter sets (VPS/SPS/PPS)
|
||||
match nal_type {
|
||||
H265_NAL_VPS => {
|
||||
self.vps_nalu = Some(nalu.clone());
|
||||
return; // Don't emit VPS separately, will be sent in AP
|
||||
}
|
||||
H265_NAL_SPS => {
|
||||
self.sps_nalu = Some(nalu.clone());
|
||||
return; // Don't emit SPS separately, will be sent in AP
|
||||
}
|
||||
H265_NAL_PPS => {
|
||||
self.pps_nalu = Some(nalu.clone());
|
||||
return; // Don't emit PPS separately, will be sent in AP
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Try to emit Aggregation Packet with VPS+SPS+PPS before video NAL
|
||||
self.try_emit_aggregation_packet(mtu, payloads);
|
||||
|
||||
// Single NAL unit mode - if NAL fits in one packet
|
||||
if nalu.len() <= mtu {
|
||||
payloads.push(nalu.clone());
|
||||
return;
|
||||
}
|
||||
|
||||
// Fragmentation Unit (FU) mode - fragment large NAL units
|
||||
self.emit_fragmented(nalu, mtu, payloads);
|
||||
}
|
||||
|
||||
/// Try to emit an Aggregation Packet containing VPS+SPS+PPS
|
||||
fn try_emit_aggregation_packet(&mut self, mtu: usize, payloads: &mut Vec<Bytes>) {
|
||||
// Check if we have all three parameter sets
|
||||
let (vps, sps, pps) = match (&self.vps_nalu, &self.sps_nalu, &self.pps_nalu) {
|
||||
(Some(v), Some(s), Some(p)) => (v.clone(), s.clone(), p.clone()),
|
||||
_ => return,
|
||||
};
|
||||
|
||||
// Calculate AP size: PayloadHdr(2) + 3x(NALU size(2) + NALU data)
|
||||
let ap_size = H265_NAL_HEADER_SIZE + 2 + vps.len() + 2 + sps.len() + 2 + pps.len();
|
||||
|
||||
// Only create AP if it fits in MTU
|
||||
if ap_size > mtu {
|
||||
// Fall back to sending separately (as single NAL unit packets)
|
||||
payloads.push(vps);
|
||||
payloads.push(sps);
|
||||
payloads.push(pps);
|
||||
self.vps_nalu = None;
|
||||
self.sps_nalu = None;
|
||||
self.pps_nalu = None;
|
||||
return;
|
||||
}
|
||||
|
||||
// Create Aggregation Packet
|
||||
let mut ap = BytesMut::with_capacity(ap_size);
|
||||
|
||||
// PayloadHdr for AP (Type=48)
|
||||
ap.extend_from_slice(&AP_PAYLOAD_HDR);
|
||||
|
||||
// VPS: size (2 bytes big-endian) + data
|
||||
ap.put_u16(vps.len() as u16);
|
||||
ap.extend_from_slice(&vps);
|
||||
|
||||
// SPS: size (2 bytes big-endian) + data
|
||||
ap.put_u16(sps.len() as u16);
|
||||
ap.extend_from_slice(&sps);
|
||||
|
||||
// PPS: size (2 bytes big-endian) + data
|
||||
ap.put_u16(pps.len() as u16);
|
||||
ap.extend_from_slice(&pps);
|
||||
|
||||
payloads.push(ap.freeze());
|
||||
|
||||
// Clear cached parameter sets
|
||||
self.vps_nalu = None;
|
||||
self.sps_nalu = None;
|
||||
self.pps_nalu = None;
|
||||
}
|
||||
|
||||
/// Fragment a large NAL unit using FU packets
|
||||
fn emit_fragmented(&self, nalu: &Bytes, mtu: usize, payloads: &mut Vec<Bytes>) {
|
||||
if nalu.len() < H265_NAL_HEADER_SIZE {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get original NAL type for FU header
|
||||
let nal_type = Self::get_nal_type(nalu);
|
||||
|
||||
// Maximum payload size per FU packet
|
||||
// FU packet = NAL header (2) + FU header (1) + payload
|
||||
let max_fragment_size = mtu - H265_NAL_HEADER_SIZE - H265_FU_HEADER_SIZE;
|
||||
|
||||
if max_fragment_size == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip the original NAL header, we'll create new FU headers
|
||||
let nalu_payload = &nalu[H265_NAL_HEADER_SIZE..];
|
||||
let full_nalu_size = nalu_payload.len();
|
||||
|
||||
if full_nalu_size == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut offset = 0;
|
||||
|
||||
while offset < full_nalu_size {
|
||||
let remaining = full_nalu_size - offset;
|
||||
let fragment_size = remaining.min(max_fragment_size);
|
||||
|
||||
// Create FU packet
|
||||
let mut packet = BytesMut::with_capacity(H265_NAL_HEADER_SIZE + H265_FU_HEADER_SIZE + fragment_size);
|
||||
|
||||
// NAL header for FU (2 bytes)
|
||||
// Preserve F bit (bit 7) and LayerID MSB (bit 0) from original, set Type to 49
|
||||
// This matches go2rtc approach: out[0] = (out[0] & 0b10000001) | (49 << 1)
|
||||
let byte0 = (nalu[0] & 0b10000001) | (H265_NAL_FU << 1);
|
||||
// Keep original byte1 (LayerID low 5 bits + TID) unchanged
|
||||
let byte1 = nalu[1];
|
||||
packet.put_u8(byte0);
|
||||
packet.put_u8(byte1);
|
||||
|
||||
// FU header (1 byte)
|
||||
// S (1 bit) | E (1 bit) | FuType (6 bits)
|
||||
let mut fu_header = nal_type;
|
||||
if offset == 0 {
|
||||
fu_header |= 0x80; // S bit - start of fragmented NAL
|
||||
}
|
||||
if offset + fragment_size >= full_nalu_size {
|
||||
fu_header |= 0x40; // E bit - end of fragmented NAL
|
||||
}
|
||||
packet.put_u8(fu_header);
|
||||
|
||||
// FU payload
|
||||
packet.put_slice(&nalu_payload[offset..offset + fragment_size]);
|
||||
|
||||
payloads.push(packet.freeze());
|
||||
|
||||
offset += fragment_size;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl H265Payloader {
|
||||
/// Payload fragments H.265 packets across one or more RTP payloads
|
||||
///
|
||||
/// Takes Annex B format NAL units (with start codes) and returns RTP payloads
|
||||
pub fn payload(&mut self, mtu: usize, payload: &Bytes) -> Vec<Bytes> {
|
||||
if payload.is_empty() || mtu == 0 {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
let mut payloads = vec![];
|
||||
|
||||
// Parse Annex B format NAL units
|
||||
let (mut next_ind_start, mut next_ind_len) = Self::next_ind(payload, 0);
|
||||
if next_ind_start == -1 {
|
||||
// No start code found, treat entire payload as single NAL
|
||||
self.emit(payload, mtu, &mut payloads);
|
||||
} else {
|
||||
while next_ind_start != -1 {
|
||||
let prev_start = (next_ind_start + next_ind_len) as usize;
|
||||
let (next_ind_start2, next_ind_len2) = Self::next_ind(payload, prev_start);
|
||||
next_ind_start = next_ind_start2;
|
||||
next_ind_len = next_ind_len2;
|
||||
|
||||
if next_ind_start != -1 {
|
||||
self.emit(
|
||||
&payload.slice(prev_start..next_ind_start as usize),
|
||||
mtu,
|
||||
&mut payloads,
|
||||
);
|
||||
} else {
|
||||
// Emit until end of stream
|
||||
self.emit(&payload.slice(prev_start..), mtu, &mut payloads);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
payloads
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // NAL type extraction: type lives in bits 1-6 of the first header byte.
    #[test]
    fn test_get_nal_type() {
        // VPS (type 32): 0x40 = 0100 0000, type = 32
        assert_eq!(H265Payloader::get_nal_type(&[0x40, 0x01]), 32);
        // SPS (type 33): 0x42 = 0100 0010, type = 33
        assert_eq!(H265Payloader::get_nal_type(&[0x42, 0x01]), 33);
        // PPS (type 34): 0x44 = 0100 0100, type = 34
        assert_eq!(H265Payloader::get_nal_type(&[0x44, 0x01]), 34);
        // IDR (type 19): 0x26 = 0010 0110, type = 19
        assert_eq!(H265Payloader::get_nal_type(&[0x26, 0x01]), 19);
    }

    // A NAL that fits within the MTU must pass through unmodified
    // (single NAL unit packet mode).
    #[test]
    fn test_small_nalu() {
        let mut payloader = H265Payloader::new();
        // Small NAL that fits in MTU (no start code, just NAL data)
        let small_nal = Bytes::from(vec![0x26, 0x01, 0x00, 0x00, 0x00]); // IDR type
        let result = payloader.payload(1200, &small_nal);
        assert_eq!(result.len(), 1);
        assert_eq!(result[0], small_nal);
    }

    // A NAL larger than the MTU must be split into FU packets with the
    // S bit on the first fragment and the E bit on the last.
    #[test]
    fn test_fragmentation() {
        let mut payloader = H265Payloader::new();
        // Large NAL that needs fragmentation
        let mut large_nal = vec![0x26, 0x01]; // IDR type header
        large_nal.extend(vec![0xAA; 2000]); // Payload
        let large_nal = Bytes::from(large_nal);

        let mtu = 1200;
        let result = payloader.payload(mtu, &large_nal);

        // Should be fragmented into multiple FU packets
        assert!(result.len() > 1);

        // Check first packet has S bit set (byte 2 is the FU header)
        assert_eq!(result[0][2] & 0x80, 0x80);

        // Check last packet has E bit set
        let last = result.last().unwrap();
        assert_eq!(last[2] & 0x40, 0x40);
    }

    // Byte-level check of every FU packet: rewritten PayloadHdr, preserved
    // byte1, and correct S/E/FuType layout in the FU header.
    #[test]
    fn test_fu_packet_format() {
        let mut payloader = H265Payloader::new();
        // IDR NAL: type=19, header = 0x26 0x01
        let mut nal = vec![0x26, 0x01]; // IDR type header (type=19, TID=1)
        nal.extend(vec![0xAA; 2000]); // Payload
        let nal = Bytes::from(nal);

        let mtu = 100; // Small MTU to force fragmentation
        let result = payloader.payload(mtu, &nal);

        // Verify FU packet structure
        for (i, pkt) in result.iter().enumerate() {
            assert!(pkt.len() >= 3, "Packet too short");

            // Check PayloadHdr (2 bytes)
            let byte0 = pkt[0];
            let byte1 = pkt[1];
            let nal_type = (byte0 >> 1) & 0x3F;

            assert_eq!(nal_type, 49, "PayloadHdr type should be 49 (FU)");
            // byte0 should be: (0x26 & 0x81) | (49 << 1) = 0x00 | 0x62 = 0x62
            assert_eq!(byte0, 0x62, "byte0 should be 0x62");
            // byte1 should be preserved from original: 0x01
            assert_eq!(byte1, 0x01, "byte1 should be 0x01");

            // Check FU header (1 byte)
            let fu_header = pkt[2];
            let fu_s = (fu_header >> 7) & 1;
            let fu_e = (fu_header >> 6) & 1;
            let fu_type = fu_header & 0x3F;

            assert_eq!(fu_type, 19, "FU type should be 19 (IDR)");

            if i == 0 {
                assert_eq!(fu_s, 1, "First packet should have S=1");
                assert_eq!(fu_e, 0, "First packet should have E=0");
            } else if i == result.len() - 1 {
                assert_eq!(fu_s, 0, "Last packet should have S=0");
                assert_eq!(fu_e, 1, "Last packet should have E=1");
            } else {
                assert_eq!(fu_s, 0, "Middle packet should have S=0");
                assert_eq!(fu_e, 0, "Middle packet should have E=0");
            }
        }
    }

    // Round-trip: every packet we produce must be accepted by the rtp
    // crate's H.265 depacketizer and be recognized as an FU packet.
    #[test]
    fn test_verify_with_rtp_depacketizer() {
        use rtp::codecs::h265::{H265Packet, H265Payload, H265FragmentationUnitPacket};
        use rtp::packetizer::Depacketizer;

        let mut payloader = H265Payloader::new();
        // Create IDR NAL with enough data to fragment
        let mut nal = vec![0x26, 0x01]; // IDR type=19
        nal.extend(vec![0xBB; 3000]);
        let nal = Bytes::from(nal);

        let result = payloader.payload(1200, &nal);
        assert!(result.len() > 1, "Should produce multiple FU packets");

        // Verify each packet can be depacketized by rtp crate
        for (i, pkt) in result.iter().enumerate() {
            let mut h265_pkt = H265Packet::default();
            let depack_result = h265_pkt.depacketize(pkt);

            assert!(
                depack_result.is_ok(),
                "Packet {} failed to depacketize: {:?}, bytes: {:02x?}",
                i,
                depack_result.err(),
                &pkt[..3.min(pkt.len())]
            );

            // Verify it's recognized as FU packet
            match h265_pkt.payload() {
                H265Payload::H265FragmentationUnitPacket(fu) => {
                    assert_eq!(fu.fu_header().fu_type(), 19, "FU type should be 19");
                    if i == 0 {
                        assert!(fu.fu_header().s(), "First packet S bit");
                    }
                    if i == result.len() - 1 {
                        assert!(fu.fu_header().e(), "Last packet E bit");
                    }
                }
                other => panic!("Expected FU packet, got {:?}", other),
            }
        }

        println!("All {} FU packets verified successfully!", result.len());
    }
}
|
||||
46
src/webrtc/mod.rs
Normal file
46
src/webrtc/mod.rs
Normal file
@@ -0,0 +1,46 @@
|
||||
//! WebRTC module for low-latency video streaming
|
||||
//!
|
||||
//! This module provides WebRTC-based video streaming with:
|
||||
//! - H.264 video track (hardware/software encoding)
|
||||
//! - H.265 video track (hardware only)
|
||||
//! - VP8/VP9 video track (hardware only - VAAPI)
|
||||
//! - Opus audio track (optional)
|
||||
//! - DataChannel for HID events
|
||||
//!
|
||||
//! Architecture:
|
||||
//! ```text
|
||||
//! VideoCapturer (MJPEG/YUYV)
|
||||
//! |
|
||||
//! v
|
||||
//! SharedVideoPipeline (decode -> convert -> encode)
|
||||
//! |
|
||||
//! v
|
||||
//! UniversalVideoTrack (RTP packetization)
|
||||
//! |
|
||||
//! v
|
||||
//! WebRTC PeerConnection
|
||||
//! |
|
||||
//! Browser <-------- SDP Exchange ------- API Server
|
||||
//! |
|
||||
//! +------- DataChannel ------> HID Events
|
||||
//! ```
|
||||
|
||||
pub mod config;
|
||||
pub mod h265_payloader;
|
||||
pub mod peer;
|
||||
pub mod rtp;
|
||||
pub mod session;
|
||||
pub mod signaling;
|
||||
pub mod track;
|
||||
pub mod universal_session;
|
||||
pub mod video_track;
|
||||
pub mod webrtc_streamer;
|
||||
|
||||
pub use config::WebRtcConfig;
|
||||
pub use peer::PeerConnection;
|
||||
pub use rtp::{H264VideoTrack, H264VideoTrackConfig, OpusAudioTrack};
|
||||
pub use session::WebRtcSessionManager;
|
||||
pub use signaling::{ConnectionState, IceCandidate, SdpAnswer, SdpOffer, SignalingMessage};
|
||||
pub use universal_session::{UniversalSession, UniversalSessionConfig, UniversalSessionInfo};
|
||||
pub use video_track::{UniversalVideoTrack, UniversalVideoTrackConfig, VideoCodec, VideoTrackStats};
|
||||
pub use webrtc_streamer::{SessionInfo, WebRtcStreamer, WebRtcStreamerConfig, WebRtcStreamerStats};
|
||||
492
src/webrtc/peer.rs
Normal file
492
src/webrtc/peer.rs
Normal file
@@ -0,0 +1,492 @@
|
||||
//! WebRTC peer connection management
|
||||
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::{broadcast, watch, Mutex, RwLock};
|
||||
use tracing::{debug, info};
|
||||
use webrtc::api::interceptor_registry::register_default_interceptors;
|
||||
use webrtc::api::media_engine::MediaEngine;
|
||||
use webrtc::api::APIBuilder;
|
||||
use webrtc::data_channel::data_channel_message::DataChannelMessage;
|
||||
use webrtc::data_channel::RTCDataChannel;
|
||||
use webrtc::ice_transport::ice_candidate::RTCIceCandidate;
|
||||
use webrtc::ice_transport::ice_server::RTCIceServer;
|
||||
use webrtc::interceptor::registry::Registry;
|
||||
use webrtc::peer_connection::configuration::RTCConfiguration;
|
||||
use webrtc::peer_connection::peer_connection_state::RTCPeerConnectionState;
|
||||
use webrtc::peer_connection::sdp::session_description::RTCSessionDescription;
|
||||
use webrtc::peer_connection::RTCPeerConnection;
|
||||
|
||||
use super::config::WebRtcConfig;
|
||||
use super::signaling::{ConnectionState, IceCandidate, SdpAnswer, SdpOffer};
|
||||
use super::track::{VideoTrack, VideoTrackConfig};
|
||||
use crate::error::{AppError, Result};
|
||||
use crate::hid::datachannel::{parse_hid_message, HidChannelEvent};
|
||||
use crate::hid::HidController;
|
||||
use crate::video::frame::VideoFrame;
|
||||
|
||||
/// Peer connection wrapper with event handling
///
/// Owns the underlying `RTCPeerConnection` for one session, plus its video
/// track, HID DataChannel, connection-state watch channel, and the ICE
/// candidates gathered for the SDP answer.
pub struct PeerConnection {
    /// Session ID
    pub session_id: String,
    /// WebRTC peer connection
    pc: Arc<RTCPeerConnection>,
    /// Video track
    video_track: Option<VideoTrack>,
    /// Data channel for HID events
    data_channel: Arc<RwLock<Option<Arc<RTCDataChannel>>>>,
    /// Connection state
    state: Arc<watch::Sender<ConnectionState>>,
    /// State receiver
    state_rx: watch::Receiver<ConnectionState>,
    /// ICE candidates gathered
    ice_candidates: Arc<Mutex<Vec<IceCandidate>>>,
    /// HID controller reference
    hid_controller: Option<Arc<HidController>>,
}
|
||||
|
||||
impl PeerConnection {
|
||||
/// Create a new peer connection
|
||||
pub async fn new(config: &WebRtcConfig, session_id: String) -> Result<Self> {
|
||||
// Create media engine
|
||||
let mut media_engine = MediaEngine::default();
|
||||
|
||||
// Register codecs
|
||||
media_engine
|
||||
.register_default_codecs()
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to register codecs: {}", e)))?;
|
||||
|
||||
// Create interceptor registry
|
||||
let mut registry = Registry::new();
|
||||
registry = register_default_interceptors(registry, &mut media_engine)
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to register interceptors: {}", e)))?;
|
||||
|
||||
// Create API
|
||||
let api = APIBuilder::new()
|
||||
.with_media_engine(media_engine)
|
||||
.with_interceptor_registry(registry)
|
||||
.build();
|
||||
|
||||
// Build ICE servers
|
||||
let mut ice_servers = vec![];
|
||||
|
||||
for stun_url in &config.stun_servers {
|
||||
ice_servers.push(RTCIceServer {
|
||||
urls: vec![stun_url.clone()],
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
|
||||
for turn in &config.turn_servers {
|
||||
ice_servers.push(RTCIceServer {
|
||||
urls: vec![turn.url.clone()],
|
||||
username: turn.username.clone(),
|
||||
credential: turn.credential.clone(),
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
|
||||
// Create peer connection configuration
|
||||
let rtc_config = RTCConfiguration {
|
||||
ice_servers,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
// Create peer connection
|
||||
let pc = api
|
||||
.new_peer_connection(rtc_config)
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to create peer connection: {}", e)))?;
|
||||
|
||||
let pc = Arc::new(pc);
|
||||
|
||||
// Create state channel
|
||||
let (state_tx, state_rx) = watch::channel(ConnectionState::New);
|
||||
|
||||
let peer_connection = Self {
|
||||
session_id,
|
||||
pc,
|
||||
video_track: None,
|
||||
data_channel: Arc::new(RwLock::new(None)),
|
||||
state: Arc::new(state_tx),
|
||||
state_rx,
|
||||
ice_candidates: Arc::new(Mutex::new(vec![])),
|
||||
hid_controller: None,
|
||||
};
|
||||
|
||||
// Set up event handlers
|
||||
peer_connection.setup_event_handlers().await;
|
||||
|
||||
Ok(peer_connection)
|
||||
}
|
||||
|
||||
/// Set up peer connection event handlers
|
||||
async fn setup_event_handlers(&self) {
|
||||
let state = self.state.clone();
|
||||
let session_id = self.session_id.clone();
|
||||
|
||||
// Connection state change handler
|
||||
self.pc.on_peer_connection_state_change(Box::new(move |s: RTCPeerConnectionState| {
|
||||
let state = state.clone();
|
||||
let session_id = session_id.clone();
|
||||
|
||||
Box::pin(async move {
|
||||
let new_state = match s {
|
||||
RTCPeerConnectionState::New => ConnectionState::New,
|
||||
RTCPeerConnectionState::Connecting => ConnectionState::Connecting,
|
||||
RTCPeerConnectionState::Connected => ConnectionState::Connected,
|
||||
RTCPeerConnectionState::Disconnected => ConnectionState::Disconnected,
|
||||
RTCPeerConnectionState::Failed => ConnectionState::Failed,
|
||||
RTCPeerConnectionState::Closed => ConnectionState::Closed,
|
||||
_ => return,
|
||||
};
|
||||
|
||||
info!("Peer {} connection state: {}", session_id, new_state);
|
||||
let _ = state.send(new_state);
|
||||
})
|
||||
}));
|
||||
|
||||
// ICE candidate handler
|
||||
let ice_candidates = self.ice_candidates.clone();
|
||||
self.pc.on_ice_candidate(Box::new(move |candidate: Option<RTCIceCandidate>| {
|
||||
let ice_candidates = ice_candidates.clone();
|
||||
|
||||
Box::pin(async move {
|
||||
if let Some(c) = candidate {
|
||||
let candidate_str = c.to_json()
|
||||
.map(|j| j.candidate)
|
||||
.unwrap_or_default();
|
||||
|
||||
debug!("ICE candidate: {}", candidate_str);
|
||||
|
||||
let mut candidates = ice_candidates.lock().await;
|
||||
candidates.push(IceCandidate {
|
||||
candidate: candidate_str,
|
||||
sdp_mid: c.to_json().ok().and_then(|j| j.sdp_mid),
|
||||
sdp_mline_index: c.to_json().ok().and_then(|j| j.sdp_mline_index),
|
||||
username_fragment: None,
|
||||
});
|
||||
}
|
||||
})
|
||||
}));
|
||||
|
||||
// Data channel handler - note: HID processing is done when hid_controller is set
|
||||
let data_channel = self.data_channel.clone();
|
||||
self.pc.on_data_channel(Box::new(move |dc: Arc<RTCDataChannel>| {
|
||||
let data_channel = data_channel.clone();
|
||||
|
||||
Box::pin(async move {
|
||||
info!("Data channel opened: {}", dc.label());
|
||||
|
||||
// Store data channel
|
||||
*data_channel.write().await = Some(dc.clone());
|
||||
|
||||
// Message handler logs messages; HID processing requires set_hid_controller()
|
||||
dc.on_message(Box::new(move |msg: DataChannelMessage| {
|
||||
debug!("DataChannel message: {} bytes", msg.data.len());
|
||||
Box::pin(async {})
|
||||
}));
|
||||
})
|
||||
}));
|
||||
}
|
||||
|
||||
/// Set HID controller for processing DataChannel messages
|
||||
pub fn set_hid_controller(&mut self, hid: Arc<HidController>) {
|
||||
let hid_clone = hid.clone();
|
||||
let data_channel = self.data_channel.clone();
|
||||
|
||||
// Set up message handler with HID processing
|
||||
let pc = self.pc.clone();
|
||||
pc.on_data_channel(Box::new(move |dc: Arc<RTCDataChannel>| {
|
||||
let data_channel = data_channel.clone();
|
||||
let hid = hid_clone.clone();
|
||||
|
||||
Box::pin(async move {
|
||||
info!("Data channel opened with HID support: {}", dc.label());
|
||||
|
||||
// Store data channel
|
||||
*data_channel.write().await = Some(dc.clone());
|
||||
|
||||
// Set up message handler with HID processing
|
||||
dc.on_message(Box::new(move |msg: DataChannelMessage| {
|
||||
let hid = hid.clone();
|
||||
|
||||
Box::pin(async move {
|
||||
debug!("DataChannel HID message: {} bytes", msg.data.len());
|
||||
|
||||
// Parse and process HID message
|
||||
if let Some(event) = parse_hid_message(&msg.data) {
|
||||
match event {
|
||||
HidChannelEvent::Keyboard(kb_event) => {
|
||||
if let Err(e) = hid.send_keyboard(kb_event).await {
|
||||
debug!("Failed to send keyboard event: {}", e);
|
||||
}
|
||||
}
|
||||
HidChannelEvent::Mouse(ms_event) => {
|
||||
if let Err(e) = hid.send_mouse(ms_event).await {
|
||||
debug!("Failed to send mouse event: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}));
|
||||
})
|
||||
}));
|
||||
|
||||
self.hid_controller = Some(hid);
|
||||
}
|
||||
|
||||
/// Add video track to the connection
|
||||
pub async fn add_video_track(&mut self, config: VideoTrackConfig) -> Result<()> {
|
||||
let video_track = VideoTrack::new(config);
|
||||
|
||||
// Add track to peer connection
|
||||
self.pc
|
||||
.add_track(video_track.rtp_track())
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to add video track: {}", e)))?;
|
||||
|
||||
self.video_track = Some(video_track);
|
||||
info!("Video track added to peer connection");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create data channel for HID events
|
||||
pub async fn create_data_channel(&self, label: &str) -> Result<()> {
|
||||
let dc = self
|
||||
.pc
|
||||
.create_data_channel(label, None)
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to create data channel: {}", e)))?;
|
||||
|
||||
*self.data_channel.write().await = Some(dc);
|
||||
info!("Data channel '{}' created", label);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Handle SDP offer and create answer
|
||||
pub async fn handle_offer(&self, offer: SdpOffer) -> Result<SdpAnswer> {
|
||||
// Parse and set remote description
|
||||
let sdp = RTCSessionDescription::offer(offer.sdp)
|
||||
.map_err(|e| AppError::VideoError(format!("Invalid SDP offer: {}", e)))?;
|
||||
|
||||
self.pc
|
||||
.set_remote_description(sdp)
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to set remote description: {}", e)))?;
|
||||
|
||||
// Create answer
|
||||
let answer = self
|
||||
.pc
|
||||
.create_answer(None)
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to create answer: {}", e)))?;
|
||||
|
||||
// Set local description
|
||||
self.pc
|
||||
.set_local_description(answer.clone())
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to set local description: {}", e)))?;
|
||||
|
||||
// Wait a bit for ICE candidates to gather
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
|
||||
|
||||
// Get gathered ICE candidates
|
||||
let candidates = self.ice_candidates.lock().await.clone();
|
||||
|
||||
Ok(SdpAnswer::with_candidates(answer.sdp, candidates))
|
||||
}
|
||||
|
||||
/// Add ICE candidate
|
||||
pub async fn add_ice_candidate(&self, candidate: IceCandidate) -> Result<()> {
|
||||
use webrtc::ice_transport::ice_candidate::RTCIceCandidateInit;
|
||||
|
||||
let init = RTCIceCandidateInit {
|
||||
candidate: candidate.candidate,
|
||||
sdp_mid: candidate.sdp_mid,
|
||||
sdp_mline_index: candidate.sdp_mline_index,
|
||||
username_fragment: candidate.username_fragment,
|
||||
};
|
||||
|
||||
self.pc
|
||||
.add_ice_candidate(init)
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to add ICE candidate: {}", e)))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get current connection state
|
||||
pub fn state(&self) -> ConnectionState {
|
||||
*self.state_rx.borrow()
|
||||
}
|
||||
|
||||
/// Subscribe to state changes
|
||||
pub fn state_watch(&self) -> watch::Receiver<ConnectionState> {
|
||||
self.state_rx.clone()
|
||||
}
|
||||
|
||||
/// Start sending video frames
|
||||
pub async fn start_video(&self, frame_rx: broadcast::Receiver<VideoFrame>) {
|
||||
if let Some(ref track) = self.video_track {
|
||||
track.start_sending(frame_rx).await;
|
||||
}
|
||||
}
|
||||
|
||||
/// Send HID data via data channel
|
||||
pub async fn send_hid_data(&self, data: &[u8]) -> Result<()> {
|
||||
let dc = self.data_channel.read().await;
|
||||
|
||||
if let Some(ref channel) = *dc {
|
||||
channel
|
||||
.send(&bytes::Bytes::copy_from_slice(data))
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to send HID data: {}", e)))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Close the connection
|
||||
pub async fn close(&self) -> Result<()> {
|
||||
if let Some(ref track) = self.video_track {
|
||||
track.stop();
|
||||
}
|
||||
|
||||
self.pc
|
||||
.close()
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to close peer connection: {}", e)))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get session ID
|
||||
pub fn session_id(&self) -> &str {
|
||||
&self.session_id
|
||||
}
|
||||
}
|
||||
|
||||
/// Manager for multiple peer connections
///
/// Owns the shared frame broadcast channel: every frame pushed through
/// `frame_sender()` is fanned out to all connected peers.
pub struct PeerConnectionManager {
    /// WebRTC configuration applied to every newly created peer
    config: WebRtcConfig,
    /// Active peer connections
    peers: Arc<RwLock<Vec<Arc<Mutex<PeerConnection>>>>>,
    /// Frame broadcast sender (to distribute to all peers)
    frame_tx: broadcast::Sender<VideoFrame>,
    /// HID controller for DataChannel HID processing (optional)
    hid_controller: Option<Arc<HidController>>,
}
|
||||
|
||||
impl PeerConnectionManager {
|
||||
/// Create a new peer connection manager
|
||||
pub fn new(config: WebRtcConfig) -> Self {
|
||||
let (frame_tx, _) = broadcast::channel(16);
|
||||
|
||||
Self {
|
||||
config,
|
||||
peers: Arc::new(RwLock::new(vec![])),
|
||||
frame_tx,
|
||||
hid_controller: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new peer connection manager with HID controller
|
||||
pub fn with_hid(config: WebRtcConfig, hid: Arc<HidController>) -> Self {
|
||||
let (frame_tx, _) = broadcast::channel(16);
|
||||
|
||||
Self {
|
||||
config,
|
||||
peers: Arc::new(RwLock::new(vec![])),
|
||||
frame_tx,
|
||||
hid_controller: Some(hid),
|
||||
}
|
||||
}
|
||||
|
||||
/// Set HID controller
|
||||
pub fn set_hid_controller(&mut self, hid: Arc<HidController>) {
|
||||
self.hid_controller = Some(hid);
|
||||
}
|
||||
|
||||
/// Create a new peer connection
|
||||
pub async fn create_peer(&self) -> Result<Arc<Mutex<PeerConnection>>> {
|
||||
let session_id = uuid::Uuid::new_v4().to_string();
|
||||
let mut peer = PeerConnection::new(&self.config, session_id).await?;
|
||||
|
||||
// Add video track
|
||||
peer.add_video_track(VideoTrackConfig::default()).await?;
|
||||
|
||||
// Create data channel and set HID controller
|
||||
if self.config.enable_datachannel {
|
||||
peer.create_data_channel("hid").await?;
|
||||
|
||||
// Set HID controller if available
|
||||
if let Some(ref hid) = self.hid_controller {
|
||||
peer.set_hid_controller(hid.clone());
|
||||
}
|
||||
}
|
||||
|
||||
let peer = Arc::new(Mutex::new(peer));
|
||||
|
||||
// Add to peers list
|
||||
self.peers.write().await.push(peer.clone());
|
||||
|
||||
// Start sending video when connected
|
||||
let frame_rx = self.frame_tx.subscribe();
|
||||
let peer_clone = peer.clone();
|
||||
tokio::spawn(async move {
|
||||
let peer = peer_clone.lock().await;
|
||||
let mut state_rx = peer.state_watch();
|
||||
drop(peer);
|
||||
|
||||
// Wait for connection
|
||||
while state_rx.changed().await.is_ok() {
|
||||
if *state_rx.borrow() == ConnectionState::Connected {
|
||||
let peer = peer_clone.lock().await;
|
||||
peer.start_video(frame_rx).await;
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Ok(peer)
|
||||
}
|
||||
|
||||
/// Get frame sender (for video streamer to push frames)
|
||||
pub fn frame_sender(&self) -> broadcast::Sender<VideoFrame> {
|
||||
self.frame_tx.clone()
|
||||
}
|
||||
|
||||
/// Remove closed connections
|
||||
pub async fn cleanup(&self) {
|
||||
let mut peers = self.peers.write().await;
|
||||
let mut to_remove = vec![];
|
||||
|
||||
for (i, peer) in peers.iter().enumerate() {
|
||||
let p = peer.lock().await;
|
||||
if matches!(p.state(), ConnectionState::Closed | ConnectionState::Failed) {
|
||||
to_remove.push(i);
|
||||
}
|
||||
}
|
||||
|
||||
for i in to_remove.into_iter().rev() {
|
||||
peers.remove(i);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get active peer count
|
||||
pub async fn peer_count(&self) -> usize {
|
||||
self.peers.read().await.len()
|
||||
}
|
||||
|
||||
/// Close all connections
|
||||
pub async fn close_all(&self) {
|
||||
let peers = self.peers.read().await;
|
||||
for peer in peers.iter() {
|
||||
let p = peer.lock().await;
|
||||
let _ = p.close().await;
|
||||
}
|
||||
}
|
||||
}
|
||||
737
src/webrtc/rtp.rs
Normal file
737
src/webrtc/rtp.rs
Normal file
@@ -0,0 +1,737 @@
|
||||
//! RTP packetization for H264 video
|
||||
//!
|
||||
//! This module provides H264 RTP packetization using the rtp crate's H264Payloader.
|
||||
//! It handles:
|
||||
//! - NAL unit parsing (Annex B start codes)
|
||||
//! - SPS/PPS collection and STAP-A packetization
|
||||
//! - Single NAL unit mode for small NALs
|
||||
//! - FU-A fragmentation for large NALs
|
||||
//!
|
||||
//! IMPORTANT: Each NAL unit must be sent separately via write_sample(),
|
||||
//! without Annex B start codes. The TrackLocalStaticSample handles
|
||||
//! RTP packetization internally.
|
||||
|
||||
use bytes::Bytes;
|
||||
use rtp::codecs::h264::H264Payloader;
|
||||
use rtp::packetizer::Payloader;
|
||||
use std::io::Cursor;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tokio::sync::Mutex;
|
||||
use tracing::{debug, error, trace};
|
||||
use webrtc::media::io::h264_reader::H264Reader;
|
||||
use webrtc::media::Sample;
|
||||
use webrtc::track::track_local::track_local_static_sample::TrackLocalStaticSample;
|
||||
use webrtc::track::track_local::TrackLocal;
|
||||
use webrtc::rtp_transceiver::rtp_codec::RTCRtpCodecCapability;
|
||||
|
||||
use crate::error::{AppError, Result};
|
||||
use crate::video::format::Resolution;
|
||||
|
||||
/// Default MTU for RTP packets (conservative for most networks)
|
||||
pub const RTP_MTU: usize = 1200;
|
||||
|
||||
/// H264 clock rate (always 90kHz per RFC 6184)
|
||||
pub const H264_CLOCK_RATE: u32 = 90000;
|
||||
|
||||
/// H264 video track using TrackLocalStaticSample for proper packetization
|
||||
pub struct H264VideoTrack {
|
||||
/// The underlying WebRTC track with automatic packetization
|
||||
track: Arc<TrackLocalStaticSample>,
|
||||
/// Track configuration
|
||||
config: H264VideoTrackConfig,
|
||||
/// H264 payloader for manual packetization (if needed)
|
||||
payloader: Mutex<H264Payloader>,
|
||||
/// Statistics
|
||||
stats: Mutex<H264TrackStats>,
|
||||
/// Cached SPS NAL unit for injection before IDR frames
|
||||
/// Some hardware encoders don't repeat SPS/PPS with every keyframe
|
||||
cached_sps: Mutex<Option<Bytes>>,
|
||||
/// Cached PPS NAL unit for injection before IDR frames
|
||||
cached_pps: Mutex<Option<Bytes>>,
|
||||
}
|
||||
|
||||
/// H264 video track configuration
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct H264VideoTrackConfig {
|
||||
/// Track ID
|
||||
pub track_id: String,
|
||||
/// Stream ID
|
||||
pub stream_id: String,
|
||||
/// Resolution
|
||||
pub resolution: Resolution,
|
||||
/// Target bitrate in kbps
|
||||
pub bitrate_kbps: u32,
|
||||
/// Frames per second
|
||||
pub fps: u32,
|
||||
/// H.264 profile-level-id (e.g., "42001f" for Baseline L3.1, "64001f" for High L3.1)
|
||||
/// If None, uses empty string to let browser negotiate
|
||||
/// Format: PPCCLL where PP=profile_idc, CC=constraint_flags, LL=level_idc
|
||||
pub profile_level_id: Option<String>,
|
||||
}
|
||||
|
||||
impl Default for H264VideoTrackConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
track_id: "video0".to_string(),
|
||||
stream_id: "one-kvm-stream".to_string(),
|
||||
resolution: Resolution::HD720,
|
||||
bitrate_kbps: 8000,
|
||||
fps: 30,
|
||||
profile_level_id: None, // Let browser negotiate
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// H264 track statistics
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct H264TrackStats {
|
||||
/// Frames sent
|
||||
pub frames_sent: u64,
|
||||
/// Bytes sent
|
||||
pub bytes_sent: u64,
|
||||
/// Packets sent (RTP packets)
|
||||
pub packets_sent: u64,
|
||||
/// Key frames sent
|
||||
pub keyframes_sent: u64,
|
||||
/// Errors encountered
|
||||
pub errors: u64,
|
||||
}
|
||||
|
||||
impl H264VideoTrack {
|
||||
/// Create a new H264 video track
|
||||
///
|
||||
/// If `config.profile_level_id` is set, it will be used in SDP negotiation.
|
||||
/// Otherwise, uses empty fmtp line to let browser negotiate the best profile.
|
||||
pub fn new(config: H264VideoTrackConfig) -> Self {
|
||||
// Build sdp_fmtp_line based on profile_level_id
|
||||
let sdp_fmtp_line = if let Some(ref profile_level_id) = config.profile_level_id {
|
||||
// Use specified profile-level-id
|
||||
format!(
|
||||
"level-asymmetry-allowed=1;packetization-mode=1;profile-level-id={}",
|
||||
profile_level_id
|
||||
)
|
||||
} else {
|
||||
// Let browser negotiate - empty string for maximum compatibility
|
||||
String::new()
|
||||
};
|
||||
|
||||
let codec = RTCRtpCodecCapability {
|
||||
mime_type: "video/H264".to_string(),
|
||||
clock_rate: H264_CLOCK_RATE,
|
||||
channels: 0,
|
||||
sdp_fmtp_line,
|
||||
rtcp_feedback: vec![],
|
||||
};
|
||||
|
||||
let track = Arc::new(TrackLocalStaticSample::new(
|
||||
codec,
|
||||
config.track_id.clone(),
|
||||
config.stream_id.clone(),
|
||||
));
|
||||
|
||||
Self {
|
||||
track,
|
||||
config,
|
||||
payloader: Mutex::new(H264Payloader::default()),
|
||||
stats: Mutex::new(H264TrackStats::default()),
|
||||
cached_sps: Mutex::new(None),
|
||||
cached_pps: Mutex::new(None),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the underlying WebRTC track for adding to peer connection
|
||||
pub fn track(&self) -> Arc<TrackLocalStaticSample> {
|
||||
self.track.clone()
|
||||
}
|
||||
|
||||
/// Get track as TrackLocal for peer connection
|
||||
pub fn as_track_local(&self) -> Arc<dyn TrackLocal + Send + Sync> {
|
||||
self.track.clone()
|
||||
}
|
||||
|
||||
/// Get current statistics
|
||||
pub async fn stats(&self) -> H264TrackStats {
|
||||
self.stats.lock().await.clone()
|
||||
}
|
||||
|
||||
/// Write an H264 encoded frame to the track
|
||||
///
|
||||
/// The frame data should be H264 Annex B format (with start codes 0x00000001 or 0x000001).
|
||||
/// This is the format produced by hwcodec/FFmpeg encoders.
|
||||
///
|
||||
/// IMPORTANT: Each NAL unit is sent separately via write_sample(), without start codes.
|
||||
/// This is required for proper WebRTC RTP packetization.
|
||||
/// See: https://github.com/webrtc-rs/webrtc/blob/master/examples/examples/play-from-disk-h264/
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `data` - H264 Annex B encoded frame data
|
||||
/// * `duration` - Frame duration (typically 1/fps seconds)
|
||||
/// * `is_keyframe` - Whether this is a keyframe (IDR frame)
|
||||
pub async fn write_frame(&self, data: &[u8], _duration: Duration, is_keyframe: bool) -> Result<()> {
|
||||
if data.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Use H264Reader to parse NAL units from Annex B data
|
||||
let cursor = Cursor::new(data);
|
||||
let mut h264_reader = H264Reader::new(cursor, 1024 * 1024);
|
||||
|
||||
// Collect all NAL units first to check for SPS/PPS presence
|
||||
let mut nals: Vec<Bytes> = Vec::new();
|
||||
let mut has_sps = false;
|
||||
let mut has_pps = false;
|
||||
let mut has_idr = false;
|
||||
|
||||
// Send each NAL unit separately (like official webrtc-rs example)
|
||||
// H264Reader returns NAL data WITHOUT start codes - this is what we need
|
||||
while let Ok(nal) = h264_reader.next_nal() {
|
||||
if nal.data.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let nal_type = nal.data[0] & 0x1F;
|
||||
|
||||
// Skip AUD NAL units (type 9) - not needed for WebRTC
|
||||
if nal_type == 9 {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip filler data (type 12)
|
||||
if nal_type == 12 {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Track NAL types
|
||||
match nal_type {
|
||||
5 => has_idr = true,
|
||||
7 => {
|
||||
has_sps = true;
|
||||
// Cache SPS for future injection
|
||||
*self.cached_sps.lock().await = Some(nal.data.clone().freeze());
|
||||
}
|
||||
8 => {
|
||||
has_pps = true;
|
||||
// Cache PPS for future injection
|
||||
*self.cached_pps.lock().await = Some(nal.data.clone().freeze());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
trace!(
|
||||
"Sending NAL: type={} ({}) size={} bytes",
|
||||
nal_type,
|
||||
match nal_type {
|
||||
1 => "Non-IDR slice",
|
||||
5 => "IDR slice",
|
||||
6 => "SEI",
|
||||
7 => "SPS",
|
||||
8 => "PPS",
|
||||
_ => "Other",
|
||||
},
|
||||
nal.data.len()
|
||||
);
|
||||
|
||||
nals.push(nal.data.freeze());
|
||||
}
|
||||
|
||||
// Inject cached SPS/PPS before IDR if missing
|
||||
// This is critical for hardware encoders that don't repeat SPS/PPS
|
||||
if has_idr && (!has_sps || !has_pps) {
|
||||
let mut injected_nals: Vec<Bytes> = Vec::new();
|
||||
|
||||
if !has_sps {
|
||||
if let Some(sps) = self.cached_sps.lock().await.clone() {
|
||||
debug!("Injecting cached SPS before IDR frame");
|
||||
injected_nals.push(sps);
|
||||
}
|
||||
}
|
||||
if !has_pps {
|
||||
if let Some(pps) = self.cached_pps.lock().await.clone() {
|
||||
debug!("Injecting cached PPS before IDR frame");
|
||||
injected_nals.push(pps);
|
||||
}
|
||||
}
|
||||
|
||||
if !injected_nals.is_empty() {
|
||||
injected_nals.extend(nals);
|
||||
nals = injected_nals;
|
||||
}
|
||||
}
|
||||
|
||||
let mut nal_count = 0;
|
||||
let mut total_bytes = 0u64;
|
||||
|
||||
// Send NAL data directly WITHOUT start codes
|
||||
// TrackLocalStaticSample handles RTP packetization internally
|
||||
// Use duration=1s for each NAL like official webrtc-rs example
|
||||
for nal_data in nals {
|
||||
let sample = Sample {
|
||||
data: nal_data.clone(),
|
||||
duration: Duration::from_secs(1), // Like official example
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
if let Err(e) = self.track.write_sample(&sample).await {
|
||||
// Only log periodically to avoid spam when no peer connected
|
||||
if nal_count % 100 == 0 {
|
||||
debug!("Write sample failed (no peer?): {}", e);
|
||||
}
|
||||
}
|
||||
|
||||
total_bytes += nal_data.len() as u64;
|
||||
nal_count += 1;
|
||||
}
|
||||
|
||||
// Update statistics
|
||||
if nal_count > 0 {
|
||||
let mut stats = self.stats.lock().await;
|
||||
stats.frames_sent += 1;
|
||||
stats.bytes_sent += total_bytes;
|
||||
if is_keyframe {
|
||||
stats.keyframes_sent += 1;
|
||||
}
|
||||
}
|
||||
|
||||
trace!(
|
||||
"Sent frame: {} NAL units, {} bytes, keyframe={}",
|
||||
nal_count,
|
||||
total_bytes,
|
||||
is_keyframe
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write frame with timestamp (for more precise timing control)
|
||||
pub async fn write_frame_with_timestamp(
|
||||
&self,
|
||||
data: &[u8],
|
||||
_pts_ms: u64,
|
||||
is_keyframe: bool,
|
||||
) -> Result<()> {
|
||||
// Convert pts from milliseconds to frame duration
|
||||
// Assuming constant frame rate from config
|
||||
let duration = Duration::from_millis(1000 / self.config.fps as u64);
|
||||
self.write_frame(data, duration, is_keyframe).await
|
||||
}
|
||||
|
||||
/// Manually packetize H264 data into RTP payloads
|
||||
///
|
||||
/// This is useful if you need direct control over RTP packets
|
||||
/// (e.g., for sending via TrackLocalStaticRTP instead of TrackLocalStaticSample)
|
||||
pub async fn packetize(&self, data: &[u8], mtu: usize) -> Result<Vec<Bytes>> {
|
||||
let mut payloader = self.payloader.lock().await;
|
||||
let bytes = Bytes::copy_from_slice(data);
|
||||
|
||||
payloader.payload(mtu, &bytes).map_err(|e| {
|
||||
AppError::VideoError(format!("H264 packetization failed: {}", e))
|
||||
})
|
||||
}
|
||||
|
||||
/// Get configuration
|
||||
pub fn config(&self) -> &H264VideoTrackConfig {
|
||||
&self.config
|
||||
}
|
||||
}
|
||||
|
||||
/// Opus audio track using TrackLocalStaticSample
|
||||
pub struct OpusAudioTrack {
|
||||
/// The underlying WebRTC track
|
||||
track: Arc<TrackLocalStaticSample>,
|
||||
/// Statistics
|
||||
stats: Mutex<OpusTrackStats>,
|
||||
}
|
||||
|
||||
/// Opus track statistics
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct OpusTrackStats {
|
||||
/// Packets sent
|
||||
pub packets_sent: u64,
|
||||
/// Bytes sent
|
||||
pub bytes_sent: u64,
|
||||
/// Errors
|
||||
pub errors: u64,
|
||||
}
|
||||
|
||||
impl OpusAudioTrack {
|
||||
/// Create a new Opus audio track
|
||||
pub fn new(track_id: &str, stream_id: &str) -> Self {
|
||||
let codec = RTCRtpCodecCapability {
|
||||
mime_type: "audio/opus".to_string(),
|
||||
clock_rate: 48000,
|
||||
channels: 2,
|
||||
sdp_fmtp_line: "minptime=10;useinbandfec=1".to_string(),
|
||||
rtcp_feedback: vec![],
|
||||
};
|
||||
|
||||
let track = Arc::new(TrackLocalStaticSample::new(
|
||||
codec,
|
||||
track_id.to_string(),
|
||||
stream_id.to_string(),
|
||||
));
|
||||
|
||||
Self {
|
||||
track,
|
||||
stats: Mutex::new(OpusTrackStats::default()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the underlying WebRTC track
|
||||
pub fn track(&self) -> Arc<TrackLocalStaticSample> {
|
||||
self.track.clone()
|
||||
}
|
||||
|
||||
/// Get track as TrackLocal
|
||||
pub fn as_track_local(&self) -> Arc<dyn TrackLocal + Send + Sync> {
|
||||
self.track.clone()
|
||||
}
|
||||
|
||||
/// Get statistics
|
||||
pub async fn stats(&self) -> OpusTrackStats {
|
||||
self.stats.lock().await.clone()
|
||||
}
|
||||
|
||||
/// Write Opus encoded audio data
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `data` - Opus encoded packet
|
||||
/// * `samples` - Number of audio samples in this packet (typically 960 for 20ms at 48kHz)
|
||||
pub async fn write_packet(&self, data: &[u8], samples: u32) -> Result<()> {
|
||||
if data.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Opus frame duration based on samples
|
||||
// 48000 Hz, so duration = samples / 48000 seconds
|
||||
let duration = Duration::from_micros((samples as u64 * 1_000_000) / 48000);
|
||||
|
||||
let sample = Sample {
|
||||
data: Bytes::copy_from_slice(data),
|
||||
duration,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
match self.track.write_sample(&sample).await {
|
||||
Ok(_) => {
|
||||
let mut stats = self.stats.lock().await;
|
||||
stats.packets_sent += 1;
|
||||
stats.bytes_sent += data.len() as u64;
|
||||
Ok(())
|
||||
}
|
||||
Err(e) => {
|
||||
let mut stats = self.stats.lock().await;
|
||||
stats.errors += 1;
|
||||
error!("Failed to write Opus sample: {}", e);
|
||||
Err(AppError::WebRtcError(format!("Failed to write audio sample: {}", e)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Strip AUD (Access Unit Delimiter) NAL units from H264 Annex B data
///
/// AUD (NAL type 9) can cause decoding issues in some browser WebRTC
/// implementations; filler data (NAL type 12) is removed as well. All
/// other NAL types — including SEI (6), SPS (7), PPS (8), IDR (5) and
/// non-IDR slices (1) — are kept. (The previous doc comment claimed SEI
/// was also stripped, which the code never did.)
///
/// As a safety net, if stripping would leave nothing (e.g. the input only
/// contained AUD/filler units, or no start code was found), the original
/// data is returned unchanged.
pub fn strip_aud_nal_units(data: &[u8]) -> Vec<u8> {
    /// NAL types removed from the stream: AUD (9) and filler (12).
    fn is_stripped(nal_type: u8) -> bool {
        nal_type == 9 || nal_type == 12
    }

    /// Locate the next Annex B start code at or after `from`.
    /// Returns `(start_code_pos, start_code_len)`.
    fn next_start_code(data: &[u8], from: usize) -> Option<(usize, usize)> {
        let mut i = from;
        while i + 3 <= data.len() {
            if data[i] == 0 && data[i + 1] == 0 {
                if i + 4 <= data.len() && data[i + 2] == 0 && data[i + 3] == 1 {
                    return Some((i, 4));
                }
                if data[i + 2] == 1 {
                    return Some((i, 3));
                }
            }
            i += 1;
        }
        None
    }

    let mut result = Vec::with_capacity(data.len());
    let mut pos = 0;

    while let Some((sc_pos, sc_len)) = next_start_code(data, pos) {
        let nal_start = sc_pos + sc_len;
        if nal_start >= data.len() {
            break;
        }

        // The NAL unit ends at the next start code (or at end of data)
        let nal_end = next_start_code(data, nal_start + 1)
            .map(|(p, _)| p)
            .unwrap_or(data.len());

        let nal_type = data[nal_start] & 0x1F;
        if !is_stripped(nal_type) {
            // Keep this NAL unit, start code included
            result.extend_from_slice(&data[sc_pos..nal_end]);
        }

        pos = nal_end;
    }

    // If everything was stripped, fall back to the original data rather
    // than emitting an empty frame.
    if result.is_empty() && !data.is_empty() {
        return data.to_vec();
    }

    result
}
|
||||
|
||||
/// Extract SPS and PPS NAL units from H264 Annex B data
///
/// Returns (SPS payload without start code, PPS payload without start code).
/// When a parameter set occurs more than once, the last occurrence wins.
pub fn extract_sps_pps(data: &[u8]) -> (Option<Vec<u8>>, Option<Vec<u8>>) {
    let mut sps: Option<Vec<u8>> = None;
    let mut pps: Option<Vec<u8>> = None;
    let mut i = 0;

    while i < data.len() {
        // Detect a 4-byte or 3-byte Annex B start code at `i`
        let start_code_len = if data[i..].starts_with(&[0, 0, 0, 1]) {
            4
        } else if data[i..].starts_with(&[0, 0, 1]) {
            3
        } else {
            i += 1;
            continue;
        };

        let nal_start = i + start_code_len;
        if nal_start >= data.len() {
            break;
        }

        // The NAL unit ends at the next start code (or at end of data)
        let nal_end = (nal_start + 1..data.len())
            .find(|&j| {
                data[j..].starts_with(&[0, 0, 1]) || data[j..].starts_with(&[0, 0, 0, 1])
            })
            .unwrap_or(data.len());

        // SPS is NAL type 7, PPS is NAL type 8; store payloads only
        match data[nal_start] & 0x1F {
            7 => sps = Some(data[nal_start..nal_end].to_vec()),
            8 => pps = Some(data[nal_start..nal_end].to_vec()),
            _ => {}
        }

        i = nal_end;
    }

    (sps, pps)
}
|
||||
|
||||
/// Check if H264 Annex B data contains both SPS and PPS NAL units
pub fn has_sps_pps(data: &[u8]) -> bool {
    let mut found_sps = false;
    let mut found_pps = false;
    let mut i = 0;

    while i < data.len() {
        // Detect a 4-byte or 3-byte Annex B start code at `i`
        let start_code_len = if data[i..].starts_with(&[0, 0, 0, 1]) {
            4
        } else if data[i..].starts_with(&[0, 0, 1]) {
            3
        } else {
            i += 1;
            continue;
        };

        let nal_start = i + start_code_len;
        if nal_start >= data.len() {
            break;
        }

        match data[nal_start] & 0x1F {
            7 => found_sps = true,
            8 => found_pps = true,
            _ => {}
        }

        if found_sps && found_pps {
            return true;
        }

        // Resume scanning just past this NAL header; a presence check
        // does not need to locate the NAL unit's end.
        i = nal_start + 1;
    }

    found_sps && found_pps
}
|
||||
|
||||
/// Check if H264 data contains a keyframe (IDR NAL unit)
///
/// Scans Annex B start codes (0x000001 or 0x00000001) and returns true as
/// soon as a NAL unit of type 5 (IDR slice) is seen.
pub fn is_h264_keyframe(data: &[u8]) -> bool {
    let mut i = 0;

    // Need at least a 3-byte start code plus one NAL header byte
    while i + 4 <= data.len() {
        if data[i] != 0 || data[i + 1] != 0 {
            i += 1;
            continue;
        }

        let nal_start = if data[i + 2] == 1 {
            i + 3
        } else if i + 5 <= data.len() && data[i + 2] == 0 && data[i + 3] == 1 {
            i + 4
        } else {
            i += 1;
            continue;
        };

        // IDR = 5 (SPS = 7 and PPS = 8 do not count as keyframes)
        if data[nal_start] & 0x1F == 5 {
            return true;
        }

        i = nal_start;
    }

    false
}
|
||||
|
||||
/// Parse profile-level-id from SPS NAL unit data (without start code)
///
/// Returns a 6-character lowercase hex string like "42001f" (Baseline L3.1)
/// or "64001f" (High L3.1), or `None` when the SPS is too short.
///
/// SPS layout (first 4 bytes):
/// - Byte 0: NAL header (0x67 for SPS)
/// - Byte 1: profile_idc (0x42=Baseline, 0x4D=Main, 0x64=High)
/// - Byte 2: constraint_set_flags
/// - Byte 3: level_idc (0x1f=3.1, 0x28=4.0, 0x33=5.1)
pub fn parse_profile_level_id_from_sps(sps: &[u8]) -> Option<String> {
    match sps {
        // Skip the NAL header byte; the next three bytes form PPCCLL
        [_nal_header, profile_idc, constraints, level_idc, ..] => Some(format!(
            "{:02x}{:02x}{:02x}",
            profile_idc, constraints, level_idc
        )),
        _ => None,
    }
}
|
||||
|
||||
/// Extract profile-level-id from H264 Annex B data (containing SPS)
|
||||
///
|
||||
/// This function finds the SPS NAL unit and extracts the profile-level-id.
|
||||
/// Useful for determining the actual encoder output profile.
|
||||
///
|
||||
/// # Example
|
||||
/// ```ignore
|
||||
/// let h264_data = encoder.encode(&yuv)?;
|
||||
/// if let Some(profile_level_id) = extract_profile_level_id(&h264_data) {
|
||||
/// println!("Encoder outputs profile-level-id: {}", profile_level_id);
|
||||
/// // Use this to configure H264VideoTrackConfig
|
||||
/// }
|
||||
/// ```
|
||||
pub fn extract_profile_level_id(data: &[u8]) -> Option<String> {
|
||||
let (sps, _) = extract_sps_pps(data);
|
||||
sps.and_then(|sps_data| parse_profile_level_id_from_sps(&sps_data))
|
||||
}
|
||||
|
||||
/// Common H.264 profile-level-id values
///
/// Format is PPCCLL: profile_idc, constraint_set flags and level_idc,
/// each as two lowercase hex digits.
pub mod profiles {
    /// Constrained Baseline Profile Level 3.1 - Maximum browser compatibility
    pub const CONSTRAINED_BASELINE_31: &str = "42e01f";
    /// Baseline Profile Level 3.1
    pub const BASELINE_31: &str = "42001f";
    /// Main Profile Level 3.1
    pub const MAIN_31: &str = "4d001f";
    /// High Profile Level 3.1 - Hardware encoders typically output this
    pub const HIGH_31: &str = "64001f";
    /// High Profile Level 4.0
    pub const HIGH_40: &str = "640028";
    /// High Profile Level 5.1
    pub const HIGH_51: &str = "640033";
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_is_h264_keyframe() {
        // IDR frame with 4-byte start code
        let idr_frame = vec![0x00, 0x00, 0x00, 0x01, 0x65]; // NAL type 5 = IDR
        assert!(is_h264_keyframe(&idr_frame));

        // IDR frame with 3-byte start code
        let idr_frame_3 = vec![0x00, 0x00, 0x01, 0x65];
        assert!(is_h264_keyframe(&idr_frame_3));

        // Non-IDR frame (P-frame, NAL type 1)
        let p_frame = vec![0x00, 0x00, 0x00, 0x01, 0x41];
        assert!(!is_h264_keyframe(&p_frame));

        // SPS (NAL type 7) - not a keyframe by itself
        let sps = vec![0x00, 0x00, 0x00, 0x01, 0x67];
        assert!(!is_h264_keyframe(&sps));

        // Multiple NAL units with IDR
        let multi_nal = vec![
            0x00, 0x00, 0x00, 0x01, 0x67, 0x42, 0x00, 0x1f, // SPS
            0x00, 0x00, 0x00, 0x01, 0x68, 0xce, 0x38, 0x80, // PPS
            0x00, 0x00, 0x00, 0x01, 0x65, 0x88, 0x84, // IDR
        ];
        assert!(is_h264_keyframe(&multi_nal));
    }

    #[test]
    fn test_h264_track_config_default() {
        let config = H264VideoTrackConfig::default();
        assert_eq!(config.fps, 30);
        // BUGFIX: `H264VideoTrackConfig::default` sets bitrate_kbps to
        // 8000; this assertion previously expected a stale value of 2000
        // and would always fail.
        assert_eq!(config.bitrate_kbps, 8000);
        assert_eq!(config.resolution, Resolution::HD720);
    }
}
|
||||
196
src/webrtc/session.rs
Normal file
196
src/webrtc/session.rs
Normal file
@@ -0,0 +1,196 @@
|
||||
//! WebRTC session management
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
use tracing::{debug, info};
|
||||
|
||||
use super::config::WebRtcConfig;
|
||||
use super::peer::PeerConnection;
|
||||
use super::signaling::{IceCandidate, SdpAnswer, SdpOffer};
|
||||
use crate::error::{AppError, Result};
|
||||
|
||||
/// Maximum concurrent WebRTC sessions
|
||||
const MAX_SESSIONS: usize = 8;
|
||||
|
||||
/// WebRTC session info
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SessionInfo {
|
||||
pub session_id: String,
|
||||
pub created_at: std::time::Instant,
|
||||
pub state: String,
|
||||
}
|
||||
|
||||
/// WebRTC session manager
|
||||
pub struct WebRtcSessionManager {
|
||||
config: WebRtcConfig,
|
||||
sessions: Arc<RwLock<HashMap<String, Arc<PeerConnection>>>>,
|
||||
}
|
||||
|
||||
impl WebRtcSessionManager {
|
||||
/// Create a new session manager
|
||||
pub fn new(config: WebRtcConfig) -> Self {
|
||||
Self {
|
||||
config,
|
||||
sessions: Arc::new(RwLock::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create with default config
|
||||
pub fn default_config() -> Self {
|
||||
Self::new(WebRtcConfig::default())
|
||||
}
|
||||
|
||||
/// Create a new WebRTC session
|
||||
pub async fn create_session(&self) -> Result<String> {
|
||||
let sessions = self.sessions.read().await;
|
||||
|
||||
// Check session limit
|
||||
if sessions.len() >= MAX_SESSIONS {
|
||||
return Err(AppError::WebRtcError(format!(
|
||||
"Maximum sessions ({}) reached",
|
||||
MAX_SESSIONS
|
||||
)));
|
||||
}
|
||||
drop(sessions);
|
||||
|
||||
// Generate session ID
|
||||
let session_id = uuid::Uuid::new_v4().to_string();
|
||||
|
||||
// Create new peer connection
|
||||
let pc = PeerConnection::new(&self.config, session_id.clone()).await?;
|
||||
|
||||
// Store session
|
||||
let mut sessions = self.sessions.write().await;
|
||||
sessions.insert(session_id.clone(), Arc::new(pc));
|
||||
|
||||
info!("WebRTC session created: {}", session_id);
|
||||
Ok(session_id)
|
||||
}
|
||||
|
||||
/// Handle SDP offer and return answer
|
||||
pub async fn handle_offer(&self, session_id: &str, offer: SdpOffer) -> Result<SdpAnswer> {
|
||||
let sessions = self.sessions.read().await;
|
||||
let pc = sessions
|
||||
.get(session_id)
|
||||
.ok_or_else(|| AppError::NotFound(format!("Session not found: {}", session_id)))?
|
||||
.clone();
|
||||
drop(sessions);
|
||||
|
||||
pc.handle_offer(offer).await
|
||||
}
|
||||
|
||||
/// Add ICE candidate
|
||||
pub async fn add_ice_candidate(&self, session_id: &str, candidate: IceCandidate) -> Result<()> {
|
||||
let sessions = self.sessions.read().await;
|
||||
let pc = sessions
|
||||
.get(session_id)
|
||||
.ok_or_else(|| AppError::NotFound(format!("Session not found: {}", session_id)))?
|
||||
.clone();
|
||||
drop(sessions);
|
||||
|
||||
pc.add_ice_candidate(candidate).await
|
||||
}
|
||||
|
||||
/// Get session info
|
||||
pub async fn get_session(&self, session_id: &str) -> Option<SessionInfo> {
|
||||
let sessions = self.sessions.read().await;
|
||||
sessions.get(session_id).map(|pc| SessionInfo {
|
||||
session_id: pc.session_id.clone(),
|
||||
created_at: std::time::Instant::now(), // TODO: store actual time
|
||||
state: format!("{:?}", pc.state()),
|
||||
})
|
||||
}
|
||||
|
||||
/// Close a session
|
||||
pub async fn close_session(&self, session_id: &str) -> Result<()> {
|
||||
let mut sessions = self.sessions.write().await;
|
||||
|
||||
if let Some(pc) = sessions.remove(session_id) {
|
||||
info!("WebRTC session closed: {}", session_id);
|
||||
pc.close().await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// List all sessions
|
||||
pub async fn list_sessions(&self) -> Vec<SessionInfo> {
|
||||
let sessions = self.sessions.read().await;
|
||||
sessions
|
||||
.values()
|
||||
.map(|pc| SessionInfo {
|
||||
session_id: pc.session_id.clone(),
|
||||
created_at: std::time::Instant::now(),
|
||||
state: format!("{:?}", pc.state()),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Clean up disconnected sessions
|
||||
pub async fn cleanup_stale_sessions(&self) {
|
||||
let sessions_to_remove: Vec<String> = {
|
||||
let sessions = self.sessions.read().await;
|
||||
sessions
|
||||
.iter()
|
||||
.filter(|(_, pc)| {
|
||||
matches!(
|
||||
pc.state(),
|
||||
super::signaling::ConnectionState::Disconnected
|
||||
| super::signaling::ConnectionState::Failed
|
||||
| super::signaling::ConnectionState::Closed
|
||||
)
|
||||
})
|
||||
.map(|(id, _)| id.clone())
|
||||
.collect()
|
||||
};
|
||||
|
||||
if !sessions_to_remove.is_empty() {
|
||||
let mut sessions = self.sessions.write().await;
|
||||
for id in sessions_to_remove {
|
||||
debug!("Removing stale WebRTC session: {}", id);
|
||||
sessions.remove(&id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Get session count
|
||||
pub async fn session_count(&self) -> usize {
|
||||
self.sessions.read().await.len()
|
||||
}
|
||||
|
||||
/// Start video streaming to a session
|
||||
pub async fn start_video(&self, session_id: &str) -> Result<()> {
|
||||
let sessions = self.sessions.read().await;
|
||||
let _pc = sessions
|
||||
.get(session_id)
|
||||
.ok_or_else(|| AppError::NotFound(format!("Session not found: {}", session_id)))?
|
||||
.clone();
|
||||
drop(sessions);
|
||||
|
||||
// Video track should already be added during peer creation
|
||||
// This is a placeholder for additional video control logic
|
||||
info!("Video streaming started for session: {}", session_id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Stop video streaming to a session
|
||||
pub async fn stop_video(&self, session_id: &str) -> Result<()> {
|
||||
let sessions = self.sessions.read().await;
|
||||
let _pc = sessions
|
||||
.get(session_id)
|
||||
.ok_or_else(|| AppError::NotFound(format!("Session not found: {}", session_id)))?
|
||||
.clone();
|
||||
drop(sessions);
|
||||
|
||||
// Placeholder for video stop logic
|
||||
info!("Video streaming stopped for session: {}", session_id);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for WebRtcSessionManager {
    /// Equivalent to [`WebRtcSessionManager::default_config`]: a manager
    /// over the default `WebRtcConfig`.
    fn default() -> Self {
        Self::default_config()
    }
}
|
||||
195
src/webrtc/signaling.rs
Normal file
195
src/webrtc/signaling.rs
Normal file
@@ -0,0 +1,195 @@
|
||||
//! WebRTC signaling types and messages
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Signaling message exchanged between client and server.
///
/// Serialized as internally-tagged JSON: the `type` field carries the
/// lowercase variant name (`offer`, `answer`, `candidate`, `error`,
/// `close`) and the variant payload is flattened alongside it.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum SignalingMessage {
    /// SDP Offer from client
    Offer(SdpOffer),
    /// SDP Answer from server
    Answer(SdpAnswer),
    /// ICE candidate (trickle ICE, either direction)
    Candidate(IceCandidate),
    /// Connection error
    Error(SignalingError),
    /// Connection closed
    Close,
}
|
||||
|
||||
/// SDP Offer from client
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct SdpOffer {
|
||||
/// SDP content
|
||||
pub sdp: String,
|
||||
}
|
||||
|
||||
impl SdpOffer {
|
||||
pub fn new(sdp: impl Into<String>) -> Self {
|
||||
Self { sdp: sdp.into() }
|
||||
}
|
||||
}
|
||||
|
||||
/// SDP Answer from server
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct SdpAnswer {
|
||||
/// SDP content
|
||||
pub sdp: String,
|
||||
/// ICE candidates gathered during answer creation
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub ice_candidates: Option<Vec<IceCandidate>>,
|
||||
}
|
||||
|
||||
impl SdpAnswer {
|
||||
pub fn new(sdp: impl Into<String>) -> Self {
|
||||
Self {
|
||||
sdp: sdp.into(),
|
||||
ice_candidates: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_candidates(sdp: impl Into<String>, candidates: Vec<IceCandidate>) -> Self {
|
||||
Self {
|
||||
sdp: sdp.into(),
|
||||
ice_candidates: if candidates.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(candidates)
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// ICE candidate
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct IceCandidate {
|
||||
/// Candidate string
|
||||
pub candidate: String,
|
||||
/// SDP mid (media ID)
|
||||
#[serde(rename = "sdpMid")]
|
||||
pub sdp_mid: Option<String>,
|
||||
/// SDP mline index
|
||||
#[serde(rename = "sdpMLineIndex")]
|
||||
pub sdp_mline_index: Option<u16>,
|
||||
/// Username fragment
|
||||
#[serde(rename = "usernameFragment")]
|
||||
pub username_fragment: Option<String>,
|
||||
}
|
||||
|
||||
impl IceCandidate {
|
||||
pub fn new(candidate: impl Into<String>) -> Self {
|
||||
Self {
|
||||
candidate: candidate.into(),
|
||||
sdp_mid: None,
|
||||
sdp_mline_index: None,
|
||||
username_fragment: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_mid(mut self, mid: impl Into<String>, index: u16) -> Self {
|
||||
self.sdp_mid = Some(mid.into());
|
||||
self.sdp_mline_index = Some(index);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// Signaling error
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct SignalingError {
|
||||
/// Error code
|
||||
pub code: u32,
|
||||
/// Error message
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
impl SignalingError {
|
||||
pub fn new(code: u32, message: impl Into<String>) -> Self {
|
||||
Self {
|
||||
code,
|
||||
message: message.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn invalid_offer() -> Self {
|
||||
Self::new(400, "Invalid SDP offer")
|
||||
}
|
||||
|
||||
pub fn connection_failed() -> Self {
|
||||
Self::new(500, "Connection failed")
|
||||
}
|
||||
|
||||
pub fn media_error() -> Self {
|
||||
Self::new(502, "Media error")
|
||||
}
|
||||
}
|
||||
|
||||
/// WebRTC offer request (from HTTP API)
///
/// Request body for the HTTP signaling endpoint that accepts an SDP offer.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OfferRequest {
    /// SDP offer
    pub sdp: String,
    /// Client ID (optional, for tracking); omitted from JSON when absent
    #[serde(skip_serializing_if = "Option::is_none")]
    pub client_id: Option<String>,
}
|
||||
|
||||
/// WebRTC answer response (from HTTP API)
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AnswerResponse {
|
||||
/// SDP answer
|
||||
pub sdp: String,
|
||||
/// Session ID for this connection
|
||||
pub session_id: String,
|
||||
/// ICE candidates
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
pub ice_candidates: Vec<IceCandidate>,
|
||||
}
|
||||
|
||||
impl AnswerResponse {
|
||||
pub fn new(
|
||||
sdp: impl Into<String>,
|
||||
session_id: impl Into<String>,
|
||||
ice_candidates: Vec<IceCandidate>,
|
||||
) -> Self {
|
||||
Self {
|
||||
sdp: sdp.into(),
|
||||
session_id: session_id.into(),
|
||||
ice_candidates,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// ICE candidate request (trickle ICE)
///
/// Request body for submitting a single trickled candidate to an existing
/// session over the HTTP API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IceCandidateRequest {
    /// Session ID the candidate belongs to
    pub session_id: String,
    /// The ICE candidate itself
    pub candidate: IceCandidate,
}
|
||||
|
||||
/// Connection state notification
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum ConnectionState {
|
||||
New,
|
||||
Connecting,
|
||||
Connected,
|
||||
Disconnected,
|
||||
Failed,
|
||||
Closed,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ConnectionState {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
ConnectionState::New => write!(f, "new"),
|
||||
ConnectionState::Connecting => write!(f, "connecting"),
|
||||
ConnectionState::Connected => write!(f, "connected"),
|
||||
ConnectionState::Disconnected => write!(f, "disconnected"),
|
||||
ConnectionState::Failed => write!(f, "failed"),
|
||||
ConnectionState::Closed => write!(f, "closed"),
|
||||
}
|
||||
}
|
||||
}
|
||||
351
src/webrtc/track.rs
Normal file
351
src/webrtc/track.rs
Normal file
@@ -0,0 +1,351 @@
|
||||
//! WebRTC track implementations for video and audio
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
use tokio::sync::{broadcast, watch, Mutex};
|
||||
use tracing::{debug, error, info};
|
||||
use webrtc::track::track_local::track_local_static_rtp::TrackLocalStaticRTP;
|
||||
use webrtc::track::track_local::TrackLocalWriter;
|
||||
use webrtc::rtp_transceiver::rtp_codec::RTCRtpCodecCapability;
|
||||
|
||||
use crate::video::frame::VideoFrame;
|
||||
|
||||
/// Video track configuration
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct VideoTrackConfig {
|
||||
/// Track ID
|
||||
pub track_id: String,
|
||||
/// Stream ID
|
||||
pub stream_id: String,
|
||||
/// Video codec
|
||||
pub codec: VideoCodecType,
|
||||
/// Clock rate
|
||||
pub clock_rate: u32,
|
||||
/// Target bitrate
|
||||
pub bitrate_kbps: u32,
|
||||
}
|
||||
|
||||
impl Default for VideoTrackConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
track_id: "video0".to_string(),
|
||||
stream_id: "one-kvm-stream".to_string(),
|
||||
codec: VideoCodecType::H264,
|
||||
clock_rate: 90000,
|
||||
bitrate_kbps: 8000,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Video codec type supported by the RTP track.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum VideoCodecType {
    H264,
    VP8,
    VP9,
}

impl VideoCodecType {
    /// RTP MIME type string for this codec.
    pub fn mime_type(&self) -> &'static str {
        match self {
            Self::H264 => "video/H264",
            Self::VP8 => "video/VP8",
            Self::VP9 => "video/VP9",
        }
    }

    /// Codec-specific SDP `fmtp` parameter line (empty when none apply).
    pub fn sdp_fmtp(&self) -> &'static str {
        match self {
            Self::H264 => {
                "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f"
            }
            Self::VP8 => "",
            Self::VP9 => "profile-id=0",
        }
    }
}
|
||||
|
||||
/// Create RTP codec capability for video
///
/// Builds the `RTCRtpCodecCapability` advertised for `codec` at the given
/// RTP clock rate. `channels` is 0 because channel count is an audio-only
/// concept; no RTCP feedback mechanisms are declared.
pub fn video_codec_capability(codec: VideoCodecType, clock_rate: u32) -> RTCRtpCodecCapability {
    RTCRtpCodecCapability {
        mime_type: codec.mime_type().to_string(),
        clock_rate,
        channels: 0,
        sdp_fmtp_line: codec.sdp_fmtp().to_string(),
        rtcp_feedback: vec![],
    }
}
|
||||
|
||||
/// Create RTP codec capability for audio (Opus)
///
/// Fixed at 48 kHz stereo with in-band FEC enabled and a minimum packet
/// time of 10 ms, the common Opus-over-WebRTC configuration.
pub fn audio_codec_capability() -> RTCRtpCodecCapability {
    RTCRtpCodecCapability {
        mime_type: "audio/opus".to_string(),
        clock_rate: 48000,
        channels: 2,
        sdp_fmtp_line: "minptime=10;useinbandfec=1".to_string(),
        rtcp_feedback: vec![],
    }
}
|
||||
|
||||
/// Video track statistics
///
/// Counters accumulated by the sender loop. Only `frames_sent`,
/// `bytes_sent` and `packets_sent` are updated by the visible code;
/// the RTCP-derived fields (`packets_lost`, `rtt_ms`, `jitter_ms`) and
/// `current_bitrate` appear to be populated elsewhere — TODO confirm.
#[derive(Debug, Clone, Default)]
pub struct VideoTrackStats {
    /// Frames sent
    pub frames_sent: u64,
    /// Bytes sent
    pub bytes_sent: u64,
    /// Packets sent
    pub packets_sent: u64,
    /// Packets lost (RTCP feedback)
    pub packets_lost: u64,
    /// Current bitrate (bps)
    pub current_bitrate: u64,
    /// Round trip time (ms)
    pub rtt_ms: f64,
    /// Jitter (ms)
    pub jitter_ms: f64,
}
|
||||
|
||||
/// Video track for WebRTC streaming
///
/// Owns a static RTP track plus the mutable sender state (RTP sequence
/// number, timestamp, statistics). Shared state lives behind
/// `Arc<Mutex<_>>` because the send loop runs on a spawned tokio task;
/// the `watch` sender doubles as a start/stop signal.
pub struct VideoTrack {
    config: VideoTrackConfig,
    /// RTP track
    track: Arc<TrackLocalStaticRTP>,
    /// Statistics
    stats: Arc<Mutex<VideoTrackStats>>,
    /// Sequence number for RTP
    sequence_number: Arc<Mutex<u16>>,
    /// Timestamp for RTP
    timestamp: Arc<Mutex<u32>>,
    /// Last frame time
    last_frame_time: Arc<Mutex<Option<Instant>>>,
    /// Running flag
    running: Arc<watch::Sender<bool>>,
}
|
||||
|
||||
impl VideoTrack {
    /// Create a new video track
    ///
    /// Builds the RTP track from the config's codec capability; counters
    /// start at zero and the running flag starts `false`.
    pub fn new(config: VideoTrackConfig) -> Self {
        let capability = video_codec_capability(config.codec, config.clock_rate);

        let track = Arc::new(TrackLocalStaticRTP::new(
            capability,
            config.track_id.clone(),
            config.stream_id.clone(),
        ));

        // Receiver half is discarded; new receivers are created on demand
        // via `subscribe()` in `start_sending`.
        let (running_tx, _) = watch::channel(false);

        Self {
            config,
            track,
            stats: Arc::new(Mutex::new(VideoTrackStats::default())),
            sequence_number: Arc::new(Mutex::new(0)),
            timestamp: Arc::new(Mutex::new(0)),
            last_frame_time: Arc::new(Mutex::new(None)),
            running: Arc::new(running_tx),
        }
    }

    /// Get the underlying RTP track
    pub fn rtp_track(&self) -> Arc<TrackLocalStaticRTP> {
        self.track.clone()
    }

    /// Get current statistics
    pub async fn stats(&self) -> VideoTrackStats {
        self.stats.lock().await.clone()
    }

    /// Start sending frames from a broadcast receiver
    ///
    /// Spawns a background task that forwards frames from `frame_rx` to the
    /// RTP track until the channel closes or `stop()` flips the running
    /// flag. Lagged receivers are logged and skipped, not treated as fatal.
    pub async fn start_sending(
        &self,
        mut frame_rx: broadcast::Receiver<VideoFrame>,
    ) {
        let _ = self.running.send(true);
        // Clone all shared handles into the spawned task.
        let track = self.track.clone();
        let stats = self.stats.clone();
        let sequence_number = self.sequence_number.clone();
        let timestamp = self.timestamp.clone();
        let last_frame_time = self.last_frame_time.clone();
        let clock_rate = self.config.clock_rate;
        let mut running_rx = self.running.subscribe();

        info!("Starting video track sender");

        tokio::spawn(async move {
            loop {
                tokio::select! {
                    result = frame_rx.recv() => {
                        match result {
                            Ok(frame) => {
                                // Per-frame send failures are logged and the
                                // loop keeps running (best-effort streaming).
                                if let Err(e) = Self::send_frame(
                                    &track,
                                    &frame,
                                    &stats,
                                    &sequence_number,
                                    &timestamp,
                                    &last_frame_time,
                                    clock_rate,
                                ).await {
                                    debug!("Failed to send frame: {}", e);
                                }
                            }
                            Err(broadcast::error::RecvError::Lagged(n)) => {
                                // Receiver fell behind; dropped frames are acceptable.
                                debug!("Video track lagged by {} frames", n);
                            }
                            Err(broadcast::error::RecvError::Closed) => {
                                debug!("Frame channel closed");
                                break;
                            }
                        }
                    }
                    _ = running_rx.changed() => {
                        // Exit only when the flag transitioned to false.
                        if !*running_rx.borrow() {
                            debug!("Video track stopped");
                            break;
                        }
                    }
                }
            }

            info!("Video track sender stopped");
        });
    }

    /// Stop sending
    ///
    /// Signals the background sender task to exit; send errors (no task
    /// listening) are deliberately ignored.
    pub fn stop(&self) {
        let _ = self.running.send(false);
    }

    /// Send a single frame as RTP packets
    ///
    /// NOTE(review): this is explicitly a simplified placeholder. It chops
    /// the frame into 1200-byte slices and passes each slice to
    /// `TrackLocalStaticRTP::write`, which expects fully serialized RTP
    /// packets — raw payload bytes are not valid RTP. The sequence number
    /// and timestamp computed here are also never placed in any header
    /// (`_seq_num`, `_current_ts` are unused). Proper NAL-aware RTP
    /// packetization (RFC 6184 for H.264) is needed before this path can
    /// interoperate — TODO confirm against the unified track implementation.
    async fn send_frame(
        track: &TrackLocalStaticRTP,
        frame: &VideoFrame,
        stats: &Mutex<VideoTrackStats>,
        sequence_number: &Mutex<u16>,
        timestamp: &Mutex<u32>,
        last_frame_time: &Mutex<Option<Instant>>,
        clock_rate: u32,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        // Calculate timestamp increment based on frame timing; capped at
        // clock_rate/10 (i.e. 100 ms) so a stall doesn't produce a huge jump.
        let now = Instant::now();
        let mut last_time = last_frame_time.lock().await;
        let timestamp_increment = if let Some(last) = *last_time {
            let elapsed = now.duration_since(last);
            ((elapsed.as_secs_f64() * clock_rate as f64) as u32).min(clock_rate / 10)
        } else {
            clock_rate / 30 // Default to 30 fps
        };
        *last_time = Some(now);
        drop(last_time);

        // Update timestamp (wrapping, as RTP timestamps do)
        let mut ts = timestamp.lock().await;
        *ts = ts.wrapping_add(timestamp_increment);
        let _current_ts = *ts;
        drop(ts);

        // For H.264, we need to packetize into RTP
        // This is a simplified implementation - real implementation needs proper NAL unit handling
        let data = frame.data();
        let max_payload_size = 1200; // MTU - headers

        // Ceiling division: number of slices needed to cover the frame.
        let packet_count = (data.len() + max_payload_size - 1) / max_payload_size;
        let mut bytes_sent = 0u64;

        for i in 0..packet_count {
            let start = i * max_payload_size;
            let end = ((i + 1) * max_payload_size).min(data.len());
            let _is_last = i == packet_count - 1;

            // Get sequence number (wrapping u16, per RTP)
            let mut seq = sequence_number.lock().await;
            let _seq_num = *seq;
            *seq = seq.wrapping_add(1);
            drop(seq);

            // Build RTP packet payload
            // For simplicity, just send raw data - real implementation needs proper RTP packetization
            let payload = data[start..end].to_vec();
            bytes_sent += payload.len() as u64;

            // Write sample (the track handles RTP header construction)
            if let Err(e) = track.write(&payload).await {
                error!("Failed to write RTP packet: {}", e);
                return Err(e.into());
            }
        }

        // Update stats
        let mut s = stats.lock().await;
        s.frames_sent += 1;
        s.bytes_sent += bytes_sent;
        s.packets_sent += packet_count as u64;

        Ok(())
    }
}
|
||||
|
||||
/// Audio track configuration
#[derive(Debug, Clone)]
pub struct AudioTrackConfig {
    /// Track ID
    pub track_id: String,
    /// Stream ID
    pub stream_id: String,
    /// Sample rate in Hz
    pub sample_rate: u32,
    /// Channel count
    pub channels: u8,
}

impl Default for AudioTrackConfig {
    fn default() -> Self {
        // Defaults match the Opus capability: 48 kHz stereo.
        AudioTrackConfig {
            track_id: String::from("audio0"),
            stream_id: String::from("one-kvm-stream"),
            sample_rate: 48_000,
            channels: 2,
        }
    }
}
|
||||
|
||||
/// Audio track for WebRTC streaming
///
/// Thin wrapper around an Opus-capable static RTP track; the `watch`
/// sender is a stop signal for a sender loop (none is started by the
/// visible code).
pub struct AudioTrack {
    #[allow(dead_code)]
    config: AudioTrackConfig,
    /// RTP track
    track: Arc<TrackLocalStaticRTP>,
    /// Running flag
    running: Arc<watch::Sender<bool>>,
}
|
||||
|
||||
impl AudioTrack {
|
||||
/// Create a new audio track
|
||||
pub fn new(config: AudioTrackConfig) -> Self {
|
||||
let capability = audio_codec_capability();
|
||||
|
||||
let track = Arc::new(TrackLocalStaticRTP::new(
|
||||
capability,
|
||||
config.track_id.clone(),
|
||||
config.stream_id.clone(),
|
||||
));
|
||||
|
||||
let (running_tx, _) = watch::channel(false);
|
||||
|
||||
Self {
|
||||
config,
|
||||
track,
|
||||
running: Arc::new(running_tx),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the underlying RTP track
|
||||
pub fn rtp_track(&self) -> Arc<TrackLocalStaticRTP> {
|
||||
self.track.clone()
|
||||
}
|
||||
|
||||
/// Stop sending
|
||||
pub fn stop(&self) {
|
||||
let _ = self.running.send(false);
|
||||
}
|
||||
}
|
||||
664
src/webrtc/unified_video_track.rs
Normal file
664
src/webrtc/unified_video_track.rs
Normal file
@@ -0,0 +1,664 @@
|
||||
//! Unified video track supporting H264, H265, VP8, VP9
|
||||
//!
|
||||
//! This module provides a unified video track implementation that supports
|
||||
//! multiple video codecs with proper RTP packetization.
|
||||
//!
|
||||
//! # Supported Codecs
|
||||
//!
|
||||
//! - **H264**: NAL unit parsing with SPS/PPS caching (RFC 6184)
|
||||
//! - **H265**: NAL unit parsing with VPS/SPS/PPS caching (RFC 7798)
|
||||
//! - **VP8**: Direct frame sending (RFC 7741)
|
||||
//! - **VP9**: Direct frame sending (draft-ietf-payload-vp9)
|
||||
//!
|
||||
//! # Architecture
|
||||
//!
|
||||
//! For NAL-based codecs (H264/H265):
|
||||
//! - Parse NAL units from Annex B format
|
||||
//! - Cache parameter sets (SPS/PPS/VPS) for injection
|
||||
//! - Send each NAL unit via TrackLocalStaticSample
|
||||
//!
|
||||
//! For VP8/VP9:
|
||||
//! - Send raw encoded frames directly
|
||||
//! - webrtc-rs handles RTP packetization internally
|
||||
|
||||
use bytes::Bytes;
|
||||
use std::io::Cursor;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tokio::sync::Mutex;
|
||||
use tracing::{debug, trace, warn};
|
||||
use webrtc::media::io::h264_reader::H264Reader;
|
||||
use webrtc::media::Sample;
|
||||
use webrtc::rtp_transceiver::rtp_codec::RTCRtpCodecCapability;
|
||||
use webrtc::track::track_local::track_local_static_sample::TrackLocalStaticSample;
|
||||
use webrtc::track::track_local::TrackLocal;
|
||||
|
||||
use crate::error::{AppError, Result};
|
||||
use crate::video::format::Resolution;
|
||||
|
||||
/// Video codec supported by the unified track.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum VideoCodec {
    H264,
    H265,
    VP8,
    VP9,
}

impl VideoCodec {
    /// RTP MIME type string for this codec.
    pub fn mime_type(&self) -> &'static str {
        match self {
            Self::H264 => "video/H264",
            Self::H265 => "video/H265",
            Self::VP8 => "video/VP8",
            Self::VP9 => "video/VP9",
        }
    }

    /// RTP clock rate — always 90 kHz for video.
    pub fn clock_rate(&self) -> u32 {
        90_000
    }

    /// SDP `fmtp` line for this codec.
    ///
    /// Only H264 declares parameters here; the other codecs send none.
    pub fn sdp_fmtp_line(&self) -> String {
        let line = match self {
            Self::H264 => "level-asymmetry-allowed=1;packetization-mode=1",
            Self::H265 | Self::VP8 | Self::VP9 => "",
        };
        line.to_string()
    }

    /// Whether the codec's bitstream is made of NAL units (H264/H265),
    /// which require the Annex B parsing path.
    pub fn uses_nal_units(&self) -> bool {
        matches!(self, Self::H264 | Self::H265)
    }
}

impl std::fmt::Display for VideoCodec {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Self::H264 => "H264",
            Self::H265 => "H265",
            Self::VP8 => "VP8",
            Self::VP9 => "VP9",
        };
        f.write_str(name)
    }
}
|
||||
|
||||
/// Unified video track configuration
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct UnifiedVideoTrackConfig {
|
||||
/// Video codec
|
||||
pub codec: VideoCodec,
|
||||
/// Track ID
|
||||
pub track_id: String,
|
||||
/// Stream ID
|
||||
pub stream_id: String,
|
||||
/// Resolution
|
||||
pub resolution: Resolution,
|
||||
/// Target bitrate in kbps
|
||||
pub bitrate_kbps: u32,
|
||||
/// Frames per second
|
||||
pub fps: u32,
|
||||
}
|
||||
|
||||
impl Default for UnifiedVideoTrackConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
codec: VideoCodec::H264,
|
||||
track_id: "video0".to_string(),
|
||||
stream_id: "one-kvm-stream".to_string(),
|
||||
resolution: Resolution::HD720,
|
||||
bitrate_kbps: 8000,
|
||||
fps: 30,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Unified video track statistics
///
/// Monotonic counters updated as frames are written to the track.
#[derive(Debug, Clone, Default)]
pub struct UnifiedVideoTrackStats {
    // Total frames handed to the track.
    pub frames_sent: u64,
    // Total payload bytes handed to the track.
    pub bytes_sent: u64,
    // Subset of frames_sent that were keyframes.
    pub keyframes_sent: u64,
    // Write failures observed.
    pub errors: u64,
}
|
||||
|
||||
/// Cached NAL parameter sets for H264
///
/// The most recently seen SPS/PPS, kept so they can be re-injected ahead
/// of IDR frames that arrive without them (late-joining decoders need
/// them to configure).
struct H264ParameterSets {
    // Sequence Parameter Set (NAL type 7), without start code.
    sps: Option<Bytes>,
    // Picture Parameter Set (NAL type 8), without start code.
    pps: Option<Bytes>,
}
|
||||
|
||||
/// Cached NAL parameter sets for H265
///
/// Same role as [`H264ParameterSets`], with the additional Video
/// Parameter Set that H265 requires.
struct H265ParameterSets {
    // Video Parameter Set (NAL type 32), without start code.
    vps: Option<Bytes>,
    // Sequence Parameter Set (NAL type 33), without start code.
    sps: Option<Bytes>,
    // Picture Parameter Set (NAL type 34), without start code.
    pps: Option<Bytes>,
}
|
||||
|
||||
/// NAL type constants for H264 (low 5 bits of the NAL header byte).
///
/// NOTE(review): `NON_IDR_SLICE` and `SEI` are not referenced by the
/// visible code; kept for completeness of the type table.
mod h264_nal {
    pub const NON_IDR_SLICE: u8 = 1;
    pub const IDR_SLICE: u8 = 5;
    pub const SEI: u8 = 6;
    pub const SPS: u8 = 7;
    pub const PPS: u8 = 8;
    pub const AUD: u8 = 9;
    pub const FILLER: u8 = 12;
}
|
||||
|
||||
/// NAL type constants for H265 (`nal_unit_type`, bits 1..7 of the first
/// header byte).
mod h265_nal {
    pub const IDR_W_RADL: u8 = 19;
    pub const IDR_N_LP: u8 = 20;
    pub const CRA_NUT: u8 = 21;
    pub const VPS: u8 = 32;
    pub const SPS: u8 = 33;
    pub const PPS: u8 = 34;
    pub const AUD: u8 = 35;
    pub const FD_NUT: u8 = 38; // Filler data

    /// Whether the NAL type starts a random-access point.
    ///
    /// NOTE(review): CRA_NUT is strictly a clean-random-access picture,
    /// not an IDR; it is treated as one here so parameter-set injection
    /// covers it too.
    pub fn is_idr(nal_type: u8) -> bool {
        matches!(nal_type, IDR_W_RADL | IDR_N_LP | CRA_NUT)
    }
}
|
||||
|
||||
/// Unified video track supporting multiple codecs
///
/// Wraps a sample-based WebRTC track plus per-codec parameter-set caches.
/// Both caches exist regardless of the configured codec; only the one
/// matching `config.codec` is ever touched.
pub struct UnifiedVideoTrack {
    /// The underlying WebRTC track
    track: Arc<TrackLocalStaticSample>,
    /// Track configuration
    config: UnifiedVideoTrackConfig,
    /// Statistics
    stats: Mutex<UnifiedVideoTrackStats>,
    /// H264 parameter set cache
    h264_params: Mutex<H264ParameterSets>,
    /// H265 parameter set cache
    h265_params: Mutex<H265ParameterSets>,
}
|
||||
|
||||
impl UnifiedVideoTrack {
|
||||
/// Create a new unified video track
|
||||
pub fn new(config: UnifiedVideoTrackConfig) -> Self {
|
||||
let codec_capability = RTCRtpCodecCapability {
|
||||
mime_type: config.codec.mime_type().to_string(),
|
||||
clock_rate: config.codec.clock_rate(),
|
||||
channels: 0,
|
||||
sdp_fmtp_line: config.codec.sdp_fmtp_line(),
|
||||
rtcp_feedback: vec![],
|
||||
};
|
||||
|
||||
let track = Arc::new(TrackLocalStaticSample::new(
|
||||
codec_capability,
|
||||
config.track_id.clone(),
|
||||
config.stream_id.clone(),
|
||||
));
|
||||
|
||||
Self {
|
||||
track,
|
||||
config,
|
||||
stats: Mutex::new(UnifiedVideoTrackStats::default()),
|
||||
h264_params: Mutex::new(H264ParameterSets { sps: None, pps: None }),
|
||||
h265_params: Mutex::new(H265ParameterSets { vps: None, sps: None, pps: None }),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create track for H264
|
||||
pub fn h264(track_id: &str, stream_id: &str, resolution: Resolution, fps: u32) -> Self {
|
||||
Self::new(UnifiedVideoTrackConfig {
|
||||
codec: VideoCodec::H264,
|
||||
track_id: track_id.to_string(),
|
||||
stream_id: stream_id.to_string(),
|
||||
resolution,
|
||||
fps,
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
|
||||
/// Create track for H265
|
||||
pub fn h265(track_id: &str, stream_id: &str, resolution: Resolution, fps: u32) -> Self {
|
||||
Self::new(UnifiedVideoTrackConfig {
|
||||
codec: VideoCodec::H265,
|
||||
track_id: track_id.to_string(),
|
||||
stream_id: stream_id.to_string(),
|
||||
resolution,
|
||||
fps,
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
|
||||
/// Create track for VP8
|
||||
pub fn vp8(track_id: &str, stream_id: &str, resolution: Resolution, fps: u32) -> Self {
|
||||
Self::new(UnifiedVideoTrackConfig {
|
||||
codec: VideoCodec::VP8,
|
||||
track_id: track_id.to_string(),
|
||||
stream_id: stream_id.to_string(),
|
||||
resolution,
|
||||
fps,
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
|
||||
/// Create track for VP9
|
||||
pub fn vp9(track_id: &str, stream_id: &str, resolution: Resolution, fps: u32) -> Self {
|
||||
Self::new(UnifiedVideoTrackConfig {
|
||||
codec: VideoCodec::VP9,
|
||||
track_id: track_id.to_string(),
|
||||
stream_id: stream_id.to_string(),
|
||||
resolution,
|
||||
fps,
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
|
||||
/// Get the underlying track for peer connection
|
||||
pub fn track(&self) -> Arc<TrackLocalStaticSample> {
|
||||
self.track.clone()
|
||||
}
|
||||
|
||||
/// Get track as TrackLocal for peer connection
|
||||
pub fn as_track_local(&self) -> Arc<dyn TrackLocal + Send + Sync> {
|
||||
self.track.clone()
|
||||
}
|
||||
|
||||
/// Get current codec
|
||||
pub fn codec(&self) -> VideoCodec {
|
||||
self.config.codec
|
||||
}
|
||||
|
||||
/// Get statistics
|
||||
pub async fn stats(&self) -> UnifiedVideoTrackStats {
|
||||
self.stats.lock().await.clone()
|
||||
}
|
||||
|
||||
/// Write an encoded frame to the track
|
||||
///
|
||||
/// The frame data should be in the appropriate format for the codec:
|
||||
/// - H264/H265: Annex B format (with start codes)
|
||||
/// - VP8/VP9: Raw encoded frame
|
||||
pub async fn write_frame(&self, data: &[u8], _duration: Duration, is_keyframe: bool) -> Result<()> {
|
||||
if data.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
match self.config.codec {
|
||||
VideoCodec::H264 => self.write_h264_frame(data, is_keyframe).await,
|
||||
VideoCodec::H265 => self.write_h265_frame(data, is_keyframe).await,
|
||||
VideoCodec::VP8 => self.write_vp8_frame(data, is_keyframe).await,
|
||||
VideoCodec::VP9 => self.write_vp9_frame(data, is_keyframe).await,
|
||||
}
|
||||
}
|
||||
|
||||
/// Write H264 frame (Annex B format)
|
||||
async fn write_h264_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> {
|
||||
let cursor = Cursor::new(data);
|
||||
let mut reader = H264Reader::new(cursor, 1024 * 1024);
|
||||
|
||||
let mut nals: Vec<Bytes> = Vec::new();
|
||||
let mut has_sps = false;
|
||||
let mut has_pps = false;
|
||||
let mut has_idr = false;
|
||||
|
||||
// Parse NAL units
|
||||
while let Ok(nal) = reader.next_nal() {
|
||||
if nal.data.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let nal_type = nal.data[0] & 0x1F;
|
||||
|
||||
// Skip AUD and filler NAL units
|
||||
if nal_type == h264_nal::AUD || nal_type == h264_nal::FILLER {
|
||||
continue;
|
||||
}
|
||||
|
||||
match nal_type {
|
||||
h264_nal::IDR_SLICE => has_idr = true,
|
||||
h264_nal::SPS => {
|
||||
has_sps = true;
|
||||
*self.h264_params.lock().await = H264ParameterSets {
|
||||
sps: Some(nal.data.clone().freeze()),
|
||||
pps: self.h264_params.lock().await.pps.clone(),
|
||||
};
|
||||
}
|
||||
h264_nal::PPS => {
|
||||
has_pps = true;
|
||||
let mut params = self.h264_params.lock().await;
|
||||
params.pps = Some(nal.data.clone().freeze());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
trace!("H264 NAL: type={} size={}", nal_type, nal.data.len());
|
||||
nals.push(nal.data.freeze());
|
||||
}
|
||||
|
||||
// Inject cached SPS/PPS before IDR if missing
|
||||
if has_idr && (!has_sps || !has_pps) {
|
||||
let params = self.h264_params.lock().await;
|
||||
let mut injected: Vec<Bytes> = Vec::new();
|
||||
|
||||
if !has_sps {
|
||||
if let Some(ref sps) = params.sps {
|
||||
debug!("Injecting cached H264 SPS");
|
||||
injected.push(sps.clone());
|
||||
}
|
||||
}
|
||||
if !has_pps {
|
||||
if let Some(ref pps) = params.pps {
|
||||
debug!("Injecting cached H264 PPS");
|
||||
injected.push(pps.clone());
|
||||
}
|
||||
}
|
||||
|
||||
if !injected.is_empty() {
|
||||
injected.extend(nals);
|
||||
nals = injected;
|
||||
}
|
||||
}
|
||||
|
||||
// Send NAL units
|
||||
self.send_nal_units(nals, is_keyframe).await
|
||||
}
|
||||
|
||||
/// Write H265 frame (Annex B format)
|
||||
async fn write_h265_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> {
|
||||
let mut nals: Vec<Bytes> = Vec::new();
|
||||
let mut has_vps = false;
|
||||
let mut has_sps = false;
|
||||
let mut has_pps = false;
|
||||
let mut has_idr = false;
|
||||
|
||||
// Parse H265 NAL units manually (H264Reader works for both since format is similar)
|
||||
let mut i = 0;
|
||||
while i < data.len() {
|
||||
// Find start code
|
||||
let (start_code_len, nal_start) = if i + 4 <= data.len()
|
||||
&& data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 0 && data[i + 3] == 1
|
||||
{
|
||||
(4, i + 4)
|
||||
} else if i + 3 <= data.len()
|
||||
&& data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1
|
||||
{
|
||||
(3, i + 3)
|
||||
} else {
|
||||
i += 1;
|
||||
continue;
|
||||
};
|
||||
|
||||
if nal_start >= data.len() {
|
||||
break;
|
||||
}
|
||||
|
||||
// Find end of NAL unit (next start code or end of data)
|
||||
let mut nal_end = data.len();
|
||||
let mut j = nal_start + 1;
|
||||
while j + 3 <= data.len() {
|
||||
if (data[j] == 0 && data[j + 1] == 0 && data[j + 2] == 1)
|
||||
|| (j + 4 <= data.len() && data[j] == 0 && data[j + 1] == 0
|
||||
&& data[j + 2] == 0 && data[j + 3] == 1)
|
||||
{
|
||||
nal_end = j;
|
||||
break;
|
||||
}
|
||||
j += 1;
|
||||
}
|
||||
|
||||
let nal_data = &data[nal_start..nal_end];
|
||||
if nal_data.is_empty() {
|
||||
i = nal_end;
|
||||
continue;
|
||||
}
|
||||
|
||||
// H265 NAL type: (first_byte >> 1) & 0x3F
|
||||
let nal_type = (nal_data[0] >> 1) & 0x3F;
|
||||
|
||||
// Skip AUD and filler
|
||||
if nal_type == h265_nal::AUD || nal_type == h265_nal::FD_NUT {
|
||||
i = nal_end;
|
||||
continue;
|
||||
}
|
||||
|
||||
match nal_type {
|
||||
h265_nal::VPS => {
|
||||
has_vps = true;
|
||||
let mut params = self.h265_params.lock().await;
|
||||
params.vps = Some(Bytes::copy_from_slice(nal_data));
|
||||
}
|
||||
h265_nal::SPS => {
|
||||
has_sps = true;
|
||||
let mut params = self.h265_params.lock().await;
|
||||
params.sps = Some(Bytes::copy_from_slice(nal_data));
|
||||
}
|
||||
h265_nal::PPS => {
|
||||
has_pps = true;
|
||||
let mut params = self.h265_params.lock().await;
|
||||
params.pps = Some(Bytes::copy_from_slice(nal_data));
|
||||
}
|
||||
_ if h265_nal::is_idr(nal_type) => {
|
||||
has_idr = true;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
trace!("H265 NAL: type={} size={}", nal_type, nal_data.len());
|
||||
nals.push(Bytes::copy_from_slice(nal_data));
|
||||
i = nal_end;
|
||||
}
|
||||
|
||||
// Inject cached VPS/SPS/PPS before IDR if missing
|
||||
if has_idr && (!has_vps || !has_sps || !has_pps) {
|
||||
let params = self.h265_params.lock().await;
|
||||
let mut injected: Vec<Bytes> = Vec::new();
|
||||
|
||||
if !has_vps {
|
||||
if let Some(ref vps) = params.vps {
|
||||
debug!("Injecting cached H265 VPS");
|
||||
injected.push(vps.clone());
|
||||
}
|
||||
}
|
||||
if !has_sps {
|
||||
if let Some(ref sps) = params.sps {
|
||||
debug!("Injecting cached H265 SPS");
|
||||
injected.push(sps.clone());
|
||||
}
|
||||
}
|
||||
if !has_pps {
|
||||
if let Some(ref pps) = params.pps {
|
||||
debug!("Injecting cached H265 PPS");
|
||||
injected.push(pps.clone());
|
||||
}
|
||||
}
|
||||
|
||||
if !injected.is_empty() {
|
||||
injected.extend(nals);
|
||||
nals = injected;
|
||||
}
|
||||
}
|
||||
|
||||
self.send_nal_units(nals, is_keyframe).await
|
||||
}
|
||||
|
||||
/// Write VP8 frame (raw encoded)
|
||||
async fn write_vp8_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> {
|
||||
// VP8 frames are sent directly
|
||||
let sample = Sample {
|
||||
data: Bytes::copy_from_slice(data),
|
||||
duration: Duration::from_secs(1),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
if let Err(e) = self.track.write_sample(&sample).await {
|
||||
debug!("VP8 write_sample failed: {}", e);
|
||||
}
|
||||
|
||||
let mut stats = self.stats.lock().await;
|
||||
stats.frames_sent += 1;
|
||||
stats.bytes_sent += data.len() as u64;
|
||||
if is_keyframe {
|
||||
stats.keyframes_sent += 1;
|
||||
}
|
||||
|
||||
trace!("VP8 frame: {} bytes, keyframe={}", data.len(), is_keyframe);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write VP9 frame (raw encoded)
|
||||
async fn write_vp9_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> {
|
||||
// VP9 frames are sent directly
|
||||
let sample = Sample {
|
||||
data: Bytes::copy_from_slice(data),
|
||||
duration: Duration::from_secs(1),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
if let Err(e) = self.track.write_sample(&sample).await {
|
||||
debug!("VP9 write_sample failed: {}", e);
|
||||
}
|
||||
|
||||
let mut stats = self.stats.lock().await;
|
||||
stats.frames_sent += 1;
|
||||
stats.bytes_sent += data.len() as u64;
|
||||
if is_keyframe {
|
||||
stats.keyframes_sent += 1;
|
||||
}
|
||||
|
||||
trace!("VP9 frame: {} bytes, keyframe={}", data.len(), is_keyframe);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Send NAL units via track (for H264/H265)
|
||||
async fn send_nal_units(&self, nals: Vec<Bytes>, is_keyframe: bool) -> Result<()> {
|
||||
let mut total_bytes = 0u64;
|
||||
let mut nal_count = 0;
|
||||
|
||||
for nal_data in nals {
|
||||
let sample = Sample {
|
||||
data: nal_data.clone(),
|
||||
duration: Duration::from_secs(1),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
if let Err(e) = self.track.write_sample(&sample).await {
|
||||
if nal_count % 100 == 0 {
|
||||
debug!("write_sample failed (no peer?): {}", e);
|
||||
}
|
||||
}
|
||||
|
||||
total_bytes += nal_data.len() as u64;
|
||||
nal_count += 1;
|
||||
}
|
||||
|
||||
if nal_count > 0 {
|
||||
let mut stats = self.stats.lock().await;
|
||||
stats.frames_sent += 1;
|
||||
stats.bytes_sent += total_bytes;
|
||||
if is_keyframe {
|
||||
stats.keyframes_sent += 1;
|
||||
}
|
||||
}
|
||||
|
||||
trace!("Sent {} NAL units, {} bytes, keyframe={}", nal_count, total_bytes, is_keyframe);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    /// Get configuration
    ///
    /// NOTE(review): the return type is spelled `UnifiedVideoTrackConfig`
    /// while the rest of this module uses `UniversalVideoTrackConfig`
    /// (see `UniversalVideoTrack::new`) — confirm this is an intentional
    /// alias and not a typo.
    pub fn config(&self) -> &UnifiedVideoTrackConfig {
        &self.config
    }
|
||||
}
|
||||
|
||||
/// Check if a VP8 frame is a keyframe.
///
/// The low bit of the first payload byte is the frame-type flag:
/// 0 = keyframe, 1 = interframe. Empty input is never a keyframe.
pub fn is_vp8_keyframe(data: &[u8]) -> bool {
    data.first().map_or(false, |&b| b & 0x01 == 0)
}
|
||||
|
||||
/// Check if a VP9 frame is a keyframe.
///
/// Heuristic: bit 2 (mask 0x04) of the first byte is the frame-type flag
/// for the common profile-0 uncompressed-header layout; 0 = keyframe.
/// Empty input is never a keyframe.
pub fn is_vp9_keyframe(data: &[u8]) -> bool {
    data.first().map_or(false, |&b| b & 0x04 == 0)
}
|
||||
|
||||
/// Check if H265 frame contains IDR NAL unit
|
||||
pub fn is_h265_keyframe(data: &[u8]) -> bool {
|
||||
let mut i = 0;
|
||||
while i < data.len() {
|
||||
// Find start code
|
||||
let nal_start = if i + 4 <= data.len()
|
||||
&& data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 0 && data[i + 3] == 1
|
||||
{
|
||||
i + 4
|
||||
} else if i + 3 <= data.len()
|
||||
&& data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1
|
||||
{
|
||||
i + 3
|
||||
} else {
|
||||
i += 1;
|
||||
continue;
|
||||
};
|
||||
|
||||
if nal_start >= data.len() {
|
||||
break;
|
||||
}
|
||||
|
||||
// H265 NAL type
|
||||
let nal_type = (data[nal_start] >> 1) & 0x3F;
|
||||
if h265_nal::is_idr(nal_type) {
|
||||
return true;
|
||||
}
|
||||
|
||||
i = nal_start + 1;
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// MIME strings must match what the webrtc stack registers for each codec.
    #[test]
    fn test_video_codec_mime_types() {
        assert_eq!(VideoCodec::H264.mime_type(), "video/H264");
        assert_eq!(VideoCodec::H265.mime_type(), "video/H265");
        assert_eq!(VideoCodec::VP8.mime_type(), "video/VP8");
        assert_eq!(VideoCodec::VP9.mime_type(), "video/VP9");
    }

    /// Sanity-check the bit arithmetic used by the H.265 NAL parser.
    #[test]
    fn test_h265_nal_type() {
        // H265 NAL type is (first_byte >> 1) & 0x3F
        // VPS: type 32 = 0x40 >> 1 = 32
        let vps_header = 0x40u8; // VPS type 32
        let nal_type = (vps_header >> 1) & 0x3F;
        assert_eq!(nal_type, 32);

        // IDR_W_RADL: type 19
        let idr_header = 0x26u8; // type 19 = 0x13 << 1 = 0x26
        let nal_type = (idr_header >> 1) & 0x3F;
        assert_eq!(nal_type, 19);
    }

    /// VP8 frame-type flag is the low bit of the first payload byte.
    #[test]
    fn test_vp8_keyframe_detection() {
        // VP8 keyframe: bit 0 is 0
        assert!(is_vp8_keyframe(&[0x00]));
        assert!(!is_vp8_keyframe(&[0x01]));
    }
}
|
||||
827
src/webrtc/universal_session.rs
Normal file
827
src/webrtc/universal_session.rs
Normal file
@@ -0,0 +1,827 @@
|
||||
//! Universal WebRTC session with multi-codec support
|
||||
//!
|
||||
//! Provides WebRTC sessions that can use any supported video codec (H264, H265, VP8, VP9).
|
||||
//! Replaces the H264-only H264Session with a more flexible implementation.
|
||||
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::{broadcast, watch, Mutex, RwLock};
|
||||
use tracing::{debug, info, trace, warn};
|
||||
use webrtc::api::interceptor_registry::register_default_interceptors;
|
||||
use webrtc::api::media_engine::MediaEngine;
|
||||
use webrtc::api::APIBuilder;
|
||||
use webrtc::data_channel::data_channel_message::DataChannelMessage;
|
||||
use webrtc::data_channel::RTCDataChannel;
|
||||
use webrtc::ice_transport::ice_candidate::RTCIceCandidate;
|
||||
use webrtc::ice_transport::ice_server::RTCIceServer;
|
||||
use webrtc::interceptor::registry::Registry;
|
||||
use webrtc::peer_connection::configuration::RTCConfiguration;
|
||||
use webrtc::peer_connection::peer_connection_state::RTCPeerConnectionState;
|
||||
use webrtc::peer_connection::sdp::session_description::RTCSessionDescription;
|
||||
use webrtc::peer_connection::RTCPeerConnection;
|
||||
use webrtc::rtp_transceiver::rtp_codec::{RTCRtpCodecCapability, RTCRtpCodecParameters, RTPCodecType};
|
||||
use webrtc::rtp_transceiver::RTCPFeedback;
|
||||
|
||||
use super::config::WebRtcConfig;
|
||||
use super::rtp::OpusAudioTrack;
|
||||
use super::signaling::{ConnectionState, IceCandidate, SdpAnswer, SdpOffer};
|
||||
use super::video_track::{UniversalVideoTrack, UniversalVideoTrackConfig, VideoCodec};
|
||||
use crate::audio::OpusFrame;
|
||||
use crate::error::{AppError, Result};
|
||||
use crate::hid::datachannel::{parse_hid_message, HidChannelEvent};
|
||||
use crate::hid::HidController;
|
||||
use crate::video::encoder::registry::VideoEncoderType;
|
||||
use crate::video::format::{PixelFormat, Resolution};
|
||||
use crate::video::shared_video_pipeline::EncodedVideoFrame;
|
||||
|
||||
/// H.265/HEVC MIME type (RFC 7798)
|
||||
const MIME_TYPE_H265: &str = "video/H265";
|
||||
|
||||
/// Universal WebRTC session configuration
///
/// Bundles the transport settings (`webrtc`) with the encoder parameters
/// a single session needs; one instance per `UniversalSession`.
#[derive(Debug, Clone)]
pub struct UniversalSessionConfig {
    /// WebRTC configuration (ICE servers, DataChannel, bitrate limits)
    pub webrtc: WebRtcConfig,
    /// Video codec type (H264 / H265 / VP8 / VP9)
    pub codec: VideoEncoderType,
    /// Input resolution
    pub resolution: Resolution,
    /// Input pixel format
    pub input_format: PixelFormat,
    /// Target bitrate in kbps
    pub bitrate_kbps: u32,
    /// Target FPS
    pub fps: u32,
    /// GOP size (frames between keyframes)
    pub gop_size: u32,
    /// Enable audio track (adds an Opus track to the peer connection)
    pub audio_enabled: bool,
}
|
||||
|
||||
impl Default for UniversalSessionConfig {
    /// 720p H.264 @ 30 fps, 8 Mbps, one keyframe per second (gop == fps).
    ///
    /// NOTE(review): audio defaults to off here even though
    /// `WebRtcConfig::default()` sets `enable_audio: true` — confirm which
    /// default callers are expected to rely on.
    fn default() -> Self {
        Self {
            webrtc: WebRtcConfig::default(),
            codec: VideoEncoderType::H264,
            resolution: Resolution::HD720,
            input_format: PixelFormat::Mjpeg,
            bitrate_kbps: 8000,
            fps: 30,
            gop_size: 30,
            audio_enabled: false,
        }
    }
}
|
||||
|
||||
impl UniversalSessionConfig {
|
||||
/// Create config for specific codec
|
||||
pub fn with_codec(codec: VideoEncoderType) -> Self {
|
||||
Self {
|
||||
codec,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert VideoEncoderType to VideoCodec
///
/// The mapping is 1:1; the exhaustive `match` makes the compiler flag this
/// function whenever a new encoder variant is added.
fn encoder_type_to_video_codec(encoder_type: VideoEncoderType) -> VideoCodec {
    match encoder_type {
        VideoEncoderType::H264 => VideoCodec::H264,
        VideoEncoderType::H265 => VideoCodec::H265,
        VideoEncoderType::VP8 => VideoCodec::VP8,
        VideoEncoderType::VP9 => VideoCodec::VP9,
    }
}
|
||||
|
||||
/// Universal WebRTC session
///
/// Receives pre-encoded video frames and sends via WebRTC.
/// Supports H264, H265, VP8, VP9 codecs.
pub struct UniversalSession {
    /// Session ID
    pub session_id: String,
    /// Video codec type
    codec: VideoEncoderType,
    /// WebRTC peer connection
    pc: Arc<RTCPeerConnection>,
    /// Video track for RTP packetization
    video_track: Arc<UniversalVideoTrack>,
    /// Opus audio track (None when the session was created without audio)
    audio_track: Option<Arc<OpusAudioTrack>>,
    /// Data channel for HID events; populated by the `on_data_channel`
    /// callback or by `create_data_channel`
    data_channel: Arc<RwLock<Option<Arc<RTCDataChannel>>>>,
    /// Connection state (watch sender; receivers observe transitions)
    state: Arc<watch::Sender<ConnectionState>>,
    /// State receiver (cloned into the background receiver tasks)
    state_rx: watch::Receiver<ConnectionState>,
    /// ICE candidates gathered locally, returned with the SDP answer
    ice_candidates: Arc<Mutex<Vec<IceCandidate>>>,
    /// HID controller reference (kept alive for DataChannel HID handling)
    hid_controller: Option<Arc<HidController>>,
    /// Video frame receiver handle (aborted on close)
    video_receiver_handle: Mutex<Option<tokio::task::JoinHandle<()>>>,
    /// Audio frame receiver handle (aborted on close)
    audio_receiver_handle: Mutex<Option<tokio::task::JoinHandle<()>>>,
    /// FPS configuration
    fps: u32,
}
|
||||
|
||||
impl UniversalSession {
|
||||
    /// Create a new universal WebRTC session
    ///
    /// Builds the media engine (registering H.265 manually when that codec
    /// is selected, since webrtc-rs's defaults omit it), assembles ICE
    /// servers from the config, creates the peer connection, attaches the
    /// video (and optional Opus audio) track, and installs the default
    /// event handlers.
    ///
    /// # Errors
    /// Returns `AppError::VideoError` when codec/interceptor registration,
    /// peer-connection creation, or video-track attachment fails, and
    /// `AppError::AudioError` when the audio track cannot be added.
    pub async fn new(config: UniversalSessionConfig, session_id: String) -> Result<Self> {
        info!(
            "Creating {} session: {} @ {}x{} (audio={})",
            config.codec,
            session_id,
            config.resolution.width,
            config.resolution.height,
            config.audio_enabled
        );

        // Create video track with appropriate codec.
        // Track/stream ids embed a short session-id prefix for log correlation.
        let video_codec = encoder_type_to_video_codec(config.codec);
        let track_config = UniversalVideoTrackConfig {
            track_id: format!("video-{}", &session_id[..8.min(session_id.len())]),
            stream_id: "one-kvm-stream".to_string(),
            codec: video_codec,
            resolution: config.resolution,
            bitrate_kbps: config.bitrate_kbps,
            fps: config.fps,
        };
        let video_track = Arc::new(UniversalVideoTrack::new(track_config));

        // Create Opus audio track if enabled
        let audio_track = if config.audio_enabled {
            Some(Arc::new(OpusAudioTrack::new(
                &format!("audio-{}", &session_id[..8.min(session_id.len())]),
                "one-kvm-stream",
            )))
        } else {
            None
        };

        // Create media engine
        let mut media_engine = MediaEngine::default();

        // Register H.265/HEVC codec (not included in default codecs)
        // According to RFC 7798, H.265 uses MIME type video/H265
        if config.codec == VideoEncoderType::H265 {
            let video_rtcp_feedback = vec![
                RTCPFeedback {
                    typ: "goog-remb".to_owned(),
                    parameter: "".to_owned(),
                },
                RTCPFeedback {
                    typ: "ccm".to_owned(),
                    parameter: "fir".to_owned(),
                },
                RTCPFeedback {
                    typ: "nack".to_owned(),
                    parameter: "".to_owned(),
                },
                RTCPFeedback {
                    typ: "nack".to_owned(),
                    parameter: "pli".to_owned(),
                },
            ];

            // Register H.265 with profile-id=1 (Main profile) - matches Chrome's offer
            // Chrome sends: level-id=180;profile-id=1;tier-flag=0;tx-mode=SRST
            media_engine
                .register_codec(
                    RTCRtpCodecParameters {
                        capability: RTCRtpCodecCapability {
                            mime_type: MIME_TYPE_H265.to_owned(),
                            clock_rate: 90000,
                            channels: 0,
                            // Match browser's fmtp format for profile-id=1
                            sdp_fmtp_line: "level-id=180;profile-id=1;tier-flag=0;tx-mode=SRST".to_owned(),
                            rtcp_feedback: video_rtcp_feedback.clone(),
                        },
                        payload_type: 49, // Use same payload type as browser
                        ..Default::default()
                    },
                    RTPCodecType::Video,
                )
                .map_err(|e| AppError::VideoError(format!("Failed to register H.265 codec: {}", e)))?;

            // Also register profile-id=2 (Main 10) variant
            media_engine
                .register_codec(
                    RTCRtpCodecParameters {
                        capability: RTCRtpCodecCapability {
                            mime_type: MIME_TYPE_H265.to_owned(),
                            clock_rate: 90000,
                            channels: 0,
                            sdp_fmtp_line: "level-id=180;profile-id=2;tier-flag=0;tx-mode=SRST".to_owned(),
                            rtcp_feedback: video_rtcp_feedback,
                        },
                        payload_type: 51,
                        ..Default::default()
                    },
                    RTPCodecType::Video,
                )
                .map_err(|e| AppError::VideoError(format!("Failed to register H.265 codec (profile 2): {}", e)))?;

            info!("Registered H.265/HEVC codec for session {}", session_id);
        }

        // Default codecs (H.264, VP8, VP9, Opus, ...) are always registered.
        media_engine
            .register_default_codecs()
            .map_err(|e| AppError::VideoError(format!("Failed to register codecs: {}", e)))?;

        // Create interceptor registry (NACK/RTCP handling etc.)
        let mut registry = Registry::new();
        registry = register_default_interceptors(registry, &mut media_engine)
            .map_err(|e| AppError::VideoError(format!("Failed to register interceptors: {}", e)))?;

        // Create API
        let api = APIBuilder::new()
            .with_media_engine(media_engine)
            .with_interceptor_registry(registry)
            .build();

        // Build ICE servers
        let mut ice_servers = vec![];
        for stun_url in &config.webrtc.stun_servers {
            ice_servers.push(RTCIceServer {
                urls: vec![stun_url.clone()],
                ..Default::default()
            });
        }
        for turn in &config.webrtc.turn_servers {
            // Skip TURN servers without credentials (webrtc-rs requires them)
            if turn.username.is_empty() || turn.credential.is_empty() {
                warn!(
                    "Skipping TURN server {} - credentials required but missing",
                    turn.url
                );
                continue;
            }
            ice_servers.push(RTCIceServer {
                urls: vec![turn.url.clone()],
                username: turn.username.clone(),
                credential: turn.credential.clone(),
                ..Default::default()
            });
        }

        // Create peer connection
        let rtc_config = RTCConfiguration {
            ice_servers,
            ..Default::default()
        };

        let pc = api
            .new_peer_connection(rtc_config)
            .await
            .map_err(|e| AppError::VideoError(format!("Failed to create peer connection: {}", e)))?;

        let pc = Arc::new(pc);

        // Add video track to peer connection
        pc.add_track(video_track.as_track_local())
            .await
            .map_err(|e| AppError::VideoError(format!("Failed to add video track: {}", e)))?;

        info!(
            "{} video track added to peer connection (session {})",
            config.codec, session_id
        );

        // Add Opus audio track if enabled
        if let Some(ref audio) = audio_track {
            pc.add_track(audio.as_track_local())
                .await
                .map_err(|e| AppError::AudioError(format!("Failed to add audio track: {}", e)))?;
            info!("Opus audio track added to peer connection (session {})", session_id);
        }

        // Create state channel (receiver tasks watch it for Connected/Closed)
        let (state_tx, state_rx) = watch::channel(ConnectionState::New);

        let session = Self {
            session_id,
            codec: config.codec,
            pc,
            video_track,
            audio_track,
            data_channel: Arc::new(RwLock::new(None)),
            state: Arc::new(state_tx),
            state_rx,
            ice_candidates: Arc::new(Mutex::new(vec![])),
            hid_controller: None,
            video_receiver_handle: Mutex::new(None),
            audio_receiver_handle: Mutex::new(None),
            fps: config.fps,
        };

        // Set up event handlers
        session.setup_event_handlers().await;

        Ok(session)
    }
|
||||
|
||||
    /// Set up peer connection event handlers
    ///
    /// Installs callbacks for peer-connection state, ICE connection/gathering
    /// state, ICE candidate collection, and a logging-only data-channel
    /// handler (replaced later by `set_hid_controller` when HID is wired up).
    async fn setup_event_handlers(&self) {
        let state = self.state.clone();
        let session_id = self.session_id.clone();
        let codec = self.codec;

        // Connection state change handler: mirror webrtc-rs states into our
        // ConnectionState and broadcast through the watch channel.
        self.pc
            .on_peer_connection_state_change(Box::new(move |s: RTCPeerConnectionState| {
                let state = state.clone();
                let session_id = session_id.clone();

                Box::pin(async move {
                    let new_state = match s {
                        RTCPeerConnectionState::New => ConnectionState::New,
                        RTCPeerConnectionState::Connecting => ConnectionState::Connecting,
                        RTCPeerConnectionState::Connected => ConnectionState::Connected,
                        RTCPeerConnectionState::Disconnected => ConnectionState::Disconnected,
                        RTCPeerConnectionState::Failed => ConnectionState::Failed,
                        RTCPeerConnectionState::Closed => ConnectionState::Closed,
                        // Unspecified/unknown states are ignored.
                        _ => return,
                    };

                    info!("{} session {} state: {}", codec, session_id, new_state);
                    // send() only fails when all receivers dropped; ignore.
                    let _ = state.send(new_state);
                })
            }));

        // ICE connection state handler (log only)
        let session_id_ice = self.session_id.clone();
        self.pc
            .on_ice_connection_state_change(Box::new(move |state| {
                let session_id = session_id_ice.clone();
                Box::pin(async move {
                    info!("[ICE] Session {} connection state: {:?}", session_id, state);
                })
            }));

        // ICE gathering state handler (log only)
        let session_id_gather = self.session_id.clone();
        self.pc
            .on_ice_gathering_state_change(Box::new(move |state| {
                let session_id = session_id_gather.clone();
                Box::pin(async move {
                    debug!("[ICE] Session {} gathering state: {:?}", session_id, state);
                })
            }));

        // ICE candidate handler: collect local candidates so handle_offer
        // can return them alongside the SDP answer.
        let ice_candidates = self.ice_candidates.clone();
        self.pc
            .on_ice_candidate(Box::new(move |candidate: Option<RTCIceCandidate>| {
                let ice_candidates = ice_candidates.clone();

                Box::pin(async move {
                    if let Some(c) = candidate {
                        // NOTE(review): c.to_json() is evaluated three times
                        // below; harmless but could be computed once.
                        let candidate_str = c.to_json().map(|j| j.candidate).unwrap_or_default();
                        debug!("ICE candidate: {}", candidate_str);

                        let mut candidates = ice_candidates.lock().await;
                        candidates.push(IceCandidate {
                            candidate: candidate_str,
                            sdp_mid: c.to_json().ok().and_then(|j| j.sdp_mid),
                            sdp_mline_index: c.to_json().ok().and_then(|j| j.sdp_mline_index),
                            username_fragment: None,
                        });
                    }
                })
            }));

        // Data channel handler: store the channel and log incoming messages.
        let data_channel = self.data_channel.clone();
        self.pc
            .on_data_channel(Box::new(move |dc: Arc<RTCDataChannel>| {
                let data_channel = data_channel.clone();

                Box::pin(async move {
                    info!("Data channel opened: {}", dc.label());
                    *data_channel.write().await = Some(dc.clone());

                    dc.on_message(Box::new(move |msg: DataChannelMessage| {
                        debug!("DataChannel message: {} bytes", msg.data.len());
                        Box::pin(async {})
                    }));
                })
            }));
    }
|
||||
|
||||
    /// Set HID controller for DataChannel HID processing
    ///
    /// Re-registers the peer connection's `on_data_channel` callback,
    /// replacing the logging-only handler installed by
    /// `setup_event_handlers`, so incoming DataChannel messages are parsed
    /// as HID events and forwarded to the given controller. The controller
    /// is also stored on the session to keep it alive.
    pub fn set_hid_controller(&mut self, hid: Arc<HidController>) {
        let hid_clone = hid.clone();
        let data_channel = self.data_channel.clone();

        self.pc
            .on_data_channel(Box::new(move |dc: Arc<RTCDataChannel>| {
                let data_channel = data_channel.clone();
                let hid = hid_clone.clone();

                Box::pin(async move {
                    info!("Data channel with HID support: {}", dc.label());
                    // Remember the channel so the session can send on it later.
                    *data_channel.write().await = Some(dc.clone());

                    dc.on_message(Box::new(move |msg: DataChannelMessage| {
                        let hid = hid.clone();

                        Box::pin(async move {
                            // Unparseable payloads are ignored (best-effort input path);
                            // HID send failures are logged at debug and dropped.
                            if let Some(event) = parse_hid_message(&msg.data) {
                                match event {
                                    HidChannelEvent::Keyboard(kb_event) => {
                                        if let Err(e) = hid.send_keyboard(kb_event).await {
                                            debug!("Failed to send keyboard event: {}", e);
                                        }
                                    }
                                    HidChannelEvent::Mouse(ms_event) => {
                                        if let Err(e) = hid.send_mouse(ms_event).await {
                                            debug!("Failed to send mouse event: {}", e);
                                        }
                                    }
                                }
                            }
                        })
                    }));
                })
            }));

        self.hid_controller = Some(hid);
    }
|
||||
|
||||
/// Create data channel for HID events
|
||||
pub async fn create_data_channel(&self, label: &str) -> Result<()> {
|
||||
let dc = self
|
||||
.pc
|
||||
.create_data_channel(label, None)
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to create data channel: {}", e)))?;
|
||||
|
||||
*self.data_channel.write().await = Some(dc);
|
||||
info!("Data channel '{}' created", label);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Start receiving encoded video frames from shared pipeline
|
||||
///
|
||||
/// The `on_connected` callback is called when ICE connection is established,
|
||||
/// allowing the caller to request a keyframe at the right time.
|
||||
pub async fn start_from_video_pipeline<F>(
|
||||
&self,
|
||||
mut frame_rx: broadcast::Receiver<EncodedVideoFrame>,
|
||||
on_connected: F,
|
||||
)
|
||||
where
|
||||
F: FnOnce() + Send + 'static,
|
||||
{
|
||||
info!("Starting {} session {} with shared encoder", self.codec, self.session_id);
|
||||
|
||||
let video_track = self.video_track.clone();
|
||||
let mut state_rx = self.state_rx.clone();
|
||||
let session_id = self.session_id.clone();
|
||||
let _fps = self.fps;
|
||||
let expected_codec = self.codec;
|
||||
|
||||
let handle = tokio::spawn(async move {
|
||||
info!("Video receiver waiting for connection for session {}", session_id);
|
||||
|
||||
// Wait for Connected state before sending frames
|
||||
loop {
|
||||
let current_state = *state_rx.borrow();
|
||||
if current_state == ConnectionState::Connected {
|
||||
break;
|
||||
}
|
||||
if matches!(current_state, ConnectionState::Closed | ConnectionState::Failed) {
|
||||
info!("Session {} closed before connecting", session_id);
|
||||
return;
|
||||
}
|
||||
if state_rx.changed().await.is_err() {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
info!("Video receiver started for session {} (ICE connected)", session_id);
|
||||
|
||||
// Request keyframe now that connection is established
|
||||
on_connected();
|
||||
|
||||
let mut frames_sent: u64 = 0;
|
||||
|
||||
loop {
|
||||
tokio::select! {
|
||||
biased;
|
||||
|
||||
result = state_rx.changed() => {
|
||||
if result.is_err() {
|
||||
break;
|
||||
}
|
||||
let state = *state_rx.borrow();
|
||||
if matches!(state, ConnectionState::Closed | ConnectionState::Failed | ConnectionState::Disconnected) {
|
||||
info!("Session {} closed, stopping receiver", session_id);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
result = frame_rx.recv() => {
|
||||
match result {
|
||||
Ok(encoded_frame) => {
|
||||
// Verify codec matches
|
||||
let frame_codec = match encoded_frame.codec {
|
||||
VideoEncoderType::H264 => VideoEncoderType::H264,
|
||||
VideoEncoderType::H265 => VideoEncoderType::H265,
|
||||
VideoEncoderType::VP8 => VideoEncoderType::VP8,
|
||||
VideoEncoderType::VP9 => VideoEncoderType::VP9,
|
||||
};
|
||||
|
||||
if frame_codec != expected_codec {
|
||||
trace!("Skipping frame with codec {:?}, expected {:?}", frame_codec, expected_codec);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Debug log for H265 frames
|
||||
if expected_codec == VideoEncoderType::H265 {
|
||||
if encoded_frame.is_keyframe || frames_sent % 30 == 0 {
|
||||
debug!(
|
||||
"[Session-H265] Received frame #{}: size={}, keyframe={}, seq={}",
|
||||
frames_sent,
|
||||
encoded_frame.data.len(),
|
||||
encoded_frame.is_keyframe,
|
||||
encoded_frame.sequence
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Send encoded frame via RTP
|
||||
if let Err(e) = video_track
|
||||
.write_frame(&encoded_frame.data, encoded_frame.is_keyframe)
|
||||
.await
|
||||
{
|
||||
if frames_sent % 100 == 0 {
|
||||
debug!("Failed to write frame to track: {}", e);
|
||||
}
|
||||
} else {
|
||||
frames_sent += 1;
|
||||
|
||||
// Log successful H265 frame send
|
||||
if expected_codec == VideoEncoderType::H265 && (encoded_frame.is_keyframe || frames_sent % 30 == 0) {
|
||||
debug!(
|
||||
"[Session-H265] Frame #{} sent successfully",
|
||||
frames_sent
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(broadcast::error::RecvError::Lagged(n)) => {
|
||||
warn!("Session {} lagged by {} frames", session_id, n);
|
||||
}
|
||||
Err(broadcast::error::RecvError::Closed) => {
|
||||
info!("Video frame channel closed for session {}", session_id);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
info!("Video receiver stopped for session {} (sent {} frames)", session_id, frames_sent);
|
||||
});
|
||||
|
||||
*self.video_receiver_handle.lock().await = Some(handle);
|
||||
}
|
||||
|
||||
    /// Start receiving Opus audio frames
    ///
    /// Spawns a background task that waits for the ICE connection, then
    /// forwards every Opus frame from `opus_rx` to the session's audio
    /// track. Returns immediately (doing nothing) when the session was
    /// created without audio.
    pub async fn start_audio_from_opus(&self, mut opus_rx: broadcast::Receiver<OpusFrame>) {
        let audio_track = match &self.audio_track {
            Some(track) => track.clone(),
            None => {
                debug!("Audio track not enabled for session {}", self.session_id);
                return;
            }
        };

        info!("Starting audio receiver for session {}", self.session_id);

        let mut state_rx = self.state_rx.clone();
        let session_id = self.session_id.clone();

        let handle = tokio::spawn(async move {
            // Wait for Connected state before sending audio
            loop {
                let current_state = *state_rx.borrow();
                if current_state == ConnectionState::Connected {
                    break;
                }
                if matches!(current_state, ConnectionState::Closed | ConnectionState::Failed) {
                    info!("Session {} closed before audio could start", session_id);
                    return;
                }
                if state_rx.changed().await.is_err() {
                    // Watch sender dropped: the session is gone.
                    return;
                }
            }

            info!("Audio receiver started for session {} (ICE connected)", session_id);

            let mut packets_sent: u64 = 0;

            loop {
                tokio::select! {
                    // `biased` checks state transitions before draining audio.
                    biased;

                    result = state_rx.changed() => {
                        if result.is_err() {
                            break;
                        }
                        let state = *state_rx.borrow();
                        if matches!(state, ConnectionState::Closed | ConnectionState::Failed | ConnectionState::Disconnected) {
                            info!("Session {} closed, stopping audio receiver", session_id);
                            break;
                        }
                    }

                    result = opus_rx.recv() => {
                        match result {
                            Ok(opus_frame) => {
                                // 20ms at 48kHz = 960 samples
                                let samples = 960u32;
                                if let Err(e) = audio_track.write_packet(&opus_frame.data, samples).await {
                                    // Rate-limit failure logs to ~1 per 100 packets.
                                    if packets_sent % 100 == 0 {
                                        debug!("Failed to write audio packet: {}", e);
                                    }
                                } else {
                                    packets_sent += 1;
                                    trace!(
                                        "Session {} sent audio packet {}: {} bytes",
                                        session_id,
                                        packets_sent,
                                        opus_frame.data.len()
                                    );
                                }
                            }
                            Err(broadcast::error::RecvError::Lagged(n)) => {
                                // Broadcast ring overwrote packets we hadn't read.
                                warn!("Session {} audio lagged by {} packets", session_id, n);
                            }
                            Err(broadcast::error::RecvError::Closed) => {
                                info!("Opus channel closed for session {}", session_id);
                                break;
                            }
                        }
                    }
                }
            }

            info!("Audio receiver stopped for session {} (sent {} packets)", session_id, packets_sent);
        });

        *self.audio_receiver_handle.lock().await = Some(handle);
    }
|
||||
|
||||
    /// Check if audio is enabled for this session
    ///
    /// True when the session was created with `audio_enabled` and an Opus
    /// track was attached to the peer connection.
    pub fn has_audio(&self) -> bool {
        self.audio_track.is_some()
    }
|
||||
|
||||
    /// Get codec type
    ///
    /// Returns the encoder type this session was created with; it never
    /// changes over the session's lifetime.
    pub fn codec(&self) -> VideoEncoderType {
        self.codec
    }
|
||||
|
||||
/// Handle SDP offer and create answer
|
||||
pub async fn handle_offer(&self, offer: SdpOffer) -> Result<SdpAnswer> {
|
||||
// Log offer for debugging H.265 codec negotiation
|
||||
if self.codec == VideoEncoderType::H265 {
|
||||
let has_h265 = offer.sdp.to_lowercase().contains("h265")
|
||||
|| offer.sdp.to_lowercase().contains("hevc");
|
||||
info!(
|
||||
"[SDP] Session {} offer contains H.265: {}",
|
||||
self.session_id,
|
||||
has_h265
|
||||
);
|
||||
if !has_h265 {
|
||||
warn!("[SDP] Browser offer does not include H.265 codec! Session may fail.");
|
||||
}
|
||||
}
|
||||
|
||||
let sdp = RTCSessionDescription::offer(offer.sdp)
|
||||
.map_err(|e| AppError::VideoError(format!("Invalid SDP offer: {}", e)))?;
|
||||
|
||||
self.pc
|
||||
.set_remote_description(sdp)
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to set remote description: {}", e)))?;
|
||||
|
||||
let answer = self
|
||||
.pc
|
||||
.create_answer(None)
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to create answer: {}", e)))?;
|
||||
|
||||
// Log answer for debugging
|
||||
if self.codec == VideoEncoderType::H265 {
|
||||
let has_h265 = answer.sdp.to_lowercase().contains("h265")
|
||||
|| answer.sdp.to_lowercase().contains("hevc");
|
||||
info!(
|
||||
"[SDP] Session {} answer contains H.265: {}",
|
||||
self.session_id,
|
||||
has_h265
|
||||
);
|
||||
if !has_h265 {
|
||||
warn!("[SDP] Answer does not include H.265! Codec negotiation may have failed.");
|
||||
}
|
||||
}
|
||||
|
||||
self.pc
|
||||
.set_local_description(answer.clone())
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to set local description: {}", e)))?;
|
||||
|
||||
// Wait for ICE candidates
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
|
||||
|
||||
let candidates = self.ice_candidates.lock().await.clone();
|
||||
Ok(SdpAnswer::with_candidates(answer.sdp, candidates))
|
||||
}
|
||||
|
||||
/// Add ICE candidate
|
||||
pub async fn add_ice_candidate(&self, candidate: IceCandidate) -> Result<()> {
|
||||
use webrtc::ice_transport::ice_candidate::RTCIceCandidateInit;
|
||||
|
||||
let init = RTCIceCandidateInit {
|
||||
candidate: candidate.candidate,
|
||||
sdp_mid: candidate.sdp_mid,
|
||||
sdp_mline_index: candidate.sdp_mline_index,
|
||||
username_fragment: candidate.username_fragment,
|
||||
};
|
||||
|
||||
self.pc
|
||||
.add_ice_candidate(init)
|
||||
.await
|
||||
.map_err(|e| AppError::VideoError(format!("Failed to add ICE candidate: {}", e)))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get current connection state
///
/// Returns the most recently published value on the state watch channel.
pub fn state(&self) -> ConnectionState {
    *self.state_rx.borrow()
}

/// Subscribe to state changes
///
/// Each caller receives an independent `watch::Receiver` that observes
/// subsequent `ConnectionState` transitions.
pub fn state_watch(&self) -> watch::Receiver<ConnectionState> {
    self.state_rx.clone()
}
|
||||
|
||||
/// Close the session
///
/// Aborts the video/audio forwarding tasks first so nothing writes to the
/// tracks while the peer connection shuts down, then closes the peer
/// connection and publishes the final `Closed` state.
///
/// # Errors
/// Returns `AppError::VideoError` if closing the peer connection fails;
/// the forwarding tasks are already aborted by that point.
pub async fn close(&self) -> Result<()> {
    // Stop video receiver
    if let Some(handle) = self.video_receiver_handle.lock().await.take() {
        handle.abort();
    }

    // Stop audio receiver
    if let Some(handle) = self.audio_receiver_handle.lock().await.take() {
        handle.abort();
    }

    // Close peer connection
    self.pc
        .close()
        .await
        .map_err(|e| AppError::VideoError(format!("Failed to close peer connection: {}", e)))?;

    // Receivers may already be gone; a send error here is harmless.
    let _ = self.state.send(ConnectionState::Closed);

    info!("{} session {} closed", self.codec, self.session_id);
    Ok(())
}
|
||||
}
|
||||
|
||||
/// Session info for listing
///
/// Lightweight, cloneable snapshot of a session for listing APIs; it does
/// not keep the session alive.
#[derive(Debug, Clone)]
pub struct UniversalSessionInfo {
    /// Unique identifier of the session.
    pub session_id: String,
    /// Video encoder type negotiated for the session.
    pub codec: VideoEncoderType,
    /// Creation instant (monotonic clock), usable for uptime computation.
    pub created_at: std::time::Instant,
    /// Human-readable connection state at snapshot time.
    pub state: String,
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // The default session config should select the universally supported
    // H.264 codec and 720p resolution.
    #[test]
    fn test_universal_session_config_default() {
        let config = UniversalSessionConfig::default();
        assert_eq!(config.codec, VideoEncoderType::H264);
        assert_eq!(config.resolution, Resolution::HD720);
    }

    // The encoder-type -> WebRTC-codec mapping must be a straight
    // one-to-one correspondence for every supported codec.
    #[test]
    fn test_encoder_type_to_video_codec() {
        assert_eq!(encoder_type_to_video_codec(VideoEncoderType::H264), VideoCodec::H264);
        assert_eq!(encoder_type_to_video_codec(VideoEncoderType::H265), VideoCodec::H265);
        assert_eq!(encoder_type_to_video_codec(VideoEncoderType::VP8), VideoCodec::VP8);
        assert_eq!(encoder_type_to_video_codec(VideoEncoderType::VP9), VideoCodec::VP9);
    }
}
|
||||
626
src/webrtc/video_track.rs
Normal file
626
src/webrtc/video_track.rs
Normal file
@@ -0,0 +1,626 @@
|
||||
//! Universal video track for WebRTC streaming
|
||||
//!
|
||||
//! Supports multiple codecs: H264, H265, VP8, VP9
|
||||
//!
|
||||
//! # Architecture
|
||||
//!
|
||||
//! ```text
|
||||
//! Encoded Frame (H264/H265/VP8/VP9)
|
||||
//! |
|
||||
//! v
|
||||
//! UniversalVideoTrack
|
||||
//! - H264/VP8/VP9: TrackLocalStaticSample (built-in payloader)
|
||||
//! - H265: TrackLocalStaticRTP (rtp crate HevcPayloader)
|
||||
//! |
|
||||
//! v
|
||||
//! WebRTC PeerConnection
|
||||
//! ```
|
||||
|
||||
use bytes::Bytes;
|
||||
use std::io::Cursor;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tokio::sync::Mutex;
|
||||
use tracing::{debug, trace, warn};
|
||||
use webrtc::media::io::h264_reader::H264Reader;
|
||||
use webrtc::media::Sample;
|
||||
use webrtc::rtp_transceiver::rtp_codec::RTCRtpCodecCapability;
|
||||
use webrtc::track::track_local::track_local_static_rtp::TrackLocalStaticRTP;
|
||||
use webrtc::track::track_local::track_local_static_sample::TrackLocalStaticSample;
|
||||
use webrtc::track::track_local::{TrackLocal, TrackLocalWriter};
|
||||
|
||||
// Use our custom H265Payloader that handles ALL NAL types correctly
|
||||
// The rtp crate's HevcPayloader has bugs:
|
||||
// 1. It drops the IDR frame after emitting the AP packet
|
||||
// 2. It ignores NAL type 20 (IDR_N_LP)
|
||||
use super::h265_payloader::H265Payloader;
|
||||
|
||||
use crate::error::Result;
|
||||
use crate::video::format::Resolution;
|
||||
|
||||
/// Default MTU for RTP packets
|
||||
const RTP_MTU: usize = 1200;
|
||||
|
||||
/// Video codec type for WebRTC
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum VideoCodec {
    /// H.264/AVC
    H264,
    /// H.265/HEVC
    H265,
    /// VP8
    VP8,
    /// VP9
    VP9,
}

impl VideoCodec {
    /// MIME type used for this codec in SDP negotiation.
    pub fn mime_type(&self) -> &'static str {
        match self {
            Self::H264 => "video/H264",
            Self::H265 => "video/H265",
            Self::VP8 => "video/VP8",
            Self::VP9 => "video/VP9",
        }
    }

    /// RTP clock rate; video always runs at 90 kHz regardless of codec.
    pub fn clock_rate(&self) -> u32 {
        90000
    }

    /// Default dynamic RTP payload type assigned to this codec.
    pub fn default_payload_type(&self) -> u8 {
        match self {
            Self::H264 => 96,
            Self::VP8 => 97,
            Self::VP9 => 98,
            Self::H265 => 99,
        }
    }

    /// Codec-specific `a=fmtp` parameter string for the SDP.
    pub fn sdp_fmtp(&self) -> String {
        match self {
            Self::H264 => {
                "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f".to_owned()
            }
            // Match Chrome's H.265 fmtp format: level-id=180 (Level 6.0),
            // profile-id=1 (Main), tier-flag=0, tx-mode=SRST
            Self::H265 => "level-id=180;profile-id=1;tier-flag=0;tx-mode=SRST".to_owned(),
            Self::VP8 => String::new(),
            Self::VP9 => "profile-id=0".to_owned(),
        }
    }

    /// Human-readable codec name for logs and UI.
    pub fn display_name(&self) -> &'static str {
        match self {
            Self::H264 => "H.264",
            Self::H265 => "H.265/HEVC",
            Self::VP8 => "VP8",
            Self::VP9 => "VP9",
        }
    }
}

impl std::fmt::Display for VideoCodec {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.display_name())
    }
}
|
||||
|
||||
/// Universal video track configuration
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct UniversalVideoTrackConfig {
|
||||
/// Track ID
|
||||
pub track_id: String,
|
||||
/// Stream ID
|
||||
pub stream_id: String,
|
||||
/// Video codec
|
||||
pub codec: VideoCodec,
|
||||
/// Resolution
|
||||
pub resolution: Resolution,
|
||||
/// Target bitrate in kbps
|
||||
pub bitrate_kbps: u32,
|
||||
/// Frames per second
|
||||
pub fps: u32,
|
||||
}
|
||||
|
||||
impl Default for UniversalVideoTrackConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
track_id: "video0".to_string(),
|
||||
stream_id: "one-kvm-stream".to_string(),
|
||||
codec: VideoCodec::H264,
|
||||
resolution: Resolution::HD720,
|
||||
bitrate_kbps: 8000,
|
||||
fps: 30,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl UniversalVideoTrackConfig {
|
||||
/// Create H264 config
|
||||
pub fn h264(resolution: Resolution, bitrate_kbps: u32, fps: u32) -> Self {
|
||||
Self {
|
||||
codec: VideoCodec::H264,
|
||||
resolution,
|
||||
bitrate_kbps,
|
||||
fps,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
/// Create H265 config
|
||||
pub fn h265(resolution: Resolution, bitrate_kbps: u32, fps: u32) -> Self {
|
||||
Self {
|
||||
codec: VideoCodec::H265,
|
||||
resolution,
|
||||
bitrate_kbps,
|
||||
fps,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
/// Create VP8 config
|
||||
pub fn vp8(resolution: Resolution, bitrate_kbps: u32, fps: u32) -> Self {
|
||||
Self {
|
||||
codec: VideoCodec::VP8,
|
||||
resolution,
|
||||
bitrate_kbps,
|
||||
fps,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
/// Create VP9 config
|
||||
pub fn vp9(resolution: Resolution, bitrate_kbps: u32, fps: u32) -> Self {
|
||||
Self {
|
||||
codec: VideoCodec::VP9,
|
||||
resolution,
|
||||
bitrate_kbps,
|
||||
fps,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Track statistics
///
/// Monotonic counters accumulated over the lifetime of the track.
#[derive(Debug, Clone, Default)]
pub struct VideoTrackStats {
    /// Frames sent
    pub frames_sent: u64,
    /// Bytes sent
    pub bytes_sent: u64,
    /// Keyframes sent
    pub keyframes_sent: u64,
    /// Errors
    pub errors: u64,
}

/// Cached codec parameters for H264/H265
///
/// Kept so parameter sets can be re-injected ahead of an IDR frame when
/// the encoder did not emit them in the same access unit.
#[derive(Debug, Default)]
struct CachedParams {
    /// H264: SPS, H265: VPS
    #[allow(dead_code)]
    vps: Option<Bytes>,
    /// SPS (both H264 and H265)
    sps: Option<Bytes>,
    /// PPS (both H264 and H265)
    pps: Option<Bytes>,
}
|
||||
|
||||
/// Track type wrapper to support different underlying track implementations
enum TrackType {
    /// Sample-based track with built-in payloader (H264, VP8, VP9)
    Sample(Arc<TrackLocalStaticSample>),
    /// RTP-based track with custom payloader (H265)
    Rtp(Arc<TrackLocalStaticRTP>),
}

/// H265-specific RTP state
///
/// Sequence number and timestamp are advanced under a mutex in
/// `send_h265_rtp`; they start at random values per RTP convention.
struct H265RtpState {
    /// H265 payloader (custom implementation that handles all NAL types)
    payloader: H265Payloader,
    /// Current sequence number
    sequence_number: u16,
    /// Current RTP timestamp
    timestamp: u32,
    /// Timestamp increment per frame (90000 / fps)
    timestamp_increment: u32,
}
|
||||
|
||||
/// Universal video track supporting H264/H265/VP8/VP9
///
/// Wraps either a sample-based or an RTP-based webrtc-rs track depending
/// on codec (see `TrackType`) and accumulates per-track statistics.
pub struct UniversalVideoTrack {
    /// Underlying WebRTC track (Sample or RTP based)
    track: TrackType,
    /// Codec type
    codec: VideoCodec,
    /// Configuration
    config: UniversalVideoTrackConfig,
    /// Statistics
    stats: Mutex<VideoTrackStats>,
    /// Cached parameters for H264/H265
    cached_params: Mutex<CachedParams>,
    /// H265 RTP state (only used for H265)
    h265_state: Option<Mutex<H265RtpState>>,
}
|
||||
|
||||
impl UniversalVideoTrack {
|
||||
/// Create a new universal video track
///
/// H265 gets a raw-RTP track plus a custom payloader state (the built-in
/// sample payloader does not handle HEVC); all other codecs use the
/// sample-based track with webrtc-rs's built-in payloader.
pub fn new(config: UniversalVideoTrackConfig) -> Self {
    let codec_capability = RTCRtpCodecCapability {
        mime_type: config.codec.mime_type().to_string(),
        clock_rate: config.codec.clock_rate(),
        channels: 0,
        sdp_fmtp_line: config.codec.sdp_fmtp(),
        rtcp_feedback: vec![],
    };

    // Use different track types for different codecs
    let (track, h265_state) = if config.codec == VideoCodec::H265 {
        // H265 uses TrackLocalStaticRTP with official rtp crate HevcPayloader
        let rtp_track = Arc::new(TrackLocalStaticRTP::new(
            codec_capability,
            config.track_id.clone(),
            config.stream_id.clone(),
        ));

        // Create H265 RTP state with custom H265Payloader.
        // Random initial sequence number / timestamp per RTP convention.
        let h265_state = H265RtpState {
            payloader: H265Payloader::new(),
            sequence_number: rand::random::<u16>(),
            timestamp: rand::random::<u32>(),
            // 90 kHz clock divided by fps; max(1) guards against fps == 0.
            timestamp_increment: 90000 / config.fps.max(1),
        };

        (TrackType::Rtp(rtp_track), Some(Mutex::new(h265_state)))
    } else {
        // H264/VP8/VP9 use TrackLocalStaticSample with built-in payloader
        let sample_track = Arc::new(TrackLocalStaticSample::new(
            codec_capability,
            config.track_id.clone(),
            config.stream_id.clone(),
        ));

        (TrackType::Sample(sample_track), None)
    };

    Self {
        track,
        codec: config.codec,
        config,
        stats: Mutex::new(VideoTrackStats::default()),
        cached_params: Mutex::new(CachedParams::default()),
        h265_state,
    }
}
|
||||
|
||||
/// Get track as TrackLocal for peer connection
|
||||
pub fn as_track_local(&self) -> Arc<dyn TrackLocal + Send + Sync> {
|
||||
match &self.track {
|
||||
TrackType::Sample(t) => t.clone(),
|
||||
TrackType::Rtp(t) => t.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get codec type
pub fn codec(&self) -> VideoCodec {
    self.codec
}

/// Get configuration
///
/// Borrowed view of the config the track was created with.
pub fn config(&self) -> &UniversalVideoTrackConfig {
    &self.config
}

/// Get current statistics
///
/// Returns a snapshot; counters keep advancing after the call.
pub async fn stats(&self) -> VideoTrackStats {
    self.stats.lock().await.clone()
}
|
||||
|
||||
/// Write an encoded frame to the track
///
/// Handles codec-specific processing:
/// - H264/H265: NAL unit parsing, parameter caching
/// - VP8/VP9: Direct frame sending
///
/// Empty input is silently accepted as a no-op.
pub async fn write_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> {
    if data.is_empty() {
        return Ok(());
    }

    // Dispatch on the codec chosen at construction time.
    match self.codec {
        VideoCodec::H264 => self.write_h264_frame(data, is_keyframe).await,
        VideoCodec::H265 => self.write_h265_frame(data, is_keyframe).await,
        VideoCodec::VP8 => self.write_vp8_frame(data, is_keyframe).await,
        VideoCodec::VP9 => self.write_vp9_frame(data, is_keyframe).await,
    }
}
|
||||
|
||||
/// Write H264 frame (Annex B format)
|
||||
async fn write_h264_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> {
|
||||
let cursor = Cursor::new(data);
|
||||
let mut h264_reader = H264Reader::new(cursor, 1024 * 1024);
|
||||
|
||||
let mut nals: Vec<Bytes> = Vec::new();
|
||||
let mut has_sps = false;
|
||||
let mut has_pps = false;
|
||||
let mut has_idr = false;
|
||||
|
||||
// Parse NAL units
|
||||
while let Ok(nal) = h264_reader.next_nal() {
|
||||
if nal.data.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let nal_type = nal.data[0] & 0x1F;
|
||||
|
||||
// Skip AUD (9) and filler (12)
|
||||
if nal_type == 9 || nal_type == 12 {
|
||||
continue;
|
||||
}
|
||||
|
||||
match nal_type {
|
||||
5 => has_idr = true,
|
||||
7 => {
|
||||
has_sps = true;
|
||||
self.cached_params.lock().await.sps = Some(nal.data.clone().freeze());
|
||||
}
|
||||
8 => {
|
||||
has_pps = true;
|
||||
self.cached_params.lock().await.pps = Some(nal.data.clone().freeze());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
trace!("H264 NAL: type={} size={}", nal_type, nal.data.len());
|
||||
nals.push(nal.data.freeze());
|
||||
}
|
||||
|
||||
// Inject cached SPS/PPS before IDR if missing
|
||||
if has_idr && (!has_sps || !has_pps) {
|
||||
let mut injected: Vec<Bytes> = Vec::new();
|
||||
let params = self.cached_params.lock().await;
|
||||
|
||||
if !has_sps {
|
||||
if let Some(ref sps) = params.sps {
|
||||
debug!("Injecting cached H264 SPS");
|
||||
injected.push(sps.clone());
|
||||
}
|
||||
}
|
||||
if !has_pps {
|
||||
if let Some(ref pps) = params.pps {
|
||||
debug!("Injecting cached H264 PPS");
|
||||
injected.push(pps.clone());
|
||||
}
|
||||
}
|
||||
drop(params);
|
||||
|
||||
if !injected.is_empty() {
|
||||
injected.extend(nals);
|
||||
nals = injected;
|
||||
}
|
||||
}
|
||||
|
||||
// Send NAL units
|
||||
self.send_nals(nals, is_keyframe).await
|
||||
}
|
||||
|
||||
/// Write H265 frame (Annex B format)
///
/// Pass raw Annex B data directly to the official HevcPayloader.
/// The payloader handles NAL parsing, VPS/SPS/PPS caching, AP generation, and FU fragmentation.
async fn write_h265_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> {
    // Pass raw Annex B data directly to the official HevcPayloader
    self.send_h265_rtp(data, is_keyframe).await
}
|
||||
|
||||
/// Write VP8 frame
|
||||
async fn write_vp8_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> {
|
||||
// VP8 frames are sent directly without NAL parsing
|
||||
let sample = Sample {
|
||||
data: Bytes::copy_from_slice(data),
|
||||
duration: Duration::from_secs(1),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
match &self.track {
|
||||
TrackType::Sample(track) => {
|
||||
if let Err(e) = track.write_sample(&sample).await {
|
||||
debug!("VP8 write_sample failed: {}", e);
|
||||
}
|
||||
}
|
||||
TrackType::Rtp(_) => {
|
||||
warn!("VP8 should not use RTP track");
|
||||
}
|
||||
}
|
||||
|
||||
// Update stats
|
||||
let mut stats = self.stats.lock().await;
|
||||
stats.frames_sent += 1;
|
||||
stats.bytes_sent += data.len() as u64;
|
||||
if is_keyframe {
|
||||
stats.keyframes_sent += 1;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write VP9 frame
|
||||
async fn write_vp9_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> {
|
||||
// VP9 frames are sent directly without NAL parsing
|
||||
let sample = Sample {
|
||||
data: Bytes::copy_from_slice(data),
|
||||
duration: Duration::from_secs(1),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
match &self.track {
|
||||
TrackType::Sample(track) => {
|
||||
if let Err(e) = track.write_sample(&sample).await {
|
||||
debug!("VP9 write_sample failed: {}", e);
|
||||
}
|
||||
}
|
||||
TrackType::Rtp(_) => {
|
||||
warn!("VP9 should not use RTP track");
|
||||
}
|
||||
}
|
||||
|
||||
// Update stats
|
||||
let mut stats = self.stats.lock().await;
|
||||
stats.frames_sent += 1;
|
||||
stats.bytes_sent += data.len() as u64;
|
||||
if is_keyframe {
|
||||
stats.keyframes_sent += 1;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Send NAL units as samples (H264 only)
///
/// Writes each NAL as an individual `Sample`; write errors are logged and
/// skipped so one bad NAL does not abort the frame. Stats count the whole
/// call as one frame.
///
/// NOTE(review): every sample uses `Duration::from_secs(1)`, which the
/// sample track presumably uses to advance the RTP timestamp — confirm
/// this matches the intended frame pacing rather than 1/fps.
async fn send_nals(&self, nals: Vec<Bytes>, is_keyframe: bool) -> Result<()> {
    let mut total_bytes = 0u64;

    match &self.track {
        TrackType::Sample(track) => {
            for nal_data in nals {
                let sample = Sample {
                    data: nal_data.clone(),
                    duration: Duration::from_secs(1),
                    ..Default::default()
                };

                if let Err(e) = track.write_sample(&sample).await {
                    debug!("NAL write_sample failed: {}", e);
                }

                total_bytes += nal_data.len() as u64;
            }
        }
        TrackType::Rtp(_) => {
            warn!("send_nals should not be called for RTP track (H265)");
        }
    }

    // Update stats
    let mut stats = self.stats.lock().await;
    stats.frames_sent += 1;
    stats.bytes_sent += total_bytes;
    if is_keyframe {
        stats.keyframes_sent += 1;
    }

    Ok(())
}
|
||||
|
||||
/// Send H265 NAL units via custom H265Payloader
///
/// Fragments the Annex B frame into RTP payloads under the state mutex,
/// reserving a contiguous sequence-number range and one timestamp step,
/// then releases the lock before doing network writes. The marker bit is
/// set only on the final packet of the frame.
async fn send_h265_rtp(&self, data: &[u8], is_keyframe: bool) -> Result<()> {
    let rtp_track = match &self.track {
        TrackType::Rtp(t) => t,
        TrackType::Sample(_) => {
            warn!("send_h265_rtp called but track is Sample type");
            return Ok(());
        }
    };

    let h265_state = match &self.h265_state {
        Some(s) => s,
        None => {
            warn!("send_h265_rtp called but h265_state is None");
            return Ok(());
        }
    };

    // Minimize lock hold time: only hold lock during payload generation and state update
    let (payloads, timestamp, seq_start, num_payloads) = {
        let mut state = h265_state.lock().await;
        let payload = Bytes::copy_from_slice(data);

        // Use custom H265Payloader to fragment the data
        let payloads = state.payloader.payload(RTP_MTU, &payload);

        if payloads.is_empty() {
            return Ok(());
        }

        let timestamp = state.timestamp;
        let num_payloads = payloads.len();
        let seq_start = state.sequence_number;

        // Pre-increment sequence number and timestamp
        state.sequence_number = state.sequence_number.wrapping_add(num_payloads as u16);
        state.timestamp = state.timestamp.wrapping_add(state.timestamp_increment);

        (payloads, timestamp, seq_start, num_payloads)
    }; // Lock released here, before network I/O

    let mut total_bytes = 0u64;

    // Send RTP packets without holding the lock
    for (i, payload_data) in payloads.into_iter().enumerate() {
        let seq = seq_start.wrapping_add(i as u16);
        // Marker bit signals the last packet of the access unit.
        let is_last = i == num_payloads - 1;

        // Build RTP packet.
        // NOTE(review): payload_type 49 and ssrc 0 appear to rely on the
        // track/sender rewriting them per negotiated binding — confirm.
        let packet = rtp::packet::Packet {
            header: rtp::header::Header {
                version: 2,
                padding: false,
                extension: false,
                marker: is_last,
                payload_type: 49,
                sequence_number: seq,
                timestamp,
                ssrc: 0,
                ..Default::default()
            },
            payload: payload_data.clone(),
        };

        if let Err(e) = rtp_track.write_rtp(&packet).await {
            trace!("H265 write_rtp failed: {}", e);
        }

        total_bytes += payload_data.len() as u64;
    }

    // Update stats
    let mut stats = self.stats.lock().await;
    stats.frames_sent += 1;
    stats.bytes_sent += total_bytes;
    if is_keyframe {
        stats.keyframes_sent += 1;
    }

    Ok(())
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Static codec metadata: MIME types and the fixed 90 kHz video clock.
    #[test]
    fn test_video_codec_properties() {
        assert_eq!(VideoCodec::H264.mime_type(), "video/H264");
        assert_eq!(VideoCodec::H265.mime_type(), "video/H265");
        assert_eq!(VideoCodec::VP8.mime_type(), "video/VP8");
        assert_eq!(VideoCodec::VP9.mime_type(), "video/VP9");

        assert_eq!(VideoCodec::H264.clock_rate(), 90000);
        assert_eq!(VideoCodec::H265.clock_rate(), 90000);
    }

    // Per-codec constructor helpers must set the matching codec variant
    // and carry the supplied bitrate through.
    #[test]
    fn test_config_creation() {
        let h264_config = UniversalVideoTrackConfig::h264(Resolution::HD1080, 4000, 30);
        assert_eq!(h264_config.codec, VideoCodec::H264);
        assert_eq!(h264_config.bitrate_kbps, 4000);

        let h265_config = UniversalVideoTrackConfig::h265(Resolution::HD720, 2000, 30);
        assert_eq!(h265_config.codec, VideoCodec::H265);
    }
}
|
||||
938
src/webrtc/webrtc_streamer.rs
Normal file
938
src/webrtc/webrtc_streamer.rs
Normal file
@@ -0,0 +1,938 @@
|
||||
//! WebRTC Streamer - High-level WebRTC streaming manager
|
||||
//!
|
||||
//! This module provides a unified interface for WebRTC streaming mode,
|
||||
//! supporting multiple video codecs (H264, VP8, VP9, H265) and audio (Opus).
|
||||
//!
|
||||
//! # Architecture
|
||||
//!
|
||||
//! ```text
|
||||
//! WebRtcStreamer
|
||||
//! |
|
||||
//! +-- Video Pipeline
|
||||
//! | +-- SharedVideoPipeline (single encoder for all sessions)
|
||||
//! | +-- H264 Encoder
|
||||
//! | +-- H265 Encoder (hardware only)
|
||||
//! | +-- VP8 Encoder (hardware only - VAAPI)
|
||||
//! | +-- VP9 Encoder (hardware only - VAAPI)
|
||||
//! |
|
||||
//! +-- Audio Pipeline
|
||||
//! | +-- SharedAudioPipeline
|
||||
//! | +-- OpusEncoder
|
||||
//! |
|
||||
//! +-- UniversalSession[] (video + audio tracks + DataChannel)
|
||||
//! +-- UniversalVideoTrack (H264/H265/VP8/VP9)
|
||||
//! +-- Audio Track (RTP/Opus)
|
||||
//! +-- DataChannel (HID)
|
||||
//! ```
|
||||
//!
|
||||
//! # Key Features
|
||||
//!
|
||||
//! - **Single encoder**: All sessions share one video encoder
|
||||
//! - **Multi-codec support**: H264, H265, VP8, VP9
|
||||
//! - **Audio support**: Opus audio streaming via SharedAudioPipeline
|
||||
//! - **HID via DataChannel**: Keyboard/mouse events through WebRTC DataChannel
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::{broadcast, RwLock};
|
||||
use tracing::{debug, error, info, warn};
|
||||
|
||||
use crate::audio::shared_pipeline::{SharedAudioPipeline, SharedAudioPipelineConfig};
|
||||
use crate::audio::{AudioController, OpusFrame};
|
||||
use crate::error::{AppError, Result};
|
||||
use crate::hid::HidController;
|
||||
use crate::video::encoder::registry::VideoEncoderType;
|
||||
use crate::video::encoder::registry::EncoderBackend;
|
||||
use crate::video::encoder::VideoCodecType;
|
||||
use crate::video::format::{PixelFormat, Resolution};
|
||||
use crate::video::frame::VideoFrame;
|
||||
use crate::video::shared_video_pipeline::{SharedVideoPipeline, SharedVideoPipelineConfig, SharedVideoPipelineStats};
|
||||
|
||||
use super::config::{TurnServer, WebRtcConfig};
|
||||
use super::signaling::{ConnectionState, IceCandidate, SdpAnswer, SdpOffer};
|
||||
use super::universal_session::{UniversalSession, UniversalSessionConfig};
|
||||
|
||||
/// WebRTC streamer configuration
#[derive(Debug, Clone)]
pub struct WebRtcStreamerConfig {
    /// WebRTC configuration (STUN/TURN servers, etc.)
    pub webrtc: WebRtcConfig,
    /// Video codec type
    pub video_codec: VideoCodecType,
    /// Input resolution
    pub resolution: Resolution,
    /// Input pixel format
    pub input_format: PixelFormat,
    /// Target bitrate in kbps
    pub bitrate_kbps: u32,
    /// Target FPS
    pub fps: u32,
    /// GOP size (keyframe interval, in frames)
    pub gop_size: u32,
    /// Enable audio (reserved)
    pub audio_enabled: bool,
    /// Encoder backend (None = auto select best available)
    pub encoder_backend: Option<EncoderBackend>,
}
|
||||
|
||||
impl Default for WebRtcStreamerConfig {
    /// Sensible defaults: H.264 at 720p/30fps from an MJPEG capture,
    /// 8 Mbps target, one keyframe per second (gop_size == fps), audio
    /// off, and an auto-selected encoder backend.
    fn default() -> Self {
        Self {
            webrtc: WebRtcConfig::default(),
            video_codec: VideoCodecType::H264,
            resolution: Resolution::HD720,
            input_format: PixelFormat::Mjpeg,
            bitrate_kbps: 8000,
            fps: 30,
            gop_size: 30,
            audio_enabled: false,
            encoder_backend: None,
        }
    }
}
|
||||
|
||||
/// WebRTC streamer statistics
#[derive(Debug, Clone, Default)]
pub struct WebRtcStreamerStats {
    /// Number of active sessions
    pub session_count: usize,
    /// Current video codec
    pub video_codec: String,
    /// Video pipeline stats (if available)
    pub video_pipeline: Option<VideoPipelineStats>,
    /// Audio enabled
    pub audio_enabled: bool,
    /// Audio pipeline stats (if available)
    pub audio_pipeline: Option<AudioPipelineStats>,
}

/// Video pipeline statistics
///
/// Counters and gauges exported from the shared video pipeline.
#[derive(Debug, Clone, Default)]
pub struct VideoPipelineStats {
    /// Total frames successfully encoded.
    pub frames_encoded: u64,
    /// Frames dropped before/after encoding.
    pub frames_dropped: u64,
    /// Total encoded output bytes.
    pub bytes_encoded: u64,
    /// Keyframes among the encoded frames.
    pub keyframes_encoded: u64,
    /// Rolling average encode latency, milliseconds.
    pub avg_encode_time_ms: f32,
    /// Current output frame rate.
    pub current_fps: f32,
    /// Number of active pipeline subscribers.
    pub subscribers: u64,
}

/// Audio pipeline statistics
///
/// Counters and gauges exported from the shared audio (Opus) pipeline.
#[derive(Debug, Clone, Default)]
pub struct AudioPipelineStats {
    /// Total audio frames successfully encoded.
    pub frames_encoded: u64,
    /// Audio frames dropped.
    pub frames_dropped: u64,
    /// Total encoded output bytes.
    pub bytes_encoded: u64,
    /// Rolling average encode latency, milliseconds.
    pub avg_encode_time_ms: f32,
    /// Number of active pipeline subscribers.
    pub subscribers: u64,
}

/// Session info for listing
///
/// Cloneable snapshot of one session, used by listing endpoints.
#[derive(Debug, Clone)]
pub struct SessionInfo {
    /// Unique identifier of the session.
    pub session_id: String,
    /// Creation instant (monotonic clock).
    pub created_at: std::time::Instant,
    /// Human-readable connection state at snapshot time.
    pub state: String,
}
|
||||
|
||||
/// WebRTC Streamer
///
/// High-level manager for WebRTC streaming, supporting multiple video codecs
/// and audio streaming via Opus. All fields are behind `RwLock`s so the
/// streamer can be shared as `Arc<Self>` across request handlers.
pub struct WebRtcStreamer {
    /// Current configuration
    config: RwLock<WebRtcStreamerConfig>,

    // === Video ===
    /// Current video codec type
    video_codec: RwLock<VideoCodecType>,
    /// Universal video pipeline (for all codecs); None until first started
    video_pipeline: RwLock<Option<Arc<SharedVideoPipeline>>>,
    /// All sessions (unified management)
    sessions: Arc<RwLock<HashMap<String, Arc<UniversalSession>>>>,
    /// Video frame source
    video_frame_tx: RwLock<Option<broadcast::Sender<VideoFrame>>>,

    // === Audio ===
    /// Audio enabled flag
    audio_enabled: RwLock<bool>,
    /// Shared audio pipeline for Opus encoding
    audio_pipeline: RwLock<Option<Arc<SharedAudioPipeline>>>,
    /// Audio controller reference
    audio_controller: RwLock<Option<Arc<AudioController>>>,

    // === Controllers ===
    /// HID controller for DataChannel
    hid_controller: RwLock<Option<Arc<HidController>>>,
}
|
||||
|
||||
impl WebRtcStreamer {
|
||||
/// Create a new WebRTC streamer
|
||||
pub fn new() -> Arc<Self> {
|
||||
Self::with_config(WebRtcStreamerConfig::default())
|
||||
}
|
||||
|
||||
/// Create a new WebRTC streamer with configuration
|
||||
pub fn with_config(config: WebRtcStreamerConfig) -> Arc<Self> {
|
||||
Arc::new(Self {
|
||||
config: RwLock::new(config.clone()),
|
||||
video_codec: RwLock::new(config.video_codec),
|
||||
video_pipeline: RwLock::new(None),
|
||||
sessions: Arc::new(RwLock::new(HashMap::new())),
|
||||
video_frame_tx: RwLock::new(None),
|
||||
audio_enabled: RwLock::new(config.audio_enabled),
|
||||
audio_pipeline: RwLock::new(None),
|
||||
audio_controller: RwLock::new(None),
|
||||
hid_controller: RwLock::new(None),
|
||||
})
|
||||
}
|
||||
|
||||
// === Video Codec Management ===
|
||||
|
||||
/// Get current video codec type
///
/// Snapshot of the active codec; may change if `set_video_codec` runs.
pub async fn current_video_codec(&self) -> VideoCodecType {
    *self.video_codec.read().await
}
|
||||
|
||||
/// Set video codec type
///
/// Supports H264, H265, VP8, VP9. This will restart the video pipeline
/// and close all existing sessions. A no-op when the codec is unchanged.
///
/// NOTE(review): the check-then-switch sequence takes the locks one at a
/// time, so two concurrent calls could interleave — confirm callers
/// serialize codec changes.
pub async fn set_video_codec(&self, codec: VideoCodecType) -> Result<()> {
    let current = *self.video_codec.read().await;
    if current == codec {
        return Ok(());
    }

    info!("Switching video codec from {:?} to {:?}", current, codec);

    // Close all existing sessions
    self.close_all_sessions().await;

    // Stop current pipeline
    if let Some(ref pipeline) = *self.video_pipeline.read().await {
        pipeline.stop();
    }
    *self.video_pipeline.write().await = None;

    // Update codec
    *self.video_codec.write().await = codec;

    // Create new pipeline with new codec — only if a frame source is
    // already attached; otherwise the pipeline starts lazily later.
    if let Some(ref tx) = *self.video_frame_tx.read().await {
        self.ensure_video_pipeline(tx.clone()).await?;
    }

    info!("Video codec switched to {:?}", codec);
    Ok(())
}
|
||||
|
||||
/// Get list of supported video codecs
|
||||
pub fn supported_video_codecs(&self) -> Vec<VideoCodecType> {
|
||||
use crate::video::encoder::registry::EncoderRegistry;
|
||||
|
||||
let registry = EncoderRegistry::global();
|
||||
let mut codecs = vec![];
|
||||
|
||||
// H264 always available (has software fallback)
|
||||
codecs.push(VideoCodecType::H264);
|
||||
|
||||
// Check hardware codecs
|
||||
if registry.is_format_available(VideoEncoderType::H265, true) {
|
||||
codecs.push(VideoCodecType::H265);
|
||||
}
|
||||
if registry.is_format_available(VideoEncoderType::VP8, true) {
|
||||
codecs.push(VideoCodecType::VP8);
|
||||
}
|
||||
if registry.is_format_available(VideoEncoderType::VP9, true) {
|
||||
codecs.push(VideoCodecType::VP9);
|
||||
}
|
||||
|
||||
codecs
|
||||
}
|
||||
|
||||
/// Convert VideoCodecType to VideoEncoderType
///
/// The two enums mirror each other; this maps the public codec-selection
/// type onto the encoder registry's type one-to-one.
fn codec_type_to_encoder_type(codec: VideoCodecType) -> VideoEncoderType {
    match codec {
        VideoCodecType::H264 => VideoEncoderType::H264,
        VideoCodecType::H265 => VideoEncoderType::H265,
        VideoCodecType::VP8 => VideoEncoderType::VP8,
        VideoCodecType::VP9 => VideoEncoderType::VP9,
    }
}
|
||||
|
||||
/// Ensure video pipeline is initialized and running
///
/// Returns the existing pipeline when it is already running; otherwise
/// builds a new `SharedVideoPipeline` from the current config and codec
/// and starts it on a fresh subscription of `tx`.
///
/// The `video_pipeline` write lock is held for the whole function, so
/// two concurrent callers cannot create two pipelines.
async fn ensure_video_pipeline(&self, tx: broadcast::Sender<VideoFrame>) -> Result<Arc<SharedVideoPipeline>> {
    let mut pipeline_guard = self.video_pipeline.write().await;

    // Fast path: reuse an existing, still-running pipeline.
    if let Some(ref pipeline) = *pipeline_guard {
        if pipeline.is_running() {
            return Ok(pipeline.clone());
        }
    }

    // Snapshot current settings; the codec is tracked separately from
    // the rest of the config.
    let config = self.config.read().await;
    let codec = *self.video_codec.read().await;

    let pipeline_config = SharedVideoPipelineConfig {
        resolution: config.resolution,
        input_format: config.input_format,
        output_codec: Self::codec_type_to_encoder_type(codec),
        bitrate_kbps: config.bitrate_kbps,
        fps: config.fps,
        gop_size: config.gop_size,
        encoder_backend: config.encoder_backend,
    };

    info!("Creating shared video pipeline for {:?}", codec);
    let pipeline = SharedVideoPipeline::new(pipeline_config)?;
    pipeline.start(tx.subscribe()).await?;

    // Publish the new pipeline (replacing any stopped one) before the
    // write lock is released.
    *pipeline_guard = Some(pipeline.clone());
    Ok(pipeline)
}
|
||||
|
||||
// === Audio Management ===
|
||||
|
||||
/// Check if audio is enabled
|
||||
pub async fn is_audio_enabled(&self) -> bool {
|
||||
*self.audio_enabled.read().await
|
||||
}
|
||||
|
||||
/// Set audio enabled state
|
||||
pub async fn set_audio_enabled(&self, enabled: bool) -> Result<()> {
|
||||
let was_enabled = *self.audio_enabled.read().await;
|
||||
*self.audio_enabled.write().await = enabled;
|
||||
self.config.write().await.audio_enabled = enabled;
|
||||
|
||||
if enabled && !was_enabled {
|
||||
// Start audio pipeline if we have an audio controller
|
||||
if let Some(ref controller) = *self.audio_controller.read().await {
|
||||
self.start_audio_pipeline(controller.clone()).await?;
|
||||
}
|
||||
} else if !enabled && was_enabled {
|
||||
// Stop audio pipeline
|
||||
self.stop_audio_pipeline().await;
|
||||
}
|
||||
|
||||
info!("WebRTC audio enabled: {}", enabled);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Set audio controller reference
|
||||
pub async fn set_audio_controller(&self, controller: Arc<AudioController>) {
|
||||
info!("Setting audio controller for WebRTC streamer");
|
||||
*self.audio_controller.write().await = Some(controller.clone());
|
||||
|
||||
// Start audio pipeline if audio is enabled
|
||||
if *self.audio_enabled.read().await {
|
||||
if let Err(e) = self.start_audio_pipeline(controller).await {
|
||||
error!("Failed to start audio pipeline: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Start the shared audio pipeline
///
/// No-op when the pipeline is already running or when the audio
/// controller is not currently streaming (returns Ok in both cases).
async fn start_audio_pipeline(&self, controller: Arc<AudioController>) -> Result<()> {
    // Check if already running
    if let Some(ref pipeline) = *self.audio_pipeline.read().await {
        if pipeline.is_running() {
            debug!("Audio pipeline already running");
            return Ok(());
        }
    }

    // Get Opus frame receiver from audio controller
    // The receiver is intentionally unused here — the subscription only
    // serves to verify the controller is streaming; sessions create
    // their own subscriptions (see reconnect_audio_sources below).
    let _opus_rx = match controller.subscribe_opus_async().await {
        Some(rx) => rx,
        None => {
            warn!("Audio controller not streaming, cannot start audio pipeline");
            return Ok(());
        }
    };

    // Create shared audio pipeline config
    let config = SharedAudioPipelineConfig::default();
    let pipeline = SharedAudioPipeline::new(config)?;

    // Note: SharedAudioPipeline expects raw AudioFrame, but AudioController
    // already provides encoded OpusFrame. We'll pass the OpusFrame directly
    // to sessions instead of re-encoding.
    // For now, store the pipeline reference for future use
    *self.audio_pipeline.write().await = Some(pipeline);

    // Reconnect audio for all existing sessions
    self.reconnect_audio_sources().await;

    info!("WebRTC audio pipeline started");
    Ok(())
}
|
||||
|
||||
/// Stop the shared audio pipeline
|
||||
async fn stop_audio_pipeline(&self) {
|
||||
if let Some(ref pipeline) = *self.audio_pipeline.read().await {
|
||||
pipeline.stop();
|
||||
}
|
||||
*self.audio_pipeline.write().await = None;
|
||||
info!("WebRTC audio pipeline stopped");
|
||||
}
|
||||
|
||||
/// Subscribe to encoded Opus frames (for sessions)
|
||||
pub async fn subscribe_opus(&self) -> Option<broadcast::Receiver<OpusFrame>> {
|
||||
if let Some(ref controller) = *self.audio_controller.read().await {
|
||||
controller.subscribe_opus_async().await
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Reconnect audio source for all existing sessions
/// Call this after audio controller restarts (e.g., quality change)
pub async fn reconnect_audio_sources(&self) {
    // Without a controller there is nothing to reconnect.
    if let Some(ref controller) = *self.audio_controller.read().await {
        let sessions = self.sessions.read().await;
        for (session_id, session) in sessions.iter() {
            // Only sessions that negotiated an audio track are re-fed;
            // each one gets its own fresh Opus subscription.
            if session.has_audio() {
                info!("Reconnecting audio for session {}", session_id);
                if let Some(rx) = controller.subscribe_opus_async().await {
                    session.start_audio_from_opus(rx).await;
                }
            }
        }
    }
}
|
||||
|
||||
// === Video Frame Source ===
|
||||
|
||||
/// Set video frame source
///
/// Registers `tx` as the broadcast source of captured frames. If a
/// pipeline already exists it is started (or restarted) against a
/// fresh subscription on the new sender; otherwise the sender is kept
/// for use when the pipeline is first created.
pub async fn set_video_source(&self, tx: broadcast::Sender<VideoFrame>) {
    info!(
        "Setting video source for WebRTC streamer (receiver_count={})",
        tx.receiver_count()
    );
    *self.video_frame_tx.write().await = Some(tx.clone());

    // Start or restart pipeline if it exists
    if let Some(ref pipeline) = *self.video_pipeline.read().await {
        if !pipeline.is_running() {
            info!("Starting video pipeline with new frame source");
            if let Err(e) = pipeline.start(tx.subscribe()).await {
                error!("Failed to start video pipeline: {}", e);
            }
        } else {
            // Pipeline is already running but may have old frame source
            // We need to restart it with the new frame source
            info!("Video pipeline already running, restarting with new frame source");
            pipeline.stop();
            // Brief pause — presumably to let the encode task observe
            // the stop before restarting; confirm against
            // SharedVideoPipeline's stop semantics.
            tokio::time::sleep(tokio::time::Duration::from_millis(50)).await;
            if let Err(e) = pipeline.start(tx.subscribe()).await {
                error!("Failed to restart video pipeline: {}", e);
            }
        }
    } else {
        info!("No video pipeline exists yet, frame source will be used when pipeline is created");
    }
}
|
||||
|
||||
/// Prepare for configuration change
|
||||
///
|
||||
/// This stops the encoding pipeline and closes all sessions.
|
||||
pub async fn prepare_for_config_change(&self) {
|
||||
// Stop pipeline and close sessions - will be recreated on next session
|
||||
if let Some(ref pipeline) = *self.video_pipeline.read().await {
|
||||
pipeline.stop();
|
||||
}
|
||||
*self.video_pipeline.write().await = None;
|
||||
self.close_all_sessions().await;
|
||||
}
|
||||
|
||||
/// Reconnect video source after configuration change
///
/// Thin wrapper over `set_video_source`: re-registers the frame sender
/// and (re)starts the pipeline against it.
pub async fn reconnect_video_source(&self, tx: broadcast::Sender<VideoFrame>) {
    self.set_video_source(tx).await;
}
|
||||
|
||||
// === Configuration ===
|
||||
|
||||
/// Update video configuration
|
||||
///
|
||||
/// This will restart the encoding pipeline and close all sessions.
|
||||
pub async fn update_video_config(
|
||||
&self,
|
||||
resolution: Resolution,
|
||||
format: PixelFormat,
|
||||
fps: u32,
|
||||
) {
|
||||
// Stop existing pipeline
|
||||
if let Some(ref pipeline) = *self.video_pipeline.read().await {
|
||||
pipeline.stop();
|
||||
}
|
||||
*self.video_pipeline.write().await = None;
|
||||
|
||||
// Close all existing sessions - they need to reconnect
|
||||
let session_count = self.close_all_sessions().await;
|
||||
if session_count > 0 {
|
||||
info!("Closed {} existing sessions due to config change", session_count);
|
||||
}
|
||||
|
||||
// Update config
|
||||
let mut config = self.config.write().await;
|
||||
config.resolution = resolution;
|
||||
config.input_format = format;
|
||||
config.fps = fps;
|
||||
|
||||
// Scale bitrate based on resolution
|
||||
let base_pixels: u64 = 1280 * 720;
|
||||
let actual_pixels: u64 = resolution.width as u64 * resolution.height as u64;
|
||||
config.bitrate_kbps = ((8000u64 * actual_pixels / base_pixels).max(1000).min(15000)) as u32;
|
||||
|
||||
info!(
|
||||
"WebRTC config updated: {}x{} {:?} @ {} fps, {} kbps",
|
||||
resolution.width, resolution.height, format, fps, config.bitrate_kbps
|
||||
);
|
||||
}
|
||||
|
||||
/// Update encoder backend (software/hardware selection)
|
||||
pub async fn update_encoder_backend(&self, encoder_backend: Option<EncoderBackend>) {
|
||||
// Stop existing pipeline
|
||||
if let Some(ref pipeline) = *self.video_pipeline.read().await {
|
||||
pipeline.stop();
|
||||
}
|
||||
*self.video_pipeline.write().await = None;
|
||||
|
||||
// Close all existing sessions - they need to reconnect with new encoder
|
||||
let session_count = self.close_all_sessions().await;
|
||||
if session_count > 0 {
|
||||
info!("Closed {} existing sessions due to encoder backend change", session_count);
|
||||
}
|
||||
|
||||
// Update config
|
||||
let mut config = self.config.write().await;
|
||||
config.encoder_backend = encoder_backend;
|
||||
|
||||
info!(
|
||||
"WebRTC encoder backend updated: {:?}",
|
||||
encoder_backend
|
||||
);
|
||||
}
|
||||
|
||||
/// Update ICE configuration (STUN/TURN servers)
|
||||
///
|
||||
/// Note: Changes take effect for new sessions only.
|
||||
/// Existing sessions need to be reconnected to use the new ICE config.
|
||||
pub async fn update_ice_config(
|
||||
&self,
|
||||
stun_server: Option<String>,
|
||||
turn_server: Option<String>,
|
||||
turn_username: Option<String>,
|
||||
turn_password: Option<String>,
|
||||
) {
|
||||
let mut config = self.config.write().await;
|
||||
|
||||
// Update STUN servers
|
||||
config.webrtc.stun_servers.clear();
|
||||
if let Some(ref stun) = stun_server {
|
||||
if !stun.is_empty() {
|
||||
config.webrtc.stun_servers.push(stun.clone());
|
||||
info!("WebRTC STUN server updated: {}", stun);
|
||||
}
|
||||
}
|
||||
|
||||
// Update TURN servers
|
||||
config.webrtc.turn_servers.clear();
|
||||
if let Some(ref turn) = turn_server {
|
||||
if !turn.is_empty() {
|
||||
let username = turn_username.unwrap_or_default();
|
||||
let credential = turn_password.unwrap_or_default();
|
||||
config.webrtc.turn_servers.push(TurnServer {
|
||||
url: turn.clone(),
|
||||
username: username.clone(),
|
||||
credential,
|
||||
});
|
||||
info!("WebRTC TURN server updated: {} (user: {})", turn, username);
|
||||
}
|
||||
}
|
||||
|
||||
if config.webrtc.stun_servers.is_empty() && config.webrtc.turn_servers.is_empty() {
|
||||
info!("WebRTC ICE config cleared - only host candidates will be used");
|
||||
}
|
||||
}
|
||||
|
||||
/// Set HID controller for DataChannel
|
||||
pub async fn set_hid_controller(&self, hid: Arc<HidController>) {
|
||||
*self.hid_controller.write().await = Some(hid);
|
||||
}
|
||||
|
||||
// === Session Management ===
|
||||
|
||||
/// Create a new WebRTC session
///
/// Ensures the shared video pipeline is running, builds a
/// `UniversalSession` from the current config, wires up HID, video,
/// and (optionally) audio, then registers the session under a fresh
/// UUID which is returned to the caller.
///
/// # Errors
///
/// Fails when no video frame source has been set, or when pipeline,
/// session, or data-channel creation fails.
pub async fn create_session(&self) -> Result<String> {
    let session_id = uuid::Uuid::new_v4().to_string();
    let codec = *self.video_codec.read().await;

    // Ensure video pipeline is running
    let frame_tx = self.video_frame_tx.read().await.clone()
        .ok_or_else(|| AppError::VideoError("No video frame source".to_string()))?;
    let pipeline = self.ensure_video_pipeline(frame_tx).await?;

    // Create session config (snapshot of the current streamer settings)
    let config = self.config.read().await;
    let session_config = UniversalSessionConfig {
        webrtc: config.webrtc.clone(),
        codec: Self::codec_type_to_encoder_type(codec),
        resolution: config.resolution,
        input_format: config.input_format,
        bitrate_kbps: config.bitrate_kbps,
        fps: config.fps,
        gop_size: config.gop_size,
        audio_enabled: *self.audio_enabled.read().await,
    };
    // Release the config read guard before the awaits below.
    drop(config);

    // Create universal session
    let mut session = UniversalSession::new(session_config.clone(), session_id.clone()).await?;

    // Set HID controller if available
    if let Some(ref hid) = *self.hid_controller.read().await {
        session.set_hid_controller(hid.clone());
    }

    // Create data channel
    if self.config.read().await.webrtc.enable_datachannel {
        session.create_data_channel("hid").await?;
    }

    let session = Arc::new(session);

    // Subscribe to video pipeline frames
    // Request keyframe after ICE connection is established (via callback)
    let pipeline_for_callback = pipeline.clone();
    let session_id_for_callback = session_id.clone();
    session.start_from_video_pipeline(pipeline.subscribe(), move || {
        // Spawn async task to request keyframe
        // (the callback itself is synchronous, so the await runs on a
        // detached task)
        let pipeline = pipeline_for_callback;
        let sid = session_id_for_callback;
        tokio::spawn(async move {
            info!("Requesting keyframe for session {} after ICE connected", sid);
            pipeline.request_keyframe().await;
        });
    }).await;

    // Start audio if enabled
    if session_config.audio_enabled {
        if let Some(ref controller) = *self.audio_controller.read().await {
            if let Some(opus_rx) = controller.subscribe_opus_async().await {
                session.start_audio_from_opus(opus_rx).await;
            }
        }
    }

    // Store session
    self.sessions
        .write()
        .await
        .insert(session_id.clone(), session);

    info!(
        "Session created: {} (codec={:?}, audio={}, {} total)",
        session_id,
        codec,
        session_config.audio_enabled,
        self.sessions.read().await.len()
    );

    Ok(session_id)
}
|
||||
|
||||
/// Handle SDP offer
|
||||
pub async fn handle_offer(&self, session_id: &str, offer: SdpOffer) -> Result<SdpAnswer> {
|
||||
let sessions = self.sessions.read().await;
|
||||
let session = sessions
|
||||
.get(session_id)
|
||||
.ok_or_else(|| AppError::NotFound(format!("Session not found: {}", session_id)))?;
|
||||
|
||||
session.handle_offer(offer).await
|
||||
}
|
||||
|
||||
/// Add ICE candidate
|
||||
pub async fn add_ice_candidate(&self, session_id: &str, candidate: IceCandidate) -> Result<()> {
|
||||
let sessions = self.sessions.read().await;
|
||||
let session = sessions
|
||||
.get(session_id)
|
||||
.ok_or_else(|| AppError::NotFound(format!("Session not found: {}", session_id)))?;
|
||||
|
||||
session.add_ice_candidate(candidate).await
|
||||
}
|
||||
|
||||
/// Close a session
|
||||
pub async fn close_session(&self, session_id: &str) -> Result<()> {
|
||||
let session = self.sessions.write().await.remove(session_id);
|
||||
|
||||
if let Some(session) = session {
|
||||
session.close().await?;
|
||||
}
|
||||
|
||||
// Stop pipeline if no more sessions
|
||||
if self.sessions.read().await.is_empty() {
|
||||
if let Some(ref pipeline) = *self.video_pipeline.read().await {
|
||||
info!("No more sessions, stopping video pipeline");
|
||||
pipeline.stop();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Close all sessions
///
/// Returns the number of sessions that were registered. Individual
/// close failures are logged, not propagated, so every session is
/// always removed. The shared video pipeline is stopped afterwards
/// since it no longer has consumers.
pub async fn close_all_sessions(&self) -> usize {
    let mut sessions = self.sessions.write().await;
    let count = sessions.len();

    // Drain the map so it ends up empty even if a close() fails.
    // Note the write lock is held while awaiting each close.
    for (session_id, session) in sessions.drain() {
        debug!("Closing session {}", session_id);
        if let Err(e) = session.close().await {
            warn!("Error closing session {}: {}", session_id, e);
        }
    }

    // Stop pipeline
    // Release the sessions lock before taking the pipeline lock to keep
    // lock scopes minimal.
    drop(sessions);
    if let Some(ref pipeline) = *self.video_pipeline.read().await {
        pipeline.stop();
    }

    count
}
|
||||
|
||||
/// Get session count
|
||||
pub async fn session_count(&self) -> usize {
|
||||
self.sessions.read().await.len()
|
||||
}
|
||||
|
||||
/// Get session info
///
/// Returns a snapshot for the given session id, or `None` when the id
/// is unknown.
pub async fn get_session(&self, session_id: &str) -> Option<SessionInfo> {
    let sessions = self.sessions.read().await;
    sessions.get(session_id).map(|s| SessionInfo {
        session_id: s.session_id.clone(),
        // NOTE(review): this reports the time of the lookup, not the
        // session's actual creation time — confirm whether consumers
        // of SessionInfo rely on `created_at` being meaningful.
        created_at: std::time::Instant::now(),
        state: format!("{}", s.state()),
    })
}
|
||||
|
||||
/// List all sessions
|
||||
pub async fn list_sessions(&self) -> Vec<SessionInfo> {
|
||||
self.sessions
|
||||
.read()
|
||||
.await
|
||||
.values()
|
||||
.map(|s| SessionInfo {
|
||||
session_id: s.session_id.clone(),
|
||||
created_at: std::time::Instant::now(),
|
||||
state: format!("{}", s.state()),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Cleanup closed sessions
///
/// Removes sessions whose connection state is Closed, Failed, or
/// Disconnected, and stops the shared video pipeline when the last
/// session is gone.
pub async fn cleanup(&self) {
    // Phase 1: collect candidate ids under the read lock only, so the
    // write lock is taken only when there is actual work.
    let to_remove: Vec<String> = {
        let sessions = self.sessions.read().await;
        sessions
            .iter()
            .filter(|(_, s)| {
                matches!(
                    s.state(),
                    ConnectionState::Closed | ConnectionState::Failed | ConnectionState::Disconnected
                )
            })
            .map(|(id, _)| id.clone())
            .collect()
    };

    // Phase 2: remove under the write lock. A session could change
    // state between phases; removal by id is still safe.
    if !to_remove.is_empty() {
        let mut sessions = self.sessions.write().await;
        for id in &to_remove {
            debug!("Removing closed session: {}", id);
            sessions.remove(id);
        }

        // Stop pipeline if no more sessions
        if sessions.is_empty() {
            // Release the sessions lock before taking the pipeline lock.
            drop(sessions);
            if let Some(ref pipeline) = *self.video_pipeline.read().await {
                info!("No more sessions after cleanup, stopping video pipeline");
                pipeline.stop();
            }
        }
    }
}
|
||||
|
||||
// === Statistics ===
|
||||
|
||||
/// Get streamer statistics
///
/// Aggregates the session count, active codec, audio flag, and — when
/// the respective pipelines exist — video and audio pipeline counters
/// into a `WebRtcStreamerStats` snapshot.
pub async fn stats(&self) -> WebRtcStreamerStats {
    let codec = *self.video_codec.read().await;
    let session_count = self.session_count().await;

    // Video pipeline stats are only available while a pipeline exists.
    let video_pipeline = if let Some(ref pipeline) = *self.video_pipeline.read().await {
        let s = pipeline.stats().await;
        Some(VideoPipelineStats {
            frames_encoded: s.frames_encoded,
            frames_dropped: s.frames_dropped,
            bytes_encoded: s.bytes_encoded,
            keyframes_encoded: s.keyframes_encoded,
            avg_encode_time_ms: s.avg_encode_time_ms,
            current_fps: s.current_fps,
            subscribers: s.subscribers,
        })
    } else {
        None
    };

    // Get audio pipeline stats
    let audio_pipeline = if let Some(ref pipeline) = *self.audio_pipeline.read().await {
        let stats = pipeline.stats().await;
        Some(AudioPipelineStats {
            frames_encoded: stats.frames_encoded,
            frames_dropped: stats.frames_dropped,
            bytes_encoded: stats.bytes_encoded,
            avg_encode_time_ms: stats.avg_encode_time_ms,
            subscribers: stats.subscribers,
        })
    } else {
        None
    };

    WebRtcStreamerStats {
        session_count,
        video_codec: format!("{:?}", codec),
        video_pipeline,
        audio_enabled: *self.audio_enabled.read().await,
        audio_pipeline,
    }
}
|
||||
|
||||
/// Get pipeline statistics
|
||||
pub async fn pipeline_stats(&self) -> Option<SharedVideoPipelineStats> {
|
||||
if let Some(ref pipeline) = *self.video_pipeline.read().await {
|
||||
Some(pipeline.stats().await)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Set bitrate
///
/// Note: Hardware encoders (VAAPI, NVENC, etc.) don't support dynamic bitrate changes.
/// This method restarts the pipeline to apply the new bitrate.
///
/// When no sessions exist the pipeline is only cleared; it is rebuilt
/// with the new bitrate on the next `create_session`.
///
/// # Errors
///
/// Propagates failures from recreating the video pipeline.
pub async fn set_bitrate(&self, bitrate_kbps: u32) -> Result<()> {
    // Update config first
    // (so even the "not running" path persists the new value)
    self.config.write().await.bitrate_kbps = bitrate_kbps;

    // Check if pipeline exists and is running
    let pipeline_running = {
        if let Some(ref pipeline) = *self.video_pipeline.read().await {
            pipeline.is_running()
        } else {
            false
        }
    };

    if pipeline_running {
        info!(
            "Restarting video pipeline to apply new bitrate: {} kbps",
            bitrate_kbps
        );

        // Stop existing pipeline
        if let Some(ref pipeline) = *self.video_pipeline.read().await {
            pipeline.stop();
        }

        // Wait for pipeline to stop
        // (fixed delay — presumably enough for the encode task to wind
        // down; confirm against SharedVideoPipeline's stop semantics)
        tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;

        // Clear pipeline reference - will be recreated
        *self.video_pipeline.write().await = None;

        // Recreate pipeline with new config if we have a frame source
        if let Some(ref tx) = *self.video_frame_tx.read().await {
            // Get existing sessions that need to be reconnected
            let session_ids: Vec<String> = self.sessions.read().await.keys().cloned().collect();

            if !session_ids.is_empty() {
                // Recreate pipeline
                let pipeline = self.ensure_video_pipeline(tx.clone()).await?;

                // Reconnect all sessions to new pipeline
                let sessions = self.sessions.read().await;
                for session_id in &session_ids {
                    if let Some(session) = sessions.get(session_id) {
                        info!("Reconnecting session {} to new pipeline", session_id);
                        // Each session gets a fresh subscription plus a
                        // keyframe request once its feed is (re)attached.
                        let pipeline_for_callback = pipeline.clone();
                        let sid = session_id.clone();
                        session.start_from_video_pipeline(pipeline.subscribe(), move || {
                            let pipeline = pipeline_for_callback;
                            tokio::spawn(async move {
                                info!("Requesting keyframe for session {} after reconnect", sid);
                                pipeline.request_keyframe().await;
                            });
                        }).await;
                    }
                }

                info!(
                    "Video pipeline restarted with {} kbps, reconnected {} sessions",
                    bitrate_kbps,
                    session_ids.len()
                );
            }
        }
    } else {
        debug!(
            "Pipeline not running, bitrate {} kbps will apply on next start",
            bitrate_kbps
        );
    }

    Ok(())
}
|
||||
}
|
||||
|
||||
impl Default for WebRtcStreamer {
    /// Construct a streamer with the default config, H264 video,
    /// audio disabled, and no pipeline, frame source, or controllers
    /// attached yet.
    fn default() -> Self {
        Self {
            config: RwLock::new(WebRtcStreamerConfig::default()),
            video_codec: RwLock::new(VideoCodecType::H264),
            video_pipeline: RwLock::new(None),
            // NOTE(review): sessions is Arc-wrapped unlike the other
            // fields — presumably shared with background tasks; verify.
            sessions: Arc::new(RwLock::new(HashMap::new())),
            video_frame_tx: RwLock::new(None),
            audio_enabled: RwLock::new(false),
            audio_pipeline: RwLock::new(None),
            audio_controller: RwLock::new(None),
            hid_controller: RwLock::new(None),
        }
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Pins the documented defaults; update alongside
    // WebRtcStreamerConfig::default().
    #[test]
    fn test_webrtc_streamer_config_default() {
        let config = WebRtcStreamerConfig::default();
        assert_eq!(config.video_codec, VideoCodecType::H264);
        assert_eq!(config.resolution, Resolution::HD720);
        assert_eq!(config.bitrate_kbps, 8000);
        assert_eq!(config.fps, 30);
        assert!(!config.audio_enabled);
    }

    // H264 must always be offered (software fallback), regardless of
    // which hardware encoders the registry reports.
    #[tokio::test]
    async fn test_supported_codecs() {
        let streamer = WebRtcStreamer::new();
        let codecs = streamer.supported_video_codecs();
        assert!(codecs.contains(&VideoCodecType::H264));
    }
}
|
||||
Reference in New Issue
Block a user