This commit is contained in:
mofeng-git
2025-12-28 18:19:16 +08:00
commit d143d158e4
771 changed files with 220548 additions and 0 deletions

564
src/stream/mjpeg.rs Normal file
View File

@@ -0,0 +1,564 @@
//! MJPEG stream handler
//!
//! Manages video frame distribution and per-client statistics.
use arc_swap::ArcSwap;
use parking_lot::Mutex as ParkingMutex;
use parking_lot::RwLock as ParkingRwLock;
use std::collections::{HashMap, VecDeque};
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::broadcast;
use tracing::{debug, info, warn};
use crate::video::encoder::JpegEncoder;
use crate::video::encoder::traits::{Encoder, EncoderConfig};
use crate::video::format::PixelFormat;
use crate::video::VideoFrame;
/// Client ID type (UUID string)
pub type ClientId = String;
/// Per-client session bookkeeping for the MJPEG stream.
#[derive(Debug, Clone)]
pub struct ClientSession {
    /// Unique client identifier (UUID string).
    pub id: ClientId,
    /// When the client connected.
    pub connected_at: Instant,
    /// When a frame was last sent to this client.
    pub last_activity: Instant,
    /// Total frames delivered to this client.
    pub frames_sent: u64,
    /// Rolling-window FPS tracker (1-second window).
    pub fps_calculator: FpsCalculator,
}
impl ClientSession {
    /// Build a fresh session; both timestamps start at "now".
    pub fn new(id: ClientId) -> Self {
        let started = Instant::now();
        Self {
            id,
            connected_at: started,
            last_activity: started,
            frames_sent: 0,
            fps_calculator: FpsCalculator::new(),
        }
    }
    /// Time between connect and the most recent activity.
    ///
    /// Note: measured up to `last_activity` rather than "now", so any idle
    /// tail time is deliberately excluded (used for avg-FPS on disconnect).
    pub fn connected_duration(&self) -> Duration {
        self.last_activity.duration_since(self.connected_at)
    }
    /// How long the client has been idle (no frame sent).
    pub fn idle_duration(&self) -> Duration {
        self.last_activity.elapsed()
    }
}
/// Rolling 1-second-window FPS calculator.
#[derive(Debug, Clone)]
pub struct FpsCalculator {
    /// Timestamps of frames recorded within (roughly) the last window.
    frame_times: VecDeque<Instant>,
    /// Window duration (default 1 second).
    window: Duration,
}
impl FpsCalculator {
    /// Create a new FPS calculator with 1-second window.
    pub fn new() -> Self {
        Self {
            frame_times: VecDeque::with_capacity(120), // Max 120fps tracking
            window: Duration::from_secs(1),
        }
    }
    /// Record a frame sent and prune timestamps that fell out of the window.
    pub fn record_frame(&mut self) {
        let now = Instant::now();
        self.frame_times.push_back(now);
        // checked_sub avoids a panic when the monotonic clock is younger
        // than the window (possible very early in process lifetime).
        if let Some(cutoff) = now.checked_sub(self.window) {
            while let Some(&oldest) = self.frame_times.front() {
                if oldest < cutoff {
                    self.frame_times.pop_front();
                } else {
                    break;
                }
            }
        }
    }
    /// Calculate current FPS: frames within the window at *read* time.
    ///
    /// Fix: the previous implementation returned a count cached in
    /// `record_frame()`, so the reported FPS never decayed to 0 while no
    /// frames were arriving. Counting here keeps the value honest; the deque
    /// is bounded (~window * max fps), so the scan is cheap.
    pub fn current_fps(&self) -> u32 {
        let now = Instant::now();
        self.frame_times
            .iter()
            .filter(|&&t| now.duration_since(t) <= self.window)
            .count() as u32
    }
}
impl Default for FpsCalculator {
    /// Same as [`FpsCalculator::new`].
    fn default() -> Self {
        Self::new()
    }
}
/// Configuration for pausing capture when no clients are connected.
#[derive(Debug, Clone)]
pub struct AutoPauseConfig {
    /// Whether auto-pause is active.
    pub enabled: bool,
    /// Seconds to wait with zero clients before pausing (default 10).
    pub shutdown_delay_secs: u64,
    /// Seconds of inactivity before a client is reaped (default 30).
    pub client_timeout_secs: u64,
}
impl Default for AutoPauseConfig {
    /// Disabled by default; 10 s shutdown delay, 30 s client timeout.
    fn default() -> Self {
        AutoPauseConfig {
            enabled: false,
            shutdown_delay_secs: 10,
            client_timeout_secs: 30,
        }
    }
}
/// MJPEG stream handler
/// Manages video frame distribution to HTTP clients
pub struct MjpegStreamHandler {
    /// Current frame (latest) - using ArcSwap for lock-free reads
    current_frame: ArcSwap<Option<VideoFrame>>,
    /// Frame update notification; payload-less tick — receivers fetch the
    /// actual frame from `current_frame`
    frame_notify: broadcast::Sender<()>,
    /// Whether stream is online
    online: AtomicBool,
    /// Frame sequence counter (incremented per published frame)
    sequence: AtomicU64,
    /// Per-client sessions (ClientId -> ClientSession)
    /// Use parking_lot::RwLock for better performance
    clients: ParkingRwLock<HashMap<ClientId, ClientSession>>,
    /// Auto-pause configuration
    auto_pause_config: ParkingRwLock<AutoPauseConfig>,
    /// Last frame timestamp; used to guarantee at least 1 fps despite dedup
    last_frame_ts: ParkingRwLock<Option<Instant>>,
    /// Dropped same frames count (consecutive; reset on each published frame)
    dropped_same_frames: AtomicU64,
    /// Maximum consecutive same frames to drop (0 = disabled)
    max_drop_same_frames: AtomicU64,
    /// JPEG encoder for non-JPEG input formats (created lazily on first use)
    jpeg_encoder: ParkingMutex<Option<JpegEncoder>>,
}
impl MjpegStreamHandler {
    /// Create a new MJPEG stream handler with the default duplicate-frame
    /// drop limit (up to 100 consecutive identical frames suppressed).
    pub fn new() -> Self {
        Self::with_drop_limit(100)
    }
    /// Create a handler with a custom duplicate-frame drop limit
    /// (`max_drop == 0` disables deduplication entirely).
    pub fn with_drop_limit(max_drop: u64) -> Self {
        // Small notify channel: it only carries "new frame" ticks, receivers
        // read the frame itself from `current_frame`.
        let (frame_notify, _) = broadcast::channel(4);
        Self {
            current_frame: ArcSwap::from_pointee(None),
            frame_notify,
            online: AtomicBool::new(false),
            sequence: AtomicU64::new(0),
            clients: ParkingRwLock::new(HashMap::new()),
            jpeg_encoder: ParkingMutex::new(None),
            auto_pause_config: ParkingRwLock::new(AutoPauseConfig::default()),
            last_frame_ts: ParkingRwLock::new(None),
            dropped_same_frames: AtomicU64::new(0),
            max_drop_same_frames: AtomicU64::new(max_drop),
        }
    }
    /// Publish a new frame to all subscribers.
    ///
    /// Non-JPEG frames are transparently JPEG-encoded first. Consecutive
    /// identical frames are dropped (ustreamer-style dedup), except that at
    /// least one frame per second is always published so clients never stall.
    pub fn update_frame(&self, frame: VideoFrame) {
        // If frame is not already compressed, encode it to JPEG; skip the
        // frame on encode failure instead of propagating.
        let frame = if !frame.format.is_compressed() {
            match self.encode_to_jpeg(&frame) {
                Ok(jpeg_frame) => jpeg_frame,
                Err(e) => {
                    warn!("Failed to encode frame to JPEG: {}", e);
                    return;
                }
            }
        } else {
            frame
        };
        // Frame deduplication (ustreamer-style): drop frames identical to the
        // previous one, up to `max_drop` in a row.
        let max_drop = self.max_drop_same_frames.load(Ordering::Relaxed);
        if max_drop > 0 && frame.online {
            let current = self.current_frame.load();
            if let Some(ref prev_frame) = **current {
                let dropped_count = self.dropped_same_frames.load(Ordering::Relaxed);
                if dropped_count < max_drop && frames_are_identical(prev_frame, &frame) {
                    // 1 fps floor: force a send when the last published frame
                    // is older than one second, even if identical.
                    let last_ts = *self.last_frame_ts.read();
                    let should_force_send = if let Some(ts) = last_ts {
                        ts.elapsed() >= Duration::from_secs(1)
                    } else {
                        false
                    };
                    if !should_force_send {
                        // Drop this duplicate frame.
                        self.dropped_same_frames.fetch_add(1, Ordering::Relaxed);
                        return;
                    }
                }
            }
        }
        // Frame is different, the drop limit was reached, or the 1 fps
        // guarantee kicked in: publish it.
        self.dropped_same_frames.store(0, Ordering::Relaxed);
        self.sequence.fetch_add(1, Ordering::Relaxed);
        self.online.store(true, Ordering::SeqCst);
        *self.last_frame_ts.write() = Some(Instant::now());
        self.current_frame.store(Arc::new(Some(frame)));
        // Wake all waiting clients.
        let _ = self.frame_notify.send(());
    }
    /// Encode a non-JPEG frame to JPEG.
    ///
    /// The encoder is created lazily on first use and recreated when the
    /// input resolution changes. Errors are returned so the caller can skip
    /// the frame.
    fn encode_to_jpeg(&self, frame: &VideoFrame) -> Result<VideoFrame, String> {
        let resolution = frame.resolution;
        let sequence = self.sequence.load(Ordering::Relaxed);
        let mut encoder_guard = self.jpeg_encoder.lock();
        // Lazily create the encoder. Fix: the previous code's "fallback"
        // retried the *identical* config and then expect()ed, which could
        // only panic on failure; propagate the error instead so the frame is
        // simply skipped by the caller.
        if encoder_guard.is_none() {
            let config = EncoderConfig::jpeg(resolution, 85);
            let enc = JpegEncoder::new(config)
                .map_err(|e| format!("Failed to create JPEG encoder: {}", e))?;
            debug!("Created JPEG encoder for MJPEG stream: {}x{}", resolution.width, resolution.height);
            *encoder_guard = Some(enc);
        }
        // Invariant: populated just above when it was None.
        let encoder = encoder_guard.as_mut().expect("encoder initialized above");
        // Recreate the encoder if the capture resolution changed.
        if encoder.config().resolution != resolution {
            debug!("Resolution changed, recreating JPEG encoder: {}x{}", resolution.width, resolution.height);
            let config = EncoderConfig::jpeg(resolution, 85);
            *encoder = JpegEncoder::new(config).map_err(|e| format!("Failed to create encoder: {}", e))?;
        }
        // Encode based on input pixel format.
        let encoded = match frame.format {
            PixelFormat::Yuyv => {
                encoder.encode_yuyv(frame.data(), sequence)
                    .map_err(|e| format!("YUYV encode failed: {}", e))?
            }
            PixelFormat::Nv12 => {
                encoder.encode_nv12(frame.data(), sequence)
                    .map_err(|e| format!("NV12 encode failed: {}", e))?
            }
            PixelFormat::Rgb24 => {
                encoder.encode_rgb(frame.data(), sequence)
                    .map_err(|e| format!("RGB encode failed: {}", e))?
            }
            PixelFormat::Bgr24 => {
                encoder.encode_bgr(frame.data(), sequence)
                    .map_err(|e| format!("BGR encode failed: {}", e))?
            }
            _ => {
                return Err(format!("Unsupported format for JPEG encoding: {}", frame.format));
            }
        };
        // Wrap the JPEG bytes in a new VideoFrame.
        Ok(VideoFrame::from_vec(
            encoded.data.to_vec(),
            resolution,
            PixelFormat::Mjpeg,
            0, // stride not relevant for JPEG
            sequence,
        ))
    }
    /// Mark the stream offline and wake subscribers so they can exit.
    pub fn set_offline(&self) {
        self.online.store(false, Ordering::SeqCst);
        let _ = self.frame_notify.send(());
    }
    /// Mark the stream online (called when streaming starts).
    pub fn set_online(&self) {
        self.online.store(true, Ordering::SeqCst);
    }
    /// Whether the stream is currently online.
    pub fn is_online(&self) -> bool {
        self.online.load(Ordering::SeqCst)
    }
    /// Number of currently connected clients.
    pub fn client_count(&self) -> u64 {
        self.clients.read().len() as u64
    }
    /// Register a new client session.
    pub fn register_client(&self, client_id: ClientId) {
        let session = ClientSession::new(client_id.clone());
        self.clients.write().insert(client_id.clone(), session);
        info!("Client {} connected (total: {})", client_id, self.client_count());
    }
    /// Unregister a client and log a session summary.
    pub fn unregister_client(&self, client_id: &str) {
        if let Some(session) = self.clients.write().remove(client_id) {
            let duration = session.connected_duration();
            let duration_secs = duration.as_secs_f32();
            // Guard against division by (near) zero for very short sessions.
            let avg_fps = if duration_secs > 0.1 {
                session.frames_sent as f32 / duration_secs
            } else {
                0.0
            };
            info!(
                "Client {} disconnected after {:.1}s ({} frames, {:.1} avg FPS)",
                client_id, duration_secs, session.frames_sent, avg_fps
            );
        }
    }
    /// Record that a frame was sent to a specific client.
    pub fn record_frame_sent(&self, client_id: &str) {
        if let Some(session) = self.clients.write().get_mut(client_id) {
            session.last_activity = Instant::now();
            session.frames_sent += 1;
            session.fps_calculator.record_frame();
        }
    }
    /// Snapshot of per-client statistics, keyed by client id.
    pub fn get_clients_stat(&self) -> HashMap<String, crate::events::types::ClientStats> {
        self.clients
            .read()
            .iter()
            .map(|(id, session)| {
                (
                    id.clone(),
                    crate::events::types::ClientStats {
                        id: id.clone(),
                        fps: session.fps_calculator.current_fps(),
                        connected_secs: session.connected_duration().as_secs(),
                    },
                )
            })
            .collect()
    }
    /// Get auto-pause configuration (cloned snapshot).
    pub fn auto_pause_config(&self) -> AutoPauseConfig {
        self.auto_pause_config.read().clone()
    }
    /// Update auto-pause configuration.
    pub fn set_auto_pause_config(&self, config: AutoPauseConfig) {
        // Log before moving `config` into the lock; avoids the previous
        // redundant clone made only for logging.
        info!(
            "Auto-pause config updated: enabled={}, delay={}s, timeout={}s",
            config.enabled, config.shutdown_delay_secs, config.client_timeout_secs
        );
        *self.auto_pause_config.write() = config;
    }
    /// Latest published frame, if any.
    pub fn current_frame(&self) -> Option<VideoFrame> {
        (**self.current_frame.load()).clone()
    }
    /// Subscribe to frame-update notifications.
    pub fn subscribe(&self) -> broadcast::Receiver<()> {
        self.frame_notify.subscribe()
    }
    /// Disconnect all clients (used during config changes).
    /// This clears the client list and sets the stream offline,
    /// which will cause all active MJPEG streams to terminate.
    pub fn disconnect_all_clients(&self) {
        let count = {
            let mut clients = self.clients.write();
            let count = clients.len();
            clients.clear();
            count
        };
        if count > 0 {
            info!("Disconnected all {} MJPEG clients for config change", count);
        }
        // Set offline to signal all streaming tasks to stop.
        self.set_offline();
    }
}
impl Default for MjpegStreamHandler {
    /// Equivalent to [`MjpegStreamHandler::new`] (default drop limit).
    fn default() -> Self {
        Self::new()
    }
}
/// RAII guard for client lifecycle management
/// Ensures cleanup even on panic or abrupt disconnection
pub struct ClientGuard {
    // ID registered with the handler at construction time.
    client_id: ClientId,
    // Handler to unregister from when the guard is dropped.
    handler: Arc<MjpegStreamHandler>,
}
impl ClientGuard {
    /// Create a new client guard
    ///
    /// Registers `client_id` with `handler` immediately; the matching
    /// unregister happens in `Drop`, so holding the guard ties the
    /// registration to a scope.
    pub fn new(client_id: ClientId, handler: Arc<MjpegStreamHandler>) -> Self {
        handler.register_client(client_id.clone());
        Self {
            client_id,
            handler,
        }
    }
    /// Get client ID
    pub fn id(&self) -> &ClientId {
        &self.client_id
    }
}
impl Drop for ClientGuard {
    // Unregister on drop so normal disconnects and unwinding both clean up.
    fn drop(&mut self) {
        self.handler.unregister_client(&self.client_id);
    }
}
impl MjpegStreamHandler {
    /// Start stale client cleanup task
    /// Should be called once when handler is created
    ///
    /// Spawns a background task that every 5 seconds removes clients whose
    /// `last_activity` is older than the configured client timeout. The
    /// timeout is re-read from `auto_pause_config` on every tick, so config
    /// updates take effect without restarting the task.
    pub fn start_cleanup_task(self: Arc<Self>) {
        let handler = self.clone();
        tokio::spawn(async move {
            let mut interval = tokio::time::interval(Duration::from_secs(5));
            loop {
                interval.tick().await;
                let timeout_secs = handler.auto_pause_config().client_timeout_secs;
                let timeout = Duration::from_secs(timeout_secs);
                let now = Instant::now();
                let mut stale = Vec::new();
                // Find stale clients under the read lock only; collecting ids
                // first keeps the write lock below short.
                {
                    let clients = handler.clients.read();
                    for (id, session) in clients.iter() {
                        if now.duration_since(session.last_activity) > timeout {
                            stale.push(id.clone());
                        }
                    }
                }
                // Remove stale clients.
                // NOTE(review): a client could refresh `last_activity` between
                // the read and write locks and still be removed here, since the
                // timeout is not re-checked before `remove` — confirm this
                // best-effort behavior is acceptable.
                if !stale.is_empty() {
                    let mut clients = handler.clients.write();
                    for id in stale {
                        if let Some(session) = clients.remove(&id) {
                            warn!(
                                "Removed stale client {} (inactive for {:.1}s)",
                                id,
                                now.duration_since(session.last_activity).as_secs_f32()
                            );
                        }
                    }
                }
            }
        });
    }
}
/// Cheap frame-equality test used for duplicate-frame dropping
/// (hash-based, ustreamer-style).
///
/// Geometry and metadata are compared first; only when everything matches is
/// the content hash consulted. The hash is computed once and cached (OnceLock
/// inside VideoFrame), which is far cheaper than a byte-wise comparison for
/// large frames (e.g. 1080p MJPEG).
fn frames_are_identical(a: &VideoFrame, b: &VideoFrame) -> bool {
    // Short-circuiting && preserves the original cheap-to-expensive order:
    // length, geometry, format, stride, online flag, then content hash.
    a.len() == b.len()
        && a.resolution.width == b.resolution.width
        && a.resolution.height == b.resolution.height
        && a.format == b.format
        && a.stride == b.stride
        && a.online == b.online
        && a.get_hash() == b.get_hash()
}
#[cfg(test)]
mod tests {
    use super::*;
    use bytes::Bytes;
    use crate::video::{format::Resolution, PixelFormat};
    // Smoke test: a fresh handler starts offline with zero clients, and a
    // minimal JPEG-marker frame can be constructed.
    #[tokio::test]
    async fn test_stream_handler() {
        let handler = MjpegStreamHandler::new();
        assert!(!handler.is_online());
        assert_eq!(handler.client_count(), 0);
        // Create a frame (SOI/EOI markers only; not published here)
        let _frame = VideoFrame::new(
            Bytes::from(vec![0xFF, 0xD8, 0x00, 0x00, 0xFF, 0xD9]),
            Resolution::VGA,
            PixelFormat::Mjpeg,
            0,
            1,
        );
    }
    // The calculator should start empty and track frames recorded within the
    // 1-second window.
    #[test]
    fn test_fps_calculator() {
        let mut calc = FpsCalculator::new();
        // Initially empty
        assert_eq!(calc.current_fps(), 0);
        // Record some frames
        calc.record_frame();
        calc.record_frame();
        calc.record_frame();
        // Should have 3 frames in window
        assert!(calc.frame_times.len() == 3);
    }
}

View File

@@ -0,0 +1,487 @@
//! MJPEG Streamer - High-level MJPEG/HTTP streaming manager
//!
//! This module provides a unified interface for MJPEG streaming mode,
//! integrating video capture, MJPEG distribution, and WebSocket HID.
//!
//! # Architecture
//!
//! ```text
//! MjpegStreamer
//! |
//! +-- VideoCapturer (V4L2 video capture)
//! +-- MjpegStreamHandler (HTTP multipart video)
//! +-- WsHidHandler (WebSocket HID)
//! ```
//!
//! Note: Audio WebSocket is handled separately by audio_ws.rs (/api/ws/audio)
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use tokio::sync::{broadcast, RwLock};
use tracing::info;
use crate::audio::AudioController;
use crate::error::{AppError, Result};
use crate::events::{EventBus, SystemEvent};
use crate::hid::HidController;
use crate::video::capture::{CaptureConfig, VideoCapturer};
use crate::video::device::{enumerate_devices, find_best_device, VideoDeviceInfo};
use crate::video::format::{PixelFormat, Resolution};
use crate::video::frame::VideoFrame;
use super::mjpeg::MjpegStreamHandler;
use super::ws_hid::WsHidHandler;
/// MJPEG streamer configuration
#[derive(Debug, Clone)]
pub struct MjpegStreamerConfig {
    /// Device path (None = auto-detect)
    pub device_path: Option<PathBuf>,
    /// Desired resolution
    pub resolution: Resolution,
    /// Desired format
    pub format: PixelFormat,
    /// Desired FPS
    pub fps: u32,
    /// JPEG quality (1-100)
    pub jpeg_quality: u8,
}
impl Default for MjpegStreamerConfig {
    /// Defaults: auto-detected device, 1080p MJPEG at 30 fps, quality 80.
    fn default() -> Self {
        Self {
            device_path: None,
            resolution: Resolution::HD1080,
            format: PixelFormat::Mjpeg,
            fps: 30,
            jpeg_quality: 80,
        }
    }
}
/// Lifecycle state of the MJPEG streamer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum MjpegStreamerState {
    /// Not initialized
    Uninitialized,
    /// Ready but not streaming
    Ready,
    /// Actively streaming
    Streaming,
    /// No video signal
    NoSignal,
    /// Error occurred
    Error,
}
impl std::fmt::Display for MjpegStreamerState {
    /// Render the snake_case label used in stats and event payloads.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            Self::Uninitialized => "uninitialized",
            Self::Ready => "ready",
            Self::Streaming => "streaming",
            Self::NoSignal => "no_signal",
            Self::Error => "error",
        };
        f.write_str(label)
    }
}
/// MJPEG streamer statistics
#[derive(Debug, Clone, Default)]
pub struct MjpegStreamerStats {
    /// Current state (snake_case label from MjpegStreamerState::to_string)
    pub state: String,
    /// Current device path (display form), if a device is selected
    pub device: Option<String>,
    /// Video resolution as (width, height); from the configured values
    pub resolution: Option<(u32, u32)>,
    /// Video format (string form of the configured PixelFormat)
    pub format: Option<String>,
    /// Configured FPS (not a measured rate)
    pub fps: u32,
    /// MJPEG client count
    pub mjpeg_clients: u64,
    /// WebSocket HID client count
    pub ws_hid_clients: usize,
    /// Total frames captured since capture started
    pub frames_captured: u64,
}
/// MJPEG Streamer
///
/// High-level manager for MJPEG/HTTP streaming mode.
/// Integrates video capture, MJPEG distribution, and WebSocket HID.
pub struct MjpegStreamer {
    // === Video ===
    /// Active configuration (resolution/format/fps/quality).
    config: RwLock<MjpegStreamerConfig>,
    /// V4L2 capturer; None until init_with_device() runs.
    capturer: RwLock<Option<Arc<VideoCapturer>>>,
    /// HTTP multipart frame distributor.
    mjpeg_handler: Arc<MjpegStreamHandler>,
    /// Currently selected capture device, if any.
    current_device: RwLock<Option<VideoDeviceInfo>>,
    /// Lifecycle state (Uninitialized/Ready/Streaming/...).
    state: RwLock<MjpegStreamerState>,
    // === Audio (controller reference only, WS handled by audio_ws.rs) ===
    audio_controller: RwLock<Option<Arc<AudioController>>>,
    /// Whether audio is enabled (flag only; no audio plumbing here).
    audio_enabled: AtomicBool,
    // === HID ===
    /// WebSocket HID input handler.
    ws_hid_handler: Arc<WsHidHandler>,
    /// HID controller reference, forwarded to ws_hid_handler.
    hid_controller: RwLock<Option<Arc<HidController>>>,
    // === Control ===
    /// Serializes concurrent start() calls.
    start_lock: tokio::sync::Mutex<()>,
    /// Optional event bus for state-change broadcasts.
    events: RwLock<Option<Arc<EventBus>>>,
    /// True while apply_config() is running; blocks start().
    config_changing: AtomicBool,
}
impl MjpegStreamer {
/// Create a new MJPEG streamer
pub fn new() -> Arc<Self> {
Arc::new(Self {
config: RwLock::new(MjpegStreamerConfig::default()),
capturer: RwLock::new(None),
mjpeg_handler: Arc::new(MjpegStreamHandler::new()),
current_device: RwLock::new(None),
state: RwLock::new(MjpegStreamerState::Uninitialized),
audio_controller: RwLock::new(None),
audio_enabled: AtomicBool::new(false),
ws_hid_handler: WsHidHandler::new(),
hid_controller: RwLock::new(None),
start_lock: tokio::sync::Mutex::new(()),
events: RwLock::new(None),
config_changing: AtomicBool::new(false),
})
}
/// Create with specific config
pub fn with_config(config: MjpegStreamerConfig) -> Arc<Self> {
Arc::new(Self {
config: RwLock::new(config),
capturer: RwLock::new(None),
mjpeg_handler: Arc::new(MjpegStreamHandler::new()),
current_device: RwLock::new(None),
state: RwLock::new(MjpegStreamerState::Uninitialized),
audio_controller: RwLock::new(None),
audio_enabled: AtomicBool::new(false),
ws_hid_handler: WsHidHandler::new(),
hid_controller: RwLock::new(None),
start_lock: tokio::sync::Mutex::new(()),
events: RwLock::new(None),
config_changing: AtomicBool::new(false),
})
}
// ========================================================================
// Configuration and Setup
// ========================================================================
/// Set event bus for broadcasting state changes
pub async fn set_event_bus(&self, events: Arc<EventBus>) {
*self.events.write().await = Some(events);
}
/// Set audio controller (for reference, WebSocket handled by audio_ws.rs)
pub async fn set_audio_controller(&self, audio: Arc<AudioController>) {
*self.audio_controller.write().await = Some(audio);
info!("MjpegStreamer: Audio controller set");
}
/// Set HID controller
pub async fn set_hid_controller(&self, hid: Arc<HidController>) {
*self.hid_controller.write().await = Some(hid.clone());
self.ws_hid_handler.set_hid_controller(hid);
info!("MjpegStreamer: HID controller set");
}
/// Enable or disable audio
pub fn set_audio_enabled(&self, enabled: bool) {
self.audio_enabled.store(enabled, Ordering::SeqCst);
}
/// Check if audio is enabled
pub fn is_audio_enabled(&self) -> bool {
self.audio_enabled.load(Ordering::SeqCst)
}
// ========================================================================
// State and Status
// ========================================================================
/// Get current state
pub async fn state(&self) -> MjpegStreamerState {
*self.state.read().await
}
/// Check if config is currently being changed
pub fn is_config_changing(&self) -> bool {
self.config_changing.load(Ordering::SeqCst)
}
/// Get current device info
pub async fn current_device(&self) -> Option<VideoDeviceInfo> {
self.current_device.read().await.clone()
}
/// Get statistics
pub async fn stats(&self) -> MjpegStreamerStats {
let state = *self.state.read().await;
let device = self.current_device.read().await;
let config = self.config.read().await;
let (resolution, format, frames_captured) = if let Some(ref cap) = *self.capturer.read().await {
let stats = cap.stats().await;
(
Some((config.resolution.width, config.resolution.height)),
Some(config.format.to_string()),
stats.frames_captured,
)
} else {
(None, None, 0)
};
MjpegStreamerStats {
state: state.to_string(),
device: device.as_ref().map(|d| d.path.display().to_string()),
resolution,
format,
fps: config.fps,
mjpeg_clients: self.mjpeg_handler.client_count(),
ws_hid_clients: self.ws_hid_handler.client_count(),
frames_captured,
}
}
// ========================================================================
// Handler Access
// ========================================================================
/// Get MJPEG handler for HTTP streaming
pub fn mjpeg_handler(&self) -> Arc<MjpegStreamHandler> {
self.mjpeg_handler.clone()
}
/// Get WebSocket HID handler
pub fn ws_hid_handler(&self) -> Arc<WsHidHandler> {
self.ws_hid_handler.clone()
}
/// Get frame sender for WebRTC integration
pub async fn frame_sender(&self) -> Option<broadcast::Sender<VideoFrame>> {
if let Some(ref cap) = *self.capturer.read().await {
Some(cap.frame_sender())
} else {
None
}
}
// ========================================================================
// Initialization
// ========================================================================
/// Initialize with auto-detected device
pub async fn init_auto(self: &Arc<Self>) -> Result<()> {
let best = find_best_device()?;
self.init_with_device(best).await
}
/// Initialize with specific device
pub async fn init_with_device(self: &Arc<Self>, device: VideoDeviceInfo) -> Result<()> {
info!("MjpegStreamer: Initializing with device: {}", device.path.display());
let config = self.config.read().await.clone();
// Create capture config
let capture_config = CaptureConfig {
device_path: device.path.clone(),
resolution: config.resolution,
format: config.format,
fps: config.fps,
buffer_count: 4,
timeout: std::time::Duration::from_secs(5),
jpeg_quality: config.jpeg_quality,
};
// Create capturer
let capturer = Arc::new(VideoCapturer::new(capture_config));
// Store device and capturer
*self.current_device.write().await = Some(device);
*self.capturer.write().await = Some(capturer);
*self.state.write().await = MjpegStreamerState::Ready;
self.publish_state_change().await;
Ok(())
}
// ========================================================================
// Streaming Control
// ========================================================================
/// Start streaming
pub async fn start(self: &Arc<Self>) -> Result<()> {
let _lock = self.start_lock.lock().await;
if self.config_changing.load(Ordering::SeqCst) {
return Err(AppError::VideoError("Config change in progress".to_string()));
}
let state = *self.state.read().await;
if state == MjpegStreamerState::Streaming {
return Ok(());
}
// Get capturer
let capturer = self.capturer.read().await.clone();
let capturer = capturer.ok_or_else(|| AppError::VideoError("Not initialized".to_string()))?;
// Start capture
capturer.start().await?;
// Start frame forwarding task
let handler = self.mjpeg_handler.clone();
let mut frame_rx = capturer.frame_sender().subscribe();
tokio::spawn(async move {
while let Ok(frame) = frame_rx.recv().await {
handler.update_frame(frame);
}
});
// Note: Audio WebSocket is handled separately by audio_ws.rs (/api/ws/audio)
*self.state.write().await = MjpegStreamerState::Streaming;
self.mjpeg_handler.set_online();
self.publish_state_change().await;
info!("MjpegStreamer: Streaming started");
Ok(())
}
/// Stop streaming
pub async fn stop(&self) -> Result<()> {
let state = *self.state.read().await;
if state != MjpegStreamerState::Streaming {
return Ok(());
}
// Stop capturer
if let Some(ref cap) = *self.capturer.read().await {
let _ = cap.stop().await;
}
// Set offline
self.mjpeg_handler.set_offline();
*self.state.write().await = MjpegStreamerState::Ready;
self.publish_state_change().await;
info!("MjpegStreamer: Streaming stopped");
Ok(())
}
/// Check if streaming
pub async fn is_streaming(&self) -> bool {
*self.state.read().await == MjpegStreamerState::Streaming
}
// ========================================================================
// Configuration Updates
// ========================================================================
/// Apply video configuration
///
/// This stops the current stream, reconfigures the capturer, and restarts.
pub async fn apply_config(self: &Arc<Self>, config: MjpegStreamerConfig) -> Result<()> {
info!("MjpegStreamer: Applying config: {:?}", config);
self.config_changing.store(true, Ordering::SeqCst);
// Stop current stream
self.stop().await?;
// Disconnect all MJPEG clients
self.mjpeg_handler.disconnect_all_clients();
// Release capturer
*self.capturer.write().await = None;
// Update config
*self.config.write().await = config.clone();
// Re-initialize if device path is set
if let Some(ref path) = config.device_path {
let devices = enumerate_devices()?;
let device = devices
.into_iter()
.find(|d| d.path == *path)
.ok_or_else(|| AppError::VideoError(format!("Device not found: {}", path.display())))?;
self.init_with_device(device).await?;
}
self.config_changing.store(false, Ordering::SeqCst);
self.publish_state_change().await;
Ok(())
}
// ========================================================================
// Internal
// ========================================================================
/// Publish state change event
async fn publish_state_change(&self) {
if let Some(ref events) = *self.events.read().await {
let state = *self.state.read().await;
let device = self.current_device.read().await;
events.publish(SystemEvent::StreamStateChanged {
state: state.to_string(),
device: device.as_ref().map(|d| d.path.display().to_string()),
});
}
}
}
impl Default for MjpegStreamer {
    /// Construct a bare (non-`Arc`) streamer with default configuration.
    ///
    /// NOTE(review): this duplicates the field initializer used by the
    /// `Arc`-returning constructors because `default()` must return `Self`;
    /// keep the field lists in sync when adding fields.
    fn default() -> Self {
        Self {
            config: RwLock::new(MjpegStreamerConfig::default()),
            capturer: RwLock::new(None),
            mjpeg_handler: Arc::new(MjpegStreamHandler::new()),
            current_device: RwLock::new(None),
            state: RwLock::new(MjpegStreamerState::Uninitialized),
            audio_controller: RwLock::new(None),
            audio_enabled: AtomicBool::new(false),
            ws_hid_handler: WsHidHandler::new(),
            hid_controller: RwLock::new(None),
            start_lock: tokio::sync::Mutex::new(()),
            events: RwLock::new(None),
            config_changing: AtomicBool::new(false),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // A fresh streamer starts with both control flags cleared.
    #[test]
    fn test_mjpeg_streamer_creation() {
        let streamer = MjpegStreamer::new();
        assert!(!streamer.is_config_changing());
        assert!(!streamer.is_audio_enabled());
    }
    // Default config: 1080p MJPEG at 30 fps.
    #[test]
    fn test_mjpeg_streamer_config_default() {
        let config = MjpegStreamerConfig::default();
        assert_eq!(config.resolution, Resolution::HD1080);
        assert_eq!(config.format, PixelFormat::Mjpeg);
        assert_eq!(config.fps, 30);
    }
    // Display produces the snake_case labels used in stats/events.
    #[test]
    fn test_mjpeg_streamer_state_display() {
        assert_eq!(MjpegStreamerState::Streaming.to_string(), "streaming");
        assert_eq!(MjpegStreamerState::Ready.to_string(), "ready");
    }
}

17
src/stream/mod.rs Normal file
View File

@@ -0,0 +1,17 @@
//! Video streaming module
//!
//! Provides MJPEG streaming and WebSocket handlers for MJPEG mode.
//!
//! # Components
//!
//! - `MjpegStreamer` - High-level MJPEG streaming manager
//! - `MjpegStreamHandler` - HTTP multipart MJPEG video streaming
//! - `WsHidHandler` - WebSocket HID input handler
pub mod mjpeg;
pub mod mjpeg_streamer;
pub mod ws_hid;
pub use mjpeg::{ClientGuard, MjpegStreamHandler};
pub use mjpeg_streamer::{MjpegStreamer, MjpegStreamerConfig, MjpegStreamerState, MjpegStreamerStats};
pub use ws_hid::WsHidHandler;

280
src/stream/ws_hid.rs Normal file
View File

@@ -0,0 +1,280 @@
//! WebSocket HID Handler for MJPEG mode
//!
//! This module provides a standalone WebSocket HID handler that can be used
//! independently of the application state. It manages multiple WebSocket
//! connections and forwards HID events to the HID controller.
//!
//! # Protocol
//!
//! Only binary protocol is supported for optimal performance.
//! See `crate::hid::datachannel` for message format details.
//!
//! # Architecture
//!
//! ```text
//! WsHidHandler
//! |
//! +-- clients: HashMap<ClientId, WsHidClient>
//! +-- hid_controller: Arc<HidController>
//! |
//! +-- add_client() -> spawns client handler task
//! +-- remove_client()
//! ```
use axum::extract::ws::{Message, WebSocket};
use futures::{SinkExt, StreamExt};
use parking_lot::RwLock;
use std::collections::HashMap;
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::Arc;
use std::time::Instant;
use tokio::sync::mpsc;
use tracing::{debug, error, info, warn};
use crate::hid::datachannel::{parse_hid_message, HidChannelEvent};
use crate::hid::HidController;
/// Client ID type
pub type ClientId = String;
/// A single connected WebSocket HID client.
#[derive(Debug)]
pub struct WsHidClient {
    /// Client identifier.
    pub id: ClientId,
    /// When the connection was established.
    pub connected_at: Instant,
    /// Count of HID events processed for this client.
    pub events_processed: AtomicU64,
    /// Channel used to ask the client's handler task to shut down.
    shutdown_tx: mpsc::Sender<()>,
}
impl WsHidClient {
    /// Number of HID events processed so far.
    pub fn events_count(&self) -> u64 {
        self.events_processed.load(Ordering::Relaxed)
    }
    /// Whole seconds since the client connected.
    pub fn connected_secs(&self) -> u64 {
        Instant::now().duration_since(self.connected_at).as_secs()
    }
}
/// WebSocket HID Handler
///
/// Manages WebSocket connections for HID input in MJPEG mode.
/// Only binary protocol is supported for optimal performance.
pub struct WsHidHandler {
    /// HID controller reference (None until set_hid_controller is called)
    hid_controller: RwLock<Option<Arc<HidController>>>,
    /// Active clients, keyed by client id
    clients: RwLock<HashMap<ClientId, Arc<WsHidClient>>>,
    /// Running state (true at creation; cleared by stop())
    running: AtomicBool,
    /// Total events processed across all clients
    total_events: AtomicU64,
}
impl WsHidHandler {
    /// Create a new WebSocket HID handler (starts in the running state).
    pub fn new() -> Arc<Self> {
        Arc::new(Self {
            hid_controller: RwLock::new(None),
            clients: RwLock::new(HashMap::new()),
            running: AtomicBool::new(true),
            total_events: AtomicU64::new(0),
        })
    }
    /// Set HID controller
    pub fn set_hid_controller(&self, hid: Arc<HidController>) {
        *self.hid_controller.write() = Some(hid);
        info!("WsHidHandler: HID controller set");
    }
    /// Get HID controller (cloned Arc, if set)
    pub fn hid_controller(&self) -> Option<Arc<HidController>> {
        self.hid_controller.read().clone()
    }
    /// Check if HID controller is available
    pub fn is_hid_available(&self) -> bool {
        self.hid_controller.read().is_some()
    }
    /// Get client count
    pub fn client_count(&self) -> usize {
        self.clients.read().len()
    }
    /// Check if running
    pub fn is_running(&self) -> bool {
        self.running.load(Ordering::SeqCst)
    }
    /// Stop the handler
    ///
    /// Clears the running flag and signals every client task to exit; the
    /// tasks remove themselves from `clients` as they shut down.
    pub fn stop(&self) {
        self.running.store(false, Ordering::SeqCst);
        // Signal all clients to disconnect
        // (try_send: a full/closed channel means the task is already exiting)
        let clients = self.clients.read();
        for client in clients.values() {
            let _ = client.shutdown_tx.try_send(());
        }
    }
    /// Get total events processed
    pub fn total_events(&self) -> u64 {
        self.total_events.load(Ordering::Relaxed)
    }
    /// Add a new WebSocket client
    ///
    /// This spawns a background task to handle the WebSocket connection.
    /// The task unregisters the client when the connection ends for any
    /// reason (close, error, shutdown signal).
    pub async fn add_client(self: &Arc<Self>, client_id: ClientId, socket: WebSocket) {
        let (shutdown_tx, shutdown_rx) = mpsc::channel(1);
        let client = Arc::new(WsHidClient {
            id: client_id.clone(),
            connected_at: Instant::now(),
            events_processed: AtomicU64::new(0),
            shutdown_tx,
        });
        self.clients.write().insert(client_id.clone(), client.clone());
        info!(
            "WsHidHandler: Client {} connected (total: {})",
            client_id,
            self.client_count()
        );
        // Spawn handler task; removal happens after handle_client returns.
        let handler = self.clone();
        tokio::spawn(async move {
            handler
                .handle_client(client_id.clone(), socket, client, shutdown_rx)
                .await;
            handler.remove_client(&client_id);
        });
    }
    /// Remove a client and log a session summary.
    pub fn remove_client(&self, client_id: &str) {
        if let Some(client) = self.clients.write().remove(client_id) {
            info!(
                "WsHidHandler: Client {} disconnected after {}s ({} events)",
                client_id,
                client.connected_secs(),
                client.events_count()
            );
        }
    }
    /// Handle a WebSocket client connection
    ///
    /// Runs until the socket closes, errors, or a shutdown signal arrives.
    /// Binary frames carry HID events; text frames are logged and ignored.
    async fn handle_client(
        &self,
        client_id: ClientId,
        socket: WebSocket,
        client: Arc<WsHidClient>,
        mut shutdown_rx: mpsc::Receiver<()>,
    ) {
        let (mut sender, mut receiver) = socket.split();
        // Send initial status as binary: 0x00 = ok, 0x01 = error
        let status_byte = if self.is_hid_available() { 0x00u8 } else { 0x01u8 };
        let _ = sender.send(Message::Binary(vec![status_byte])).await;
        loop {
            tokio::select! {
                // `biased` makes the shutdown check win over pending messages.
                biased;
                _ = shutdown_rx.recv() => {
                    debug!("WsHidHandler: Client {} received shutdown signal", client_id);
                    break;
                }
                msg = receiver.next() => {
                    match msg {
                        Some(Ok(Message::Binary(data))) => {
                            // Malformed messages are logged, not fatal.
                            if let Err(e) = self.handle_binary_message(&data, &client).await {
                                warn!("WsHidHandler: Failed to handle binary message: {}", e);
                            }
                        }
                        Some(Ok(Message::Ping(data))) => {
                            let _ = sender.send(Message::Pong(data)).await;
                        }
                        Some(Ok(Message::Close(_))) => {
                            debug!("WsHidHandler: Client {} closed connection", client_id);
                            break;
                        }
                        Some(Err(e)) => {
                            error!("WsHidHandler: WebSocket error for client {}: {}", client_id, e);
                            break;
                        }
                        None => {
                            debug!("WsHidHandler: Client {} stream ended", client_id);
                            break;
                        }
                        // Ignore text messages - binary protocol only
                        Some(Ok(Message::Text(_))) => {
                            warn!("WsHidHandler: Ignoring text message from client {} (binary protocol only)", client_id);
                        }
                        _ => {}
                    }
                }
            }
        }
    }
    /// Handle binary HID message
    ///
    /// Parses the message and forwards the decoded event to the HID
    /// controller, then bumps the per-client and global counters.
    async fn handle_binary_message(&self, data: &[u8], client: &WsHidClient) -> Result<(), String> {
        // Clone the Arc out of the lock so the guard is not held across await.
        let hid = self
            .hid_controller
            .read()
            .clone()
            .ok_or("HID controller not available")?;
        let event = parse_hid_message(data).ok_or("Invalid binary HID message")?;
        match event {
            HidChannelEvent::Keyboard(kb_event) => {
                hid.send_keyboard(kb_event)
                    .await
                    .map_err(|e| e.to_string())?;
            }
            HidChannelEvent::Mouse(ms_event) => {
                hid.send_mouse(ms_event).await.map_err(|e| e.to_string())?;
            }
        }
        client.events_processed.fetch_add(1, Ordering::Relaxed);
        self.total_events.fetch_add(1, Ordering::Relaxed);
        Ok(())
    }
}
impl Default for WsHidHandler {
    /// Construct a bare (non-`Arc`) handler in the running state.
    ///
    /// NOTE(review): duplicates the initializer in `new()` because that
    /// constructor returns `Arc<Self>`; keep the field lists in sync.
    fn default() -> Self {
        Self {
            hid_controller: RwLock::new(None),
            clients: RwLock::new(HashMap::new()),
            running: AtomicBool::new(true),
            total_events: AtomicU64::new(0),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // A fresh handler is running, has no clients, and no HID controller yet.
    #[test]
    fn test_ws_hid_handler_creation() {
        let handler = WsHidHandler::new();
        assert!(handler.is_running());
        assert_eq!(handler.client_count(), 0);
        assert!(!handler.is_hid_available());
    }
}