feat(webrtc): 添加公共ICE服务器支持和优化HID延迟

- 重构ICE配置:将TURN配置改为统一的ICE配置,支持STUN和多TURN URL
- 添加公共ICE服务器:类似RustDesk,用户留空时使用编译时配置的公共服务器
- 优化DataChannel HID消息:使用tokio::spawn立即处理,避免依赖webrtc-rs轮询
- 添加WebRTCReady事件:客户端等待此事件后再建立连接
- 初始化时启动音频流,确保WebRTC可订阅
- 移除多余的trace/debug日志减少开销
- 更新前端配置界面支持公共ICE服务器显示
This commit is contained in:
mofeng-git
2026-01-04 15:06:08 +08:00
parent 0c82d1a840
commit 9ab3d052f9
24 changed files with 766 additions and 258 deletions

View File

@@ -64,9 +64,10 @@ fn generate_secrets() {
let mut rustdesk_public_server = String::new(); let mut rustdesk_public_server = String::new();
let mut rustdesk_public_key = String::new(); let mut rustdesk_public_key = String::new();
let mut rustdesk_relay_key = String::new(); let mut rustdesk_relay_key = String::new();
let mut turn_server = String::new(); let mut ice_stun_server = String::new();
let mut turn_username = String::new(); let mut ice_turn_urls = String::new();
let mut turn_password = String::new(); let mut ice_turn_username = String::new();
let mut ice_turn_password = String::new();
// Try to read secrets.toml // Try to read secrets.toml
if let Ok(content) = fs::read_to_string("secrets.toml") { if let Ok(content) = fs::read_to_string("secrets.toml") {
@@ -84,16 +85,19 @@ fn generate_secrets() {
} }
} }
// TURN section (for future use) // ICE section (for WebRTC)
if let Some(turn) = value.get("turn") { if let Some(ice) = value.get("ice") {
if let Some(v) = turn.get("server").and_then(|v| v.as_str()) { if let Some(v) = ice.get("stun_server").and_then(|v| v.as_str()) {
turn_server = v.to_string(); ice_stun_server = v.to_string();
} }
if let Some(v) = turn.get("username").and_then(|v| v.as_str()) { if let Some(v) = ice.get("turn_urls").and_then(|v| v.as_str()) {
turn_username = v.to_string(); ice_turn_urls = v.to_string();
} }
if let Some(v) = turn.get("password").and_then(|v| v.as_str()) { if let Some(v) = ice.get("turn_username").and_then(|v| v.as_str()) {
turn_password = v.to_string(); ice_turn_username = v.to_string();
}
if let Some(v) = ice.get("turn_password").and_then(|v| v.as_str()) {
ice_turn_password = v.to_string();
} }
} }
} else { } else {
@@ -125,29 +129,38 @@ pub mod rustdesk {{
}} }}
}} }}
/// TURN server configuration (for WebRTC) /// ICE server configuration (for WebRTC NAT traversal)
pub mod turn {{ pub mod ice {{
/// TURN server address /// Public STUN server URL
pub const SERVER: &str = "{}"; pub const STUN_SERVER: &str = "{}";
/// TURN username /// Public TURN server URLs (comma-separated)
pub const USERNAME: &str = "{}"; pub const TURN_URLS: &str = "{}";
/// TURN password /// TURN authentication username
pub const PASSWORD: &str = "{}"; pub const TURN_USERNAME: &str = "{}";
/// Check if TURN server is configured /// TURN authentication password
pub const TURN_PASSWORD: &str = "{}";
/// Check if public ICE servers are configured
pub const fn is_configured() -> bool {{ pub const fn is_configured() -> bool {{
!SERVER.is_empty() !STUN_SERVER.is_empty() || !TURN_URLS.is_empty()
}}
/// Check if TURN servers are configured (requires credentials)
pub const fn has_turn() -> bool {{
!TURN_URLS.is_empty() && !TURN_USERNAME.is_empty() && !TURN_PASSWORD.is_empty()
}} }}
}} }}
"#, "#,
escape_string(&rustdesk_public_server), escape_string(&rustdesk_public_server),
escape_string(&rustdesk_public_key), escape_string(&rustdesk_public_key),
escape_string(&rustdesk_relay_key), escape_string(&rustdesk_relay_key),
escape_string(&turn_server), escape_string(&ice_stun_server),
escape_string(&turn_username), escape_string(&ice_turn_urls),
escape_string(&turn_password), escape_string(&ice_turn_username),
escape_string(&ice_turn_password),
); );
fs::write(&dest_path, code).expect("Failed to write secrets_generated.rs"); fs::write(&dest_path, code).expect("Failed to write secrets_generated.rs");

View File

@@ -321,13 +321,26 @@ pub struct AppState {
│ └─────────────────────────────────────────────────────────────────────┘ │ │ └─────────────────────────────────────────────────────────────────────┘ │
└───────────────────────────────────────────────────────────────────────────┘ └───────────────────────────────────────────────────────────────────────────┘
├──────────────────────────────────────────┐ ├──────────────────────────────┬──────────────────────────────┐
│ │
▼ ▼
┌───────────────────┐ ┌───────────────────┐ ┌───────────────────┐ ┌───────────────────┐ ┌───────────────────┐
│ MJPEG Streamer │ WebRTC Streamer │ │ MJPEG Streamer │ WebRTC Streamer │ │ RustDesk Service
│ (HTTP Stream) │ │ (RTP Packets) │ (HTTP Stream) │ │ (RTP Packets) │ (P2P Stream)
└───────────────────┘ └───────────────────┘ │ │ │ │
│ - HTTP/1.1 │ │ - DataChannel │ │ - TCP/UDP Relay │
│ - multipart/x- │ │ - SRTP │ │ - NaCl Encrypted │
│ mixed-replace │ │ - ICE/STUN/TURN │ │ - Rendezvous │
└───────────────────┘ └───────────────────┘ └───────────────────┘
│ │ │
▼ ▼ ▼
┌───────────────────────────────────────────────────────────────────────────┐
│ Clients │
│ ┌─────────────┐ ┌─────────────┐ ┌─────────────────────┐ │
│ │ Browser │ │ Browser │ │ RustDesk Client │ │
│ │ (MJPEG) │ │ (WebRTC) │ │ (Desktop/Mobile) │ │
│ └─────────────┘ └─────────────┘ └─────────────────────┘ │
└───────────────────────────────────────────────────────────────────────────┘
``` ```
### 4.3 OTG 服务架构 ### 4.3 OTG 服务架构

23
secrets.toml.example Normal file
View File

@@ -0,0 +1,23 @@
# One-KVM Secrets Configuration
# Copy this file to secrets.toml and fill in your values
# secrets.toml is ignored by git and will be read at compile time
[rustdesk]
# Public RustDesk ID server (hbbs) for users who leave the field empty
public_server = ""
# Public key for the RustDesk server (displayed to users)
public_key = ""
# Relay server authentication key (if relay server uses -k option)
relay_key = ""
[ice]
# Public ICE servers for WebRTC NAT traversal
# These servers are used when users enable "Use public ICE servers" option
# STUN server URL (for NAT type detection)
stun_server = ""
# TURN server URLs (for relay when direct connection fails)
# Supports multiple URLs separated by comma
turn_urls = ""
# TURN authentication credentials
turn_username = ""
turn_password = ""

View File

@@ -4,7 +4,7 @@ use audiopus::coder::GenericCtl;
use audiopus::{coder::Encoder, Application, Bitrate, Channels, SampleRate}; use audiopus::{coder::Encoder, Application, Bitrate, Channels, SampleRate};
use bytes::Bytes; use bytes::Bytes;
use std::time::Instant; use std::time::Instant;
use tracing::{info, trace}; use tracing::info;
use super::capture::AudioFrame; use super::capture::AudioFrame;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
@@ -187,12 +187,6 @@ impl OpusEncoder {
self.frame_count += 1; self.frame_count += 1;
trace!(
"Encoded {} samples to {} bytes Opus",
pcm_data.len(),
encoded_len
);
Ok(OpusFrame { Ok(OpusFrame {
data: Bytes::copy_from_slice(&self.output_buffer[..encoded_len]), data: Bytes::copy_from_slice(&self.output_buffer[..encoded_len]),
duration_ms, duration_ms,

View File

@@ -383,8 +383,10 @@ pub struct StreamConfig {
/// Bitrate preset (Speed/Balanced/Quality) /// Bitrate preset (Speed/Balanced/Quality)
pub bitrate_preset: BitratePreset, pub bitrate_preset: BitratePreset,
/// Custom STUN server (e.g., "stun:stun.l.google.com:19302") /// Custom STUN server (e.g., "stun:stun.l.google.com:19302")
/// If empty, uses public ICE servers from secrets.toml
pub stun_server: Option<String>, pub stun_server: Option<String>,
/// Custom TURN server (e.g., "turn:turn.example.com:3478") /// Custom TURN server (e.g., "turn:turn.example.com:3478")
/// If empty, uses public ICE servers from secrets.toml
pub turn_server: Option<String>, pub turn_server: Option<String>,
/// TURN username /// TURN username
pub turn_username: Option<String>, pub turn_username: Option<String>,
@@ -407,7 +409,8 @@ impl Default for StreamConfig {
mode: StreamMode::Mjpeg, mode: StreamMode::Mjpeg,
encoder: EncoderType::Auto, encoder: EncoderType::Auto,
bitrate_preset: BitratePreset::Balanced, bitrate_preset: BitratePreset::Balanced,
stun_server: Some("stun:stun.l.google.com:19302".to_string()), // Empty means use public ICE servers (like RustDesk)
stun_server: None,
turn_server: None, turn_server: None,
turn_username: None, turn_username: None,
turn_password: None, turn_password: None,
@@ -418,6 +421,16 @@ impl Default for StreamConfig {
} }
} }
impl StreamConfig {
/// Check if using public ICE servers (user left fields empty)
pub fn is_using_public_ice_servers(&self) -> bool {
use crate::webrtc::config::public_ice;
self.stun_server.as_ref().map(|s| s.is_empty()).unwrap_or(true)
&& self.turn_server.as_ref().map(|s| s.is_empty()).unwrap_or(true)
&& public_ice::is_configured()
}
}
/// Web server configuration /// Web server configuration
#[typeshare] #[typeshare]
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]

View File

@@ -187,6 +187,18 @@ pub enum SystemEvent {
device: String, device: String,
}, },
/// WebRTC is ready to accept connections
///
/// Sent after video frame source is connected to WebRTC pipeline.
/// Clients should wait for this event before attempting to create WebRTC sessions.
#[serde(rename = "stream.webrtc_ready")]
WebRTCReady {
/// Current video codec
codec: String,
/// Whether hardware encoding is being used
hardware: bool,
},
/// Stream statistics update (sent periodically for client stats) /// Stream statistics update (sent periodically for client stats)
#[serde(rename = "stream.stats_update")] #[serde(rename = "stream.stats_update")]
StreamStatsUpdate { StreamStatsUpdate {
@@ -485,6 +497,7 @@ impl SystemEvent {
Self::StreamDeviceLost { .. } => "stream.device_lost", Self::StreamDeviceLost { .. } => "stream.device_lost",
Self::StreamReconnecting { .. } => "stream.reconnecting", Self::StreamReconnecting { .. } => "stream.reconnecting",
Self::StreamRecovered { .. } => "stream.recovered", Self::StreamRecovered { .. } => "stream.recovered",
Self::WebRTCReady { .. } => "stream.webrtc_ready",
Self::StreamStatsUpdate { .. } => "stream.stats_update", Self::StreamStatsUpdate { .. } => "stream.stats_update",
Self::StreamModeChanged { .. } => "stream.mode_changed", Self::StreamModeChanged { .. } => "stream.mode_changed",
Self::HidStateChanged { .. } => "hid.state_changed", Self::HidStateChanged { .. } => "hid.state_changed",

View File

@@ -34,7 +34,7 @@
//! Consumer control event (type 0x03): //! Consumer control event (type 0x03):
//! - Bytes 1-2: Usage code (u16 LE) //! - Bytes 1-2: Usage code (u16 LE)
use tracing::{debug, warn}; use tracing::warn;
use super::types::ConsumerEvent; use super::types::ConsumerEvent;
use super::{ use super::{
@@ -115,11 +115,6 @@ fn parse_keyboard_message(data: &[u8]) -> Option<HidChannelEvent> {
right_meta: modifiers_byte & 0x80 != 0, right_meta: modifiers_byte & 0x80 != 0,
}; };
debug!(
"Parsed keyboard: {:?} key=0x{:02X} modifiers=0x{:02X}",
event_type, key, modifiers_byte
);
Some(HidChannelEvent::Keyboard(KeyboardEvent { Some(HidChannelEvent::Keyboard(KeyboardEvent {
event_type, event_type,
key, key,
@@ -168,11 +163,6 @@ fn parse_mouse_message(data: &[u8]) -> Option<HidChannelEvent> {
_ => (None, 0i8), _ => (None, 0i8),
}; };
debug!(
"Parsed mouse: {:?} x={} y={} button={:?} scroll={}",
event_type, x, y, button, scroll
);
Some(HidChannelEvent::Mouse(MouseEvent { Some(HidChannelEvent::Mouse(MouseEvent {
event_type, event_type,
x, x,
@@ -191,8 +181,6 @@ fn parse_consumer_message(data: &[u8]) -> Option<HidChannelEvent> {
let usage = u16::from_le_bytes([data[0], data[1]]); let usage = u16::from_le_bytes([data[0], data[1]]);
debug!("Parsed consumer: usage=0x{:04X}", usage);
Some(HidChannelEvent::Consumer(ConsumerEvent { usage })) Some(HidChannelEvent::Consumer(ConsumerEvent { usage }))
} }

View File

@@ -184,25 +184,44 @@ async fn main() -> anyhow::Result<()> {
let mut stun_servers = vec![]; let mut stun_servers = vec![];
let mut turn_servers = vec![]; let mut turn_servers = vec![];
// Add STUN server from config // Check if user configured custom servers
if let Some(ref stun) = config.stream.stun_server { let has_custom_stun = config.stream.stun_server.as_ref().map(|s| !s.is_empty()).unwrap_or(false);
if !stun.is_empty() { let has_custom_turn = config.stream.turn_server.as_ref().map(|s| !s.is_empty()).unwrap_or(false);
stun_servers.push(stun.clone());
tracing::info!("WebRTC STUN server configured: {}", stun);
}
}
// Add TURN server from config // If no custom servers, use public ICE servers (like RustDesk)
if let Some(ref turn) = config.stream.turn_server { if !has_custom_stun && !has_custom_turn {
if !turn.is_empty() { use one_kvm::webrtc::config::public_ice;
let username = config.stream.turn_username.clone().unwrap_or_default(); if public_ice::is_configured() {
let credential = config.stream.turn_password.clone().unwrap_or_default(); if let Some(stun) = public_ice::stun_server() {
turn_servers.push(one_kvm::webrtc::config::TurnServer { stun_servers.push(stun.clone());
url: turn.clone(), tracing::info!("Using public STUN server: {}", stun);
username: username.clone(), }
credential, for turn in public_ice::turn_servers() {
}); tracing::info!("Using public TURN server: {:?}", turn.urls);
tracing::info!("WebRTC TURN server configured: {} (user: {})", turn, username); turn_servers.push(turn);
}
} else {
tracing::info!("No public ICE servers configured, using host candidates only");
}
} else {
// Use custom servers
if let Some(ref stun) = config.stream.stun_server {
if !stun.is_empty() {
stun_servers.push(stun.clone());
tracing::info!("Using custom STUN server: {}", stun);
}
}
if let Some(ref turn) = config.stream.turn_server {
if !turn.is_empty() {
let username = config.stream.turn_username.clone().unwrap_or_default();
let credential = config.stream.turn_password.clone().unwrap_or_default();
turn_servers.push(one_kvm::webrtc::config::TurnServer::new(
turn.clone(),
username.clone(),
credential,
));
tracing::info!("Using custom TURN server: {} (user: {})", turn, username);
}
} }
} }
@@ -326,6 +345,10 @@ async fn main() -> anyhow::Result<()> {
config.audio.device, config.audio.device,
config.audio.quality config.audio.quality
); );
// Start audio streaming so WebRTC can subscribe to Opus frames
if let Err(e) = controller.start_streaming().await {
tracing::warn!("Failed to start audio streaming: {}", e);
}
} else { } else {
tracing::info!("Audio disabled in configuration"); tracing::info!("Audio disabled in configuration");
} }

View File

@@ -396,6 +396,29 @@ impl VideoStreamManager {
); );
self.webrtc_streamer.update_video_config(resolution, format, fps).await; self.webrtc_streamer.update_video_config(resolution, format, fps).await;
self.webrtc_streamer.set_video_source(frame_tx).await; self.webrtc_streamer.set_video_source(frame_tx).await;
// Get device path for events
let device_path = self.streamer.current_device().await
.map(|d| d.path.to_string_lossy().to_string())
.unwrap_or_default();
// Publish StreamConfigApplied event - clients can now safely connect
self.publish_event(SystemEvent::StreamConfigApplied {
device: device_path,
resolution: (resolution.width, resolution.height),
format: format!("{:?}", format).to_lowercase(),
fps,
})
.await;
// Publish WebRTCReady event - frame source is now connected
let codec = self.webrtc_streamer.current_video_codec().await;
let is_hardware = self.webrtc_streamer.is_hardware_encoding().await;
self.publish_event(SystemEvent::WebRTCReady {
codec: codec_to_string(codec),
hardware: is_hardware,
})
.await;
} else { } else {
warn!("No frame source available for WebRTC - sessions may fail to receive video"); warn!("No frame source available for WebRTC - sessions may fail to receive video");
} }

View File

@@ -73,6 +73,10 @@ pub struct StreamConfigResponse {
pub mode: StreamMode, pub mode: StreamMode,
pub encoder: EncoderType, pub encoder: EncoderType,
pub bitrate_preset: BitratePreset, pub bitrate_preset: BitratePreset,
/// 是否有公共 ICE 服务器可用(编译时确定)
pub has_public_ice_servers: bool,
/// 当前是否正在使用公共 ICE 服务器(STUN/TURN 都为空时) /// 当前是否正在使用公共 ICE 服务器(STUN/TURN 都为空时)
pub using_public_ice_servers: bool,
pub stun_server: Option<String>, pub stun_server: Option<String>,
pub turn_server: Option<String>, pub turn_server: Option<String>,
pub turn_username: Option<String>, pub turn_username: Option<String>,
@@ -82,10 +86,13 @@ pub struct StreamConfigResponse {
impl From<&StreamConfig> for StreamConfigResponse { impl From<&StreamConfig> for StreamConfigResponse {
fn from(config: &StreamConfig) -> Self { fn from(config: &StreamConfig) -> Self {
use crate::webrtc::config::public_ice;
Self { Self {
mode: config.mode.clone(), mode: config.mode.clone(),
encoder: config.encoder.clone(), encoder: config.encoder.clone(),
bitrate_preset: config.bitrate_preset, bitrate_preset: config.bitrate_preset,
has_public_ice_servers: public_ice::is_configured(),
using_public_ice_servers: config.is_using_public_ice_servers(),
stun_server: config.stun_server.clone(), stun_server: config.stun_server.clone(),
turn_server: config.turn_server.clone(), turn_server: config.turn_server.clone(),
turn_username: config.turn_username.clone(), turn_username: config.turn_username.clone(),
@@ -101,8 +108,10 @@ pub struct StreamConfigUpdate {
pub encoder: Option<EncoderType>, pub encoder: Option<EncoderType>,
pub bitrate_preset: Option<BitratePreset>, pub bitrate_preset: Option<BitratePreset>,
/// STUN server URL (e.g., "stun:stun.l.google.com:19302") /// STUN server URL (e.g., "stun:stun.l.google.com:19302")
/// Leave empty to use public ICE servers
pub stun_server: Option<String>, pub stun_server: Option<String>,
/// TURN server URL (e.g., "turn:turn.example.com:3478") /// TURN server URL (e.g., "turn:turn.example.com:3478")
/// Leave empty to use public ICE servers
pub turn_server: Option<String>, pub turn_server: Option<String>,
/// TURN username /// TURN username
pub turn_username: Option<String>, pub turn_username: Option<String>,
@@ -142,7 +151,7 @@ impl StreamConfigUpdate {
if let Some(preset) = self.bitrate_preset { if let Some(preset) = self.bitrate_preset {
config.bitrate_preset = preset; config.bitrate_preset = preset;
} }
// STUN/TURN settings - empty string means clear, Some("value") means set // STUN/TURN settings - empty string means clear (use public servers), Some("value") means set custom
if let Some(ref stun) = self.stun_server { if let Some(ref stun) = self.stun_server {
config.stun_server = if stun.is_empty() { None } else { Some(stun.clone()) }; config.stun_server = if stun.is_empty() { None } else { Some(stun.clone()) };
} }

View File

@@ -648,6 +648,24 @@ pub async fn setup_init(
} }
} }
// Start audio streaming if audio device was selected during setup
if new_config.audio.enabled {
let audio_config = crate::audio::AudioControllerConfig {
enabled: true,
device: new_config.audio.device.clone(),
quality: crate::audio::AudioQuality::from_str(&new_config.audio.quality),
};
if let Err(e) = state.audio.update_config(audio_config).await {
tracing::warn!("Failed to start audio during setup: {}", e);
} else {
tracing::info!("Audio started during setup: device={}", new_config.audio.device);
}
// Also enable WebRTC audio
if let Err(e) = state.stream_manager.set_webrtc_audio_enabled(true).await {
tracing::warn!("Failed to enable WebRTC audio during setup: {}", e);
}
}
tracing::info!("System initialized successfully with admin user: {}", req.username); tracing::info!("System initialized successfully with admin user: {}", req.username);
Ok(Json(LoginResponse { Ok(Json(LoginResponse {

View File

@@ -2,6 +2,63 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::secrets;
/// Helpers for the compile-time ("public") ICE server configuration.
pub mod public_ice {
    use super::*;

    /// Whether any public ICE server (STUN or TURN) was baked in at compile time.
    pub fn is_configured() -> bool {
        secrets::ice::is_configured()
    }

    /// Whether public TURN servers (including credentials) were baked in.
    pub fn has_turn() -> bool {
        secrets::ice::has_turn()
    }

    /// The baked-in public STUN server URL, or `None` when unset.
    pub fn stun_server() -> Option<String> {
        match secrets::ice::STUN_SERVER {
            "" => None,
            server => Some(server.to_string()),
        }
    }

    /// The baked-in public TURN servers as [`TurnServer`] values.
    ///
    /// Returns an empty vector unless TURN URLs *and* credentials are configured.
    pub fn turn_servers() -> Vec<TurnServer> {
        if !secrets::ice::has_turn() {
            return Vec::new();
        }
        // TURN_URLS is a comma-separated list; drop blank entries after trimming.
        let urls: Vec<String> = secrets::ice::TURN_URLS
            .split(',')
            .filter_map(|part| {
                let trimmed = part.trim();
                (!trimmed.is_empty()).then(|| trimmed.to_string())
            })
            .collect();
        if urls.is_empty() {
            return Vec::new();
        }
        vec![TurnServer {
            urls,
            username: secrets::ice::TURN_USERNAME.to_string(),
            credential: secrets::ice::TURN_PASSWORD.to_string(),
        }]
    }

    /// Every public ICE server, as `(stun_urls, turn_servers)`, ready for
    /// WebRTC configuration.
    pub fn get_all_servers() -> (Vec<String>, Vec<TurnServer>) {
        let stun: Vec<String> = stun_server().into_iter().collect();
        (stun, turn_servers())
    }
}
/// WebRTC configuration /// WebRTC configuration
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WebRtcConfig { pub struct WebRtcConfig {
@@ -46,14 +103,26 @@ impl Default for WebRtcConfig {
/// TURN server configuration /// TURN server configuration
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TurnServer { pub struct TurnServer {
/// TURN server URL (e.g., "turn:turn.example.com:3478") /// TURN server URLs (e.g., ["turn:turn.example.com:3478?transport=udp", "turn:turn.example.com:3478?transport=tcp"])
pub url: String, /// Multiple URLs allow fallback between UDP and TCP transports
pub urls: Vec<String>,
/// Username for TURN authentication /// Username for TURN authentication
pub username: String, pub username: String,
/// Credential for TURN authentication /// Credential for TURN authentication
pub credential: String, pub credential: String,
} }
impl TurnServer {
/// Create a TurnServer with a single URL (for backwards compatibility)
pub fn new(url: String, username: String, credential: String) -> Self {
Self {
urls: vec![url],
username,
credential,
}
}
}
/// Video codec preference /// Video codec preference
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]

View File

@@ -78,7 +78,7 @@ impl PeerConnection {
for turn in &config.turn_servers { for turn in &config.turn_servers {
ice_servers.push(RTCIceServer { ice_servers.push(RTCIceServer {
urls: vec![turn.url.clone()], urls: turn.urls.clone(),
username: turn.username.clone(), username: turn.username.clone(),
credential: turn.credential.clone(), credential: turn.credential.clone(),
..Default::default() ..Default::default()
@@ -207,10 +207,11 @@ impl PeerConnection {
*data_channel.write().await = Some(dc.clone()); *data_channel.write().await = Some(dc.clone());
// Set up message handler with HID processing // Set up message handler with HID processing
// Immediately spawn task in tokio runtime for low latency
dc.on_message(Box::new(move |msg: DataChannelMessage| { dc.on_message(Box::new(move |msg: DataChannelMessage| {
let hid = hid.clone(); let hid = hid.clone();
Box::pin(async move { tokio::spawn(async move {
debug!("DataChannel HID message: {} bytes", msg.data.len()); debug!("DataChannel HID message: {} bytes", msg.data.len());
// Parse and process HID message // Parse and process HID message
@@ -233,7 +234,10 @@ impl PeerConnection {
} }
} }
} }
}) });
// Return empty future (actual work is spawned above)
Box::pin(async {})
})); }));
}) })
})); }));
@@ -432,11 +436,10 @@ impl PeerConnectionManager {
// Add video track // Add video track
peer.add_video_track(VideoTrackConfig::default()).await?; peer.add_video_track(VideoTrackConfig::default()).await?;
// Create data channel and set HID controller // Set HID controller if available
// Note: We DON'T create a data channel here - the frontend creates it.
// The server receives it via on_data_channel callback set in set_hid_controller().
if self.config.enable_datachannel { if self.config.enable_datachannel {
peer.create_data_channel("hid").await?;
// Set HID controller if available
if let Some(ref hid) = self.hid_controller { if let Some(ref hid) = self.hid_controller {
peer.set_hid_controller(hid.clone()); peer.set_hid_controller(hid.clone());
} }

View File

@@ -250,13 +250,13 @@ impl UniversalSession {
// Skip TURN servers without credentials (webrtc-rs requires them) // Skip TURN servers without credentials (webrtc-rs requires them)
if turn.username.is_empty() || turn.credential.is_empty() { if turn.username.is_empty() || turn.credential.is_empty() {
warn!( warn!(
"Skipping TURN server {} - credentials required but missing", "Skipping TURN server {:?} - credentials required but missing",
turn.url turn.urls
); );
continue; continue;
} }
ice_servers.push(RTCIceServer { ice_servers.push(RTCIceServer {
urls: vec![turn.url.clone()], urls: turn.urls.clone(),
username: turn.username.clone(), username: turn.username.clone(),
credential: turn.credential.clone(), credential: turn.credential.clone(),
..Default::default() ..Default::default()
@@ -424,7 +424,9 @@ impl UniversalSession {
dc.on_message(Box::new(move |msg: DataChannelMessage| { dc.on_message(Box::new(move |msg: DataChannelMessage| {
let hid = hid.clone(); let hid = hid.clone();
Box::pin(async move { // Immediately spawn task in tokio runtime for low latency
// Don't rely on webrtc-rs to poll the returned Future
tokio::spawn(async move {
if let Some(event) = parse_hid_message(&msg.data) { if let Some(event) = parse_hid_message(&msg.data) {
match event { match event {
HidChannelEvent::Keyboard(kb_event) => { HidChannelEvent::Keyboard(kb_event) => {
@@ -444,7 +446,10 @@ impl UniversalSession {
} }
} }
} }
}) });
// Return empty future (actual work is spawned above)
Box::pin(async {})
})); }));
}) })
})); }));
@@ -654,12 +659,6 @@ impl UniversalSession {
} }
} else { } else {
packets_sent += 1; packets_sent += 1;
trace!(
"Session {} sent audio packet {}: {} bytes",
session_id,
packets_sent,
opus_frame.data.len()
);
} }
} }
Err(broadcast::error::RecvError::Lagged(n)) => { Err(broadcast::error::RecvError::Lagged(n)) => {

View File

@@ -35,7 +35,7 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use tokio::sync::{broadcast, RwLock}; use tokio::sync::{broadcast, RwLock};
use tracing::{debug, error, info, warn}; use tracing::{debug, error, info, trace, warn};
use crate::audio::shared_pipeline::{SharedAudioPipeline, SharedAudioPipelineConfig}; use crate::audio::shared_pipeline::{SharedAudioPipeline, SharedAudioPipelineConfig};
use crate::audio::{AudioController, OpusFrame}; use crate::audio::{AudioController, OpusFrame};
@@ -315,9 +315,11 @@ impl WebRtcStreamer {
} }
drop(pipeline_guard); drop(pipeline_guard);
// Clear video frame source to signal upstream to stop // NOTE: Don't clear video_frame_tx here!
*streamer.video_frame_tx.write().await = None; // The frame source is managed by stream_manager and should
info!("Cleared video frame source"); // remain available for new sessions. Only stream_manager
// should clear it during mode switches.
info!("Video pipeline stopped, but keeping frame source for new sessions");
} }
break; break;
} }
@@ -512,13 +514,37 @@ impl WebRtcStreamer {
/// Update video configuration /// Update video configuration
/// ///
/// This will restart the encoding pipeline and close all sessions. /// Only restarts the encoding pipeline if configuration actually changed.
/// This allows multiple consumers (WebRTC, RustDesk) to share the same pipeline
/// without interrupting each other when they call this method with the same config.
pub async fn update_video_config( pub async fn update_video_config(
&self, &self,
resolution: Resolution, resolution: Resolution,
format: PixelFormat, format: PixelFormat,
fps: u32, fps: u32,
) { ) {
// Check if configuration actually changed
let config = self.config.read().await;
let config_changed = config.resolution != resolution
|| config.input_format != format
|| config.fps != fps;
drop(config);
if !config_changed {
// Configuration unchanged, no need to restart pipeline
trace!(
"Video config unchanged: {}x{} {:?} @ {} fps",
resolution.width, resolution.height, format, fps
);
return;
}
// Configuration changed, restart pipeline
info!(
"Video config changed, restarting pipeline: {}x{} {:?} @ {} fps",
resolution.width, resolution.height, format, fps
);
// Stop existing pipeline // Stop existing pipeline
if let Some(ref pipeline) = *self.video_pipeline.read().await { if let Some(ref pipeline) = *self.video_pipeline.read().await {
pipeline.stop(); pipeline.stop();
@@ -598,6 +624,8 @@ impl WebRtcStreamer {
/// ///
/// Note: Changes take effect for new sessions only. /// Note: Changes take effect for new sessions only.
/// Existing sessions need to be reconnected to use the new ICE config. /// Existing sessions need to be reconnected to use the new ICE config.
///
/// If both stun_server and turn_server are empty/None, uses public ICE servers.
pub async fn update_ice_config( pub async fn update_ice_config(
&self, &self,
stun_server: Option<String>, stun_server: Option<String>,
@@ -607,32 +635,49 @@ impl WebRtcStreamer {
) { ) {
let mut config = self.config.write().await; let mut config = self.config.write().await;
// Update STUN servers // Clear existing servers
config.webrtc.stun_servers.clear(); config.webrtc.stun_servers.clear();
if let Some(ref stun) = stun_server {
if !stun.is_empty() {
config.webrtc.stun_servers.push(stun.clone());
info!("WebRTC STUN server updated: {}", stun);
}
}
// Update TURN servers
config.webrtc.turn_servers.clear(); config.webrtc.turn_servers.clear();
if let Some(ref turn) = turn_server {
if !turn.is_empty() {
let username = turn_username.unwrap_or_default();
let credential = turn_password.unwrap_or_default();
config.webrtc.turn_servers.push(TurnServer {
url: turn.clone(),
username: username.clone(),
credential,
});
info!("WebRTC TURN server updated: {} (user: {})", turn, username);
}
}
if config.webrtc.stun_servers.is_empty() && config.webrtc.turn_servers.is_empty() { // Check if user configured custom servers
info!("WebRTC ICE config cleared - only host candidates will be used"); let has_custom_stun = stun_server.as_ref().map(|s| !s.is_empty()).unwrap_or(false);
let has_custom_turn = turn_server.as_ref().map(|s| !s.is_empty()).unwrap_or(false);
// If no custom servers, use public ICE servers (like RustDesk)
if !has_custom_stun && !has_custom_turn {
use crate::webrtc::config::public_ice;
if public_ice::is_configured() {
if let Some(stun) = public_ice::stun_server() {
config.webrtc.stun_servers.push(stun.clone());
info!("Using public STUN server: {}", stun);
}
for turn in public_ice::turn_servers() {
info!("Using public TURN server: {:?}", turn.urls);
config.webrtc.turn_servers.push(turn);
}
} else {
info!("No public ICE servers configured, using host candidates only");
}
} else {
// Use custom servers
if let Some(ref stun) = stun_server {
if !stun.is_empty() {
config.webrtc.stun_servers.push(stun.clone());
info!("Using custom STUN server: {}", stun);
}
}
if let Some(ref turn) = turn_server {
if !turn.is_empty() {
let username = turn_username.unwrap_or_default();
let credential = turn_password.unwrap_or_default();
config.webrtc.turn_servers.push(TurnServer::new(
turn.clone(),
username.clone(),
credential,
));
info!("Using custom TURN server: {} (user: {})", turn, username);
}
}
} }
} }
@@ -670,15 +715,14 @@ impl WebRtcStreamer {
let mut session = UniversalSession::new(session_config.clone(), session_id.clone()).await?; let mut session = UniversalSession::new(session_config.clone(), session_id.clone()).await?;
// Set HID controller if available // Set HID controller if available
// Note: We DON'T create a data channel here - the frontend creates it.
// The server only receives it via on_data_channel callback set in set_hid_controller().
// If server also created a channel, frontend's ondatachannel would overwrite its
// own channel with server's, but server's channel has no message handler!
if let Some(ref hid) = *self.hid_controller.read().await { if let Some(ref hid) = *self.hid_controller.read().await {
session.set_hid_controller(hid.clone()); session.set_hid_controller(hid.clone());
} }
// Create data channel
if self.config.read().await.webrtc.enable_datachannel {
session.create_data_channel("hid").await?;
}
let session = Arc::new(session); let session = Arc::new(session);
// Subscribe to video pipeline frames // Subscribe to video pipeline frames
@@ -901,9 +945,16 @@ impl WebRtcStreamer {
/// Set bitrate using preset /// Set bitrate using preset
/// ///
/// Note: Hardware encoders (VAAPI, NVENC, etc.) don't support dynamic bitrate changes. /// Note: Hardware encoders (VAAPI, NVENC, etc.) don't support dynamic bitrate changes.
/// This method restarts the pipeline to apply the new bitrate. /// This method restarts the pipeline to apply the new bitrate only if the preset actually changed.
pub async fn set_bitrate_preset(self: &Arc<Self>, preset: BitratePreset) -> Result<()> { pub async fn set_bitrate_preset(self: &Arc<Self>, preset: BitratePreset) -> Result<()> {
// Update config first // Check if preset actually changed
let current_preset = self.config.read().await.bitrate_preset;
if current_preset == preset {
trace!("Bitrate preset unchanged: {}", preset);
return Ok(());
}
// Update config
self.config.write().await.bitrate_preset = preset; self.config.write().await.bitrate_preset = preset;
// Check if pipeline exists and is running // Check if pipeline exists and is running

View File

@@ -1,7 +1,8 @@
<script setup lang="ts"> <script setup lang="ts">
import { ref } from 'vue' import { ref, computed } from 'vue'
import { useI18n } from 'vue-i18n' import { useI18n } from 'vue-i18n'
import { useRouter } from 'vue-router' import { useRouter } from 'vue-router'
import { useSystemStore } from '@/stores/system'
import { Button } from '@/components/ui/button' import { Button } from '@/components/ui/button'
import { import {
Popover, Popover,
@@ -42,10 +43,16 @@ import MsdDialog from '@/components/MsdDialog.vue'
const { t } = useI18n() const { t } = useI18n()
const router = useRouter() const router = useRouter()
const systemStore = useSystemStore()
// Overflow menu state // Overflow menu state
const overflowMenuOpen = ref(false) const overflowMenuOpen = ref(false)
// MSD is only available when HID backend is not CH9329 (CH9329 is serial-only, no USB gadget)
const showMsd = computed(() => {
return props.isAdmin && systemStore.hid?.backend !== 'ch9329'
})
const props = defineProps<{ const props = defineProps<{
mouseMode?: 'absolute' | 'relative' mouseMode?: 'absolute' | 'relative'
videoMode?: VideoMode videoMode?: VideoMode
@@ -100,7 +107,8 @@ const extensionOpen = ref(false)
/> />
<!-- Virtual Media (MSD) - Hidden on small screens, shown in overflow --> <!-- Virtual Media (MSD) - Hidden on small screens, shown in overflow -->
<TooltipProvider v-if="props.isAdmin" class="hidden sm:block"> <!-- Also hidden when HID backend is CH9329 (no USB gadget support) -->
<TooltipProvider v-if="showMsd" class="hidden sm:block">
<Tooltip> <Tooltip>
<TooltipTrigger as-child> <TooltipTrigger as-child>
<Button variant="ghost" size="sm" class="h-8 gap-1.5 text-xs" @click="msdDialogOpen = true"> <Button variant="ghost" size="sm" class="h-8 gap-1.5 text-xs" @click="msdDialogOpen = true">
@@ -253,8 +261,8 @@ const extensionOpen = ref(false)
</Button> </Button>
</DropdownMenuTrigger> </DropdownMenuTrigger>
<DropdownMenuContent align="end" class="w-48"> <DropdownMenuContent align="end" class="w-48">
<!-- MSD - Mobile only --> <!-- MSD - Mobile only, hidden when CH9329 backend -->
<DropdownMenuItem v-if="props.isAdmin" class="sm:hidden" @click="msdDialogOpen = true; overflowMenuOpen = false"> <DropdownMenuItem v-if="showMsd" class="sm:hidden" @click="msdDialogOpen = true; overflowMenuOpen = false">
<HardDrive class="h-4 w-4 mr-2" /> <HardDrive class="h-4 w-4 mr-2" />
{{ t('actionbar.virtualMedia') }} {{ t('actionbar.virtualMedia') }}
</DropdownMenuItem> </DropdownMenuItem>

View File

@@ -52,9 +52,9 @@ async function handleLogout() {
</script> </script>
<template> <template>
<div class="min-h-screen bg-background"> <div class="h-screen flex flex-col bg-background overflow-hidden">
<!-- Header --> <!-- Header -->
<header class="sticky top-0 z-50 w-full border-b bg-background/95 backdrop-blur supports-[backdrop-filter]:bg-background/60"> <header class="shrink-0 z-50 w-full border-b bg-background/95 backdrop-blur supports-[backdrop-filter]:bg-background/60">
<div class="flex h-14 items-center px-4 max-w-full"> <div class="flex h-14 items-center px-4 max-w-full">
<!-- Logo --> <!-- Logo -->
<RouterLink to="/" class="flex items-center gap-2 font-semibold"> <RouterLink to="/" class="flex items-center gap-2 font-semibold">
@@ -128,7 +128,7 @@ async function handleLogout() {
</header> </header>
<!-- Main Content --> <!-- Main Content -->
<main class="px-4 py-6 max-w-full"> <main class="flex-1 overflow-hidden">
<slot /> <slot />
</main> </main>
</div> </div>

View File

@@ -25,7 +25,7 @@ const networkErrorMessage = ref<string | null>(null)
let reconnectTimeout: number | null = null let reconnectTimeout: number | null = null
const hidUnavailable = ref(false) // Track if HID is unavailable to prevent unnecessary reconnects const hidUnavailable = ref(false) // Track if HID is unavailable to prevent unnecessary reconnects
// Mouse throttle mechanism // Mouse throttle mechanism (10ms = 100Hz for smoother cursor movement)
let mouseThrottleMs = 10 let mouseThrottleMs = 10
let lastMouseSendTime = 0 let lastMouseSendTime = 0
let pendingMouseEvent: HidMouseEvent | null = null let pendingMouseEvent: HidMouseEvent | null = null
@@ -183,36 +183,40 @@ function _sendMouseInternal(event: HidMouseEvent): Promise<void> {
} }
// Throttled mouse event sender // Throttled mouse event sender
// Note: Returns immediately for throttled events to avoid Promise memory leak.
// When an event is throttled, we store it as pending and resolve immediately.
// A timer will send the pending event later, but that's fire-and-forget.
function sendMouse(event: HidMouseEvent): Promise<void> { function sendMouse(event: HidMouseEvent): Promise<void> {
return new Promise((resolve, reject) => { const now = Date.now()
const now = Date.now() const elapsed = now - lastMouseSendTime
const elapsed = now - lastMouseSendTime
if (elapsed >= mouseThrottleMs) { if (elapsed >= mouseThrottleMs) {
// Send immediately if enough time has passed // Send immediately if enough time has passed
lastMouseSendTime = now lastMouseSendTime = now
_sendMouseInternal(event).then(resolve).catch(reject) return _sendMouseInternal(event)
} else { } else {
// Queue the event and send after throttle period // Throttle: store event for later, resolve immediately to avoid Promise leak
pendingMouseEvent = event pendingMouseEvent = event
// Clear existing timer // Clear existing timer and set a new one
if (throttleTimer !== null) { if (throttleTimer !== null) {
clearTimeout(throttleTimer) clearTimeout(throttleTimer)
}
// Schedule send after remaining throttle time
throttleTimer = window.setTimeout(() => {
if (pendingMouseEvent) {
lastMouseSendTime = Date.now()
_sendMouseInternal(pendingMouseEvent)
.then(resolve)
.catch(reject)
pendingMouseEvent = null
}
}, mouseThrottleMs - elapsed)
} }
})
// Schedule send after remaining throttle time (fire-and-forget)
throttleTimer = window.setTimeout(() => {
if (pendingMouseEvent) {
lastMouseSendTime = Date.now()
_sendMouseInternal(pendingMouseEvent).catch(() => {
// Silently ignore errors for throttled events
})
pendingMouseEvent = null
}
}, mouseThrottleMs - elapsed)
// Resolve immediately - the event is queued, caller doesn't need to wait
return Promise.resolve()
}
} }
// Send consumer control event (multimedia keys) // Send consumer control event (multimedia keys)

View File

@@ -83,6 +83,7 @@ let sessionId: string | null = null
let statsInterval: number | null = null let statsInterval: number | null = null
let isConnecting = false // Lock to prevent concurrent connect calls let isConnecting = false // Lock to prevent concurrent connect calls
let pendingIceCandidates: RTCIceCandidate[] = [] // Queue for ICE candidates before sessionId is set let pendingIceCandidates: RTCIceCandidate[] = [] // Queue for ICE candidates before sessionId is set
let cachedMediaStream: MediaStream | null = null // Cached MediaStream to avoid recreating
const state = ref<WebRTCState>('disconnected') const state = ref<WebRTCState>('disconnected')
const videoTrack = ref<MediaStreamTrack | null>(null) const videoTrack = ref<MediaStreamTrack | null>(null)
@@ -399,8 +400,28 @@ async function connect(): Promise<boolean> {
} }
} }
isConnecting = false // 等待连接真正建立(最多等待 15 秒)
return true // 直接检查 peerConnection.connectionState 而不是 reactive state
// 因为 TypeScript 不知道 state 会被 onconnectionstatechange 回调异步修改
const connectionTimeout = 15000
const pollInterval = 100
let waited = 0
while (waited < connectionTimeout && peerConnection) {
const pcState = peerConnection.connectionState
if (pcState === 'connected') {
isConnecting = false
return true
}
if (pcState === 'failed' || pcState === 'closed') {
throw new Error('Connection failed during ICE negotiation')
}
await new Promise(resolve => setTimeout(resolve, pollInterval))
waited += pollInterval
}
// 超时
throw new Error('Connection timeout waiting for ICE negotiation')
} catch (err) { } catch (err) {
state.value = 'failed' state.value = 'failed'
error.value = err instanceof Error ? err.message : 'Connection failed' error.value = err instanceof Error ? err.message : 'Connection failed'
@@ -441,6 +462,7 @@ async function disconnect() {
videoTrack.value = null videoTrack.value = null
audioTrack.value = null audioTrack.value = null
cachedMediaStream = null // Clear cached stream on disconnect
state.value = 'disconnected' state.value = 'disconnected'
error.value = null error.value = null
@@ -493,20 +515,49 @@ function sendMouse(event: HidMouseEvent): boolean {
} }
} }
// Get MediaStream for video element // Get MediaStream for video element (cached to avoid recreating)
function getMediaStream(): MediaStream | null { function getMediaStream(): MediaStream | null {
if (!videoTrack.value && !audioTrack.value) { if (!videoTrack.value && !audioTrack.value) {
return null return null
} }
const stream = new MediaStream() // Reuse cached stream if tracks match
if (cachedMediaStream) {
const currentVideoTracks = cachedMediaStream.getVideoTracks()
const currentAudioTracks = cachedMediaStream.getAudioTracks()
const videoMatches = videoTrack.value
? currentVideoTracks.includes(videoTrack.value)
: currentVideoTracks.length === 0
const audioMatches = audioTrack.value
? currentAudioTracks.includes(audioTrack.value)
: currentAudioTracks.length === 0
if (videoMatches && audioMatches) {
return cachedMediaStream
}
// Tracks changed, update the cached stream
// Remove old tracks
currentVideoTracks.forEach(t => cachedMediaStream!.removeTrack(t))
currentAudioTracks.forEach(t => cachedMediaStream!.removeTrack(t))
// Add new tracks
if (videoTrack.value) cachedMediaStream.addTrack(videoTrack.value)
if (audioTrack.value) cachedMediaStream.addTrack(audioTrack.value)
return cachedMediaStream
}
// Create new cached stream
cachedMediaStream = new MediaStream()
if (videoTrack.value) { if (videoTrack.value) {
stream.addTrack(videoTrack.value) cachedMediaStream.addTrack(videoTrack.value)
} }
if (audioTrack.value) { if (audioTrack.value) {
stream.addTrack(audioTrack.value) cachedMediaStream.addTrack(audioTrack.value)
} }
return stream return cachedMediaStream
} }
// Composable export // Composable export

View File

@@ -283,12 +283,12 @@ export default {
fullscreen: 'Fullscreen', fullscreen: 'Fullscreen',
exitFullscreen: 'Exit Fullscreen', exitFullscreen: 'Exit Fullscreen',
screenshot: 'Screenshot', screenshot: 'Screenshot',
reconnect: 'Reconnect', reconnect: 'Refresh Page',
noVideo: 'No video signal', noVideo: 'No video signal',
connecting: 'Connecting...', connecting: 'Connecting...',
streamOffline: 'Stream offline', streamOffline: 'Stream offline',
connectionFailed: 'Connection Failed', connectionFailed: 'Connection Failed',
connectionFailedDesc: 'Unable to connect to video stream, please check device status', connectionFailedDesc: 'Unable to connect to video stream, please refresh page or check device status',
videoRestarting: 'Video stream is restarting', videoRestarting: 'Video stream is restarting',
deviceSwitching: 'Switching video device...', deviceSwitching: 'Switching video device...',
configChanging: 'Applying new configuration...', configChanging: 'Applying new configuration...',
@@ -570,16 +570,18 @@ export default {
// WebRTC / ICE // WebRTC / ICE
webrtcSettings: 'WebRTC Settings', webrtcSettings: 'WebRTC Settings',
webrtcSettingsDesc: 'Configure STUN/TURN servers for NAT traversal', webrtcSettingsDesc: 'Configure STUN/TURN servers for NAT traversal',
usingPublicIceServers: 'Using public ICE servers',
publicIceServersHint: 'Leave empty to use built-in public STUN/TURN servers for NAT traversal',
stunServer: 'STUN Server', stunServer: 'STUN Server',
stunServerPlaceholder: 'stun:stun.l.google.com:19302', stunServerPlaceholder: 'stun:stun.l.google.com:19302',
stunServerHint: 'STUN server for NAT traversal (e.g., stun:stun.l.google.com:19302)', stunServerHint: 'Custom STUN server (leave empty to use public server)',
turnServer: 'TURN Server', turnServer: 'TURN Server',
turnServerPlaceholder: 'turn:turn.example.com:3478', turnServerPlaceholder: 'turn:turn.example.com:3478',
turnServerHint: 'TURN relay server for restrictive networks (optional)', turnServerHint: 'Custom TURN relay server (leave empty to use public server)',
turnUsername: 'TURN Username', turnUsername: 'TURN Username',
turnPassword: 'TURN Password', turnPassword: 'TURN Password',
turnPasswordConfigured: 'Password already configured. Leave empty to keep current password.', turnPasswordConfigured: 'Password already configured. Leave empty to keep current password.',
turnCredentialsHint: 'Credentials for TURN server authentication', turnCredentialsHint: 'Credentials for TURN server authentication (only needed for custom servers)',
iceConfigNote: 'Note: Changes require reconnecting the WebRTC session to take effect.', iceConfigNote: 'Note: Changes require reconnecting the WebRTC session to take effect.',
}, },
virtualKeyboard: { virtualKeyboard: {
@@ -628,6 +630,7 @@ export default {
absolute: 'Absolute', absolute: 'Absolute',
relative: 'Relative', relative: 'Relative',
connection: 'Connection', connection: 'Connection',
channel: 'Channel',
networkError: 'Network Error', networkError: 'Network Error',
disconnected: 'Disconnected', disconnected: 'Disconnected',
availability: 'Availability', availability: 'Availability',
@@ -637,6 +640,7 @@ export default {
quality: 'Quality', quality: 'Quality',
streaming: 'Streaming', streaming: 'Streaming',
off: 'Off', off: 'Off',
defaultDevice: 'Default',
notConnected: 'Not Connected', notConnected: 'Not Connected',
connected: 'Connected', connected: 'Connected',
image: 'Image', image: 'Image',

View File

@@ -283,12 +283,12 @@ export default {
fullscreen: '全屏', fullscreen: '全屏',
exitFullscreen: '退出全屏', exitFullscreen: '退出全屏',
screenshot: '截图', screenshot: '截图',
reconnect: '重新连接', reconnect: '刷新网页',
noVideo: '无视频信号', noVideo: '无视频信号',
connecting: '正在连接...', connecting: '正在连接...',
streamOffline: '视频流离线', streamOffline: '视频流离线',
connectionFailed: '连接失败', connectionFailed: '连接失败',
connectionFailedDesc: '无法连接到视频流,请检查设备状态', connectionFailedDesc: '无法连接到视频流,请刷新网页或检查设备状态',
videoRestarting: '视频流正在重启', videoRestarting: '视频流正在重启',
deviceSwitching: '正在切换视频设备...', deviceSwitching: '正在切换视频设备...',
configChanging: '正在应用新配置...', configChanging: '正在应用新配置...',
@@ -570,16 +570,18 @@ export default {
// WebRTC / ICE // WebRTC / ICE
webrtcSettings: 'WebRTC 设置', webrtcSettings: 'WebRTC 设置',
webrtcSettingsDesc: '配置 STUN/TURN 服务器以实现 NAT 穿透', webrtcSettingsDesc: '配置 STUN/TURN 服务器以实现 NAT 穿透',
usingPublicIceServers: '正在使用公共 ICE 服务器',
publicIceServersHint: '留空以使用内置的公共 STUN/TURN 服务器进行 NAT 穿透',
stunServer: 'STUN 服务器', stunServer: 'STUN 服务器',
stunServerPlaceholder: 'stun:stun.l.google.com:19302', stunServerPlaceholder: 'stun:stun.l.google.com:19302',
stunServerHint: '用于 NAT 穿透的 STUN 服务器例如stun:stun.l.google.com:19302', stunServerHint: '自定义 STUN 服务器(留空则使用公共服务器',
turnServer: 'TURN 服务器', turnServer: 'TURN 服务器',
turnServerPlaceholder: 'turn:turn.example.com:3478', turnServerPlaceholder: 'turn:turn.example.com:3478',
turnServerHint: '用于限制性网络的 TURN 中继服务器(可选', turnServerHint: '自定义 TURN 中继服务器(留空则使用公共服务器',
turnUsername: 'TURN 用户名', turnUsername: 'TURN 用户名',
turnPassword: 'TURN 密码', turnPassword: 'TURN 密码',
turnPasswordConfigured: '密码已配置。留空则保持当前密码。', turnPasswordConfigured: '密码已配置。留空则保持当前密码。',
turnCredentialsHint: '用于 TURN 服务器认证的凭据', turnCredentialsHint: '用于 TURN 服务器认证的凭据(仅自定义服务器需要)',
iceConfigNote: '注意:更改后需要重新连接 WebRTC 会话才能生效。', iceConfigNote: '注意:更改后需要重新连接 WebRTC 会话才能生效。',
}, },
virtualKeyboard: { virtualKeyboard: {
@@ -628,6 +630,7 @@ export default {
absolute: '绝对定位', absolute: '绝对定位',
relative: '相对定位', relative: '相对定位',
connection: '连接', connection: '连接',
channel: '通道',
networkError: '网络错误', networkError: '网络错误',
disconnected: '已断开', disconnected: '已断开',
availability: '可用性', availability: '可用性',
@@ -637,6 +640,7 @@ export default {
quality: '质量', quality: '质量',
streaming: '传输中', streaming: '传输中',
off: '关闭', off: '关闭',
defaultDevice: '默认',
notConnected: '未连接', notConnected: '未连接',
connected: '已连接', connected: '已连接',
image: '镜像', image: '镜像',

View File

@@ -232,9 +232,15 @@ export interface StreamConfig {
encoder: EncoderType; encoder: EncoderType;
/** Bitrate preset (Speed/Balanced/Quality) */ /** Bitrate preset (Speed/Balanced/Quality) */
bitrate_preset: BitratePreset; bitrate_preset: BitratePreset;
/** Custom STUN server (e.g., "stun:stun.l.google.com:19302") */ /**
* Custom STUN server (e.g., "stun:stun.l.google.com:19302")
* If empty, uses public ICE servers from secrets.toml
*/
stun_server?: string; stun_server?: string;
/** Custom TURN server (e.g., "turn:turn.example.com:3478") */ /**
* Custom TURN server (e.g., "turn:turn.example.com:3478")
* If empty, uses public ICE servers from secrets.toml
*/
turn_server?: string; turn_server?: string;
/** TURN username */ /** TURN username */
turn_username?: string; turn_username?: string;
@@ -532,6 +538,10 @@ export interface StreamConfigResponse {
mode: StreamMode; mode: StreamMode;
encoder: EncoderType; encoder: EncoderType;
bitrate_preset: BitratePreset; bitrate_preset: BitratePreset;
/** 是否有公共 ICE 服务器可用(编译时确定) */
has_public_ice_servers: boolean;
/** 当前是否正在使用公共 ICE 服务器STUN/TURN 都为空时) */
using_public_ice_servers: boolean;
stun_server?: string; stun_server?: string;
turn_server?: string; turn_server?: string;
turn_username?: string; turn_username?: string;
@@ -543,9 +553,15 @@ export interface StreamConfigUpdate {
mode?: StreamMode; mode?: StreamMode;
encoder?: EncoderType; encoder?: EncoderType;
bitrate_preset?: BitratePreset; bitrate_preset?: BitratePreset;
/** STUN server URL (e.g., "stun:stun.l.google.com:19302") */ /**
* STUN server URL (e.g., "stun:stun.l.google.com:19302")
* Leave empty to use public ICE servers
*/
stun_server?: string; stun_server?: string;
/** TURN server URL (e.g., "turn:turn.example.com:3478") */ /**
* TURN server URL (e.g., "turn:turn.example.com:3478")
* Leave empty to use public ICE servers
*/
turn_server?: string; turn_server?: string;
/** TURN username */ /** TURN username */
turn_username?: string; turn_username?: string;

View File

@@ -1,5 +1,5 @@
<script setup lang="ts"> <script setup lang="ts">
import { ref, onMounted, onUnmounted, computed, watch } from 'vue' import { ref, onMounted, onUnmounted, computed, watch, nextTick } from 'vue'
import { useI18n } from 'vue-i18n' import { useI18n } from 'vue-i18n'
import { useRouter } from 'vue-router' import { useRouter } from 'vue-router'
import { useSystemStore } from '@/stores/system' import { useSystemStore } from '@/stores/system'
@@ -9,6 +9,7 @@ import { useHidWebSocket } from '@/composables/useHidWebSocket'
import { useWebRTC } from '@/composables/useWebRTC' import { useWebRTC } from '@/composables/useWebRTC'
import { getUnifiedAudio } from '@/composables/useUnifiedAudio' import { getUnifiedAudio } from '@/composables/useUnifiedAudio'
import { streamApi, hidApi, atxApi, extensionsApi, atxConfigApi, userApi } from '@/api' import { streamApi, hidApi, atxApi, extensionsApi, atxConfigApi, userApi } from '@/api'
import type { HidKeyboardEvent, HidMouseEvent } from '@/types/hid'
import { toast } from 'vue-sonner' import { toast } from 'vue-sonner'
import { generateUUID } from '@/lib/utils' import { generateUUID } from '@/lib/utils'
import type { VideoMode } from '@/components/VideoConfigPopover.vue' import type { VideoMode } from '@/components/VideoConfigPopover.vue'
@@ -186,6 +187,18 @@ const videoDetails = computed<StatusDetail[]>(() => {
}) })
const hidStatus = computed<'connected' | 'connecting' | 'disconnected' | 'error'>(() => { const hidStatus = computed<'connected' | 'connecting' | 'disconnected' | 'error'>(() => {
// In WebRTC mode, check DataChannel status first
if (videoMode.value !== 'mjpeg') {
// DataChannel is ready - HID is connected via WebRTC
if (webrtc.dataChannelReady.value) return 'connected'
// WebRTC is connecting - HID is also connecting
if (webrtc.isConnecting.value) return 'connecting'
// WebRTC is connected but DataChannel not ready - still connecting
if (webrtc.isConnected.value) return 'connecting'
// WebRTC not connected - fall through to WebSocket check as fallback
}
// MJPEG mode or WebRTC fallback: check WebSocket HID status
// If HID WebSocket has network error, show connecting (yellow) // If HID WebSocket has network error, show connecting (yellow)
if (hidWs.networkError.value) return 'connecting' if (hidWs.networkError.value) return 'connecting'
@@ -221,13 +234,31 @@ const hidDetails = computed<StatusDetail[]>(() => {
{ label: t('statusCard.currentMode'), value: mouseMode.value === 'absolute' ? t('statusCard.absolute') : t('statusCard.relative'), status: 'ok' }, { label: t('statusCard.currentMode'), value: mouseMode.value === 'absolute' ? t('statusCard.absolute') : t('statusCard.relative'), status: 'ok' },
] ]
// Add connection status // Add HID channel info based on video mode
if (hidWs.networkError.value) { if (videoMode.value !== 'mjpeg') {
details.push({ label: t('statusCard.connection'), value: t('statusCard.networkError'), status: 'warning' }) // WebRTC mode - show DataChannel status
} else if (!hidWs.connected.value) { if (webrtc.dataChannelReady.value) {
details.push({ label: t('statusCard.connection'), value: t('statusCard.disconnected'), status: 'warning' }) details.push({ label: t('statusCard.channel'), value: 'DataChannel (WebRTC)', status: 'ok' })
} else if (hidWs.hidUnavailable.value) { } else if (webrtc.isConnecting.value || webrtc.isConnected.value) {
details.push({ label: t('statusCard.availability'), value: t('statusCard.hidUnavailable'), status: 'warning' }) details.push({ label: t('statusCard.channel'), value: 'DataChannel', status: 'warning' })
} else {
// Fallback to WebSocket
details.push({ label: t('statusCard.channel'), value: 'WebSocket (fallback)', status: hidWs.connected.value ? 'ok' : 'warning' })
}
} else {
// MJPEG mode - WebSocket HID
details.push({ label: t('statusCard.channel'), value: 'WebSocket', status: hidWs.connected.value ? 'ok' : 'warning' })
}
// Add connection status for WebSocket (only relevant for MJPEG or fallback)
if (videoMode.value === 'mjpeg' || !webrtc.dataChannelReady.value) {
if (hidWs.networkError.value) {
details.push({ label: t('statusCard.connection'), value: t('statusCard.networkError'), status: 'warning' })
} else if (!hidWs.connected.value) {
details.push({ label: t('statusCard.connection'), value: t('statusCard.disconnected'), status: 'warning' })
} else if (hidWs.hidUnavailable.value) {
details.push({ label: t('statusCard.availability'), value: t('statusCard.hidUnavailable'), status: 'warning' })
}
} }
return details return details
@@ -242,10 +273,20 @@ const audioStatus = computed<'connected' | 'connecting' | 'disconnected' | 'erro
return 'disconnected' return 'disconnected'
}) })
// Helper function to translate audio quality
function translateAudioQuality(quality: string | undefined): string {
if (!quality) return t('common.unknown')
const qualityLower = quality.toLowerCase()
if (qualityLower === 'voice') return t('actionbar.qualityVoice')
if (qualityLower === 'balanced') return t('actionbar.qualityBalanced')
if (qualityLower === 'high') return t('actionbar.qualityHigh')
return quality // fallback to original value
}
const audioQuickInfo = computed(() => { const audioQuickInfo = computed(() => {
const audio = systemStore.audio const audio = systemStore.audio
if (!audio?.available) return '' if (!audio?.available) return ''
if (audio.streaming) return audio.quality if (audio.streaming) return translateAudioQuality(audio.quality)
return t('statusCard.off') return t('statusCard.off')
}) })
@@ -258,8 +299,8 @@ const audioDetails = computed<StatusDetail[]>(() => {
if (!audio) return [] if (!audio) return []
return [ return [
{ label: t('statusCard.device'), value: audio.device || 'default' }, { label: t('statusCard.device'), value: audio.device || t('statusCard.defaultDevice') },
{ label: t('statusCard.quality'), value: audio.quality }, { label: t('statusCard.quality'), value: translateAudioQuality(audio.quality) },
{ label: t('statusCard.streaming'), value: audio.streaming ? t('statusCard.yes') : t('statusCard.no'), status: audio.streaming ? 'ok' : undefined }, { label: t('statusCard.streaming'), value: audio.streaming ? t('statusCard.yes') : t('statusCard.no'), status: audio.streaming ? 'ok' : undefined },
] ]
}) })
@@ -387,6 +428,11 @@ function handleVideoError() {
return return
} }
// 如果正在切换模式,忽略错误(可能是 503 错误,因为后端已切换模式)
if (isModeSwitching.value) {
return
}
// 如果正在刷新视频,忽略清空 src 时触发的错误 // 如果正在刷新视频,忽略清空 src 时触发的错误
if (isRefreshingVideo) { if (isRefreshingVideo) {
return return
@@ -676,6 +722,12 @@ function handleStreamConfigApplied(data: any) {
// Refresh video based on current mode // Refresh video based on current mode
videoRestarting.value = false videoRestarting.value = false
// 如果正在进行模式切换不需要在这里处理WebRTCReady 事件会处理)
if (isModeSwitching.value) {
console.log('[StreamConfigApplied] Mode switch in progress, waiting for WebRTCReady')
return
}
if (videoMode.value !== 'mjpeg') { if (videoMode.value !== 'mjpeg') {
// In WebRTC mode, reconnect WebRTC (session was closed due to config change) // In WebRTC mode, reconnect WebRTC (session was closed due to config change)
switchToWebRTC(videoMode.value) switchToWebRTC(videoMode.value)
@@ -690,6 +742,17 @@ function handleStreamConfigApplied(data: any) {
}) })
} }
// 处理 WebRTC 就绪事件 - 这是后端真正准备好接受 WebRTC 连接的信号
function handleWebRTCReady(data: { codec: string; hardware: boolean }) {
console.log(`[WebRTCReady] Backend ready: codec=${data.codec}, hardware=${data.hardware}`)
// 如果正在进行模式切换,标记后端已就绪
if (isModeSwitching.value) {
console.log('[WebRTCReady] Signaling backend ready for WebRTC connection')
backendReadyForWebRTC = true
}
}
function handleStreamStateChanged(data: any) { function handleStreamStateChanged(data: any) {
if (data.state === 'error') { if (data.state === 'error') {
videoError.value = true videoError.value = true
@@ -778,7 +841,13 @@ function handleStreamModeChanged(data: { mode: string; previous_mode: string })
// Server returns: 'mjpeg', 'h264', 'h265', 'vp8', 'vp9', or 'webrtc' // Server returns: 'mjpeg', 'h264', 'h265', 'vp8', 'vp9', or 'webrtc'
const newMode = data.mode === 'webrtc' ? 'h264' : data.mode as VideoMode const newMode = data.mode === 'webrtc' ? 'h264' : data.mode as VideoMode
// Show toast notification // 如果正在进行模式切换,忽略这个事件(这是我们自己触发的切换产生的)
if (isModeSwitching.value) {
console.log('[StreamModeChanged] Mode switch in progress, ignoring event')
return
}
// Show toast notification only if this is an external mode change
toast.info(t('console.streamModeChanged'), { toast.info(t('console.streamModeChanged'), {
description: t('console.streamModeChangedDesc', { mode: data.mode.toUpperCase() }), description: t('console.streamModeChangedDesc', { mode: data.mode.toUpperCase() }),
duration: 5000, duration: 5000,
@@ -792,6 +861,14 @@ function handleStreamModeChanged(data: { mode: string; previous_mode: string })
// 标记是否正在刷新视频(用于忽略清空 src 时触发的 error 事件) // 标记是否正在刷新视频(用于忽略清空 src 时触发的 error 事件)
let isRefreshingVideo = false let isRefreshingVideo = false
// 标记是否正在切换模式(防止竞态条件和 503 错误)
const isModeSwitching = ref(false)
// 标记后端是否已准备好接受 WebRTC 连接(由 StreamConfigApplied 事件设置)
let backendReadyForWebRTC = false
function reloadPage() {
window.location.reload()
}
function refreshVideo() { function refreshVideo() {
backendFps.value = 0 backendFps.value = 0
@@ -845,6 +922,7 @@ async function connectWebRTCOnly(codec: VideoMode = 'h264') {
mjpegTimestamp.value = 0 mjpegTimestamp.value = 0
if (videoRef.value) { if (videoRef.value) {
videoRef.value.src = '' videoRef.value.src = ''
videoRef.value.removeAttribute('src')
} }
videoLoading.value = true videoLoading.value = true
@@ -859,18 +937,9 @@ async function connectWebRTCOnly(codec: VideoMode = 'h264') {
duration: 3000, duration: 3000,
}) })
// Try to attach video immediately in case track is already available // 强制重新绑定视频(即使 track 已存在)
if (webrtc.videoTrack.value && webrtcVideoRef.value) { // 这解决了页面返回时视频不显示的问题
const stream = webrtc.getMediaStream() await rebindWebRTCVideo()
if (stream) {
webrtcVideoRef.value.srcObject = stream
try {
await webrtcVideoRef.value.play()
} catch {
// AbortError is expected when switching modes quickly, ignore it
}
}
}
videoLoading.value = false videoLoading.value = false
videoMode.value = codec videoMode.value = codec
@@ -885,6 +954,28 @@ async function connectWebRTCOnly(codec: VideoMode = 'h264') {
} }
} }
// 强制重新绑定 WebRTC 视频到视频元素
// 解决页面切换后视频不显示的问题
async function rebindWebRTCVideo() {
if (!webrtcVideoRef.value) return
// 先清空再重新绑定,确保浏览器重新渲染
webrtcVideoRef.value.srcObject = null
await nextTick()
if (webrtc.videoTrack.value) {
const stream = webrtc.getMediaStream()
if (stream) {
webrtcVideoRef.value.srcObject = stream
try {
await webrtcVideoRef.value.play()
} catch {
// AbortError is expected when switching modes quickly, ignore it
}
}
}
}
// WebRTC video mode handling (switches server mode) // WebRTC video mode handling (switches server mode)
async function switchToWebRTC(codec: VideoMode = 'h264') { async function switchToWebRTC(codec: VideoMode = 'h264') {
// 清除 MJPEG 相关的定时器,防止切换后重新加载 MJPEG // 清除 MJPEG 相关的定时器,防止切换后重新加载 MJPEG
@@ -918,29 +1009,48 @@ async function switchToWebRTC(codec: VideoMode = 'h264') {
} }
// Step 2: Call backend API to switch mode with specific codec // Step 2: Call backend API to switch mode with specific codec
// 重置就绪标志
backendReadyForWebRTC = false
await streamApi.setMode(codec) await streamApi.setMode(codec)
// Step 3: Connect WebRTC with new codec // Step 3: 等待后端完成格式切换(由 StreamConfigApplied 事件触发)
const success = await webrtc.connect() // 后端需要时间来:停止捕获 → 切换格式 → 重启捕获 → 连接 frame source
// 使用轮询等待,最多等待 3 秒
const maxWaitTime = 3000
const pollInterval = 100
let waited = 0
while (!backendReadyForWebRTC && waited < maxWaitTime) {
await new Promise(resolve => setTimeout(resolve, pollInterval))
waited += pollInterval
}
if (!backendReadyForWebRTC) {
console.warn('[WebRTC] Backend not ready after timeout, attempting connection anyway')
} else {
console.log('[WebRTC] Backend ready signal received, connecting')
}
// Step 4: Connect WebRTC with retry
let retries = 3
let success = false
while (retries > 0 && !success) {
success = await webrtc.connect()
if (!success) {
retries--
if (retries > 0) {
console.log(`[WebRTC] Connection failed, retrying (${retries} attempts left)`)
await new Promise(resolve => setTimeout(resolve, 500))
}
}
}
if (success) { if (success) {
toast.success(t('console.webrtcConnected'), { toast.success(t('console.webrtcConnected'), {
description: t('console.webrtcConnectedDesc'), description: t('console.webrtcConnectedDesc'),
duration: 3000, duration: 3000,
}) })
// Video will be attached by the watch on webrtc.videoTrack // 强制重新绑定视频
// But also try to attach immediately in case track is already available await rebindWebRTCVideo()
if (webrtc.videoTrack.value && webrtcVideoRef.value) {
const stream = webrtc.getMediaStream()
if (stream) {
webrtcVideoRef.value.srcObject = stream
try {
await webrtcVideoRef.value.play()
} catch {
// AbortError is expected when switching modes quickly, ignore it
}
}
}
videoLoading.value = false videoLoading.value = false
@@ -995,40 +1105,49 @@ async function switchToMJPEG() {
} }
// Handle video mode change // Handle video mode change
function handleVideoModeChange(mode: VideoMode) { async function handleVideoModeChange(mode: VideoMode) {
// 防止重复切换和竞态条件
if (mode === videoMode.value) return if (mode === videoMode.value) return
if (isModeSwitching.value) {
// Reset mjpegTimestamp to 0 when switching away from MJPEG console.log('[VideoMode] Switch already in progress, ignoring')
// This prevents mjpegUrl from returning a valid URL and stops MJPEG requests return
if (mode !== 'mjpeg') {
mjpegTimestamp.value = 0
} }
videoMode.value = mode isModeSwitching.value = true
localStorage.setItem('videoMode', mode)
// All WebRTC modes: h264, h265, vp8, vp9 try {
if (mode !== 'mjpeg') { // Reset mjpegTimestamp to 0 when switching away from MJPEG
switchToWebRTC(mode) // This prevents mjpegUrl from returning a valid URL and stops MJPEG requests
} else { if (mode !== 'mjpeg') {
switchToMJPEG() mjpegTimestamp.value = 0
// 完全清理 MJPEG 图片元素
if (videoRef.value) {
videoRef.value.src = ''
videoRef.value.removeAttribute('src')
}
// 等待一小段时间确保浏览器取消 pending 请求
await new Promise(resolve => setTimeout(resolve, 50))
}
videoMode.value = mode
localStorage.setItem('videoMode', mode)
// All WebRTC modes: h264, h265, vp8, vp9
if (mode !== 'mjpeg') {
await switchToWebRTC(mode)
} else {
await switchToMJPEG()
}
} finally {
isModeSwitching.value = false
} }
} }
// Watch for WebRTC video track changes // Watch for WebRTC video track changes
watch(() => webrtc.videoTrack.value, async (track) => { watch(() => webrtc.videoTrack.value, async (track) => {
if (track && webrtcVideoRef.value && videoMode.value !== 'mjpeg') { if (track && webrtcVideoRef.value && videoMode.value !== 'mjpeg') {
const stream = webrtc.getMediaStream() // 使用统一的重新绑定函数
await rebindWebRTCVideo()
if (stream) {
webrtcVideoRef.value.srcObject = stream
try {
await webrtcVideoRef.value.play()
} catch {
// AbortError is expected when switching modes quickly, ignore it
}
}
} }
}) })
@@ -1232,6 +1351,41 @@ function handleHidError(_error: any, _operation: string) {
// All HID errors are silently ignored // All HID errors are silently ignored
} }
// HID channel selection: use WebRTC DataChannel when available, fallback to WebSocket
function sendKeyboardEvent(type: 'down' | 'up', key: number, modifiers?: { ctrl?: boolean; shift?: boolean; alt?: boolean; meta?: boolean }) {
// In WebRTC mode with DataChannel ready, use DataChannel for lower latency
if (videoMode.value !== 'mjpeg' && webrtc.dataChannelReady.value) {
const event: HidKeyboardEvent = {
type: type === 'down' ? 'keydown' : 'keyup',
key,
modifiers,
}
const sent = webrtc.sendKeyboard(event)
if (sent) return
// Fallback to WebSocket if DataChannel send failed
}
// Use WebSocket as fallback or for MJPEG mode
hidApi.keyboard(type, key, modifiers).catch(err => handleHidError(err, `keyboard ${type}`))
}
function sendMouseEvent(data: { type: 'move' | 'move_abs' | 'down' | 'up' | 'scroll'; x?: number; y?: number; button?: 'left' | 'right' | 'middle'; scroll?: number }) {
// In WebRTC mode with DataChannel ready, use DataChannel for lower latency
if (videoMode.value !== 'mjpeg' && webrtc.dataChannelReady.value) {
const event: HidMouseEvent = {
type: data.type === 'move_abs' ? 'moveabs' : data.type,
x: data.x,
y: data.y,
button: data.button === 'left' ? 0 : data.button === 'middle' ? 1 : data.button === 'right' ? 2 : undefined,
scroll: data.scroll,
}
const sent = webrtc.sendMouse(event)
if (sent) return
// Fallback to WebSocket if DataChannel send failed
}
// Use WebSocket as fallback or for MJPEG mode
hidApi.mouse(data).catch(err => handleHidError(err, `mouse ${data.type}`))
}
// Check if a key should be blocked (prevented from default behavior) // Check if a key should be blocked (prevented from default behavior)
function shouldBlockKey(e: KeyboardEvent): boolean { function shouldBlockKey(e: KeyboardEvent): boolean {
// In fullscreen mode, block all keys for maximum capture // In fullscreen mode, block all keys for maximum capture
@@ -1291,7 +1445,7 @@ function handleKeyDown(e: KeyboardEvent) {
meta: e.metaKey, meta: e.metaKey,
} }
hidApi.keyboard('down', e.keyCode, modifiers).catch(err => handleHidError(err, 'keyboard down')) sendKeyboardEvent('down', e.keyCode, modifiers)
} }
function handleKeyUp(e: KeyboardEvent) { function handleKeyUp(e: KeyboardEvent) {
@@ -1310,7 +1464,7 @@ function handleKeyUp(e: KeyboardEvent) {
const keyName = e.key === ' ' ? 'Space' : e.key const keyName = e.key === ' ' ? 'Space' : e.key
pressedKeys.value = pressedKeys.value.filter(k => k !== keyName) pressedKeys.value = pressedKeys.value.filter(k => k !== keyName)
hidApi.keyboard('up', e.keyCode).catch(err => handleHidError(err, 'keyboard up')) sendKeyboardEvent('up', e.keyCode)
} }
function handleMouseMove(e: MouseEvent) { function handleMouseMove(e: MouseEvent) {
@@ -1325,7 +1479,7 @@ function handleMouseMove(e: MouseEvent) {
const y = Math.round((e.clientY - rect.top) / rect.height * 32767) const y = Math.round((e.clientY - rect.top) / rect.height * 32767)
mousePosition.value = { x, y } mousePosition.value = { x, y }
hidApi.mouse({ type: 'move_abs', x, y }).catch(err => handleHidError(err, 'mouse move')) sendMouseEvent({ type: 'move_abs', x, y })
} else { } else {
// Relative mode: use movementX/Y when pointer is locked // Relative mode: use movementX/Y when pointer is locked
if (isPointerLocked.value) { if (isPointerLocked.value) {
@@ -1338,7 +1492,7 @@ function handleMouseMove(e: MouseEvent) {
const clampedDx = Math.max(-127, Math.min(127, dx)) const clampedDx = Math.max(-127, Math.min(127, dx))
const clampedDy = Math.max(-127, Math.min(127, dy)) const clampedDy = Math.max(-127, Math.min(127, dy))
hidApi.mouse({ type: 'move', x: clampedDx, y: clampedDy }).catch(err => handleHidError(err, 'mouse move')) sendMouseEvent({ type: 'move', x: clampedDx, y: clampedDy })
} }
// Update display position (accumulated delta for display only) // Update display position (accumulated delta for display only)
@@ -1372,7 +1526,7 @@ function handleMouseDown(e: MouseEvent) {
const button = e.button === 0 ? 'left' : e.button === 2 ? 'right' : 'middle' const button = e.button === 0 ? 'left' : e.button === 2 ? 'right' : 'middle'
pressedMouseButton.value = button pressedMouseButton.value = button
hidApi.mouse({ type: 'down', button }).catch(err => handleHidError(err, 'mouse down')) sendMouseEvent({ type: 'down', button })
} }
function handleMouseUp(e: MouseEvent) { function handleMouseUp(e: MouseEvent) {
@@ -1401,13 +1555,13 @@ function handleMouseUpInternal(rawButton: number) {
} }
pressedMouseButton.value = null pressedMouseButton.value = null
hidApi.mouse({ type: 'up', button }).catch(err => handleHidError(err, 'mouse up')) sendMouseEvent({ type: 'up', button })
} }
function handleWheel(e: WheelEvent) { function handleWheel(e: WheelEvent) {
e.preventDefault() e.preventDefault()
const scroll = e.deltaY > 0 ? -1 : 1 const scroll = e.deltaY > 0 ? -1 : 1
hidApi.mouse({ type: 'scroll', scroll }).catch(err => handleHidError(err, 'mouse scroll')) sendMouseEvent({ type: 'scroll', scroll })
} }
function handleContextMenu(e: MouseEvent) { function handleContextMenu(e: MouseEvent) {
@@ -1456,7 +1610,7 @@ function handleBlur() {
if (pressedMouseButton.value !== null) { if (pressedMouseButton.value !== null) {
const button = pressedMouseButton.value const button = pressedMouseButton.value
pressedMouseButton.value = null pressedMouseButton.value = null
hidApi.mouse({ type: 'up', button }).catch(err => handleHidError(err, 'mouse up (blur)')) sendMouseEvent({ type: 'up', button })
} }
} }
@@ -1514,6 +1668,7 @@ onMounted(async () => {
// 1. 先注册 WebSocket 事件监听器 // 1. 先注册 WebSocket 事件监听器
on('stream.config_changing', handleStreamConfigChanging) on('stream.config_changing', handleStreamConfigChanging)
on('stream.config_applied', handleStreamConfigApplied) on('stream.config_applied', handleStreamConfigApplied)
on('stream.webrtc_ready', handleWebRTCReady)
on('stream.state_changed', handleStreamStateChanged) on('stream.state_changed', handleStreamStateChanged)
on('stream.stats_update', handleStreamStatsUpdate) on('stream.stats_update', handleStreamStatsUpdate)
on('stream.mode_changed', handleStreamModeChanged) on('stream.mode_changed', handleStreamModeChanged)
@@ -1613,6 +1768,7 @@ onUnmounted(() => {
// Unregister WebSocket event handlers // Unregister WebSocket event handlers
off('stream.config_changing', handleStreamConfigChanging) off('stream.config_changing', handleStreamConfigChanging)
off('stream.config_applied', handleStreamConfigApplied) off('stream.config_applied', handleStreamConfigApplied)
off('stream.webrtc_ready', handleWebRTCReady)
off('stream.state_changed', handleStreamStateChanged) off('stream.state_changed', handleStreamStateChanged)
off('stream.stats_update', handleStreamStatsUpdate) off('stream.stats_update', handleStreamStatsUpdate)
off('stream.mode_changed', handleStreamModeChanged) off('stream.mode_changed', handleStreamModeChanged)
@@ -1646,6 +1802,7 @@ onUnmounted(() => {
// Remove WebSocket event listeners // Remove WebSocket event listeners
off('stream.config_changing', handleStreamConfigChanging) off('stream.config_changing', handleStreamConfigChanging)
off('stream.config_applied', handleStreamConfigApplied) off('stream.config_applied', handleStreamConfigApplied)
off('stream.webrtc_ready', handleWebRTCReady)
off('stream.state_changed', handleStreamStateChanged) off('stream.state_changed', handleStreamStateChanged)
off('stream.stats_update', handleStreamStatsUpdate) off('stream.stats_update', handleStreamStatsUpdate)
off('stream.mode_changed', handleStreamModeChanged) off('stream.mode_changed', handleStreamModeChanged)
@@ -1710,9 +1867,9 @@ onUnmounted(() => {
:details="hidDetails" :details="hidDetails"
/> />
<!-- MSD Status - Admin only --> <!-- MSD Status - Admin only, hidden when CH9329 backend (no USB gadget support) -->
<StatusCard <StatusCard
v-if="authStore.isAdmin && systemStore.msd?.available" v-if="authStore.isAdmin && systemStore.msd?.available && systemStore.hid?.backend !== 'ch9329'"
:title="t('statusCard.msd')" :title="t('statusCard.msd')"
type="msd" type="msd"
:status="msdStatus" :status="msdStatus"
@@ -1882,7 +2039,7 @@ onUnmounted(() => {
</div> </div>
</div> </div>
<div class="flex gap-2"> <div class="flex gap-2">
<Button variant="secondary" size="sm" @click="refreshVideo"> <Button variant="secondary" size="sm" @click="reloadPage">
<RefreshCw class="h-4 w-4 mr-2" /> <RefreshCw class="h-4 w-4 mr-2" />
{{ t('console.reconnect') }} {{ t('console.reconnect') }}
</Button> </Button>

View File

@@ -1,5 +1,5 @@
<script setup lang="ts"> <script setup lang="ts">
import { ref, onMounted } from 'vue' import { ref, computed, onMounted } from 'vue'
import { useI18n } from 'vue-i18n' import { useI18n } from 'vue-i18n'
import { useSystemStore } from '@/stores/system' import { useSystemStore } from '@/stores/system'
import { import {
@@ -252,6 +252,12 @@ const config = ref({
// 跟踪服务器是否已配置 TURN 密码 // 跟踪服务器是否已配置 TURN 密码
const hasTurnPassword = ref(false) const hasTurnPassword = ref(false)
// 跟踪公共 ICE 服务器状态
const hasPublicIceServers = ref(false)
const usingPublicIceServers = computed(() => {
return !config.value.stun_server && !config.value.turn_server && hasPublicIceServers.value
})
// OTG Descriptor settings // OTG Descriptor settings
const otgVendorIdHex = ref('1d6b') const otgVendorIdHex = ref('1d6b')
const otgProductIdHex = ref('0104') const otgProductIdHex = ref('0104')
@@ -305,7 +311,7 @@ const selectedBackendFormats = computed(() => {
}) })
// Video selection computed properties // Video selection computed properties
import { computed, watch } from 'vue' import { watch } from 'vue'
const selectedDevice = computed(() => { const selectedDevice = computed(() => {
return devices.value.video.find(d => d.path === config.value.video_device) return devices.value.video.find(d => d.path === config.value.video_device)
@@ -555,6 +561,9 @@ async function loadConfig() {
// 设置是否已配置 TURN 密码 // 设置是否已配置 TURN 密码
hasTurnPassword.value = stream.has_turn_password || false hasTurnPassword.value = stream.has_turn_password || false
// 设置公共 ICE 服务器状态
hasPublicIceServers.value = stream.has_public_ice_servers || false
// 加载 OTG 描述符配置 // 加载 OTG 描述符配置
if (hid.otg_descriptor) { if (hid.otg_descriptor) {
otgVendorIdHex.value = hid.otg_descriptor.vendor_id?.toString(16).padStart(4, '0') || '1d6b' otgVendorIdHex.value = hid.otg_descriptor.vendor_id?.toString(16).padStart(4, '0') || '1d6b'
@@ -1068,7 +1077,7 @@ onMounted(async () => {
<template> <template>
<AppLayout> <AppLayout>
<div class="flex h-[calc(100vh-6rem)]"> <div class="flex h-full overflow-hidden">
<!-- Mobile Header --> <!-- Mobile Header -->
<div class="lg:hidden fixed top-16 left-0 right-0 z-20 flex items-center justify-between px-4 py-3 border-b bg-background"> <div class="lg:hidden fixed top-16 left-0 right-0 z-20 flex items-center justify-between px-4 py-3 border-b bg-background">
<h1 class="text-lg font-semibold">{{ t('settings.title') }}</h1> <h1 class="text-lg font-semibold">{{ t('settings.title') }}</h1>
@@ -1259,6 +1268,9 @@ onMounted(async () => {
:placeholder="t('settings.stunServerPlaceholder')" :placeholder="t('settings.stunServerPlaceholder')"
/> />
<p class="text-xs text-muted-foreground">{{ t('settings.stunServerHint') }}</p> <p class="text-xs text-muted-foreground">{{ t('settings.stunServerHint') }}</p>
<p v-if="usingPublicIceServers && hasPublicIceServers" class="text-xs text-blue-500">
{{ t('settings.usingPublicIceServers') }}
</p>
</div> </div>
<Separator /> <Separator />
<div class="space-y-2"> <div class="space-y-2">