feat: 支持 MJPEG 解码与 MSD 目录配置

- FFmpeg/hwcodec 增加 RKMPP MJPEG 解码与 RAM FFI,ARM 构建启用对应解码器
- 共享视频管线新增 MJPEG 解码路径(RKMPP/TurboJPEG),优化 WebRTC 发送与 MJPEG 去重
- MSD 配置改为 msd_dir 并自动创建子目录,接口与前端设置同步更新
- 更新包依赖与版本号
This commit is contained in:
mofeng-git
2026-01-11 16:32:37 +08:00
parent 0f52168e75
commit 01e01430da
30 changed files with 1185 additions and 260 deletions

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "one-kvm" name = "one-kvm"
version = "0.1.0" version = "0.1.1"
edition = "2021" edition = "2021"
authors = ["SilentWind"] authors = ["SilentWind"]
description = "A open and lightweight IP-KVM solution written in Rust" description = "A open and lightweight IP-KVM solution written in Rust"

View File

@@ -217,6 +217,9 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \
-Dlibrga_demo=false \ -Dlibrga_demo=false \
&& ninja -C build \ && ninja -C build \
&& ninja -C build install \ && ninja -C build install \
# Create static librga.a from built objects (rkrga uses shared_library)
&& ar rcs /usr/aarch64-linux-gnu/lib/librga.a $(find build -name '*.o') \
&& ranlib /usr/aarch64-linux-gnu/lib/librga.a \
&& sed -i 's/^Libs:.*$/& -lstdc++ -lm -lpthread/' /usr/aarch64-linux-gnu/lib/pkgconfig/librga.pc \ && sed -i 's/^Libs:.*$/& -lstdc++ -lm -lpthread/' /usr/aarch64-linux-gnu/lib/pkgconfig/librga.pc \
&& cd .. \ && cd .. \
# Create pkg-config wrapper for cross-compilation # Create pkg-config wrapper for cross-compilation
@@ -267,8 +270,10 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \
--disable-avfilter \ --disable-avfilter \
--disable-avdevice \ --disable-avdevice \
--disable-postproc \ --disable-postproc \
# Disable all decoders # Disable all decoders (re-enable only what we need)
--disable-decoders \ --disable-decoders \
--enable-decoder=mjpeg \
--enable-decoder=mjpeg_rkmpp \
# Disable all encoders, enable only needed ones # Disable all encoders, enable only needed ones
--disable-encoders \ --disable-encoders \
--enable-encoder=h264_rkmpp \ --enable-encoder=h264_rkmpp \
@@ -292,8 +297,7 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \
--disable-bsfs \ --disable-bsfs \
--enable-bsf=h264_mp4toannexb \ --enable-bsf=h264_mp4toannexb \
--enable-bsf=hevc_mp4toannexb \ --enable-bsf=hevc_mp4toannexb \
# Disable hardware decoding # Hardware decoding uses explicit rkmpp decoder (no hwaccel flag)
--disable-hwaccels \
# Disable other unused features # Disable other unused features
--disable-indevs \ --disable-indevs \
--disable-outdevs \ --disable-outdevs \

View File

@@ -206,6 +206,9 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \
-Dlibrga_demo=false \ -Dlibrga_demo=false \
&& ninja -C build \ && ninja -C build \
&& ninja -C build install \ && ninja -C build install \
# Create static librga.a from built objects (rkrga uses shared_library)
&& ar rcs /usr/arm-linux-gnueabihf/lib/librga.a $(find build -name '*.o') \
&& ranlib /usr/arm-linux-gnueabihf/lib/librga.a \
&& sed -i 's/^Libs:.*$/& -lstdc++ -lm -lpthread/' /usr/arm-linux-gnueabihf/lib/pkgconfig/librga.pc \ && sed -i 's/^Libs:.*$/& -lstdc++ -lm -lpthread/' /usr/arm-linux-gnueabihf/lib/pkgconfig/librga.pc \
&& cd .. \ && cd .. \
# Create pkg-config wrapper for cross-compilation # Create pkg-config wrapper for cross-compilation
@@ -256,8 +259,10 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \
--disable-avfilter \ --disable-avfilter \
--disable-avdevice \ --disable-avdevice \
--disable-postproc \ --disable-postproc \
# Disable all decoders # Disable all decoders (re-enable only what we need)
--disable-decoders \ --disable-decoders \
--enable-decoder=mjpeg \
--enable-decoder=mjpeg_rkmpp \
# Disable all encoders, enable only needed ones # Disable all encoders, enable only needed ones
--disable-encoders \ --disable-encoders \
--enable-encoder=h264_rkmpp \ --enable-encoder=h264_rkmpp \
@@ -281,8 +286,7 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \
--disable-bsfs \ --disable-bsfs \
--enable-bsf=h264_mp4toannexb \ --enable-bsf=h264_mp4toannexb \
--enable-bsf=hevc_mp4toannexb \ --enable-bsf=hevc_mp4toannexb \
# Disable hardware decoding # Hardware decoding uses explicit rkmpp decoder (no hwaccel flag)
--disable-hwaccels \
# Disable other unused features # Disable other unused features
--disable-indevs \ --disable-indevs \
--disable-outdevs \ --disable-outdevs \

View File

@@ -125,13 +125,20 @@ EOF
chmod 755 "$PKG_DIR/DEBIAN/prerm" chmod 755 "$PKG_DIR/DEBIAN/prerm"
# Create control file # Create control file
BASE_DEPS="libc6 (>= 2.31), libgcc-s1, libstdc++6, libasound2 (>= 1.1), libdrm2 (>= 2.4)"
AMD64_DEPS="libva2 (>= 2.0), libva-drm2 (>= 2.10), libva-x11-2 (>= 2.10), libmfx1 (>= 21.1), libx11-6 (>= 1.6), libxcb1 (>= 1.14)"
DEPS="$BASE_DEPS"
if [ "$DEB_ARCH" = "amd64" ]; then
DEPS="$DEPS, $AMD64_DEPS"
fi
cat > "$PKG_DIR/DEBIAN/control" <<EOF cat > "$PKG_DIR/DEBIAN/control" <<EOF
Package: one-kvm Package: one-kvm
Version: $VERSION Version: $VERSION
Section: admin Section: admin
Priority: optional Priority: optional
Architecture: $DEB_ARCH Architecture: $DEB_ARCH
Depends: libc6 (>= 2.31), libgcc-s1, libstdc++6, libasound2 (>= 1.1), libva2 (>= 2.0), libdrm2 (>= 2.4), libx11-6 (>= 1.6), libxcb1 (>= 1.14) Depends: $DEPS
Maintainer: SilentWind <admin@mofeng.run> Maintainer: SilentWind <admin@mofeng.run>
Description: A open and lightweight IP-KVM solution Description: A open and lightweight IP-KVM solution
Enables BIOS-level remote management of servers and workstations. Enables BIOS-level remote management of servers and workstations.

View File

@@ -6,6 +6,7 @@ description = "Hardware video codec for IP-KVM (Windows/Linux)"
[features] [features]
default = [] default = []
rkmpp = []
[dependencies] [dependencies]
log = "0.4" log = "0.4"

View File

@@ -152,8 +152,13 @@ mod ffmpeg {
} else { } else {
// RKMPP for ARM // RKMPP for ARM
println!("cargo:rustc-link-lib=rockchip_mpp"); println!("cargo:rustc-link-lib=rockchip_mpp");
let rga_static = lib_dir.join("librga.a");
if rga_static.exists() {
println!("cargo:rustc-link-lib=static=rga");
} else {
println!("cargo:rustc-link-lib=rga"); println!("cargo:rustc-link-lib=rga");
} }
}
// Software codec dependencies (dynamic - GPL) // Software codec dependencies (dynamic - GPL)
println!("cargo:rustc-link-lib=x264"); println!("cargo:rustc-link-lib=x264");
@@ -198,15 +203,24 @@ mod ffmpeg {
if let Ok(output) = Command::new("pkg-config").args(&pkg_config_args).output() { if let Ok(output) = Command::new("pkg-config").args(&pkg_config_args).output() {
if output.status.success() { if output.status.success() {
let libs_str = String::from_utf8_lossy(&output.stdout); let libs_str = String::from_utf8_lossy(&output.stdout);
let mut link_paths: Vec<String> = Vec::new();
for flag in libs_str.split_whitespace() { for flag in libs_str.split_whitespace() {
if flag.starts_with("-L") { if flag.starts_with("-L") {
println!("cargo:rustc-link-search=native={}", &flag[2..]); let path = flag[2..].to_string();
println!("cargo:rustc-link-search=native={}", path);
link_paths.push(path);
} else if flag.starts_with("-l") { } else if flag.starts_with("-l") {
let lib_name = &flag[2..]; let lib_name = &flag[2..];
if use_static { if use_static {
// For static linking, link FFmpeg libs statically, others dynamically // For static linking, link FFmpeg libs statically, others dynamically
if lib_name.starts_with("av") || lib_name == "swresample" { if lib_name.starts_with("av") || lib_name == "swresample" {
println!("cargo:rustc-link-lib=static={}", lib_name); println!("cargo:rustc-link-lib=static={}", lib_name);
} else if lib_name == "rga"
&& link_paths
.iter()
.any(|path| Path::new(path).join("librga.a").exists())
{
println!("cargo:rustc-link-lib=static=rga");
} else { } else {
// Runtime libraries (va, drm, etc.) must be dynamic // Runtime libraries (va, drm, etc.) must be dynamic
println!("cargo:rustc-link-lib={}", lib_name); println!("cargo:rustc-link-lib={}", lib_name);
@@ -343,6 +357,20 @@ mod ffmpeg {
.write_to_file(Path::new(&env::var_os("OUT_DIR").unwrap()).join("ffmpeg_ram_ffi.rs")) .write_to_file(Path::new(&env::var_os("OUT_DIR").unwrap()).join("ffmpeg_ram_ffi.rs"))
.unwrap(); .unwrap();
builder.files(["ffmpeg_ram_encode.cpp"].map(|f| ffmpeg_ram_dir.join(f))); builder.file(ffmpeg_ram_dir.join("ffmpeg_ram_encode.cpp"));
// RKMPP decode only exists on ARM builds where FFmpeg is compiled with RKMPP support.
// Avoid compiling this file on x86/x64 where `AV_HWDEVICE_TYPE_RKMPP` doesn't exist.
let target_arch = std::env::var("CARGO_CFG_TARGET_ARCH").unwrap_or_default();
let enable_rkmpp = matches!(target_arch.as_str(), "aarch64" | "arm")
|| std::env::var_os("CARGO_FEATURE_RKMPP").is_some();
if enable_rkmpp {
builder.file(ffmpeg_ram_dir.join("ffmpeg_ram_decode.cpp"));
} else {
println!(
"cargo:info=Skipping ffmpeg_ram_decode.cpp (RKMPP) for arch {}",
target_arch
);
}
} }
} }

View File

@@ -0,0 +1,280 @@
// Minimal FFmpeg RAM MJPEG decoder (RKMPP only) -> NV12 in CPU memory.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/error.h>
#include <libavutil/hwcontext.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
}
#include <string>
#include <string.h>
#include <vector>
#include "common.h"
#define LOG_MODULE "FFMPEG_RAM_DEC"
#include <log.h>
#include <util.h>
typedef void (*RamDecodeCallback)(const uint8_t *data, int len, int width,
int height, int pixfmt, const void *obj);
namespace {
// Most recent error message for this thread; exposed to Rust via
// ffmpeg_ram_last_error(). Cleared at the start of init()/decode().
thread_local std::string g_last_error;

// Record an error message for later retrieval by the FFI caller.
static void set_last_error(const std::string &msg) {
  g_last_error = msg;
}

// Minimal FFmpeg decoder wrapper that decodes one packet at a time and
// delivers each decoded frame to `callback_` as a contiguous CPU buffer.
// When the decoder name contains "rkmpp", frames are decoded on the RKMPP
// hardware device and copied back to system memory (sw_pixfmt_).
//
// Lifecycle: construct -> init() -> decode()* -> fini(). The destructor is
// intentionally empty; the extern "C" wrappers call fini() explicitly.
class FFmpegRamDecoder {
public:
  AVCodecContext *c_ = NULL;   // decoder context (owned; freed in fini())
  AVPacket *pkt_ = NULL;       // reusable input packet (owned)
  AVFrame *frame_ = NULL;      // decoded frame, possibly a HW surface (owned)
  AVFrame *sw_frame_ = NULL;   // CPU-side copy of a HW frame (owned)
  std::string name_;           // FFmpeg decoder name, e.g. "mjpeg_rkmpp"
  int width_ = 0;              // expected coded width (hint to the codec)
  int height_ = 0;             // expected coded height (hint to the codec)
  AVPixelFormat sw_pixfmt_ = AV_PIX_FMT_NV12; // desired CPU output format
  int thread_count_ = 1;       // codec thread count (>= 1)
  RamDecodeCallback callback_ = NULL; // invoked once per decoded frame
  AVHWDeviceType hw_device_type_ = AV_HWDEVICE_TYPE_NONE; // RKMPP or none
  AVPixelFormat hw_pixfmt_ = AV_PIX_FMT_NONE; // HW surface format, if any
  AVBufferRef *hw_device_ctx_ = NULL; // owned ref to the HW device context

  // `name` selects the FFmpeg decoder; a name containing "rkmpp" enables the
  // RKMPP hardware path. `sw_pixfmt` is the desired CPU pixel format,
  // `thread_count` is clamped to at least 1.
  explicit FFmpegRamDecoder(const char *name, int width, int height, int sw_pixfmt,
                            int thread_count, RamDecodeCallback callback) {
    name_ = name ? name : "";
    width_ = width;
    height_ = height;
    sw_pixfmt_ = (AVPixelFormat)sw_pixfmt;
    thread_count_ = thread_count > 0 ? thread_count : 1;
    callback_ = callback;
    if (name_.find("rkmpp") != std::string::npos) {
      hw_device_type_ = AV_HWDEVICE_TYPE_RKMPP;
    }
  }

  // No-op: all FFmpeg resources are released in fini(), which the extern "C"
  // wrappers always call before delete.
  ~FFmpegRamDecoder() {}

  // get_format callback: prefer the HW pixel format selected in init()
  // (stored on `this` via ctx->opaque); otherwise fall back to the codec's
  // first offered format.
  static enum AVPixelFormat get_hw_format(AVCodecContext *ctx,
                                          const enum AVPixelFormat *pix_fmts) {
    FFmpegRamDecoder *dec = reinterpret_cast<FFmpegRamDecoder *>(ctx->opaque);
    if (dec && dec->hw_pixfmt_ != AV_PIX_FMT_NONE) {
      const enum AVPixelFormat *p;
      for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p == dec->hw_pixfmt_) {
          return *p;
        }
      }
    }
    return pix_fmts[0];
  }

  // Open the decoder and allocate packet/frames. For the RKMPP path this also
  // creates the HW device context and a pre-sized HW frames pool.
  // Returns false (with g_last_error set) on any failure; caller must still
  // call fini() to release partially-initialized state.
  bool init() {
    g_last_error.clear();
    const AVCodec *codec = NULL;
    int ret = 0;
    if (!(codec = avcodec_find_decoder_by_name(name_.c_str()))) {
      set_last_error(std::string("Decoder not found: ") + name_);
      return false;
    }
    if (!(c_ = avcodec_alloc_context3(codec))) {
      set_last_error(std::string("Could not allocate decoder context"));
      return false;
    }
    c_->width = width_;
    c_->height = height_;
    c_->thread_count = thread_count_;
    // Let get_hw_format() find this object from the codec context.
    c_->opaque = this;
    if (hw_device_type_ != AV_HWDEVICE_TYPE_NONE) {
      // Find the HW surface format this codec supports for our device type.
      const AVCodecHWConfig *cfg = NULL;
      for (int i = 0; (cfg = avcodec_get_hw_config(codec, i)); i++) {
        if (cfg->device_type == hw_device_type_) {
          hw_pixfmt_ = cfg->pix_fmt;
          break;
        }
      }
      if (hw_pixfmt_ == AV_PIX_FMT_NONE) {
        set_last_error(std::string("No suitable HW pixfmt for decoder"));
        return false;
      }
      ret = av_hwdevice_ctx_create(&hw_device_ctx_, hw_device_type_, NULL, NULL, 0);
      if (ret < 0) {
        set_last_error(std::string("av_hwdevice_ctx_create failed, ret = ") + av_err2str(ret));
        return false;
      }
      c_->hw_device_ctx = av_buffer_ref(hw_device_ctx_);
      c_->get_format = get_hw_format;
      // Pre-allocate a HW frames pool so the decoder does not have to create
      // one lazily on the first frame.
      AVBufferRef *frames_ref = av_hwframe_ctx_alloc(c_->hw_device_ctx);
      if (!frames_ref) {
        set_last_error(std::string("av_hwframe_ctx_alloc failed"));
        return false;
      }
      AVHWFramesContext *frames_ctx = (AVHWFramesContext *)frames_ref->data;
      frames_ctx->format = hw_pixfmt_;
      frames_ctx->sw_format = sw_pixfmt_;
      frames_ctx->width = width_;
      frames_ctx->height = height_;
      frames_ctx->initial_pool_size = 8;
      ret = av_hwframe_ctx_init(frames_ref);
      if (ret < 0) {
        av_buffer_unref(&frames_ref);
        set_last_error(std::string("av_hwframe_ctx_init failed, ret = ") + av_err2str(ret));
        return false;
      }
      // Codec context takes its own reference; drop ours.
      c_->hw_frames_ctx = av_buffer_ref(frames_ref);
      av_buffer_unref(&frames_ref);
    }
    if ((ret = avcodec_open2(c_, codec, NULL)) < 0) {
      set_last_error(std::string("avcodec_open2 failed, ret = ") + av_err2str(ret));
      return false;
    }
    pkt_ = av_packet_alloc();
    frame_ = av_frame_alloc();
    sw_frame_ = av_frame_alloc();
    if (!pkt_ || !frame_ || !sw_frame_) {
      set_last_error(std::string("Failed to allocate packet/frame"));
      return false;
    }
    return true;
  }

  // Decode one compressed packet and invoke callback_ for every frame
  // produced. `obj` is passed through to the callback unchanged (the Rust
  // side uses it to reach its output Vec). Returns 0 on success, a negative
  // AVERROR (or -1) on failure, with g_last_error set.
  int decode(const uint8_t *data, int length, const void *obj) {
    g_last_error.clear();
    int ret = 0;
    if (!c_ || !pkt_ || !frame_) {
      set_last_error(std::string("Decoder not initialized"));
      return -1;
    }
    av_packet_unref(pkt_);
    // av_new_packet allocates length + padding; copy the caller's bytes in.
    ret = av_new_packet(pkt_, length);
    if (ret < 0) {
      set_last_error(std::string("av_new_packet failed, ret = ") + av_err2str(ret));
      return ret;
    }
    memcpy(pkt_->data, data, length);
    pkt_->size = length;
    ret = avcodec_send_packet(c_, pkt_);
    av_packet_unref(pkt_);
    if (ret < 0) {
      set_last_error(std::string("avcodec_send_packet failed, ret = ") + av_err2str(ret));
      return ret;
    }
    // Drain all frames produced by this packet (typically one for MJPEG).
    while (true) {
      ret = avcodec_receive_frame(c_, frame_);
      if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
        break;
      }
      if (ret < 0) {
        set_last_error(std::string("avcodec_receive_frame failed, ret = ") + av_err2str(ret));
        return ret;
      }
      AVFrame *out = frame_;
      if (frame_->format == hw_pixfmt_) {
        // HW surface: download to sw_frame_ (sw_format of the frames pool).
        av_frame_unref(sw_frame_);
        ret = av_hwframe_transfer_data(sw_frame_, frame_, 0);
        if (ret < 0) {
          set_last_error(std::string("av_hwframe_transfer_data failed, ret = ") + av_err2str(ret));
          return ret;
        }
        out = sw_frame_;
      }
      // Flatten the (possibly strided) frame into one contiguous buffer
      // with alignment 1 before handing it to the callback.
      int buf_size =
          av_image_get_buffer_size((AVPixelFormat)out->format, out->width, out->height, 1);
      if (buf_size < 0) {
        set_last_error(std::string("av_image_get_buffer_size failed, ret = ") + av_err2str(buf_size));
        return buf_size;
      }
      std::vector<uint8_t> buf(buf_size);
      ret = av_image_copy_to_buffer(buf.data(), buf_size,
                                    (const uint8_t *const *)out->data, out->linesize,
                                    (AVPixelFormat)out->format, out->width, out->height, 1);
      if (ret < 0) {
        set_last_error(std::string("av_image_copy_to_buffer failed, ret = ") + av_err2str(ret));
        return ret;
      }
      if (callback_) {
        callback_(buf.data(), buf_size, out->width, out->height, out->format, obj);
      }
      av_frame_unref(frame_);
    }
    return 0;
  }

  // Release all FFmpeg resources. Safe to call on a partially-initialized
  // object (every member is null-checked). Must be called before delete.
  void fini() {
    if (pkt_) {
      av_packet_free(&pkt_);
    }
    if (frame_) {
      av_frame_free(&frame_);
    }
    if (sw_frame_) {
      av_frame_free(&sw_frame_);
    }
    if (c_) {
      avcodec_free_context(&c_);
    }
    if (hw_device_ctx_) {
      av_buffer_unref(&hw_device_ctx_);
    }
  }
};
} // namespace
// Create a decoder instance for the FFI layer.
// Returns an opaque handle on success, NULL on failure (the error message is
// retrievable via ffmpeg_ram_last_error()).
extern "C" void *ffmpeg_ram_new_decoder(const char *name, int width, int height,
                                        int sw_pixfmt, int thread_count,
                                        RamDecodeCallback callback) {
  FFmpegRamDecoder *dec =
      new FFmpegRamDecoder(name, width, height, sw_pixfmt, thread_count, callback);
  if (dec->init()) {
    return dec;
  }
  // Initialization failed: tear down partially-built state and report NULL.
  dec->fini();
  delete dec;
  return NULL;
}
// Decode one compressed packet through the opaque decoder handle.
// Returns 0 on success, a negative value on error or a NULL handle.
extern "C" int ffmpeg_ram_decode(void *decoder, const uint8_t *data, int length,
                                 const void *obj) {
  FFmpegRamDecoder *dec = reinterpret_cast<FFmpegRamDecoder *>(decoder);
  return dec ? dec->decode(data, length, obj) : -1;
}
extern "C" void ffmpeg_ram_free_decoder(void *decoder) {
FFmpegRamDecoder *dec = reinterpret_cast<FFmpegRamDecoder *>(decoder);
if (!dec) {
return;
}
dec->fini();
delete dec;
}
// Return the calling thread's last error message (empty string when none).
// The pointer stays valid until the next init()/decode() call on this thread.
extern "C" const char *ffmpeg_ram_last_error(void) {
  return g_last_error.c_str();
}

View File

@@ -7,6 +7,8 @@
typedef void (*RamEncodeCallback)(const uint8_t *data, int len, int64_t pts, typedef void (*RamEncodeCallback)(const uint8_t *data, int len, int64_t pts,
int key, const void *obj); int key, const void *obj);
typedef void (*RamDecodeCallback)(const uint8_t *data, int len, int width,
int height, int pixfmt, const void *obj);
void *ffmpeg_ram_new_encoder(const char *name, const char *mc_name, int width, void *ffmpeg_ram_new_encoder(const char *name, const char *mc_name, int width,
int height, int pixfmt, int align, int fps, int height, int pixfmt, int align, int fps,
@@ -23,4 +25,12 @@ int ffmpeg_ram_get_linesize_offset_length(int pix_fmt, int width, int height,
int ffmpeg_ram_set_bitrate(void *encoder, int kbs); int ffmpeg_ram_set_bitrate(void *encoder, int kbs);
void ffmpeg_ram_request_keyframe(void *encoder); void ffmpeg_ram_request_keyframe(void *encoder);
void *ffmpeg_ram_new_decoder(const char *name, int width, int height,
int sw_pixfmt, int thread_count,
RamDecodeCallback callback);
int ffmpeg_ram_decode(void *decoder, const uint8_t *data, int length,
const void *obj);
void ffmpeg_ram_free_decoder(void *decoder);
const char *ffmpeg_ram_last_error(void);
#endif // FFMPEG_RAM_FFI_H #endif // FFMPEG_RAM_FFI_H

View File

@@ -0,0 +1,127 @@
use crate::{
ffmpeg::{init_av_log, AVPixelFormat},
ffmpeg_ram::{
ffmpeg_ram_decode, ffmpeg_ram_free_decoder, ffmpeg_ram_last_error,
ffmpeg_ram_new_decoder,
},
};
use std::{
ffi::{c_void, CString},
os::raw::c_int,
slice,
};
/// Parameters for constructing a RAM decoder (passed to the C side).
#[derive(Debug, Clone, PartialEq)]
pub struct DecodeContext {
    /// FFmpeg decoder name, e.g. "mjpeg_rkmpp".
    pub name: String,
    /// Expected coded width in pixels.
    pub width: i32,
    /// Expected coded height in pixels.
    pub height: i32,
    /// Desired CPU-side pixel format for decoded output.
    pub sw_pixfmt: AVPixelFormat,
    /// Codec thread count (values <= 0 are clamped to 1 on the C side).
    pub thread_count: i32,
}

/// One decoded frame as a contiguous CPU buffer (alignment 1).
pub struct DecodeFrame {
    /// Raw pixel data in `pixfmt` layout.
    pub data: Vec<u8>,
    /// Frame width in pixels.
    pub width: i32,
    /// Frame height in pixels.
    pub height: i32,
    /// Pixel format reported by the C decoder for this frame.
    pub pixfmt: AVPixelFormat,
}

/// Safe-ish wrapper around the C decoder handle.
/// `frames` is a `Box<Vec<DecodeFrame>>` leaked to a raw pointer so the
/// C callback can push into it; both pointers are released in `Drop`.
pub struct Decoder {
    codec: *mut c_void,
    frames: *mut Vec<DecodeFrame>,
    pub ctx: DecodeContext,
}

// Safety: Decoder is only accessed through higher-level synchronization
// (a tokio::Mutex in the video pipeline). It is never accessed concurrently,
// but may be moved across threads; the underlying FFmpeg RAM decoder state
// is thread-confined per instance, so Send (but not Sync) is acceptable.
unsafe impl Send for Decoder {}
impl Decoder {
    /// Create a decoder from `ctx`.
    ///
    /// Returns `Err(())` when the name contains an interior NUL or the C
    /// constructor fails (the C-side error message is logged).
    pub fn new(ctx: DecodeContext) -> Result<Self, ()> {
        init_av_log();
        unsafe {
            // NOTE: the CString temporary lives until the end of this
            // statement, so the pointer is valid for the duration of the call.
            let codec = ffmpeg_ram_new_decoder(
                CString::new(ctx.name.as_str()).map_err(|_| ())?.as_ptr(),
                ctx.width,
                ctx.height,
                ctx.sw_pixfmt as c_int,
                ctx.thread_count,
                Some(Decoder::callback),
            );
            if codec.is_null() {
                let msg = last_error_message();
                if !msg.is_empty() {
                    log::error!("ffmpeg_ram_new_decoder failed: {}", msg);
                }
                return Err(());
            }
            Ok(Decoder {
                codec,
                // Leaked Box: the raw pointer is handed to the C callback as
                // `obj` on every decode() call and reclaimed in Drop.
                frames: Box::into_raw(Box::new(Vec::<DecodeFrame>::new())),
                ctx,
            })
        }
    }

    /// Decode one compressed packet (e.g. a full MJPEG image).
    ///
    /// On success returns the frames produced by this packet (previous
    /// contents are cleared first); on failure returns the C return code
    /// (negative AVERROR or -1) and logs the C-side error message.
    pub fn decode(&mut self, data: &[u8]) -> Result<&mut Vec<DecodeFrame>, i32> {
        unsafe {
            (&mut *self.frames).clear();
            let ret = ffmpeg_ram_decode(
                self.codec,
                data.as_ptr(),
                data.len() as c_int,
                // Passed through to `callback` as `obj`.
                self.frames as *const _ as *const c_void,
            );
            if ret != 0 {
                let msg = last_error_message();
                if !msg.is_empty() {
                    log::error!("ffmpeg_ram_decode failed: {}", msg);
                }
                return Err(ret);
            }
            Ok(&mut *self.frames)
        }
    }

    /// C callback: copies one decoded frame into the `Vec<DecodeFrame>`
    /// smuggled through `obj`. Runs synchronously inside `decode()`.
    extern "C" fn callback(
        data: *const u8,
        size: c_int,
        width: c_int,
        height: c_int,
        pixfmt: c_int,
        obj: *const c_void,
    ) {
        unsafe {
            let frames = &mut *(obj as *mut Vec<DecodeFrame>);
            frames.push(DecodeFrame {
                data: slice::from_raw_parts(data, size as usize).to_vec(),
                width,
                height,
                // NOTE(review): transmuting an arbitrary i32 into
                // AVPixelFormat is UB for values outside the enum — assumes
                // the C side only reports valid formats; TODO confirm.
                pixfmt: std::mem::transmute::<i32, AVPixelFormat>(pixfmt),
            });
        }
    }
}
impl Drop for Decoder {
    fn drop(&mut self) {
        unsafe {
            // Free the C decoder first, then reclaim the leaked frames Vec
            // (created via Box::into_raw in Decoder::new).
            ffmpeg_ram_free_decoder(self.codec);
            drop(Box::from_raw(self.frames));
        }
    }
}
/// Fetch the C side's thread-local error string.
/// Returns an empty `String` when the pointer is null (no error recorded).
fn last_error_message() -> String {
    let ptr = unsafe { ffmpeg_ram_last_error() };
    if ptr.is_null() {
        String::new()
    } else {
        unsafe { std::ffi::CStr::from_ptr(ptr) }
            .to_string_lossy()
            .into_owned()
    }
}

View File

@@ -12,6 +12,47 @@ use std::ffi::c_int;
include!(concat!(env!("OUT_DIR"), "/ffmpeg_ram_ffi.rs")); include!(concat!(env!("OUT_DIR"), "/ffmpeg_ram_ffi.rs"));
// Real decoder module: compiled only where the C++ RKMPP decoder is built
// (ARM targets or the explicit `rkmpp` feature).
#[cfg(any(target_arch = "aarch64", target_arch = "arm", feature = "rkmpp"))]
pub mod decode;

// Provide a small stub on non-ARM builds so dependents can still compile, but decoder
// construction will fail (since the C++ RKMPP decoder isn't built/linked).
#[cfg(not(any(target_arch = "aarch64", target_arch = "arm", feature = "rkmpp")))]
pub mod decode {
    use crate::ffmpeg::AVPixelFormat;

    /// Mirror of the real `DecodeContext` so dependent code type-checks.
    #[derive(Debug, Clone, PartialEq)]
    pub struct DecodeContext {
        pub name: String,
        pub width: i32,
        pub height: i32,
        pub sw_pixfmt: AVPixelFormat,
        pub thread_count: i32,
    }

    /// Mirror of the real `DecodeFrame`; never produced by the stub.
    pub struct DecodeFrame {
        pub data: Vec<u8>,
        pub width: i32,
        pub height: i32,
        pub pixfmt: AVPixelFormat,
    }

    /// Stub decoder: construction always fails on this target.
    pub struct Decoder {
        pub ctx: DecodeContext,
    }

    impl Decoder {
        /// Always returns `Err(())` — no native decoder is linked here.
        pub fn new(ctx: DecodeContext) -> Result<Self, ()> {
            let _ = ctx;
            Err(())
        }
        /// Unreachable in practice (`new` never succeeds); returns `Err(-1)`.
        pub fn decode(&mut self, _data: &[u8]) -> Result<&mut Vec<DecodeFrame>, i32> {
            Err(-1)
        }
    }
}
pub mod encode; pub mod encode;
pub enum Priority { pub enum Priority {

View File

@@ -202,25 +202,37 @@ impl Default for HidConfig {
pub struct MsdConfig { pub struct MsdConfig {
/// Enable MSD functionality /// Enable MSD functionality
pub enabled: bool, pub enabled: bool,
/// Storage path for ISO/IMG images /// MSD base directory (absolute path)
pub images_path: String, pub msd_dir: String,
/// Path for Ventoy bootable drive file
pub drive_path: String,
/// Ventoy drive size in MB (minimum 1024 MB / 1 GB)
pub virtual_drive_size_mb: u32,
} }
impl Default for MsdConfig { impl Default for MsdConfig {
fn default() -> Self { fn default() -> Self {
Self { Self {
enabled: true, enabled: true,
images_path: "./data/msd/images".to_string(), msd_dir: String::new(),
drive_path: "./data/msd/ventoy.img".to_string(),
virtual_drive_size_mb: 16 * 1024, // 16GB default
} }
} }
} }
impl MsdConfig {
pub fn msd_dir_path(&self) -> std::path::PathBuf {
std::path::PathBuf::from(&self.msd_dir)
}
pub fn images_dir(&self) -> std::path::PathBuf {
self.msd_dir_path().join("images")
}
pub fn ventoy_dir(&self) -> std::path::PathBuf {
self.msd_dir_path().join("ventoy")
}
pub fn drive_path(&self) -> std::path::PathBuf {
self.ventoy_dir().join("ventoy.img")
}
}
// Re-export ATX types from atx module for configuration // Re-export ATX types from atx module for configuration
pub use crate::atx::{ActiveLevel, AtxDriverType, AtxKeyConfig, AtxLedConfig}; pub use crate::atx::{ActiveLevel, AtxDriverType, AtxKeyConfig, AtxLedConfig};

View File

@@ -65,7 +65,7 @@ struct CliArgs {
#[arg(long, value_name = "FILE", requires = "ssl_cert")] #[arg(long, value_name = "FILE", requires = "ssl_cert")]
ssl_key: Option<PathBuf>, ssl_key: Option<PathBuf>,
/// Data directory path (default: ./data) /// Data directory path (default: /etc/one-kvm)
#[arg(short = 'd', long, value_name = "DIR")] #[arg(short = 'd', long, value_name = "DIR")]
data_dir: Option<PathBuf>, data_dir: Option<PathBuf>,
@@ -104,6 +104,34 @@ async fn main() -> anyhow::Result<()> {
let config_store = ConfigStore::new(&db_path).await?; let config_store = ConfigStore::new(&db_path).await?;
let mut config = (*config_store.get()).clone(); let mut config = (*config_store.get()).clone();
// Normalize MSD directory (absolute path under data dir if empty/relative)
let mut msd_dir_updated = false;
if config.msd.msd_dir.trim().is_empty() {
let msd_dir = data_dir.join("msd");
config.msd.msd_dir = msd_dir.to_string_lossy().to_string();
msd_dir_updated = true;
} else if !PathBuf::from(&config.msd.msd_dir).is_absolute() {
let msd_dir = data_dir.join(&config.msd.msd_dir);
tracing::warn!(
"MSD directory is relative, rebasing to {}",
msd_dir.display()
);
config.msd.msd_dir = msd_dir.to_string_lossy().to_string();
msd_dir_updated = true;
}
if msd_dir_updated {
config_store.set(config.clone()).await?;
}
// Ensure MSD directories exist (msd/images, msd/ventoy)
let msd_dir = PathBuf::from(&config.msd.msd_dir);
if let Err(e) = tokio::fs::create_dir_all(msd_dir.join("images")).await {
tracing::warn!("Failed to create MSD images directory: {}", e);
}
if let Err(e) = tokio::fs::create_dir_all(msd_dir.join("ventoy")).await {
tracing::warn!("Failed to create MSD ventoy directory: {}", e);
}
// Apply CLI argument overrides to config (only if explicitly specified) // Apply CLI argument overrides to config (only if explicitly specified)
if let Some(addr) = args.address { if let Some(addr) = args.address {
config.web.bind_address = addr; config.web.bind_address = addr;
@@ -344,11 +372,7 @@ async fn main() -> anyhow::Result<()> {
); );
} }
let controller = MsdController::new( let controller = MsdController::new(otg_service.clone(), config.msd.msd_dir_path());
otg_service.clone(),
&config.msd.images_path,
&config.msd.drive_path,
);
if let Err(e) = controller.init().await { if let Err(e) = controller.init().await {
tracing::warn!("Failed to initialize MSD controller: {}", e); tracing::warn!("Failed to initialize MSD controller: {}", e);
None None

View File

@@ -32,6 +32,8 @@ pub struct MsdController {
state: RwLock<MsdState>, state: RwLock<MsdState>,
/// Images storage path /// Images storage path
images_path: PathBuf, images_path: PathBuf,
/// Ventoy directory path
ventoy_dir: PathBuf,
/// Virtual drive path /// Virtual drive path
drive_path: PathBuf, drive_path: PathBuf,
/// Event bus for broadcasting state changes (optional) /// Event bus for broadcasting state changes (optional)
@@ -49,19 +51,22 @@ impl MsdController {
/// ///
/// # Parameters /// # Parameters
/// * `otg_service` - OTG service for gadget management /// * `otg_service` - OTG service for gadget management
/// * `images_path` - Directory path for storing ISO/IMG files /// * `msd_dir` - Base directory for MSD storage
/// * `drive_path` - File path for the virtual FAT32 drive
pub fn new( pub fn new(
otg_service: Arc<OtgService>, otg_service: Arc<OtgService>,
images_path: impl Into<PathBuf>, msd_dir: impl Into<PathBuf>,
drive_path: impl Into<PathBuf>,
) -> Self { ) -> Self {
let msd_dir = msd_dir.into();
let images_path = msd_dir.join("images");
let ventoy_dir = msd_dir.join("ventoy");
let drive_path = ventoy_dir.join("ventoy.img");
Self { Self {
otg_service, otg_service,
msd_function: RwLock::new(None), msd_function: RwLock::new(None),
state: RwLock::new(MsdState::default()), state: RwLock::new(MsdState::default()),
images_path: images_path.into(), images_path,
drive_path: drive_path.into(), ventoy_dir,
drive_path,
events: tokio::sync::RwLock::new(None), events: tokio::sync::RwLock::new(None),
downloads: Arc::new(RwLock::new(HashMap::new())), downloads: Arc::new(RwLock::new(HashMap::new())),
operation_lock: Arc::new(RwLock::new(())), operation_lock: Arc::new(RwLock::new(())),
@@ -77,6 +82,9 @@ impl MsdController {
if let Err(e) = std::fs::create_dir_all(&self.images_path) { if let Err(e) = std::fs::create_dir_all(&self.images_path) {
warn!("Failed to create images directory: {}", e); warn!("Failed to create images directory: {}", e);
} }
if let Err(e) = std::fs::create_dir_all(&self.ventoy_dir) {
warn!("Failed to create ventoy directory: {}", e);
}
// 2. Request MSD function from OtgService // 2. Request MSD function from OtgService
info!("Requesting MSD function from OtgService"); info!("Requesting MSD function from OtgService");
@@ -364,6 +372,11 @@ impl MsdController {
&self.images_path &self.images_path
} }
/// Get ventoy directory path
pub fn ventoy_dir(&self) -> &PathBuf {
&self.ventoy_dir
}
/// Get virtual drive path /// Get virtual drive path
pub fn drive_path(&self) -> &PathBuf { pub fn drive_path(&self) -> &PathBuf {
&self.drive_path &self.drive_path
@@ -588,10 +601,9 @@ mod tests {
async fn test_controller_creation() { async fn test_controller_creation() {
let temp_dir = TempDir::new().unwrap(); let temp_dir = TempDir::new().unwrap();
let otg_service = Arc::new(OtgService::new()); let otg_service = Arc::new(OtgService::new());
let images_path = temp_dir.path().join("images"); let msd_dir = temp_dir.path().join("msd");
let drive_path = temp_dir.path().join("ventoy.img");
let controller = MsdController::new(otg_service, &images_path, &drive_path); let controller = MsdController::new(otg_service, &msd_dir);
// Check that MSD is not initialized (msd_function is None) // Check that MSD is not initialized (msd_function is None)
let state = controller.state().await; let state = controller.state().await;
@@ -604,10 +616,9 @@ mod tests {
async fn test_state_default() { async fn test_state_default() {
let temp_dir = TempDir::new().unwrap(); let temp_dir = TempDir::new().unwrap();
let otg_service = Arc::new(OtgService::new()); let otg_service = Arc::new(OtgService::new());
let images_path = temp_dir.path().join("images"); let msd_dir = temp_dir.path().join("msd");
let drive_path = temp_dir.path().join("ventoy.img");
let controller = MsdController::new(otg_service, &images_path, &drive_path); let controller = MsdController::new(otg_service, &msd_dir);
let state = controller.state().await; let state = controller.state().await;
assert!(!state.available); assert!(!state.available);

View File

@@ -184,11 +184,22 @@ impl MjpegStreamHandler {
/// Update current frame /// Update current frame
pub fn update_frame(&self, frame: VideoFrame) { pub fn update_frame(&self, frame: VideoFrame) {
// Skip JPEG encoding if no clients are connected (optimization for WebRTC-only mode) // Fast path: if no MJPEG clients are connected, do minimal bookkeeping and avoid
// This avoids unnecessary libyuv conversion when only WebRTC is active // expensive work (JPEG encoding and per-frame dedup hashing).
if self.clients.read().is_empty() && !frame.format.is_compressed() { let has_clients = !self.clients.read().is_empty();
// Still update the online status and sequence for monitoring purposes if !has_clients {
// but skip the expensive JPEG encoding self.dropped_same_frames.store(0, Ordering::Relaxed);
self.sequence.fetch_add(1, Ordering::Relaxed);
self.online.store(frame.online, Ordering::SeqCst);
*self.last_frame_ts.write() = Some(Instant::now());
// Keep the latest compressed frame for "instant first frame" when a client connects.
// Avoid retaining large raw buffers when there are no MJPEG clients.
if frame.format.is_compressed() {
self.current_frame.store(Arc::new(Some(frame)));
} else {
self.current_frame.store(Arc::new(None));
}
return; return;
} }
@@ -237,7 +248,7 @@ impl MjpegStreamHandler {
self.dropped_same_frames.store(0, Ordering::Relaxed); self.dropped_same_frames.store(0, Ordering::Relaxed);
self.sequence.fetch_add(1, Ordering::Relaxed); self.sequence.fetch_add(1, Ordering::Relaxed);
self.online.store(true, Ordering::SeqCst); self.online.store(frame.online, Ordering::SeqCst);
*self.last_frame_ts.write() = Some(Instant::now()); *self.last_frame_ts.write() = Some(Instant::now());
self.current_frame.store(Arc::new(Some(frame))); self.current_frame.store(Arc::new(Some(frame)));
@@ -535,9 +546,44 @@ fn frames_are_identical(a: &VideoFrame, b: &VideoFrame) -> bool {
return false; return false;
} }
// Compare hashes instead of full binary data // Avoid hashing the whole frame for obviously different frames by sampling a few
// Hash is computed once and cached in OnceLock for efficiency // fixed-size windows first. If all samples match, fall back to the cached hash.
// This is much faster than binary comparison for large frames (1080p MJPEG) let a_data = a.data();
let b_data = b.data();
let len = a_data.len();
// Small frames: direct compare is cheap.
if len <= 256 {
return a_data == b_data;
}
const SAMPLE: usize = 16;
debug_assert!(len == b_data.len());
// Head + tail.
if a_data[..SAMPLE] != b_data[..SAMPLE] {
return false;
}
if a_data[len - SAMPLE..] != b_data[len - SAMPLE..] {
return false;
}
// Two interior samples (quarter + middle) to catch common "same header/footer" cases.
let quarter = len / 4;
let quarter_start = quarter.saturating_sub(SAMPLE / 2);
if a_data[quarter_start..quarter_start + SAMPLE]
!= b_data[quarter_start..quarter_start + SAMPLE]
{
return false;
}
let mid = len / 2;
let mid_start = mid.saturating_sub(SAMPLE / 2);
if a_data[mid_start..mid_start + SAMPLE] != b_data[mid_start..mid_start + SAMPLE] {
return false;
}
// Compare hashes instead of full binary data.
// Hash is computed once and cached in OnceLock for efficiency.
a.get_hash() == b.get_hash() a.get_hash() == b.get_hash()
} }

View File

@@ -0,0 +1,95 @@
//! MJPEG decoder using RKMPP via hwcodec (FFmpeg RAM).
use hwcodec::ffmpeg::AVPixelFormat;
use hwcodec::ffmpeg_ram::decode::{DecodeContext, Decoder};
use tracing::warn;
use crate::error::{AppError, Result};
use crate::video::convert::Nv12Converter;
use crate::video::format::Resolution;
/// MJPEG decoder backed by the Rockchip MPP hardware decoder, accessed via
/// hwcodec's FFmpeg RAM API. Decodes MJPEG bitstreams to NV12, converting
/// NV16 output to NV12 in software when the hardware emits 4:2:2.
pub struct MjpegRkmppDecoder {
    decoder: Decoder,
    resolution: Resolution,
    // Lazily created NV16 -> NV12 converter; only allocated the first time
    // the hardware actually produces a 4:2:2 frame.
    nv16_to_nv12: Option<Nv12Converter>,
    // Pixel format of the previously decoded frame; used to warn once per
    // format change instead of on every frame.
    last_pixfmt: Option<AVPixelFormat>,
}

impl MjpegRkmppDecoder {
    /// Creates a decoder for the given fixed resolution.
    ///
    /// Returns `AppError::VideoError` if the `mjpeg_rkmpp` codec cannot be
    /// opened (e.g. non-Rockchip hardware or an FFmpeg build without it).
    pub fn new(resolution: Resolution) -> Result<Self> {
        let ctx = DecodeContext {
            name: "mjpeg_rkmpp".to_string(),
            width: resolution.width as i32,
            height: resolution.height as i32,
            sw_pixfmt: AVPixelFormat::AV_PIX_FMT_NV12,
            thread_count: 1,
        };
        let decoder = Decoder::new(ctx).map_err(|_| {
            AppError::VideoError("Failed to create mjpeg_rkmpp decoder".to_string())
        })?;
        Ok(Self {
            decoder,
            resolution,
            nv16_to_nv12: None,
            last_pixfmt: None,
        })
    }

    /// Decodes one MJPEG frame and returns tightly packed NV12 data.
    ///
    /// Warns (but does not fail) when the decoded size differs from the
    /// configured resolution or when the output pixel format changes between
    /// frames. Fails when the decoder produces no frame or an unsupported
    /// pixel format.
    pub fn decode_to_nv12(&mut self, mjpeg: &[u8]) -> Result<Vec<u8>> {
        // Bound `mut`: we take the last decoded frame out with `pop()` below.
        let mut frames = self
            .decoder
            .decode(mjpeg)
            .map_err(|e| AppError::VideoError(format!("mjpeg_rkmpp decode failed: {}", e)))?;
        if frames.len() > 1 {
            warn!(
                "mjpeg_rkmpp decode returned {} frames, using last",
                frames.len()
            );
        }
        let frame = frames.pop().ok_or_else(|| {
            AppError::VideoError("mjpeg_rkmpp decode returned no frames".to_string())
        })?;
        if frame.width as u32 != self.resolution.width
            || frame.height as u32 != self.resolution.height
        {
            warn!(
                "mjpeg_rkmpp output size {}x{} differs from expected {}x{}",
                frame.width, frame.height, self.resolution.width, self.resolution.height
            );
        }
        // Warn once when the format changes, then track the *actual* format
        // so the frame data is always interpreted as the format it was
        // produced in (previously the stale cached format was used, which
        // would misdecode frames after a NV12<->NV16 switch).
        if let Some(last) = self.last_pixfmt {
            if frame.pixfmt != last {
                warn!(
                    "mjpeg_rkmpp output pixfmt changed from {:?} to {:?}",
                    last, frame.pixfmt
                );
            }
        }
        self.last_pixfmt = Some(frame.pixfmt);
        match frame.pixfmt {
            AVPixelFormat::AV_PIX_FMT_NV12 => Ok(frame.data),
            AVPixelFormat::AV_PIX_FMT_NV16 => {
                // Hardware emitted 4:2:2; downsample chroma to 4:2:0.
                let conv = self
                    .nv16_to_nv12
                    .get_or_insert_with(|| Nv12Converter::nv16_to_nv12(self.resolution));
                let nv12 = conv.convert(&frame.data)?;
                Ok(nv12.to_vec())
            }
            other => Err(AppError::VideoError(format!(
                "mjpeg_rkmpp output pixfmt {:?} (expected NV12/NV16)",
                other
            ))),
        }
    }
}

View File

@@ -0,0 +1,54 @@
//! MJPEG decoder using TurboJPEG (software) -> RGB24.
use turbojpeg::{Decompressor, Image, PixelFormat as TJPixelFormat};
use crate::error::{AppError, Result};
use crate::video::format::Resolution;
/// Software MJPEG decoder producing packed RGB24 frames via TurboJPEG.
pub struct MjpegTurboDecoder {
    decompressor: Decompressor,
    resolution: Resolution,
}

impl MjpegTurboDecoder {
    /// Creates a decoder bound to a fixed expected resolution.
    ///
    /// Returns `AppError::VideoError` if the TurboJPEG decompressor cannot
    /// be created.
    pub fn new(resolution: Resolution) -> Result<Self> {
        match Decompressor::new() {
            Ok(decompressor) => Ok(Self {
                decompressor,
                resolution,
            }),
            Err(e) => Err(AppError::VideoError(format!(
                "Failed to create turbojpeg decoder: {}",
                e
            ))),
        }
    }

    /// Decodes a single MJPEG frame into a tightly packed RGB24 buffer.
    ///
    /// Fails when the JPEG header cannot be parsed, when the encoded frame
    /// size does not match the configured resolution, or when decompression
    /// itself fails.
    pub fn decode_to_rgb(&mut self, mjpeg: &[u8]) -> Result<Vec<u8>> {
        let header = self
            .decompressor
            .read_header(mjpeg)
            .map_err(|e| AppError::VideoError(format!("turbojpeg read_header failed: {}", e)))?;

        let (w, h) = (header.width, header.height);
        if w as u32 != self.resolution.width || h as u32 != self.resolution.height {
            return Err(AppError::VideoError(format!(
                "turbojpeg size mismatch: {}x{} (expected {}x{})",
                w, h, self.resolution.width, self.resolution.height
            )));
        }

        // Packed RGB row stride: 3 bytes per pixel, no padding.
        let row_bytes = w * 3;
        let mut image = Image {
            pixels: vec![0u8; h * row_bytes],
            width: w,
            pitch: row_bytes,
            height: h,
            format: TJPixelFormat::RGB,
        };
        self.decompressor
            .decompress(mjpeg, image.as_deref_mut())
            .map_err(|e| AppError::VideoError(format!("turbojpeg decode failed: {}", e)))?;
        Ok(image.pixels)
    }
}

View File

@@ -1,3 +1,11 @@
//! Video decoder implementations //! Video decoder implementations
//! //!
//! This module provides video decoding capabilities. //! This module provides video decoding capabilities.
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
pub mod mjpeg_rkmpp;
pub mod mjpeg_turbo;
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
pub use mjpeg_rkmpp::MjpegRkmppDecoder;
pub use mjpeg_turbo::MjpegTurboDecoder;

View File

@@ -28,14 +28,17 @@ const AUTO_STOP_GRACE_PERIOD_SECS: u64 = 3;
use crate::error::{AppError, Result}; use crate::error::{AppError, Result};
use crate::video::convert::{Nv12Converter, PixelConverter}; use crate::video::convert::{Nv12Converter, PixelConverter};
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
use crate::video::decoder::MjpegRkmppDecoder;
use crate::video::decoder::MjpegTurboDecoder;
use crate::video::encoder::h264::{detect_best_encoder, H264Config, H264Encoder, H264InputFormat}; use crate::video::encoder::h264::{detect_best_encoder, H264Config, H264Encoder, H264InputFormat};
use crate::video::encoder::h265::{ use crate::video::encoder::h265::{
detect_best_h265_encoder, H265Config, H265Encoder, H265InputFormat, detect_best_h265_encoder, H265Config, H265Encoder, H265InputFormat,
}; };
use crate::video::encoder::registry::{EncoderBackend, EncoderRegistry, VideoEncoderType}; use crate::video::encoder::registry::{EncoderBackend, EncoderRegistry, VideoEncoderType};
use crate::video::encoder::traits::EncoderConfig; use crate::video::encoder::traits::EncoderConfig;
use crate::video::encoder::vp8::{VP8Config, VP8Encoder}; use crate::video::encoder::vp8::{detect_best_vp8_encoder, VP8Config, VP8Encoder};
use crate::video::encoder::vp9::{VP9Config, VP9Encoder}; use crate::video::encoder::vp9::{detect_best_vp9_encoder, VP9Config, VP9Encoder};
use crate::video::format::{PixelFormat, Resolution}; use crate::video::format::{PixelFormat, Resolution};
use crate::video::frame::VideoFrame; use crate::video::frame::VideoFrame;
@@ -292,10 +295,27 @@ impl VideoEncoderTrait for VP9EncoderWrapper {
} }
} }
enum MjpegDecoderKind {
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
Rkmpp(MjpegRkmppDecoder),
Turbo(MjpegTurboDecoder),
}
impl MjpegDecoderKind {
fn decode(&mut self, data: &[u8]) -> Result<Vec<u8>> {
match self {
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
MjpegDecoderKind::Rkmpp(decoder) => decoder.decode_to_nv12(data),
MjpegDecoderKind::Turbo(decoder) => decoder.decode_to_rgb(data),
}
}
}
/// Universal shared video pipeline /// Universal shared video pipeline
pub struct SharedVideoPipeline { pub struct SharedVideoPipeline {
config: RwLock<SharedVideoPipelineConfig>, config: RwLock<SharedVideoPipelineConfig>,
encoder: Mutex<Option<Box<dyn VideoEncoderTrait + Send>>>, encoder: Mutex<Option<Box<dyn VideoEncoderTrait + Send>>>,
mjpeg_decoder: Mutex<Option<MjpegDecoderKind>>,
nv12_converter: Mutex<Option<Nv12Converter>>, nv12_converter: Mutex<Option<Nv12Converter>>,
yuv420p_converter: Mutex<Option<PixelConverter>>, yuv420p_converter: Mutex<Option<PixelConverter>>,
/// Whether the encoder needs YUV420P (true) or NV12 (false) /// Whether the encoder needs YUV420P (true) or NV12 (false)
@@ -333,6 +353,7 @@ impl SharedVideoPipeline {
let pipeline = Arc::new(Self { let pipeline = Arc::new(Self {
config: RwLock::new(config), config: RwLock::new(config),
encoder: Mutex::new(None), encoder: Mutex::new(None),
mjpeg_decoder: Mutex::new(None),
nv12_converter: Mutex::new(None), nv12_converter: Mutex::new(None),
yuv420p_converter: Mutex::new(None), yuv420p_converter: Mutex::new(None),
encoder_needs_yuv420p: AtomicBool::new(false), encoder_needs_yuv420p: AtomicBool::new(false),
@@ -367,12 +388,16 @@ impl SharedVideoPipeline {
} }
}; };
let needs_mjpeg_decode = config.input_format.is_compressed();
// Check if RKMPP backend is available for direct input optimization // Check if RKMPP backend is available for direct input optimization
let is_rkmpp_available = registry let is_rkmpp_available = registry
.encoder_with_backend(VideoEncoderType::H264, EncoderBackend::Rkmpp) .encoder_with_backend(VideoEncoderType::H264, EncoderBackend::Rkmpp)
.is_some(); .is_some();
let use_yuyv_direct = is_rkmpp_available && config.input_format == PixelFormat::Yuyv; let use_yuyv_direct =
is_rkmpp_available && !needs_mjpeg_decode && config.input_format == PixelFormat::Yuyv;
let use_rkmpp_direct = is_rkmpp_available let use_rkmpp_direct = is_rkmpp_available
&& !needs_mjpeg_decode
&& matches!( && matches!(
config.input_format, config.input_format,
PixelFormat::Yuyv PixelFormat::Yuyv
@@ -396,10 +421,9 @@ impl SharedVideoPipeline {
); );
} }
// Create encoder based on codec type let selected_codec_name = match config.output_codec {
let encoder: Box<dyn VideoEncoderTrait + Send> = match config.output_codec {
VideoEncoderType::H264 => { VideoEncoderType::H264 => {
let codec_name = if use_rkmpp_direct { if use_rkmpp_direct {
// Force RKMPP backend for direct input // Force RKMPP backend for direct input
get_codec_name(VideoEncoderType::H264, Some(EncoderBackend::Rkmpp)).ok_or_else( get_codec_name(VideoEncoderType::H264, Some(EncoderBackend::Rkmpp)).ok_or_else(
|| { || {
@@ -423,11 +447,109 @@ impl SharedVideoPipeline {
detected.ok_or_else(|| { detected.ok_or_else(|| {
AppError::VideoError("No H.264 encoder available".to_string()) AppError::VideoError("No H.264 encoder available".to_string())
})? })?
}
}
VideoEncoderType::H265 => {
if use_rkmpp_direct {
get_codec_name(VideoEncoderType::H265, Some(EncoderBackend::Rkmpp)).ok_or_else(
|| {
AppError::VideoError(
"RKMPP backend not available for H.265".to_string(),
)
},
)?
} else if let Some(ref backend) = config.encoder_backend {
get_codec_name(VideoEncoderType::H265, Some(*backend)).ok_or_else(|| {
AppError::VideoError(format!(
"Backend {:?} does not support H.265",
backend
))
})?
} else {
let (_encoder_type, detected) =
detect_best_h265_encoder(config.resolution.width, config.resolution.height);
detected.ok_or_else(|| {
AppError::VideoError("No H.265 encoder available".to_string())
})?
}
}
VideoEncoderType::VP8 => {
if let Some(ref backend) = config.encoder_backend {
get_codec_name(VideoEncoderType::VP8, Some(*backend)).ok_or_else(|| {
AppError::VideoError(format!("Backend {:?} does not support VP8", backend))
})?
} else {
let (_encoder_type, detected) =
detect_best_vp8_encoder(config.resolution.width, config.resolution.height);
detected.ok_or_else(|| {
AppError::VideoError("No VP8 encoder available".to_string())
})?
}
}
VideoEncoderType::VP9 => {
if let Some(ref backend) = config.encoder_backend {
get_codec_name(VideoEncoderType::VP9, Some(*backend)).ok_or_else(|| {
AppError::VideoError(format!("Backend {:?} does not support VP9", backend))
})?
} else {
let (_encoder_type, detected) =
detect_best_vp9_encoder(config.resolution.width, config.resolution.height);
detected.ok_or_else(|| {
AppError::VideoError("No VP9 encoder available".to_string())
})?
}
}
}; };
let is_rkmpp_encoder = selected_codec_name.contains("rkmpp");
let is_software_encoder = selected_codec_name.contains("libx264")
|| selected_codec_name.contains("libx265")
|| selected_codec_name.contains("libvpx");
let pipeline_input_format = if needs_mjpeg_decode {
if is_rkmpp_encoder {
info!(
"MJPEG input detected, using RKMPP decoder ({} -> NV12 with NV16 fallback)",
config.input_format
);
#[cfg(any(target_arch = "aarch64", target_arch = "arm"))]
{
let decoder = MjpegRkmppDecoder::new(config.resolution)?;
*self.mjpeg_decoder.lock().await = Some(MjpegDecoderKind::Rkmpp(decoder));
PixelFormat::Nv12
}
#[cfg(not(any(target_arch = "aarch64", target_arch = "arm")))]
{
return Err(AppError::VideoError(
"RKMPP MJPEG decode is only supported on ARM builds".to_string(),
));
}
} else if is_software_encoder {
info!(
"MJPEG input detected, using TurboJPEG decoder ({} -> RGB24)",
config.input_format
);
let decoder = MjpegTurboDecoder::new(config.resolution)?;
*self.mjpeg_decoder.lock().await = Some(MjpegDecoderKind::Turbo(decoder));
PixelFormat::Rgb24
} else {
return Err(AppError::VideoError(
"MJPEG input requires RKMPP or software encoder".to_string(),
));
}
} else {
*self.mjpeg_decoder.lock().await = None;
config.input_format
};
// Create encoder based on codec type
let encoder: Box<dyn VideoEncoderTrait + Send> = match config.output_codec {
VideoEncoderType::H264 => {
let codec_name = selected_codec_name.clone();
let is_rkmpp = codec_name.contains("rkmpp"); let is_rkmpp = codec_name.contains("rkmpp");
let direct_input_format = if is_rkmpp { let direct_input_format = if is_rkmpp {
match config.input_format { match pipeline_input_format {
PixelFormat::Yuyv => Some(H264InputFormat::Yuyv422), PixelFormat::Yuyv => Some(H264InputFormat::Yuyv422),
PixelFormat::Yuv420 => Some(H264InputFormat::Yuv420p), PixelFormat::Yuv420 => Some(H264InputFormat::Yuv420p),
PixelFormat::Rgb24 => Some(H264InputFormat::Rgb24), PixelFormat::Rgb24 => Some(H264InputFormat::Rgb24),
@@ -439,7 +561,7 @@ impl SharedVideoPipeline {
_ => None, _ => None,
} }
} else if codec_name.contains("libx264") { } else if codec_name.contains("libx264") {
match config.input_format { match pipeline_input_format {
PixelFormat::Nv12 => Some(H264InputFormat::Nv12), PixelFormat::Nv12 => Some(H264InputFormat::Nv12),
PixelFormat::Nv16 => Some(H264InputFormat::Nv16), PixelFormat::Nv16 => Some(H264InputFormat::Nv16),
PixelFormat::Nv21 => Some(H264InputFormat::Nv21), PixelFormat::Nv21 => Some(H264InputFormat::Nv21),
@@ -485,32 +607,11 @@ impl SharedVideoPipeline {
Box::new(H264EncoderWrapper(encoder)) Box::new(H264EncoderWrapper(encoder))
} }
VideoEncoderType::H265 => { VideoEncoderType::H265 => {
let codec_name = if use_rkmpp_direct { let codec_name = selected_codec_name.clone();
get_codec_name(VideoEncoderType::H265, Some(EncoderBackend::Rkmpp)).ok_or_else(
|| {
AppError::VideoError(
"RKMPP backend not available for H.265".to_string(),
)
},
)?
} else if let Some(ref backend) = config.encoder_backend {
get_codec_name(VideoEncoderType::H265, Some(*backend)).ok_or_else(|| {
AppError::VideoError(format!(
"Backend {:?} does not support H.265",
backend
))
})?
} else {
let (_encoder_type, detected) =
detect_best_h265_encoder(config.resolution.width, config.resolution.height);
detected.ok_or_else(|| {
AppError::VideoError("No H.265 encoder available".to_string())
})?
};
let is_rkmpp = codec_name.contains("rkmpp"); let is_rkmpp = codec_name.contains("rkmpp");
let direct_input_format = if is_rkmpp { let direct_input_format = if is_rkmpp {
match config.input_format { match pipeline_input_format {
PixelFormat::Yuyv => Some(H265InputFormat::Yuyv422), PixelFormat::Yuyv => Some(H265InputFormat::Yuyv422),
PixelFormat::Yuv420 => Some(H265InputFormat::Yuv420p), PixelFormat::Yuv420 => Some(H265InputFormat::Yuv420p),
PixelFormat::Rgb24 => Some(H265InputFormat::Rgb24), PixelFormat::Rgb24 => Some(H265InputFormat::Rgb24),
@@ -522,7 +623,7 @@ impl SharedVideoPipeline {
_ => None, _ => None,
} }
} else if codec_name.contains("libx265") { } else if codec_name.contains("libx265") {
match config.input_format { match pipeline_input_format {
PixelFormat::Yuv420 => Some(H265InputFormat::Yuv420p), PixelFormat::Yuv420 => Some(H265InputFormat::Yuv420p),
_ => None, _ => None,
} }
@@ -572,23 +673,14 @@ impl SharedVideoPipeline {
VideoEncoderType::VP8 => { VideoEncoderType::VP8 => {
let encoder_config = let encoder_config =
VP8Config::low_latency(config.resolution, config.bitrate_kbps()); VP8Config::low_latency(config.resolution, config.bitrate_kbps());
let codec_name = selected_codec_name.clone();
let encoder = if let Some(ref backend) = config.encoder_backend { if let Some(ref backend) = config.encoder_backend {
let codec_name = get_codec_name(VideoEncoderType::VP8, Some(*backend))
.ok_or_else(|| {
AppError::VideoError(format!(
"Backend {:?} does not support VP8",
backend
))
})?;
info!( info!(
"Creating VP8 encoder with backend {:?} (codec: {})", "Creating VP8 encoder with backend {:?} (codec: {})",
backend, codec_name backend, codec_name
); );
VP8Encoder::with_codec(encoder_config, &codec_name)? }
} else { let encoder = VP8Encoder::with_codec(encoder_config, &codec_name)?;
VP8Encoder::new(encoder_config)?
};
info!("Created VP8 encoder: {}", encoder.codec_name()); info!("Created VP8 encoder: {}", encoder.codec_name());
Box::new(VP8EncoderWrapper(encoder)) Box::new(VP8EncoderWrapper(encoder))
@@ -596,23 +688,14 @@ impl SharedVideoPipeline {
VideoEncoderType::VP9 => { VideoEncoderType::VP9 => {
let encoder_config = let encoder_config =
VP9Config::low_latency(config.resolution, config.bitrate_kbps()); VP9Config::low_latency(config.resolution, config.bitrate_kbps());
let codec_name = selected_codec_name.clone();
let encoder = if let Some(ref backend) = config.encoder_backend { if let Some(ref backend) = config.encoder_backend {
let codec_name = get_codec_name(VideoEncoderType::VP9, Some(*backend))
.ok_or_else(|| {
AppError::VideoError(format!(
"Backend {:?} does not support VP9",
backend
))
})?;
info!( info!(
"Creating VP9 encoder with backend {:?} (codec: {})", "Creating VP9 encoder with backend {:?} (codec: {})",
backend, codec_name backend, codec_name
); );
VP9Encoder::with_codec(encoder_config, &codec_name)? }
} else { let encoder = VP9Encoder::with_codec(encoder_config, &codec_name)?;
VP9Encoder::new(encoder_config)?
};
info!("Created VP9 encoder: {}", encoder.codec_name()); info!("Created VP9 encoder: {}", encoder.codec_name());
Box::new(VP9EncoderWrapper(encoder)) Box::new(VP9EncoderWrapper(encoder))
@@ -623,7 +706,7 @@ impl SharedVideoPipeline {
let codec_name = encoder.codec_name(); let codec_name = encoder.codec_name();
let use_direct_input = if codec_name.contains("rkmpp") { let use_direct_input = if codec_name.contains("rkmpp") {
matches!( matches!(
config.input_format, pipeline_input_format,
PixelFormat::Yuyv PixelFormat::Yuyv
| PixelFormat::Yuv420 | PixelFormat::Yuv420
| PixelFormat::Rgb24 | PixelFormat::Rgb24
@@ -635,7 +718,7 @@ impl SharedVideoPipeline {
) )
} else if codec_name.contains("libx264") { } else if codec_name.contains("libx264") {
matches!( matches!(
config.input_format, pipeline_input_format,
PixelFormat::Nv12 | PixelFormat::Nv16 | PixelFormat::Nv21 | PixelFormat::Yuv420 PixelFormat::Nv12 | PixelFormat::Nv16 | PixelFormat::Nv21 | PixelFormat::Yuv420
) )
} else { } else {
@@ -645,7 +728,7 @@ impl SharedVideoPipeline {
// Determine if encoder needs YUV420P (software encoders) or NV12 (hardware encoders) // Determine if encoder needs YUV420P (software encoders) or NV12 (hardware encoders)
let needs_yuv420p = if codec_name.contains("libx264") { let needs_yuv420p = if codec_name.contains("libx264") {
!matches!( !matches!(
config.input_format, pipeline_input_format,
PixelFormat::Nv12 | PixelFormat::Nv16 | PixelFormat::Nv21 | PixelFormat::Yuv420 PixelFormat::Nv12 | PixelFormat::Nv16 | PixelFormat::Nv21 | PixelFormat::Yuv420
) )
} else { } else {
@@ -667,7 +750,7 @@ impl SharedVideoPipeline {
// Create converter or decoder based on input format and encoder needs // Create converter or decoder based on input format and encoder needs
info!( info!(
"Initializing input format handler for: {} -> {}", "Initializing input format handler for: {} -> {}",
config.input_format, pipeline_input_format,
if use_direct_input { if use_direct_input {
"direct" "direct"
} else if needs_yuv420p { } else if needs_yuv420p {
@@ -686,7 +769,7 @@ impl SharedVideoPipeline {
(None, None) (None, None)
} else if needs_yuv420p { } else if needs_yuv420p {
// Software encoder needs YUV420P // Software encoder needs YUV420P
match config.input_format { match pipeline_input_format {
PixelFormat::Yuv420 => { PixelFormat::Yuv420 => {
info!("Using direct YUV420P input (no conversion)"); info!("Using direct YUV420P input (no conversion)");
(None, None) (None, None)
@@ -729,13 +812,13 @@ impl SharedVideoPipeline {
_ => { _ => {
return Err(AppError::VideoError(format!( return Err(AppError::VideoError(format!(
"Unsupported input format for software encoding: {}", "Unsupported input format for software encoding: {}",
config.input_format pipeline_input_format
))); )));
} }
} }
} else { } else {
// Hardware encoder needs NV12 // Hardware encoder needs NV12
match config.input_format { match pipeline_input_format {
PixelFormat::Nv12 => { PixelFormat::Nv12 => {
info!("Using direct NV12 input (no conversion)"); info!("Using direct NV12 input (no conversion)");
(None, None) (None, None)
@@ -767,7 +850,7 @@ impl SharedVideoPipeline {
_ => { _ => {
return Err(AppError::VideoError(format!( return Err(AppError::VideoError(format!(
"Unsupported input format for hardware encoding: {}", "Unsupported input format for hardware encoding: {}",
config.input_format pipeline_input_format
))); )));
} }
} }
@@ -857,6 +940,7 @@ impl SharedVideoPipeline {
// Clear encoder state // Clear encoder state
*self.encoder.lock().await = None; *self.encoder.lock().await = None;
*self.mjpeg_decoder.lock().await = None;
*self.nv12_converter.lock().await = None; *self.nv12_converter.lock().await = None;
*self.yuv420p_converter.lock().await = None; *self.yuv420p_converter.lock().await = None;
self.encoder_needs_yuv420p.store(false, Ordering::Release); self.encoder_needs_yuv420p.store(false, Ordering::Release);
@@ -973,8 +1057,10 @@ impl SharedVideoPipeline {
} }
// Batch update stats every second (reduces lock contention) // Batch update stats every second (reduces lock contention)
if last_fps_time.elapsed() >= Duration::from_secs(1) { let fps_elapsed = last_fps_time.elapsed();
let current_fps = fps_frame_count as f32 / last_fps_time.elapsed().as_secs_f32(); if fps_elapsed >= Duration::from_secs(1) {
let current_fps =
fps_frame_count as f32 / fps_elapsed.as_secs_f32();
fps_frame_count = 0; fps_frame_count = 0;
last_fps_time = Instant::now(); last_fps_time = Instant::now();
@@ -1020,11 +1106,25 @@ impl SharedVideoPipeline {
frame: &VideoFrame, frame: &VideoFrame,
frame_count: u64, frame_count: u64,
) -> Result<Option<EncodedVideoFrame>> { ) -> Result<Option<EncodedVideoFrame>> {
let (fps, codec, input_format) = {
let config = self.config.read().await; let config = self.config.read().await;
(config.fps, config.output_codec, config.input_format)
};
let raw_frame = frame.data(); let raw_frame = frame.data();
let fps = config.fps; let decoded_buf = if input_format.is_compressed() {
let codec = config.output_codec; let decoded = {
drop(config); let mut decoder_guard = self.mjpeg_decoder.lock().await;
let decoder = decoder_guard.as_mut().ok_or_else(|| {
AppError::VideoError("MJPEG decoder not initialized".to_string())
})?;
decoder.decode(raw_frame)?
};
Some(decoded)
} else {
None
};
let raw_frame = decoded_buf.as_deref().unwrap_or(raw_frame);
// Calculate PTS from real capture timestamp (lock-free using AtomicI64) // Calculate PTS from real capture timestamp (lock-free using AtomicI64)
// This ensures smooth playback even when capture timing varies // This ensures smooth playback even when capture timing varies

View File

@@ -220,11 +220,8 @@ pub async fn apply_hid_config(
// Get MSD config from store // Get MSD config from store
let config = state.config.get(); let config = state.config.get();
let msd = crate::msd::MsdController::new( let msd =
state.otg_service.clone(), crate::msd::MsdController::new(state.otg_service.clone(), config.msd.msd_dir_path());
&config.msd.images_path,
&config.msd.drive_path,
);
if let Err(e) = msd.init().await { if let Err(e) = msd.init().await {
tracing::warn!("Failed to auto-initialize MSD for OTG: {}", e); tracing::warn!("Failed to auto-initialize MSD for OTG: {}", e);
@@ -253,23 +250,50 @@ pub async fn apply_msd_config(
// Check if MSD enabled state changed // Check if MSD enabled state changed
let old_msd_enabled = old_config.enabled; let old_msd_enabled = old_config.enabled;
let new_msd_enabled = new_config.enabled; let new_msd_enabled = new_config.enabled;
let msd_dir_changed = old_config.msd_dir != new_config.msd_dir;
tracing::info!( tracing::info!(
"MSD enabled: old={}, new={}", "MSD enabled: old={}, new={}",
old_msd_enabled, old_msd_enabled,
new_msd_enabled new_msd_enabled
); );
if msd_dir_changed {
tracing::info!("MSD directory changed: {}", new_config.msd_dir);
}
if old_msd_enabled != new_msd_enabled { // Ensure MSD directories exist (msd/images, msd/ventoy)
if new_msd_enabled { let msd_dir = new_config.msd_dir_path();
// MSD was disabled, now enabled - need to initialize if let Err(e) = std::fs::create_dir_all(msd_dir.join("images")) {
tracing::info!("MSD enabled in config, initializing..."); tracing::warn!("Failed to create MSD images directory: {}", e);
}
if let Err(e) = std::fs::create_dir_all(msd_dir.join("ventoy")) {
tracing::warn!("Failed to create MSD ventoy directory: {}", e);
}
let msd = crate::msd::MsdController::new( let needs_reload = old_msd_enabled != new_msd_enabled || msd_dir_changed;
state.otg_service.clone(), if !needs_reload {
&new_config.images_path, tracing::info!(
&new_config.drive_path, "MSD enabled state unchanged ({}) and directory unchanged, no reload needed",
new_msd_enabled
); );
return Ok(());
}
if new_msd_enabled {
tracing::info!("(Re)initializing MSD...");
// Shutdown existing controller if present
let mut msd_guard = state.msd.write().await;
if let Some(msd) = msd_guard.as_mut() {
if let Err(e) = msd.shutdown().await {
tracing::warn!("MSD shutdown failed: {}", e);
}
}
*msd_guard = None;
drop(msd_guard);
let msd =
crate::msd::MsdController::new(state.otg_service.clone(), new_config.msd_dir_path());
msd.init() msd.init()
.await .await
.map_err(|e| AppError::Config(format!("MSD initialization failed: {}", e)))?; .map_err(|e| AppError::Config(format!("MSD initialization failed: {}", e)))?;
@@ -282,23 +306,18 @@ pub async fn apply_msd_config(
*state.msd.write().await = Some(msd); *state.msd.write().await = Some(msd);
tracing::info!("MSD initialized successfully"); tracing::info!("MSD initialized successfully");
} else { } else {
// MSD was enabled, now disabled - shutdown // MSD disabled - shutdown
tracing::info!("MSD disabled in config, shutting down..."); tracing::info!("MSD disabled in config, shutting down...");
if let Some(msd) = state.msd.write().await.as_mut() { let mut msd_guard = state.msd.write().await;
if let Some(msd) = msd_guard.as_mut() {
if let Err(e) = msd.shutdown().await { if let Err(e) = msd.shutdown().await {
tracing::warn!("MSD shutdown failed: {}", e); tracing::warn!("MSD shutdown failed: {}", e);
} }
} }
*state.msd.write().await = None; *msd_guard = None;
tracing::info!("MSD shutdown complete"); tracing::info!("MSD shutdown complete");
} }
} else {
tracing::info!(
"MSD enabled state unchanged ({}), no reload needed",
new_msd_enabled
);
}
Ok(()) Ok(())
} }

View File

@@ -3,6 +3,7 @@ use crate::error::AppError;
use crate::rustdesk::config::RustDeskConfig; use crate::rustdesk::config::RustDeskConfig;
use crate::video::encoder::BitratePreset; use crate::video::encoder::BitratePreset;
use serde::Deserialize; use serde::Deserialize;
use std::path::Path;
use typeshare::typeshare; use typeshare::typeshare;
// ===== Video Config ===== // ===== Video Config =====
@@ -305,16 +306,20 @@ impl HidConfigUpdate {
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
pub struct MsdConfigUpdate { pub struct MsdConfigUpdate {
pub enabled: Option<bool>, pub enabled: Option<bool>,
pub images_path: Option<String>, pub msd_dir: Option<String>,
pub drive_path: Option<String>,
pub virtual_drive_size_mb: Option<u32>,
} }
impl MsdConfigUpdate { impl MsdConfigUpdate {
pub fn validate(&self) -> crate::error::Result<()> { pub fn validate(&self) -> crate::error::Result<()> {
if let Some(size) = self.virtual_drive_size_mb { if let Some(ref dir) = self.msd_dir {
if !(1..=10240).contains(&size) { let trimmed = dir.trim();
return Err(AppError::BadRequest("Drive size must be 1-10240 MB".into())); if trimmed.is_empty() {
return Err(AppError::BadRequest("MSD directory cannot be empty".into()));
}
if !Path::new(trimmed).is_absolute() {
return Err(AppError::BadRequest(
"MSD directory must be an absolute path".into(),
));
} }
} }
Ok(()) Ok(())
@@ -324,14 +329,8 @@ impl MsdConfigUpdate {
if let Some(enabled) = self.enabled { if let Some(enabled) = self.enabled {
config.enabled = enabled; config.enabled = enabled;
} }
if let Some(ref path) = self.images_path { if let Some(ref dir) = self.msd_dir {
config.images_path = path.clone(); config.msd_dir = dir.trim().to_string();
}
if let Some(ref path) = self.drive_path {
config.drive_path = path.clone();
}
if let Some(size) = self.virtual_drive_size_mb {
config.virtual_drive_size_mb = size;
} }
} }
} }

View File

@@ -90,12 +90,13 @@ pub struct CapabilityInfo {
pub async fn system_info(State(state): State<Arc<AppState>>) -> Json<SystemInfo> { pub async fn system_info(State(state): State<Arc<AppState>>) -> Json<SystemInfo> {
let config = state.config.get(); let config = state.config.get();
// Get disk space information for MSD images directory // Get disk space information for MSD base directory
let disk_space = { let disk_space = {
if let Some(ref msd_controller) = *state.msd.read().await { let msd_dir = config.msd.msd_dir_path();
get_disk_space(msd_controller.images_path()).ok() if msd_dir.as_os_str().is_empty() {
} else {
None None
} else {
get_disk_space(&msd_dir).ok()
} }
}; };
@@ -933,31 +934,56 @@ pub async fn update_config(
} }
} }
// MSD config processing - reload if enabled state changed // MSD config processing - reload if enabled state or directory changed
if has_msd { if has_msd {
tracing::info!("MSD config sent, checking if reload needed..."); tracing::info!("MSD config sent, checking if reload needed...");
tracing::debug!("Old MSD config: {:?}", old_config.msd); tracing::debug!("Old MSD config: {:?}", old_config.msd);
tracing::debug!("New MSD config: {:?}", new_config.msd); tracing::debug!("New MSD config: {:?}", new_config.msd);
// Check if MSD enabled state changed
let old_msd_enabled = old_config.msd.enabled; let old_msd_enabled = old_config.msd.enabled;
let new_msd_enabled = new_config.msd.enabled; let new_msd_enabled = new_config.msd.enabled;
let msd_dir_changed = old_config.msd.msd_dir != new_config.msd.msd_dir;
tracing::info!( tracing::info!(
"MSD enabled: old={}, new={}", "MSD enabled: old={}, new={}",
old_msd_enabled, old_msd_enabled,
new_msd_enabled new_msd_enabled
); );
if msd_dir_changed {
tracing::info!("MSD directory changed: {}", new_config.msd.msd_dir);
}
if old_msd_enabled != new_msd_enabled { // Ensure MSD directories exist (msd/images, msd/ventoy)
if new_msd_enabled { let msd_dir = new_config.msd.msd_dir_path();
// MSD was disabled, now enabled - need to initialize if let Err(e) = std::fs::create_dir_all(msd_dir.join("images")) {
tracing::info!("MSD enabled in config, initializing..."); tracing::warn!("Failed to create MSD images directory: {}", e);
}
if let Err(e) = std::fs::create_dir_all(msd_dir.join("ventoy")) {
tracing::warn!("Failed to create MSD ventoy directory: {}", e);
}
let needs_reload = old_msd_enabled != new_msd_enabled || msd_dir_changed;
if !needs_reload {
tracing::info!(
"MSD enabled state unchanged ({}) and directory unchanged, no reload needed",
new_msd_enabled
);
} else if new_msd_enabled {
tracing::info!("(Re)initializing MSD...");
// Shutdown existing controller if present
let mut msd_guard = state.msd.write().await;
if let Some(msd) = msd_guard.as_mut() {
if let Err(e) = msd.shutdown().await {
tracing::warn!("MSD shutdown failed: {}", e);
}
}
*msd_guard = None;
drop(msd_guard);
let msd = crate::msd::MsdController::new( let msd = crate::msd::MsdController::new(
state.otg_service.clone(), state.otg_service.clone(),
&new_config.msd.images_path, new_config.msd.msd_dir_path(),
&new_config.msd.drive_path,
); );
if let Err(e) = msd.init().await { if let Err(e) = msd.init().await {
tracing::error!("MSD initialization failed: {}", e); tracing::error!("MSD initialization failed: {}", e);
@@ -977,23 +1003,17 @@ pub async fn update_config(
*state.msd.write().await = Some(msd); *state.msd.write().await = Some(msd);
tracing::info!("MSD initialized successfully"); tracing::info!("MSD initialized successfully");
} else { } else {
// MSD was enabled, now disabled - shutdown
tracing::info!("MSD disabled in config, shutting down..."); tracing::info!("MSD disabled in config, shutting down...");
if let Some(msd) = state.msd.write().await.as_mut() { let mut msd_guard = state.msd.write().await;
if let Some(msd) = msd_guard.as_mut() {
if let Err(e) = msd.shutdown().await { if let Err(e) = msd.shutdown().await {
tracing::warn!("MSD shutdown failed: {}", e); tracing::warn!("MSD shutdown failed: {}", e);
} }
} }
*state.msd.write().await = None; *msd_guard = None;
tracing::info!("MSD shutdown complete"); tracing::info!("MSD shutdown complete");
} }
} else {
tracing::info!(
"MSD enabled state unchanged ({}), no reload needed",
new_msd_enabled
);
}
} }
Ok(Json(LoginResponse { Ok(Json(LoginResponse {
@@ -2069,7 +2089,7 @@ pub async fn msd_status(State(state): State<Arc<AppState>>) -> Result<Json<MsdSt
/// List all available images /// List all available images
pub async fn msd_images_list(State(state): State<Arc<AppState>>) -> Result<Json<Vec<ImageInfo>>> { pub async fn msd_images_list(State(state): State<Arc<AppState>>) -> Result<Json<Vec<ImageInfo>>> {
let config = state.config.get(); let config = state.config.get();
let images_path = std::path::PathBuf::from(&config.msd.images_path); let images_path = config.msd.images_dir();
let manager = ImageManager::new(images_path); let manager = ImageManager::new(images_path);
let images = manager.list()?; let images = manager.list()?;
@@ -2082,7 +2102,7 @@ pub async fn msd_image_upload(
mut multipart: Multipart, mut multipart: Multipart,
) -> Result<Json<ImageInfo>> { ) -> Result<Json<ImageInfo>> {
let config = state.config.get(); let config = state.config.get();
let images_path = std::path::PathBuf::from(&config.msd.images_path); let images_path = config.msd.images_dir();
let manager = ImageManager::new(images_path); let manager = ImageManager::new(images_path);
while let Some(field) = multipart while let Some(field) = multipart
@@ -2115,7 +2135,7 @@ pub async fn msd_image_get(
AxumPath(id): AxumPath<String>, AxumPath(id): AxumPath<String>,
) -> Result<Json<ImageInfo>> { ) -> Result<Json<ImageInfo>> {
let config = state.config.get(); let config = state.config.get();
let images_path = std::path::PathBuf::from(&config.msd.images_path); let images_path = config.msd.images_dir();
let manager = ImageManager::new(images_path); let manager = ImageManager::new(images_path);
let image = manager.get(&id)?; let image = manager.get(&id)?;
@@ -2128,7 +2148,7 @@ pub async fn msd_image_delete(
AxumPath(id): AxumPath<String>, AxumPath(id): AxumPath<String>,
) -> Result<Json<LoginResponse>> { ) -> Result<Json<LoginResponse>> {
let config = state.config.get(); let config = state.config.get();
let images_path = std::path::PathBuf::from(&config.msd.images_path); let images_path = config.msd.images_dir();
let manager = ImageManager::new(images_path); let manager = ImageManager::new(images_path);
manager.delete(&id)?; manager.delete(&id)?;
@@ -2194,7 +2214,7 @@ pub async fn msd_connect(
})?; })?;
// Get image info from ImageManager // Get image info from ImageManager
let images_path = std::path::PathBuf::from(&config.msd.images_path); let images_path = config.msd.images_dir();
let manager = ImageManager::new(images_path); let manager = ImageManager::new(images_path);
let image = manager.get(&image_id)?; let image = manager.get(&image_id)?;
@@ -2240,7 +2260,7 @@ pub async fn msd_disconnect(State(state): State<Arc<AppState>>) -> Result<Json<L
/// Get drive info /// Get drive info
pub async fn msd_drive_info(State(state): State<Arc<AppState>>) -> Result<Json<DriveInfo>> { pub async fn msd_drive_info(State(state): State<Arc<AppState>>) -> Result<Json<DriveInfo>> {
let config = state.config.get(); let config = state.config.get();
let drive_path = std::path::PathBuf::from(&config.msd.drive_path); let drive_path = config.msd.drive_path();
let drive = VentoyDrive::new(drive_path); let drive = VentoyDrive::new(drive_path);
if !drive.exists() { if !drive.exists() {
@@ -2257,7 +2277,7 @@ pub async fn msd_drive_init(
Json(req): Json<DriveInitRequest>, Json(req): Json<DriveInitRequest>,
) -> Result<Json<DriveInfo>> { ) -> Result<Json<DriveInfo>> {
let config = state.config.get(); let config = state.config.get();
let drive_path = std::path::PathBuf::from(&config.msd.drive_path); let drive_path = config.msd.drive_path();
let drive = VentoyDrive::new(drive_path); let drive = VentoyDrive::new(drive_path);
let info = drive.init(req.size_mb).await?; let info = drive.init(req.size_mb).await?;
@@ -2281,7 +2301,7 @@ pub async fn msd_drive_delete(State(state): State<Arc<AppState>>) -> Result<Json
drop(msd_guard); drop(msd_guard);
// Delete the drive file // Delete the drive file
let drive_path = std::path::PathBuf::from(&config.msd.drive_path); let drive_path = config.msd.drive_path();
if drive_path.exists() { if drive_path.exists() {
std::fs::remove_file(&drive_path) std::fs::remove_file(&drive_path)
.map_err(|e| AppError::Internal(format!("Failed to delete drive file: {}", e)))?; .map_err(|e| AppError::Internal(format!("Failed to delete drive file: {}", e)))?;
@@ -2299,7 +2319,7 @@ pub async fn msd_drive_files(
Query(params): Query<HashMap<String, String>>, Query(params): Query<HashMap<String, String>>,
) -> Result<Json<Vec<DriveFile>>> { ) -> Result<Json<Vec<DriveFile>>> {
let config = state.config.get(); let config = state.config.get();
let drive_path = std::path::PathBuf::from(&config.msd.drive_path); let drive_path = config.msd.drive_path();
let drive = VentoyDrive::new(drive_path); let drive = VentoyDrive::new(drive_path);
let dir_path = params.get("path").map(|s| s.as_str()).unwrap_or("/"); let dir_path = params.get("path").map(|s| s.as_str()).unwrap_or("/");
@@ -2314,7 +2334,7 @@ pub async fn msd_drive_upload(
mut multipart: Multipart, mut multipart: Multipart,
) -> Result<Json<LoginResponse>> { ) -> Result<Json<LoginResponse>> {
let config = state.config.get(); let config = state.config.get();
let drive_path = std::path::PathBuf::from(&config.msd.drive_path); let drive_path = config.msd.drive_path();
let drive = VentoyDrive::new(drive_path); let drive = VentoyDrive::new(drive_path);
let target_dir = params.get("path").map(|s| s.as_str()).unwrap_or("/"); let target_dir = params.get("path").map(|s| s.as_str()).unwrap_or("/");
@@ -2359,7 +2379,7 @@ pub async fn msd_drive_download(
AxumPath(file_path): AxumPath<String>, AxumPath(file_path): AxumPath<String>,
) -> Result<Response> { ) -> Result<Response> {
let config = state.config.get(); let config = state.config.get();
let drive_path = std::path::PathBuf::from(&config.msd.drive_path); let drive_path = config.msd.drive_path();
let drive = VentoyDrive::new(drive_path); let drive = VentoyDrive::new(drive_path);
// Get file stream (returns file size and channel receiver) // Get file stream (returns file size and channel receiver)
@@ -2393,7 +2413,7 @@ pub async fn msd_drive_file_delete(
AxumPath(file_path): AxumPath<String>, AxumPath(file_path): AxumPath<String>,
) -> Result<Json<LoginResponse>> { ) -> Result<Json<LoginResponse>> {
let config = state.config.get(); let config = state.config.get();
let drive_path = std::path::PathBuf::from(&config.msd.drive_path); let drive_path = config.msd.drive_path();
let drive = VentoyDrive::new(drive_path); let drive = VentoyDrive::new(drive_path);
drive.delete(&file_path).await?; drive.delete(&file_path).await?;
@@ -2410,7 +2430,7 @@ pub async fn msd_drive_mkdir(
AxumPath(dir_path): AxumPath<String>, AxumPath(dir_path): AxumPath<String>,
) -> Result<Json<LoginResponse>> { ) -> Result<Json<LoginResponse>> {
let config = state.config.get(); let config = state.config.get();
let drive_path = std::path::PathBuf::from(&config.msd.drive_path); let drive_path = config.msd.drive_path();
let drive = VentoyDrive::new(drive_path); let drive = VentoyDrive::new(drive_path);
drive.mkdir(&dir_path).await?; drive.mkdir(&dir_path).await?;

View File

@@ -586,7 +586,10 @@ impl UniversalSession {
// Send encoded frame via RTP // Send encoded frame via RTP
if let Err(e) = video_track if let Err(e) = video_track
.write_frame(&encoded_frame.data, encoded_frame.is_keyframe) .write_frame_bytes(
encoded_frame.data.clone(),
encoded_frame.is_keyframe,
)
.await .await
{ {
if frames_sent % 100 == 0 { if frames_sent % 100 == 0 {

View File

@@ -310,7 +310,7 @@ impl UniversalVideoTrack {
/// Handles codec-specific processing: /// Handles codec-specific processing:
/// - H264/H265: NAL unit parsing, parameter caching /// - H264/H265: NAL unit parsing, parameter caching
/// - VP8/VP9: Direct frame sending /// - VP8/VP9: Direct frame sending
pub async fn write_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> { pub async fn write_frame_bytes(&self, data: Bytes, is_keyframe: bool) -> Result<()> {
if data.is_empty() { if data.is_empty() {
return Ok(()); return Ok(());
} }
@@ -323,11 +323,16 @@ impl UniversalVideoTrack {
} }
} }
pub async fn write_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> {
self.write_frame_bytes(Bytes::copy_from_slice(data), is_keyframe)
.await
}
/// Write H264 frame (Annex B format) /// Write H264 frame (Annex B format)
/// ///
/// Sends the entire Annex B frame as a single Sample to allow the /// Sends the entire Annex B frame as a single Sample to allow the
/// H264Payloader to aggregate SPS+PPS into STAP-A packets. /// H264Payloader to aggregate SPS+PPS into STAP-A packets.
async fn write_h264_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> { async fn write_h264_frame(&self, data: Bytes, is_keyframe: bool) -> Result<()> {
// Send entire Annex B frame as one Sample // Send entire Annex B frame as one Sample
// The H264Payloader in rtp crate will: // The H264Payloader in rtp crate will:
// 1. Parse NAL units from Annex B format // 1. Parse NAL units from Annex B format
@@ -335,8 +340,9 @@ impl UniversalVideoTrack {
// 3. Aggregate SPS+PPS+IDR into STAP-A when possible // 3. Aggregate SPS+PPS+IDR into STAP-A when possible
// 4. Fragment large NALs using FU-A // 4. Fragment large NALs using FU-A
let frame_duration = Duration::from_micros(1_000_000 / self.config.fps.max(1) as u64); let frame_duration = Duration::from_micros(1_000_000 / self.config.fps.max(1) as u64);
let data_len = data.len();
let sample = Sample { let sample = Sample {
data: Bytes::copy_from_slice(data), data,
duration: frame_duration, duration: frame_duration,
..Default::default() ..Default::default()
}; };
@@ -355,7 +361,7 @@ impl UniversalVideoTrack {
// Update stats // Update stats
let mut stats = self.stats.lock().await; let mut stats = self.stats.lock().await;
stats.frames_sent += 1; stats.frames_sent += 1;
stats.bytes_sent += data.len() as u64; stats.bytes_sent += data_len as u64;
if is_keyframe { if is_keyframe {
stats.keyframes_sent += 1; stats.keyframes_sent += 1;
} }
@@ -367,18 +373,19 @@ impl UniversalVideoTrack {
/// ///
/// Pass raw Annex B data directly to the official HevcPayloader. /// Pass raw Annex B data directly to the official HevcPayloader.
/// The payloader handles NAL parsing, VPS/SPS/PPS caching, AP generation, and FU fragmentation. /// The payloader handles NAL parsing, VPS/SPS/PPS caching, AP generation, and FU fragmentation.
async fn write_h265_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> { async fn write_h265_frame(&self, data: Bytes, is_keyframe: bool) -> Result<()> {
// Pass raw Annex B data directly to the official HevcPayloader // Pass raw Annex B data directly to the official HevcPayloader
self.send_h265_rtp(data, is_keyframe).await self.send_h265_rtp(data, is_keyframe).await
} }
/// Write VP8 frame /// Write VP8 frame
async fn write_vp8_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> { async fn write_vp8_frame(&self, data: Bytes, is_keyframe: bool) -> Result<()> {
// VP8 frames are sent directly without NAL parsing // VP8 frames are sent directly without NAL parsing
// Calculate frame duration based on configured FPS // Calculate frame duration based on configured FPS
let frame_duration = Duration::from_micros(1_000_000 / self.config.fps.max(1) as u64); let frame_duration = Duration::from_micros(1_000_000 / self.config.fps.max(1) as u64);
let data_len = data.len();
let sample = Sample { let sample = Sample {
data: Bytes::copy_from_slice(data), data,
duration: frame_duration, duration: frame_duration,
..Default::default() ..Default::default()
}; };
@@ -397,7 +404,7 @@ impl UniversalVideoTrack {
// Update stats // Update stats
let mut stats = self.stats.lock().await; let mut stats = self.stats.lock().await;
stats.frames_sent += 1; stats.frames_sent += 1;
stats.bytes_sent += data.len() as u64; stats.bytes_sent += data_len as u64;
if is_keyframe { if is_keyframe {
stats.keyframes_sent += 1; stats.keyframes_sent += 1;
} }
@@ -406,12 +413,13 @@ impl UniversalVideoTrack {
} }
/// Write VP9 frame /// Write VP9 frame
async fn write_vp9_frame(&self, data: &[u8], is_keyframe: bool) -> Result<()> { async fn write_vp9_frame(&self, data: Bytes, is_keyframe: bool) -> Result<()> {
// VP9 frames are sent directly without NAL parsing // VP9 frames are sent directly without NAL parsing
// Calculate frame duration based on configured FPS // Calculate frame duration based on configured FPS
let frame_duration = Duration::from_micros(1_000_000 / self.config.fps.max(1) as u64); let frame_duration = Duration::from_micros(1_000_000 / self.config.fps.max(1) as u64);
let data_len = data.len();
let sample = Sample { let sample = Sample {
data: Bytes::copy_from_slice(data), data,
duration: frame_duration, duration: frame_duration,
..Default::default() ..Default::default()
}; };
@@ -430,7 +438,7 @@ impl UniversalVideoTrack {
// Update stats // Update stats
let mut stats = self.stats.lock().await; let mut stats = self.stats.lock().await;
stats.frames_sent += 1; stats.frames_sent += 1;
stats.bytes_sent += data.len() as u64; stats.bytes_sent += data_len as u64;
if is_keyframe { if is_keyframe {
stats.keyframes_sent += 1; stats.keyframes_sent += 1;
} }
@@ -439,7 +447,7 @@ impl UniversalVideoTrack {
} }
/// Send H265 NAL units via custom H265Payloader /// Send H265 NAL units via custom H265Payloader
async fn send_h265_rtp(&self, data: &[u8], is_keyframe: bool) -> Result<()> { async fn send_h265_rtp(&self, payload: Bytes, is_keyframe: bool) -> Result<()> {
let rtp_track = match &self.track { let rtp_track = match &self.track {
TrackType::Rtp(t) => t, TrackType::Rtp(t) => t,
TrackType::Sample(_) => { TrackType::Sample(_) => {
@@ -459,7 +467,6 @@ impl UniversalVideoTrack {
// Minimize lock hold time: only hold lock during payload generation and state update // Minimize lock hold time: only hold lock during payload generation and state update
let (payloads, timestamp, seq_start, num_payloads) = { let (payloads, timestamp, seq_start, num_payloads) = {
let mut state = h265_state.lock().await; let mut state = h265_state.lock().await;
let payload = Bytes::copy_from_slice(data);
// Use custom H265Payloader to fragment the data // Use custom H265Payloader to fragment the data
let payloads = state.payloader.payload(RTP_MTU, &payload); let payloads = state.payloader.payload(RTP_MTU, &payload);

View File

@@ -1,7 +1,7 @@
{ {
"name": "web", "name": "web",
"private": true, "private": true,
"version": "0.0.0", "version": "0.1.1",
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "vite", "dev": "vite",

View File

@@ -49,8 +49,10 @@ const systemStore = useSystemStore()
const overflowMenuOpen = ref(false) const overflowMenuOpen = ref(false)
// MSD is only available when HID backend is not CH9329 (CH9329 is serial-only, no USB gadget) // MSD is only available when HID backend is not CH9329 (CH9329 is serial-only, no USB gadget)
const hidBackend = computed(() => (systemStore.hid?.backend ?? '').toLowerCase())
const isCh9329Backend = computed(() => hidBackend.value.includes('ch9329'))
const showMsd = computed(() => { const showMsd = computed(() => {
return props.isAdmin && systemStore.hid?.backend !== 'ch9329' return props.isAdmin && !isCh9329Backend.value
}) })
const props = defineProps<{ const props = defineProps<{
@@ -310,5 +312,5 @@ const extensionOpen = ref(false)
</div> </div>
<!-- MSD Dialog --> <!-- MSD Dialog -->
<MsdDialog v-model:open="msdDialogOpen" /> <MsdDialog v-if="showMsd" v-model:open="msdDialogOpen" />
</template> </template>

View File

@@ -114,6 +114,7 @@ function detectBrowserCodecSupport() {
// Check if a codec is supported by browser // Check if a codec is supported by browser
const isBrowserSupported = (codecId: string): boolean => { const isBrowserSupported = (codecId: string): boolean => {
if (codecId === 'mjpeg') return true
return browserSupportedCodecs.value.has(codecId) return browserSupportedCodecs.value.has(codecId)
} }
@@ -704,7 +705,7 @@ watch(currentConfig, () => {
v-for="format in availableFormats" v-for="format in availableFormats"
:key="format.format" :key="format.format"
:value="format.format" :value="format.format"
:class="['text-xs', { 'opacity-50': isFormatNotRecommended(format.format) }]" class="text-xs"
> >
<div class="flex items-center gap-2"> <div class="flex items-center gap-2">
<span>{{ format.description }}</span> <span>{{ format.description }}</span>

View File

@@ -502,6 +502,11 @@ export default {
notAvailable: 'Not available', notAvailable: 'Not available',
msdEnable: 'Enable MSD', msdEnable: 'Enable MSD',
msdEnableDesc: 'Enable to mount ISO images and virtual drives to the target machine', msdEnableDesc: 'Enable to mount ISO images and virtual drives to the target machine',
msdCh9329Warning: 'HID backend is CH9329, MSD is unavailable',
msdCh9329WarningDesc: 'CH9329 is a serial HID backend and does not support USB Gadget MSD',
msdDir: 'MSD directory',
msdDirDesc: 'MSD base directory containing images/ and ventoy/ subfolders',
msdDirHint: 'Changing this rebuilds MSD and updates console capacity stats',
willBeEnabledAfterSave: 'Will be enabled after save', willBeEnabledAfterSave: 'Will be enabled after save',
disabled: 'Disabled', disabled: 'Disabled',
msdDesc: 'Mass Storage Device allows you to mount ISO images and virtual drives to the target machine. Use the MSD panel on the main page to manage images.', msdDesc: 'Mass Storage Device allows you to mount ISO images and virtual drives to the target machine. Use the MSD panel on the main page to manage images.',

View File

@@ -502,6 +502,11 @@ export default {
notAvailable: '不可用', notAvailable: '不可用',
msdEnable: '启用 MSD', msdEnable: '启用 MSD',
msdEnableDesc: '启用后可以挂载 ISO 镜像和虚拟驱动器到目标机器', msdEnableDesc: '启用后可以挂载 ISO 镜像和虚拟驱动器到目标机器',
msdCh9329Warning: '当前 HID 后端为 CH9329MSD 功能不可用',
msdCh9329WarningDesc: 'CH9329 为串口 HID 方案,不支持 USB Gadget 的 MSD 功能',
msdDir: 'MSD 目录',
msdDirDesc: 'MSD 根目录,内部包含 images/ 和 ventoy/ 两个子目录',
msdDirHint: '修改后会重建 MSD控制台容量统计以该目录为准',
willBeEnabledAfterSave: '保存后生效', willBeEnabledAfterSave: '保存后生效',
disabled: '已禁用', disabled: '已禁用',
msdDesc: '虚拟存储设备允许您将 ISO 镜像和虚拟驱动器挂载到目标机器。请在主页面的 MSD 面板中管理镜像。', msdDesc: '虚拟存储设备允许您将 ISO 镜像和虚拟驱动器挂载到目标机器。请在主页面的 MSD 面板中管理镜像。',

View File

@@ -76,12 +76,8 @@ export interface HidConfig {
export interface MsdConfig { export interface MsdConfig {
/** Enable MSD functionality */ /** Enable MSD functionality */
enabled: boolean; enabled: boolean;
/** Storage path for ISO/IMG images */ /** MSD base directory (absolute path) */
images_path: string; msd_dir: string;
/** Path for Ventoy bootable drive file */
drive_path: string;
/** Ventoy drive size in MB (minimum 1024 MB / 1 GB) */
virtual_drive_size_mb: number;
} }
/** Driver type for ATX key operations */ /** Driver type for ATX key operations */
@@ -511,9 +507,7 @@ export interface HidConfigUpdate {
export interface MsdConfigUpdate { export interface MsdConfigUpdate {
enabled?: boolean; enabled?: boolean;
images_path?: string; msd_dir?: string;
drive_path?: string;
virtual_drive_size_mb?: number;
} }
export interface RustDeskConfigUpdate { export interface RustDeskConfigUpdate {

View File

@@ -233,6 +233,7 @@ const config = ref({
hid_serial_device: '', hid_serial_device: '',
hid_serial_baudrate: 9600, hid_serial_baudrate: 9600,
msd_enabled: false, msd_enabled: false,
msd_dir: '',
network_port: 8080, network_port: 8080,
encoder_backend: 'auto', encoder_backend: 'auto',
// STUN/TURN settings // STUN/TURN settings
@@ -297,6 +298,8 @@ const selectedBackendFormats = computed(() => {
return backend?.supported_formats || [] return backend?.supported_formats || []
}) })
const isCh9329Backend = computed(() => config.value.hid_backend === 'ch9329')
// Video selection computed properties // Video selection computed properties
import { watch } from 'vue' import { watch } from 'vue'
@@ -536,6 +539,7 @@ async function loadConfig() {
hid_serial_device: hid.ch9329_port || '', hid_serial_device: hid.ch9329_port || '',
hid_serial_baudrate: hid.ch9329_baudrate || 9600, hid_serial_baudrate: hid.ch9329_baudrate || 9600,
msd_enabled: msd.enabled || false, msd_enabled: msd.enabled || false,
msd_dir: msd.msd_dir || '',
network_port: 8080, // 从旧 API 加载 network_port: 8080, // 从旧 API 加载
encoder_backend: stream.encoder || 'auto', encoder_backend: stream.encoder || 'auto',
// STUN/TURN settings // STUN/TURN settings
@@ -1499,6 +1503,10 @@ onMounted(async () => {
<CardDescription>{{ t('settings.msdDesc') }}</CardDescription> <CardDescription>{{ t('settings.msdDesc') }}</CardDescription>
</CardHeader> </CardHeader>
<CardContent class="space-y-4"> <CardContent class="space-y-4">
<div v-if="isCh9329Backend" class="rounded-md border border-amber-200 bg-amber-50 px-3 py-2 text-sm text-amber-900">
<p class="font-medium">{{ t('settings.msdCh9329Warning') }}</p>
<p class="text-xs text-amber-900/80">{{ t('settings.msdCh9329WarningDesc') }}</p>
</div>
<div class="flex items-center justify-between"> <div class="flex items-center justify-between">
<div class="space-y-0.5"> <div class="space-y-0.5">
<Label for="msd-enabled">{{ t('settings.msdEnable') }}</Label> <Label for="msd-enabled">{{ t('settings.msdEnable') }}</Label>
@@ -1506,11 +1514,21 @@ onMounted(async () => {
</div> </div>
<Switch <Switch
id="msd-enabled" id="msd-enabled"
:disabled="isCh9329Backend"
:model-value="config.msd_enabled" :model-value="config.msd_enabled"
@update:model-value="onMsdEnabledChange" @update:model-value="onMsdEnabledChange"
/> />
</div> </div>
<Separator /> <Separator />
<div class="space-y-4">
<div class="space-y-2">
<Label for="msd-dir">{{ t('settings.msdDir') }}</Label>
<Input id="msd-dir" v-model="config.msd_dir" placeholder="/etc/one-kvm/msd" :disabled="isCh9329Backend" />
<p class="text-xs text-muted-foreground">{{ t('settings.msdDirDesc') }}</p>
</div>
<p class="text-xs text-muted-foreground">{{ t('settings.msdDirHint') }}</p>
</div>
<Separator />
<div class="flex items-center justify-between"> <div class="flex items-center justify-between">
<div> <div>
<p class="text-sm font-medium">{{ t('settings.msdStatus') }}</p> <p class="text-sm font-medium">{{ t('settings.msdStatus') }}</p>