From e670f1ffd130c0ecfa63b1ac543d58be1dfbde34 Mon Sep 17 00:00:00 2001
From: mofeng-git
Date: Sat, 10 Jan 2026 10:59:00 +0800
Subject: [PATCH] refactor: upgrade dependency versions and optimize the build system
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Upgrade core dependencies (axum 0.8, tower-http 0.6, alsa 0.11, etc.)
- Simplify the cross-compilation configuration and switch to Debian 11 for better compatibility
- Add Debian packaging support (debuerreotype templates)
- Remove the standalone mjpeg decoder and simplify the video module
- Statically link libx264/libx265/libopus into the binary
---
 Cargo.toml | 47 +-
 Cross.toml | 37 +-
 build/Dockerfile.runtime | 30 +-
 build/build-images.sh | 80 +++
 .../{Dockerfile.aarch64 => Dockerfile.arm64} | 128 +++--
 build/cross/Dockerfile.armv7 | 126 +++--
 build/cross/Dockerfile.x86_64 | 101 ++--
 build/debian/changelog.tpl | 5 +
 build/debian/control.tpl | 22 +
 build/debian/copyright | 29 ++
 build/debian/postinst.tpl | 39 ++
 build/debian/prerm.tpl | 28 +
 build/one-kvm.service | 15 +
 build/package-deb.sh | 178 +++++++
 build/package-docker.sh | 2 +-
 libs/hwcodec/build.rs | 6 +-
 libs/ventoy-img-rs/Cargo.toml | 8 +-
 libs/ventoy-img-rs/README.md | 2 +-
 res/vcpkg/libyuv/build.rs | 52 +-
 res/vcpkg/libyuv/src/lib.rs | 138 -----
 src/audio/capture.rs | 38 +-
 src/hid/datachannel.rs | 1 +
 src/hid/websocket.rs | 6 +-
 src/main.rs | 4 +-
 src/msd/ventoy_drive.rs | 88 ++++
 src/otg/manager.rs | 6 +-
 src/otg/service.rs | 2 +-
 src/rustdesk/config.rs | 9 +-
 src/stream/ws_hid.rs | 2 +-
 src/utils/throttle.rs | 9 +
 src/video/convert.rs | 86 ----
 src/video/decoder/mjpeg.rs | 481 ------------------
 src/video/decoder/mod.rs | 10 +-
 src/video/encoder/codec.rs | 2 +-
 src/video/h264_pipeline.rs | 70 +--
 src/video/mod.rs | 3 +-
 src/video/shared_video_pipeline.rs | 95 +---
 src/web/audio_ws.rs | 6 +-
 src/web/handlers/terminal.rs | 4 +-
 src/web/routes.rs | 26 +-
 src/web/static_files.rs | 2 +-
 src/web/ws.rs | 8 +-
 src/webrtc/h265_payloader.rs | 2 +-
 src/webrtc/rtp.rs | 2 +-
 src/webrtc/webrtc_streamer.rs | 2 +-
 web/src/views/SettingsView.vue | 12 -
 46 files changed, 893 insertions(+), 1156 deletions(-)
 create mode 100755 build/build-images.sh
 rename build/cross/{Dockerfile.aarch64 => Dockerfile.arm64} (69%)
 create mode 100644 build/debian/changelog.tpl
 create mode 100644 build/debian/control.tpl
 create mode 100644 build/debian/copyright
 create mode 100644 build/debian/postinst.tpl
 create mode 100644 build/debian/prerm.tpl
 create mode 100644 build/one-kvm.service
 create mode 100755 build/package-deb.sh
 delete mode 100644 src/video/decoder/mjpeg.rs

diff --git a/Cargo.toml b/Cargo.toml
index 6dfe6769..6172656f 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,7 +4,7 @@ version = "0.1.0"
 edition = "2021"
 authors = ["SilentWind"]
 description = "A open and lightweight IP-KVM solution written in Rust"
-license = "GPL-3.0"
+license = "GPL-2.0"
 repository = "https://github.com/mofeng-git/One-KVM"
 keywords = ["kvm", "ipkvm", "remote-management", "embedded"]
 categories = ["embedded", "network-programming"]
@@ -15,9 +15,9 @@ tokio = { version = "1", features = ["full"] }
 tokio-util = { version = "0.7", features = ["rt"] }

 # Web framework
-axum = { version = "0.7", features = ["ws", "multipart", "tokio"] }
-axum-extra = { version = "0.9", features = ["typed-header", "cookie"] }
-tower-http = { version = "0.5", features = ["fs", "cors", "trace", "compression-gzip"] }
+axum = { version = "0.8", features = ["ws", "multipart", "tokio"] }
+axum-extra = { version = "0.12", features = ["typed-header", "cookie"] }
+tower-http = { 
version = "0.6", features = ["fs", "cors", "trace", "compression-gzip"] } # Database - Use bundled SQLite for static linking sqlx = { version = "0.8", features = ["runtime-tokio", "sqlite"] } @@ -31,22 +31,22 @@ tracing = "0.1" tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] } # Error handling -thiserror = "1" +thiserror = "2" anyhow = "1" # Authentication argon2 = "0.5" -rand = "0.8" +rand = "0.9" # Utilities uuid = { version = "1", features = ["v4", "serde"] } chrono = { version = "0.4", features = ["serde"] } base64 = "0.22" -nix = { version = "0.29", features = ["fs", "net", "hostname", "poll"] } +nix = { version = "0.30", features = ["fs", "net", "hostname", "poll"] } # HTTP client (for URL downloads) -# Use rustls-tls by default, but allow native-tls for systems with older GLIBC -reqwest = { version = "0.12", features = ["stream", "rustls-tls"], default-features = false } +# Use rustls by default, but allow native-tls for systems with older GLIBC +reqwest = { version = "0.13", features = ["stream", "rustls"], default-features = false } urlencoding = "2" # Static file embedding @@ -55,10 +55,8 @@ mime_guess = "2" # TLS/HTTPS rustls = { version = "0.23", features = ["ring"] } -rustls-pemfile = "2" -tokio-rustls = { version = "0.26", features = ["ring"] } -rcgen = "0.13" -axum-server = { version = "0.7", features = ["tls-rustls"] } +rcgen = "0.14" +axum-server = { version = "0.8", features = ["tls-rustls"] } # CLI argument parsing clap = { version = "4", features = ["derive"] } @@ -70,11 +68,11 @@ time = "0.3" v4l = "0.14" # JPEG encoding (libjpeg-turbo, SIMD accelerated) -turbojpeg = "1.1" +turbojpeg = "1.3" # Bytes handling bytes = "1" -bytemuck = { version = "1.14", features = ["derive"] } +bytemuck = { version = "1.24", features = ["derive"] } # Frame deduplication (hash-based comparison) xxhash-rust = { version = "0.8", features = ["xxh64"] } @@ -84,11 +82,11 @@ async-stream = "0.3" futures = "0.3" # WebSocket client (for ttyd proxy) -tokio-tungstenite = "0.24" +tokio-tungstenite = "0.28" # High-performance synchronization parking_lot = "0.12" -arc-swap = "1.7" +arc-swap = "1.8" # WebRTC webrtc = "0.14" @@ -96,7 +94,7 @@ rtp = "0.14" # Audio (ALSA capture + Opus encoding) # Note: audiopus links to libopus.so (unavoidable for audio support) -alsa = "0.9" +alsa = "0.11" audiopus = "0.2" # HID (serial port for CH9329) @@ -114,7 +112,7 @@ gpio-cdev = "0.6" hwcodec = { path = "libs/hwcodec" } # RustDesk protocol support -protobuf = { version = "3.4", features = ["with-bytes"] } +protobuf = { version = "3.7", features = ["with-bytes"] } sodiumoxide = "0.2" sha2 = "0.10" @@ -129,8 +127,8 @@ tokio-test = "0.4" tempfile = "3" [build-dependencies] -protobuf-codegen = "3.4" -toml = "0.8" +protobuf-codegen = "3.7" +toml = "0.9" [profile.release] opt-level = 3 @@ -142,9 +140,4 @@ panic = "abort" # Static linking profile for musl targets [profile.release-static] inherits = "release" -opt-level = "z" # Optimize for size - -# Cross-compilation targets -# aarch64-unknown-linux-gnu (ARM64) - Primary target -# armv7-unknown-linux-gnueabihf (ARMv7) -# x86_64-unknown-linux-gnu (x86_64) \ No newline at end of file +opt-level = "z" # Optimize for size \ No newline at end of file diff --git a/Cross.toml b/Cross.toml index dfba5d16..ad6e799b 100644 --- a/Cross.toml +++ b/Cross.toml @@ -1,49 +1,16 @@ # Cross-compilation configuration for One-KVM -# Uses custom Debian 12 based images for consistent build/runtime environment +# Uses pre-built custom Debian 11 based images for 
consistent build/runtime environment # See: https://github.com/cross-rs/cross [build] # Default Docker image settings default-target = "x86_64-unknown-linux-gnu" -[build.env] -passthrough = [ - "RUST_BACKTRACE", - "CARGO_INCREMENTAL", -] - -# x86_64 target - use custom Debian 12 image [target.x86_64-unknown-linux-gnu] dockerfile = "build/cross/Dockerfile.x86_64" -[target.x86_64-unknown-linux-gnu.env] -passthrough = [ - "PKG_CONFIG_ALLOW_CROSS=1", - "PKG_CONFIG_PATH=/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/share/pkgconfig", -] - -# ARM64 target - use custom Debian 12 image with multiarch [target.aarch64-unknown-linux-gnu] -dockerfile = "build/cross/Dockerfile.aarch64" +dockerfile = "build/cross/Dockerfile.arm64" -[target.aarch64-unknown-linux-gnu.env] -passthrough = [ - "PKG_CONFIG_ALLOW_CROSS=1", - "PKG_CONFIG_PATH=/usr/lib/aarch64-linux-gnu/pkgconfig:/usr/share/pkgconfig", - "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc", - "CC_aarch64_unknown_linux_gnu=aarch64-linux-gnu-gcc", - "CXX_aarch64_unknown_linux_gnu=aarch64-linux-gnu-g++", -] - -# ARMv7 target - use custom Debian 12 image with multiarch [target.armv7-unknown-linux-gnueabihf] dockerfile = "build/cross/Dockerfile.armv7" - -[target.armv7-unknown-linux-gnueabihf.env] -passthrough = [ - "PKG_CONFIG_ALLOW_CROSS=1", - "PKG_CONFIG_PATH=/usr/lib/arm-linux-gnueabihf/pkgconfig:/usr/share/pkgconfig", - "CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER=arm-linux-gnueabihf-gcc", - "CC_armv7_unknown_linux_gnueabihf=arm-linux-gnueabihf-gcc", - "CXX_armv7_unknown_linux_gnueabihf=arm-linux-gnueabihf-g++", -] diff --git a/build/Dockerfile.runtime b/build/Dockerfile.runtime index fc91b443..44617adf 100644 --- a/build/Dockerfile.runtime +++ b/build/Dockerfile.runtime @@ -1,32 +1,36 @@ # One-KVM Runtime Image # This Dockerfile only packages pre-compiled binaries (no compilation) # Used after cross-compiling with `cross build` +# Using Debian 11 for maximum compatibility (GLIBC 2.31) ARG TARGETPLATFORM=linux/amd64 -FROM debian:12-slim +FROM debian:11-slim ARG TARGETPLATFORM # Install runtime dependencies in a single layer -# Static linked: FFmpeg core, libyuv, libvpx, libjpeg-turbo -# Dynamic linked: hardware acceleration drivers, GPL codecs (x264/x265) +# All codec libraries (libx264, libx265, libopus) are now statically linked +# Only hardware acceleration drivers and core system libraries remain dynamic RUN apt-get update && \ apt-get install -y --no-install-recommends \ - # Core runtime (all platforms) - libasound2 \ - libv4l-0 \ - libudev1 \ - libdrm2 \ - libopus0 \ + # Core runtime (all platforms) - no codec libs needed ca-certificates \ - # GPL codecs (must be dynamic for license compliance) - libx264-164 \ - libx265-199 && \ + libudev1 \ + libasound2 \ + # v4l2 is handled by kernel, minimal userspace needed + libv4l-0 \ + && \ # Platform-specific hardware acceleration if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \ apt-get install -y --no-install-recommends \ - libva2 libva-drm2 libva-x11-2 libx11-6 libxcb1 libmfx1; \ + libva2 libva-drm2 libva-x11-2 libx11-6 libxcb1 libxau6 libxdmcp6 libmfx1; \ + elif [ "$TARGETPLATFORM" = "linux/arm64" ]; then \ + apt-get install -y --no-install-recommends \ + libdrm2 libva2; \ + elif [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then \ + apt-get install -y --no-install-recommends \ + libdrm2 libva2; \ fi && \ rm -rf /var/lib/apt/lists/* && \ mkdir -p /etc/one-kvm/ventoy diff --git a/build/build-images.sh b/build/build-images.sh new file mode 100755 index 00000000..106a9b6e --- /dev/null 
+++ b/build/build-images.sh @@ -0,0 +1,80 @@ +#!/bin/bash +# Build cross-compiled binaries using cross with custom Dockerfiles +# Usage: ./build/build-images.sh [arch] +# Example: ./build/build-images.sh x86_64 + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_DIR="$(dirname "$SCRIPT_DIR")" + +# Supported architectures (Rust target) +ARCH_MAP=( + "x86_64-unknown-linux-gnu" + "aarch64-unknown-linux-gnu" + "armv7-unknown-linux-gnueabihf" +) + +# Build for specific architecture using cross +build_arch() { + local rust_target="$1" + + echo "=== Building: $rust_target (via cross with custom Dockerfile) ===" + cross build --release --target "$rust_target" +} + +# Main +case "${1:-all}" in + all) + for target in "${ARCH_MAP[@]}"; do + build_arch "$target" + done + ;; + x86_64|arm64|armv7) + case "$1" in + x86_64) build_arch "x86_64-unknown-linux-gnu" ;; + arm64) build_arch "aarch64-unknown-linux-gnu" ;; + armv7) build_arch "armv7-unknown-linux-gnueabihf" ;; + esac + ;; + help|--help|-h) + echo "Usage: $0 [arch|help]" + echo "" + echo "Commands:" + echo " all (default) Build all architectures" + echo " x86_64 Build only x86_64" + echo " arm64 Build only arm64" + echo " armv7 Build only ARMv7" + echo "" + echo "Examples:" + echo " $0 # Build all" + echo " $0 x86_64 # Build x86_64 only" + exit 0 + ;; + *) + echo "Error: Unknown argument: $1" + exit 1 + ;; +esac + +echo "" +echo "Binaries built:" +for target in "${ARCH_MAP[@]}"; do + if [ -f "$PROJECT_DIR/target/$target/release/one-kvm" ]; then + echo " $target: OK" + fi +done +echo "" +echo "Static libraries:" +for target in "${ARCH_MAP[@]}"; do + case "$target" in + x86_64-unknown-linux-gnu) gnu_target="x86_64-linux-gnu" ;; + aarch64-unknown-linux-gnu) gnu_target="aarch64-linux-gnu" ;; + armv7-unknown-linux-gnueabihf) gnu_target="armv7-linux-gnueabihf" ;; + esac + if [ -d "$PROJECT_DIR/target/one-kvm-libs/$gnu_target/lib" ]; then + echo " $gnu_target: OK" + fi +done +echo "" +echo "Next step: ./build/package-docker.sh or ./build/package-deb.sh" diff --git a/build/cross/Dockerfile.aarch64 b/build/cross/Dockerfile.arm64 similarity index 69% rename from build/cross/Dockerfile.aarch64 rename to build/cross/Dockerfile.arm64 index 89730253..158b8fe9 100644 --- a/build/cross/Dockerfile.aarch64 +++ b/build/cross/Dockerfile.arm64 @@ -1,7 +1,7 @@ -# Cross-compilation image for ARM64 based on Debian 12 -# Uses multiarch to install ARM64 libraries on x86_64 host +# Cross-compilation image for ARM64 based on Debian 11 +# Build on Debian 11 (GLIBC 2.31) for maximum runtime compatibility -FROM debian:12 +FROM debian:11 # Set Rustup mirrors (Aliyun) ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \ @@ -28,7 +28,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ git \ libclang-dev \ llvm \ - mold \ meson \ ninja-build \ wget \ @@ -36,6 +35,10 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gcc-aarch64-linux-gnu \ g++-aarch64-linux-gnu \ libc6-dev-arm64-cross \ + # Autotools for libopus (requires autoreconf) + autoconf \ + automake \ + libtool \ && rm -rf /var/lib/apt/lists/* # Install ARM64 development libraries (without VAAPI/X11 for ARM) @@ -44,24 +47,22 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ libv4l-dev:arm64 \ libudev-dev:arm64 \ zlib1g-dev:arm64 \ - # Note: libjpeg-turbo, libyuv, libvpx are built from source below for static linking - libx264-dev:arm64 \ - libx265-dev:arm64 \ - libopus-dev:arm64 \ + # Note: libjpeg-turbo, libyuv, libvpx, 
libx264, libx265, libopus are built from source below for static linking libdrm-dev:arm64 \ && rm -rf /var/lib/apt/lists/* # Build static libjpeg-turbo from source (cross-compile for ARM64) RUN git clone --depth 1 https://github.com/libjpeg-turbo/libjpeg-turbo /tmp/libjpeg-turbo \ && cd /tmp/libjpeg-turbo \ - && cmake -B build -DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ + && mkdir build && cd build \ + && cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ + -DCMAKE_INSTALL_PREFIX=/usr/aarch64-linux-gnu \ -DCMAKE_SYSTEM_NAME=Linux \ -DCMAKE_SYSTEM_PROCESSOR=aarch64 \ -DCMAKE_C_COMPILER=aarch64-linux-gnu-gcc \ - -DCMAKE_INSTALL_PREFIX=/opt/one-kvm-libs/aarch64-linux-gnu \ -DENABLE_SHARED=OFF -DENABLE_STATIC=ON \ - && cmake --build build -j$(nproc) \ - && cmake --install build \ + && make -j$(nproc) \ + && make install \ && rm -rf /tmp/libjpeg-turbo # Build static libyuv from source (cross-compile for ARM64) @@ -69,16 +70,13 @@ RUN git clone --depth 1 https://github.com/lemenkov/libyuv /tmp/libyuv \ && cd /tmp/libyuv \ && mkdir build && cd build \ && cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ + -DCMAKE_INSTALL_PREFIX=/usr/aarch64-linux-gnu \ -DCMAKE_SYSTEM_NAME=Linux \ -DCMAKE_SYSTEM_PROCESSOR=aarch64 \ -DCMAKE_C_COMPILER=aarch64-linux-gnu-gcc \ -DCMAKE_CXX_COMPILER=aarch64-linux-gnu-g++ \ - -DCMAKE_PREFIX_PATH=/opt/one-kvm-libs/aarch64-linux-gnu \ - -DCMAKE_INSTALL_PREFIX=/opt/one-kvm-libs/aarch64-linux-gnu \ && make -j$(nproc) \ - && mkdir -p /opt/one-kvm-libs/aarch64-linux-gnu/lib \ - && cp libyuv.a /opt/one-kvm-libs/aarch64-linux-gnu/lib/ \ - && cp -r ../include /opt/one-kvm-libs/aarch64-linux-gnu/ \ + && make install \ && rm -rf /tmp/libyuv # Build static libvpx from source (cross-compile for ARM64) @@ -91,7 +89,8 @@ RUN git clone --depth 1 https://github.com/webmproject/libvpx /tmp/libvpx \ && export LD=aarch64-linux-gnu-ld \ && export AR=aarch64-linux-gnu-ar \ && export CROSS=aarch64-linux-gnu- \ - && ./configure --prefix=/opt/one-kvm-libs/aarch64-linux-gnu \ + && ./configure \ + --prefix=/usr/aarch64-linux-gnu \ --target=arm64-linux-gcc \ --enable-static --disable-shared --enable-pic \ --disable-examples --disable-tools --disable-docs \ @@ -102,9 +101,79 @@ RUN git clone --depth 1 https://github.com/webmproject/libvpx /tmp/libvpx \ && file libvpx.a \ && make install \ && echo "=== libvpx: Verifying installed library ===" \ - && file /opt/one-kvm-libs/aarch64-linux-gnu/lib/libvpx.a \ + && file /usr/aarch64-linux-gnu/lib/libvpx.a \ && rm -rf /tmp/libvpx +# Build static libx264 from source (cross-compile for ARM64) +RUN git clone --depth 1 https://code.videolan.org/videolan/x264.git /tmp/x264 \ + && cd /tmp/x264 \ + && export CC=aarch64-linux-gnu-gcc \ + && export AR=aarch64-linux-gnu-ar \ + && export RANLIB=aarch64-linux-gnu-ranlib \ + && ./configure \ + --prefix=/usr/aarch64-linux-gnu \ + --host=aarch64-linux-gnu \ + --enable-static \ + --enable-pic \ + --disable-cli \ + && make -j$(nproc) \ + && make install \ + && rm -rf /tmp/x264 + +# Build static libx265 from source (cross-compile for ARM64) +RUN git clone --depth 1 https://bitbucket.org/multicoreware/x265_git /tmp/x265 \ + && cd /tmp/x265 \ + && cd source \ + && mkdir -p build \ + && cd build \ + && cmake .. 
-DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=/usr/aarch64-linux-gnu \ + -DCMAKE_SYSTEM_NAME=Linux \ + -DCMAKE_SYSTEM_PROCESSOR=aarch64 \ + -DCMAKE_C_COMPILER=aarch64-linux-gnu-gcc \ + -DCMAKE_CXX_COMPILER=aarch64-linux-gnu-g++ \ + -DENABLE_SHARED=OFF \ + -DENABLE_CLI=OFF \ + -DENABLE_NEON_DOTPROD=OFF \ + -DENABLE_NEON_I8MM=OFF \ + -DENABLE_SVE=OFF \ + -DENABLE_SVE2=OFF \ + -DENABLE_SVE2_BITPERM=OFF \ + -DBUILD_SHARED_LIBS=OFF \ + && make -j$(nproc) \ + && make install \ + && rm -rf /tmp/x265 + +# Create pkg-config file for x265 (required by FFmpeg) +RUN mkdir -p /usr/aarch64-linux-gnu/lib/pkgconfig && \ + cat > /usr/aarch64-linux-gnu/lib/pkgconfig/x265.pc < /tmp/aarch64-cross.txt \ @@ -146,10 +216,11 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \ -Dlibrga_demo=false \ && ninja -C build \ && ninja -C build install \ + && sed -i 's/^Libs:.*$/& -lstdc++ -lm -lpthread/' /usr/aarch64-linux-gnu/lib/pkgconfig/librga.pc \ && cd .. \ # Create pkg-config wrapper for cross-compilation && echo '#!/bin/sh' > /tmp/aarch64-pkg-config \ - && echo 'export PKG_CONFIG_LIBDIR=/opt/one-kvm-libs/aarch64-linux-gnu/lib/pkgconfig:/usr/aarch64-linux-gnu/lib/pkgconfig:/usr/lib/aarch64-linux-gnu/pkgconfig' >> /tmp/aarch64-pkg-config \ + && echo 'export PKG_CONFIG_LIBDIR=/usr/aarch64-linux-gnu/lib/pkgconfig:/usr/lib/aarch64-linux-gnu/pkgconfig' >> /tmp/aarch64-pkg-config \ && echo 'export PKG_CONFIG_PATH=""' >> /tmp/aarch64-pkg-config \ && echo 'export PKG_CONFIG_SYSROOT_DIR=""' >> /tmp/aarch64-pkg-config \ && echo 'exec pkg-config "$@"' >> /tmp/aarch64-pkg-config \ @@ -157,7 +228,7 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \ # Build FFmpeg with RKMPP (minimal build for encoding only) && cd ffmpeg-rockchip \ && ./configure \ - --prefix=/opt/one-kvm-libs/aarch64-linux-gnu \ + --prefix=/usr/aarch64-linux-gnu \ --cross-prefix=aarch64-linux-gnu- \ --arch=aarch64 \ --target-os=linux \ @@ -235,25 +306,14 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \ # Add Rust target RUN rustup target add aarch64-unknown-linux-gnu -# Create symlink for mold to work with cross-compiler -RUN ln -s /usr/bin/mold /usr/bin/aarch64-linux-gnu-ld.mold - -# Copy entrypoint script -COPY build/cross/entrypoint.sh /usr/local/bin/cross-entrypoint.sh -RUN chmod +x /usr/local/bin/cross-entrypoint.sh - # Configure environment for cross-compilation ENV CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc \ CC_aarch64_unknown_linux_gnu=aarch64-linux-gnu-gcc \ CXX_aarch64_unknown_linux_gnu=aarch64-linux-gnu-g++ \ AR_aarch64_unknown_linux_gnu=aarch64-linux-gnu-ar \ - PKG_CONFIG_LIBDIR=/opt/one-kvm-libs/aarch64-linux-gnu/lib/pkgconfig:/usr/aarch64-linux-gnu/lib/pkgconfig:/usr/lib/aarch64-linux-gnu/pkgconfig \ + PKG_CONFIG_LIBDIR=/usr/aarch64-linux-gnu/lib/pkgconfig:/usr/lib/aarch64-linux-gnu/pkgconfig \ PKG_CONFIG_PATH="" \ PKG_CONFIG_ALLOW_CROSS=1 \ - LIBRARY_PATH="/opt/one-kvm-libs/aarch64-linux-gnu/lib" \ - CPATH="/opt/one-kvm-libs/aarch64-linux-gnu/include" \ FFMPEG_STATIC=1 \ LIBYUV_STATIC=1 \ - RUSTFLAGS="-C linker=aarch64-linux-gnu-gcc -C link-arg=-fuse-ld=mold" - -ENTRYPOINT ["/usr/local/bin/cross-entrypoint.sh"] + RUSTFLAGS="-C linker=aarch64-linux-gnu-gcc" diff --git a/build/cross/Dockerfile.armv7 b/build/cross/Dockerfile.armv7 index 9d521515..bb4cfc56 100644 --- a/build/cross/Dockerfile.armv7 +++ b/build/cross/Dockerfile.armv7 @@ -1,7 +1,7 @@ -# Cross-compilation image for ARMv7 based on Debian 12 -# Uses multiarch to install ARMv7 libraries on x86_64 host +# Cross-compilation image 
for ARMv7 based on Debian 11 +# Build on Debian 11 (GLIBC 2.31) for maximum runtime compatibility -FROM debian:12 +FROM debian:11 # Set Rustup mirrors (Aliyun) ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \ @@ -28,7 +28,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ git \ libclang-dev \ llvm \ - mold \ meson \ ninja-build \ wget \ @@ -36,6 +35,10 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gcc-arm-linux-gnueabihf \ g++-arm-linux-gnueabihf \ libc6-dev-armhf-cross \ + # Autotools for libopus (requires autoreconf) + autoconf \ + automake \ + libtool \ && rm -rf /var/lib/apt/lists/* # Install ARMv7 development libraries (without VAAPI/X11 for ARM) @@ -44,24 +47,21 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ libv4l-dev:armhf \ libudev-dev:armhf \ zlib1g-dev:armhf \ - # Note: libjpeg-turbo, libyuv, libvpx are built from source below for static linking - libx264-dev:armhf \ - libx265-dev:armhf \ - libopus-dev:armhf \ libdrm-dev:armhf \ && rm -rf /var/lib/apt/lists/* # Build static libjpeg-turbo from source (cross-compile for ARMv7) RUN git clone --depth 1 https://github.com/libjpeg-turbo/libjpeg-turbo /tmp/libjpeg-turbo \ && cd /tmp/libjpeg-turbo \ - && cmake -B build -DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ + && mkdir build && cd build \ + && cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ + -DCMAKE_INSTALL_PREFIX=/usr/arm-linux-gnueabihf \ -DCMAKE_SYSTEM_NAME=Linux \ -DCMAKE_SYSTEM_PROCESSOR=arm \ -DCMAKE_C_COMPILER=arm-linux-gnueabihf-gcc \ - -DCMAKE_INSTALL_PREFIX=/opt/one-kvm-libs/armv7-linux-gnueabihf \ -DENABLE_SHARED=OFF -DENABLE_STATIC=ON \ - && cmake --build build -j$(nproc) \ - && cmake --install build \ + && make -j$(nproc) \ + && make install \ && rm -rf /tmp/libjpeg-turbo # Build static libyuv from source (cross-compile for ARMv7) @@ -69,20 +69,16 @@ RUN git clone --depth 1 https://github.com/lemenkov/libyuv /tmp/libyuv \ && cd /tmp/libyuv \ && mkdir build && cd build \ && cmake .. 
-DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ + -DCMAKE_INSTALL_PREFIX=/usr/arm-linux-gnueabihf \ -DCMAKE_SYSTEM_NAME=Linux \ -DCMAKE_SYSTEM_PROCESSOR=arm \ -DCMAKE_C_COMPILER=arm-linux-gnueabihf-gcc \ -DCMAKE_CXX_COMPILER=arm-linux-gnueabihf-g++ \ - -DCMAKE_PREFIX_PATH=/opt/one-kvm-libs/armv7-linux-gnueabihf \ - -DCMAKE_INSTALL_PREFIX=/opt/one-kvm-libs/armv7-linux-gnueabihf \ && make -j$(nproc) \ - && mkdir -p /opt/one-kvm-libs/armv7-linux-gnueabihf/lib \ - && cp libyuv.a /opt/one-kvm-libs/armv7-linux-gnueabihf/lib/ \ - && cp -r ../include /opt/one-kvm-libs/armv7-linux-gnueabihf/ \ + && make install \ && rm -rf /tmp/libyuv # Build static libvpx from source (cross-compile for ARMv7) -# CC/CXX/LD/AR must be environment variables, not configure arguments RUN git clone --depth 1 https://github.com/webmproject/libvpx /tmp/libvpx \ && cd /tmp/libvpx \ && export CC=arm-linux-gnueabihf-gcc \ @@ -90,17 +86,83 @@ RUN git clone --depth 1 https://github.com/webmproject/libvpx /tmp/libvpx \ && export LD=arm-linux-gnueabihf-ld \ && export AR=arm-linux-gnueabihf-ar \ && export CROSS=arm-linux-gnueabihf- \ - && ./configure --prefix=/opt/one-kvm-libs/armv7-linux-gnueabihf \ + && ./configure \ + --prefix=/usr/arm-linux-gnueabihf \ --target=armv7-linux-gcc \ --enable-static --disable-shared --enable-pic \ --disable-examples --disable-tools --disable-docs \ --disable-unit-tests \ && make -j$(nproc) \ - && echo "=== libvpx: Checking architecture ===" \ - && file libvpx.a \ && make install \ && rm -rf /tmp/libvpx +# Build static libx264 from source (cross-compile for ARMv7) +RUN git clone --depth 1 https://code.videolan.org/videolan/x264.git /tmp/x264 \ + && cd /tmp/x264 \ + && export CC=arm-linux-gnueabihf-gcc \ + && export AR=arm-linux-gnueabihf-ar \ + && export RANLIB=arm-linux-gnueabihf-ranlib \ + && ./configure \ + --prefix=/usr/arm-linux-gnueabihf \ + --host=arm-linux-gnueabihf \ + --cross-prefix=arm-linux-gnueabihf- \ + --enable-static --disable-shared \ + --enable-pic \ + --disable-cli \ + && make -j$(nproc) \ + && make install \ + && rm -rf /tmp/x264 + +# Build static libx265 from source (cross-compile for ARMv7) +RUN git clone --depth 1 https://bitbucket.org/multicoreware/x265_git /tmp/x265 \ + && cd /tmp/x265 \ + && cd source \ + && mkdir -p build \ + && cd build \ + && cmake .. -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=/usr/arm-linux-gnueabihf \ + -DCMAKE_SYSTEM_NAME=Linux \ + -DCMAKE_SYSTEM_PROCESSOR=arm \ + -DCMAKE_C_COMPILER=arm-linux-gnueabihf-gcc \ + -DCMAKE_CXX_COMPILER=arm-linux-gnueabihf-g++ \ + -DENABLE_SHARED=OFF \ + -DENABLE_CLI=OFF \ + -DBUILD_SHARED_LIBS=OFF \ + -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ + && make -j$(nproc) \ + && make install \ + && rm -rf /tmp/x265 + +# Create pkg-config file for x265 (required by FFmpeg) +RUN mkdir -p /usr/arm-linux-gnueabihf/lib/pkgconfig && \ + cat > /usr/arm-linux-gnueabihf/lib/pkgconfig/x265.pc < /tmp/armhf-cross.txt \ @@ -142,10 +205,11 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \ -Dlibrga_demo=false \ && ninja -C build \ && ninja -C build install \ + && sed -i 's/^Libs:.*$/& -lstdc++ -lm -lpthread/' /usr/arm-linux-gnueabihf/lib/pkgconfig/librga.pc \ && cd .. 
\ # Create pkg-config wrapper for cross-compilation && echo '#!/bin/sh' > /tmp/armhf-pkg-config \ - && echo 'export PKG_CONFIG_LIBDIR=/opt/one-kvm-libs/armv7-linux-gnueabihf/lib/pkgconfig:/usr/arm-linux-gnueabihf/lib/pkgconfig:/usr/lib/arm-linux-gnueabihf/pkgconfig' >> /tmp/armhf-pkg-config \ + && echo 'export PKG_CONFIG_LIBDIR=/usr/arm-linux-gnueabihf/lib/pkgconfig:/usr/arm-linux-gnueabihf/lib/pkgconfig:/usr/lib/arm-linux-gnueabihf/pkgconfig' >> /tmp/armhf-pkg-config \ && echo 'export PKG_CONFIG_PATH=""' >> /tmp/armhf-pkg-config \ && echo 'export PKG_CONFIG_SYSROOT_DIR=""' >> /tmp/armhf-pkg-config \ && echo 'exec pkg-config "$@"' >> /tmp/armhf-pkg-config \ @@ -153,7 +217,7 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \ # Build FFmpeg with RKMPP (minimal build for encoding only) && cd ffmpeg-rockchip \ && ./configure \ - --prefix=/opt/one-kvm-libs/armv7-linux-gnueabihf \ + --prefix=/usr/arm-linux-gnueabihf \ --cross-prefix=arm-linux-gnueabihf- \ --arch=arm \ --target-os=linux \ @@ -231,25 +295,17 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \ # Add Rust target RUN rustup target add armv7-unknown-linux-gnueabihf -# Create symlink for mold to work with cross-compiler -RUN ln -s /usr/bin/mold /usr/bin/arm-linux-gnueabihf-ld.mold - -# Copy entrypoint script -COPY build/cross/entrypoint.sh /usr/local/bin/cross-entrypoint.sh -RUN chmod +x /usr/local/bin/cross-entrypoint.sh - # Configure environment for cross-compilation ENV CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER=arm-linux-gnueabihf-gcc \ CC_armv7_unknown_linux_gnueabihf=arm-linux-gnueabihf-gcc \ CXX_armv7_unknown_linux_gnueabihf=arm-linux-gnueabihf-g++ \ AR_armv7_unknown_linux_gnueabihf=arm-linux-gnueabihf-ar \ - PKG_CONFIG_LIBDIR=/opt/one-kvm-libs/armv7-linux-gnueabihf/lib/pkgconfig:/usr/arm-linux-gnueabihf/lib/pkgconfig:/usr/lib/arm-linux-gnueabihf/pkgconfig \ + PKG_CONFIG_LIBDIR=/usr/arm-linux-gnueabihf/lib/pkgconfig:/usr/lib/arm-linux-gnueabihf/pkgconfig \ PKG_CONFIG_PATH="" \ PKG_CONFIG_ALLOW_CROSS=1 \ - LIBRARY_PATH="/opt/one-kvm-libs/armv7-linux-gnueabihf/lib" \ - CPATH="/opt/one-kvm-libs/armv7-linux-gnueabihf/include" \ FFMPEG_STATIC=1 \ LIBYUV_STATIC=1 \ - RUSTFLAGS="-C linker=arm-linux-gnueabihf-gcc -C link-arg=-fuse-ld=mold" + RUSTFLAGS="-C linker=arm-linux-gnueabihf-gcc" -ENTRYPOINT ["/usr/local/bin/cross-entrypoint.sh"] +# Default command +CMD ["bash"] diff --git a/build/cross/Dockerfile.x86_64 b/build/cross/Dockerfile.x86_64 index b7fe2731..779b1b02 100644 --- a/build/cross/Dockerfile.x86_64 +++ b/build/cross/Dockerfile.x86_64 @@ -1,7 +1,7 @@ -# Cross-compilation image for x86_64 based on Debian 12 -# Matches the runtime environment exactly +# Cross-compilation image for x86_64 based on Debian 11 +# Build on Debian 11 (GLIBC 2.31) for maximum runtime compatibility -FROM debian:12 +FROM debian:11 # Set Rustup mirrors (Aliyun) ENV RUSTUP_UPDATE_ROOT=https://mirrors.aliyun.com/rustup/rustup \ @@ -17,7 +17,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ ENV PATH="/root/.cargo/bin:${PATH}" # Install build dependencies -# Note: libyuv, libvpx are built from source below for static linking +# Note: libyuv, libvpx, libx264, libx265, libopus are built from source below for static linking RUN apt-get update && apt-get install -y --no-install-recommends \ # Build tools build-essential \ @@ -28,20 +28,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ git \ libclang-dev \ llvm \ - mold \ wget \ + # Autotools for libopus (requires autoreconf) + autoconf \ + 
automake \ + libtool \ # Core system libraries libasound2-dev \ libv4l-dev \ libudev-dev \ zlib1g-dev \ - # Note: libjpeg-turbo is built from source below for static linking - # Video codec libraries (dynamic, for software fallback) - libx264-dev \ - libx265-dev \ - # Audio codec - libopus-dev \ - # Hardware acceleration + # Note: libjpeg-turbo, libx264, libx265, libopus are built from source below for static linking libva-dev \ libdrm-dev \ libmfx-dev \ @@ -55,11 +52,11 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ # Build static libjpeg-turbo from source (needed by libyuv) RUN git clone --depth 1 https://github.com/libjpeg-turbo/libjpeg-turbo /tmp/libjpeg-turbo \ && cd /tmp/libjpeg-turbo \ - && cmake -B build -DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ - -DCMAKE_INSTALL_PREFIX=/opt/one-kvm-libs/x86_64-linux-gnu \ + && mkdir build && cd build \ + && cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ -DENABLE_SHARED=OFF -DENABLE_STATIC=ON \ - && cmake --build build -j$(nproc) \ - && cmake --install build \ + && make -j$(nproc) \ + && make install \ && rm -rf /tmp/libjpeg-turbo # Build static libyuv from source (uses libjpeg-turbo headers) @@ -67,32 +64,75 @@ RUN git clone --depth 1 https://github.com/lemenkov/libyuv /tmp/libyuv \ && cd /tmp/libyuv \ && mkdir build && cd build \ && cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ - -DCMAKE_PREFIX_PATH=/opt/one-kvm-libs/x86_64-linux-gnu \ - -DCMAKE_INSTALL_PREFIX=/opt/one-kvm-libs/x86_64-linux-gnu \ && make -j$(nproc) \ - && mkdir -p /opt/one-kvm-libs/x86_64-linux-gnu/lib \ - && cp libyuv.a /opt/one-kvm-libs/x86_64-linux-gnu/lib/ \ - && cp -r ../include /opt/one-kvm-libs/x86_64-linux-gnu/ \ + && make install \ && rm -rf /tmp/libyuv # Build static libvpx from source RUN git clone --depth 1 https://github.com/webmproject/libvpx /tmp/libvpx \ && cd /tmp/libvpx \ - && ./configure --prefix=/opt/one-kvm-libs/x86_64-linux-gnu \ + && ./configure \ --enable-static --disable-shared --enable-pic \ --disable-examples --disable-tools --disable-docs \ && make -j$(nproc) \ && make install \ && rm -rf /tmp/libvpx +# Build static libx264 from source +RUN git clone --depth 1 https://code.videolan.org/videolan/x264.git /tmp/x264 \ + && cd /tmp/x264 \ + && ./configure --enable-static --disable-cli \ + && make -j$(nproc) \ + && make install \ + && rm -rf /tmp/x264 + +# Build static libx265 from source +RUN git clone --depth 1 https://bitbucket.org/multicoreware/x265_git /tmp/x265 \ + && cd /tmp/x265 \ + && cd source \ + && mkdir -p build \ + && cd build \ + && cmake .. -DCMAKE_BUILD_TYPE=Release \ + -DENABLE_SHARED=OFF \ + -DENABLE_CLI=OFF \ + -DBUILD_SHARED_LIBS=OFF \ + && make -j$(nproc) \ + && make install \ + && rm -rf /tmp/x265 + +# Create pkg-config file for x265 (required by FFmpeg) +# Fix: Added -lstdc++ -lm -ldl -lpthread to Libs for static linking compatibility +RUN mkdir -p /usr/local/lib/pkgconfig && \ + cat > /usr/local/lib/pkgconfig/x265.pc < {date} diff --git a/build/debian/control.tpl b/build/debian/control.tpl new file mode 100644 index 00000000..fc3c7e00 --- /dev/null +++ b/build/debian/control.tpl @@ -0,0 +1,22 @@ +Source: one-kvm +Section: admin +Priority: optional +Maintainer: SilentWind + +Package: one-kvm +Architecture: {arch} +Depends: ${{auto}}, ca-certificates{distsuffix} +Description: A open and lightweight IP-KVM solution written in Rust + Enables BIOS-level remote management of servers and workstations. + . 
+ One-KVM provides video capture, HID emulation (keyboard/mouse), + mass storage device forwarding, and ATX power control for + remote server management over IP. + . + Features: + * Hardware-accelerated video encoding (VAAPI, QSV, RKMPP) + * WebRTC and MJPEG streaming with low latency + * USB HID emulation via OTG gadget + * Mass storage device for ISO/IMG mounting + * ATX power control via GPIO or USB relay +Homepage: https://github.com/mofeng-git/One-KVM diff --git a/build/debian/copyright b/build/debian/copyright new file mode 100644 index 00000000..242d6dfb --- /dev/null +++ b/build/debian/copyright @@ -0,0 +1,29 @@ +Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Upstream-Name: one-kvm +Source: https://github.com/mofeng-git/one-KVM + +Files: * +Copyright: 2025 One-KVM contributors +License: GPL-2.0 + +Files: libs/ventoy-img-rs/resources/* +Copyright: Ventoy contributors +License: GPL-2.0 + +License: GPL-2.0 + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 2 of the License, or + (at your option) any later version. + . + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + . + You should have received a copy of the GNU General Public License + along with this program. If not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +On Debian systems, the full text of the GNU General Public License +version 2 can be found in the file `/usr/share/common-licenses/GPL-2'. 
diff --git a/build/debian/postinst.tpl b/build/debian/postinst.tpl new file mode 100644 index 00000000..88c81472 --- /dev/null +++ b/build/debian/postinst.tpl @@ -0,0 +1,39 @@ +#!/bin/bash +# Post-installation script for one-kvm + +set -e + +case "$1" in + configure|abort-upgrade|abort-remove|abort-deconfigure) + # Create data directory + mkdir -p /var/lib/one-kvm/ventoy + mkdir -p /var/log/one-kvm + + # Set permissions + chmod 755 /var/lib/one-kvm + chmod 755 /var/lib/one-kvm/ventoy + chmod 755 /var/log/one-kvm + + # Enable and start service (if systemd is available) + if [ -d /run/systemd/system ]; then + systemctl daemon-reload + systemctl enable one-kvm + # Don't start here, let user configure first + fi + ;; + triggered) + # Handle triggers (e.g., systemd restart) + if [ -d /run/systemd/system ]; then + systemctl restart one-kvm || true + fi + ;; + abort-rollback|failed-upgrade) + exit 0 + ;; + *) + echo "postinst called with unknown argument: $1" >&2 + exit 1 + ;; +esac + +exit 0 diff --git a/build/debian/prerm.tpl b/build/debian/prerm.tpl new file mode 100644 index 00000000..43234879 --- /dev/null +++ b/build/debian/prerm.tpl @@ -0,0 +1,28 @@ +#!/bin/bash +# Pre-removal script for one-kvm + +set -e + +case "$1" in + remove|purge) + # Stop service if running + if [ -d /run/systemd/system ]; then + systemctl stop one-kvm || true + systemctl disable one-kvm || true + fi + ;; + upgrade|deconfigure) + # Keep data on upgrade + : + ;; + failed-upgrade) + # Handle upgrade failure + : + ;; + *) + echo "prerm called with unknown argument: $1" >&2 + exit 1 + ;; +esac + +exit 0 diff --git a/build/one-kvm.service b/build/one-kvm.service new file mode 100644 index 00000000..fbc07d53 --- /dev/null +++ b/build/one-kvm.service @@ -0,0 +1,15 @@ +[Unit] +Description=One-KVM IP-KVM Service +Documentation=https://github.com/mofeng-git/One-KVM +After=network.target +Wants=network-online.target + +[Service] +Type=simple +User=root +ExecStart=/usr/bin/one-kvm +Restart=on-failure +RestartSec=5 + +[Install] +WantedBy=multi-user.target diff --git a/build/package-deb.sh b/build/package-deb.sh new file mode 100755 index 00000000..e754bbe9 --- /dev/null +++ b/build/package-deb.sh @@ -0,0 +1,178 @@ +#!/bin/bash +# Build deb packages from pre-compiled binaries +# Binaries are compiled once on Debian 11 (GLIBC 2.31) via build-images.sh +# This script packages them directly on the host using dpkg-deb +# Usage: ./build/build-deb.sh [arch] +# Example: ./build/build-deb.sh aarch64 + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_DIR="$(dirname "$SCRIPT_DIR")" + +# Version from Cargo.toml +VERSION=$(grep -m1 '^version =' "$PROJECT_DIR/Cargo.toml" | cut -d'"' -f2) +if [ -z "$VERSION" ]; then + echo "Error: Could not extract version from Cargo.toml" + exit 1 +fi + +OUTPUT_DIR="$PROJECT_DIR/target/debian" +mkdir -p "$OUTPUT_DIR" + +# Supported architectures +TARGETS=( + "x86_64-unknown-linux-gnu:amd64" + "aarch64-unknown-linux-gnu:arm64" + "armv7-unknown-linux-gnueabihf:armhf" +) + +# Package single architecture +package_arch() { + local RUST_TARGET="$1" + local DEB_ARCH="$2" + + echo "========================================" + echo "Packaging: $RUST_TARGET -> $DEB_ARCH" + echo "========================================" + + local BINARY_PATH="$PROJECT_DIR/target/$RUST_TARGET/release/one-kvm" + if [[ ! -f "$BINARY_PATH" ]]; then + echo "Error: Binary not found at $BINARY_PATH" + echo "Please run ./build/build-images.sh first." 
+ return 1 + fi + + local PKG_DIR="/tmp/one-kvm-pkg-$$" + local DEB_PATH="$OUTPUT_DIR/one-kvm_${VERSION}_${DEB_ARCH}.deb" + + # Create package structure + mkdir -p "$PKG_DIR/DEBIAN" + mkdir -p "$PKG_DIR/usr/bin" + mkdir -p "$PKG_DIR/etc/one-kvm/ventoy" + mkdir -p "$PKG_DIR/lib/systemd/system" + + # Copy binary + cp "$BINARY_PATH" "$PKG_DIR/usr/bin/one-kvm" + chmod 755 "$PKG_DIR/usr/bin/one-kvm" + + # Copy and process ventoy resources (decompress .xz files) + if [ -d "$PROJECT_DIR/libs/ventoy-img-rs/resources" ]; then + for file in "$PROJECT_DIR/libs/ventoy-img-rs/resources/"*; do + if [ -f "$file" ]; then + local filename=$(basename "$file") + if [[ "$filename" == *.xz ]]; then + # Decompress xz files to target dir (not in-place) + xz -d -c "$file" > "$PKG_DIR/etc/one-kvm/ventoy/${filename%.xz}" + else + cp "$file" "$PKG_DIR/etc/one-kvm/ventoy/" + fi + fi + done + fi + + # Copy systemd service file + if [ -f "$SCRIPT_DIR/one-kvm.service" ]; then + cp "$SCRIPT_DIR/one-kvm.service" "$PKG_DIR/lib/systemd/system/" + fi + + # Create postinst script (enable service on install) + cat > "$PKG_DIR/DEBIAN/postinst" <<'EOF' +#!/bin/bash +set -e + +case "$1" in + configure) + # Enable and start service + if [ -f /lib/systemd/system/one-kvm.service ]; then + systemctl enable one-kvm + systemctl start one-kvm || true + fi + ;; + abort-upgrade|abort-deconfigure|abort-remove) + ;; + *) + ;; +esac +exit 0 +EOF + chmod 755 "$PKG_DIR/DEBIAN/postinst" + + # Create prerm script (stop service on remove) + cat > "$PKG_DIR/DEBIAN/prerm" <<'EOF' +#!/bin/bash +set -e + +case "$1" in + remove|deconfigure) + if [ -f /lib/systemd/system/one-kvm.service ]; then + systemctl stop one-kvm || true + systemctl disable one-kvm || true + fi + ;; + upgrade) + if [ -f /lib/systemd/system/one-kvm.service ]; then + systemctl stop one-kvm || true + fi + ;; + failed-upgrade) + ;; + *) + ;; +esac +exit 0 +EOF + chmod 755 "$PKG_DIR/DEBIAN/prerm" + + # Create control file + cat > "$PKG_DIR/DEBIAN/control" <= 2.31), libgcc-s1, libstdc++6, libasound2 (>= 1.1), libva2 (>= 2.0), libdrm2 (>= 2.4), libx11-6 (>= 1.6), libxcb1 (>= 1.14) +Maintainer: SilentWind +Description: A open and lightweight IP-KVM solution + Enables BIOS-level remote management of servers and workstations. + Built on Debian 11, compatible with Debian 11+, Ubuntu 20.04+. +EOF + + # Build deb directly on host + dpkg-deb --build "$PKG_DIR" "$DEB_PATH" + + rm -rf "$PKG_DIR" + echo "Created: $DEB_PATH" +} + +# Main +if [ -n "$1" ]; then + # Package specific arch + FOUND=0 + for target in "${TARGETS[@]}"; do + IFS=':' read -r RUST_TARGET DEB_ARCH <<< "$target" + if [[ "$1" == "$DEB_ARCH" ]] || [[ "$1" == "$RUST_TARGET" ]]; then + package_arch "$RUST_TARGET" "$DEB_ARCH" + FOUND=1 + break + fi + done + + if [ $FOUND -eq 0 ]; then + echo "Error: Unknown architecture: $1" + echo "Available: amd64, arm64, armhf" + exit 1 + fi +else + # Package all architectures + for target in "${TARGETS[@]}"; do + IFS=':' read -r RUST_TARGET DEB_ARCH <<< "$target" + package_arch "$RUST_TARGET" "$DEB_ARCH" + done +fi + +echo "" +echo "========================================" +echo "All packages built successfully!" 
+echo "========================================" +ls -la "$OUTPUT_DIR"/*.deb diff --git a/build/package-docker.sh b/build/package-docker.sh index 813bbc59..ceb1217d 100755 --- a/build/package-docker.sh +++ b/build/package-docker.sh @@ -81,7 +81,7 @@ while [[ $# -gt 0 ]]; do BUILD_BINARY=true shift ;; - --help) + -h|--help) echo "Usage: $0 [OPTIONS]" echo "" echo "Package pre-compiled One-KVM binaries into Docker images." diff --git a/libs/hwcodec/build.rs b/libs/hwcodec/build.rs index 0b2727f1..7980a7fb 100644 --- a/libs/hwcodec/build.rs +++ b/libs/hwcodec/build.rs @@ -113,9 +113,9 @@ mod ffmpeg { path } else { match target_arch.as_str() { - "x86_64" => "/opt/one-kvm-libs/x86_64-linux-gnu", - "aarch64" => "/opt/one-kvm-libs/aarch64-linux-gnu", - "arm" => "/opt/one-kvm-libs/armv7-linux-gnueabihf", + "x86_64" => "/usr/local", + "aarch64" => "/usr/aarch64-linux-gnu", + "arm" => "/usr/arm-linux-gnueabihf", _ => "", } .to_string() diff --git a/libs/ventoy-img-rs/Cargo.toml b/libs/ventoy-img-rs/Cargo.toml index 907dc19b..ea878394 100644 --- a/libs/ventoy-img-rs/Cargo.toml +++ b/libs/ventoy-img-rs/Cargo.toml @@ -3,7 +3,7 @@ name = "ventoy-img" version = "0.1.0" edition = "2021" description = "Create and manage Ventoy bootable IMG files without root or loop devices" -license = "GPL-3.0" +license = "GPL-2.0" [dependencies] # CLI @@ -12,12 +12,6 @@ clap = { version = "4", features = ["derive"] } # Error handling thiserror = "1" -# Time handling -chrono = "0.4" - -# CRC32 for exFAT checksum -crc32fast = "1" - [dev-dependencies] tempfile = "3" diff --git a/libs/ventoy-img-rs/README.md b/libs/ventoy-img-rs/README.md index 3c7f6aa1..7a9f2e67 100644 --- a/libs/ventoy-img-rs/README.md +++ b/libs/ventoy-img-rs/README.md @@ -142,7 +142,7 @@ fn main() -> Result<()> { ## 许可证 -GPL-3.0 +GPL-2.0 ## 致谢 diff --git a/res/vcpkg/libyuv/build.rs b/res/vcpkg/libyuv/build.rs index bc3000bd..d754996a 100644 --- a/res/vcpkg/libyuv/build.rs +++ b/res/vcpkg/libyuv/build.rs @@ -59,11 +59,6 @@ fn generate_bindings(cpp_dir: &Path) { .allowlist_function("UYVYToARGB") .allowlist_function("ARGBToRGB24") .allowlist_function("ARGBToRAW") - // MJPEG decoding - .allowlist_function("MJPGToI420") - .allowlist_function("MJPGToNV12") - .allowlist_function("MJPGToARGB") - .allowlist_function("MJPGSize") // Scaling .allowlist_function("I420Scale") .allowlist_function("NV12Scale") @@ -152,25 +147,15 @@ fn link_vcpkg(mut path: PathBuf) -> bool { let use_static = env::var("LIBYUV_STATIC").map(|v| v == "1").unwrap_or(false); let static_lib = lib_path.join("libyuv.a"); - let jpeg_static = lib_path.join("libjpeg.a"); - let turbojpeg_static = lib_path.join("libturbojpeg.a"); if use_static && static_lib.exists() { // Static linking (for deb packaging) println!("cargo:rustc-link-lib=static=yuv"); - if turbojpeg_static.exists() { - println!("cargo:rustc-link-lib=static=turbojpeg"); - } else if jpeg_static.exists() { - println!("cargo:rustc-link-lib=static=jpeg"); - } else { - println!("cargo:rustc-link-lib=jpeg"); - } println!("cargo:rustc-link-lib=stdc++"); println!("cargo:info=Using libyuv from vcpkg (static linking)"); } else { // Dynamic linking (default for development) println!("cargo:rustc-link-lib=yuv"); - println!("cargo:rustc-link-lib=jpeg"); println!("cargo:rustc-link-lib=stdc++"); println!("cargo:info=Using libyuv from vcpkg (dynamic linking)"); } @@ -204,23 +189,6 @@ fn link_pkg_config() -> bool { } } - // Also need libjpeg - if let Ok(jpeg_output) = Command::new("pkg-config") - .args(["--libs", "libjpeg"]) - .output() - { - if 
jpeg_output.status.success() { - let jpeg_flags = String::from_utf8_lossy(&jpeg_output.stdout); - for flag in jpeg_flags.split_whitespace() { - if flag.starts_with("-L") { - println!("cargo:rustc-link-search=native={}", &flag[2..]); - } else if flag.starts_with("-l") { - println!("cargo:rustc-link-lib={}", &flag[2..]); - } - } - } - } - #[cfg(target_os = "linux")] println!("cargo:rustc-link-lib=stdc++"); @@ -240,9 +208,9 @@ fn link_system() -> bool { format!("{}/lib", path) } else { match target_arch.as_str() { - "x86_64" => "/opt/one-kvm-libs/x86_64-linux-gnu/lib", - "aarch64" => "/opt/one-kvm-libs/aarch64-linux-gnu/lib", - "arm" => "/opt/one-kvm-libs/armv7-linux-gnueabihf/lib", + "x86_64" => "/usr/local/lib", + "aarch64" => "/usr/aarch64-linux-gnu/lib", + "arm" => "/usr/arm-linux-gnueabihf/lib", _ => "", } .to_string() @@ -276,19 +244,6 @@ fn link_system() -> bool { if use_static && libyuv_static.exists() { println!("cargo:rustc-link-search=native={}", path); println!("cargo:rustc-link-lib=static=yuv"); - - // Check for static libjpeg-turbo in the same directory - let turbojpeg_static = lib_path.join("libturbojpeg.a"); - let jpeg_static = lib_path.join("libjpeg.a"); - if turbojpeg_static.exists() { - println!("cargo:rustc-link-lib=static=turbojpeg"); - } else if jpeg_static.exists() { - println!("cargo:rustc-link-lib=static=jpeg"); - } else { - // Fall back to dynamic jpeg - println!("cargo:rustc-link-lib=jpeg"); - } - println!("cargo:rustc-link-lib=stdc++"); println!("cargo:info=Using system libyuv from {} (static linking)", path); return true; @@ -298,7 +253,6 @@ fn link_system() -> bool { if libyuv_so.exists() { println!("cargo:rustc-link-search=native={}", path); println!("cargo:rustc-link-lib=yuv"); - println!("cargo:rustc-link-lib=jpeg"); #[cfg(target_os = "linux")] println!("cargo:rustc-link-lib=stdc++"); diff --git a/res/vcpkg/libyuv/src/lib.rs b/res/vcpkg/libyuv/src/lib.rs index e5cece52..45e1fc66 100644 --- a/res/vcpkg/libyuv/src/lib.rs +++ b/res/vcpkg/libyuv/src/lib.rs @@ -58,8 +58,6 @@ pub enum YuvError { BufferTooSmall, /// libyuv function returned an error code ConversionFailed(i32), - /// MJPEG data is invalid or corrupt - InvalidMjpeg, } impl fmt::Display for YuvError { @@ -68,7 +66,6 @@ impl fmt::Display for YuvError { YuvError::InvalidDimensions => write!(f, "Invalid dimensions (must be even)"), YuvError::BufferTooSmall => write!(f, "Buffer too small"), YuvError::ConversionFailed(code) => write!(f, "Conversion failed with code {}", code), - YuvError::InvalidMjpeg => write!(f, "Invalid MJPEG data"), } } } @@ -915,129 +912,6 @@ pub fn bgr24_to_nv12(src: &[u8], dst: &mut [u8], width: i32, height: i32) -> Res i420_to_nv12(&i420_buffer, dst, width, height) } -// ============================================================================ -// MJPEG decoding -// ============================================================================ - -/// Decode MJPEG to I420 -/// -/// # Arguments -/// * `src` - Source MJPEG data -/// * `dst` - Destination I420 buffer -/// * `width` - Expected frame width -/// * `height` - Expected frame height -/// -/// # Note -/// This function requires libyuv to be compiled with JPEG support -pub fn mjpeg_to_i420(src: &[u8], dst: &mut [u8], width: i32, height: i32) -> Result<()> { - if width % 2 != 0 || height % 2 != 0 { - return Err(YuvError::InvalidDimensions); - } - - let w = width as usize; - let h = height as usize; - let y_size = w * h; - let uv_size = (w / 2) * (h / 2); - - if dst.len() < i420_size(w, h) { - return 
Err(YuvError::BufferTooSmall); - } - - if src.len() < 2 || src[0] != 0xFF || src[1] != 0xD8 { - return Err(YuvError::InvalidMjpeg); - } - - call_yuv!(MJPGToI420( - src.as_ptr(), - usize_to_size_t(src.len()), - dst.as_mut_ptr(), - width, - dst[y_size..].as_mut_ptr(), - width / 2, - dst[y_size + uv_size..].as_mut_ptr(), - width / 2, - width, - height, - width, - height, - )) -} - -/// Decode MJPEG to NV12 (optimal for VAAPI) -pub fn mjpeg_to_nv12(src: &[u8], dst: &mut [u8], width: i32, height: i32) -> Result<()> { - if width % 2 != 0 || height % 2 != 0 { - return Err(YuvError::InvalidDimensions); - } - - let w = width as usize; - let h = height as usize; - let y_size = w * h; - - if dst.len() < nv12_size(w, h) { - return Err(YuvError::BufferTooSmall); - } - - if src.len() < 2 || src[0] != 0xFF || src[1] != 0xD8 { - return Err(YuvError::InvalidMjpeg); - } - - call_yuv!(MJPGToNV12( - src.as_ptr(), - usize_to_size_t(src.len()), - dst.as_mut_ptr(), - width, - dst[y_size..].as_mut_ptr(), - width, - width, - height, - width, - height, - )) -} - -/// Decode MJPEG to BGRA -pub fn mjpeg_to_bgra(src: &[u8], dst: &mut [u8], width: i32, height: i32) -> Result<()> { - let w = width as usize; - let h = height as usize; - - if dst.len() < argb_size(w, h) { - return Err(YuvError::BufferTooSmall); - } - - if src.len() < 2 || src[0] != 0xFF || src[1] != 0xD8 { - return Err(YuvError::InvalidMjpeg); - } - - call_yuv!(MJPGToARGB( - src.as_ptr(), - usize_to_size_t(src.len()), - dst.as_mut_ptr(), - width * 4, - width, - height, - width, - height, - )) -} - -/// Get MJPEG frame dimensions without decoding -pub fn mjpeg_size(src: &[u8]) -> Result<(i32, i32)> { - if src.len() < 2 || src[0] != 0xFF || src[1] != 0xD8 { - return Err(YuvError::InvalidMjpeg); - } - - let mut width: i32 = 0; - let mut height: i32 = 0; - - let ret = unsafe { MJPGSize(src.as_ptr(), usize_to_size_t(src.len()), &mut width, &mut height) }; - - if ret != 0 || width <= 0 || height <= 0 { - return Err(YuvError::InvalidMjpeg); - } - - Ok((width, height)) -} - // ============================================================================ // Scaling // ============================================================================ @@ -1199,18 +1073,6 @@ impl Converter { Ok(&self.nv12_buffer) } - /// Decode MJPEG to NV12, returns reference to internal buffer - pub fn mjpeg_to_nv12(&mut self, src: &[u8]) -> Result<&[u8]> { - mjpeg_to_nv12(src, &mut self.nv12_buffer, self.width, self.height)?; - Ok(&self.nv12_buffer) - } - - /// Decode MJPEG to I420, returns reference to internal buffer - pub fn mjpeg_to_i420(&mut self, src: &[u8]) -> Result<&[u8]> { - mjpeg_to_i420(src, &mut self.i420_buffer, self.width, self.height)?; - Ok(&self.i420_buffer) - } - /// Convert I420 to NV12, returns reference to internal buffer pub fn i420_to_nv12(&mut self, src: &[u8]) -> Result<&[u8]> { i420_to_nv12(src, &mut self.nv12_buffer, self.width, self.height)?; diff --git a/src/audio/capture.rs b/src/audio/capture.rs index b8c67ef3..4252b9cf 100644 --- a/src/audio/capture.rs +++ b/src/audio/capture.rs @@ -7,10 +7,12 @@ use std::sync::atomic::{AtomicBool, AtomicU64, Ordering}; use std::sync::Arc; use std::time::Instant; use tokio::sync::{broadcast, watch, Mutex}; -use tracing::{debug, error, info, warn}; +use tracing::{debug, info}; use super::device::AudioDeviceInfo; use crate::error::{AppError, Result}; +use crate::utils::LogThrottler; +use crate::{error_throttled, warn_throttled}; /// Audio capture configuration #[derive(Debug, Clone)] @@ -134,6 +136,8 @@ pub struct 
AudioCapturer { stop_flag: Arc, sequence: Arc, capture_handle: Mutex>>, + /// Log throttler to prevent log flooding + log_throttler: LogThrottler, } impl AudioCapturer { @@ -151,6 +155,7 @@ impl AudioCapturer { stop_flag: Arc::new(AtomicBool::new(false)), sequence: Arc::new(AtomicU64::new(0)), capture_handle: Mutex::new(None), + log_throttler: LogThrottler::with_secs(5), } } @@ -193,9 +198,10 @@ impl AudioCapturer { let frame_tx = self.frame_tx.clone(); let stop_flag = self.stop_flag.clone(); let sequence = self.sequence.clone(); + let log_throttler = self.log_throttler.clone(); let handle = tokio::task::spawn_blocking(move || { - capture_loop(config, state, stats, frame_tx, stop_flag, sequence); + capture_loop(config, state, stats, frame_tx, stop_flag, sequence, log_throttler); }); *self.capture_handle.lock().await = Some(handle); @@ -229,11 +235,20 @@ fn capture_loop( frame_tx: broadcast::Sender, stop_flag: Arc, sequence: Arc, + log_throttler: LogThrottler, ) { - let result = run_capture(&config, &state, &stats, &frame_tx, &stop_flag, &sequence); + let result = run_capture( + &config, + &state, + &stats, + &frame_tx, + &stop_flag, + &sequence, + &log_throttler, + ); if let Err(e) = result { - error!("Audio capture error: {}", e); + error_throttled!(log_throttler, "capture_error", "Audio capture error: {}", e); let _ = state.send(CaptureState::Error); } else { let _ = state.send(CaptureState::Stopped); @@ -247,6 +262,7 @@ fn run_capture( frame_tx: &broadcast::Sender, stop_flag: &AtomicBool, sequence: &AtomicU64, + log_throttler: &LogThrottler, ) -> Result<()> { // Open ALSA device let pcm = PCM::new(&config.device_name, Direction::Capture, false).map_err(|e| { @@ -316,7 +332,7 @@ fn run_capture( // Check PCM state match pcm.state() { State::XRun => { - warn!("Audio buffer overrun, recovering"); + warn_throttled!(log_throttler, "xrun", "Audio buffer overrun, recovering"); if let Ok(mut s) = stats.try_lock() { s.buffer_overruns += 1; } @@ -324,7 +340,7 @@ fn run_capture( continue; } State::Suspended => { - warn!("Audio device suspended, recovering"); + warn_throttled!(log_throttler, "suspended", "Audio device suspended, recovering"); let _ = pcm.resume(); continue; } @@ -370,13 +386,19 @@ fn run_capture( let desc = e.to_string(); if desc.contains("EPIPE") || desc.contains("Broken pipe") { // Buffer overrun - warn!("Audio buffer overrun"); + warn_throttled!(log_throttler, "buffer_overrun", "Audio buffer overrun"); if let Ok(mut s) = stats.try_lock() { s.buffer_overruns += 1; } let _ = pcm.prepare(); + } else if desc.contains("No such device") || desc.contains("ENODEV") { + // Device disconnected - use longer throttle for this + error_throttled!(log_throttler, "no_device", "Audio read error: {}", e); + if let Ok(mut s) = stats.try_lock() { + s.frames_dropped += 1; + } } else { - error!("Audio read error: {}", e); + error_throttled!(log_throttler, "read_error", "Audio read error: {}", e); if let Ok(mut s) = stats.try_lock() { s.frames_dropped += 1; } diff --git a/src/hid/datachannel.rs b/src/hid/datachannel.rs index 7dd85372..2c898e72 100644 --- a/src/hid/datachannel.rs +++ b/src/hid/datachannel.rs @@ -284,6 +284,7 @@ mod tests { right_alt: false, right_meta: false, }, + is_usb_hid: false, }; let encoded = encode_keyboard_event(&event); diff --git a/src/hid/websocket.rs b/src/hid/websocket.rs index e8388c3a..e427a29f 100644 --- a/src/hid/websocket.rs +++ b/src/hid/websocket.rs @@ -50,7 +50,7 @@ async fn handle_hid_socket(socket: WebSocket, state: Arc) { vec![RESP_ERR_HID_UNAVAILABLE] }; - if 
sender.send(Message::Binary(initial_response)).await.is_err() { + if sender.send(Message::Binary(initial_response.into())).await.is_err() { error!("Failed to send initial HID status"); return; } @@ -66,7 +66,7 @@ async fn handle_hid_socket(socket: WebSocket, state: Arc) { warn!("HID controller not available, ignoring message"); } // Send error response (optional, for client awareness) - let _ = sender.send(Message::Binary(vec![RESP_ERR_HID_UNAVAILABLE])).await; + let _ = sender.send(Message::Binary(vec![RESP_ERR_HID_UNAVAILABLE].into())).await; continue; } @@ -83,7 +83,7 @@ async fn handle_hid_socket(socket: WebSocket, state: Arc) { if log_throttler.should_log("text_message_rejected") { debug!("Received text message (not supported): {} bytes", text.len()); } - let _ = sender.send(Message::Binary(vec![RESP_ERR_INVALID_MESSAGE])).await; + let _ = sender.send(Message::Binary(vec![RESP_ERR_INVALID_MESSAGE].into())).await; } Ok(Message::Ping(data)) => { let _ = sender.send(Message::Pong(data)).await; diff --git a/src/main.rs b/src/main.rs index 82e3bba3..189ecc43 100644 --- a/src/main.rs +++ b/src/main.rs @@ -532,7 +532,7 @@ async fn main() -> anyhow::Result<()> { let cert = generate_self_signed_cert()?; tokio::fs::create_dir_all(&cert_dir).await?; tokio::fs::write(&cert_path, cert.cert.pem()).await?; - tokio::fs::write(&key_path, cert.key_pair.serialize_pem()).await?; + tokio::fs::write(&key_path, cert.signing_key.serialize_pem()).await?; } else { tracing::info!("Using existing TLS certificate from {}", cert_dir.display()); } @@ -633,7 +633,7 @@ fn parse_video_config(config: &AppConfig) -> (PixelFormat, Resolution) { } /// Generate a self-signed TLS certificate -fn generate_self_signed_cert() -> anyhow::Result { +fn generate_self_signed_cert() -> anyhow::Result> { use rcgen::generate_simple_self_signed; let subject_alt_names = vec![ diff --git a/src/msd/ventoy_drive.rs b/src/msd/ventoy_drive.rs index 56d590ed..04d03be2 100644 --- a/src/msd/ventoy_drive.rs +++ b/src/msd/ventoy_drive.rs @@ -507,10 +507,83 @@ impl Drop for ChannelWriter { #[cfg(test)] mod tests { use super::*; + use std::process::Command; + use std::sync::OnceLock; use tempfile::TempDir; + /// Path to ventoy resources directory + static RESOURCE_DIR: &str = concat!( + env!("CARGO_MANIFEST_DIR"), + "/../ventoy-img-rs/resources" + ); + + /// Initialize ventoy resources once + fn init_ventoy_resources() -> bool { + static INIT: OnceLock = OnceLock::new(); + *INIT.get_or_init(|| { + let resource_path = std::path::Path::new(RESOURCE_DIR); + + // Decompress xz files if needed + let core_xz = resource_path.join("core.img.xz"); + let core_img = resource_path.join("core.img"); + if core_xz.exists() && !core_img.exists() { + if let Err(e) = decompress_xz(&core_xz, &core_img) { + eprintln!("Failed to decompress core.img.xz: {}", e); + return false; + } + } + + let disk_xz = resource_path.join("ventoy.disk.img.xz"); + let disk_img = resource_path.join("ventoy.disk.img"); + if disk_xz.exists() && !disk_img.exists() { + if let Err(e) = decompress_xz(&disk_xz, &disk_img) { + eprintln!("Failed to decompress ventoy.disk.img.xz: {}", e); + return false; + } + } + + // Initialize resources + if let Err(e) = ventoy_img::resources::init_resources(resource_path) { + eprintln!("Failed to init ventoy resources: {}", e); + return false; + } + + true + }) + } + + /// Decompress xz file using system command + fn decompress_xz(src: &std::path::Path, dst: &std::path::Path) -> std::io::Result<()> { + let output = Command::new("xz") + .args(&["-d", "-k", "-c", 
+            .output()?;
+
+        if !output.status.success() {
+            return Err(std::io::Error::new(
+                std::io::ErrorKind::Other,
+                format!("xz decompress failed: {}", String::from_utf8_lossy(&output.stderr)),
+            ));
+        }
+
+        std::fs::write(dst, &output.stdout)?;
+        Ok(())
+    }
+
+    /// Ensure resources are initialized, skip test if failed
+    fn ensure_resources() -> bool {
+        if !init_ventoy_resources() {
+            eprintln!("Skipping test: ventoy resources not available");
+            false
+        } else {
+            true
+        }
+    }
+
     #[tokio::test]
     async fn test_drive_init() {
+        if !ensure_resources() {
+            return;
+        }
         let temp_dir = TempDir::new().unwrap();
         let drive_path = temp_dir.path().join("test_ventoy.img");
         let drive = VentoyDrive::new(drive_path);
@@ -522,6 +595,9 @@ mod tests {

     #[tokio::test]
     async fn test_drive_mkdir() {
+        if !ensure_resources() {
+            return;
+        }
         let temp_dir = TempDir::new().unwrap();
         let drive_path = temp_dir.path().join("test_ventoy.img");
         let drive = VentoyDrive::new(drive_path);
@@ -537,6 +613,9 @@ mod tests {

     #[tokio::test]
     async fn test_drive_file_write_and_read() {
+        if !ensure_resources() {
+            return;
+        }
         let temp_dir = TempDir::new().unwrap();
         let drive_path = temp_dir.path().join("test_ventoy.img");
         let drive = VentoyDrive::new(drive_path.clone());
@@ -565,6 +644,9 @@ mod tests {

     #[tokio::test]
     async fn test_drive_get_file_info() {
+        if !ensure_resources() {
+            return;
+        }
         let temp_dir = TempDir::new().unwrap();
         let drive_path = temp_dir.path().join("test_ventoy.img");
         let drive = VentoyDrive::new(drive_path.clone());
@@ -611,6 +693,9 @@ mod tests {

     #[tokio::test]
     async fn test_drive_stream_read() {
+        if !ensure_resources() {
+            return;
+        }
         let temp_dir = TempDir::new().unwrap();
         let drive_path = temp_dir.path().join("test_ventoy.img");
         let drive = VentoyDrive::new(drive_path.clone());
@@ -652,6 +737,9 @@ mod tests {

     #[tokio::test]
     async fn test_drive_stream_read_small_file() {
+        if !ensure_resources() {
+            return;
+        }
         let temp_dir = TempDir::new().unwrap();
         let drive_path = temp_dir.path().join("test_ventoy.img");
         let drive = VentoyDrive::new(drive_path.clone());
diff --git a/src/otg/manager.rs b/src/otg/manager.rs
index 0b1862f3..6840d639 100644
--- a/src/otg/manager.rs
+++ b/src/otg/manager.rs
@@ -423,13 +423,13 @@ mod tests {
     fn test_endpoint_tracking() {
         let mut manager = OtgGadgetManager::with_config("test", 8);

-        // Keyboard uses 2 endpoints
+        // Keyboard uses 1 endpoint
         let _ = manager.add_keyboard();
-        assert_eq!(manager.endpoint_allocator.used(), 2);
+        assert_eq!(manager.endpoint_allocator.used(), 1);

         // Mouse uses 1 endpoint each
         let _ = manager.add_mouse_relative();
         let _ = manager.add_mouse_absolute();
-        assert_eq!(manager.endpoint_allocator.used(), 4);
+        assert_eq!(manager.endpoint_allocator.used(), 3);
     }
 }
diff --git a/src/otg/service.rs b/src/otg/service.rs
index e89ce83c..f64948f8 100644
--- a/src/otg/service.rs
+++ b/src/otg/service.rs
@@ -558,7 +558,7 @@ mod tests {

     #[test]
     fn test_service_creation() {
-        let service = OtgService::new();
+        let _service = OtgService::new();
         // Just test that creation doesn't panic
         assert!(!OtgService::is_available() || true); // Depends on environment
     }
diff --git a/src/rustdesk/config.rs b/src/rustdesk/config.rs
index 1532f986..a51649ec 100644
--- a/src/rustdesk/config.rs
+++ b/src/rustdesk/config.rs
@@ -161,8 +161,8 @@ impl RustDeskConfig {
 /// Generate a random 9-digit device ID
 pub fn generate_device_id() -> String {
     use rand::Rng;
-    let mut rng = rand::thread_rng();
-    let id: u32 = rng.gen_range(100_000_000..999_999_999);
+    let mut rng = rand::rng();
+    let id: u32 = rng.random_range(100_000_000..999_999_999);
     id.to_string()
 }

@@ -170,10 +170,10 @@ pub fn generate_device_id() -> String {
 pub fn generate_random_password() -> String {
     use rand::Rng;
     const CHARSET: &[u8] = b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
-    let mut rng = rand::thread_rng();
+    let mut rng = rand::rng();
     (0..8)
         .map(|_| {
-            let idx = rng.gen_range(0..CHARSET.len());
+            let idx = rng.random_range(0..CHARSET.len());
             CHARSET[idx] as char
         })
         .collect()
@@ -226,6 +226,7 @@ mod tests {

         // No rendezvous server, relay is None
         config.rendezvous_server = String::new();
+        config.relay_server = None;
         assert_eq!(config.relay_addr(), None);
     }

diff --git a/src/stream/ws_hid.rs b/src/stream/ws_hid.rs
index f94db54f..929d7278 100644
--- a/src/stream/ws_hid.rs
+++ b/src/stream/ws_hid.rs
@@ -183,7 +183,7 @@ impl WsHidHandler {

         // Send initial status as binary: 0x00 = ok, 0x01 = error
         let status_byte = if self.is_hid_available() { 0x00u8 } else { 0x01u8 };
-        let _ = sender.send(Message::Binary(vec![status_byte])).await;
+        let _ = sender.send(Message::Binary(vec![status_byte].into())).await;

         loop {
             tokio::select! {
diff --git a/src/utils/throttle.rs b/src/utils/throttle.rs
index 5a067210..f983e61d 100644
--- a/src/utils/throttle.rs
+++ b/src/utils/throttle.rs
@@ -111,6 +111,15 @@ impl LogThrottler {
     }
 }

+impl Clone for LogThrottler {
+    fn clone(&self) -> Self {
+        Self {
+            last_logged: RwLock::new(HashMap::new()),
+            interval: self.interval,
+        }
+    }
+}
+
 impl Default for LogThrottler {
     /// Create a default log throttler with 5 second interval
     fn default() -> Self {
diff --git a/src/video/convert.rs b/src/video/convert.rs
index c890447c..ca60b80d 100644
--- a/src/video/convert.rs
+++ b/src/video/convert.rs
@@ -414,64 +414,6 @@ pub fn yuyv_buffer_size(resolution: Resolution) -> usize {
     (resolution.width * resolution.height * 2) as usize
 }

-// ============================================================================
-// MJPEG Decoder - Decodes JPEG to YUV420P using libyuv
-// ============================================================================
-
-/// MJPEG/JPEG decoder that outputs YUV420P using libyuv
-pub struct MjpegDecoder {
-    /// Resolution hint (can be updated from decoded frame)
-    resolution: Resolution,
-    /// YUV420P output buffer
-    yuv_buffer: Yuv420pBuffer,
-}
-
-impl MjpegDecoder {
-    /// Create a new MJPEG decoder with expected resolution
-    pub fn new(resolution: Resolution) -> Result<Self> {
-        Ok(Self {
-            resolution,
-            yuv_buffer: Yuv420pBuffer::new(resolution),
-        })
-    }
-
-    /// Decode MJPEG/JPEG data to YUV420P using libyuv
-    pub fn decode(&mut self, jpeg_data: &[u8]) -> Result<&[u8]> {
-        // Get MJPEG dimensions
-        let (width, height) = libyuv::mjpeg_size(jpeg_data)
-            .map_err(|e| AppError::VideoError(format!("Failed to get MJPEG size: {}", e)))?;
-
-        // Check if resolution changed
-        if width != self.resolution.width as i32 || height != self.resolution.height as i32 {
-            tracing::debug!(
-                "MJPEG resolution changed: {}x{} -> {}x{}",
-                self.resolution.width,
-                self.resolution.height,
-                width,
-                height
-            );
-            self.resolution = Resolution::new(width as u32, height as u32);
-            self.yuv_buffer = Yuv420pBuffer::new(self.resolution);
-        }
-
-        // Decode MJPEG directly to I420 using libyuv
-        libyuv::mjpeg_to_i420(jpeg_data, self.yuv_buffer.as_bytes_mut(), width, height)
-            .map_err(|e| AppError::VideoError(format!("MJPEG decode failed: {}", e)))?;
-
-        Ok(self.yuv_buffer.as_bytes())
-    }
-
-    /// Get current resolution
-    pub fn resolution(&self) ->
Resolution { - self.resolution - } - - /// Get YUV420P buffer size - pub fn yuv_buffer_size(&self) -> usize { - self.yuv_buffer.len() - } -} - // ============================================================================ // NV12 Converter for VAAPI encoder (using libyuv) // ============================================================================ @@ -572,34 +514,6 @@ pub fn yuyv_to_nv12(yuyv: &[u8], nv12: &mut [u8], width: usize, height: usize) { } } -// ============================================================================ -// Extended PixelConverter for MJPEG support -// ============================================================================ - -/// MJPEG to YUV420P converter (wraps MjpegDecoder) -pub struct MjpegToYuv420Converter { - decoder: MjpegDecoder, -} - -impl MjpegToYuv420Converter { - /// Create a new MJPEG to YUV420P converter - pub fn new(resolution: Resolution) -> Result { - Ok(Self { - decoder: MjpegDecoder::new(resolution)?, - }) - } - - /// Convert MJPEG data to YUV420P - pub fn convert(&mut self, mjpeg_data: &[u8]) -> Result<&[u8]> { - self.decoder.decode(mjpeg_data) - } - - /// Get current resolution - pub fn resolution(&self) -> Resolution { - self.decoder.resolution() - } -} - #[cfg(test)] mod tests { use super::*; diff --git a/src/video/decoder/mjpeg.rs b/src/video/decoder/mjpeg.rs deleted file mode 100644 index 69aec3cd..00000000 --- a/src/video/decoder/mjpeg.rs +++ /dev/null @@ -1,481 +0,0 @@ -//! MJPEG decoder implementations -//! -//! Provides MJPEG decoding using libyuv for SIMD-accelerated decoding. -//! All decoders output to standard YUV formats suitable for encoding. - -use std::sync::Once; -use tracing::{debug, info}; - -use crate::error::{AppError, Result}; -use crate::video::format::Resolution; - -static INIT_LOGGING: Once = Once::new(); - -/// Initialize decoder logging (only once) -fn init_decoder_logging() { - INIT_LOGGING.call_once(|| { - debug!("MJPEG decoder logging initialized"); - }); -} - -/// MJPEG decoder configuration -#[derive(Debug, Clone)] -pub struct MjpegVaapiDecoderConfig { - /// Expected resolution (can be updated from decoded frame) - pub resolution: Resolution, - /// Use hardware acceleration (ignored, kept for API compatibility) - pub use_hwaccel: bool, -} - -impl Default for MjpegVaapiDecoderConfig { - fn default() -> Self { - Self { - resolution: Resolution::HD1080, - use_hwaccel: true, - } - } -} - -/// Decoded frame data in NV12 format -#[derive(Debug, Clone)] -pub struct DecodedNv12Frame { - /// Y plane data - pub y_plane: Vec, - /// UV interleaved plane data - pub uv_plane: Vec, - /// Y plane linesize (stride) - pub y_linesize: i32, - /// UV plane linesize (stride) - pub uv_linesize: i32, - /// Frame width - pub width: i32, - /// Frame height - pub height: i32, -} - -/// Decoded frame data in YUV420P (I420) format -#[derive(Debug, Clone)] -pub struct DecodedYuv420pFrame { - /// Y plane data - pub y_plane: Vec, - /// U plane data - pub u_plane: Vec, - /// V plane data - pub v_plane: Vec, - /// Y plane linesize (stride) - pub y_linesize: i32, - /// U plane linesize (stride) - pub u_linesize: i32, - /// V plane linesize (stride) - pub v_linesize: i32, - /// Frame width - pub width: i32, - /// Frame height - pub height: i32, -} - -impl DecodedYuv420pFrame { - /// Get packed YUV420P data (Y plane followed by U and V planes, with stride removed) - pub fn to_packed_yuv420p(&self) -> Vec { - let width = self.width as usize; - let height = self.height as usize; - let y_size = width * height; - let uv_size = width * height 
/ 4; - - let mut packed = Vec::with_capacity(y_size + uv_size * 2); - - // Copy Y plane, removing stride padding if any - if self.y_linesize as usize == width { - packed.extend_from_slice(&self.y_plane[..y_size]); - } else { - for row in 0..height { - let src_offset = row * self.y_linesize as usize; - packed.extend_from_slice(&self.y_plane[src_offset..src_offset + width]); - } - } - - // Copy U plane - let uv_width = width / 2; - let uv_height = height / 2; - if self.u_linesize as usize == uv_width { - packed.extend_from_slice(&self.u_plane[..uv_size]); - } else { - for row in 0..uv_height { - let src_offset = row * self.u_linesize as usize; - packed.extend_from_slice(&self.u_plane[src_offset..src_offset + uv_width]); - } - } - - // Copy V plane - if self.v_linesize as usize == uv_width { - packed.extend_from_slice(&self.v_plane[..uv_size]); - } else { - for row in 0..uv_height { - let src_offset = row * self.v_linesize as usize; - packed.extend_from_slice(&self.v_plane[src_offset..src_offset + uv_width]); - } - } - - packed - } - - /// Copy packed YUV420P data to external buffer (zero allocation) - /// Returns the number of bytes written, or None if buffer too small - pub fn copy_to_packed_yuv420p(&self, dst: &mut [u8]) -> Option { - let width = self.width as usize; - let height = self.height as usize; - let y_size = width * height; - let uv_size = width * height / 4; - let total_size = y_size + uv_size * 2; - - if dst.len() < total_size { - return None; - } - - // Copy Y plane - if self.y_linesize as usize == width { - dst[..y_size].copy_from_slice(&self.y_plane[..y_size]); - } else { - for row in 0..height { - let src_offset = row * self.y_linesize as usize; - let dst_offset = row * width; - dst[dst_offset..dst_offset + width] - .copy_from_slice(&self.y_plane[src_offset..src_offset + width]); - } - } - - // Copy U plane - let uv_width = width / 2; - let uv_height = height / 2; - if self.u_linesize as usize == uv_width { - dst[y_size..y_size + uv_size].copy_from_slice(&self.u_plane[..uv_size]); - } else { - for row in 0..uv_height { - let src_offset = row * self.u_linesize as usize; - let dst_offset = y_size + row * uv_width; - dst[dst_offset..dst_offset + uv_width] - .copy_from_slice(&self.u_plane[src_offset..src_offset + uv_width]); - } - } - - // Copy V plane - let v_offset = y_size + uv_size; - if self.v_linesize as usize == uv_width { - dst[v_offset..v_offset + uv_size].copy_from_slice(&self.v_plane[..uv_size]); - } else { - for row in 0..uv_height { - let src_offset = row * self.v_linesize as usize; - let dst_offset = v_offset + row * uv_width; - dst[dst_offset..dst_offset + uv_width] - .copy_from_slice(&self.v_plane[src_offset..src_offset + uv_width]); - } - } - - Some(total_size) - } -} - -impl DecodedNv12Frame { - /// Get packed NV12 data (Y plane followed by UV plane, with stride removed) - pub fn to_packed_nv12(&self) -> Vec { - let width = self.width as usize; - let height = self.height as usize; - let y_size = width * height; - let uv_size = width * height / 2; - - let mut packed = Vec::with_capacity(y_size + uv_size); - - // Copy Y plane, removing stride padding if any - if self.y_linesize as usize == width { - // No padding, direct copy - packed.extend_from_slice(&self.y_plane[..y_size]); - } else { - // Has padding, copy row by row - for row in 0..height { - let src_offset = row * self.y_linesize as usize; - packed.extend_from_slice(&self.y_plane[src_offset..src_offset + width]); - } - } - - // Copy UV plane, removing stride padding if any - let uv_height = height / 2; - 
if self.uv_linesize as usize == width { - // No padding, direct copy - packed.extend_from_slice(&self.uv_plane[..uv_size]); - } else { - // Has padding, copy row by row - for row in 0..uv_height { - let src_offset = row * self.uv_linesize as usize; - packed.extend_from_slice(&self.uv_plane[src_offset..src_offset + width]); - } - } - - packed - } - - /// Copy packed NV12 data to external buffer (zero allocation) - /// Returns the number of bytes written, or None if buffer too small - pub fn copy_to_packed_nv12(&self, dst: &mut [u8]) -> Option { - let width = self.width as usize; - let height = self.height as usize; - let y_size = width * height; - let uv_size = width * height / 2; - let total_size = y_size + uv_size; - - if dst.len() < total_size { - return None; - } - - // Copy Y plane, removing stride padding if any - if self.y_linesize as usize == width { - // No padding, direct copy - dst[..y_size].copy_from_slice(&self.y_plane[..y_size]); - } else { - // Has padding, copy row by row - for row in 0..height { - let src_offset = row * self.y_linesize as usize; - let dst_offset = row * width; - dst[dst_offset..dst_offset + width] - .copy_from_slice(&self.y_plane[src_offset..src_offset + width]); - } - } - - // Copy UV plane, removing stride padding if any - let uv_height = height / 2; - if self.uv_linesize as usize == width { - // No padding, direct copy - dst[y_size..total_size].copy_from_slice(&self.uv_plane[..uv_size]); - } else { - // Has padding, copy row by row - for row in 0..uv_height { - let src_offset = row * self.uv_linesize as usize; - let dst_offset = y_size + row * width; - dst[dst_offset..dst_offset + width] - .copy_from_slice(&self.uv_plane[src_offset..src_offset + width]); - } - } - - Some(total_size) - } -} - -/// MJPEG decoder with NV12 output -/// -/// Uses libyuv for SIMD-accelerated MJPEG decoding to YUV420P, -/// then converts to NV12 for hardware encoder compatibility. -/// Named "VaapiDecoder" for API compatibility with existing code. -pub struct MjpegVaapiDecoder { - /// Configuration - config: MjpegVaapiDecoderConfig, - /// Frame counter - frame_count: u64, -} - -impl MjpegVaapiDecoder { - /// Create a new MJPEG decoder - pub fn new(config: MjpegVaapiDecoderConfig) -> Result { - init_decoder_logging(); - - info!( - "Creating MJPEG decoder with libyuv (SIMD-accelerated, NV12 output)" - ); - - Ok(Self { - config, - frame_count: 0, - }) - } - - /// Create with default config - pub fn with_vaapi(resolution: Resolution) -> Result { - Self::new(MjpegVaapiDecoderConfig { - resolution, - use_hwaccel: true, - }) - } - - /// Create with software decoding (same as with_vaapi, kept for API compatibility) - pub fn with_software(resolution: Resolution) -> Result { - Self::new(MjpegVaapiDecoderConfig { - resolution, - use_hwaccel: false, - }) - } - - /// Check if hardware acceleration is active (always false, using libyuv) - pub fn is_hwaccel_active(&self) -> bool { - false - } - - /// Decode MJPEG frame to NV12 - /// - /// Returns the decoded frame in NV12 format, or an error if decoding fails. 
- pub fn decode(&mut self, jpeg_data: &[u8]) -> Result { - if jpeg_data.len() < 2 { - return Err(AppError::VideoError("JPEG data too small".to_string())); - } - - // Verify JPEG signature (FFD8) - if jpeg_data[0] != 0xFF || jpeg_data[1] != 0xD8 { - return Err(AppError::VideoError("Invalid JPEG signature".to_string())); - } - - self.frame_count += 1; - - // Get JPEG dimensions - let (width, height) = libyuv::mjpeg_size(jpeg_data) - .map_err(|e| AppError::VideoError(format!("Failed to read MJPEG size: {}", e)))?; - - // Decode MJPEG to YUV420P first - let y_size = (width * height) as usize; - let uv_size = y_size / 4; - let yuv420_size = y_size + uv_size * 2; - let mut yuv_data = vec![0u8; yuv420_size]; - - libyuv::mjpeg_to_i420(jpeg_data, &mut yuv_data, width, height) - .map_err(|e| AppError::VideoError(format!("libyuv MJPEG→I420 failed: {}", e)))?; - - // Convert I420 to NV12 - let nv12_size = (width * height * 3 / 2) as usize; - let mut nv12_data = vec![0u8; nv12_size]; - - libyuv::i420_to_nv12(&yuv_data, &mut nv12_data, width, height) - .map_err(|e| AppError::VideoError(format!("libyuv I420→NV12 failed: {}", e)))?; - - // Split into Y and UV planes - let y_plane = nv12_data[..y_size].to_vec(); - let uv_plane = nv12_data[y_size..].to_vec(); - - Ok(DecodedNv12Frame { - y_plane, - uv_plane, - y_linesize: width, - uv_linesize: width, - width, - height, - }) - } - - /// Get frame count - pub fn frame_count(&self) -> u64 { - self.frame_count - } - - /// Get current resolution from config - pub fn resolution(&self) -> Resolution { - self.config.resolution - } -} - -/// Libyuv-based MJPEG decoder for direct YUV420P output -/// -/// This decoder is optimized for software encoders (libvpx, libx265) that need YUV420P input. -/// It uses libyuv's MJPGToI420 to decode directly to I420/YUV420P format. -pub struct MjpegTurboDecoder { - /// Frame counter - frame_count: u64, -} - -impl MjpegTurboDecoder { - /// Create a new libyuv-based MJPEG decoder - pub fn new(resolution: Resolution) -> Result { - info!( - "Created libyuv MJPEG decoder for {}x{} (direct YUV420P output)", - resolution.width, resolution.height - ); - - Ok(Self { - frame_count: 0, - }) - } - - /// Decode MJPEG frame directly to YUV420P using libyuv - /// - /// This is the optimal path for software encoders that need YUV420P input. - /// libyuv handles all JPEG subsampling formats internally. 
- pub fn decode_to_yuv420p(&mut self, jpeg_data: &[u8]) -> Result { - if jpeg_data.len() < 2 || jpeg_data[0] != 0xFF || jpeg_data[1] != 0xD8 { - return Err(AppError::VideoError("Invalid JPEG data".to_string())); - } - - self.frame_count += 1; - - // Get JPEG dimensions - let (width, height) = libyuv::mjpeg_size(jpeg_data) - .map_err(|e| AppError::VideoError(format!("Failed to read MJPEG size: {}", e)))?; - - let y_size = (width * height) as usize; - let uv_size = y_size / 4; - let yuv420_size = y_size + uv_size * 2; - - let mut yuv_data = vec![0u8; yuv420_size]; - - libyuv::mjpeg_to_i420(jpeg_data, &mut yuv_data, width, height) - .map_err(|e| AppError::VideoError(format!("libyuv MJPEG→I420 failed: {}", e)))?; - - Ok(DecodedYuv420pFrame { - y_plane: yuv_data[..y_size].to_vec(), - u_plane: yuv_data[y_size..y_size + uv_size].to_vec(), - v_plane: yuv_data[y_size + uv_size..].to_vec(), - y_linesize: width, - u_linesize: width / 2, - v_linesize: width / 2, - width, - height, - }) - } - - /// Decode directly to packed YUV420P buffer using libyuv - /// - /// This uses libyuv's MJPGToI420 which handles all JPEG subsampling formats - /// and converts to I420 directly. - pub fn decode_to_yuv420p_buffer(&mut self, jpeg_data: &[u8], dst: &mut [u8]) -> Result { - if jpeg_data.len() < 2 || jpeg_data[0] != 0xFF || jpeg_data[1] != 0xD8 { - return Err(AppError::VideoError("Invalid JPEG data".to_string())); - } - - self.frame_count += 1; - - // Get JPEG dimensions from libyuv - let (width, height) = libyuv::mjpeg_size(jpeg_data) - .map_err(|e| AppError::VideoError(format!("Failed to read MJPEG size: {}", e)))?; - - let yuv420_size = (width * height * 3 / 2) as usize; - - if dst.len() < yuv420_size { - return Err(AppError::VideoError(format!( - "Buffer too small: {} < {}", dst.len(), yuv420_size - ))); - } - - // Decode MJPEG directly to I420 using libyuv - // libyuv handles all JPEG subsampling formats (4:2:0, 4:2:2, 4:4:4) internally - libyuv::mjpeg_to_i420(jpeg_data, &mut dst[..yuv420_size], width, height) - .map_err(|e| AppError::VideoError(format!("libyuv MJPEG→I420 failed: {}", e)))?; - - Ok(yuv420_size) - } - - /// Get frame count - pub fn frame_count(&self) -> u64 { - self.frame_count - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_decoder_creation() { - let config = MjpegVaapiDecoderConfig::default(); - match MjpegVaapiDecoder::new(config) { - Ok(decoder) => { - println!("Decoder created, hwaccel: {}", decoder.is_hwaccel_active()); - } - Err(e) => { - println!("Failed to create decoder: {}", e); - } - } - } -} diff --git a/src/video/decoder/mod.rs b/src/video/decoder/mod.rs index bb374a47..928feec9 100644 --- a/src/video/decoder/mod.rs +++ b/src/video/decoder/mod.rs @@ -1,11 +1,3 @@ //! Video decoder implementations //! -//! This module provides video decoding capabilities including: -//! - MJPEG VAAPI hardware decoding (outputs NV12) -//! - MJPEG turbojpeg decoding (outputs YUV420P directly) - -pub mod mjpeg; - -pub use mjpeg::{ - DecodedYuv420pFrame, MjpegTurboDecoder, MjpegVaapiDecoder, MjpegVaapiDecoderConfig, -}; +//! This module provides video decoding capabilities. 
diff --git a/src/video/encoder/codec.rs b/src/video/encoder/codec.rs
index 487d8bcf..1b646dae 100644
--- a/src/video/encoder/codec.rs
+++ b/src/video/encoder/codec.rs
@@ -355,7 +355,7 @@ mod tests {
     fn test_codec_config_default() {
         let config = VideoCodecConfig::default();
         assert_eq!(config.codec, VideoCodecType::H264);
-        assert_eq!(config.bitrate_kbps, 2000);
+        assert_eq!(config.bitrate_kbps, 8000);
         assert_eq!(config.fps, 30);
     }

diff --git a/src/video/h264_pipeline.rs b/src/video/h264_pipeline.rs
index 9dbd1dc0..6c6c20b5 100644
--- a/src/video/h264_pipeline.rs
+++ b/src/video/h264_pipeline.rs
@@ -13,7 +13,6 @@ use tracing::{debug, error, info, warn};

 use crate::error::{AppError, Result};
 use crate::video::convert::Nv12Converter;
-use crate::video::decoder::mjpeg::{MjpegVaapiDecoder, MjpegVaapiDecoderConfig};
 use crate::video::encoder::h264::{H264Config, H264Encoder};
 use crate::video::format::{PixelFormat, Resolution};
 use crate::webrtc::rtp::{H264VideoTrack, H264VideoTrackConfig};
@@ -79,8 +78,6 @@ pub struct H264Pipeline {
     encoder: Arc<Mutex<Option<H264Encoder>>>,
     /// NV12 converter (for BGR24/RGB24/YUYV → NV12)
     nv12_converter: Arc<Mutex<Option<Nv12Converter>>>,
-    /// MJPEG VAAPI decoder (for MJPEG input, outputs NV12)
-    mjpeg_decoder: Arc<Mutex<Option<MjpegVaapiDecoder>>>,
     /// WebRTC video track
     video_track: Arc<H264VideoTrack>,
     /// Pipeline statistics
@@ -127,44 +124,38 @@ impl H264Pipeline {
             encoder_input_format
         );

-        // Create NV12 converter or MJPEG decoder based on input format
+        // Create NV12 converter based on input format
         // All formats are converted to NV12 for VAAPI encoder
-        let (nv12_converter, mjpeg_decoder) = match config.input_format {
+        let nv12_converter = match config.input_format {
             // NV12 input - direct passthrough
             PixelFormat::Nv12 => {
                 info!("NV12 input: direct passthrough to encoder");
-                (None, None)
+                None
             }

             // YUYV (4:2:2 packed) → NV12
             PixelFormat::Yuyv => {
                 info!("YUYV input: converting to NV12");
-                (Some(Nv12Converter::yuyv_to_nv12(config.resolution)), None)
+                Some(Nv12Converter::yuyv_to_nv12(config.resolution))
             }

             // RGB24 → NV12
             PixelFormat::Rgb24 => {
                 info!("RGB24 input: converting to NV12");
-                (Some(Nv12Converter::rgb24_to_nv12(config.resolution)), None)
+                Some(Nv12Converter::rgb24_to_nv12(config.resolution))
             }

             // BGR24 → NV12
             PixelFormat::Bgr24 => {
                 info!("BGR24 input: converting to NV12");
-                (Some(Nv12Converter::bgr24_to_nv12(config.resolution)), None)
+                Some(Nv12Converter::bgr24_to_nv12(config.resolution))
             }

-            // MJPEG/JPEG → NV12 (via hwcodec decoder)
+            // MJPEG/JPEG input - not supported (requires libjpeg for decoding)
             PixelFormat::Mjpeg | PixelFormat::Jpeg => {
-                let decoder_config = MjpegVaapiDecoderConfig {
-                    resolution: config.resolution,
-                    use_hwaccel: true,
-                };
-                let decoder = MjpegVaapiDecoder::new(decoder_config)?;
-                info!(
-                    "MJPEG decoder created for H264 pipeline (outputs NV12)"
-                );
-                (None, Some(decoder))
+                return Err(AppError::VideoError(
+                    "MJPEG input format not supported in this build".to_string()
+                ));
             }

             _ => {
@@ -192,7 +183,6 @@ impl H264Pipeline {
             config,
             encoder: Arc::new(Mutex::new(Some(encoder))),
             nv12_converter: Arc::new(Mutex::new(nv12_converter)),
-            mjpeg_decoder: Arc::new(Mutex::new(mjpeg_decoder)),
             video_track,
             stats: Arc::new(Mutex::new(H264PipelineStats::default())),
             running: running_tx,
@@ -230,7 +220,6 @@ impl H264Pipeline {

         let encoder = self.encoder.lock().await.take();
         let nv12_converter = self.nv12_converter.lock().await.take();
-        let mjpeg_decoder = self.mjpeg_decoder.lock().await.take();
         let video_track = self.video_track.clone();
         let stats = self.stats.clone();
         let encode_times = self.encode_times.clone();
@@ 
-248,15 +237,10 @@ impl H264Pipeline { }; let mut nv12_converter = nv12_converter; - let mut mjpeg_decoder = mjpeg_decoder; let mut frame_count: u64 = 0; let mut last_fps_time = Instant::now(); let mut fps_frame_count: u64 = 0; - // Pre-allocated NV12 buffer for MJPEG decoder output (avoids per-frame allocation) - let nv12_size = (config.resolution.width * config.resolution.height * 3 / 2) as usize; - let mut nv12_buffer = vec![0u8; nv12_size]; - // Flag for one-time warnings let mut size_mismatch_warned = false; @@ -298,7 +282,6 @@ impl H264Pipeline { } // Convert to NV12 for VAAPI encoder - // MJPEG -> NV12 (via VAAPI decoder) // BGR24/RGB24/YUYV -> NV12 (via NV12 converter) // NV12 -> pass through // @@ -307,36 +290,7 @@ impl H264Pipeline { fps_frame_count += 1; let pts_ms = (frame_count * 1000 / config.fps as u64) as i64; - let encode_result = if let Some(ref mut decoder) = mjpeg_decoder { - // MJPEG input - decode to NV12 via VAAPI - match decoder.decode(&raw_frame) { - Ok(nv12_frame) => { - // Calculate required size for this frame - let required_size = (nv12_frame.width * nv12_frame.height * 3 / 2) as usize; - - // Resize buffer if needed (handles resolution changes) - if nv12_buffer.len() < required_size { - debug!( - "Resizing NV12 buffer: {} -> {} bytes (resolution: {}x{})", - nv12_buffer.len(), required_size, - nv12_frame.width, nv12_frame.height - ); - nv12_buffer.resize(required_size, 0); - } - - // Copy to pre-allocated buffer (guaranteed to fit after resize) - let written = nv12_frame.copy_to_packed_nv12(&mut nv12_buffer) - .expect("BUG: buffer too small after resize"); - encoder.encode_raw(&nv12_buffer[..written], pts_ms) - } - Err(e) => { - error!("MJPEG VAAPI decode failed: {}", e); - let mut s = stats.lock().await; - s.errors += 1; - continue; - } - } - } else if let Some(ref mut conv) = nv12_converter { + let encode_result = if let Some(ref mut conv) = nv12_converter { // BGR24/RGB24/YUYV input - convert to NV12 // Optimized: pass reference directly without copy match conv.convert(&raw_frame) { @@ -518,7 +472,7 @@ mod tests { fn test_pipeline_config_default() { let config = H264PipelineConfig::default(); assert_eq!(config.resolution, Resolution::HD720); - assert_eq!(config.bitrate_kbps, 2000); + assert_eq!(config.bitrate_kbps, 8000); assert_eq!(config.fps, 30); assert_eq!(config.gop_size, 30); } diff --git a/src/video/mod.rs b/src/video/mod.rs index f9cdae47..d024e1ce 100644 --- a/src/video/mod.rs +++ b/src/video/mod.rs @@ -16,8 +16,7 @@ pub mod streamer; pub mod video_session; pub use capture::VideoCapturer; -pub use convert::{MjpegDecoder, MjpegToYuv420Converter, PixelConverter, Yuv420pBuffer}; -pub use decoder::{MjpegVaapiDecoder, MjpegVaapiDecoderConfig}; +pub use convert::{PixelConverter, Yuv420pBuffer}; pub use device::{VideoDevice, VideoDeviceInfo}; pub use encoder::{JpegEncoder, H264Encoder, H264EncoderType}; pub use format::PixelFormat; diff --git a/src/video/shared_video_pipeline.rs b/src/video/shared_video_pipeline.rs index 1c80065d..0d5b5699 100644 --- a/src/video/shared_video_pipeline.rs +++ b/src/video/shared_video_pipeline.rs @@ -28,7 +28,6 @@ const AUTO_STOP_GRACE_PERIOD_SECS: u64 = 3; use crate::error::{AppError, Result}; use crate::video::convert::{Nv12Converter, PixelConverter}; -use crate::video::decoder::mjpeg::{MjpegTurboDecoder, MjpegVaapiDecoder, MjpegVaapiDecoderConfig}; use crate::video::encoder::h264::{H264Config, H264Encoder}; use crate::video::encoder::h265::{H265Config, H265Encoder}; use crate::video::encoder::registry::{EncoderBackend, 
EncoderRegistry, VideoEncoderType}; @@ -298,12 +297,6 @@ pub struct SharedVideoPipeline { encoder: Mutex>>, nv12_converter: Mutex>, yuv420p_converter: Mutex>, - mjpeg_decoder: Mutex>, - /// Turbojpeg decoder for direct MJPEG->YUV420P (optimized for software encoders) - mjpeg_turbo_decoder: Mutex>, - nv12_buffer: Mutex>, - /// YUV420P buffer for turbojpeg decoder output - yuv420p_buffer: Mutex>, /// Whether the encoder needs YUV420P (true) or NV12 (false) encoder_needs_yuv420p: AtomicBool, /// Whether YUYV direct input is enabled (RKMPP optimization) @@ -335,18 +328,12 @@ impl SharedVideoPipeline { let (frame_tx, _) = broadcast::channel(16); // Reduced from 64 for lower latency let (running_tx, running_rx) = watch::channel(false); - let nv12_size = (config.resolution.width * config.resolution.height * 3 / 2) as usize; - let yuv420p_size = nv12_size; // Same size as NV12 let pipeline = Arc::new(Self { config: RwLock::new(config), encoder: Mutex::new(None), nv12_converter: Mutex::new(None), yuv420p_converter: Mutex::new(None), - mjpeg_decoder: Mutex::new(None), - mjpeg_turbo_decoder: Mutex::new(None), - nv12_buffer: Mutex::new(vec![0u8; nv12_size]), - yuv420p_buffer: Mutex::new(vec![0u8; yuv420p_size]), encoder_needs_yuv420p: AtomicBool::new(false), yuyv_direct_input: AtomicBool::new(false), frame_tx, @@ -505,42 +492,36 @@ impl SharedVideoPipeline { config.input_format, if use_yuyv_direct { "YUYV422 (direct)" } else if needs_yuv420p { "YUV420P" } else { "NV12" }); - let (nv12_converter, yuv420p_converter, mjpeg_decoder, mjpeg_turbo_decoder) = if use_yuyv_direct { + let (nv12_converter, yuv420p_converter) = if use_yuyv_direct { // RKMPP with YUYV direct input - skip all conversion info!("YUYV direct input enabled for RKMPP, skipping format conversion"); - (None, None, None, None) + (None, None) } else if needs_yuv420p { // Software encoder needs YUV420P match config.input_format { PixelFormat::Yuv420 => { info!("Using direct YUV420P input (no conversion)"); - (None, None, None, None) + (None, None) } PixelFormat::Yuyv => { info!("Using YUYV->YUV420P converter"); - (None, Some(PixelConverter::yuyv_to_yuv420p(config.resolution)), None, None) + (None, Some(PixelConverter::yuyv_to_yuv420p(config.resolution))) } PixelFormat::Nv12 => { info!("Using NV12->YUV420P converter"); - (None, Some(PixelConverter::nv12_to_yuv420p(config.resolution)), None, None) + (None, Some(PixelConverter::nv12_to_yuv420p(config.resolution))) } PixelFormat::Rgb24 => { info!("Using RGB24->YUV420P converter"); - (None, Some(PixelConverter::rgb24_to_yuv420p(config.resolution)), None, None) + (None, Some(PixelConverter::rgb24_to_yuv420p(config.resolution))) } PixelFormat::Bgr24 => { info!("Using BGR24->YUV420P converter"); - (None, Some(PixelConverter::bgr24_to_yuv420p(config.resolution)), None, None) - } - PixelFormat::Mjpeg | PixelFormat::Jpeg => { - // Use turbojpeg for direct MJPEG->YUV420P (no intermediate NV12) - info!("Using turbojpeg MJPEG decoder (direct YUV420P output)"); - let turbo_decoder = MjpegTurboDecoder::new(config.resolution)?; - (None, None, None, Some(turbo_decoder)) + (None, Some(PixelConverter::bgr24_to_yuv420p(config.resolution))) } _ => { return Err(AppError::VideoError(format!( - "Unsupported input format: {}", + "Unsupported input format for software encoding: {}", config.input_format ))); } @@ -550,32 +531,23 @@ impl SharedVideoPipeline { match config.input_format { PixelFormat::Nv12 => { info!("Using direct NV12 input (no conversion)"); - (None, None, None, None) + (None, None) } PixelFormat::Yuyv 
=> { info!("Using YUYV->NV12 converter"); - (Some(Nv12Converter::yuyv_to_nv12(config.resolution)), None, None, None) + (Some(Nv12Converter::yuyv_to_nv12(config.resolution)), None) } PixelFormat::Rgb24 => { info!("Using RGB24->NV12 converter"); - (Some(Nv12Converter::rgb24_to_nv12(config.resolution)), None, None, None) + (Some(Nv12Converter::rgb24_to_nv12(config.resolution)), None) } PixelFormat::Bgr24 => { info!("Using BGR24->NV12 converter"); - (Some(Nv12Converter::bgr24_to_nv12(config.resolution)), None, None, None) - } - PixelFormat::Mjpeg | PixelFormat::Jpeg => { - info!("Using MJPEG decoder (NV12 output)"); - let decoder_config = MjpegVaapiDecoderConfig { - resolution: config.resolution, - use_hwaccel: true, - }; - let decoder = MjpegVaapiDecoder::new(decoder_config)?; - (None, None, Some(decoder), None) + (Some(Nv12Converter::bgr24_to_nv12(config.resolution)), None) } _ => { return Err(AppError::VideoError(format!( - "Unsupported input format: {}", + "Unsupported input format for hardware encoding: {}", config.input_format ))); } @@ -585,8 +557,6 @@ impl SharedVideoPipeline { *self.encoder.lock().await = Some(encoder); *self.nv12_converter.lock().await = nv12_converter; *self.yuv420p_converter.lock().await = yuv420p_converter; - *self.mjpeg_decoder.lock().await = mjpeg_decoder; - *self.mjpeg_turbo_decoder.lock().await = mjpeg_turbo_decoder; self.encoder_needs_yuv420p.store(needs_yuv420p, Ordering::Release); self.yuyv_direct_input.store(use_yuyv_direct, Ordering::Release); @@ -669,8 +639,6 @@ impl SharedVideoPipeline { *self.encoder.lock().await = None; *self.nv12_converter.lock().await = None; *self.yuv420p_converter.lock().await = None; - *self.mjpeg_decoder.lock().await = None; - *self.mjpeg_turbo_decoder.lock().await = None; self.encoder_needs_yuv420p.store(false, Ordering::Release); info!("Switched to {} codec", codec); @@ -862,8 +830,6 @@ impl SharedVideoPipeline { ); } - let mut mjpeg_decoder = self.mjpeg_decoder.lock().await; - let mut mjpeg_turbo_decoder = self.mjpeg_turbo_decoder.lock().await; let mut nv12_converter = self.nv12_converter.lock().await; let mut yuv420p_converter = self.yuv420p_converter.lock().await; let needs_yuv420p = self.encoder_needs_yuv420p.load(Ordering::Acquire); @@ -879,38 +845,7 @@ impl SharedVideoPipeline { debug!("[Pipeline] Keyframe will be generated for this frame"); } - let encode_result = if mjpeg_turbo_decoder.is_some() { - // Optimized path: MJPEG -> YUV420P directly via turbojpeg (for software encoders) - let turbo = mjpeg_turbo_decoder.as_mut().unwrap(); - let mut yuv420p_buffer = self.yuv420p_buffer.lock().await; - let written = turbo.decode_to_yuv420p_buffer(raw_frame, &mut yuv420p_buffer) - .map_err(|e| AppError::VideoError(format!("turbojpeg decode failed: {}", e)))?; - encoder.encode_raw(&yuv420p_buffer[..written], pts_ms) - } else if mjpeg_decoder.is_some() { - // MJPEG input: decode to NV12 (for hardware encoders) - let decoder = mjpeg_decoder.as_mut().unwrap(); - let nv12_frame = decoder.decode(raw_frame) - .map_err(|e| AppError::VideoError(format!("MJPEG decode failed: {}", e)))?; - - let required_size = (nv12_frame.width * nv12_frame.height * 3 / 2) as usize; - let mut nv12_buffer = self.nv12_buffer.lock().await; - if nv12_buffer.len() < required_size { - nv12_buffer.resize(required_size, 0); - } - - let written = nv12_frame.copy_to_packed_nv12(&mut nv12_buffer) - .expect("Buffer too small"); - - // Debug log for H265 after MJPEG decode - if codec == VideoEncoderType::H265 && frame_count % 30 == 1 { - debug!( - "[Pipeline-H265] 
MJPEG decoded: nv12_size={}, frame_width={}, frame_height={}", - written, nv12_frame.width, nv12_frame.height - ); - } - - encoder.encode_raw(&nv12_buffer[..written], pts_ms) - } else if needs_yuv420p && yuv420p_converter.is_some() { + let encode_result = if needs_yuv420p && yuv420p_converter.is_some() { // Software encoder with direct input conversion to YUV420P let conv = yuv420p_converter.as_mut().unwrap(); let yuv420p_data = conv.convert(raw_frame) @@ -930,8 +865,6 @@ impl SharedVideoPipeline { drop(encoder_guard); drop(nv12_converter); drop(yuv420p_converter); - drop(mjpeg_decoder); - drop(mjpeg_turbo_decoder); match encode_result { Ok(frames) => { diff --git a/src/web/audio_ws.rs b/src/web/audio_ws.rs index 44e200f0..db9f60f0 100644 --- a/src/web/audio_ws.rs +++ b/src/web/audio_ws.rs @@ -57,7 +57,7 @@ async fn handle_audio_socket(socket: WebSocket, state: Arc) { // Send error message before closing let _ = sender .send(Message::Text( - r#"{"error": "Audio not streaming"}"#.to_string(), + r#"{"error": "Audio not streaming"}"#.to_string().into(), )) .await; return; @@ -83,7 +83,7 @@ async fn handle_audio_socket(socket: WebSocket, state: Arc) { match opus_result { Ok(frame) => { let binary = encode_audio_packet(&frame, stream_start); - if sender.send(Message::Binary(binary)).await.is_err() { + if sender.send(Message::Binary(binary.into())).await.is_err() { debug!("Failed to send audio frame, client disconnected"); break; } @@ -133,7 +133,7 @@ async fn handle_audio_socket(socket: WebSocket, state: Arc) { // Periodic ping to keep connection alive (using interval) _ = ping_interval.tick() => { - if sender.send(Message::Ping(vec![])).await.is_err() { + if sender.send(Message::Ping(vec![].into())).await.is_err() { warn!("Failed to send ping, disconnecting"); break; } diff --git a/src/web/handlers/terminal.rs b/src/web/handlers/terminal.rs index 5fd858cd..67feb0b5 100644 --- a/src/web/handlers/terminal.rs +++ b/src/web/handlers/terminal.rs @@ -82,7 +82,7 @@ async fn handle_terminal_websocket(client_ws: WebSocket, query_string: String) { let client_to_ttyd = tokio::spawn(async move { while let Some(msg) = client_rx.next().await { let ttyd_msg = match msg { - Ok(AxumMessage::Text(text)) => TungsteniteMessage::Text(text), + Ok(AxumMessage::Text(text)) => TungsteniteMessage::Text(text.to_string().into()), Ok(AxumMessage::Binary(data)) => TungsteniteMessage::Binary(data), Ok(AxumMessage::Ping(data)) => TungsteniteMessage::Ping(data), Ok(AxumMessage::Pong(data)) => TungsteniteMessage::Pong(data), @@ -103,7 +103,7 @@ async fn handle_terminal_websocket(client_ws: WebSocket, query_string: String) { let ttyd_to_client = tokio::spawn(async move { while let Some(msg) = ttyd_rx.next().await { let client_msg = match msg { - Ok(TungsteniteMessage::Text(text)) => AxumMessage::Text(text), + Ok(TungsteniteMessage::Text(text)) => AxumMessage::Text(text.to_string().into()), Ok(TungsteniteMessage::Binary(data)) => AxumMessage::Binary(data), Ok(TungsteniteMessage::Ping(data)) => AxumMessage::Ping(data), Ok(TungsteniteMessage::Pong(data)) => AxumMessage::Pong(data), diff --git a/src/web/routes.rs b/src/web/routes.rs index c8a901c6..74200187 100644 --- a/src/web/routes.rs +++ b/src/web/routes.rs @@ -70,7 +70,7 @@ pub fn create_router(state: Arc) -> Router { // Audio WebSocket endpoint .route("/ws/audio", any(audio_ws_handler)) // User can change their own password (handler will check ownership) - .route("/users/:id/password", post(handlers::change_user_password)); + .route("/users/{id}/password", 
post(handlers::change_user_password)); // Admin-only routes (require admin privileges) let admin_routes = Router::new() @@ -106,8 +106,8 @@ pub fn create_router(state: Arc) -> Router { .route("/msd/images", get(handlers::msd_images_list)) .route("/msd/images/download", post(handlers::msd_image_download)) .route("/msd/images/download/cancel", post(handlers::msd_image_download_cancel)) - .route("/msd/images/:id", get(handlers::msd_image_get)) - .route("/msd/images/:id", delete(handlers::msd_image_delete)) + .route("/msd/images/{id}", get(handlers::msd_image_get)) + .route("/msd/images/{id}", delete(handlers::msd_image_delete)) .route("/msd/connect", post(handlers::msd_connect)) .route("/msd/disconnect", post(handlers::msd_disconnect)) // MSD Virtual Drive endpoints @@ -115,9 +115,9 @@ pub fn create_router(state: Arc) -> Router { .route("/msd/drive", delete(handlers::msd_drive_delete)) .route("/msd/drive/init", post(handlers::msd_drive_init)) .route("/msd/drive/files", get(handlers::msd_drive_files)) - .route("/msd/drive/files/*path", get(handlers::msd_drive_download)) - .route("/msd/drive/files/*path", delete(handlers::msd_drive_file_delete)) - .route("/msd/drive/mkdir/*path", post(handlers::msd_drive_mkdir)) + .route("/msd/drive/files/{*path}", get(handlers::msd_drive_download)) + .route("/msd/drive/files/{*path}", delete(handlers::msd_drive_file_delete)) + .route("/msd/drive/mkdir/{*path}", post(handlers::msd_drive_mkdir)) // ATX (Power Control) endpoints .route("/atx/status", get(handlers::atx_status)) .route("/atx/power", post(handlers::atx_power)) @@ -127,14 +127,14 @@ pub fn create_router(state: Arc) -> Router { // User management endpoints .route("/users", get(handlers::list_users)) .route("/users", post(handlers::create_user)) - .route("/users/:id", put(handlers::update_user)) - .route("/users/:id", delete(handlers::delete_user)) + .route("/users/{id}", put(handlers::update_user)) + .route("/users/{id}", delete(handlers::delete_user)) // Extension management endpoints .route("/extensions", get(handlers::extensions::list_extensions)) - .route("/extensions/:id", get(handlers::extensions::get_extension)) - .route("/extensions/:id/start", post(handlers::extensions::start_extension)) - .route("/extensions/:id/stop", post(handlers::extensions::stop_extension)) - .route("/extensions/:id/logs", get(handlers::extensions::get_extension_logs)) + .route("/extensions/{id}", get(handlers::extensions::get_extension)) + .route("/extensions/{id}/start", post(handlers::extensions::start_extension)) + .route("/extensions/{id}/stop", post(handlers::extensions::stop_extension)) + .route("/extensions/{id}/logs", get(handlers::extensions::get_extension_logs)) .route("/extensions/ttyd/config", patch(handlers::extensions::update_ttyd_config)) .route("/extensions/ttyd/status", get(handlers::extensions::get_ttyd_status)) .route("/extensions/gostc/config", patch(handlers::extensions::update_gostc_config)) @@ -143,7 +143,7 @@ pub fn create_router(state: Arc) -> Router { .route("/terminal", get(handlers::terminal::terminal_index)) .route("/terminal/", get(handlers::terminal::terminal_index)) .route("/terminal/ws", get(handlers::terminal::terminal_ws)) - .route("/terminal/*path", get(handlers::terminal::terminal_proxy)) + .route("/terminal/{*path}", get(handlers::terminal::terminal_proxy)) // Apply admin middleware to all admin routes .layer(middleware::from_fn_with_state(state.clone(), require_admin)); diff --git a/src/web/static_files.rs b/src/web/static_files.rs index 26ba82c3..cbe78c06 100644 --- 
a/src/web/static_files.rs +++ b/src/web/static_files.rs @@ -45,7 +45,7 @@ where { Router::new() .route("/", get(index_handler)) - .route("/*path", get(static_handler)) + .route("/{*path}", get(static_handler)) } /// Serve index.html for root path diff --git a/src/web/ws.rs b/src/web/ws.rs index ffcb8cd5..38ede8c0 100644 --- a/src/web/ws.rs +++ b/src/web/ws.rs @@ -79,7 +79,7 @@ async fn handle_socket(socket: WebSocket, state: Arc) { if !device_info_sent && !subscribed_topics.is_empty() { let device_info = state.get_device_info().await; if let Ok(json) = serialize_event(&device_info) { - if sender.send(Message::Text(json)).await.is_err() { + if sender.send(Message::Text(json.into())).await.is_err() { warn!("Failed to send device info to client"); break; } @@ -113,7 +113,7 @@ async fn handle_socket(socket: WebSocket, state: Arc) { // Filter event based on subscribed topics if should_send_event(&event, &subscribed_topics) { if let Ok(json) = serialize_event(&event) { - if sender.send(Message::Text(json)).await.is_err() { + if sender.send(Message::Text(json.into())).await.is_err() { warn!("Failed to send event to client, disconnecting"); break; } @@ -127,7 +127,7 @@ async fn handle_socket(socket: WebSocket, state: Arc) { message: format!("Lagged by {} events", n), }; if let Ok(json) = serialize_event(&error_event) { - let _ = sender.send(Message::Text(json)).await; + let _ = sender.send(Message::Text(json.into())).await; } } Err(_) => { @@ -139,7 +139,7 @@ async fn handle_socket(socket: WebSocket, state: Arc) { // Heartbeat _ = heartbeat_interval.tick() => { - if sender.send(Message::Ping(vec![])).await.is_err() { + if sender.send(Message::Ping(vec![].into())).await.is_err() { warn!("Failed to send ping, disconnecting"); break; } diff --git a/src/webrtc/h265_payloader.rs b/src/webrtc/h265_payloader.rs index a60f46f3..628757de 100644 --- a/src/webrtc/h265_payloader.rs +++ b/src/webrtc/h265_payloader.rs @@ -397,7 +397,7 @@ mod tests { #[test] fn test_verify_with_rtp_depacketizer() { - use rtp::codecs::h265::{H265Packet, H265Payload, H265FragmentationUnitPacket}; + use rtp::codecs::h265::{H265Packet, H265Payload}; use rtp::packetizer::Depacketizer; let mut payloader = H265Payloader::new(); diff --git a/src/webrtc/rtp.rs b/src/webrtc/rtp.rs index 6113c5b9..325feb47 100644 --- a/src/webrtc/rtp.rs +++ b/src/webrtc/rtp.rs @@ -731,7 +731,7 @@ mod tests { fn test_h264_track_config_default() { let config = H264VideoTrackConfig::default(); assert_eq!(config.fps, 30); - assert_eq!(config.bitrate_kbps, 2000); + assert_eq!(config.bitrate_kbps, 8000); assert_eq!(config.resolution, Resolution::HD720); } } diff --git a/src/webrtc/webrtc_streamer.rs b/src/webrtc/webrtc_streamer.rs index 51409717..7dda63bf 100644 --- a/src/webrtc/webrtc_streamer.rs +++ b/src/webrtc/webrtc_streamer.rs @@ -1058,7 +1058,7 @@ mod tests { let config = WebRtcStreamerConfig::default(); assert_eq!(config.video_codec, VideoCodecType::H264); assert_eq!(config.resolution, Resolution::HD720); - assert_eq!(config.bitrate_preset, BitratePreset::Quality); + assert_eq!(config.bitrate_preset, BitratePreset::Balanced); assert_eq!(config.fps, 30); assert!(!config.audio_enabled); } diff --git a/web/src/views/SettingsView.vue b/web/src/views/SettingsView.vue index 087289e6..b0d13853 100644 --- a/web/src/views/SettingsView.vue +++ b/web/src/views/SettingsView.vue @@ -245,12 +245,6 @@ const config = ref({ // 跟踪服务器是否已配置 TURN 密码 const hasTurnPassword = ref(false) -// 跟踪公共 ICE 服务器状态 -const hasPublicIceServers = ref(false) -const usingPublicIceServers = 
computed(() => { - return !config.value.stun_server && !config.value.turn_server && hasPublicIceServers.value -}) - // OTG Descriptor settings const otgVendorIdHex = ref('1d6b') const otgProductIdHex = ref('0104') @@ -554,9 +548,6 @@ async function loadConfig() { // 设置是否已配置 TURN 密码 hasTurnPassword.value = stream.has_turn_password || false - // 设置公共 ICE 服务器状态 - hasPublicIceServers.value = stream.has_public_ice_servers || false - // 加载 OTG 描述符配置 if (hid.otg_descriptor) { otgVendorIdHex.value = hid.otg_descriptor.vendor_id?.toString(16).padStart(4, '0') || '1d6b' @@ -1261,9 +1252,6 @@ onMounted(async () => { :placeholder="t('settings.stunServerPlaceholder')" />

{{ t('settings.stunServerHint') }}

-

- {{ t('settings.usingPublicIceServers') }} -