mirror of https://github.com/mofeng-git/One-KVM.git
synced 2026-01-28 16:41:52 +08:00

refactor(hwcodec): trim the FFmpeg build configuration and remove the decoder

- Slim down the FFmpeg configure options, disabling unneeded libraries (avformat/swscale/swresample/avfilter, etc.)
- Disable all decoders and most encoders, keeping only the H264/H265/VP8/VP9 encoders that are actually used
- Remove the hwcodec decoder module; MJPEG decoding now uses libyuv
- Remove MJPEG encoder support
- Add libmfx on x86_64 to support the QSV encoders
- Fix the H265 RKMPP encoder to accept YUYV input directly
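The libyuv-based MJPEG path replaces the removed hwcodec decoder. A minimal sketch of that path, using the wrapper functions introduced later in this diff (libyuv::mjpeg_size, libyuv::mjpeg_to_i420, libyuv::i420_to_nv12); the module paths, i32 dimensions, and error type follow the project's mjpeg.rs, and the helper function itself is invented for illustration:

```rust
use crate::error::{AppError, Result};

// Hypothetical helper mirroring the new decode path: MJPEG -> I420 -> NV12.
fn mjpeg_to_nv12(jpeg_data: &[u8]) -> Result<(Vec<u8>, i32, i32)> {
    // Read the JPEG dimensions without fully decoding.
    let (width, height) = libyuv::mjpeg_size(jpeg_data)
        .map_err(|e| AppError::VideoError(format!("Failed to read MJPEG size: {}", e)))?;

    // Decode to planar I420 (4:2:0), then interleave chroma into NV12 for the hardware encoders.
    let y_size = (width * height) as usize;
    let mut i420 = vec![0u8; y_size * 3 / 2];
    libyuv::mjpeg_to_i420(jpeg_data, &mut i420, width, height)
        .map_err(|e| AppError::VideoError(format!("libyuv MJPEG→I420 failed: {}", e)))?;

    let mut nv12 = vec![0u8; y_size * 3 / 2];
    libyuv::i420_to_nv12(&i420, &mut nv12, width, height)
        .map_err(|e| AppError::VideoError(format!("libyuv I420→NV12 failed: {}", e)))?;

    Ok((nv12, width, height))
}
```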
@@ -107,7 +107,7 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \
     && echo 'export PKG_CONFIG_SYSROOT_DIR=""' >> /tmp/aarch64-pkg-config \
     && echo 'exec pkg-config "$@"' >> /tmp/aarch64-pkg-config \
     && chmod +x /tmp/aarch64-pkg-config \
-    # Build FFmpeg with RKMPP
+    # Build FFmpeg with RKMPP (minimal build for encoding only)
     && cd ffmpeg-rockchip \
     && ./configure \
         --prefix=/usr/aarch64-linux-gnu \
@@ -120,26 +120,69 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \
         --enable-version3 \
         --enable-shared \
         --disable-static \
+        # Hardware acceleration
         --enable-libdrm \
         --enable-rkmpp \
         --enable-rkrga \
-        --enable-libv4l2 \
+        --enable-vaapi \
+        --enable-v4l2-m2m \
+        # Software encoding libraries
         --enable-libx264 \
         --enable-libx265 \
         --enable-libvpx \
-        --enable-vaapi \
-        --enable-v4l2-m2m \
+        # Disable programs and docs
         --disable-programs \
         --disable-doc \
         --disable-htmlpages \
         --disable-manpages \
         --disable-podpages \
         --disable-txtpages \
+        # Disable network
         --disable-network \
         --disable-protocols \
+        # Disable unused libraries
+        --disable-avformat \
+        --disable-swscale \
+        --disable-swresample \
+        --disable-avfilter \
+        --disable-avdevice \
+        --disable-postproc \
+        # Disable all decoders
+        --disable-decoders \
+        # Disable all encoders, enable only needed ones
+        --disable-encoders \
+        --enable-encoder=h264_vaapi \
+        --enable-encoder=hevc_vaapi \
+        --enable-encoder=vp8_vaapi \
+        --enable-encoder=vp9_vaapi \
+        --enable-encoder=h264_rkmpp \
+        --enable-encoder=hevc_rkmpp \
+        --enable-encoder=h264_v4l2m2m \
+        --enable-encoder=hevc_v4l2m2m \
+        --enable-encoder=libx264 \
+        --enable-encoder=libx265 \
+        --enable-encoder=libvpx_vp8 \
+        --enable-encoder=libvpx_vp9 \
+        # Disable muxers/demuxers
+        --disable-muxers \
+        --disable-demuxers \
+        # Disable parsers except needed ones
+        --disable-parsers \
+        --enable-parser=h264 \
+        --enable-parser=hevc \
+        --enable-parser=vp8 \
+        --enable-parser=vp9 \
+        # Disable BSFs except needed ones
+        --disable-bsfs \
+        --enable-bsf=h264_mp4toannexb \
+        --enable-bsf=hevc_mp4toannexb \
+        # Disable hardware decoding
+        --disable-hwaccels \
+        # Disable other unused features
+        --disable-indevs \
+        --disable-outdevs \
+        --disable-filters \
        --disable-debug \
-        --disable-decoder=mjpeg \
-        --disable-decoder=mjpegb \
     && make -j$(nproc) \
     && make install \
     && cd / \

@@ -107,7 +107,7 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \
     && echo 'export PKG_CONFIG_SYSROOT_DIR=""' >> /tmp/armhf-pkg-config \
     && echo 'exec pkg-config "$@"' >> /tmp/armhf-pkg-config \
     && chmod +x /tmp/armhf-pkg-config \
-    # Build FFmpeg with RKMPP
+    # Build FFmpeg with RKMPP (minimal build for encoding only)
     && cd ffmpeg-rockchip \
     && ./configure \
         --prefix=/usr/arm-linux-gnueabihf \
@@ -120,26 +120,69 @@ RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \
         --enable-version3 \
         --enable-shared \
         --disable-static \
+        # Hardware acceleration
        --enable-libdrm \
         --enable-rkmpp \
         --enable-rkrga \
-        --enable-libv4l2 \
+        --enable-vaapi \
+        --enable-v4l2-m2m \
+        # Software encoding libraries
         --enable-libx264 \
         --enable-libx265 \
         --enable-libvpx \
-        --enable-vaapi \
-        --enable-v4l2-m2m \
+        # Disable programs and docs
         --disable-programs \
         --disable-doc \
         --disable-htmlpages \
         --disable-manpages \
         --disable-podpages \
         --disable-txtpages \
+        # Disable network
         --disable-network \
         --disable-protocols \
+        # Disable unused libraries
+        --disable-avformat \
+        --disable-swscale \
+        --disable-swresample \
+        --disable-avfilter \
+        --disable-avdevice \
+        --disable-postproc \
+        # Disable all decoders
+        --disable-decoders \
+        # Disable all encoders, enable only needed ones
+        --disable-encoders \
+        --enable-encoder=h264_vaapi \
+        --enable-encoder=hevc_vaapi \
+        --enable-encoder=vp8_vaapi \
+        --enable-encoder=vp9_vaapi \
+        --enable-encoder=h264_rkmpp \
+        --enable-encoder=hevc_rkmpp \
+        --enable-encoder=h264_v4l2m2m \
+        --enable-encoder=hevc_v4l2m2m \
+        --enable-encoder=libx264 \
+        --enable-encoder=libx265 \
+        --enable-encoder=libvpx_vp8 \
+        --enable-encoder=libvpx_vp9 \
+        # Disable muxers/demuxers
+        --disable-muxers \
+        --disable-demuxers \
+        # Disable parsers except needed ones
+        --disable-parsers \
+        --enable-parser=h264 \
+        --enable-parser=hevc \
+        --enable-parser=vp8 \
+        --enable-parser=vp9 \
+        # Disable BSFs except needed ones
+        --disable-bsfs \
+        --enable-bsf=h264_mp4toannexb \
+        --enable-bsf=hevc_mp4toannexb \
+        # Disable hardware decoding
+        --disable-hwaccels \
+        # Disable other unused features
+        --disable-indevs \
+        --disable-outdevs \
+        --disable-filters \
         --disable-debug \
-        --disable-decoder=mjpeg \
-        --disable-decoder=mjpegb \
     && make -j$(nproc) \
     && make install \
     && cd / \

@@ -16,19 +16,21 @@ RUN apt-get update && apt-get install -y --no-install-recommends \

 ENV PATH="/root/.cargo/bin:${PATH}"

-# Install build dependencies (same as runtime Debian 12)
+# Install build dependencies
 RUN apt-get update && apt-get install -y --no-install-recommends \
     # Build tools
     build-essential \
     pkg-config \
     cmake \
     nasm \
+    yasm \
     git \
     libclang-dev \
     llvm \
     protobuf-compiler \
     libssl-dev \
     mold \
+    wget \
     # Core system libraries
     libasound2-dev \
     libv4l-dev \
@@ -37,13 +39,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     # Video/image processing
     libjpeg62-turbo-dev \
     libyuv-dev \
-    # FFmpeg and codecs
-    libavcodec-dev \
-    libavformat-dev \
-    libavutil-dev \
-    libswscale-dev \
-    libswresample-dev \
-    # Video codec libraries
+    # Video codec libraries (for FFmpeg build)
     libx264-dev \
     libx265-dev \
     libvpx-dev \
@@ -60,8 +56,85 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     libxdmcp-dev \
     && rm -rf /var/lib/apt/lists/*

+# Download and build FFmpeg with minimal configuration for encoding only
+RUN mkdir -p /tmp/ffmpeg-build && cd /tmp/ffmpeg-build \
+    && wget -q https://files.mofeng.run/src/image/other/ffmpeg.tar.gz \
+    && tar -xzf ffmpeg.tar.gz \
+    && cd ffmpeg/ffmpeg-rockchip \
+    && ./configure \
+        --prefix=/usr/local \
+        --enable-gpl \
+        --enable-version3 \
+        --enable-shared \
+        --disable-static \
+        # Hardware acceleration
+        --enable-libdrm \
+        --enable-vaapi \
+        --enable-libmfx \
+        # Software encoding libraries
+        --enable-libx264 \
+        --enable-libx265 \
+        --enable-libvpx \
+        # Disable programs and docs
+        --disable-programs \
+        --disable-doc \
+        --disable-htmlpages \
+        --disable-manpages \
+        --disable-podpages \
+        --disable-txtpages \
+        # Disable network
+        --disable-network \
+        --disable-protocols \
+        # Disable unused libraries
+        --disable-avformat \
+        --disable-swscale \
+        --disable-swresample \
+        --disable-avfilter \
+        --disable-avdevice \
+        --disable-postproc \
+        # Disable all decoders
+        --disable-decoders \
+        # Disable all encoders, enable only needed ones
+        --disable-encoders \
+        --enable-encoder=h264_vaapi \
+        --enable-encoder=hevc_vaapi \
+        --enable-encoder=vp8_vaapi \
+        --enable-encoder=vp9_vaapi \
+        --enable-encoder=h264_qsv \
+        --enable-encoder=hevc_qsv \
+        --enable-encoder=libx264 \
+        --enable-encoder=libx265 \
+        --enable-encoder=libvpx_vp8 \
+        --enable-encoder=libvpx_vp9 \
+        # Disable muxers/demuxers
+        --disable-muxers \
+        --disable-demuxers \
+        # Disable parsers except needed ones
+        --disable-parsers \
+        --enable-parser=h264 \
+        --enable-parser=hevc \
+        --enable-parser=vp8 \
+        --enable-parser=vp9 \
+        # Disable BSFs except needed ones
+        --disable-bsfs \
+        --enable-bsf=h264_mp4toannexb \
+        --enable-bsf=hevc_mp4toannexb \
+        # Disable hardware decoding
+        --disable-hwaccels \
+        # Disable other unused features
+        --disable-indevs \
+        --disable-outdevs \
+        --disable-filters \
+        --disable-debug \
+    && make -j$(nproc) \
+    && make install \
+    && ldconfig \
+    && cd / \
+    && rm -rf /tmp/ffmpeg-build
+
 # Add Rust target
 RUN rustup target add x86_64-unknown-linux-gnu

-# Configure mold as the linker
-ENV RUSTFLAGS="-C link-arg=-fuse-ld=mold"
+# Configure mold as the linker and use custom FFmpeg
+ENV RUSTFLAGS="-C link-arg=-fuse-ld=mold" \
+    PKG_CONFIG_PATH="/usr/local/lib/pkgconfig:${PKG_CONFIG_PATH}"

@@ -101,7 +101,8 @@ mod ffmpeg {
     fn link_system_ffmpeg(builder: &mut Build) {
         use std::process::Command;

-        let libs = ["libavcodec", "libavutil", "libavformat", "libswscale"];
+        // Only need libavcodec and libavutil for encoding
+        let libs = ["libavcodec", "libavutil"];

         for lib in &libs {
             // Get cflags
@@ -134,7 +135,7 @@ mod ffmpeg {
                     }
                 }
             } else {
-                panic!("pkg-config failed for {}. Install FFmpeg development libraries: sudo apt install libavcodec-dev libavformat-dev libavutil-dev libswscale-dev", lib);
+                panic!("pkg-config failed for {}. Install FFmpeg development libraries: sudo apt install libavcodec-dev libavutil-dev", lib);
             }
         } else {
             panic!("pkg-config not found. Install pkg-config and FFmpeg development libraries.");
@@ -178,7 +179,8 @@ mod ffmpeg {
             )
         );
         {
-            let mut static_libs = vec!["avcodec", "avutil", "avformat"];
+            // Only need avcodec and avutil for encoding
+            let mut static_libs = vec!["avcodec", "avutil"];
             if target_os == "windows" {
                 static_libs.push("libmfx");
             }
@@ -251,7 +253,7 @@ mod ffmpeg {
             .unwrap();

         builder.files(
-            ["ffmpeg_ram_encode.cpp", "ffmpeg_ram_decode.cpp"].map(|f| ffmpeg_ram_dir.join(f)),
+            ["ffmpeg_ram_encode.cpp"].map(|f| ffmpeg_ram_dir.join(f)),
         );
     }
 }

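Because the custom FFmpeg installs to /usr/local and the Dockerfile exports PKG_CONFIG_PATH=/usr/local/lib/pkgconfig, the trimmed probe above only needs libavcodec and libavutil. A minimal sketch of the pkg-config call pattern the build script relies on; the real build.rs parses this output into compiler and linker flags, and the helper name here is invented:

```rust
use std::process::Command;

// Sketch: query pkg-config for one FFmpeg component. PKG_CONFIG_PATH makes the
// /usr/local (custom minimal) build win over any distro FFmpeg packages.
fn pkg_config_flags(lib: &str) -> Option<(String, String)> {
    let cflags = Command::new("pkg-config").args(["--cflags", lib]).output().ok()?;
    let libs = Command::new("pkg-config").args(["--libs", lib]).output().ok()?;
    if !cflags.status.success() || !libs.status.success() {
        return None; // the real script panics with an apt install hint instead
    }
    Some((
        String::from_utf8_lossy(&cflags.stdout).trim().to_string(),
        String::from_utf8_lossy(&libs.stdout).trim().to_string(),
    ))
}

// Only libavcodec and libavutil are needed for the encode-only build:
// for lib in ["libavcodec", "libavutil"] { let _ = pkg_config_flags(lib); }
```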
@@ -28,7 +28,6 @@ enum DataFormat {
   VP8,
   VP9,
   AV1,
-  MJPEG,
 };

 // same as Driver

@@ -1,328 +0,0 @@
|
|||||||
// https://github.com/FFmpeg/FFmpeg/blob/master/doc/examples/hw_decode.c
|
|
||||||
// https://github.com/FFmpeg/FFmpeg/blob/master/doc/examples/decode_video.c
|
|
||||||
|
|
||||||
extern "C" {
|
|
||||||
#include <libavcodec/avcodec.h>
|
|
||||||
#include <libavutil/log.h>
|
|
||||||
#include <libavutil/opt.h>
|
|
||||||
#include <libavutil/pixdesc.h>
|
|
||||||
}
|
|
||||||
|
|
||||||
#include <memory>
|
|
||||||
#include <stdbool.h>
|
|
||||||
|
|
||||||
#define LOG_MODULE "FFMPEG_RAM_DEC"
|
|
||||||
#include <log.h>
|
|
||||||
#include <util.h>
|
|
||||||
|
|
||||||
#ifdef _WIN32
|
|
||||||
#include <libavutil/hwcontext_d3d11va.h>
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#include "common.h"
|
|
||||||
#include "system.h"
|
|
||||||
|
|
||||||
// #define CFG_PKG_TRACE
|
|
||||||
|
|
||||||
namespace {
|
|
||||||
typedef void (*RamDecodeCallback)(const void *obj, int width, int height,
|
|
||||||
enum AVPixelFormat pixfmt,
|
|
||||||
int linesize[AV_NUM_DATA_POINTERS],
|
|
||||||
uint8_t *data[AV_NUM_DATA_POINTERS], int key);
|
|
||||||
|
|
||||||
class FFmpegRamDecoder {
|
|
||||||
public:
|
|
||||||
AVCodecContext *c_ = NULL;
|
|
||||||
AVBufferRef *hw_device_ctx_ = NULL;
|
|
||||||
AVFrame *sw_frame_ = NULL;
|
|
||||||
AVFrame *frame_ = NULL;
|
|
||||||
AVPacket *pkt_ = NULL;
|
|
||||||
bool hwaccel_ = true;
|
|
||||||
|
|
||||||
std::string name_;
|
|
||||||
AVHWDeviceType device_type_ = AV_HWDEVICE_TYPE_NONE;
|
|
||||||
int thread_count_ = 1;
|
|
||||||
RamDecodeCallback callback_ = NULL;
|
|
||||||
DataFormat data_format_;
|
|
||||||
|
|
||||||
#ifdef CFG_PKG_TRACE
|
|
||||||
int in_ = 0;
|
|
||||||
int out_ = 0;
|
|
||||||
#endif
|
|
||||||
|
|
||||||
FFmpegRamDecoder(const char *name, int device_type, int thread_count,
|
|
||||||
RamDecodeCallback callback) {
|
|
||||||
this->name_ = name;
|
|
||||||
this->device_type_ = (AVHWDeviceType)device_type;
|
|
||||||
this->thread_count_ = thread_count;
|
|
||||||
this->callback_ = callback;
|
|
||||||
}
|
|
||||||
|
|
||||||
~FFmpegRamDecoder() {}
|
|
||||||
|
|
||||||
void free_decoder() {
|
|
||||||
if (frame_)
|
|
||||||
av_frame_free(&frame_);
|
|
||||||
if (pkt_)
|
|
||||||
av_packet_free(&pkt_);
|
|
||||||
if (sw_frame_)
|
|
||||||
av_frame_free(&sw_frame_);
|
|
||||||
if (c_)
|
|
||||||
avcodec_free_context(&c_);
|
|
||||||
if (hw_device_ctx_)
|
|
||||||
av_buffer_unref(&hw_device_ctx_);
|
|
||||||
|
|
||||||
frame_ = NULL;
|
|
||||||
pkt_ = NULL;
|
|
||||||
sw_frame_ = NULL;
|
|
||||||
c_ = NULL;
|
|
||||||
hw_device_ctx_ = NULL;
|
|
||||||
}
|
|
||||||
int reset() {
|
|
||||||
if (name_.find("h264") != std::string::npos) {
|
|
||||||
data_format_ = DataFormat::H264;
|
|
||||||
} else if (name_.find("hevc") != std::string::npos) {
|
|
||||||
data_format_ = DataFormat::H265;
|
|
||||||
} else if (name_.find("mjpeg") != std::string::npos) {
|
|
||||||
data_format_ = DataFormat::MJPEG;
|
|
||||||
} else {
|
|
||||||
LOG_ERROR(std::string("unsupported data format:") + name_);
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
free_decoder();
|
|
||||||
const AVCodec *codec = NULL;
|
|
||||||
hwaccel_ = device_type_ != AV_HWDEVICE_TYPE_NONE;
|
|
||||||
int ret;
|
|
||||||
if (!(codec = avcodec_find_decoder_by_name(name_.c_str()))) {
|
|
||||||
LOG_ERROR(std::string("avcodec_find_decoder_by_name ") + name_ + " failed");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
if (!(c_ = avcodec_alloc_context3(codec))) {
|
|
||||||
LOG_ERROR(std::string("Could not allocate video codec context"));
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
c_->flags |= AV_CODEC_FLAG_LOW_DELAY;
|
|
||||||
c_->thread_count =
|
|
||||||
device_type_ != AV_HWDEVICE_TYPE_NONE ? 1 : thread_count_;
|
|
||||||
c_->thread_type = FF_THREAD_SLICE;
|
|
||||||
|
|
||||||
if (name_.find("qsv") != std::string::npos) {
|
|
||||||
if ((ret = av_opt_set(c_->priv_data, "async_depth", "1", 0)) < 0) {
|
|
||||||
LOG_ERROR(std::string("qsv set opt async_depth 1 failed"));
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
// https://github.com/FFmpeg/FFmpeg/blob/c6364b711bad1fe2fbd90e5b2798f87080ddf5ea/libavcodec/qsvdec.c#L932
|
|
||||||
// for disable warning
|
|
||||||
c_->pkt_timebase = av_make_q(1, 30);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (hwaccel_) {
|
|
||||||
ret =
|
|
||||||
av_hwdevice_ctx_create(&hw_device_ctx_, device_type_, NULL, NULL, 0);
|
|
||||||
if (ret < 0) {
|
|
||||||
LOG_ERROR(std::string("av_hwdevice_ctx_create failed, ret = ") + av_err2str(ret));
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
c_->hw_device_ctx = av_buffer_ref(hw_device_ctx_);
|
|
||||||
if (!check_support()) {
|
|
||||||
LOG_ERROR(std::string("check_support failed"));
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
if (!(sw_frame_ = av_frame_alloc())) {
|
|
||||||
LOG_ERROR(std::string("av_frame_alloc failed"));
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!(pkt_ = av_packet_alloc())) {
|
|
||||||
LOG_ERROR(std::string("av_packet_alloc failed"));
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!(frame_ = av_frame_alloc())) {
|
|
||||||
LOG_ERROR(std::string("av_frame_alloc failed"));
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
if ((ret = avcodec_open2(c_, codec, NULL)) != 0) {
|
|
||||||
LOG_ERROR(std::string("avcodec_open2 failed, ret = ") + av_err2str(ret));
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
#ifdef CFG_PKG_TRACE
|
|
||||||
in_ = 0;
|
|
||||||
out_ = 0;
|
|
||||||
#endif
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
int decode(const uint8_t *data, int length, const void *obj) {
|
|
||||||
int ret = -1;
|
|
||||||
#ifdef CFG_PKG_TRACE
|
|
||||||
in_++;
|
|
||||||
LOG_DEBUG(std::string("delay DI: in:") + in_ + " out:" + out_);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
if (!data || !length) {
|
|
||||||
LOG_ERROR(std::string("illegal decode parameter"));
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
pkt_->data = (uint8_t *)data;
|
|
||||||
pkt_->size = length;
|
|
||||||
ret = do_decode(obj);
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
|
|
||||||
private:
|
|
||||||
int do_decode(const void *obj) {
|
|
||||||
int ret;
|
|
||||||
AVFrame *tmp_frame = NULL;
|
|
||||||
bool decoded = false;
|
|
||||||
|
|
||||||
ret = avcodec_send_packet(c_, pkt_);
|
|
||||||
if (ret < 0) {
|
|
||||||
LOG_ERROR(std::string("avcodec_send_packet failed, ret = ") + av_err2str(ret));
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
auto start = util::now();
|
|
||||||
while (ret >= 0 && util::elapsed_ms(start) < ENCODE_TIMEOUT_MS) {
|
|
||||||
if ((ret = avcodec_receive_frame(c_, frame_)) != 0) {
|
|
||||||
if (ret != AVERROR(EAGAIN)) {
|
|
||||||
LOG_ERROR(std::string("avcodec_receive_frame failed, ret = ") + av_err2str(ret));
|
|
||||||
}
|
|
||||||
goto _exit;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (hwaccel_) {
|
|
||||||
if (!frame_->hw_frames_ctx) {
|
|
||||||
LOG_ERROR(std::string("hw_frames_ctx is NULL"));
|
|
||||||
goto _exit;
|
|
||||||
}
|
|
||||||
if ((ret = av_hwframe_transfer_data(sw_frame_, frame_, 0)) < 0) {
|
|
||||||
LOG_ERROR(std::string("av_hwframe_transfer_data failed, ret = ") +
|
|
||||||
av_err2str(ret));
|
|
||||||
goto _exit;
|
|
||||||
}
|
|
||||||
|
|
||||||
tmp_frame = sw_frame_;
|
|
||||||
} else {
|
|
||||||
tmp_frame = frame_;
|
|
||||||
}
|
|
||||||
decoded = true;
|
|
||||||
#ifdef CFG_PKG_TRACE
|
|
||||||
out_++;
|
|
||||||
LOG_DEBUG(std::string("delay DO: in:") + in_ + " out:" + out_);
|
|
||||||
#endif
|
|
||||||
#if FF_API_FRAME_KEY
|
|
||||||
int key_frame = frame_->flags & AV_FRAME_FLAG_KEY;
|
|
||||||
#else
|
|
||||||
int key_frame = frame_->key_frame;
|
|
||||||
#endif
|
|
||||||
|
|
||||||
callback_(obj, tmp_frame->width, tmp_frame->height,
|
|
||||||
(AVPixelFormat)tmp_frame->format, tmp_frame->linesize,
|
|
||||||
tmp_frame->data, key_frame);
|
|
||||||
}
|
|
||||||
_exit:
|
|
||||||
av_packet_unref(pkt_);
|
|
||||||
return decoded ? 0 : -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool check_support() {
|
|
||||||
#ifdef _WIN32
|
|
||||||
if (device_type_ == AV_HWDEVICE_TYPE_D3D11VA) {
|
|
||||||
if (!c_->hw_device_ctx) {
|
|
||||||
LOG_ERROR(std::string("hw_device_ctx is NULL"));
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
AVHWDeviceContext *deviceContext =
|
|
||||||
(AVHWDeviceContext *)hw_device_ctx_->data;
|
|
||||||
if (!deviceContext) {
|
|
||||||
LOG_ERROR(std::string("deviceContext is NULL"));
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
AVD3D11VADeviceContext *d3d11vaDeviceContext =
|
|
||||||
(AVD3D11VADeviceContext *)deviceContext->hwctx;
|
|
||||||
if (!d3d11vaDeviceContext) {
|
|
||||||
LOG_ERROR(std::string("d3d11vaDeviceContext is NULL"));
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
ID3D11Device *device = d3d11vaDeviceContext->device;
|
|
||||||
if (!device) {
|
|
||||||
LOG_ERROR(std::string("device is NULL"));
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
std::unique_ptr<NativeDevice> native_ = std::make_unique<NativeDevice>();
|
|
||||||
if (!native_) {
|
|
||||||
LOG_ERROR(std::string("Failed to create native device"));
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if (!native_->Init(0, (ID3D11Device *)device, 0)) {
|
|
||||||
LOG_ERROR(std::string("Failed to init native device"));
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if (!native_->support_decode(data_format_)) {
|
|
||||||
LOG_ERROR(std::string("Failed to check support ") + name_);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
} else {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
#else
|
|
||||||
return true;
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
} // namespace
|
|
||||||
|
|
||||||
extern "C" void ffmpeg_ram_free_decoder(FFmpegRamDecoder *decoder) {
|
|
||||||
try {
|
|
||||||
if (!decoder)
|
|
||||||
return;
|
|
||||||
decoder->free_decoder();
|
|
||||||
delete decoder;
|
|
||||||
decoder = NULL;
|
|
||||||
} catch (const std::exception &e) {
|
|
||||||
LOG_ERROR(std::string("ffmpeg_ram_free_decoder exception:") + e.what());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
extern "C" FFmpegRamDecoder *
|
|
||||||
ffmpeg_ram_new_decoder(const char *name, int device_type, int thread_count,
|
|
||||||
RamDecodeCallback callback) {
|
|
||||||
FFmpegRamDecoder *decoder = NULL;
|
|
||||||
try {
|
|
||||||
decoder = new FFmpegRamDecoder(name, device_type, thread_count, callback);
|
|
||||||
if (decoder) {
|
|
||||||
if (decoder->reset() == 0) {
|
|
||||||
return decoder;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (std::exception &e) {
|
|
||||||
LOG_ERROR(std::string("new decoder exception:") + e.what());
|
|
||||||
}
|
|
||||||
if (decoder) {
|
|
||||||
decoder->free_decoder();
|
|
||||||
delete decoder;
|
|
||||||
decoder = NULL;
|
|
||||||
}
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
extern "C" int ffmpeg_ram_decode(FFmpegRamDecoder *decoder, const uint8_t *data,
|
|
||||||
int length, const void *obj) {
|
|
||||||
try {
|
|
||||||
int ret = decoder->decode(data, length, obj);
|
|
||||||
if (DataFormat::H265 == decoder->data_format_ && util_decode::has_flag_could_not_find_ref_with_poc()) {
|
|
||||||
return HWCODEC_ERR_HEVC_COULD_NOT_FIND_POC;
|
|
||||||
} else {
|
|
||||||
return ret == 0 ? HWCODEC_SUCCESS : HWCODEC_ERR_COMMON;
|
|
||||||
}
|
|
||||||
} catch (const std::exception &e) {
|
|
||||||
LOG_ERROR(std::string("ffmpeg_ram_decode exception:") + e.what());
|
|
||||||
}
|
|
||||||
return HWCODEC_ERR_COMMON;
|
|
||||||
}
|
|
||||||
@@ -5,10 +5,6 @@

 #define AV_NUM_DATA_POINTERS 8

-typedef void (*RamDecodeCallback)(const void *obj, int width, int height,
-                                  int pixfmt,
-                                  int linesize[AV_NUM_DATA_POINTERS],
-                                  uint8_t *data[AV_NUM_DATA_POINTERS], int key);
 typedef void (*RamEncodeCallback)(const uint8_t *data, int len, int64_t pts,
                                   int key, const void *obj);

@@ -18,14 +14,9 @@ void *ffmpeg_ram_new_encoder(const char *name, const char *mc_name, int width,
                              int thread_count, int gpu, int *linesize,
                              int *offset, int *length,
                              RamEncodeCallback callback);
-void *ffmpeg_ram_new_decoder(const char *name, int device_type,
-                             int thread_count, RamDecodeCallback callback);
 int ffmpeg_ram_encode(void *encoder, const uint8_t *data, int length,
                       const void *obj, int64_t ms);
-int ffmpeg_ram_decode(void *decoder, const uint8_t *data, int length,
-                      const void *obj);
 void ffmpeg_ram_free_encoder(void *encoder);
-void ffmpeg_ram_free_decoder(void *decoder);
 int ffmpeg_ram_get_linesize_offset_length(int pix_fmt, int width, int height,
                                           int align, int *linesize, int *offset,
                                           int *length);

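With the decoder entry points gone, the header keeps only the encode-side C API. A minimal sketch of a Rust callback that matches the retained RamEncodeCallback typedef; the packet-collection scheme (passing a Vec through obj) mirrors the callback pattern used elsewhere in this crate and is an assumption here, not confirmed project code:

```rust
use std::ffi::c_void;
use std::os::raw::c_int;
use std::slice::from_raw_parts;

/// One encoded bitstream packet delivered by the C side.
pub struct EncodedPacket {
    pub data: Vec<u8>,
    pub pts: i64,
    pub key: bool,
}

/// Matches:
/// typedef void (*RamEncodeCallback)(const uint8_t *data, int len, int64_t pts,
///                                   int key, const void *obj);
/// `obj` is assumed to be a *mut Vec<EncodedPacket> supplied by the caller.
unsafe extern "C" fn on_encoded_packet(
    data: *const u8,
    len: c_int,
    pts: i64,
    key: c_int,
    obj: *const c_void,
) {
    let packets = &mut *(obj as *mut Vec<EncodedPacket>);
    packets.push(EncodedPacket {
        data: from_raw_parts(data, len as usize).to_vec(),
        pts,
        key: key != 0,
    });
}
```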
@@ -1,202 +0,0 @@
|
|||||||
use crate::ffmpeg::{init_av_log, AVHWDeviceType::*};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
common::DataFormat::*,
|
|
||||||
ffmpeg::{AVHWDeviceType, AVPixelFormat},
|
|
||||||
ffmpeg_ram::{
|
|
||||||
ffmpeg_ram_decode, ffmpeg_ram_free_decoder, ffmpeg_ram_new_decoder, CodecInfo,
|
|
||||||
AV_NUM_DATA_POINTERS, Priority,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
use log::error;
|
|
||||||
use std::{
|
|
||||||
ffi::{c_void, CString},
|
|
||||||
os::raw::c_int,
|
|
||||||
slice::from_raw_parts,
|
|
||||||
vec,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct DecodeContext {
|
|
||||||
pub name: String,
|
|
||||||
pub device_type: AVHWDeviceType,
|
|
||||||
pub thread_count: i32,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct DecodeFrame {
|
|
||||||
pub pixfmt: AVPixelFormat,
|
|
||||||
pub width: i32,
|
|
||||||
pub height: i32,
|
|
||||||
pub data: Vec<Vec<u8>>,
|
|
||||||
pub linesize: Vec<i32>,
|
|
||||||
pub key: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for DecodeFrame {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
let mut s = String::from("data:");
|
|
||||||
for data in self.data.iter() {
|
|
||||||
s.push_str(format!("{} ", data.len()).as_str());
|
|
||||||
}
|
|
||||||
s.push_str(", linesize:");
|
|
||||||
for linesize in self.linesize.iter() {
|
|
||||||
s.push_str(format!("{} ", linesize).as_str());
|
|
||||||
}
|
|
||||||
|
|
||||||
write!(
|
|
||||||
f,
|
|
||||||
"fixfmt:{}, width:{}, height:{},key:{}, {}",
|
|
||||||
self.pixfmt as i32, self.width, self.height, self.key, s,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct Decoder {
|
|
||||||
codec: *mut c_void,
|
|
||||||
frames: *mut Vec<DecodeFrame>,
|
|
||||||
pub ctx: DecodeContext,
|
|
||||||
}
|
|
||||||
|
|
||||||
unsafe impl Send for Decoder {}
|
|
||||||
unsafe impl Sync for Decoder {}
|
|
||||||
|
|
||||||
impl Decoder {
|
|
||||||
pub fn new(ctx: DecodeContext) -> Result<Self, ()> {
|
|
||||||
init_av_log();
|
|
||||||
unsafe {
|
|
||||||
let codec = ffmpeg_ram_new_decoder(
|
|
||||||
CString::new(ctx.name.as_str()).map_err(|_| ())?.as_ptr(),
|
|
||||||
ctx.device_type as _,
|
|
||||||
ctx.thread_count,
|
|
||||||
Some(Decoder::callback),
|
|
||||||
);
|
|
||||||
|
|
||||||
if codec.is_null() {
|
|
||||||
return Err(());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Decoder {
|
|
||||||
codec,
|
|
||||||
frames: Box::into_raw(Box::new(Vec::<DecodeFrame>::new())),
|
|
||||||
ctx,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn decode(&mut self, packet: &[u8]) -> Result<&mut Vec<DecodeFrame>, i32> {
|
|
||||||
unsafe {
|
|
||||||
(&mut *self.frames).clear();
|
|
||||||
let ret = ffmpeg_ram_decode(
|
|
||||||
self.codec,
|
|
||||||
packet.as_ptr(),
|
|
||||||
packet.len() as c_int,
|
|
||||||
self.frames as *const _ as *const c_void,
|
|
||||||
);
|
|
||||||
|
|
||||||
if ret < 0 {
|
|
||||||
Err(ret)
|
|
||||||
} else {
|
|
||||||
Ok(&mut *self.frames)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
unsafe extern "C" fn callback(
|
|
||||||
obj: *const c_void,
|
|
||||||
width: c_int,
|
|
||||||
height: c_int,
|
|
||||||
pixfmt: c_int,
|
|
||||||
linesizes: *mut c_int,
|
|
||||||
datas: *mut *mut u8,
|
|
||||||
key: c_int,
|
|
||||||
) {
|
|
||||||
let frames = &mut *(obj as *mut Vec<DecodeFrame>);
|
|
||||||
let datas = from_raw_parts(datas, AV_NUM_DATA_POINTERS as _);
|
|
||||||
let linesizes = from_raw_parts(linesizes, AV_NUM_DATA_POINTERS as _);
|
|
||||||
|
|
||||||
let mut frame = DecodeFrame {
|
|
||||||
pixfmt: std::mem::transmute(pixfmt),
|
|
||||||
width,
|
|
||||||
height,
|
|
||||||
data: vec![],
|
|
||||||
linesize: vec![],
|
|
||||||
key: key != 0,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Handle YUV420P and YUVJ420P (JPEG full-range) - same memory layout
|
|
||||||
if pixfmt == AVPixelFormat::AV_PIX_FMT_YUV420P as c_int
|
|
||||||
|| pixfmt == AVPixelFormat::AV_PIX_FMT_YUVJ420P as c_int
|
|
||||||
{
|
|
||||||
let y = from_raw_parts(datas[0], (linesizes[0] * height) as usize).to_vec();
|
|
||||||
let u = from_raw_parts(datas[1], (linesizes[1] * height / 2) as usize).to_vec();
|
|
||||||
let v = from_raw_parts(datas[2], (linesizes[2] * height / 2) as usize).to_vec();
|
|
||||||
|
|
||||||
frame.data.push(y);
|
|
||||||
frame.data.push(u);
|
|
||||||
frame.data.push(v);
|
|
||||||
|
|
||||||
frame.linesize.push(linesizes[0]);
|
|
||||||
frame.linesize.push(linesizes[1]);
|
|
||||||
frame.linesize.push(linesizes[2]);
|
|
||||||
|
|
||||||
frames.push(frame);
|
|
||||||
} else if pixfmt == AVPixelFormat::AV_PIX_FMT_YUV422P as c_int
|
|
||||||
|| pixfmt == AVPixelFormat::AV_PIX_FMT_YUVJ422P as c_int
|
|
||||||
{
|
|
||||||
// YUV422P: U and V planes have same height as Y (not half)
|
|
||||||
let y = from_raw_parts(datas[0], (linesizes[0] * height) as usize).to_vec();
|
|
||||||
let u = from_raw_parts(datas[1], (linesizes[1] * height) as usize).to_vec();
|
|
||||||
let v = from_raw_parts(datas[2], (linesizes[2] * height) as usize).to_vec();
|
|
||||||
|
|
||||||
frame.data.push(y);
|
|
||||||
frame.data.push(u);
|
|
||||||
frame.data.push(v);
|
|
||||||
|
|
||||||
frame.linesize.push(linesizes[0]);
|
|
||||||
frame.linesize.push(linesizes[1]);
|
|
||||||
frame.linesize.push(linesizes[2]);
|
|
||||||
|
|
||||||
frames.push(frame);
|
|
||||||
} else if pixfmt == AVPixelFormat::AV_PIX_FMT_NV12 as c_int
|
|
||||||
|| pixfmt == AVPixelFormat::AV_PIX_FMT_NV21 as c_int
|
|
||||||
{
|
|
||||||
let y = from_raw_parts(datas[0], (linesizes[0] * height) as usize).to_vec();
|
|
||||||
let uv = from_raw_parts(datas[1], (linesizes[1] * height / 2) as usize).to_vec();
|
|
||||||
|
|
||||||
frame.data.push(y);
|
|
||||||
frame.data.push(uv);
|
|
||||||
|
|
||||||
frame.linesize.push(linesizes[0]);
|
|
||||||
frame.linesize.push(linesizes[1]);
|
|
||||||
|
|
||||||
frames.push(frame);
|
|
||||||
} else {
|
|
||||||
error!("unsupported pixfmt {}", pixfmt as i32);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns available decoders for IP-KVM scenario.
|
|
||||||
/// Only MJPEG software decoder is supported as IP-KVM captures from video capture cards
|
|
||||||
/// that output MJPEG streams.
|
|
||||||
pub fn available_decoders() -> Vec<CodecInfo> {
|
|
||||||
// IP-KVM scenario only needs MJPEG decoding
|
|
||||||
// MJPEG comes from video capture cards, software decoding is sufficient
|
|
||||||
vec![CodecInfo {
|
|
||||||
name: "mjpeg".to_owned(),
|
|
||||||
format: MJPEG,
|
|
||||||
hwdevice: AV_HWDEVICE_TYPE_NONE,
|
|
||||||
priority: Priority::Best as _,
|
|
||||||
..Default::default()
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Drop for Decoder {
|
|
||||||
fn drop(&mut self) {
|
|
||||||
unsafe {
|
|
||||||
ffmpeg_ram_free_decoder(self.codec);
|
|
||||||
self.codec = std::ptr::null_mut();
|
|
||||||
let _ = Box::from_raw(self.frames);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -447,14 +447,6 @@ impl Encoder {
             }
         }

-        // Add MJPEG software encoder if not already present
-        if !res.iter().any(|c| c.format == MJPEG) {
-            if let Some(mjpeg_soft) = soft_codecs.mjpeg {
-                debug!("Adding software MJPEG encoder: {}", mjpeg_soft.name);
-                res.push(mjpeg_soft);
-            }
-        }
-
         res
     }

@@ -12,7 +12,6 @@ use std::ffi::c_int;

 include!(concat!(env!("OUT_DIR"), "/ffmpeg_ram_ffi.rs"));

-pub mod decode;
 pub mod encode;

 pub enum Priority {
@@ -52,7 +51,6 @@ impl CodecInfo {
         let mut vp8: Option<CodecInfo> = None;
         let mut vp9: Option<CodecInfo> = None;
         let mut av1: Option<CodecInfo> = None;
-        let mut mjpeg: Option<CodecInfo> = None;

         for coder in coders {
             match coder.format {
@@ -96,14 +94,6 @@ impl CodecInfo {
                     }
                     None => av1 = Some(coder),
                 },
-                DataFormat::MJPEG => match &mjpeg {
-                    Some(old) => {
-                        if old.priority > coder.priority {
-                            mjpeg = Some(coder)
-                        }
-                    }
-                    None => mjpeg = Some(coder),
-                },
             }
         }
         CodecInfos {
@@ -112,7 +102,6 @@ impl CodecInfo {
             vp8,
             vp9,
             av1,
-            mjpeg,
         }
     }

@@ -147,13 +136,6 @@ impl CodecInfo {
                 priority: Priority::Soft as _,
             }),
             av1: None,
-            mjpeg: Some(CodecInfo {
-                name: "mjpeg".to_owned(),
-                mc_name: Default::default(),
-                format: MJPEG,
-                hwdevice: AV_HWDEVICE_TYPE_NONE,
-                priority: Priority::Soft as _,
-            }),
         }
     }
 }
@@ -165,7 +147,6 @@ pub struct CodecInfos {
     pub vp8: Option<CodecInfo>,
     pub vp9: Option<CodecInfo>,
     pub av1: Option<CodecInfo>,
-    pub mjpeg: Option<CodecInfo>,
 }

 impl CodecInfos {

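The removed MJPEG arm followed the same selection rule as the remaining codecs: among candidate coders of one format, the entry with the smallest priority value wins. A generic sketch of that rule, with the struct and function names invented for illustration:

```rust
#[derive(Clone)]
struct Candidate {
    name: String,
    priority: i32,
}

/// Keep the candidate with the lowest priority value, matching the
/// `if old.priority > coder.priority { slot = Some(coder) }` pattern above.
fn pick_best(candidates: Vec<Candidate>) -> Option<Candidate> {
    let mut best: Option<Candidate> = None;
    for coder in candidates {
        match &best {
            Some(old) if old.priority > coder.priority => best = Some(coder),
            None => best = Some(coder),
            _ => {}
        }
    }
    best
}
```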
@@ -1,36 +1,29 @@
|
|||||||
//! MJPEG VAAPI hardware decoder
|
//! MJPEG decoder implementations
|
||||||
//!
|
//!
|
||||||
//! Uses hwcodec's FFmpeg VAAPI backend to decode MJPEG to NV12.
|
//! Provides MJPEG decoding using libyuv for SIMD-accelerated decoding.
|
||||||
//! This provides hardware-accelerated JPEG decoding with direct NV12 output,
|
//! All decoders output to standard YUV formats suitable for encoding.
|
||||||
//! which is the optimal format for VAAPI H264 encoding.
|
|
||||||
|
|
||||||
use std::sync::Once;
|
use std::sync::Once;
|
||||||
use tracing::{debug, info, warn};
|
use tracing::{debug, info};
|
||||||
|
|
||||||
use hwcodec::ffmpeg::AVHWDeviceType;
|
|
||||||
use hwcodec::ffmpeg::AVPixelFormat;
|
|
||||||
use hwcodec::ffmpeg_ram::decode::{DecodeContext, DecodeFrame, Decoder};
|
|
||||||
|
|
||||||
use crate::error::{AppError, Result};
|
use crate::error::{AppError, Result};
|
||||||
use crate::video::format::Resolution;
|
use crate::video::format::Resolution;
|
||||||
|
|
||||||
// libyuv for SIMD-accelerated YUV conversion
|
|
||||||
|
|
||||||
static INIT_LOGGING: Once = Once::new();
|
static INIT_LOGGING: Once = Once::new();
|
||||||
|
|
||||||
/// Initialize hwcodec logging (only once)
|
/// Initialize decoder logging (only once)
|
||||||
fn init_hwcodec_logging() {
|
fn init_decoder_logging() {
|
||||||
INIT_LOGGING.call_once(|| {
|
INIT_LOGGING.call_once(|| {
|
||||||
debug!("hwcodec MJPEG decoder logging initialized");
|
debug!("MJPEG decoder logging initialized");
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/// MJPEG VAAPI decoder configuration
|
/// MJPEG decoder configuration
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct MjpegVaapiDecoderConfig {
|
pub struct MjpegVaapiDecoderConfig {
|
||||||
/// Expected resolution (can be updated from decoded frame)
|
/// Expected resolution (can be updated from decoded frame)
|
||||||
pub resolution: Resolution,
|
pub resolution: Resolution,
|
||||||
/// Use hardware acceleration (VAAPI)
|
/// Use hardware acceleration (ignored, kept for API compatibility)
|
||||||
pub use_hwaccel: bool,
|
pub use_hwaccel: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -266,62 +259,34 @@ impl DecodedNv12Frame {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// MJPEG VAAPI hardware decoder
|
/// MJPEG decoder with NV12 output
|
||||||
///
|
///
|
||||||
/// Decodes MJPEG frames to NV12 format using VAAPI hardware acceleration.
|
/// Uses libyuv for SIMD-accelerated MJPEG decoding to YUV420P,
|
||||||
/// This is optimal for feeding into VAAPI H264 encoder.
|
/// then converts to NV12 for hardware encoder compatibility.
|
||||||
|
/// Named "VaapiDecoder" for API compatibility with existing code.
|
||||||
pub struct MjpegVaapiDecoder {
|
pub struct MjpegVaapiDecoder {
|
||||||
/// hwcodec decoder instance
|
|
||||||
decoder: Decoder,
|
|
||||||
/// Configuration
|
/// Configuration
|
||||||
config: MjpegVaapiDecoderConfig,
|
config: MjpegVaapiDecoderConfig,
|
||||||
/// Frame counter
|
/// Frame counter
|
||||||
frame_count: u64,
|
frame_count: u64,
|
||||||
/// Whether hardware acceleration is active
|
|
||||||
hwaccel_active: bool,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl MjpegVaapiDecoder {
|
impl MjpegVaapiDecoder {
|
||||||
/// Create a new MJPEG decoder
|
/// Create a new MJPEG decoder
|
||||||
/// Note: VAAPI does not support MJPEG decoding on most hardware,
|
|
||||||
/// so we use software decoding and convert to NV12 for VAAPI encoding.
|
|
||||||
pub fn new(config: MjpegVaapiDecoderConfig) -> Result<Self> {
|
pub fn new(config: MjpegVaapiDecoderConfig) -> Result<Self> {
|
||||||
init_hwcodec_logging();
|
init_decoder_logging();
|
||||||
|
|
||||||
// VAAPI doesn't support MJPEG decoding, always use software decoder
|
|
||||||
// The output will be converted to NV12 for VAAPI H264 encoding
|
|
||||||
let device_type = AVHWDeviceType::AV_HWDEVICE_TYPE_NONE;
|
|
||||||
|
|
||||||
info!(
|
info!(
|
||||||
"Creating MJPEG decoder with software decoding (VAAPI doesn't support MJPEG decode)"
|
"Creating MJPEG decoder with libyuv (SIMD-accelerated, NV12 output)"
|
||||||
);
|
|
||||||
|
|
||||||
let ctx = DecodeContext {
|
|
||||||
name: "mjpeg".to_string(),
|
|
||||||
device_type,
|
|
||||||
thread_count: 4, // Use multiple threads for software decoding
|
|
||||||
};
|
|
||||||
|
|
||||||
let decoder = Decoder::new(ctx).map_err(|_| {
|
|
||||||
AppError::VideoError("Failed to create MJPEG software decoder".to_string())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// hwaccel is not actually active for MJPEG decoding
|
|
||||||
let hwaccel_active = false;
|
|
||||||
|
|
||||||
info!(
|
|
||||||
"MJPEG decoder created successfully (software decode, will convert to NV12)"
|
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
decoder,
|
|
||||||
config,
|
config,
|
||||||
frame_count: 0,
|
frame_count: 0,
|
||||||
hwaccel_active,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create with default config (VAAPI enabled)
|
/// Create with default config
|
||||||
pub fn with_vaapi(resolution: Resolution) -> Result<Self> {
|
pub fn with_vaapi(resolution: Resolution) -> Result<Self> {
|
||||||
Self::new(MjpegVaapiDecoderConfig {
|
Self::new(MjpegVaapiDecoderConfig {
|
||||||
resolution,
|
resolution,
|
||||||
@@ -329,7 +294,7 @@ impl MjpegVaapiDecoder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create with software decoding (fallback)
|
/// Create with software decoding (same as with_vaapi, kept for API compatibility)
|
||||||
pub fn with_software(resolution: Resolution) -> Result<Self> {
|
pub fn with_software(resolution: Resolution) -> Result<Self> {
|
||||||
Self::new(MjpegVaapiDecoderConfig {
|
Self::new(MjpegVaapiDecoderConfig {
|
||||||
resolution,
|
resolution,
|
||||||
@@ -337,9 +302,9 @@ impl MjpegVaapiDecoder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Check if hardware acceleration is active
|
/// Check if hardware acceleration is active (always false, using libyuv)
|
||||||
pub fn is_hwaccel_active(&self) -> bool {
|
pub fn is_hwaccel_active(&self) -> bool {
|
||||||
self.hwaccel_active
|
false
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Decode MJPEG frame to NV12
|
/// Decode MJPEG frame to NV12
|
||||||
@@ -357,130 +322,27 @@ impl MjpegVaapiDecoder {
|
|||||||
|
|
||||||
self.frame_count += 1;
|
self.frame_count += 1;
|
||||||
|
|
||||||
let frames = self.decoder.decode(jpeg_data).map_err(|e| {
|
// Get JPEG dimensions
|
||||||
AppError::VideoError(format!("MJPEG decode failed: error code {}", e))
|
let (width, height) = libyuv::mjpeg_size(jpeg_data)
|
||||||
})?;
|
.map_err(|e| AppError::VideoError(format!("Failed to read MJPEG size: {}", e)))?;
|
||||||
|
|
||||||
if frames.is_empty() {
|
// Decode MJPEG to YUV420P first
|
||||||
return Err(AppError::VideoError("Decoder returned no frames".to_string()));
|
|
||||||
}
|
|
||||||
|
|
||||||
let frame = &frames[0];
|
|
||||||
|
|
||||||
// Handle different output formats
|
|
||||||
// VAAPI MJPEG decoder may output NV12, YUV420P, or YUVJ420P (JPEG full-range)
|
|
||||||
if frame.pixfmt == AVPixelFormat::AV_PIX_FMT_NV12
|
|
||||||
|| frame.pixfmt == AVPixelFormat::AV_PIX_FMT_NV21
|
|
||||||
{
|
|
||||||
// NV12/NV21 format: Y plane + UV interleaved plane
|
|
||||||
if frame.data.len() < 2 {
|
|
||||||
return Err(AppError::VideoError("Invalid NV12 frame data".to_string()));
|
|
||||||
}
|
|
||||||
|
|
||||||
return Ok(DecodedNv12Frame {
|
|
||||||
y_plane: frame.data[0].clone(),
|
|
||||||
uv_plane: frame.data[1].clone(),
|
|
||||||
y_linesize: frame.linesize[0],
|
|
||||||
uv_linesize: frame.linesize[1],
|
|
||||||
width: frame.width,
|
|
||||||
height: frame.height,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// YUV420P or YUVJ420P (JPEG full-range) - need to convert to NV12
|
|
||||||
if frame.pixfmt == AVPixelFormat::AV_PIX_FMT_YUV420P
|
|
||||||
|| frame.pixfmt == AVPixelFormat::AV_PIX_FMT_YUVJ420P
|
|
||||||
{
|
|
||||||
return Self::convert_yuv420p_to_nv12_static(frame);
|
|
||||||
}
|
|
||||||
|
|
||||||
// YUV422P or YUVJ422P (JPEG full-range 4:2:2) - need to convert to NV12
|
|
||||||
if frame.pixfmt == AVPixelFormat::AV_PIX_FMT_YUV422P
|
|
||||||
|| frame.pixfmt == AVPixelFormat::AV_PIX_FMT_YUVJ422P
|
|
||||||
{
|
|
||||||
return Self::convert_yuv422p_to_nv12_static(frame);
|
|
||||||
}
|
|
||||||
|
|
||||||
Err(AppError::VideoError(format!(
|
|
||||||
"Unexpected output format: {:?} (expected NV12, YUV420P, YUV422P, or YUVJ variants)",
|
|
||||||
frame.pixfmt
|
|
||||||
)))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Convert YUV420P frame to NV12 format using libyuv (SIMD accelerated)
|
|
||||||
fn convert_yuv420p_to_nv12_static(frame: &DecodeFrame) -> Result<DecodedNv12Frame> {
|
|
||||||
if frame.data.len() < 3 {
|
|
||||||
return Err(AppError::VideoError("Invalid YUV420P frame data".to_string()));
|
|
||||||
}
|
|
||||||
|
|
||||||
let width = frame.width as i32;
|
|
||||||
let height = frame.height as i32;
|
|
||||||
let y_linesize = frame.linesize[0];
|
|
||||||
let u_linesize = frame.linesize[1];
|
|
||||||
let v_linesize = frame.linesize[2];
|
|
||||||
|
|
||||||
// Allocate packed NV12 output buffer
|
|
||||||
let nv12_size = (width * height * 3 / 2) as usize;
|
|
||||||
let mut nv12_data = vec![0u8; nv12_size];
|
|
||||||
|
|
||||||
// Use libyuv for SIMD-accelerated I420 → NV12 conversion
|
|
||||||
libyuv::i420_to_nv12_planar(
|
|
||||||
&frame.data[0], y_linesize,
|
|
||||||
&frame.data[1], u_linesize,
|
|
||||||
&frame.data[2], v_linesize,
|
|
||||||
&mut nv12_data,
|
|
||||||
width, height,
|
|
||||||
).map_err(|e| AppError::VideoError(format!("libyuv I420→NV12 failed: {}", e)))?;
|
|
||||||
|
|
||||||
// Split into Y and UV planes for DecodedNv12Frame
|
|
||||||
let y_size = (width * height) as usize;
|
let y_size = (width * height) as usize;
|
||||||
let y_plane = nv12_data[..y_size].to_vec();
|
let uv_size = y_size / 4;
|
||||||
let uv_plane = nv12_data[y_size..].to_vec();
|
let yuv420_size = y_size + uv_size * 2;
|
||||||
|
let mut yuv_data = vec![0u8; yuv420_size];
|
||||||
|
|
||||||
Ok(DecodedNv12Frame {
|
libyuv::mjpeg_to_i420(jpeg_data, &mut yuv_data, width, height)
|
||||||
y_plane,
|
.map_err(|e| AppError::VideoError(format!("libyuv MJPEG→I420 failed: {}", e)))?;
|
||||||
uv_plane,
|
|
||||||
y_linesize: width, // Output is packed, no padding
|
|
||||||
uv_linesize: width,
|
|
||||||
width: frame.width,
|
|
||||||
height: frame.height,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Convert YUV422P frame to NV12 format using libyuv (SIMD accelerated)
|
// Convert I420 to NV12
|
||||||
/// Pipeline: I422 (YUV422P) → I420 → NV12
|
|
||||||
fn convert_yuv422p_to_nv12_static(frame: &DecodeFrame) -> Result<DecodedNv12Frame> {
|
|
||||||
if frame.data.len() < 3 {
|
|
||||||
return Err(AppError::VideoError("Invalid YUV422P frame data".to_string()));
|
|
||||||
}
|
|
||||||
|
|
||||||
let width = frame.width as i32;
|
|
||||||
let height = frame.height as i32;
|
|
||||||
let y_linesize = frame.linesize[0];
|
|
||||||
let u_linesize = frame.linesize[1];
|
|
||||||
let v_linesize = frame.linesize[2];
|
|
||||||
|
|
||||||
// Step 1: I422 → I420 (vertical chroma downsampling via SIMD)
|
|
||||||
let i420_size = (width * height * 3 / 2) as usize;
|
|
||||||
let mut i420_data = vec![0u8; i420_size];
|
|
||||||
|
|
||||||
libyuv::i422_to_i420_planar(
|
|
||||||
&frame.data[0], y_linesize,
|
|
||||||
&frame.data[1], u_linesize,
|
|
||||||
&frame.data[2], v_linesize,
|
|
||||||
&mut i420_data,
|
|
||||||
width, height,
|
|
||||||
).map_err(|e| AppError::VideoError(format!("libyuv I422→I420 failed: {}", e)))?;
|
|
||||||
|
|
||||||
// Step 2: I420 → NV12 (UV interleaving via SIMD)
|
|
||||||
let nv12_size = (width * height * 3 / 2) as usize;
|
let nv12_size = (width * height * 3 / 2) as usize;
|
||||||
let mut nv12_data = vec![0u8; nv12_size];
|
let mut nv12_data = vec![0u8; nv12_size];
|
||||||
|
|
||||||
libyuv::i420_to_nv12(&i420_data, &mut nv12_data, width, height)
|
libyuv::i420_to_nv12(&yuv_data, &mut nv12_data, width, height)
|
||||||
.map_err(|e| AppError::VideoError(format!("libyuv I420→NV12 failed: {}", e)))?;
|
.map_err(|e| AppError::VideoError(format!("libyuv I420→NV12 failed: {}", e)))?;
|
||||||
|
|
||||||
// Split into Y and UV planes for DecodedNv12Frame
|
// Split into Y and UV planes
|
||||||
let y_size = (width * height) as usize;
|
|
||||||
let y_plane = nv12_data[..y_size].to_vec();
|
let y_plane = nv12_data[..y_size].to_vec();
|
||||||
let uv_plane = nv12_data[y_size..].to_vec();
|
let uv_plane = nv12_data[y_size..].to_vec();
|
||||||
|
|
||||||
@@ -489,8 +351,8 @@ impl MjpegVaapiDecoder {
|
|||||||
uv_plane,
|
uv_plane,
|
||||||
y_linesize: width,
|
y_linesize: width,
|
||||||
uv_linesize: width,
|
uv_linesize: width,
|
||||||
width: frame.width,
|
width,
|
||||||
height: frame.height,
|
height,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -600,36 +462,10 @@ impl MjpegTurboDecoder {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Check if MJPEG VAAPI decoder is available
|
|
||||||
pub fn is_mjpeg_vaapi_available() -> bool {
|
|
||||||
let ctx = DecodeContext {
|
|
||||||
name: "mjpeg".to_string(),
|
|
||||||
device_type: AVHWDeviceType::AV_HWDEVICE_TYPE_VAAPI,
|
|
||||||
thread_count: 1,
|
|
||||||
};
|
|
||||||
|
|
||||||
match Decoder::new(ctx) {
|
|
||||||
Ok(_) => {
|
|
||||||
info!("MJPEG VAAPI decoder is available");
|
|
||||||
true
|
|
||||||
}
|
|
||||||
Err(_) => {
|
|
||||||
warn!("MJPEG VAAPI decoder is not available");
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_mjpeg_vaapi_availability() {
|
|
||||||
let available = is_mjpeg_vaapi_available();
|
|
||||||
println!("MJPEG VAAPI available: {}", available);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_decoder_creation() {
|
fn test_decoder_creation() {
|
||||||
let config = MjpegVaapiDecoderConfig::default();
|
let config = MjpegVaapiDecoderConfig::default();
|
||||||
|
|||||||
@@ -92,6 +92,8 @@ pub enum H265InputFormat {
     Yuv420p,
     /// NV12 - Y plane + interleaved UV plane (optimal for hardware encoders)
     Nv12,
+    /// YUYV422 - packed YUV 4:2:2 format (optimal for RKMPP direct input)
+    Yuyv422,
 }

 impl Default for H265InputFormat {
@@ -145,6 +147,23 @@ impl H265Config {
         }
     }

+    /// Create config for low latency streaming with YUYV422 input (optimal for RKMPP direct input)
+    pub fn low_latency_yuyv422(resolution: Resolution, bitrate_kbps: u32) -> Self {
+        Self {
+            base: EncoderConfig {
+                resolution,
+                input_format: PixelFormat::Yuyv,
+                quality: bitrate_kbps,
+                fps: 30,
+                gop_size: 30,
+            },
+            bitrate_kbps,
+            gop_size: 30,
+            fps: 30,
+            input_format: H265InputFormat::Yuyv422,
+        }
+    }
+
     /// Create config for quality streaming
     pub fn quality(resolution: Resolution, bitrate_kbps: u32) -> Self {
         Self {
@@ -311,13 +330,14 @@ impl H265Encoder {
         let width = config.base.resolution.width;
         let height = config.base.resolution.height;

-        // Software encoders (libx265) require YUV420P, hardware encoders use NV12
+        // Software encoders (libx265) require YUV420P, hardware encoders use NV12 or YUYV422
         let (pixfmt, actual_input_format) = if is_software {
             (AVPixelFormat::AV_PIX_FMT_YUV420P, H265InputFormat::Yuv420p)
         } else {
             match config.input_format {
                 H265InputFormat::Nv12 => (AVPixelFormat::AV_PIX_FMT_NV12, H265InputFormat::Nv12),
                 H265InputFormat::Yuv420p => (AVPixelFormat::AV_PIX_FMT_YUV420P, H265InputFormat::Yuv420p),
+                H265InputFormat::Yuyv422 => (AVPixelFormat::AV_PIX_FMT_YUYV422, H265InputFormat::Yuyv422),
             }
         };

@@ -548,6 +568,7 @@ impl Encoder for H265Encoder {
         match self.config.input_format {
             H265InputFormat::Nv12 => matches!(format, PixelFormat::Nv12),
             H265InputFormat::Yuv420p => matches!(format, PixelFormat::Yuv420),
+            H265InputFormat::Yuyv422 => matches!(format, PixelFormat::Yuyv),
         }
     }
 }

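The new variant lets the RKMPP path consume capture-card YUYV frames without an intermediate conversion. A short usage sketch, assuming the RKMPP codec name "hevc_rkmpp" (it is one of the encoders enabled in the FFmpeg configure above) and that Resolution can be built with public width/height fields:

```rust
use crate::error::Result;
use crate::video::format::Resolution;

// Sketch only: the Resolution construction and the literal codec-name string are assumptions.
fn make_rkmpp_yuyv_encoder() -> Result<H265Encoder> {
    let resolution = Resolution { width: 1920, height: 1080 };
    // 8 Mbps, YUYV422 fed straight from the capture device to the RKMPP encoder.
    let config = H265Config::low_latency_yuyv422(resolution, 8000);
    H265Encoder::with_codec(config, "hevc_rkmpp")
}
```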
@@ -411,9 +411,22 @@ impl SharedVideoPipeline {
                 Box::new(H264EncoderWrapper(encoder))
             }
             VideoEncoderType::H265 => {
-                let encoder_config = H265Config::low_latency(config.resolution, config.bitrate_kbps);
+                // Determine H265 input format based on backend and input format
+                let encoder_config = if use_yuyv_direct {
+                    H265Config::low_latency_yuyv422(config.resolution, config.bitrate_kbps)
+                } else {
+                    H265Config::low_latency(config.resolution, config.bitrate_kbps)
+                };

-                let encoder = if let Some(ref backend) = config.encoder_backend {
+                let encoder = if use_yuyv_direct {
+                    // Force RKMPP backend for YUYV direct input
+                    let codec_name = get_codec_name(VideoEncoderType::H265, Some(EncoderBackend::Rkmpp))
+                        .ok_or_else(|| AppError::VideoError(
+                            "RKMPP backend not available for H.265".to_string()
+                        ))?;
+                    info!("Creating H265 encoder with RKMPP backend for YUYV direct input (codec: {})", codec_name);
+                    H265Encoder::with_codec(encoder_config, &codec_name)?
+                } else if let Some(ref backend) = config.encoder_backend {
                     let codec_name = get_codec_name(VideoEncoderType::H265, Some(*backend))
                         .ok_or_else(|| AppError::VideoError(format!(
                             "Backend {:?} does not support H.265", backend

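use_yuyv_direct is computed earlier in this function, outside the hunk shown. A hedged sketch of how such a flag could be derived; the config.input_format field used here is an assumption, not confirmed project code:

```rust
// Sketch: feed YUYV straight to the encoder only when the capture device already
// produces YUYV and the selected backend is RKMPP (which accepts YUYV input directly).
let use_yuyv_direct = config.input_format == PixelFormat::Yuyv
    && matches!(config.encoder_backend, Some(EncoderBackend::Rkmpp));
```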