Commit d143d158e4 by mofeng-git, 2025-12-28 18:19:16 +08:00
771 changed files with 220548 additions and 0 deletions

View File

@@ -0,0 +1,214 @@
use env_logger::{init_from_env, Env, DEFAULT_FILTER_ENV};
#[cfg(feature = "vram")]
use hwcodec::{
common::MAX_GOP,
vram::{DynamicContext, FeatureContext},
};
use hwcodec::{
common::{DataFormat, Quality::*, RateControl::*},
ffmpeg::AVPixelFormat::*,
ffmpeg_ram::{
decode::{DecodeContext, Decoder},
encode::{EncodeContext, Encoder},
ffmpeg_linesize_offset_length, CodecInfo,
},
};
#[cfg(feature = "vram")]
use tool::Tool;
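// Round-trips every available codec through a one-frame encode/decode at a range of
// slightly misaligned resolutions: first the RAM (FFmpeg) path, then, when built with
// the "vram" feature, the texture-based path.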
fn main() {
init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info"));
let max_align = 16;
setup_ram(max_align);
#[cfg(feature = "vram")]
setup_vram(max_align);
}
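// Sweeps the resolution from 1920x1080 up to +max_align in 2-pixel steps and pairs every
// H.264/H.265 encoder with the first matching decoder (and vice versa). Assumes at least
// one encoder and one decoder exist for each format.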
fn setup_ram(max_align: i32) {
let encoders = Encoder::available_encoders(
EncodeContext {
name: String::from(""),
mc_name: None,
width: 1920,
height: 1080,
pixfmt: AV_PIX_FMT_NV12,
align: 0,
fps: 30,
gop: 60,
rc: RC_CBR,
quality: Quality_Default,
kbs: 0,
q: -1,
thread_count: 1,
},
None,
);
let decoders = Decoder::available_decoders();
let h264_encoders = encoders
.iter()
.filter(|info| info.name.contains("h264"))
.cloned()
.collect::<Vec<_>>();
let h265_encoders = encoders
.iter()
.filter(|info| info.name.contains("hevc"))
.cloned()
.collect::<Vec<_>>();
let h264_decoders = decoders
.iter()
.filter(|info| info.format == DataFormat::H264)
.cloned()
.collect::<Vec<_>>();
let h265_decoders = decoders
.iter()
.filter(|info| info.format == DataFormat::H265)
.cloned()
.collect::<Vec<_>>();
let start_width = 1920;
let start_height = 1080;
let step = 2;
for width in (start_width..=(start_width + max_align)).step_by(step) {
for height in (start_height..=(start_height + max_align)).step_by(step) {
for encode_info in &h264_encoders {
test_ram(width, height, encode_info.clone(), h264_decoders[0].clone());
}
for decode_info in &h264_decoders {
test_ram(width, height, h264_encoders[0].clone(), decode_info.clone());
}
for encode_info in &h265_encoders {
test_ram(width, height, encode_info.clone(), h265_decoders[0].clone());
}
for decode_info in &h265_decoders {
test_ram(width, height, h265_encoders[0].clone(), decode_info.clone());
}
}
}
}
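// Encodes a single all-zero NV12 frame at the given resolution and decodes it back,
// asserting that exactly one frame is produced with the requested width and height.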
fn test_ram(width: i32, height: i32, encode_info: CodecInfo, decode_info: CodecInfo) {
println!(
"Test {}x{}: {} -> {}",
width, height, encode_info.name, decode_info.name
);
let encode_ctx = EncodeContext {
name: encode_info.name.clone(),
mc_name: None,
width,
height,
pixfmt: AV_PIX_FMT_NV12,
align: 0,
kbs: 0,
fps: 30,
gop: 60,
quality: Quality_Default,
rc: RC_CBR,
thread_count: 1,
q: -1,
};
let decode_ctx = DecodeContext {
name: decode_info.name.clone(),
device_type: decode_info.hwdevice,
thread_count: 4,
};
let (_, _, len) = ffmpeg_linesize_offset_length(
encode_ctx.pixfmt,
encode_ctx.width as _,
encode_ctx.height as _,
encode_ctx.align as _,
)
.unwrap();
let mut video_encoder = Encoder::new(encode_ctx).unwrap();
let mut video_decoder = Decoder::new(decode_ctx).unwrap();
let buf: Vec<u8> = vec![0; len as usize];
let encode_frames = video_encoder.encode(&buf, 0).unwrap();
assert_eq!(encode_frames.len(), 1);
    let decode_frames = video_decoder.decode(&encode_frames[0].data).unwrap();
    assert_eq!(decode_frames.len(), 1);
    assert_eq!(decode_frames[0].width, width);
    assert_eq!(decode_frames[0].height, height);
println!(
"Pass {}x{}: {} -> {} {:?}",
width, height, encode_info.name, decode_info.name, decode_info.hwdevice
)
}
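// Same sweep as setup_ram, but for the texture-based codecs: encoders and decoders are
// paired by adapter LUID and data format.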
#[cfg(feature = "vram")]
fn setup_vram(max_align: i32) {
let encoders = hwcodec::vram::encode::available(DynamicContext {
device: None,
width: 1920,
height: 1080,
kbitrate: 1000,
framerate: 30,
gop: MAX_GOP as _,
});
let decoders = hwcodec::vram::decode::available();
let start_width = 1920;
let start_height = 1080;
let step = 2;
for width in (start_width..=(start_width + max_align)).step_by(step) {
for height in (start_height..=(start_height + max_align)).step_by(step) {
for encode_info in &encoders {
if let Some(decoder) = decoders.iter().find(|d| {
d.luid == encode_info.luid && d.data_format == encode_info.data_format
}) {
test_vram(width, height, encode_info.clone(), decoder.clone());
}
}
for decode_info in &decoders {
if let Some(encoder) = encoders.iter().find(|e| {
e.luid == decode_info.luid && e.data_format == decode_info.data_format
}) {
test_vram(width, height, encoder.clone(), decode_info.clone());
}
}
}
}
}
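// Encodes one texture supplied by the test Tool and decodes it again, checking that the
// decoded texture has the requested size.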
#[cfg(feature = "vram")]
fn test_vram(
width: i32,
height: i32,
encode_info: FeatureContext,
decode_info: hwcodec::vram::DecodeContext,
) {
println!(
"Test {}x{}: {:?} {:?} -> {:?}",
width, height, encode_info.data_format, encode_info.driver, decode_info.driver
);
let mut tool = Tool::new(encode_info.luid).unwrap();
let encode_ctx = hwcodec::vram::EncodeContext {
f: encode_info.clone(),
d: hwcodec::vram::DynamicContext {
device: Some(tool.device()),
width,
height,
kbitrate: 1000,
framerate: 30,
gop: MAX_GOP as _,
},
};
let mut encoder = hwcodec::vram::encode::Encoder::new(encode_ctx).unwrap();
let mut decoder = hwcodec::vram::decode::Decoder::new(hwcodec::vram::DecodeContext {
device: Some(tool.device()),
..decode_info.clone()
})
.unwrap();
let encode_frames = encoder.encode(tool.get_texture(width, height), 0).unwrap();
assert_eq!(encode_frames.len(), 1);
let decoder_frames = decoder.decode(&encode_frames[0].data).unwrap();
assert_eq!(decoder_frames.len(), 1);
let (decoded_width, decoded_height) = tool.get_texture_size(decoder_frames[0].texture);
assert_eq!(decoded_width, width);
assert_eq!(decoded_height, height);
println!(
"Pass {}x{}: {:?} {:?} -> {:?}",
width, height, encode_info.data_format, encode_info.driver, decode_info.driver
);
}

View File

@@ -0,0 +1,67 @@
use env_logger::{init_from_env, Env, DEFAULT_FILTER_ENV};
use hwcodec::{
common::{get_gpu_signature, Quality::*, RateControl::*},
ffmpeg::AVPixelFormat,
ffmpeg_ram::{
decode::Decoder,
encode::{EncodeContext, Encoder},
},
};
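// Prints the RAM (FFmpeg) encoders and decoders and, when built with the "vram" feature,
// the texture-based ones, then logs the GPU signature and the total probe time.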
fn main() {
let start = std::time::Instant::now();
init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info"));
ram();
#[cfg(feature = "vram")]
vram();
log::info!(
"signature: {:?}, elapsed: {:?}",
get_gpu_signature(),
start.elapsed()
);
}
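// Probes the FFmpeg-based encoders that accept the context below, plus all decoders.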
fn ram() {
println!("ram:");
println!("encoders:");
let ctx = EncodeContext {
name: String::from(""),
mc_name: None,
width: 1280,
height: 720,
pixfmt: AVPixelFormat::AV_PIX_FMT_NV12,
align: 0,
kbs: 1000,
fps: 30,
gop: i32::MAX,
quality: Quality_Default,
rc: RC_CBR,
q: -1,
thread_count: 1,
};
let encoders = Encoder::available_encoders(ctx.clone(), None);
    encoders.iter().for_each(|e| println!("{:?}", e));
println!("decoders:");
let decoders = Decoder::available_decoders();
    decoders.iter().for_each(|e| println!("{:?}", e));
}
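// Probes the texture-based encoders and decoders for a 1080p dynamic context.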
#[cfg(feature = "vram")]
fn vram() {
use hwcodec::common::MAX_GOP;
use hwcodec::vram::{decode, encode, DynamicContext};
println!("vram:");
println!("encoders:");
let encoders = encode::available(DynamicContext {
width: 1920,
height: 1080,
kbitrate: 5000,
framerate: 30,
gop: MAX_GOP as _,
device: None,
});
    encoders.iter().for_each(|e| println!("{:?}", e));
println!("decoders:");
let decoders = decode::available();
    decoders.iter().for_each(|e| println!("{:?}", e));
}

View File

@@ -0,0 +1,147 @@
use env_logger::{init_from_env, Env, DEFAULT_FILTER_ENV};
use hwcodec::{
common::{Quality::*, RateControl::*},
ffmpeg::AVPixelFormat,
ffmpeg_ram::{
decode::{DecodeContext, Decoder},
encode::{EncodeContext, Encoder},
CodecInfo, CodecInfos,
},
};
use rand::random;
use std::io::Write;
use std::time::Instant;
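// Benchmark: generates yuv_count random NV12 frames, measures the average encode time of
// every available encoder, then measures the average decode time of every decoder using
// bitstreams produced by the prioritized ("best") encoders.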
fn main() {
init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info"));
let ctx = EncodeContext {
name: String::from(""),
mc_name: None,
width: 1920,
height: 1080,
pixfmt: AVPixelFormat::AV_PIX_FMT_NV12,
align: 0,
kbs: 5000,
fps: 30,
gop: 60,
quality: Quality_Default,
rc: RC_DEFAULT,
thread_count: 4,
q: -1,
};
let yuv_count = 10;
println!("benchmark");
let yuvs = prepare_yuv(ctx.width as _, ctx.height as _, yuv_count);
let encoders = Encoder::available_encoders(ctx.clone(), None);
log::info!("encoders: {:?}", encoders);
let best = CodecInfo::prioritized(encoders.clone());
for info in encoders {
test_encoder(info.clone(), ctx.clone(), &yuvs, is_best(&best, &info));
}
let (h264s, h265s) = prepare_h26x(best, ctx.clone(), &yuvs);
let decoders = Decoder::available_decoders();
log::info!("decoders: {:?}", decoders);
let best = CodecInfo::prioritized(decoders.clone());
for info in decoders {
let h26xs = if info.name.contains("h264") {
&h264s
} else {
&h265s
};
if h26xs.len() == yuv_count {
test_decoder(info.clone(), h26xs, is_best(&best, &info));
}
}
}
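// Encodes all prepared frames with the given codec and prints the average time per frame;
// a leading '*' marks a codec selected by CodecInfo::prioritized.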
fn test_encoder(info: CodecInfo, ctx: EncodeContext, yuvs: &[Vec<u8>], best: bool) {
let mut ctx = ctx;
ctx.name = info.name;
let mut encoder = Encoder::new(ctx.clone()).unwrap();
let start = Instant::now();
for yuv in yuvs {
let _ = encoder
.encode(yuv, start.elapsed().as_millis() as _)
.unwrap();
}
println!(
"{}{}: {:?}",
if best { "*" } else { "" },
ctx.name,
start.elapsed() / yuvs.len() as _
);
}
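// Decodes the prepared bitstream frame by frame and prints the average decode time,
// together with the hardware device type.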
fn test_decoder(info: CodecInfo, h26xs: &[Vec<u8>], best: bool) {
let ctx = DecodeContext {
name: info.name,
device_type: info.hwdevice,
thread_count: 4,
};
let mut decoder = Decoder::new(ctx.clone()).unwrap();
let start = Instant::now();
let mut cnt = 0;
for h26x in h26xs {
let _ = decoder.decode(h26x).unwrap();
cnt += 1;
}
let device = format!("{:?}", ctx.device_type).to_lowercase();
let device = device.split("_").last().unwrap();
println!(
"{}{} {}: {:?}",
if best { "*" } else { "" },
ctx.name,
device,
start.elapsed() / cnt
);
}
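// Fills count NV12-sized buffers (width * height * 3 / 2 bytes) with random data.
// The content is not a valid picture; it only serves to keep the codecs busy.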
fn prepare_yuv(width: usize, height: usize, count: usize) -> Vec<Vec<u8>> {
let mut ret = vec![];
for index in 0..count {
let linesize = width * 3 / 2;
let mut yuv = vec![0u8; linesize * height];
for y in 0..height {
for x in 0..linesize {
yuv[linesize * y + x] = random();
}
}
ret.push(yuv);
print!("\rprepare {}/{}", index + 1, count);
std::io::stdout().flush().ok();
}
println!();
ret
}
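// Encodes the random frames with the prioritized H.264 and H.265 encoders to produce the
// bitstreams consumed by the decoder benchmark in main.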
fn prepare_h26x(
best: CodecInfos,
ctx: EncodeContext,
    yuvs: &[Vec<u8>],
) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) {
let f = |info: Option<CodecInfo>| {
let mut h26xs = vec![];
if let Some(info) = info {
let mut ctx = ctx.clone();
ctx.name = info.name;
let mut encoder = Encoder::new(ctx).unwrap();
for yuv in yuvs {
let h26x = encoder.encode(yuv, 0).unwrap();
for frame in h26x {
h26xs.push(frame.data.to_vec());
}
}
}
h26xs
};
(f(best.h264), f(best.h265))
}
fn is_best(best: &CodecInfos, info: &CodecInfo) -> bool {
Some(info.clone()) == best.h264 || Some(info.clone()) == best.h265
}

View File

@@ -0,0 +1,117 @@
use env_logger::{init_from_env, Env, DEFAULT_FILTER_ENV};
use hwcodec::{
common::{Quality::*, RateControl::*},
ffmpeg::{AVHWDeviceType::*, AVPixelFormat::*},
ffmpeg_ram::{
decode::{DecodeContext, Decoder},
encode::{EncodeContext, Encoder},
ffmpeg_linesize_offset_length,
},
};
use std::{
fs::File,
io::{Read, Write},
};
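// Encodes raw NV12 input with the NVENC HEVC encoder and decodes the packets with the
// D3D11VA "hevc" decoder; the round trip runs on its own thread. Expects
// input/1920_1080_decoded.yuv and an output/ directory to exist.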
fn main() {
init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info"));
let encode_ctx = EncodeContext {
name: String::from("h264_nvenc"),
mc_name: None,
width: 1920,
height: 1080,
pixfmt: AV_PIX_FMT_NV12,
align: 0,
kbs: 0,
fps: 30,
gop: 60,
quality: Quality_Default,
rc: RC_DEFAULT,
thread_count: 4,
q: -1,
};
let decode_ctx = DecodeContext {
name: String::from("hevc"),
device_type: AV_HWDEVICE_TYPE_D3D11VA,
thread_count: 4,
};
let _ = std::thread::spawn(move || test_encode_decode(encode_ctx, decode_ctx)).join();
}
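// Reads raw NV12 frames from the input file, encodes each one, feeds the encoded packets
// straight back into the decoder, writes both results to output/ and finally logs the
// average encode/decode times and the average packet size.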
fn test_encode_decode(encode_ctx: EncodeContext, decode_ctx: DecodeContext) {
    let size = match ffmpeg_linesize_offset_length(
        encode_ctx.pixfmt,
        encode_ctx.width as _,
        encode_ctx.height as _,
        encode_ctx.align as _,
    ) {
        Ok((_, _, len)) => len as usize,
        Err(_) => return,
    };
let mut video_encoder = Encoder::new(encode_ctx).unwrap();
let mut video_decoder = Decoder::new(decode_ctx).unwrap();
let mut yuv_file = File::open("input/1920_1080_decoded.yuv").unwrap();
let mut encode_file = File::create("output/1920_1080.265").unwrap();
let mut decode_file = File::create("output/1920_1080_decode.yuv").unwrap();
let mut buf = vec![0; size + 64];
let mut encode_sum = 0;
let mut decode_sum = 0;
let mut encode_size = 0;
let mut counter = 0;
let mut f = |data: &[u8]| {
let now = std::time::Instant::now();
if let Ok(encode_frames) = video_encoder.encode(data, 0) {
log::info!("encode:{:?}", now.elapsed());
encode_sum += now.elapsed().as_micros();
for encode_frame in encode_frames.iter() {
encode_size += encode_frame.data.len();
encode_file.write_all(&encode_frame.data).unwrap();
encode_file.flush().unwrap();
let now = std::time::Instant::now();
            if let Ok(decode_frames) = video_decoder.decode(&encode_frame.data) {
                log::info!("decode:{:?}", now.elapsed());
                decode_sum += now.elapsed().as_micros();
                counter += 1;
                for decode_frame in decode_frames {
log::info!("decode_frame:{}", decode_frame);
for data in decode_frame.data.iter() {
decode_file.write_all(data).unwrap();
decode_file.flush().unwrap();
}
}
}
}
}
};
loop {
match yuv_file.read(&mut buf[..size]) {
Ok(n) => {
if n > 0 {
f(&buf[..n]);
} else {
break;
}
}
Err(e) => {
log::info!("{:?}", e);
break;
}
}
}
log::info!(
"counter:{}, encode_avg:{}us, decode_avg:{}us, size_avg:{}",
counter,
encode_sum / counter,
decode_sum / counter,
encode_size / counter as usize,
);
}

View File

@@ -0,0 +1,78 @@
use capture::dxgi;
use env_logger::{init_from_env, Env, DEFAULT_FILTER_ENV};
use hwcodec::common::{DataFormat, Driver, MAX_GOP};
use hwcodec::vram::{
decode::Decoder, encode::Encoder, DecodeContext, DynamicContext, EncodeContext, FeatureContext,
};
use render::Render;
use std::{
io::Write,
path::PathBuf,
time::{Duration, Instant},
};
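// Windows demo: captures the desktop via DXGI, encodes the captured texture on the GPU,
// appends the bitstream to a file, decodes it again and renders the decoded texture,
// accumulating capture/encode/decode timings along the way.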
fn main() {
init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "trace"));
    let luid = 69524; // other LUIDs tried: 63444, 59677
    unsafe {
        // Creating a Render for one of these LUIDs failed on this PC; that does not happen inside RustDesk.
let data_format = DataFormat::H265;
let mut capturer = dxgi::Capturer::new(luid).unwrap();
let mut render = Render::new(luid, false).unwrap();
let en_ctx = EncodeContext {
f: FeatureContext {
driver: Driver::FFMPEG,
vendor: Driver::NV,
data_format,
luid,
},
d: DynamicContext {
device: Some(capturer.device()),
width: capturer.width(),
height: capturer.height(),
kbitrate: 5000,
framerate: 30,
gop: MAX_GOP as _,
},
};
let de_ctx = DecodeContext {
device: Some(render.device()),
driver: Driver::FFMPEG,
vendor: Driver::NV,
data_format,
luid,
};
let mut dec = Decoder::new(de_ctx).unwrap();
let mut enc = Encoder::new(en_ctx).unwrap();
        let filename = PathBuf::from(".\\1.265");
let mut file = std::fs::File::create(filename).unwrap();
let mut dup_sum = Duration::ZERO;
let mut enc_sum = Duration::ZERO;
let mut dec_sum = Duration::ZERO;
let mut pts_instant = Instant::now();
loop {
let start = Instant::now();
let texture = capturer.capture(100);
if texture.is_null() {
continue;
}
dup_sum += start.elapsed();
let start = Instant::now();
let frame = enc
.encode(texture, pts_instant.elapsed().as_millis() as _)
.unwrap();
enc_sum += start.elapsed();
for f in frame {
                file.write_all(&f.data).unwrap();
let start = Instant::now();
let frames = dec.decode(&f.data).unwrap();
dec_sum += start.elapsed();
for f in frames {
render.render(f.texture).unwrap();
}
}
}
}
}

View File

@@ -0,0 +1,128 @@
use env_logger::{init_from_env, Env, DEFAULT_FILTER_ENV};
use hwcodec::{
common::{Quality::*, RateControl::*, MAX_GOP},
ffmpeg::{
AVHWDeviceType::{self, *},
AVPixelFormat::*,
},
ffmpeg_ram::{
decode::{DecodeContext, Decoder},
encode::{EncodeContext, Encoder},
},
};
use std::{
fs::File,
io::{Read, Write},
};
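// Decodes two pre-recorded bitstreams (1600x900 and 1440x900) with a CUDA decoder and
// re-encodes every decoded frame with NVENC, writing the decoded YUV and the re-encoded
// bitstream under output/.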
fn main() {
let gpu = true;
let h264 = true;
let hw_type = if gpu { "gpu" } else { "hw" };
let file_type = if h264 { "h264" } else { "h265" };
let codec = if h264 { "h264" } else { "hevc" };
init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info"));
let device_type = AV_HWDEVICE_TYPE_CUDA;
let decode_ctx = DecodeContext {
name: String::from(codec),
device_type,
thread_count: 4,
};
let mut video_decoder = Decoder::new(decode_ctx).unwrap();
decode_encode(
&mut video_decoder,
0,
hw_type,
file_type,
1600,
900,
h264,
device_type,
);
decode_encode(
&mut video_decoder,
1,
hw_type,
file_type,
1440,
900,
h264,
device_type,
);
}
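// Frame boundaries of the input bitstream come from the accompanying .txt file as a
// comma-separated list of byte lengths; each frame is decoded, dumped as YUV and re-encoded.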
fn decode_encode(
video_decoder: &mut Decoder,
index: usize,
hw_type: &str,
file_type: &str,
width: usize,
height: usize,
h264: bool,
device_type: AVHWDeviceType,
) {
let input_enc_filename = format!("input/data_and_line/{hw_type}_{width}_{height}.{file_type}");
let len_filename = format!("input/data_and_line/{hw_type}_{width}_{height}_{file_type}.txt");
let enc_ctx = EncodeContext {
name: if h264 {
"h264_nvenc".to_owned()
} else {
"hevc_nvenc".to_owned()
},
mc_name: None,
width: width as _,
height: height as _,
pixfmt: if device_type == AV_HWDEVICE_TYPE_NONE {
AV_PIX_FMT_YUV420P
} else {
AV_PIX_FMT_NV12
},
align: 0,
kbs: 1_000,
fps: 30,
gop: MAX_GOP as _,
quality: Quality_Default,
rc: RC_DEFAULT,
thread_count: 4,
q: -1,
};
let mut video_encoder = Encoder::new(enc_ctx).unwrap();
let mut encode_file =
File::create(format!("output/{hw_type}_{width}_{height}.{file_type}")).unwrap();
let mut yuv_file =
File::create(format!("output/{hw_type}_{width}_{height}_decode.yuv")).unwrap();
let mut file_lens = File::open(len_filename).unwrap();
let mut file = File::open(input_enc_filename).unwrap();
let mut file_lens_buf = Vec::new();
file_lens.read_to_end(&mut file_lens_buf).unwrap();
let file_lens_str = String::from_utf8_lossy(&file_lens_buf).to_string();
let lens: Vec<usize> = file_lens_str
.split(",")
.filter(|e| !e.is_empty())
.map(|e| e.parse().unwrap())
.collect();
    for len in &lens {
        let mut buf = vec![0; *len];
        file.read_exact(&mut buf).unwrap();
        let frames = video_decoder.decode(&buf).unwrap();
        assert_eq!(frames.len(), 1);
        println!(
            "file{}, w:{}, h:{}, fmt:{:?}, linesize:{:?}",
            index, frames[0].width, frames[0].height, frames[0].pixfmt, frames[0].linesize
        );
let mut encode_buf = Vec::new();
for d in &mut frames[0].data {
encode_buf.append(d);
}
yuv_file.write_all(&encode_buf).unwrap();
let frames = video_encoder.encode(&encode_buf, 0).unwrap();
assert_eq!(frames.len(), 1);
for f in frames {
encode_file.write_all(&f.data).unwrap();
}
}
}