use crate::ffmpeg::{init_av_log, AVHWDeviceType::*};
use crate::{
    common::DataFormat::*,
    ffmpeg::{AVHWDeviceType, AVPixelFormat},
    ffmpeg_ram::{
        ffmpeg_ram_decode, ffmpeg_ram_free_decoder, ffmpeg_ram_new_decoder, CodecInfo,
        Priority, AV_NUM_DATA_POINTERS,
    },
};
use log::error;
use std::{
    ffi::{c_void, CString},
    os::raw::c_int,
    slice::from_raw_parts,
    vec,
};

#[derive(Debug, Clone)]
pub struct DecodeContext {
    pub name: String,
    pub device_type: AVHWDeviceType,
    pub thread_count: i32,
}

pub struct DecodeFrame {
    pub pixfmt: AVPixelFormat,
    pub width: i32,
    pub height: i32,
    pub data: Vec<Vec<u8>>,
    pub linesize: Vec<i32>,
    pub key: bool,
}

impl std::fmt::Display for DecodeFrame {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut s = String::from("data:");
        for data in self.data.iter() {
            s.push_str(format!("{} ", data.len()).as_str());
        }
        s.push_str(", linesize:");
        for linesize in self.linesize.iter() {
            s.push_str(format!("{} ", linesize).as_str());
        }
        write!(
            f,
            "pixfmt:{}, width:{}, height:{}, key:{}, {}",
            self.pixfmt as i32, self.width, self.height, self.key, s,
        )
    }
}

pub struct Decoder {
    codec: *mut c_void,
    // Heap-allocated so the C callback can fill it through a raw pointer.
    frames: *mut Vec<DecodeFrame>,
    pub ctx: DecodeContext,
}

unsafe impl Send for Decoder {}
unsafe impl Sync for Decoder {}

impl Decoder {
    pub fn new(ctx: DecodeContext) -> Result<Self, ()> {
        init_av_log();
        unsafe {
            let codec = ffmpeg_ram_new_decoder(
                CString::new(ctx.name.as_str()).map_err(|_| ())?.as_ptr(),
                ctx.device_type as _,
                ctx.thread_count,
                Some(Decoder::callback),
            );
            if codec.is_null() {
                return Err(());
            }
            Ok(Decoder {
                codec,
                frames: Box::into_raw(Box::new(Vec::<DecodeFrame>::new())),
                ctx,
            })
        }
    }

    /// Feeds one encoded packet to the decoder. Decoded frames are collected by
    /// `callback` into the internal buffer, which is returned on success.
    pub fn decode(&mut self, packet: &[u8]) -> Result<&mut Vec<DecodeFrame>, i32> {
        unsafe {
            (&mut *self.frames).clear();
            let ret = ffmpeg_ram_decode(
                self.codec,
                packet.as_ptr(),
                packet.len() as c_int,
                self.frames as *const _ as *const c_void,
            );
            if ret < 0 {
                Err(ret)
            } else {
                Ok(&mut *self.frames)
            }
        }
    }

    /// Called from the C side once per decoded frame; copies the plane data into a
    /// `DecodeFrame` and appends it to the `Vec` passed in `obj`.
    unsafe extern "C" fn callback(
        obj: *const c_void,
        width: c_int,
        height: c_int,
        pixfmt: c_int,
        linesizes: *mut c_int,
        datas: *mut *mut u8,
        key: c_int,
    ) {
        let frames = &mut *(obj as *mut Vec<DecodeFrame>);
        let datas = from_raw_parts(datas, AV_NUM_DATA_POINTERS as _);
        let linesizes = from_raw_parts(linesizes, AV_NUM_DATA_POINTERS as _);
        let mut frame = DecodeFrame {
            pixfmt: std::mem::transmute(pixfmt),
            width,
            height,
            data: vec![],
            linesize: vec![],
            key: key != 0,
        };
        // Handle YUV420P and YUVJ420P (JPEG full-range) - same memory layout
        if pixfmt == AVPixelFormat::AV_PIX_FMT_YUV420P as c_int
            || pixfmt == AVPixelFormat::AV_PIX_FMT_YUVJ420P as c_int
        {
            let y = from_raw_parts(datas[0], (linesizes[0] * height) as usize).to_vec();
            let u = from_raw_parts(datas[1], (linesizes[1] * height / 2) as usize).to_vec();
            let v = from_raw_parts(datas[2], (linesizes[2] * height / 2) as usize).to_vec();
            frame.data.push(y);
            frame.data.push(u);
            frame.data.push(v);
            frame.linesize.push(linesizes[0]);
            frame.linesize.push(linesizes[1]);
            frame.linesize.push(linesizes[2]);
            frames.push(frame);
        } else if pixfmt == AVPixelFormat::AV_PIX_FMT_YUV422P as c_int
            || pixfmt == AVPixelFormat::AV_PIX_FMT_YUVJ422P as c_int
        {
            // YUV422P: U and V planes have the same height as Y (not half)
            let y = from_raw_parts(datas[0], (linesizes[0] * height) as usize).to_vec();
            let u = from_raw_parts(datas[1], (linesizes[1] * height) as usize).to_vec();
            let v = from_raw_parts(datas[2], (linesizes[2] * height) as usize).to_vec();
            frame.data.push(y);
            frame.data.push(u);
            frame.data.push(v);
            frame.linesize.push(linesizes[0]);
            frame.linesize.push(linesizes[1]);
            frame.linesize.push(linesizes[2]);
            frames.push(frame);
        } else if pixfmt == AVPixelFormat::AV_PIX_FMT_NV12 as c_int
            || pixfmt == AVPixelFormat::AV_PIX_FMT_NV21 as c_int
        {
            // NV12/NV21: Y plane plus one interleaved UV plane at half height
            let y = from_raw_parts(datas[0], (linesizes[0] * height) as usize).to_vec();
            let uv = from_raw_parts(datas[1], (linesizes[1] * height / 2) as usize).to_vec();
            frame.data.push(y);
            frame.data.push(uv);
            frame.linesize.push(linesizes[0]);
            frame.linesize.push(linesizes[1]);
            frames.push(frame);
        } else {
            error!("unsupported pixfmt {}", pixfmt as i32);
        }
    }

    /// Returns the available decoders for the IP-KVM scenario.
    /// Only the MJPEG software decoder is supported, as IP-KVM captures from video
    /// capture cards that output MJPEG streams.
    pub fn available_decoders() -> Vec<CodecInfo> {
        // The IP-KVM scenario only needs MJPEG decoding.
        // MJPEG comes from video capture cards; software decoding is sufficient.
        vec![CodecInfo {
            name: "mjpeg".to_owned(),
            format: MJPEG,
            hwdevice: AV_HWDEVICE_TYPE_NONE,
            priority: Priority::Best as _,
            ..Default::default()
        }]
    }
}

impl Drop for Decoder {
    fn drop(&mut self) {
        unsafe {
            // Free the C-side decoder and reclaim the heap-allocated frame buffer.
            ffmpeg_ram_free_decoder(self.codec);
            self.codec = std::ptr::null_mut();
            let _ = Box::from_raw(self.frames);
        }
    }
}
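
// Hedged usage sketch (not part of the original source): illustrates the intended
// call sequence for this wrapper. Assumptions: `packet` holds one complete encoded
// MJPEG frame obtained elsewhere (e.g. from a capture card), and `thread_count: 4`
// is an arbitrary illustrative value.
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[allow(dead_code)]
    fn decode_one_packet(packet: &[u8]) -> Result<(), ()> {
        // Pick the only decoder this build advertises (software MJPEG).
        let info = Decoder::available_decoders().into_iter().next().ok_or(())?;
        let mut decoder = Decoder::new(DecodeContext {
            name: info.name,
            device_type: crate::ffmpeg::AVHWDeviceType::AV_HWDEVICE_TYPE_NONE,
            thread_count: 4,
        })?;
        // A successful call yields zero or more decoded frames.
        let frames = decoder.decode(packet).map_err(|_| ())?;
        for frame in frames.iter() {
            println!("{}", frame);
        }
        Ok(())
    }
}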