set width, height, and stride together with the rgba data for rendering

Signed-off-by: fufesou <shuanglongchen@yeah.net>
This commit is contained in:
fufesou 2023-04-28 11:44:52 +08:00
parent 8a9af3a755
commit 6b1645f44d
11 changed files with 116 additions and 108 deletions

View File

@ -79,7 +79,7 @@ class PluginItem extends StatelessWidget {
final FFI? ffi;
final String location;
final PluginModel pluginModel;
final isMenu;
final bool isMenu;
PluginItem({
Key? key,

View File

@ -10,7 +10,7 @@ use crate::hwcodec::*;
use crate::mediacodec::{
MediaCodecDecoder, MediaCodecDecoders, H264_DECODER_SUPPORT, H265_DECODER_SUPPORT,
};
use crate::{vpxcodec::*, CodecName, ImageFormat};
use crate::{vpxcodec::*, CodecName, ImageRgb};
#[cfg(not(any(target_os = "android", target_os = "ios")))]
use hbb_common::sysinfo::{System, SystemExt};
@ -290,20 +290,19 @@ impl Decoder {
pub fn handle_video_frame(
&mut self,
frame: &video_frame::Union,
fmt: (ImageFormat, usize),
rgb: &mut Vec<u8>,
rgb: &mut ImageRgb,
) -> ResultType<bool> {
match frame {
video_frame::Union::Vp8s(vp8s) => {
Decoder::handle_vpxs_video_frame(&mut self.vp8, vp8s, fmt, rgb)
Decoder::handle_vpxs_video_frame(&mut self.vp8, vp8s, rgb)
}
video_frame::Union::Vp9s(vp9s) => {
Decoder::handle_vpxs_video_frame(&mut self.vp9, vp9s, fmt, rgb)
Decoder::handle_vpxs_video_frame(&mut self.vp9, vp9s, rgb)
}
#[cfg(feature = "hwcodec")]
video_frame::Union::H264s(h264s) => {
if let Some(decoder) = &mut self.hw.h264 {
Decoder::handle_hw_video_frame(decoder, h264s, fmt, rgb, &mut self.i420)
Decoder::handle_hw_video_frame(decoder, h264s, rgb, &mut self.i420)
} else {
Err(anyhow!("don't support h264!"))
}
@ -311,7 +310,7 @@ impl Decoder {
#[cfg(feature = "hwcodec")]
video_frame::Union::H265s(h265s) => {
if let Some(decoder) = &mut self.hw.h265 {
Decoder::handle_hw_video_frame(decoder, h265s, fmt, rgb, &mut self.i420)
Decoder::handle_hw_video_frame(decoder, h265s, rgb, &mut self.i420)
} else {
Err(anyhow!("don't support h265!"))
}
@ -319,7 +318,7 @@ impl Decoder {
#[cfg(feature = "mediacodec")]
video_frame::Union::H264s(h264s) => {
if let Some(decoder) = &mut self.media_codec.h264 {
Decoder::handle_mediacodec_video_frame(decoder, h264s, fmt, rgb)
Decoder::handle_mediacodec_video_frame(decoder, h264s, rgb)
} else {
Err(anyhow!("don't support h264!"))
}
@ -327,7 +326,7 @@ impl Decoder {
#[cfg(feature = "mediacodec")]
video_frame::Union::H265s(h265s) => {
if let Some(decoder) = &mut self.media_codec.h265 {
Decoder::handle_mediacodec_video_frame(decoder, h265s, fmt, rgb)
Decoder::handle_mediacodec_video_frame(decoder, h265s, rgb)
} else {
Err(anyhow!("don't support h265!"))
}
@ -339,8 +338,7 @@ impl Decoder {
fn handle_vpxs_video_frame(
decoder: &mut VpxDecoder,
vpxs: &EncodedVideoFrames,
fmt: (ImageFormat, usize),
rgb: &mut Vec<u8>,
rgb: &mut ImageRgb,
) -> ResultType<bool> {
let mut last_frame = Image::new();
for vpx in vpxs.frames.iter() {
@ -356,7 +354,7 @@ impl Decoder {
if last_frame.is_null() {
Ok(false)
} else {
last_frame.to(fmt.0, fmt.1, rgb);
last_frame.to(rgb);
Ok(true)
}
}
@ -365,15 +363,14 @@ impl Decoder {
fn handle_hw_video_frame(
decoder: &mut HwDecoder,
frames: &EncodedVideoFrames,
fmt: (ImageFormat, usize),
raw: &mut Vec<u8>,
rgb: &mut ImageRgb,
i420: &mut Vec<u8>,
) -> ResultType<bool> {
let mut ret = false;
for h264 in frames.frames.iter() {
for image in decoder.decode(&h264.data)? {
// TODO: just process the last frame
if image.to_fmt(fmt, raw, i420).is_ok() {
if image.to_fmt(rgb, i420).is_ok() {
ret = true;
}
}
@ -385,12 +382,11 @@ impl Decoder {
fn handle_mediacodec_video_frame(
decoder: &mut MediaCodecDecoder,
frames: &EncodedVideoFrames,
fmt: (ImageFormat, usize),
raw: &mut Vec<u8>,
rgb: &mut ImageRgb,
) -> ResultType<bool> {
let mut ret = false;
for h264 in frames.frames.iter() {
return decoder.decode(&h264.data, fmt, raw);
return decoder.decode(&h264.data, rgb);
}
return Ok(false);
}

View File

@ -1,6 +1,6 @@
use crate::{
codec::{EncoderApi, EncoderCfg},
hw, ImageFormat, HW_STRIDE_ALIGN,
hw, ImageFormat, ImageRgb, HW_STRIDE_ALIGN,
};
use hbb_common::{
allow_err,
@ -227,30 +227,28 @@ pub struct HwDecoderImage<'a> {
}
impl HwDecoderImage<'_> {
// take dst_stride into account when you convert
pub fn to_fmt(
&self,
(fmt, dst_stride): (ImageFormat, usize),
fmt_data: &mut Vec<u8>,
i420: &mut Vec<u8>,
) -> ResultType<()> {
pub fn to_fmt(&self, rgb: &mut ImageRgb, i420: &mut Vec<u8>) -> ResultType<()> {
let frame = self.frame;
rgb.w = frame.width as _;
rgb.h = frame.height as _;
// take dst_stride into account when you convert
let dst_stride = rgb.stride;
match frame.pixfmt {
AVPixelFormat::AV_PIX_FMT_NV12 => hw::hw_nv12_to(
fmt,
rgb.fmt,
frame.width as _,
frame.height as _,
&frame.data[0],
&frame.data[1],
frame.linesize[0] as _,
frame.linesize[1] as _,
fmt_data,
&mut rgb.raw as _,
i420,
HW_STRIDE_ALIGN,
),
AVPixelFormat::AV_PIX_FMT_YUV420P => {
hw::hw_i420_to(
fmt,
rgb.fmt,
frame.width as _,
frame.height as _,
&frame.data[0],
@ -259,7 +257,7 @@ impl HwDecoderImage<'_> {
frame.linesize[0] as _,
frame.linesize[1] as _,
frame.linesize[2] as _,
fmt_data,
&mut rgb.raw as _,
);
return Ok(());
}
@ -267,11 +265,17 @@ impl HwDecoderImage<'_> {
}
pub fn bgra(&self, bgra: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
self.to_fmt((ImageFormat::ARGB, 1), bgra, i420)
let mut rgb = ImageRgb::new(ImageFormat::ARGB, 1);
self.to_fmt(&mut rgb, i420)?;
*bgra = rgb.raw;
Ok(())
}
pub fn rgba(&self, rgba: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
self.to_fmt((ImageFormat::ABGR, 1), rgba, i420)
let mut rgb = ImageRgb::new(ImageFormat::ABGR, 1);
self.to_fmt(&mut rgb, i420)?;
*rgba = rgb.raw;
Ok(())
}
}

View File

@ -10,7 +10,7 @@ use std::{
use crate::ImageFormat;
use crate::{
codec::{EncoderApi, EncoderCfg},
I420ToABGR, I420ToARGB,
I420ToABGR, I420ToARGB, ImageRgb,
};
/// MediaCodec mime type name
@ -50,13 +50,9 @@ impl MediaCodecDecoder {
MediaCodecDecoders { h264, h265 }
}
// take dst_stride into account please
pub fn decode(
&mut self,
data: &[u8],
(fmt, dst_stride): (ImageFormat, usize),
raw: &mut Vec<u8>,
) -> ResultType<bool> {
pub fn decode(&mut self, data: &[u8], rgb: &mut ImageRgb) -> ResultType<bool> {
// take dst_stride into account please
let dst_stride = rgb.stride;
match self.dequeue_input_buffer(Duration::from_millis(10))? {
Some(mut input_buffer) => {
let mut buf = input_buffer.buffer_mut();
@ -89,12 +85,12 @@ impl MediaCodecDecoder {
let bps = 4;
let u = buf.len() * 2 / 3;
let v = buf.len() * 5 / 6;
raw.resize(h * w * bps, 0);
rgb.raw.resize(h * w * bps, 0);
let y_ptr = buf.as_ptr();
let u_ptr = buf[u..].as_ptr();
let v_ptr = buf[v..].as_ptr();
unsafe {
match fmt {
match rgb.fmt {
ImageFormat::ARGB => {
I420ToARGB(
y_ptr,
@ -103,7 +99,7 @@ impl MediaCodecDecoder {
stride / 2,
v_ptr,
stride / 2,
raw.as_mut_ptr(),
rgb.raw.as_mut_ptr(),
(w * bps) as _,
w as _,
h as _,
@ -117,7 +113,7 @@ impl MediaCodecDecoder {
stride / 2,
v_ptr,
stride / 2,
raw.as_mut_ptr(),
rgb.raw.as_mut_ptr(),
(w * bps) as _,
w as _,
h as _,

View File

@ -51,6 +51,26 @@ pub enum ImageFormat {
ARGB,
}
/// A decoded RGB(A) frame buffer bundled with the metadata needed to render it.
///
/// Keeping `w`, `h` and `stride` next to `raw` guarantees the renderer always
/// sees dimensions that match the pixel data, instead of receiving them
/// through a separate channel that can fall out of sync.
pub struct ImageRgb {
    /// Raw pixel bytes in the layout described by `fmt`.
    pub raw: Vec<u8>,
    /// Pixel format of `raw` (e.g. ARGB / ABGR).
    pub fmt: ImageFormat,
    /// Frame width in pixels; updated by the decoder on each frame.
    pub w: usize,
    /// Frame height in pixels; updated by the decoder on each frame.
    pub h: usize,
    /// Destination row-stride alignment used when converting into `raw`.
    pub stride: usize,
}

impl ImageRgb {
    /// Creates an empty image with the given pixel format and stride.
    ///
    /// Width, height and the pixel buffer start out zeroed/empty; the decoder
    /// fills them in when the first frame is converted.
    pub fn new(fmt: ImageFormat, stride: usize) -> Self {
        ImageRgb {
            fmt,
            stride,
            raw: Vec::new(),
            w: 0,
            h: 0,
        }
    }
}
#[inline]
pub fn would_block_if_equal(old: &mut Vec<u8>, b: &[u8]) -> std::io::Result<()> {
// does this really help?

View File

@ -7,7 +7,7 @@ use hbb_common::message_proto::{EncodedVideoFrame, EncodedVideoFrames, Message,
use hbb_common::ResultType;
use crate::STRIDE_ALIGN;
use crate::{codec::EncoderApi, ImageFormat};
use crate::{codec::EncoderApi, ImageFormat, ImageRgb};
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
use hbb_common::bytes::Bytes;
@ -130,9 +130,11 @@ impl EncoderApi for VpxEncoder {
c.kf_mode = vpx_kf_mode::VPX_KF_DISABLED; // reduce bandwidth a lot
/*
VPX encoder支持two-pass encoderate control的
bitrate下得到最好的PSNR
The VPX encoder supports two-pass encoding for rate control purposes.
In two-pass encoding, the entire encoding process is performed twice.
The first pass generates new control parameters for the second pass.
This approach enables the best PSNR at the same bit rate.
*/
let mut ctx = Default::default();
@ -416,25 +418,6 @@ impl VpxDecoder {
Ok(Self { ctx })
}
pub fn decode2rgb(&mut self, data: &[u8], fmt: ImageFormat) -> Result<Vec<u8>> {
let mut img = Image::new();
for frame in self.decode(data)? {
drop(img);
img = frame;
}
for frame in self.flush()? {
drop(img);
img = frame;
}
if img.is_null() {
Ok(Vec::new())
} else {
let mut out = Default::default();
img.to(fmt, 1, &mut out);
Ok(out)
}
}
/// Feed some compressed data to the encoder
///
/// The `data` slice is sent to the decoder
@ -538,20 +521,26 @@ impl Image {
self.inner().stride[iplane]
}
pub fn to(&self, fmt: ImageFormat, stride: usize, dst: &mut Vec<u8>) {
let h = self.height();
let w = self.width();
#[inline]
pub fn get_bytes_per_row(w: usize, fmt: ImageFormat, stride: usize) -> usize {
let bytes_per_pixel = match fmt {
ImageFormat::Raw => 3,
ImageFormat::ARGB | ImageFormat::ABGR => 4,
};
// https://github.com/lemenkov/libyuv/blob/6900494d90ae095d44405cd4cc3f346971fa69c9/source/convert_argb.cc#L128
// https://github.com/lemenkov/libyuv/blob/6900494d90ae095d44405cd4cc3f346971fa69c9/source/convert_argb.cc#L129
let bytes_per_row = (w * bytes_per_pixel + stride - 1) & !(stride - 1);
dst.resize(h * bytes_per_row, 0);
(w * bytes_per_pixel + stride - 1) & !(stride - 1)
}
// rgb [in/out] fmt and stride must be set in ImageRgb
pub fn to(&self, rgb: &mut ImageRgb) {
rgb.w = self.width();
rgb.h = self.height();
let bytes_per_row = Self::get_bytes_per_row(rgb.w, rgb.fmt, rgb.stride);
rgb.raw.resize(rgb.h * bytes_per_row, 0);
let img = self.inner();
unsafe {
match fmt {
match rgb.fmt {
ImageFormat::Raw => {
super::I420ToRAW(
img.planes[0],
@ -560,7 +549,7 @@ impl Image {
img.stride[1],
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
rgb.raw.as_mut_ptr(),
bytes_per_row as _,
self.width() as _,
self.height() as _,
@ -574,7 +563,7 @@ impl Image {
img.stride[1],
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
rgb.raw.as_mut_ptr(),
bytes_per_row as _,
self.width() as _,
self.height() as _,
@ -588,7 +577,7 @@ impl Image {
img.stride[1],
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
rgb.raw.as_mut_ptr(),
bytes_per_row as _,
self.width() as _,
self.height() as _,

View File

@ -50,7 +50,7 @@ pub use helper::*;
use scrap::{
codec::Decoder,
record::{Recorder, RecorderContext},
ImageFormat,
ImageFormat, ImageRgb,
};
use crate::common::{self, is_keyboard_mode_supported};
@ -980,7 +980,7 @@ impl AudioHandler {
/// Video handler for the [`Client`].
pub struct VideoHandler {
decoder: Decoder,
pub rgb: Vec<u8>,
pub rgb: ImageRgb,
recorder: Arc<Mutex<Option<Recorder>>>,
record: bool,
}
@ -990,7 +990,7 @@ impl VideoHandler {
pub fn new() -> Self {
VideoHandler {
decoder: Decoder::new(),
rgb: Default::default(),
rgb: ImageRgb::new(ImageFormat::ARGB, crate::DST_STRIDE_RGBA),
recorder: Default::default(),
record: false,
}
@ -1001,11 +1001,7 @@ impl VideoHandler {
pub fn handle_frame(&mut self, vf: VideoFrame) -> ResultType<bool> {
match &vf.union {
Some(frame) => {
let res = self.decoder.handle_video_frame(
frame,
(ImageFormat::ARGB, crate::DST_STRIDE_RGBA),
&mut self.rgb,
);
let res = self.decoder.handle_video_frame(frame, &mut self.rgb);
if self.record {
self.recorder
.lock()
@ -1757,7 +1753,7 @@ pub fn start_video_audio_threads<F>(
Arc<AtomicUsize>,
)
where
F: 'static + FnMut(&mut Vec<u8>) + Send,
F: 'static + FnMut(&scrap::ImageRgb) + Send,
{
let (video_sender, video_receiver) = mpsc::channel::<MediaData>();
let video_queue = Arc::new(ArrayQueue::<VideoFrame>::new(VIDEO_QUEUE_SIZE));

View File

@ -179,8 +179,9 @@ pub type FlutterRgbaRendererPluginOnRgba = unsafe extern "C" fn(
struct VideoRenderer {
// TextureRgba pointer in flutter native.
ptr: usize,
width: i32,
height: i32,
width: usize,
height: usize,
size: usize,
on_rgba_func: Option<Symbol<'static, FlutterRgbaRendererPluginOnRgba>>,
}
@ -209,6 +210,7 @@ impl Default for VideoRenderer {
ptr: 0,
width: 0,
height: 0,
size: 0,
on_rgba_func,
}
}
@ -217,24 +219,30 @@ impl Default for VideoRenderer {
#[cfg(feature = "flutter_texture_render")]
impl VideoRenderer {
#[inline]
pub fn set_size(&mut self, width: i32, height: i32) {
pub fn set_size(&mut self, width: usize, height: usize) {
self.width = width;
self.height = height;
}
pub fn on_rgba(&self, rgba: &Vec<u8>) {
if self.ptr == usize::default() || self.width == 0 || self.height == 0 {
pub fn on_rgba(&self, rgba: &scrap::ImageRgb) {
if self.ptr == usize::default() {
return;
}
// It is also Ok to skip this check.
if self.width != rgba.w || self.height != rgba.h {
return;
}
if let Some(func) = &self.on_rgba_func {
unsafe {
func(
self.ptr as _,
rgba.as_ptr() as _,
rgba.len() as _,
self.width as _,
self.height as _,
crate::DST_STRIDE_RGBA as _,
rgba.raw.as_ptr() as _,
rgba.raw.len() as _,
rgba.w as _,
rgba.h as _,
rgba.stride as _,
)
};
}
@ -315,7 +323,7 @@ impl FlutterHandler {
#[inline]
#[cfg(feature = "flutter_texture_render")]
pub fn set_size(&mut self, width: i32, height: i32) {
pub fn set_size(&mut self, width: usize, height: usize) {
*self.notify_rendered.write().unwrap() = false;
self.renderer.write().unwrap().set_size(width, height);
}
@ -492,7 +500,7 @@ impl InvokeUiSession for FlutterHandler {
#[inline]
#[cfg(not(feature = "flutter_texture_render"))]
fn on_rgba(&self, data: &mut Vec<u8>) {
fn on_rgba(&self, rgba: &scrap::ImageRgb) {
// If the current rgba is not fetched by flutter, i.e., is valid.
// We give up sending a new event to flutter.
if self.rgba_valid.load(Ordering::Relaxed) {
@ -500,7 +508,7 @@ impl InvokeUiSession for FlutterHandler {
}
self.rgba_valid.store(true, Ordering::Relaxed);
// Return the rgba buffer to the video handler for reusing allocated rgba buffer.
std::mem::swap::<Vec<u8>>(data, &mut *self.rgba.write().unwrap());
std::mem::swap::<Vec<u8>>(rgba.raw, &mut *self.rgba.write().unwrap());
if let Some(stream) = &*self.event_stream.read().unwrap() {
stream.add(EventToUI::Rgba);
}
@ -508,8 +516,8 @@ impl InvokeUiSession for FlutterHandler {
#[inline]
#[cfg(feature = "flutter_texture_render")]
fn on_rgba(&self, data: &mut Vec<u8>) {
self.renderer.read().unwrap().on_rgba(data);
fn on_rgba(&self, rgba: &scrap::ImageRgb) {
self.renderer.read().unwrap().on_rgba(rgba);
if *self.notify_rendered.read().unwrap() {
return;
}
@ -1047,5 +1055,4 @@ pub fn stop_global_event_stream(app_type: String) {
}
#[no_mangle]
unsafe extern "C" fn get_rgba() {
}
unsafe extern "C" fn get_rgba() {}

View File

@ -539,7 +539,7 @@ pub fn session_change_resolution(id: String, width: i32, height: i32) {
}
}
pub fn session_set_size(_id: String, _width: i32, _height: i32) {
pub fn session_set_size(_id: String, _width: usize, _height: usize) {
#[cfg(feature = "flutter_texture_render")]
if let Some(session) = SESSIONS.write().unwrap().get_mut(&_id) {
session.set_size(_width, _height);

View File

@ -223,12 +223,12 @@ impl InvokeUiSession for SciterHandler {
self.call("adaptSize", &make_args!());
}
fn on_rgba(&self, data: &mut Vec<u8>) {
fn on_rgba(&self, rgba: &scrap::ImageRgb) {
VIDEO
.lock()
.unwrap()
.as_mut()
.map(|v| v.render_frame(data).ok());
.map(|v| v.render_frame(rgba.raw).ok());
}
fn set_peer_info(&self, pi: &PeerInfo) {

View File

@ -926,7 +926,7 @@ pub trait InvokeUiSession: Send + Sync + Clone + 'static + Sized + Default {
fn update_block_input_state(&self, on: bool);
fn job_progress(&self, id: i32, file_num: i32, speed: f64, finished_size: f64);
fn adapt_size(&self);
fn on_rgba(&self, data: &mut Vec<u8>);
fn on_rgba(&self, rgba: &scrap::ImageRgb);
fn msgbox(&self, msgtype: &str, title: &str, text: &str, link: &str, retry: bool);
#[cfg(any(target_os = "android", target_os = "ios"))]
fn clipboard(&self, content: String);
@ -1207,7 +1207,7 @@ pub async fn io_loop<T: InvokeUiSession>(handler: Session<T>) {
let frame_count_cl = frame_count.clone();
let ui_handler = handler.ui_handler.clone();
let (video_sender, audio_sender, video_queue, decode_fps) =
start_video_audio_threads(move |data: &mut Vec<u8>| {
start_video_audio_threads(move |data: &scrap::ImageRgb| {
frame_count_cl.fetch_add(1, Ordering::Relaxed);
ui_handler.on_rgba(data);
});