Merge pull request #3479 from rustdesk/revert-3477-fix/macos_texture_stride_align

Revert "Fix/macos texture stride align"
RustDesk 2023-03-03 13:18:45 +08:00 committed by GitHub
commit 4351272297
12 changed files with 52 additions and 65 deletions

View File

@ -1228,10 +1228,10 @@ packages:
dependency: "direct main"
description:
name: texture_rgba_renderer
sha256: ec8d124e4c1d7dfff854ae34e95d7d9d877b8f9d291c383c67686e4b15cf538e
sha256: fbb09b2c6b4ce71261927f9e7e4ea339af3e2f3f2b175f6fb921de1c66ec848d
url: "https://pub.dev"
source: hosted
version: "0.0.12"
version: "0.0.8"
timing:
dependency: transitive
description:

View File

@ -92,7 +92,7 @@ dependencies:
password_strength: ^0.2.0
flutter_launcher_icons: ^0.11.0
flutter_keyboard_visibility: ^5.4.0
texture_rgba_renderer: ^0.0.12
texture_rgba_renderer: ^0.0.8
percent_indicator: ^4.2.2
dev_dependencies:

View File

@ -18,7 +18,7 @@ use webm::mux;
use webm::mux::Track;
use scrap::vpxcodec as vpx_encode;
use scrap::{TraitCapturer, Capturer, Display, STRIDE};
use scrap::{TraitCapturer, Capturer, Display, STRIDE_ALIGN};
const USAGE: &'static str = "
Simple WebM screen capture.
@ -137,7 +137,7 @@ fn main() -> io::Result<()> {
if let Ok(frame) = c.frame(Duration::from_millis(0)) {
let ms = time.as_secs() * 1000 + time.subsec_millis() as u64;
for frame in vpx.encode(ms as i64, &frame, STRIDE).unwrap() {
for frame in vpx.encode(ms as i64, &frame, STRIDE_ALIGN).unwrap() {
vt.add_frame(frame.data, frame.pts as u64 * 1_000_000, frame.key);
}
}
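
A quick note on units in this example: the elapsed capture time is reduced to milliseconds before encoding, and the frame is then added to the muxer with that pts scaled by 1_000_000. A minimal sketch of the conversion, as an illustration only:

    // The encoder is fed a millisecond pts; the muxer timestamp is presumably nanoseconds,
    // given the scaling applied when the frame is added.
    let pts_ns = (frame.pts as u64) * 1_000_000;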

View File

@ -306,18 +306,17 @@ impl Decoder {
pub fn handle_video_frame(
&mut self,
frame: &video_frame::Union,
stride: usize,
fmt: ImageFormat,
rgb: &mut Vec<u8>,
) -> ResultType<bool> {
match frame {
video_frame::Union::Vp9s(vp9s) => {
Decoder::handle_vp9s_video_frame(&mut self.vpx, vp9s, stride, fmt, rgb)
Decoder::handle_vp9s_video_frame(&mut self.vpx, vp9s, fmt, rgb)
}
#[cfg(feature = "hwcodec")]
video_frame::Union::H264s(h264s) => {
if let Some(decoder) = &mut self.hw.h264 {
Decoder::handle_hw_video_frame(decoder, h264s, stride, fmt, rgb, &mut self.i420)
Decoder::handle_hw_video_frame(decoder, h264s, fmt, rgb, &mut self.i420)
} else {
Err(anyhow!("don't support h264!"))
}
@ -325,7 +324,7 @@ impl Decoder {
#[cfg(feature = "hwcodec")]
video_frame::Union::H265s(h265s) => {
if let Some(decoder) = &mut self.hw.h265 {
Decoder::handle_hw_video_frame(decoder, h265s, stride, fmt, rgb, &mut self.i420)
Decoder::handle_hw_video_frame(decoder, h265s, fmt, rgb, &mut self.i420)
} else {
Err(anyhow!("don't support h265!"))
}
@ -333,7 +332,7 @@ impl Decoder {
#[cfg(feature = "mediacodec")]
video_frame::Union::H264s(h264s) => {
if let Some(decoder) = &mut self.media_codec.h264 {
Decoder::handle_mediacodec_video_frame(decoder, h264s, stride, fmt, rgb)
Decoder::handle_mediacodec_video_frame(decoder, h264s, fmt, rgb)
} else {
Err(anyhow!("don't support h264!"))
}
@ -341,7 +340,7 @@ impl Decoder {
#[cfg(feature = "mediacodec")]
video_frame::Union::H265s(h265s) => {
if let Some(decoder) = &mut self.media_codec.h265 {
Decoder::handle_mediacodec_video_frame(decoder, h265s, stride, fmt, rgb)
Decoder::handle_mediacodec_video_frame(decoder, h265s, fmt, rgb)
} else {
Err(anyhow!("don't support h265!"))
}
@ -353,7 +352,6 @@ impl Decoder {
fn handle_vp9s_video_frame(
decoder: &mut VpxDecoder,
vp9s: &EncodedVideoFrames,
stride: usize,
fmt: ImageFormat,
rgb: &mut Vec<u8>,
) -> ResultType<bool> {
@ -371,7 +369,7 @@ impl Decoder {
if last_frame.is_null() {
Ok(false)
} else {
last_frame.to(fmt, stride, rgb);
last_frame.to(fmt, 1, rgb);
Ok(true)
}
}
@ -380,7 +378,6 @@ impl Decoder {
fn handle_hw_video_frame(
decoder: &mut HwDecoder,
frames: &EncodedVideoFrames,
stride: usize,
fmt: ImageFormat,
raw: &mut Vec<u8>,
i420: &mut Vec<u8>,
@ -389,7 +386,7 @@ impl Decoder {
for h264 in frames.frames.iter() {
for image in decoder.decode(&h264.data)? {
// TODO: just process the last frame
if image.to_fmt(stride, fmt, raw, i420).is_ok() {
if image.to_fmt(fmt, raw, i420).is_ok() {
ret = true;
}
}
@ -401,13 +398,12 @@ impl Decoder {
fn handle_mediacodec_video_frame(
decoder: &mut MediaCodecDecoder,
frames: &EncodedVideoFrames,
stride: usize,
fmt: ImageFormat,
raw: &mut Vec<u8>,
) -> ResultType<bool> {
let mut ret = false;
for h264 in frames.frames.iter() {
return decoder.decode(&h264.data, stride, fmt, raw);
return decoder.decode(&h264.data, fmt, raw);
}
return Ok(false);
}
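
After the revert, Decoder::handle_video_frame no longer threads a destination stride through the VP9, hwcodec and mediacodec paths; the VP9 output is converted with stride_align = 1, i.e. tightly packed rows. A minimal caller-side sketch of the restored signature (the decoder, frame union and output buffer are placeholders for illustration, not part of this patch):

    let mut rgb: Vec<u8> = Vec::new();
    // fmt picks the output byte order; see the client.rs hunk further down.
    let got_frame = decoder.handle_video_frame(&frame_union, ImageFormat::ARGB, &mut rgb)?;
    if got_frame {
        // For the VP9 path, rgb now holds height * width * 4 bytes with no per-row padding.
    }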

View File

@ -144,7 +144,7 @@ extern "C" {
fn get_vpx_i420_stride(
width: usize,
height: usize,
stride: usize,
stride_align: usize,
) -> (usize, usize, usize, usize, usize, usize) {
let mut img = Default::default();
unsafe {
@ -153,7 +153,7 @@ fn get_vpx_i420_stride(
vpx_img_fmt::VPX_IMG_FMT_I420,
width as _,
height as _,
stride as _,
stride_align as _,
0x1 as _,
);
}
@ -169,7 +169,7 @@ fn get_vpx_i420_stride(
pub fn i420_to_rgb(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
let (_, _, src_stride_y, src_stride_uv, u, v) =
get_vpx_i420_stride(width, height, super::STRIDE);
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
let src_y = src.as_ptr();
let src_u = src[u..].as_ptr();
let src_v = src[v..].as_ptr();
@ -192,7 +192,7 @@ pub fn i420_to_rgb(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
pub fn bgra_to_i420(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
let (_, h, dst_stride_y, dst_stride_uv, u, v) =
get_vpx_i420_stride(width, height, super::STRIDE);
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
dst.resize(h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
let dst_y = dst.as_mut_ptr();
let dst_u = dst[u..].as_mut_ptr();
@ -215,7 +215,7 @@ pub fn bgra_to_i420(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>)
pub fn rgba_to_i420(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
let (_, h, dst_stride_y, dst_stride_uv, u, v) =
get_vpx_i420_stride(width, height, super::STRIDE);
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
dst.resize(h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
let dst_y = dst.as_mut_ptr();
let dst_u = dst[u..].as_mut_ptr();
@ -246,7 +246,7 @@ pub unsafe fn nv12_to_i420(
dst: &mut Vec<u8>,
) {
let (_, h, dst_stride_y, dst_stride_uv, u, v) =
get_vpx_i420_stride(width, height, super::STRIDE);
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
dst.resize(h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
let dst_y = dst.as_mut_ptr();
let dst_u = dst[u..].as_mut_ptr();
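
get_vpx_i420_stride delegates the actual layout computation to vpx_img_wrap, so the strides and plane offsets come from libvpx itself. As a rough illustration of what the returned tuple describes (assuming, as the STRIDE_ALIGN = 64 default suggests, that each plane stride is rounded up to a power-of-two alignment):

    // Hypothetical sketch, not the real implementation: derive I420 plane strides and offsets.
    fn i420_planes(width: usize, height: usize, align: usize) -> (usize, usize, usize, usize) {
        let round_up = |v: usize| (v + align - 1) & !(align - 1);
        let stride_y = round_up(width);               // luma stride, padded to the alignment
        let stride_uv = round_up(width / 2);          // chroma is subsampled 2x horizontally
        let u_offset = stride_y * height;             // U plane starts where the Y plane ends
        let v_offset = u_offset + stride_uv * (height / 2); // V plane follows the U plane
        (stride_y, stride_uv, u_offset, v_offset)
    }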

View File

@ -1,6 +1,6 @@
use crate::{
codec::{EncoderApi, EncoderCfg},
hw, ImageFormat, HW_STRIDE,
hw, ImageFormat, HW_STRIDE_ALIGN,
};
use hbb_common::{
anyhow::{anyhow, Context},
@ -52,7 +52,7 @@ impl EncoderApi for HwEncoder {
width: config.width as _,
height: config.height as _,
pixfmt: DEFAULT_PIXFMT,
align: HW_STRIDE as _,
align: HW_STRIDE_ALIGN as _,
bitrate: config.bitrate * 1000,
timebase: DEFAULT_TIME_BASE,
gop: DEFAULT_GOP,
@ -236,7 +236,7 @@ pub struct HwDecoderImage<'a> {
}
impl HwDecoderImage<'_> {
pub fn to_fmt(&self, stride: usize, fmt: ImageFormat, fmt_data: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
pub fn to_fmt(&self, fmt: ImageFormat, fmt_data: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
let frame = self.frame;
match frame.pixfmt {
AVPixelFormat::AV_PIX_FMT_NV12 => hw::hw_nv12_to(
@ -249,7 +249,7 @@ impl HwDecoderImage<'_> {
frame.linesize[1] as _,
fmt_data,
i420,
HW_STRIDE,
HW_STRIDE_ALIGN,
),
AVPixelFormat::AV_PIX_FMT_YUV420P => {
hw::hw_i420_to(
@ -269,12 +269,12 @@ impl HwDecoderImage<'_> {
}
}
pub fn bgra(&self, stride: usize, bgra: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
self.to_fmt(stride, ImageFormat::ARGB, bgra, i420)
pub fn bgra(&self, bgra: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
self.to_fmt(ImageFormat::ARGB, bgra, i420)
}
pub fn rgba(&self, stride: usize, rgba: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
self.to_fmt(stride, ImageFormat::ABGR, rgba, i420)
pub fn rgba(&self, rgba: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
self.to_fmt(ImageFormat::ABGR, rgba, i420)
}
}
@ -296,7 +296,7 @@ pub fn check_config() {
width: 1920,
height: 1080,
pixfmt: DEFAULT_PIXFMT,
align: HW_STRIDE as _,
align: HW_STRIDE_ALIGN as _,
bitrate: 0,
timebase: DEFAULT_TIME_BASE,
gop: DEFAULT_GOP,
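
With the stride parameter dropped, HwDecoderImage::to_fmt and its bgra/rgba wrappers decide the output layout themselves, passing HW_STRIDE_ALIGN to the underlying conversion. A minimal usage sketch (the decoder and the encoded input are placeholders):

    let mut rgba: Vec<u8> = Vec::new();
    let mut i420: Vec<u8> = Vec::new();
    for image in decoder.decode(&encoded_frame)? {
        // Equivalent to image.to_fmt(ImageFormat::ABGR, &mut rgba, &mut i420).
        image.rgba(&mut rgba, &mut i420)?;
    }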

View File

@ -50,8 +50,7 @@ impl MediaCodecDecoder {
MediaCodecDecoders { h264, h265 }
}
// to-do: apply stride to raw output data
pub fn decode(&mut self, data: &[u8], stride: usize, fmt: ImageFormat, raw: &mut Vec<u8>) -> ResultType<bool> {
pub fn decode(&mut self, data: &[u8], fmt: ImageFormat, raw: &mut Vec<u8>) -> ResultType<bool> {
match self.dequeue_input_buffer(Duration::from_millis(10))? {
Some(mut input_buffer) => {
let mut buf = input_buffer.buffer_mut();
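
The MediaCodec path loses the same parameter and still reports whether a decoded frame was produced. A one-line usage sketch of the restored signature (data and raw are placeholders):

    let got_frame = decoder.decode(&data, ImageFormat::ARGB, &mut raw)?;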

View File

@ -37,8 +37,8 @@ pub mod hwcodec;
pub mod mediacodec;
pub mod vpxcodec;
pub use self::convert::*;
pub const STRIDE: usize = 64; // commonly used in libvpx vpx_img_alloc caller
pub const HW_STRIDE: usize = 0; // recommended by av_frame_get_buffer
pub const STRIDE_ALIGN: usize = 64; // commonly used in libvpx vpx_img_alloc caller
pub const HW_STRIDE_ALIGN: usize = 0; // recommended by av_frame_get_buffer
pub mod record;
mod vpx;
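
The renamed constants make the intent explicit: these are alignment requests, not strides. STRIDE_ALIGN is the value handed to libvpx when wrapping raw frames, and HW_STRIDE_ALIGN goes to the FFmpeg-based hwcodec path, where 0 means letting av_frame_get_buffer pick an alignment itself. A small sketch of the round-up such an alignment implies (1366 is just an example width):

    // Illustrative helper: round a width or row size up to a power-of-two alignment.
    let align_up = |v: usize, a: usize| if a <= 1 { v } else { (v + a - 1) & !(a - 1) };
    assert_eq!(align_up(1366, STRIDE_ALIGN), 1408); // 64 pads odd widths up to the next multiple
    assert_eq!(align_up(1366, 1), 1366);            // an alignment of 1 leaves data tightly packed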

View File

@ -6,7 +6,7 @@ use hbb_common::anyhow::{anyhow, Context};
use hbb_common::message_proto::{EncodedVideoFrame, EncodedVideoFrames, Message, VideoFrame};
use hbb_common::{get_time, ResultType};
use crate::STRIDE;
use crate::STRIDE_ALIGN;
use crate::{codec::EncoderApi, ImageFormat};
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
@ -202,7 +202,7 @@ impl EncoderApi for VpxEncoder {
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<Message> {
let mut frames = Vec::new();
for ref frame in self
.encode(ms, frame, STRIDE)
.encode(ms, frame, STRIDE_ALIGN)
.with_context(|| "Failed to encode")?
{
frames.push(VpxEncoder::create_frame(frame));
@ -232,7 +232,7 @@ impl EncoderApi for VpxEncoder {
}
impl VpxEncoder {
pub fn encode(&mut self, pts: i64, data: &[u8], stride: usize) -> Result<EncodeFrames> {
pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
if 2 * data.len() < 3 * self.width * self.height {
return Err(Error::FailedCall("len not enough".to_string()));
}
@ -243,7 +243,7 @@ impl VpxEncoder {
vpx_img_fmt::VPX_IMG_FMT_I420,
self.width as _,
self.height as _,
stride as _,
stride_align as _,
data.as_ptr() as _,
));
@ -539,17 +539,15 @@ impl Image {
self.inner().stride[iplane]
}
pub fn to(&self, fmt: ImageFormat, stride: usize, dst: &mut Vec<u8>) {
pub fn to(&self, fmt: ImageFormat, stride_align: usize, dst: &mut Vec<u8>) {
let h = self.height();
let w = self.width();
let bytes_per_pixel = match fmt {
let mut w = self.width();
let bps = match fmt {
ImageFormat::Raw => 3,
ImageFormat::ARGB | ImageFormat::ABGR => 4,
};
// https://github.com/lemenkov/libyuv/blob/6900494d90ae095d44405cd4cc3f346971fa69c9/source/convert_argb.cc#L128
// https://github.com/lemenkov/libyuv/blob/6900494d90ae095d44405cd4cc3f346971fa69c9/source/convert_argb.cc#L129
let bytes_per_row = (w * bytes_per_pixel + stride - 1) & !(stride - 1);
dst.resize(h * bytes_per_row, 0);
w = (w + stride_align - 1) & !(stride_align - 1);
dst.resize(h * w * bps, 0);
let img = self.inner();
unsafe {
match fmt {
@ -562,7 +560,7 @@ impl Image {
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
bytes_per_row as _,
(w * bps) as _,
self.width() as _,
self.height() as _,
);
@ -576,7 +574,7 @@ impl Image {
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
bytes_per_row as _,
(w * bps) as _,
self.width() as _,
self.height() as _,
);
@ -590,7 +588,7 @@ impl Image {
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
bytes_per_row as _,
(w * bps) as _,
self.width() as _,
self.height() as _,
);
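
This hunk is the heart of the revert. The macOS fix sized the destination by rounding each row up to a byte alignment (bytes_per_row), while the restored code rounds the pixel width up and keeps every row at exactly w * bps bytes. A worked comparison, using width 1366 and ImageFormat::ARGB (4 bytes per pixel) as an example:

    let (w, bps, align) = (1366usize, 4usize, 64usize);
    // Reverted fix: pad the row size in bytes up to the alignment.
    let bytes_per_row = (w * bps + align - 1) & !(align - 1); // 5504 bytes per row
    // Restored code: pad the width in pixels, then multiply by bytes per pixel.
    let aligned_w = (w + align - 1) & !(align - 1);           // 1408 pixels
    let restored_row = aligned_w * bps;                       // 5632 bytes per row
    // With stride_align = 1, as the decoder now calls Image::to, rows stay tightly packed.
    let tight_row = w * bps;                                  // 5464 bytes per row

The two schemes produce different buffer layouts for the same frame, which is why the destination-stride plumbing elsewhere in this commit is removed together with this change.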

View File

@ -49,7 +49,7 @@ use scrap::{
};
use crate::{
common::{self, is_keyboard_mode_supported, DST_STRIDE_RGBA},
common::{self, is_keyboard_mode_supported},
server::video_service::{SCRAP_X11_REF_URL, SCRAP_X11_REQUIRED},
};
@ -944,7 +944,12 @@ impl VideoHandler {
}
match &vf.union {
Some(frame) => {
let res = self.decoder.handle_video_frame(frame, DST_STRIDE_RGBA, ImageFormat::ARGB, &mut self.rgb);
// On Windows with the flutter_texture_render feature the texture path expects ABGR; all other targets use ARGB.
#[cfg(all(target_os = "windows", feature = "flutter_texture_render"))]
let fmt = ImageFormat::ABGR;
#[cfg(not(all(target_os = "windows", feature = "flutter_texture_render")))]
let fmt = ImageFormat::ARGB;
let res = self.decoder.handle_video_frame(frame, fmt, &mut self.rgb);
if self.record {
self.recorder
.lock()
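
With the destination stride gone, the handler only has to choose the pixel byte order per platform; the two #[cfg] attributes above resolve to a single value at compile time. An equivalent, purely illustrative expression using cfg!:

    let fmt = if cfg!(all(target_os = "windows", feature = "flutter_texture_render")) {
        ImageFormat::ABGR // the Windows texture render path expects ABGR
    } else {
        ImageFormat::ARGB // every other target keeps ARGB
    };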

View File

@ -39,13 +39,6 @@ pub const CLIPBOARD_INTERVAL: u64 = 333;
pub const SYNC_PEER_INFO_DISPLAYS: i32 = 1;
#[cfg(all(target_os = "macos", feature = "flutter_texture_render"))]
// https://developer.apple.com/forums/thread/712709
// Memory alignment should be multiple of 64.
pub const DST_STRIDE_RGBA: usize = 64;
#[cfg(not(all(target_os = "macos", feature = "flutter_texture_render")))]
pub const DST_STRIDE_RGBA: usize = 1;
// the executable name of the portable version
pub const PORTABLE_APPNAME_RUNTIME_ENV_KEY: &str = "RUSTDESK_APPNAME";

View File

@ -154,7 +154,7 @@ pub struct FlutterHandler {
#[cfg(feature = "flutter_texture_render")]
pub type FlutterRgbaRendererPluginOnRgba =
unsafe extern "C" fn(texture_rgba: *mut c_void, buffer: *const u8, len: c_int, width: c_int, height: c_int, stride: c_int);
unsafe extern "C" fn(texture_rgba: *mut c_void, buffer: *const u8, width: c_int, height: c_int);
// Video Texture Renderer in Flutter
#[cfg(feature = "flutter_texture_render")]
@ -206,9 +206,7 @@ impl VideoRenderer {
self.width = width;
self.height = height;
self.data_len = if width > 0 && height > 0 {
let sa1 = crate::common::DST_STRIDE_RGBA - 1;
let row_bytes = (width as usize * 4 + sa1) & !sa1;
row_bytes * height as usize
(width * height * 4) as usize
} else {
0
};
@ -223,10 +221,8 @@ impl VideoRenderer {
func(
self.ptr as _,
rgba.as_ptr() as _,
rgba.len() as _,
self.width as _,
self.height as _,
crate::common::DST_STRIDE_RGBA as _,
)
};
}
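
After the revert the on_rgba callback carries only the texture pointer, the RGBA buffer, width and height, so both sides of the FFI boundary have to agree that rows are tightly packed; the data_len = width * height * 4 computation above encodes exactly that. A small sanity-check sketch of the invariant the renderer now relies on (hypothetical, not part of the patch):

    // Every frame handed to the texture plugin must be width * height * 4 bytes of
    // tightly packed RGBA, with no per-row padding left over from the decoder.
    debug_assert_eq!(rgba.len(), self.width as usize * self.height as usize * 4);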