Merge pull request #6275 from 21pages/opt_codec

opt codec
This commit is contained in:
RustDesk 2023-11-03 15:06:46 +08:00 committed by GitHub
commit a2bc02b4c5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 266 additions and 171 deletions

4
Cargo.lock generated
View File

@ -2973,8 +2973,8 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "hwcodec"
version = "0.1.1"
source = "git+https://github.com/21pages/hwcodec?branch=stable#82cdc15457e42feaf14e1b38622506b2d54baf76"
version = "0.1.3"
source = "git+https://github.com/21pages/hwcodec?branch=stable#83300549075158e5a3fa6c59ea527af3330e48ff"
dependencies = [
"bindgen 0.59.2",
"cc",

View File

@ -1248,25 +1248,41 @@ customImageQualityDialog(SessionID sessionId, String id, FFI ffi) async {
double fpsInitValue = 30;
bool qualitySet = false;
bool fpsSet = false;
bool? direct;
try {
direct =
ConnectionTypeState.find(id).direct.value == ConnectionType.strDirect;
} catch (_) {}
bool hideFps = (await bind.mainIsUsingPublicServer() && direct != true) ||
versionCmp(ffi.ffiModel.pi.version, '1.2.0') < 0;
bool hideMoreQuality =
(await bind.mainIsUsingPublicServer() && direct != true) ||
versionCmp(ffi.ffiModel.pi.version, '1.2.2') < 0;
setCustomValues({double? quality, double? fps}) async {
if (quality != null) {
qualitySet = true;
await bind.sessionSetCustomImageQuality(
sessionId: sessionId, value: quality.toInt());
print("quality:$quality");
}
if (fps != null) {
fpsSet = true;
await bind.sessionSetCustomFps(sessionId: sessionId, fps: fps.toInt());
print("fps:$fps");
}
if (!qualitySet) {
qualitySet = true;
await bind.sessionSetCustomImageQuality(
sessionId: sessionId, value: qualityInitValue.toInt());
print("qualityInitValue:$qualityInitValue");
}
if (!fpsSet) {
if (!hideFps && !fpsSet) {
fpsSet = true;
await bind.sessionSetCustomFps(
sessionId: sessionId, fps: fpsInitValue.toInt());
print("fpsInitValue:$fpsInitValue");
}
}
@ -1279,7 +1295,9 @@ customImageQualityDialog(SessionID sessionId, String id, FFI ffi) async {
final quality = await bind.sessionGetCustomImageQuality(sessionId: sessionId);
qualityInitValue =
quality != null && quality.isNotEmpty ? quality[0].toDouble() : 50.0;
if (qualityInitValue < 10 || qualityInitValue > 2000) {
if ((hideMoreQuality && qualityInitValue > 100) ||
qualityInitValue < 10 ||
qualityInitValue > 2000) {
qualityInitValue = 50;
}
// fps
@ -1289,20 +1307,14 @@ customImageQualityDialog(SessionID sessionId, String id, FFI ffi) async {
if (fpsInitValue < 5 || fpsInitValue > 120) {
fpsInitValue = 30;
}
bool? direct;
try {
direct =
ConnectionTypeState.find(id).direct.value == ConnectionType.strDirect;
} catch (_) {}
bool notShowFps = (await bind.mainIsUsingPublicServer() && direct != true) ||
versionCmp(ffi.ffiModel.pi.version, '1.2.0') < 0;
final content = customImageQualityWidget(
initQuality: qualityInitValue,
initFps: fpsInitValue,
setQuality: (v) => setCustomValues(quality: v),
setFps: (v) => setCustomValues(fps: v),
showFps: !notShowFps);
showFps: !hideFps,
showMoreQuality: !hideMoreQuality);
msgBoxCommon(ffi.dialogManager, 'Custom Image Quality', content, [btnClose]);
}

View File

@ -10,7 +10,11 @@ customImageQualityWidget(
required double initFps,
required Function(double) setQuality,
required Function(double) setFps,
required bool showFps}) {
required bool showFps,
required bool showMoreQuality}) {
if (!showMoreQuality && initQuality > 100) {
initQuality = 50;
}
final qualityValue = initQuality.obs;
final fpsValue = initFps.obs;
@ -69,7 +73,7 @@ customImageQualityWidget(
style: const TextStyle(fontSize: 15),
)),
// mobile doesn't have enough space
if (!isMobile)
if (showMoreQuality && !isMobile)
Expanded(
flex: 1,
child: Row(
@ -85,7 +89,7 @@ customImageQualityWidget(
))
],
)),
if (isMobile)
if (showMoreQuality && isMobile)
Obx(() => Row(
children: [
Expanded(
@ -160,7 +164,8 @@ customImageQualitySetting() {
setFps: (v) {
bind.mainSetUserDefaultOption(key: fpsKey, value: v.toString());
},
showFps: true);
showFps: true,
showMoreQuality: true);
}
Future<bool> setServerConfig(

View File

@ -583,32 +583,19 @@ class WindowActionPanelState extends State<WindowActionPanel>
void onWindowClose() async {
mainWindowClose() async => await windowManager.hide();
notMainWindowClose(WindowController controller) async {
if (widget.tabController.length == 0) {
debugPrint("close emtpy multiwindow, hide");
await controller.hide();
await rustDeskWinManager
.call(WindowType.Main, kWindowEventHide, {"id": kWindowId!});
} else {
if (widget.tabController.length != 0) {
debugPrint("close not emtpy multiwindow from taskbar");
if (Platform.isWindows) {
await controller.show();
await controller.focus();
final res = await widget.onClose?.call() ?? true;
if (res) {
Future.delayed(Duration.zero, () async {
// onWindowClose will be called again to hide
await WindowController.fromWindowId(kWindowId!).close();
});
}
} else {
// ubuntu22.04 windowOnTop not work from taskbar
widget.tabController.clear();
Future.delayed(Duration.zero, () async {
// onWindowClose will be called again to hide
await WindowController.fromWindowId(kWindowId!).close();
});
if (!res) return;
}
widget.tabController.clear();
}
await controller.hide();
await rustDeskWinManager
.call(WindowType.Main, kWindowEventHide, {"id": kWindowId!});
}
macOSWindowClose(

View File

@ -43,7 +43,7 @@ impl crate::TraitCapturer for Capturer {
unsafe {
std::ptr::copy_nonoverlapping(buf.as_ptr(), self.rgba.as_mut_ptr(), buf.len())
};
Ok(Frame::new(&self.rgba, self.height()))
Ok(Frame::new(&self.rgba, self.width(), self.height()))
} else {
return Err(io::ErrorKind::WouldBlock.into());
}
@ -51,16 +51,23 @@ impl crate::TraitCapturer for Capturer {
}
pub struct Frame<'a> {
pub data: &'a [u8],
pub stride: Vec<usize>,
data: &'a [u8],
width: usize,
height: usize,
stride: Vec<usize>,
}
impl<'a> Frame<'a> {
pub fn new(data: &'a [u8], h: usize) -> Self {
let stride = data.len() / h;
let mut v = Vec::new();
v.push(stride);
Frame { data, stride: v }
pub fn new(data: &'a [u8], width: usize, height: usize) -> Self {
let stride0 = data.len() / height;
let mut stride = Vec::new();
stride.push(stride0);
Frame {
data,
width,
height,
stride,
}
}
}
@ -69,6 +76,14 @@ impl<'a> crate::TraitFrame for Frame<'a> {
self.data
}
fn width(&self) -> usize {
self.width
}
fn height(&self) -> usize {
self.height
}
fn stride(&self) -> Vec<usize> {
self.stride.clone()
}

View File

@ -7,9 +7,9 @@
include!(concat!(env!("OUT_DIR"), "/aom_ffi.rs"));
use crate::codec::{base_bitrate, codec_thread_num, Quality};
use crate::Pixfmt;
use crate::{codec::EncoderApi, EncodeFrame, STRIDE_ALIGN};
use crate::{common::GoogleImage, generate_call_macro, generate_call_ptr_macro, Error, Result};
use crate::{EncodeYuvFormat, Pixfmt};
use hbb_common::{
anyhow::{anyhow, Context},
bytes::Bytes,
@ -54,6 +54,7 @@ pub struct AomEncoder {
width: usize,
height: usize,
i444: bool,
yuvfmt: EncodeYuvFormat,
}
// https://webrtc.googlesource.com/src/+/refs/heads/main/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
@ -241,6 +242,7 @@ impl EncoderApi for AomEncoder {
width: config.width as _,
height: config.height as _,
i444,
yuvfmt: Self::get_yuvfmt(config.width, config.height, i444),
})
}
_ => Err(anyhow!("encoder type mismatch")),
@ -263,35 +265,7 @@ impl EncoderApi for AomEncoder {
}
fn yuvfmt(&self) -> crate::EncodeYuvFormat {
let mut img = Default::default();
let fmt = if self.i444 {
aom_img_fmt::AOM_IMG_FMT_I444
} else {
aom_img_fmt::AOM_IMG_FMT_I420
};
unsafe {
aom_img_wrap(
&mut img,
fmt,
self.width as _,
self.height as _,
crate::STRIDE_ALIGN as _,
0x1 as _,
);
}
let pixfmt = if self.i444 {
Pixfmt::I444
} else {
Pixfmt::I420
};
crate::EncodeYuvFormat {
pixfmt,
w: img.w as _,
h: img.h as _,
stride: img.stride.map(|s| s as usize).to_vec(),
u: img.planes[1] as usize - img.planes[0] as usize,
v: img.planes[2] as usize - img.planes[0] as usize,
}
self.yuvfmt.clone()
}
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
@ -400,6 +374,34 @@ impl AomEncoder {
(q_min, q_max)
}
// Computes the encoder's expected YUV buffer layout (strides and U/V plane
// offsets) for the given dimensions, once at construction time, so that
// `yuvfmt()` can return a cached copy instead of re-deriving it per call.
fn get_yuvfmt(width: u32, height: u32, i444: bool) -> EncodeYuvFormat {
let mut img = Default::default();
// Chroma subsampling: I444 keeps full-resolution chroma, I420 halves it.
let fmt = if i444 {
aom_img_fmt::AOM_IMG_FMT_I444
} else {
aom_img_fmt::AOM_IMG_FMT_I420
};
// SAFETY: aom_img_wrap only fills `img`'s descriptor fields (w/h/stride/
// plane pointers) from the given geometry; the 0x1 pointer is presumably a
// dummy base address used purely so plane offsets can be computed — no
// pixel data is read or written here. TODO confirm against libaom docs.
unsafe {
aom_img_wrap(
&mut img,
fmt,
width as _,
height as _,
crate::STRIDE_ALIGN as _,
0x1 as _,
);
}
let pixfmt = if i444 { Pixfmt::I444 } else { Pixfmt::I420 };
EncodeYuvFormat {
pixfmt,
w: img.w as _,
h: img.h as _,
// Per-plane strides as computed by libaom for STRIDE_ALIGN alignment.
stride: img.stride.map(|s| s as usize).to_vec(),
// U/V are byte offsets of planes 1 and 2 relative to plane 0 (Y).
u: img.planes[1] as usize - img.planes[0] as usize,
v: img.planes[2] as usize - img.planes[0] as usize,
}
}
}
impl Drop for AomEncoder {

View File

@ -202,17 +202,31 @@ pub fn convert_to_yuv(
) -> ResultType<()> {
let src = captured.data();
let src_stride = captured.stride();
let captured_pixfmt = captured.pixfmt();
if captured_pixfmt == crate::Pixfmt::BGRA || captured_pixfmt == crate::Pixfmt::RGBA {
if src.len() < src_stride[0] * dst_fmt.h {
let src_pixfmt = captured.pixfmt();
let src_width = captured.width();
let src_height = captured.height();
if src_width > dst_fmt.w || src_height > dst_fmt.h {
bail!(
"src rect > dst rect: ({src_width}, {src_height}) > ({},{})",
dst_fmt.w,
dst_fmt.h
);
}
if src_pixfmt == crate::Pixfmt::BGRA || src_pixfmt == crate::Pixfmt::RGBA {
if src.len() < src_stride[0] * src_height {
bail!(
"length not enough: {} < {}",
"wrong src len, {} < {} * {}",
src.len(),
src_stride[0] * dst_fmt.h
src_stride[0],
src_height
);
}
}
match (captured_pixfmt, dst_fmt.pixfmt) {
let align = |x:usize| {
(x + 63) / 64 * 64
};
match (src_pixfmt, dst_fmt.pixfmt) {
(crate::Pixfmt::BGRA, crate::Pixfmt::I420) | (crate::Pixfmt::RGBA, crate::Pixfmt::I420) => {
let dst_stride_y = dst_fmt.stride[0];
let dst_stride_uv = dst_fmt.stride[1];
@ -220,7 +234,7 @@ pub fn convert_to_yuv(
let dst_y = dst.as_mut_ptr();
let dst_u = dst[dst_fmt.u..].as_mut_ptr();
let dst_v = dst[dst_fmt.v..].as_mut_ptr();
let f = if captured_pixfmt == crate::Pixfmt::BGRA {
let f = if src_pixfmt == crate::Pixfmt::BGRA {
ARGBToI420
} else {
ABGRToI420
@ -234,17 +248,20 @@ pub fn convert_to_yuv(
dst_stride_uv as _,
dst_v,
dst_stride_uv as _,
dst_fmt.w as _,
dst_fmt.h as _,
src_width as _,
src_height as _,
));
}
(crate::Pixfmt::BGRA, crate::Pixfmt::NV12) | (crate::Pixfmt::RGBA, crate::Pixfmt::NV12) => {
let dst_stride_y = dst_fmt.stride[0];
let dst_stride_uv = dst_fmt.stride[1];
dst.resize(dst_fmt.h * (dst_stride_y + dst_stride_uv / 2), 0);
dst.resize(
align(dst_fmt.h) * (align(dst_stride_y) + align(dst_stride_uv / 2)),
0,
);
let dst_y = dst.as_mut_ptr();
let dst_uv = dst[dst_fmt.u..].as_mut_ptr();
let f = if captured_pixfmt == crate::Pixfmt::BGRA {
let f = if src_pixfmt == crate::Pixfmt::BGRA {
ARGBToNV12
} else {
ABGRToNV12
@ -256,19 +273,22 @@ pub fn convert_to_yuv(
dst_stride_y as _,
dst_uv,
dst_stride_uv as _,
dst_fmt.w as _,
dst_fmt.h as _,
src_width as _,
src_height as _,
));
}
(crate::Pixfmt::BGRA, crate::Pixfmt::I444) | (crate::Pixfmt::RGBA, crate::Pixfmt::I444) => {
let dst_stride_y = dst_fmt.stride[0];
let dst_stride_u = dst_fmt.stride[1];
let dst_stride_v = dst_fmt.stride[2];
dst.resize(dst_fmt.h * (dst_stride_y + dst_stride_u + dst_stride_v), 0);
dst.resize(
align(dst_fmt.h) * (align(dst_stride_y) + align(dst_stride_u) + align(dst_stride_v)),
0,
);
let dst_y = dst.as_mut_ptr();
let dst_u = dst[dst_fmt.u..].as_mut_ptr();
let dst_v = dst[dst_fmt.v..].as_mut_ptr();
let src = if captured_pixfmt == crate::Pixfmt::BGRA {
let src = if src_pixfmt == crate::Pixfmt::BGRA {
src
} else {
mid_data.resize(src.len(), 0);
@ -277,8 +297,8 @@ pub fn convert_to_yuv(
src_stride[0] as _,
mid_data.as_mut_ptr(),
src_stride[0] as _,
dst_fmt.w as _,
dst_fmt.h as _,
src_width as _,
src_height as _,
));
mid_data
};
@ -291,13 +311,13 @@ pub fn convert_to_yuv(
dst_stride_u as _,
dst_v,
dst_stride_v as _,
dst_fmt.w as _,
dst_fmt.h as _,
src_width as _,
src_height as _,
));
}
_ => {
bail!(
"convert not support, {captured_pixfmt:?} -> {:?}",
"convert not support, {src_pixfmt:?} -> {:?}",
dst_fmt.pixfmt
);
}

View File

@ -41,7 +41,7 @@ impl Capturer {
impl TraitCapturer for Capturer {
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
match self.inner.frame(timeout.as_millis() as _) {
Ok(frame) => Ok(Frame::new(frame, self.height)),
Ok(frame) => Ok(Frame::new(frame, self.width, self.height)),
Err(ref error) if error.kind() == TimedOut => Err(WouldBlock.into()),
Err(error) => Err(error),
}
@ -58,15 +58,22 @@ impl TraitCapturer for Capturer {
pub struct Frame<'a> {
data: &'a [u8],
width: usize,
height: usize,
stride: Vec<usize>,
}
impl<'a> Frame<'a> {
pub fn new(data: &'a [u8], h: usize) -> Self {
let stride = data.len() / h;
let mut v = Vec::new();
v.push(stride);
Frame { data, stride: v }
pub fn new(data: &'a [u8], width: usize, height: usize) -> Self {
let stride0 = data.len() / height;
let mut stride = Vec::new();
stride.push(stride0);
Frame {
data,
width,
height,
stride,
}
}
}
@ -75,6 +82,14 @@ impl<'a> crate::TraitFrame for Frame<'a> {
self.data
}
fn width(&self) -> usize {
self.width
}
fn height(&self) -> usize {
self.height
}
fn stride(&self) -> Vec<usize> {
self.stride.clone()
}
@ -167,7 +182,11 @@ impl CapturerMag {
impl TraitCapturer for CapturerMag {
fn frame<'a>(&'a mut self, _timeout_ms: Duration) -> io::Result<Frame<'a>> {
self.inner.frame(&mut self.data)?;
Ok(Frame::new(&self.data, self.inner.get_rect().2))
Ok(Frame::new(
&self.data,
self.inner.get_rect().1,
self.inner.get_rect().2,
))
}
fn is_gdi(&self) -> bool {

View File

@ -112,6 +112,10 @@ pub trait TraitCapturer {
pub trait TraitFrame {
fn data(&self) -> &[u8];
fn width(&self) -> usize;
fn height(&self) -> usize;
fn stride(&self) -> Vec<usize>;
fn pixfmt(&self) -> Pixfmt;
@ -125,6 +129,7 @@ pub enum Pixfmt {
I444,
}
#[derive(Debug, Clone)]
pub struct EncodeYuvFormat {
pub pixfmt: Pixfmt,
pub w: usize,

View File

@ -55,7 +55,12 @@ impl crate::TraitCapturer for Capturer {
Some(mut frame) => {
crate::would_block_if_equal(&mut self.saved_raw_data, frame.inner())?;
frame.surface_to_bgra(self.height());
Ok(Frame(frame, PhantomData))
Ok(Frame {
frame,
data: PhantomData,
width: self.width(),
height: self.height(),
})
}
None => Err(io::ErrorKind::WouldBlock.into()),
@ -69,16 +74,29 @@ impl crate::TraitCapturer for Capturer {
}
}
pub struct Frame<'a>(pub quartz::Frame, PhantomData<&'a [u8]>);
pub struct Frame<'a> {
frame: quartz::Frame,
data: PhantomData<&'a [u8]>,
width: usize,
height: usize,
}
impl<'a> crate::TraitFrame for Frame<'a> {
fn data(&self) -> &[u8] {
&*self.0
&*self.frame
}
fn width(&self) -> usize {
self.width
}
fn height(&self) -> usize {
self.height
}
fn stride(&self) -> Vec<usize> {
let mut v = Vec::new();
v.push(self.0.stride());
v.push(self.frame.stride());
v
}

View File

@ -8,7 +8,7 @@ use hbb_common::message_proto::{Chroma, EncodedVideoFrame, EncodedVideoFrames, V
use hbb_common::ResultType;
use crate::codec::{base_bitrate, codec_thread_num, EncoderApi, Quality};
use crate::{GoogleImage, Pixfmt, STRIDE_ALIGN};
use crate::{EncodeYuvFormat, GoogleImage, Pixfmt, STRIDE_ALIGN};
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
use crate::{generate_call_macro, generate_call_ptr_macro, Error, Result};
@ -40,6 +40,7 @@ pub struct VpxEncoder {
height: usize,
id: VpxVideoCodecId,
i444: bool,
yuvfmt: EncodeYuvFormat,
}
pub struct VpxDecoder {
@ -175,6 +176,7 @@ impl EncoderApi for VpxEncoder {
height: config.height as _,
id: config.codec,
i444,
yuvfmt: Self::get_yuvfmt(config.width, config.height, i444),
})
}
_ => Err(anyhow!("encoder type mismatch")),
@ -202,35 +204,7 @@ impl EncoderApi for VpxEncoder {
}
fn yuvfmt(&self) -> crate::EncodeYuvFormat {
let mut img = Default::default();
let fmt = if self.i444 {
vpx_img_fmt::VPX_IMG_FMT_I444
} else {
vpx_img_fmt::VPX_IMG_FMT_I420
};
unsafe {
vpx_img_wrap(
&mut img,
fmt,
self.width as _,
self.height as _,
crate::STRIDE_ALIGN as _,
0x1 as _,
);
}
let pixfmt = if self.i444 {
Pixfmt::I444
} else {
Pixfmt::I420
};
crate::EncodeYuvFormat {
pixfmt,
w: img.w as _,
h: img.h as _,
stride: img.stride.map(|s| s as usize).to_vec(),
u: img.planes[1] as usize - img.planes[0] as usize,
v: img.planes[2] as usize - img.planes[0] as usize,
}
self.yuvfmt.clone()
}
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
@ -362,6 +336,34 @@ impl VpxEncoder {
(q_min, q_max)
}
// Computes the VPX encoder's expected YUV buffer layout (strides and U/V
// plane offsets) for the given dimensions, once at construction time, so
// that `yuvfmt()` can return a cached copy instead of re-deriving it per call.
fn get_yuvfmt(width: u32, height: u32, i444: bool) -> EncodeYuvFormat {
let mut img = Default::default();
// Chroma subsampling: I444 keeps full-resolution chroma, I420 halves it.
let fmt = if i444 {
vpx_img_fmt::VPX_IMG_FMT_I444
} else {
vpx_img_fmt::VPX_IMG_FMT_I420
};
// SAFETY: vpx_img_wrap only fills `img`'s descriptor fields (w/h/stride/
// plane pointers) from the given geometry; the 0x1 pointer is presumably a
// dummy base address used purely so plane offsets can be computed — no
// pixel data is read or written here. TODO confirm against libvpx docs.
unsafe {
vpx_img_wrap(
&mut img,
fmt,
width as _,
height as _,
crate::STRIDE_ALIGN as _,
0x1 as _,
);
}
let pixfmt = if i444 { Pixfmt::I444 } else { Pixfmt::I420 };
EncodeYuvFormat {
pixfmt,
w: img.w as _,
h: img.h as _,
// Per-plane strides as computed by libvpx for STRIDE_ALIGN alignment.
stride: img.stride.map(|s| s as usize).to_vec(),
// U/V are byte offsets of planes 1 and 2 relative to plane 0 (Y).
u: img.planes[1] as usize - img.planes[0] as usize,
v: img.planes[2] as usize - img.planes[0] as usize,
}
}
}
impl Drop for VpxEncoder {

View File

@ -62,8 +62,8 @@ impl Capturer {
impl TraitCapturer for Capturer {
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
match self.1.capture(timeout.as_millis() as _).map_err(map_err)? {
PixelProvider::BGR0(_w, h, x) => Ok(Frame::new(x, crate::Pixfmt::BGRA, h)),
PixelProvider::RGB0(_w, h, x) => Ok(Frame::new(x, crate::Pixfmt::RGBA, h)),
PixelProvider::BGR0(w, h, x) => Ok(Frame::new(x, crate::Pixfmt::BGRA, w, h)),
PixelProvider::RGB0(w, h, x) => Ok(Frame::new(x, crate::Pixfmt::RGBA, w,h)),
PixelProvider::NONE => Err(std::io::ErrorKind::WouldBlock.into()),
_ => Err(map_err("Invalid data")),
}

View File

@ -1,4 +1,4 @@
use crate::{common::TraitCapturer, x11, TraitFrame, Pixfmt};
use crate::{common::TraitCapturer, x11, Pixfmt, TraitFrame};
use std::{io, time::Duration};
pub struct Capturer(x11::Capturer);
@ -25,26 +25,42 @@ impl TraitCapturer for Capturer {
}
}
pub struct Frame<'a>{
pub struct Frame<'a> {
pub data: &'a [u8],
pub pixfmt:Pixfmt,
pub stride:Vec<usize>,
pub pixfmt: Pixfmt,
pub width: usize,
pub height: usize,
pub stride: Vec<usize>,
}
impl<'a> Frame<'a> {
pub fn new(data:&'a [u8], pixfmt:Pixfmt, h:usize) -> Self {
let stride = data.len() / h;
let mut v = Vec::new();
v.push(stride);
Self { data, pixfmt, stride: v }
impl<'a> Frame<'a> {
pub fn new(data: &'a [u8], pixfmt: Pixfmt, width: usize, height: usize) -> Self {
let stride0 = data.len() / height;
let mut stride = Vec::new();
stride.push(stride0);
Self {
data,
pixfmt,
width,
height,
stride,
}
}
}
impl<'a> TraitFrame for Frame<'a> {
impl<'a> TraitFrame for Frame<'a> {
fn data(&self) -> &[u8] {
self.data
}
fn width(&self) -> usize {
self.width
}
fn height(&self) -> usize {
self.height
}
fn stride(&self) -> Vec<usize> {
self.stride.clone()
}

View File

@ -102,7 +102,7 @@ impl Capturer {
let result = unsafe { slice::from_raw_parts(self.buffer, self.size) };
crate::would_block_if_equal(&mut self.saved_raw_data, result)?;
Ok(
Frame::new(result, crate::Pixfmt::BGRA, self.display.h())
Frame::new(result, crate::Pixfmt::BGRA, self.display.w(), self.display.h())
)
}
}

View File

@ -670,7 +670,6 @@ impl Connection {
conn.lr.my_id.clone(),
);
video_service::notify_video_frame_fetched(id, None);
scrap::codec::Encoder::update(id, scrap::codec::EncodingUpdate::Remove);
if conn.authorized {
password::update_temporary_password();
}
@ -1173,7 +1172,7 @@ impl Connection {
sub_service = true;
}
}
Self::on_remote_authorized();
self.on_remote_authorized();
}
let mut msg_out = Message::new();
msg_out.set_login_response(res);
@ -1212,9 +1211,10 @@ impl Connection {
}
}
fn on_remote_authorized() {
fn on_remote_authorized(&self) {
use std::sync::Once;
static _ONCE: Once = Once::new();
self.update_codec_on_login();
#[cfg(any(target_os = "windows", target_os = "linux"))]
if !Config::get_option("allow-remove-wallpaper").is_empty() {
// multi connections set once
@ -1412,8 +1412,8 @@ impl Connection {
return Config::get_option(enable_prefix_option).is_empty();
}
fn update_codec_on_login(&self, lr: &LoginRequest) {
if let Some(o) = lr.option.as_ref() {
fn update_codec_on_login(&self) {
if let Some(o) = self.lr.clone().option.as_ref() {
if let Some(q) = o.supported_decoding.clone().take() {
scrap::codec::Encoder::update(
self.inner.id(),
@ -1438,9 +1438,6 @@ impl Connection {
if let Some(o) = lr.option.as_ref() {
self.options_in_login = Some(o.clone());
}
if lr.union.is_none() {
self.update_codec_on_login(&lr);
}
self.video_ack_required = lr.video_ack_required;
}
@ -2969,18 +2966,6 @@ mod raii {
fn drop(&mut self) {
let mut active_conns_lock = ALIVE_CONNS.lock().unwrap();
active_conns_lock.retain(|&c| c != self.0);
#[cfg(not(any(target_os = "android", target_os = "ios")))]
if active_conns_lock.is_empty() {
display_service::reset_resolutions();
}
#[cfg(all(windows, feature = "virtual_display_driver"))]
if active_conns_lock.is_empty() {
let _ = virtual_display_manager::reset_all();
}
#[cfg(all(windows))]
if active_conns_lock.is_empty() {
crate::privacy_win_mag::stop();
}
video_service::VIDEO_QOS
.lock()
.unwrap()
@ -2988,17 +2973,20 @@ mod raii {
}
}
pub struct AuthedConnID(i32);
pub struct AuthedConnID(i32, AuthConnType);
impl AuthedConnID {
pub fn new(id: i32, conn_type: AuthConnType) -> Self {
AUTHED_CONNS.lock().unwrap().push((id, conn_type));
Self(id)
Self(id, conn_type)
}
}
impl Drop for AuthedConnID {
fn drop(&mut self) {
if self.1 == AuthConnType::Remote {
scrap::codec::Encoder::update(self.0, scrap::codec::EncodingUpdate::Remove);
}
let mut lock = AUTHED_CONNS.lock().unwrap();
lock.retain(|&c| c.0 != self.0);
if lock.iter().filter(|c| c.1 == AuthConnType::Remote).count() == 0 {
@ -3006,6 +2994,12 @@ mod raii {
{
*WALLPAPER_REMOVER.lock().unwrap() = None;
}
#[cfg(not(any(target_os = "android", target_os = "ios")))]
display_service::reset_resolutions();
#[cfg(all(windows, feature = "virtual_display_driver"))]
let _ = virtual_display_manager::reset_all();
#[cfg(all(windows))]
crate::privacy_win_mag::stop();
}
}
}

View File

@ -704,7 +704,7 @@ pub mod client {
}
let frame_ptr = base.add(ADDR_CAPTURE_FRAME);
let data = slice::from_raw_parts(frame_ptr, (*frame_info).length);
Ok(Frame::new(data, self.height))
Ok(Frame::new(data, self.width, self.height))
} else {
let ptr = base.add(ADDR_CAPTURE_WOULDBLOCK);
let wouldblock = utils::ptr_to_i32(ptr);