Merge pull request #6229 from 21pages/444

yuv 444
RustDesk 2023-10-31 10:08:59 +08:00 committed by GitHub
commit 12b8cbf3e0
80 changed files with 1182 additions and 1186 deletions


@ -27,45 +27,44 @@ class DraggableChatWindow extends StatelessWidget {
@override
Widget build(BuildContext context) {
return isIOS
? IOSDraggable (
position: position,
chatModel: chatModel,
width: width,
height: height,
builder: (context) {
return Column(
children: [
_buildMobileAppBar(context),
Expanded(
child: ChatPage(chatModel: chatModel),
),
],
);
},
)
: Draggable(
checkKeyboard: true,
position: position,
width: width,
height: height,
chatModel: chatModel,
builder: (context, onPanUpdate) {
final child =
Scaffold(
resizeToAvoidBottomInset: false,
appBar: CustomAppBar(
onPanUpdate: onPanUpdate,
appBar: isDesktop
? _buildDesktopAppBar(context)
: _buildMobileAppBar(context),
),
body: ChatPage(chatModel: chatModel),
);
return Container(
decoration:
BoxDecoration(border: Border.all(color: MyTheme.border)),
child: child);
});
? IOSDraggable(
position: position,
chatModel: chatModel,
width: width,
height: height,
builder: (context) {
return Column(
children: [
_buildMobileAppBar(context),
Expanded(
child: ChatPage(chatModel: chatModel),
),
],
);
},
)
: Draggable(
checkKeyboard: true,
position: position,
width: width,
height: height,
chatModel: chatModel,
builder: (context, onPanUpdate) {
final child = Scaffold(
resizeToAvoidBottomInset: false,
appBar: CustomAppBar(
onPanUpdate: onPanUpdate,
appBar: isDesktop
? _buildDesktopAppBar(context)
: _buildMobileAppBar(context),
),
body: ChatPage(chatModel: chatModel),
);
return Container(
decoration:
BoxDecoration(border: Border.all(color: MyTheme.border)),
child: child);
});
}
Widget _buildMobileAppBar(BuildContext context) {
@ -354,14 +353,14 @@ class _DraggableState extends State<Draggable> {
}
class IOSDraggable extends StatefulWidget {
const IOSDraggable({
Key? key,
this.position = Offset.zero,
this.chatModel,
required this.width,
required this.height,
required this.builder})
: super(key: key);
const IOSDraggable(
{Key? key,
this.position = Offset.zero,
this.chatModel,
required this.width,
required this.height,
required this.builder})
: super(key: key);
final Offset position;
final ChatModel? chatModel;
@ -423,7 +422,7 @@ class _IOSDraggableState extends State<IOSDraggable> {
_lastBottomHeight = bottomHeight;
}
@override
@override
Widget build(BuildContext context) {
checkKeyboard();
return Stack(
@ -439,12 +438,12 @@ class _IOSDraggableState extends State<IOSDraggable> {
_chatModel?.setChatWindowPosition(_position);
},
child: Material(
child:
Container(
width: _width,
height: _height,
decoration: BoxDecoration(border: Border.all(color: MyTheme.border)),
child: widget.builder(context),
child: Container(
width: _width,
height: _height,
decoration:
BoxDecoration(border: Border.all(color: MyTheme.border)),
child: widget.builder(context),
),
),
),
@ -499,6 +498,7 @@ class QualityMonitor extends StatelessWidget {
"${qualityMonitorModel.data.targetBitrate ?? '-'}kb"),
_row(
"Codec", qualityMonitorModel.data.codecFormat ?? '-'),
_row("Chroma", qualityMonitorModel.data.chroma ?? '-'),
],
),
)


@ -547,5 +547,22 @@ Future<List<TToggleMenu>> toolbarDisplayToggle(
child: Text(translate('Use all my displays for the remote session'))));
}
// YUV 4:4:4 (i444): only offered for VP9/AV1 peers on version 1.2.4 or newer
final codec_format = ffi.qualityMonitorModel.data.codecFormat;
if (versionCmp(pi.version, "1.2.4") >= 0 &&
(codec_format == "AV1" || codec_format == "VP9")) {
final option = 'i444';
final value =
bind.sessionGetToggleOptionSync(sessionId: sessionId, arg: option);
v.add(TToggleMenu(
value: value,
onChanged: (value) async {
if (value == null) return;
await bind.sessionToggleOption(sessionId: sessionId, value: option);
bind.sessionChangePreferCodec(sessionId: sessionId);
},
child: Text(translate('True color(4:4:4)'))));
}
return v;
}


@ -1320,6 +1320,7 @@ class _DisplayState extends State<_Display> {
otherRow('Lock after session end', 'lock_after_session_end'),
otherRow('Privacy mode', 'privacy_mode'),
otherRow('Reverse mouse wheel', 'reverse_mouse_wheel'),
otherRow('True color(4:4:4)', 'i444'),
];
if (useTextureRender) {
children.add(otherRow('Show displays as individual windows',


@ -797,6 +797,7 @@ class __DisplayPageState extends State<_DisplayPage> {
otherRow('Lock after session end', 'lock_after_session_end'),
otherRow('Privacy mode', 'privacy_mode'),
otherRow('Touch mode', 'touch-mode'),
otherRow('True color(4:4:4)', 'i444'),
],
),
]),


@ -1847,6 +1847,7 @@ class QualityMonitorData {
String? delay;
String? targetBitrate;
String? codecFormat;
String? chroma;
}
class QualityMonitorModel with ChangeNotifier {
@ -1900,6 +1901,9 @@ class QualityMonitorModel with ChangeNotifier {
if ((evt['codec_format'] as String).isNotEmpty) {
_data.codecFormat = evt['codec_format'];
}
if ((evt['chroma'] as String).isNotEmpty) {
_data.chroma = evt['chroma'];
}
notifyListeners();
} catch (e) {
//


@ -17,6 +17,11 @@ message YUV {
int32 stride = 2;
}
enum Chroma {
I420 = 0;
I444 = 1;
}
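
For context, a minimal sketch (not part of this diff) of what the two Chroma
variants mean for raw frame size; the same 12-vs-24 bits-per-pixel arithmetic
appears in the AomEncoder::encode length check later in this commit:

// I420 subsamples U and V by 2 in both dimensions (4:2:0); I444 keeps
// chroma at full resolution (4:4:4), doubling the raw frame size.
fn min_yuv_len(width: usize, height: usize, i444: bool) -> usize {
    let bits_per_pixel = if i444 { 24 } else { 12 };
    width * height * bits_per_pixel / 8
}
// 1920x1080: I420 -> 3_110_400 bytes, I444 -> 6_220_800 bytes
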
message VideoFrame {
oneof union {
EncodedVideoFrames vp9s = 6;
@ -83,11 +88,20 @@ message Features {
bool privacy_mode = 1;
}
message CodecAbility {
bool vp8 = 1;
bool vp9 = 2;
bool av1 = 3;
bool h264 = 4;
bool h265 = 5;
}
message SupportedEncoding {
bool h264 = 1;
bool h265 = 2;
bool vp8 = 3;
bool av1 = 4;
CodecAbility i444 = 5;
}
message PeerInfo {
@ -541,6 +555,8 @@ message SupportedDecoding {
PreferCodec prefer = 4;
int32 ability_vp8 = 5;
int32 ability_av1 = 6;
CodecAbility i444 = 7;
Chroma prefer_chroma = 8;
}
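
For illustration, a hedged sketch (not part of this diff) of how a peer on
this branch would fill the message above, using the generated Rust types; it
mirrors the supported_decodings() change further down:

let decoding = SupportedDecoding {
    ability_vp8: 1,
    ability_vp9: 1,
    ability_av1: 1,
    // 4:4:4 decoding is only claimed for VP9 and AV1
    i444: Some(CodecAbility {
        vp9: true,
        av1: true,
        ..Default::default()
    })
    .into(),
    prefer_chroma: Chroma::I444.into(),
    ..Default::default()
};
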
message OptionMessage {


@ -1229,6 +1229,10 @@ impl PeerConfig {
if !mp.contains_key(key) {
mp.insert(key.to_owned(), UserDefaultConfig::read().get(key));
}
key = "i444";
if !mp.contains_key(key) {
mp.insert(key.to_owned(), UserDefaultConfig::read().get(key));
}
}
}


@ -197,6 +197,7 @@ fn main() {
find_package("libyuv");
gen_vcpkg_package("libvpx", "vpx_ffi.h", "vpx_ffi.rs", "^[vV].*");
gen_vcpkg_package("aom", "aom_ffi.h", "aom_ffi.rs", "^(aom|AOM|OBU|AV1).*");
gen_vcpkg_package("libyuv", "yuv_ffi.h", "yuv_ffi.rs", ".*");
// there is a problem with cfg(target_os) in build.rs, so use our workaround
let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap();


@ -1,13 +1,20 @@
use docopt::Docopt;
use hbb_common::env_logger::{init_from_env, Env, DEFAULT_FILTER_ENV};
use hbb_common::{
env_logger::{init_from_env, Env, DEFAULT_FILTER_ENV},
log,
};
use scrap::{
aom::{AomDecoder, AomEncoder, AomEncoderConfig},
codec::{EncoderApi, EncoderCfg, Quality as Q},
Capturer, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig,
convert_to_yuv, Capturer, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig, VpxEncoder,
VpxEncoderConfig,
VpxVideoCodecId::{self, *},
STRIDE_ALIGN,
};
use std::{io::Write, time::Instant};
use std::{
io::Write,
time::{Duration, Instant},
};
// cargo run --package scrap --example benchmark --release --features hwcodec
@ -15,7 +22,7 @@ const USAGE: &'static str = "
Codec benchmark.
Usage:
benchmark [--count=COUNT] [--quality=QUALITY] [--hw-pixfmt=PIXFMT]
benchmark [--count=COUNT] [--quality=QUALITY] [--i444]
benchmark (-h | --help)
Options:
@ -23,24 +30,17 @@ Options:
--count=COUNT Capture frame count [default: 100].
--quality=QUALITY Video quality [default: Balanced].
Valid values: Best, Balanced, Low.
--hw-pixfmt=PIXFMT Hardware codec pixfmt. [default: i420]
Valid values: i420, nv12.
--i444 Use I444 (YUV 4:4:4) chroma sampling.
";
#[derive(Debug, serde::Deserialize)]
#[derive(Debug, serde::Deserialize, Clone, Copy)]
struct Args {
flag_count: usize,
flag_quality: Quality,
flag_hw_pixfmt: Pixfmt,
flag_i444: bool,
}
#[derive(Debug, serde::Deserialize)]
enum Pixfmt {
I420,
NV12,
}
#[derive(Debug, serde::Deserialize)]
#[derive(Debug, serde::Deserialize, Clone, Copy)]
enum Quality {
Best,
Balanced,
@ -54,31 +54,6 @@ fn main() {
.unwrap_or_else(|e| e.exit());
let quality = args.flag_quality;
let yuv_count = args.flag_count;
let (yuvs, width, height) = capture_yuv(yuv_count);
println!(
"benchmark {}x{} quality:{:?}k hw_pixfmt:{:?}",
width, height, quality, args.flag_hw_pixfmt
);
let quality = match quality {
Quality::Best => Q::Best,
Quality::Balanced => Q::Balanced,
Quality::Low => Q::Low,
};
[VP8, VP9].map(|c| test_vpx(c, &yuvs, width, height, quality, yuv_count));
test_av1(&yuvs, width, height, quality, yuv_count);
#[cfg(feature = "hwcodec")]
{
use hwcodec::AVPixelFormat;
let hw_pixfmt = match args.flag_hw_pixfmt {
Pixfmt::I420 => AVPixelFormat::AV_PIX_FMT_YUV420P,
Pixfmt::NV12 => AVPixelFormat::AV_PIX_FMT_NV12,
};
let yuvs = hw::vpx_yuv_to_hw_yuv(yuvs, width, height, hw_pixfmt);
hw::test(&yuvs, width, height, quality, yuv_count, hw_pixfmt);
}
}
fn capture_yuv(yuv_count: usize) -> (Vec<Vec<u8>>, usize, usize) {
let mut index = 0;
let mut displays = Display::all().unwrap();
for i in 0..displays.len() {
@ -88,28 +63,45 @@ fn capture_yuv(yuv_count: usize) -> (Vec<Vec<u8>>, usize, usize) {
}
}
let d = displays.remove(index);
let mut c = Capturer::new(d, true).unwrap();
let mut v = vec![];
loop {
if let Ok(frame) = c.frame(std::time::Duration::from_millis(30)) {
v.push(frame.0.to_vec());
print!("\rcapture {}/{}", v.len(), yuv_count);
std::io::stdout().flush().ok();
if v.len() == yuv_count {
println!();
return (v, c.width(), c.height());
}
}
let mut c = Capturer::new(d).unwrap();
let width = c.width();
let height = c.height();
println!(
"benchmark {}x{} quality:{:?}, i444:{:?}",
width, height, quality, args.flag_i444
);
let quality = match quality {
Quality::Best => Q::Best,
Quality::Balanced => Q::Balanced,
Quality::Low => Q::Low,
};
[VP8, VP9].map(|codec| {
test_vpx(
&mut c,
codec,
width,
height,
quality,
yuv_count,
if codec == VP8 { false } else { args.flag_i444 },
)
});
test_av1(&mut c, width, height, quality, yuv_count, args.flag_i444);
#[cfg(feature = "hwcodec")]
{
hw::test(&mut c, width, height, quality, yuv_count);
}
}
fn test_vpx(
c: &mut Capturer,
codec_id: VpxVideoCodecId,
yuvs: &Vec<Vec<u8>>,
width: usize,
height: usize,
quality: Q,
yuv_count: usize,
i444: bool,
) {
let config = EncoderCfg::VPX(VpxEncoderConfig {
width: width as _,
@ -118,28 +110,53 @@ fn test_vpx(
codec: codec_id,
keyframe_interval: None,
});
let mut encoder = VpxEncoder::new(config).unwrap();
let mut encoder = VpxEncoder::new(config, i444).unwrap();
let mut vpxs = vec![];
let start = Instant::now();
let mut size = 0;
for yuv in yuvs {
for ref frame in encoder
.encode(start.elapsed().as_millis() as _, yuv, STRIDE_ALIGN)
.unwrap()
{
size += frame.data.len();
vpxs.push(frame.data.to_vec());
let mut yuv = Vec::new();
let mut mid_data = Vec::new();
let mut counter = 0;
let mut time_sum = Duration::ZERO;
loop {
match c.frame(std::time::Duration::from_millis(30)) {
Ok(frame) => {
let tmp_timer = Instant::now();
convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
for ref frame in encoder
.encode(start.elapsed().as_millis() as _, &yuv, STRIDE_ALIGN)
.unwrap()
{
size += frame.data.len();
vpxs.push(frame.data.to_vec());
counter += 1;
print!("\r{codec_id:?} {}/{}", counter, yuv_count);
std::io::stdout().flush().ok();
}
for ref frame in encoder.flush().unwrap() {
size += frame.data.len();
vpxs.push(frame.data.to_vec());
counter += 1;
print!("\r{codec_id:?} {}/{}", counter, yuv_count);
std::io::stdout().flush().ok();
}
time_sum += tmp_timer.elapsed();
}
Err(e) => {
log::error!("{e:?}");
}
}
for ref frame in encoder.flush().unwrap() {
size += frame.data.len();
vpxs.push(frame.data.to_vec());
if counter >= yuv_count {
println!();
break;
}
}
assert_eq!(vpxs.len(), yuv_count);
println!(
"{:?} encode: {:?}, {} byte",
codec_id,
start.elapsed() / yuv_count as _,
time_sum / yuv_count as _,
size / yuv_count
);
@ -156,30 +173,58 @@ fn test_vpx(
);
}
fn test_av1(yuvs: &Vec<Vec<u8>>, width: usize, height: usize, quality: Q, yuv_count: usize) {
fn test_av1(
c: &mut Capturer,
width: usize,
height: usize,
quality: Q,
yuv_count: usize,
i444: bool,
) {
let config = EncoderCfg::AOM(AomEncoderConfig {
width: width as _,
height: height as _,
quality,
keyframe_interval: None,
});
let mut encoder = AomEncoder::new(config).unwrap();
let mut encoder = AomEncoder::new(config, i444).unwrap();
let start = Instant::now();
let mut size = 0;
let mut av1s = vec![];
for yuv in yuvs {
for ref frame in encoder
.encode(start.elapsed().as_millis() as _, yuv, STRIDE_ALIGN)
.unwrap()
{
size += frame.data.len();
av1s.push(frame.data.to_vec());
let mut av1s: Vec<Vec<u8>> = vec![];
let mut yuv = Vec::new();
let mut mid_data = Vec::new();
let mut counter = 0;
let mut time_sum = Duration::ZERO;
loop {
match c.frame(std::time::Duration::from_millis(30)) {
Ok(frame) => {
let tmp_timer = Instant::now();
convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
for ref frame in encoder
.encode(start.elapsed().as_millis() as _, &yuv, STRIDE_ALIGN)
.unwrap()
{
size += frame.data.len();
av1s.push(frame.data.to_vec());
counter += 1;
print!("\rAV1 {}/{}", counter, yuv_count);
std::io::stdout().flush().ok();
}
time_sum += tmp_timer.elapsed();
}
Err(e) => {
log::error!("{e:?}");
}
}
if counter >= yuv_count {
println!();
break;
}
}
assert_eq!(av1s.len(), yuv_count);
println!(
"AV1 encode: {:?}, {} byte",
start.elapsed() / yuv_count as _,
time_sum / yuv_count as _,
size / yuv_count
);
let mut decoder = AomDecoder::new().unwrap();
@ -193,165 +238,101 @@ fn test_av1(yuvs: &Vec<Vec<u8>>, width: usize, height: usize, quality: Q, yuv_co
#[cfg(feature = "hwcodec")]
mod hw {
use super::*;
use hwcodec::{
decode::{DecodeContext, Decoder},
encode::{EncodeContext, Encoder},
ffmpeg::{ffmpeg_linesize_offset_length, CodecInfo, CodecInfos},
AVPixelFormat,
Quality::*,
RateControl::*,
};
use hwcodec::ffmpeg::CodecInfo;
use scrap::{
codec::codec_thread_num,
convert::{
hw::{hw_bgra_to_i420, hw_bgra_to_nv12},
i420_to_bgra,
},
HW_STRIDE_ALIGN,
codec::HwEncoderConfig,
hwcodec::{HwDecoder, HwEncoder},
};
pub fn test(
yuvs: &Vec<Vec<u8>>,
use super::*;
pub fn test(c: &mut Capturer, width: usize, height: usize, quality: Q, yuv_count: usize) {
let best = HwEncoder::best();
let mut h264s = Vec::new();
let mut h265s = Vec::new();
if let Some(info) = best.h264 {
test_encoder(width, height, quality, info, c, yuv_count, &mut h264s);
}
if let Some(info) = best.h265 {
test_encoder(width, height, quality, info, c, yuv_count, &mut h265s);
}
let best = HwDecoder::best();
if let Some(info) = best.h264 {
test_decoder(info, &h264s);
}
if let Some(info) = best.h265 {
test_decoder(info, &h265s);
}
}
fn test_encoder(
width: usize,
height: usize,
quality: Q,
info: CodecInfo,
c: &mut Capturer,
yuv_count: usize,
pixfmt: AVPixelFormat,
h26xs: &mut Vec<Vec<u8>>,
) {
let bitrate = scrap::hwcodec::HwEncoder::convert_quality(quality);
let ctx = EncodeContext {
name: String::from(""),
width: width as _,
height: height as _,
pixfmt,
align: 0,
bitrate: bitrate as i32 * 1000,
timebase: [1, 30],
gop: 60,
quality: Quality_Default,
rc: RC_DEFAULT,
thread_count: codec_thread_num() as _,
};
let encoders = Encoder::available_encoders(ctx.clone());
println!("hw encoders: {}", encoders.len());
let best = CodecInfo::score(encoders.clone());
for info in encoders {
test_encoder(info.clone(), ctx.clone(), yuvs, is_best(&best, &info));
}
let (h264s, h265s) = prepare_h26x(best, ctx.clone(), yuvs);
assert!(h264s.is_empty() || h264s.len() == yuv_count);
assert!(h265s.is_empty() || h265s.len() == yuv_count);
let decoders = Decoder::available_decoders();
println!("hw decoders: {}", decoders.len());
let best = CodecInfo::score(decoders.clone());
for info in decoders {
let h26xs = if info.name.contains("h264") {
&h264s
} else {
&h265s
};
if h26xs.len() == yuvs.len() {
test_decoder(info.clone(), h26xs, is_best(&best, &info));
}
}
}
fn test_encoder(info: CodecInfo, ctx: EncodeContext, yuvs: &Vec<Vec<u8>>, best: bool) {
let mut ctx = ctx;
ctx.name = info.name;
let mut encoder = Encoder::new(ctx.clone()).unwrap();
let start = Instant::now();
let mut encoder = HwEncoder::new(
EncoderCfg::HW(HwEncoderConfig {
name: info.name.clone(),
width,
height,
quality,
keyframe_interval: None,
}),
false,
)
.unwrap();
let mut size = 0;
for yuv in yuvs {
let frames = encoder.encode(yuv).unwrap();
for frame in frames {
size += frame.data.len();
let mut yuv = Vec::new();
let mut mid_data = Vec::new();
let mut counter = 0;
let mut time_sum = Duration::ZERO;
loop {
match c.frame(std::time::Duration::from_millis(30)) {
Ok(frame) => {
let tmp_timer = Instant::now();
convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
for ref frame in encoder.encode(&yuv).unwrap() {
size += frame.data.len();
h26xs.push(frame.data.to_vec());
counter += 1;
print!("\r{:?} {}/{}", info.name, counter, yuv_count);
std::io::stdout().flush().ok();
}
time_sum += tmp_timer.elapsed();
}
Err(e) => {
log::error!("{e:?}");
}
}
if counter >= yuv_count {
println!();
break;
}
}
println!(
"{}{}: {:?}, {} byte",
if best { "*" } else { "" },
ctx.name,
start.elapsed() / yuvs.len() as _,
size / yuvs.len(),
"{}: {:?}, {} byte",
info.name,
time_sum / yuv_count as u32,
size / yuv_count,
);
}
fn test_decoder(info: CodecInfo, h26xs: &Vec<Vec<u8>>, best: bool) {
let ctx = DecodeContext {
name: info.name,
device_type: info.hwdevice,
thread_count: codec_thread_num() as _,
};
let mut decoder = Decoder::new(ctx.clone()).unwrap();
fn test_decoder(info: CodecInfo, h26xs: &Vec<Vec<u8>>) {
let mut decoder = HwDecoder::new(info.clone()).unwrap();
let start = Instant::now();
let mut cnt = 0;
for h26x in h26xs {
let _ = decoder.decode(h26x).unwrap();
cnt += 1;
}
let device = format!("{:?}", ctx.device_type).to_lowercase();
let device = format!("{:?}", info.hwdevice).to_lowercase();
let device = device.split("_").last().unwrap();
println!(
"{}{} {}: {:?}",
if best { "*" } else { "" },
ctx.name,
device,
start.elapsed() / cnt
);
}
fn prepare_h26x(
best: CodecInfos,
ctx: EncodeContext,
yuvs: &Vec<Vec<u8>>,
) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) {
let f = |info: Option<CodecInfo>| {
let mut h26xs = vec![];
if let Some(info) = info {
let mut ctx = ctx.clone();
ctx.name = info.name;
let mut encoder = Encoder::new(ctx).unwrap();
for yuv in yuvs {
let h26x = encoder.encode(yuv).unwrap();
for frame in h26x {
h26xs.push(frame.data.to_vec());
}
}
}
h26xs
};
(f(best.h264), f(best.h265))
}
fn is_best(best: &CodecInfos, info: &CodecInfo) -> bool {
Some(info.clone()) == best.h264 || Some(info.clone()) == best.h265
}
pub fn vpx_yuv_to_hw_yuv(
yuvs: Vec<Vec<u8>>,
width: usize,
height: usize,
pixfmt: AVPixelFormat,
) -> Vec<Vec<u8>> {
let yuvs = yuvs;
let mut bgra = vec![];
let mut v = vec![];
let (linesize, offset, length) =
ffmpeg_linesize_offset_length(pixfmt, width, height, HW_STRIDE_ALIGN).unwrap();
for mut yuv in yuvs {
i420_to_bgra(width, height, &yuv, &mut bgra);
if pixfmt == AVPixelFormat::AV_PIX_FMT_YUV420P {
hw_bgra_to_i420(width, height, &linesize, &offset, length, &bgra, &mut yuv);
} else {
hw_bgra_to_nv12(width, height, &linesize, &offset, length, &bgra, &mut yuv);
}
v.push(yuv);
}
v
println!("{} {}: {:?}", info.name, device, start.elapsed() / cnt);
}
}


@ -3,7 +3,7 @@ extern crate scrap;
use scrap::Display;
#[cfg(windows)]
use scrap::{i420_to_rgb, CapturerMag, TraitCapturer};
use scrap::{CapturerMag, TraitCapturer};
#[cfg(windows)]
use std::fs::File;
@ -24,6 +24,8 @@ fn get_display(i: usize) -> Display {
fn record(i: usize) {
use std::time::Duration;
use scrap::TraitFrame;
for d in Display::all().unwrap() {
println!("{:?} {} {}", d.origin(), d.width(), d.height());
}
@ -32,9 +34,8 @@ fn record(i: usize) {
let (w, h) = (display.width(), display.height());
{
let mut capture_mag =
CapturerMag::new(display.origin(), display.width(), display.height(), false)
.expect("Couldn't begin capture.");
let mut capture_mag = CapturerMag::new(display.origin(), display.width(), display.height())
.expect("Couldn't begin capture.");
let wnd_cls = "";
let wnd_name = "RustDeskPrivacyWindow";
if false == capture_mag.exclude(wnd_cls, wnd_name).unwrap() {
@ -43,7 +44,8 @@ fn record(i: usize) {
println!("Filter window for cls {} name {}", wnd_cls, wnd_name);
}
let frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
let captured_frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
let frame = captured_frame.data();
println!("Capture data len: {}, Saving...", frame.len());
let mut bitflipped = Vec::with_capacity(w * h * 4);
@ -68,9 +70,8 @@ fn record(i: usize) {
}
{
let mut capture_mag =
CapturerMag::new(display.origin(), display.width(), display.height(), true)
.expect("Couldn't begin capture.");
let mut capture_mag = CapturerMag::new(display.origin(), display.width(), display.height())
.expect("Couldn't begin capture.");
let wnd_cls = "";
let wnd_title = "RustDeskPrivacyWindow";
if false == capture_mag.exclude(wnd_cls, wnd_title).unwrap() {
@ -79,19 +80,28 @@ fn record(i: usize) {
println!("Filter window for cls {} title {}", wnd_cls, wnd_title);
}
let buffer = capture_mag.frame(Duration::from_millis(0)).unwrap();
println!("Capture data len: {}, Saving...", buffer.len());
let frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
println!("Capture data len: {}, Saving...", frame.data().len());
let mut frame = Default::default();
i420_to_rgb(w, h, &buffer, &mut frame);
let mut raw = Vec::new();
unsafe {
scrap::ARGBToRAW(
frame.data().as_ptr(),
frame.stride()[0] as _,
(&mut raw).as_mut_ptr(),
(w * 3) as _,
w as _,
h as _,
)
};
let mut bitflipped = Vec::with_capacity(w * h * 4);
let stride = frame.len() / h;
let stride = raw.len() / h;
for y in 0..h {
for x in 0..w {
let i = stride * y + 3 * x;
bitflipped.extend_from_slice(&[frame[i], frame[i + 1], frame[i + 2], 255]);
bitflipped.extend_from_slice(&[raw[i], raw[i + 1], raw[i + 2], 255]);
}
}
let name = format!("capture_mag_{}_2.png", i);


@ -1,5 +1,7 @@
use std::time::Duration;
use scrap::TraitFrame;
extern crate scrap;
fn main() {
@ -27,16 +29,16 @@ fn main() {
.spawn()
.expect("This example requires ffplay.");
let mut capturer = Capturer::new(d, false).unwrap();
let mut capturer = Capturer::new(d).unwrap();
let mut out = child.stdin.unwrap();
loop {
match capturer.frame(Duration::from_millis(0)) {
Ok(frame) => {
// Write the frame, removing end-of-row padding.
let stride = frame.len() / h;
let stride = frame.stride()[0];
let rowlen = 4 * w;
for row in frame.chunks(stride) {
for row in frame.data().chunks(stride) {
let row = &row[..rowlen];
out.write_all(row).unwrap();
}


@ -17,7 +17,7 @@ use scrap::codec::{EncoderApi, EncoderCfg, Quality as Q};
use webm::mux;
use webm::mux::Track;
use scrap::vpxcodec as vpx_encode;
use scrap::{convert_to_yuv, vpxcodec as vpx_encode};
use scrap::{Capturer, Display, TraitCapturer, STRIDE_ALIGN};
const USAGE: &'static str = "
@ -110,13 +110,16 @@ fn main() -> io::Result<()> {
Quality::Balanced => Q::Balanced,
Quality::Low => Q::Low,
};
let mut vpx = vpx_encode::VpxEncoder::new(EncoderCfg::VPX(vpx_encode::VpxEncoderConfig {
width,
height,
quality,
codec: vpx_codec,
keyframe_interval: None,
}))
let mut vpx = vpx_encode::VpxEncoder::new(
EncoderCfg::VPX(vpx_encode::VpxEncoderConfig {
width,
height,
quality,
codec: vpx_codec,
keyframe_interval: None,
}),
false,
)
.unwrap();
// Start recording.
@ -136,7 +139,9 @@ fn main() -> io::Result<()> {
let spf = Duration::from_nanos(1_000_000_000 / args.flag_fps);
// Capturer object is expensive, so avoid creating it frequently.
let mut c = Capturer::new(d, true).unwrap();
let mut c = Capturer::new(d).unwrap();
let mut yuv = Vec::new();
let mut mid_data = Vec::new();
while !stop.load(Ordering::Acquire) {
let now = Instant::now();
let time = now - start;
@ -147,8 +152,8 @@ fn main() -> io::Result<()> {
if let Ok(frame) = c.frame(Duration::from_millis(0)) {
let ms = time.as_secs() * 1000 + time.subsec_millis() as u64;
for frame in vpx.encode(ms as i64, &frame, STRIDE_ALIGN).unwrap() {
convert_to_yuv(&frame, vpx.yuvfmt(), &mut yuv, &mut mid_data);
for frame in vpx.encode(ms as i64, &yuv, STRIDE_ALIGN).unwrap() {
vt.add_frame(frame.data, frame.pts as u64 * 1_000_000, frame.key);
}
}


@ -6,7 +6,7 @@ use std::io::ErrorKind::WouldBlock;
use std::thread;
use std::time::Duration;
use scrap::{i420_to_rgb, Capturer, Display, TraitCapturer};
use scrap::{Capturer, Display, TraitCapturer, TraitFrame};
fn main() {
let n = Display::all().unwrap().len();
@ -28,14 +28,14 @@ fn record(i: usize) {
}
let display = get_display(i);
let mut capturer = Capturer::new(display, false).expect("Couldn't begin capture.");
let mut capturer = Capturer::new(display).expect("Couldn't begin capture.");
let (w, h) = (capturer.width(), capturer.height());
loop {
// Wait until there's a frame.
let buffer = match capturer.frame(Duration::from_millis(0)) {
Ok(buffer) => buffer,
let frame = match capturer.frame(Duration::from_millis(0)) {
Ok(frame) => frame,
Err(error) => {
if error.kind() == WouldBlock {
// Keep spinning.
@ -46,6 +46,7 @@ fn record(i: usize) {
}
}
};
let buffer = frame.data();
println!("Captured data len: {}, Saving...", buffer.len());
// Flip the BGRA image into a RGBA image.
@ -77,14 +78,14 @@ fn record(i: usize) {
drop(capturer);
let display = get_display(i);
let mut capturer = Capturer::new(display, true).expect("Couldn't begin capture.");
let mut capturer = Capturer::new(display).expect("Couldn't begin capture.");
let (w, h) = (capturer.width(), capturer.height());
loop {
// Wait until there's a frame.
let buffer = match capturer.frame(Duration::from_millis(0)) {
Ok(buffer) => buffer,
let frame = match capturer.frame(Duration::from_millis(0)) {
Ok(frame) => frame,
Err(error) => {
if error.kind() == WouldBlock {
// Keep spinning.
@ -95,18 +96,28 @@ fn record(i: usize) {
}
}
};
let buffer = frame.data();
println!("Captured data len: {}, Saving...", buffer.len());
let mut frame = Default::default();
i420_to_rgb(w, h, &buffer, &mut frame);
let mut raw = Vec::new();
unsafe {
scrap::ARGBToRAW(
buffer.as_ptr(),
frame.stride()[0] as _,
(&mut raw).as_mut_ptr(),
(w * 3) as _,
w as _,
h as _,
)
};
let mut bitflipped = Vec::with_capacity(w * h * 4);
let stride = frame.len() / h;
let stride = raw.len() / h;
for y in 0..h {
for x in 0..w {
let i = stride * y + 3 * x;
bitflipped.extend_from_slice(&[frame[i], frame[i + 1], frame[i + 2], 255]);
bitflipped.extend_from_slice(&[raw[i], raw[i + 1], raw[i + 2], 255]);
}
}
let name = format!("screenshot{}_2.png", i);


@ -0,0 +1,6 @@
#include <libyuv/convert.h>
#include <libyuv/convert_argb.h>
#include <libyuv/convert_from.h>
#include <libyuv/convert_from_argb.h>
#include <libyuv/rotate.h>
#include <libyuv/rotate_argb.h>


@ -1,5 +1,5 @@
use crate::android::ffi::*;
use crate::rgba_to_i420;
use crate::Pixfmt;
use lazy_static::lazy_static;
use serde_json::Value;
use std::collections::HashMap;
@ -12,15 +12,15 @@ lazy_static! {
pub struct Capturer {
display: Display,
bgra: Vec<u8>,
rgba: Vec<u8>,
saved_raw_data: Vec<u8>, // for faster compare and copy
}
impl Capturer {
pub fn new(display: Display, _yuv: bool) -> io::Result<Capturer> {
pub fn new(display: Display) -> io::Result<Capturer> {
Ok(Capturer {
display,
bgra: Vec::new(),
rgba: Vec::new(),
saved_raw_data: Vec::new(),
})
}
@ -35,22 +35,47 @@ impl Capturer {
}
impl crate::TraitCapturer for Capturer {
fn set_use_yuv(&mut self, _use_yuv: bool) {}
fn frame<'a>(&'a mut self, _timeout: Duration) -> io::Result<Frame<'a>> {
if let Some(buf) = get_video_raw() {
crate::would_block_if_equal(&mut self.saved_raw_data, buf)?;
rgba_to_i420(self.width(), self.height(), buf, &mut self.bgra);
Ok(Frame::RAW(&self.bgra))
// Is it safe to directly return buf without copy?
self.rgba.resize(buf.len(), 0);
unsafe {
std::ptr::copy_nonoverlapping(buf.as_ptr(), self.rgba.as_mut_ptr(), buf.len())
};
Ok(Frame::new(&self.rgba, self.height()))
} else {
return Err(io::ErrorKind::WouldBlock.into());
}
}
}
pub enum Frame<'a> {
RAW(&'a [u8]),
Empty,
pub struct Frame<'a> {
pub data: &'a [u8],
pub stride: Vec<usize>,
}
impl<'a> Frame<'a> {
pub fn new(data: &'a [u8], h: usize) -> Self {
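// the capture delivers one packed RGBA plane, so the row stride is simply data.len() / h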
let stride = data.len() / h;
let mut v = Vec::new();
v.push(stride);
Frame { data, stride: v }
}
}
impl<'a> crate::TraitFrame for Frame<'a> {
fn data(&self) -> &[u8] {
self.data
}
fn stride(&self) -> Vec<usize> {
self.stride.clone()
}
fn pixfmt(&self) -> Pixfmt {
Pixfmt::RGBA
}
}
pub struct Display {


@ -7,13 +7,14 @@
include!(concat!(env!("OUT_DIR"), "/aom_ffi.rs"));
use crate::codec::{base_bitrate, codec_thread_num, Quality};
use crate::Pixfmt;
use crate::{codec::EncoderApi, EncodeFrame, STRIDE_ALIGN};
use crate::{common::GoogleImage, generate_call_macro, generate_call_ptr_macro, Error, Result};
use hbb_common::{
anyhow::{anyhow, Context},
bytes::Bytes,
log,
message_proto::{EncodedVideoFrame, EncodedVideoFrames, VideoFrame},
message_proto::{Chroma, EncodedVideoFrame, EncodedVideoFrames, VideoFrame},
ResultType,
};
use std::{ptr, slice};
@ -52,6 +53,7 @@ pub struct AomEncoder {
ctx: aom_codec_ctx_t,
width: usize,
height: usize,
i444: bool,
}
// https://webrtc.googlesource.com/src/+/refs/heads/main/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
@ -95,6 +97,7 @@ mod webrtc {
pub fn enc_cfg(
i: *const aom_codec_iface,
cfg: AomEncoderConfig,
i444: bool,
) -> ResultType<aom_codec_enc_cfg> {
let mut c = unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
call_aom!(aom_codec_enc_config_default(i, &mut c, kUsageProfile));
@ -139,6 +142,9 @@ mod webrtc {
c.g_pass = aom_enc_pass::AOM_RC_ONE_PASS; // One-pass rate control
c.g_lag_in_frames = kLagInFrames; // No look ahead when lag equals 0.
// https://aomedia.googlesource.com/aom/+/refs/tags/v3.6.0/av1/common/enums.h#82
c.g_profile = if i444 { 1 } else { 0 };
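// AV1 profile 0 (Main) is limited to 4:2:0; profile 1 (High) allows 4:4:4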
Ok(c)
}
@ -210,14 +216,14 @@ mod webrtc {
}
impl EncoderApi for AomEncoder {
fn new(cfg: crate::codec::EncoderCfg) -> ResultType<Self>
fn new(cfg: crate::codec::EncoderCfg, i444: bool) -> ResultType<Self>
where
Self: Sized,
{
match cfg {
crate::codec::EncoderCfg::AOM(config) => {
let i = call_aom_ptr!(aom_codec_av1_cx());
let c = webrtc::enc_cfg(i, config)?;
let c = webrtc::enc_cfg(i, config, i444)?;
let mut ctx = Default::default();
// Flag options: AOM_CODEC_USE_PSNR and AOM_CODEC_USE_HIGHBITDEPTH
@ -234,6 +240,7 @@ impl EncoderApi for AomEncoder {
ctx,
width: config.width as _,
height: config.height as _,
i444,
})
}
_ => Err(anyhow!("encoder type mismatch")),
@ -255,8 +262,36 @@ impl EncoderApi for AomEncoder {
}
}
fn use_yuv(&self) -> bool {
true
fn yuvfmt(&self) -> crate::EncodeYuvFormat {
let mut img = Default::default();
let fmt = if self.i444 {
aom_img_fmt::AOM_IMG_FMT_I444
} else {
aom_img_fmt::AOM_IMG_FMT_I420
};
unsafe {
aom_img_wrap(
&mut img,
fmt,
self.width as _,
self.height as _,
crate::STRIDE_ALIGN as _,
0x1 as _,
);
}
let pixfmt = if self.i444 {
Pixfmt::I444
} else {
Pixfmt::I420
};
crate::EncodeYuvFormat {
pixfmt,
w: img.w as _,
h: img.h as _,
stride: img.stride.map(|s| s as usize).to_vec(),
u: img.planes[1] as usize - img.planes[0] as usize,
v: img.planes[2] as usize - img.planes[0] as usize,
}
}
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
@ -282,14 +317,20 @@ impl EncoderApi for AomEncoder {
impl AomEncoder {
pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
if 2 * data.len() < 3 * self.width * self.height {
let bpp = if self.i444 { 24 } else { 12 };
if data.len() < self.width * self.height * bpp / 8 {
return Err(Error::FailedCall("len not enough".to_string()));
}
let fmt = if self.i444 {
aom_img_fmt::AOM_IMG_FMT_I444
} else {
aom_img_fmt::AOM_IMG_FMT_I420
};
let mut image = Default::default();
call_aom_ptr!(aom_img_wrap(
&mut image,
aom_img_fmt::AOM_IMG_FMT_I420,
fmt,
self.width as _,
self.height as _,
stride_align as _,
@ -524,6 +565,13 @@ impl GoogleImage for Image {
fn planes(&self) -> Vec<*mut u8> {
self.inner().planes.iter().map(|p| *p as *mut u8).collect()
}
fn chroma(&self) -> Chroma {
match self.inner().fmt {
aom_img_fmt::AOM_IMG_FMT_I444 => Chroma::I444,
_ => Chroma::I420,
}
}
}
impl Drop for Image {


@ -14,7 +14,7 @@ use crate::{
aom::{self, AomDecoder, AomEncoder, AomEncoderConfig},
common::GoogleImage,
vpxcodec::{self, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig, VpxVideoCodecId},
CodecName, ImageRgb,
CodecName, EncodeYuvFormat, ImageRgb,
};
use hbb_common::{
@ -23,7 +23,7 @@ use hbb_common::{
config::PeerConfig,
log,
message_proto::{
supported_decoding::PreferCodec, video_frame, EncodedVideoFrames,
supported_decoding::PreferCodec, video_frame, Chroma, CodecAbility, EncodedVideoFrames,
SupportedDecoding, SupportedEncoding, VideoFrame,
},
sysinfo::{System, SystemExt},
@ -56,13 +56,13 @@ pub enum EncoderCfg {
}
pub trait EncoderApi {
fn new(cfg: EncoderCfg) -> ResultType<Self>
fn new(cfg: EncoderCfg, i444: bool) -> ResultType<Self>
where
Self: Sized;
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<VideoFrame>;
fn use_yuv(&self) -> bool;
fn yuvfmt(&self) -> EncodeYuvFormat;
fn set_quality(&mut self, quality: Quality) -> ResultType<()>;
@ -107,18 +107,18 @@ pub enum EncodingUpdate {
}
impl Encoder {
pub fn new(config: EncoderCfg) -> ResultType<Encoder> {
log::info!("new encoder:{:?}", config);
pub fn new(config: EncoderCfg, i444: bool) -> ResultType<Encoder> {
log::info!("new encoder:{config:?}, i444:{i444}");
match config {
EncoderCfg::VPX(_) => Ok(Encoder {
codec: Box::new(VpxEncoder::new(config)?),
codec: Box::new(VpxEncoder::new(config, i444)?),
}),
EncoderCfg::AOM(_) => Ok(Encoder {
codec: Box::new(AomEncoder::new(config)?),
codec: Box::new(AomEncoder::new(config, i444)?),
}),
#[cfg(feature = "hwcodec")]
EncoderCfg::HW(_) => match HwEncoder::new(config) {
EncoderCfg::HW(_) => match HwEncoder::new(config, i444) {
Ok(hw) => Ok(Encoder {
codec: Box::new(hw),
}),
@ -230,6 +230,12 @@ impl Encoder {
let mut encoding = SupportedEncoding {
vp8: true,
av1: true,
i444: Some(CodecAbility {
vp9: true,
av1: true,
..Default::default()
})
.into(),
..Default::default()
};
#[cfg(feature = "hwcodec")]
@ -240,18 +246,41 @@ impl Encoder {
}
encoding
}
pub fn use_i444(config: &EncoderCfg) -> bool {
let decodings = PEER_DECODINGS.lock().unwrap().clone();
let prefer_i444 = decodings
.iter()
.all(|d| d.1.prefer_chroma == Chroma::I444.into());
let i444_useable = match config {
EncoderCfg::VPX(vpx) => match vpx.codec {
VpxVideoCodecId::VP8 => false,
VpxVideoCodecId::VP9 => decodings.iter().all(|d| d.1.i444.vp9),
},
EncoderCfg::AOM(_) => decodings.iter().all(|d| d.1.i444.av1),
EncoderCfg::HW(_) => false,
};
prefer_i444 && i444_useable && !decodings.is_empty()
}
}
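
A hedged sketch of the intended call site (not shown in this hunk): the
sending side decides 4:4:4 once per encoder instance from the recorded peer
decodings, then passes the flag into the constructor:

// `config` is the EncoderCfg already chosen for this session
let i444 = Encoder::use_i444(&config);
let mut encoder = Encoder::new(config, i444)?;
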
impl Decoder {
pub fn supported_decodings(id_for_perfer: Option<&str>) -> SupportedDecoding {
let (prefer, prefer_chroma) = Self::preference(id_for_perfer);
#[allow(unused_mut)]
let mut decoding = SupportedDecoding {
ability_vp8: 1,
ability_vp9: 1,
ability_av1: 1,
prefer: id_for_perfer
.map_or(PreferCodec::Auto, |id| Self::codec_preference(id))
.into(),
i444: Some(CodecAbility {
vp9: true,
av1: true,
..Default::default()
})
.into(),
prefer: prefer.into(),
prefer_chroma: prefer_chroma.into(),
..Default::default()
};
#[cfg(feature = "hwcodec")]
@ -314,31 +343,33 @@ impl Decoder {
&mut self,
frame: &video_frame::Union,
rgb: &mut ImageRgb,
chroma: &mut Option<Chroma>,
) -> ResultType<bool> {
match frame {
video_frame::Union::Vp8s(vp8s) => {
if let Some(vp8) = &mut self.vp8 {
Decoder::handle_vpxs_video_frame(vp8, vp8s, rgb)
Decoder::handle_vpxs_video_frame(vp8, vp8s, rgb, chroma)
} else {
bail!("vp8 decoder not available");
}
}
video_frame::Union::Vp9s(vp9s) => {
if let Some(vp9) = &mut self.vp9 {
Decoder::handle_vpxs_video_frame(vp9, vp9s, rgb)
Decoder::handle_vpxs_video_frame(vp9, vp9s, rgb, chroma)
} else {
bail!("vp9 decoder not available");
}
}
video_frame::Union::Av1s(av1s) => {
if let Some(av1) = &mut self.av1 {
Decoder::handle_av1s_video_frame(av1, av1s, rgb)
Decoder::handle_av1s_video_frame(av1, av1s, rgb, chroma)
} else {
bail!("av1 decoder not available");
}
}
#[cfg(feature = "hwcodec")]
video_frame::Union::H264s(h264s) => {
*chroma = Some(Chroma::I420);
if let Some(decoder) = &mut self.hw.h264 {
Decoder::handle_hw_video_frame(decoder, h264s, rgb, &mut self.i420)
} else {
@ -347,6 +378,7 @@ impl Decoder {
}
#[cfg(feature = "hwcodec")]
video_frame::Union::H265s(h265s) => {
*chroma = Some(Chroma::I420);
if let Some(decoder) = &mut self.hw.h265 {
Decoder::handle_hw_video_frame(decoder, h265s, rgb, &mut self.i420)
} else {
@ -355,6 +387,7 @@ impl Decoder {
}
#[cfg(feature = "mediacodec")]
video_frame::Union::H264s(h264s) => {
*chroma = Some(Chroma::I420);
if let Some(decoder) = &mut self.media_codec.h264 {
Decoder::handle_mediacodec_video_frame(decoder, h264s, rgb)
} else {
@ -363,6 +396,7 @@ impl Decoder {
}
#[cfg(feature = "mediacodec")]
video_frame::Union::H265s(h265s) => {
*chroma = Some(Chroma::I420);
if let Some(decoder) = &mut self.media_codec.h265 {
Decoder::handle_mediacodec_video_frame(decoder, h265s, rgb)
} else {
@ -378,6 +412,7 @@ impl Decoder {
decoder: &mut VpxDecoder,
vpxs: &EncodedVideoFrames,
rgb: &mut ImageRgb,
chroma: &mut Option<Chroma>,
) -> ResultType<bool> {
let mut last_frame = vpxcodec::Image::new();
for vpx in vpxs.frames.iter() {
@ -393,6 +428,7 @@ impl Decoder {
if last_frame.is_null() {
Ok(false)
} else {
*chroma = Some(last_frame.chroma());
last_frame.to(rgb);
Ok(true)
}
@ -403,6 +439,7 @@ impl Decoder {
decoder: &mut AomDecoder,
av1s: &EncodedVideoFrames,
rgb: &mut ImageRgb,
chroma: &mut Option<Chroma>,
) -> ResultType<bool> {
let mut last_frame = aom::Image::new();
for av1 in av1s.frames.iter() {
@ -418,6 +455,7 @@ impl Decoder {
if last_frame.is_null() {
Ok(false)
} else {
*chroma = Some(last_frame.chroma());
last_frame.to(rgb);
Ok(true)
}
@ -457,12 +495,16 @@ impl Decoder {
return Ok(false);
}
fn codec_preference(id: &str) -> PreferCodec {
let codec = PeerConfig::load(id)
.options
fn preference(id: Option<&str>) -> (PreferCodec, Chroma) {
let id = id.unwrap_or_default();
if id.is_empty() {
return (PreferCodec::Auto, Chroma::I420);
}
let options = PeerConfig::load(id).options;
let codec = options
.get("codec-preference")
.map_or("".to_owned(), |c| c.to_owned());
if codec == "vp8" {
let codec = if codec == "vp8" {
PreferCodec::VP8
} else if codec == "vp9" {
PreferCodec::VP9
@ -474,7 +516,13 @@ impl Decoder {
PreferCodec::H265
} else {
PreferCodec::Auto
}
};
let chroma = if options.get("i444") == Some(&"Y".to_string()) {
Chroma::I444
} else {
Chroma::I420
};
(codec, chroma)
}
}


@ -1,367 +1,25 @@
use super::vpx::*;
use std::os::raw::c_int;
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(improper_ctypes)]
#![allow(dead_code)]
extern "C" {
// seems libyuv uses reverse byte order compared with our view
include!(concat!(env!("OUT_DIR"), "/yuv_ffi.rs"));
pub fn ARGBRotate(
src_argb: *const u8,
src_stride_argb: c_int,
dst_argb: *mut u8,
dst_stride_argb: c_int,
src_width: c_int,
src_height: c_int,
mode: c_int,
) -> c_int;
#[cfg(not(target_os = "ios"))]
use crate::Frame;
use crate::{generate_call_macro, EncodeYuvFormat, TraitFrame};
use hbb_common::{bail, log, ResultType};
pub fn ARGBMirror(
src_argb: *const u8,
src_stride_argb: c_int,
dst_argb: *mut u8,
dst_stride_argb: c_int,
width: c_int,
height: c_int,
) -> c_int;
pub fn ARGBToI420(
src_bgra: *const u8,
src_stride_bgra: c_int,
dst_y: *mut u8,
dst_stride_y: c_int,
dst_u: *mut u8,
dst_stride_u: c_int,
dst_v: *mut u8,
dst_stride_v: c_int,
width: c_int,
height: c_int,
) -> c_int;
pub fn ABGRToI420(
src_rgba: *const u8,
src_stride_rgba: c_int,
dst_y: *mut u8,
dst_stride_y: c_int,
dst_u: *mut u8,
dst_stride_u: c_int,
dst_v: *mut u8,
dst_stride_v: c_int,
width: c_int,
height: c_int,
) -> c_int;
pub fn ARGBToNV12(
src_bgra: *const u8,
src_stride_bgra: c_int,
dst_y: *mut u8,
dst_stride_y: c_int,
dst_uv: *mut u8,
dst_stride_uv: c_int,
width: c_int,
height: c_int,
) -> c_int;
pub fn NV12ToI420(
src_y: *const u8,
src_stride_y: c_int,
src_uv: *const u8,
src_stride_uv: c_int,
dst_y: *mut u8,
dst_stride_y: c_int,
dst_u: *mut u8,
dst_stride_u: c_int,
dst_v: *mut u8,
dst_stride_v: c_int,
width: c_int,
height: c_int,
) -> c_int;
// I420ToRGB24: RGB little endian (bgr in memory)
// I420ToRaw: RGB big endian (rgb in memory) to RGBA.
pub fn I420ToRAW(
src_y: *const u8,
src_stride_y: c_int,
src_u: *const u8,
src_stride_u: c_int,
src_v: *const u8,
src_stride_v: c_int,
dst_rgba: *mut u8,
dst_stride_raw: c_int,
width: c_int,
height: c_int,
) -> c_int;
pub fn I420ToARGB(
src_y: *const u8,
src_stride_y: c_int,
src_u: *const u8,
src_stride_u: c_int,
src_v: *const u8,
src_stride_v: c_int,
dst_rgba: *mut u8,
dst_stride_rgba: c_int,
width: c_int,
height: c_int,
) -> c_int;
pub fn I420ToABGR(
src_y: *const u8,
src_stride_y: c_int,
src_u: *const u8,
src_stride_u: c_int,
src_v: *const u8,
src_stride_v: c_int,
dst_rgba: *mut u8,
dst_stride_rgba: c_int,
width: c_int,
height: c_int,
) -> c_int;
pub fn NV12ToARGB(
src_y: *const u8,
src_stride_y: c_int,
src_uv: *const u8,
src_stride_uv: c_int,
dst_rgba: *mut u8,
dst_stride_rgba: c_int,
width: c_int,
height: c_int,
) -> c_int;
pub fn NV12ToABGR(
src_y: *const u8,
src_stride_y: c_int,
src_uv: *const u8,
src_stride_uv: c_int,
dst_rgba: *mut u8,
dst_stride_rgba: c_int,
width: c_int,
height: c_int,
) -> c_int;
}
// https://github.com/webmproject/libvpx/blob/master/vpx/src/vpx_image.c
#[inline]
fn get_vpx_i420_stride(
width: usize,
height: usize,
stride_align: usize,
) -> (usize, usize, usize, usize, usize, usize) {
let mut img = Default::default();
unsafe {
vpx_img_wrap(
&mut img,
vpx_img_fmt::VPX_IMG_FMT_I420,
width as _,
height as _,
stride_align as _,
0x1 as _,
);
}
(
img.w as _,
img.h as _,
img.stride[0] as _,
img.stride[1] as _,
img.planes[1] as usize - img.planes[0] as usize,
img.planes[2] as usize - img.planes[0] as usize,
)
}
pub fn i420_to_rgb(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
let (_, _, src_stride_y, src_stride_uv, u, v) =
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
let src_y = src.as_ptr();
let src_u = src[u..].as_ptr();
let src_v = src[v..].as_ptr();
dst.resize(width * height * 3, 0);
unsafe {
super::I420ToRAW(
src_y,
src_stride_y as _,
src_u,
src_stride_uv as _,
src_v,
src_stride_uv as _,
dst.as_mut_ptr(),
(width * 3) as _,
width as _,
height as _,
);
};
}
pub fn i420_to_bgra(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
let (_, _, src_stride_y, src_stride_uv, u, v) =
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
let src_y = src.as_ptr();
let src_u = src[u..].as_ptr();
let src_v = src[v..].as_ptr();
dst.resize(width * height * 4, 0);
unsafe {
super::I420ToARGB(
src_y,
src_stride_y as _,
src_u,
src_stride_uv as _,
src_v,
src_stride_uv as _,
dst.as_mut_ptr(),
(width * 3) as _,
width as _,
height as _,
);
};
}
pub fn bgra_to_i420(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
let (_, h, dst_stride_y, dst_stride_uv, u, v) =
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
dst.resize(h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
let dst_y = dst.as_mut_ptr();
let dst_u = dst[u..].as_mut_ptr();
let dst_v = dst[v..].as_mut_ptr();
unsafe {
ARGBToI420(
src.as_ptr(),
(src.len() / height) as _,
dst_y,
dst_stride_y as _,
dst_u,
dst_stride_uv as _,
dst_v,
dst_stride_uv as _,
width as _,
height as _,
);
}
}
pub fn rgba_to_i420(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
let (_, h, dst_stride_y, dst_stride_uv, u, v) =
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
dst.resize(h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
let dst_y = dst.as_mut_ptr();
let dst_u = dst[u..].as_mut_ptr();
let dst_v = dst[v..].as_mut_ptr();
unsafe {
ABGRToI420(
src.as_ptr(),
(src.len() / height) as _,
dst_y,
dst_stride_y as _,
dst_u,
dst_stride_uv as _,
dst_v,
dst_stride_uv as _,
width as _,
height as _,
);
}
}
pub unsafe fn nv12_to_i420(
src_y: *const u8,
src_stride_y: c_int,
src_uv: *const u8,
src_stride_uv: c_int,
width: usize,
height: usize,
dst: &mut Vec<u8>,
) {
let (_, h, dst_stride_y, dst_stride_uv, u, v) =
get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
dst.resize(h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
let dst_y = dst.as_mut_ptr();
let dst_u = dst[u..].as_mut_ptr();
let dst_v = dst[v..].as_mut_ptr();
NV12ToI420(
src_y,
src_stride_y,
src_uv,
src_stride_uv,
dst_y,
dst_stride_y as _,
dst_u,
dst_stride_uv as _,
dst_v,
dst_stride_uv as _,
width as _,
height as _,
);
}
generate_call_macro!(call_yuv, false);
#[cfg(feature = "hwcodec")]
pub mod hw {
use super::*;
use crate::ImageFormat;
use hbb_common::{anyhow::anyhow, ResultType};
#[cfg(target_os = "windows")]
use hwcodec::{ffmpeg::ffmpeg_linesize_offset_length, AVPixelFormat};
pub fn hw_bgra_to_i420(
width: usize,
height: usize,
stride: &[i32],
offset: &[i32],
length: i32,
src: &[u8],
dst: &mut Vec<u8>,
) {
let stride_y = stride[0] as usize;
let stride_u = stride[1] as usize;
let stride_v = stride[2] as usize;
let offset_u = offset[0] as usize;
let offset_v = offset[1] as usize;
dst.resize(length as _, 0);
let dst_y = dst.as_mut_ptr();
let dst_u = dst[offset_u..].as_mut_ptr();
let dst_v = dst[offset_v..].as_mut_ptr();
unsafe {
super::ARGBToI420(
src.as_ptr(),
(src.len() / height) as _,
dst_y,
stride_y as _,
dst_u,
stride_u as _,
dst_v,
stride_v as _,
width as _,
height as _,
);
}
}
pub fn hw_bgra_to_nv12(
width: usize,
height: usize,
stride: &[i32],
offset: &[i32],
length: i32,
src: &[u8],
dst: &mut Vec<u8>,
) {
let stride_y = stride[0] as usize;
let stride_uv = stride[1] as usize;
let offset_uv = offset[0] as usize;
dst.resize(length as _, 0);
let dst_y = dst.as_mut_ptr();
let dst_uv = dst[offset_uv..].as_mut_ptr();
unsafe {
super::ARGBToNV12(
src.as_ptr(),
(src.len() / height) as _,
dst_y,
stride_y as _,
dst_uv,
stride_uv as _,
width as _,
height as _,
);
}
}
#[cfg(target_os = "windows")]
pub fn hw_nv12_to(
fmt: ImageFormat,
@ -386,61 +44,59 @@ pub mod hw {
let i420_stride_v = linesize_i420[2];
i420.resize(i420_len as _, 0);
unsafe {
let i420_offset_y = i420.as_ptr().add(0) as _;
let i420_offset_u = i420.as_ptr().add(offset_i420[0] as _) as _;
let i420_offset_v = i420.as_ptr().add(offset_i420[1] as _) as _;
super::NV12ToI420(
src_y.as_ptr(),
nv12_stride_y as _,
src_uv.as_ptr(),
nv12_stride_uv as _,
i420_offset_y,
i420_stride_y,
i420_offset_u,
i420_stride_u,
i420_offset_v,
i420_stride_v,
width as _,
height as _,
);
match fmt {
ImageFormat::ARGB => {
super::I420ToARGB(
i420_offset_y,
i420_stride_y,
i420_offset_u,
i420_stride_u,
i420_offset_v,
i420_stride_v,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
);
}
ImageFormat::ABGR => {
super::I420ToABGR(
i420_offset_y,
i420_stride_y,
i420_offset_u,
i420_stride_u,
i420_offset_v,
i420_stride_v,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
);
}
_ => {
return Err(anyhow!("unsupported image format"));
}
let i420_offset_y = unsafe { i420.as_ptr().add(0) as _ };
let i420_offset_u = unsafe { i420.as_ptr().add(offset_i420[0] as _) as _ };
let i420_offset_v = unsafe { i420.as_ptr().add(offset_i420[1] as _) as _ };
call_yuv!(NV12ToI420(
src_y.as_ptr(),
nv12_stride_y as _,
src_uv.as_ptr(),
nv12_stride_uv as _,
i420_offset_y,
i420_stride_y,
i420_offset_u,
i420_stride_u,
i420_offset_v,
i420_stride_v,
width as _,
height as _,
));
match fmt {
ImageFormat::ARGB => {
call_yuv!(I420ToARGB(
i420_offset_y,
i420_stride_y,
i420_offset_u,
i420_stride_u,
i420_offset_v,
i420_stride_v,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
));
}
return Ok(());
};
ImageFormat::ABGR => {
call_yuv!(I420ToABGR(
i420_offset_y,
i420_stride_y,
i420_offset_u,
i420_stride_u,
i420_offset_v,
i420_stride_v,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
));
}
_ => {
bail!("unsupported image format");
}
}
return Ok(());
}
return Err(anyhow!("get linesize offset failed"));
bail!("get linesize offset failed");
}
#[cfg(not(target_os = "windows"))]
@ -457,41 +113,34 @@ pub mod hw {
_align: usize,
) -> ResultType<()> {
dst.resize(width * height * 4, 0);
unsafe {
match fmt {
ImageFormat::ARGB => {
match super::NV12ToARGB(
src_y.as_ptr(),
src_stride_y as _,
src_uv.as_ptr(),
src_stride_uv as _,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
) {
0 => Ok(()),
_ => Err(anyhow!("NV12ToARGB failed")),
}
}
ImageFormat::ABGR => {
match super::NV12ToABGR(
src_y.as_ptr(),
src_stride_y as _,
src_uv.as_ptr(),
src_stride_uv as _,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
) {
0 => Ok(()),
_ => Err(anyhow!("NV12ToABGR failed")),
}
}
_ => Err(anyhow!("unsupported image format")),
match fmt {
ImageFormat::ARGB => {
call_yuv!(NV12ToARGB(
src_y.as_ptr(),
src_stride_y as _,
src_uv.as_ptr(),
src_stride_uv as _,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
));
}
ImageFormat::ABGR => {
call_yuv!(NV12ToABGR(
src_y.as_ptr(),
src_stride_y as _,
src_uv.as_ptr(),
src_stride_uv as _,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
));
}
_ => bail!("unsupported image format"),
}
Ok(())
}
pub fn hw_i420_to(
@ -505,43 +154,153 @@ pub mod hw {
src_stride_u: usize,
src_stride_v: usize,
dst: &mut Vec<u8>,
) {
) -> ResultType<()> {
let src_y = src_y.as_ptr();
let src_u = src_u.as_ptr();
let src_v = src_v.as_ptr();
dst.resize(width * height * 4, 0);
unsafe {
match fmt {
ImageFormat::ARGB => {
super::I420ToARGB(
src_y,
src_stride_y as _,
src_u,
src_stride_u as _,
src_v,
src_stride_v as _,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
);
}
ImageFormat::ABGR => {
super::I420ToABGR(
src_y,
src_stride_y as _,
src_u,
src_stride_u as _,
src_v,
src_stride_v as _,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
);
}
_ => {}
match fmt {
ImageFormat::ARGB => {
call_yuv!(I420ToARGB(
src_y,
src_stride_y as _,
src_u,
src_stride_u as _,
src_v,
src_stride_v as _,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
));
}
ImageFormat::ABGR => {
call_yuv!(I420ToABGR(
src_y,
src_stride_y as _,
src_u,
src_stride_u as _,
src_v,
src_stride_v as _,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
));
}
_ => bail!("unsupported image format"),
};
Ok(())
}
}
#[cfg(not(target_os = "ios"))]
pub fn convert_to_yuv(
captured: &Frame,
dst_fmt: EncodeYuvFormat,
dst: &mut Vec<u8>,
mid_data: &mut Vec<u8>,
) -> ResultType<()> {
let src = captured.data();
let src_stride = captured.stride();
let captured_pixfmt = captured.pixfmt();
if captured_pixfmt == crate::Pixfmt::BGRA || captured_pixfmt == crate::Pixfmt::RGBA {
if src.len() < src_stride[0] * dst_fmt.h {
bail!(
"length not enough: {} < {}",
src.len(),
src_stride[0] * dst_fmt.h
);
}
}
match (captured_pixfmt, dst_fmt.pixfmt) {
(crate::Pixfmt::BGRA, crate::Pixfmt::I420) | (crate::Pixfmt::RGBA, crate::Pixfmt::I420) => {
let dst_stride_y = dst_fmt.stride[0];
let dst_stride_uv = dst_fmt.stride[1];
dst.resize(dst_fmt.h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
let dst_y = dst.as_mut_ptr();
let dst_u = dst[dst_fmt.u..].as_mut_ptr();
let dst_v = dst[dst_fmt.v..].as_mut_ptr();
let f = if captured_pixfmt == crate::Pixfmt::BGRA {
ARGBToI420
} else {
ABGRToI420
};
call_yuv!(f(
src.as_ptr(),
src_stride[0] as _,
dst_y,
dst_stride_y as _,
dst_u,
dst_stride_uv as _,
dst_v,
dst_stride_uv as _,
dst_fmt.w as _,
dst_fmt.h as _,
));
}
(crate::Pixfmt::BGRA, crate::Pixfmt::NV12) | (crate::Pixfmt::RGBA, crate::Pixfmt::NV12) => {
let dst_stride_y = dst_fmt.stride[0];
let dst_stride_uv = dst_fmt.stride[1];
dst.resize(dst_fmt.h * (dst_stride_y + dst_stride_uv / 2), 0);
let dst_y = dst.as_mut_ptr();
let dst_uv = dst[dst_fmt.u..].as_mut_ptr();
let f = if captured_pixfmt == crate::Pixfmt::BGRA {
ARGBToNV12
} else {
ABGRToNV12
};
call_yuv!(f(
src.as_ptr(),
src_stride[0] as _,
dst_y,
dst_stride_y as _,
dst_uv,
dst_stride_uv as _,
dst_fmt.w as _,
dst_fmt.h as _,
));
}
(crate::Pixfmt::BGRA, crate::Pixfmt::I444) | (crate::Pixfmt::RGBA, crate::Pixfmt::I444) => {
let dst_stride_y = dst_fmt.stride[0];
let dst_stride_u = dst_fmt.stride[1];
let dst_stride_v = dst_fmt.stride[2];
dst.resize(dst_fmt.h * (dst_stride_y + dst_stride_u + dst_stride_v), 0);
let dst_y = dst.as_mut_ptr();
let dst_u = dst[dst_fmt.u..].as_mut_ptr();
let dst_v = dst[dst_fmt.v..].as_mut_ptr();
let src = if captured_pixfmt == crate::Pixfmt::BGRA {
src
} else {
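// assumption: libyuv has no direct ABGR -> I444 path, so swizzle to ARGB first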
mid_data.resize(src.len(), 0);
call_yuv!(ABGRToARGB(
src.as_ptr(),
src_stride[0] as _,
mid_data.as_mut_ptr(),
src_stride[0] as _,
dst_fmt.w as _,
dst_fmt.h as _,
));
mid_data
};
call_yuv!(ARGBToI444(
src.as_ptr(),
src_stride[0] as _,
dst_y,
dst_stride_y as _,
dst_u,
dst_stride_u as _,
dst_v,
dst_stride_v as _,
dst_fmt.w as _,
dst_fmt.h as _,
));
}
_ => {
bail!(
"convert not support, {captured_pixfmt:?} -> {:?}",
dst_fmt.pixfmt
);
}
}
Ok(())
}
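
A minimal usage sketch for convert_to_yuv (assuming a capturer, an encoder,
and a pts value are already in scope, as in the benchmark example earlier);
mid_data is only used as scratch space on the RGBA -> I444 path:

let mut yuv = Vec::new();
let mut mid_data = Vec::new();
if let Ok(frame) = capturer.frame(std::time::Duration::from_millis(30)) {
    // ask the encoder for its preferred layout, then convert the raw frame
    convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data)?;
    for ref f in encoder.encode(pts, &yuv, STRIDE_ALIGN)? {
        // hand f.data to the network / muxer here
    }
}
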


@ -1,10 +1,9 @@
use crate::{common::TraitCapturer, dxgi};
use crate::{common::TraitCapturer, dxgi, Pixfmt};
use std::{
io::{
self,
ErrorKind::{NotFound, TimedOut, WouldBlock},
},
ops,
time::Duration,
};
@ -15,10 +14,10 @@ pub struct Capturer {
}
impl Capturer {
pub fn new(display: Display, yuv: bool) -> io::Result<Capturer> {
pub fn new(display: Display) -> io::Result<Capturer> {
let width = display.width();
let height = display.height();
let inner = dxgi::Capturer::new(display.0, yuv)?;
let inner = dxgi::Capturer::new(display.0)?;
Ok(Capturer {
inner,
width,
@ -40,13 +39,9 @@ impl Capturer {
}
impl TraitCapturer for Capturer {
fn set_use_yuv(&mut self, use_yuv: bool) {
self.inner.set_use_yuv(use_yuv);
}
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
match self.inner.frame(timeout.as_millis() as _) {
Ok(frame) => Ok(Frame(frame)),
Ok(frame) => Ok(Frame::new(frame, self.height)),
Err(ref error) if error.kind() == TimedOut => Err(WouldBlock.into()),
Err(error) => Err(error),
}
@ -61,12 +56,31 @@ impl TraitCapturer for Capturer {
}
}
pub struct Frame<'a>(pub &'a [u8]);
pub struct Frame<'a> {
data: &'a [u8],
stride: Vec<usize>,
}
impl<'a> ops::Deref for Frame<'a> {
type Target = [u8];
fn deref(&self) -> &[u8] {
self.0
impl<'a> Frame<'a> {
pub fn new(data: &'a [u8], h: usize) -> Self {
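// DXGI returns one packed BGRA buffer; derive the row stride from its length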
let stride = data.len() / h;
let mut v = Vec::new();
v.push(stride);
Frame { data, stride: v }
}
}
impl<'a> crate::TraitFrame for Frame<'a> {
fn data(&self) -> &[u8] {
self.data
}
fn stride(&self) -> Vec<usize> {
self.stride.clone()
}
fn pixfmt(&self) -> Pixfmt {
Pixfmt::BGRA
}
}
@ -134,9 +148,9 @@ impl CapturerMag {
dxgi::mag::CapturerMag::is_supported()
}
pub fn new(origin: (i32, i32), width: usize, height: usize, use_yuv: bool) -> io::Result<Self> {
pub fn new(origin: (i32, i32), width: usize, height: usize) -> io::Result<Self> {
Ok(CapturerMag {
inner: dxgi::mag::CapturerMag::new(origin, width, height, use_yuv)?,
inner: dxgi::mag::CapturerMag::new(origin, width, height)?,
data: Vec::new(),
})
}
@ -151,13 +165,9 @@ impl CapturerMag {
}
impl TraitCapturer for CapturerMag {
fn set_use_yuv(&mut self, use_yuv: bool) {
self.inner.set_use_yuv(use_yuv)
}
fn frame<'a>(&'a mut self, _timeout_ms: Duration) -> io::Result<Frame<'a>> {
self.inner.frame(&mut self.data)?;
Ok(Frame(&self.data))
Ok(Frame::new(&self.data, self.inner.get_rect().2))
}
fn is_gdi(&self) -> bool {


@ -1,6 +1,6 @@
use crate::{
codec::{base_bitrate, codec_thread_num, EncoderApi, EncoderCfg},
hw, ImageFormat, ImageRgb, HW_STRIDE_ALIGN,
hw, ImageFormat, ImageRgb, Pixfmt, HW_STRIDE_ALIGN,
};
use hbb_common::{
allow_err,
@ -31,7 +31,6 @@ const DEFAULT_RC: RateControl = RC_DEFAULT;
pub struct HwEncoder {
encoder: Encoder,
yuv: Vec<u8>,
pub format: DataFormat,
pub pixfmt: AVPixelFormat,
width: u32,
@ -40,7 +39,7 @@ pub struct HwEncoder {
}
impl EncoderApi for HwEncoder {
fn new(cfg: EncoderCfg) -> ResultType<Self>
fn new(cfg: EncoderCfg, _i444: bool) -> ResultType<Self>
where
Self: Sized,
{
@ -78,7 +77,6 @@ impl EncoderApi for HwEncoder {
match Encoder::new(ctx.clone()) {
Ok(encoder) => Ok(HwEncoder {
encoder,
yuv: vec![],
format,
pixfmt: ctx.pixfmt,
width: ctx.width as _,
@ -118,8 +116,31 @@ impl EncoderApi for HwEncoder {
}
}
fn use_yuv(&self) -> bool {
false
fn yuvfmt(&self) -> crate::EncodeYuvFormat {
let pixfmt = if self.pixfmt == AVPixelFormat::AV_PIX_FMT_NV12 {
Pixfmt::NV12
} else {
Pixfmt::I420
};
let stride = self
.encoder
.linesize
.clone()
.drain(..)
.map(|i| i as usize)
.collect();
crate::EncodeYuvFormat {
pixfmt,
w: self.encoder.ctx.width as _,
h: self.encoder.ctx.height as _,
stride,
u: self.encoder.offset[0] as _,
v: if pixfmt == Pixfmt::NV12 {
0
} else {
self.encoder.offset[1] as _
},
}
}
fn set_quality(&mut self, quality: crate::codec::Quality) -> ResultType<()> {
@ -145,29 +166,8 @@ impl HwEncoder {
})
}
pub fn encode(&mut self, bgra: &[u8]) -> ResultType<Vec<EncodeFrame>> {
match self.pixfmt {
AVPixelFormat::AV_PIX_FMT_YUV420P => hw::hw_bgra_to_i420(
self.encoder.ctx.width as _,
self.encoder.ctx.height as _,
&self.encoder.linesize,
&self.encoder.offset,
self.encoder.length,
bgra,
&mut self.yuv,
),
AVPixelFormat::AV_PIX_FMT_NV12 => hw::hw_bgra_to_nv12(
self.encoder.ctx.width as _,
self.encoder.ctx.height as _,
&self.encoder.linesize,
&self.encoder.offset,
self.encoder.length,
bgra,
&mut self.yuv,
),
}
match self.encoder.encode(&self.yuv) {
pub fn encode(&mut self, yuv: &[u8]) -> ResultType<Vec<EncodeFrame>> {
match self.encoder.encode(yuv) {
Ok(v) => {
let mut data = Vec::<EncodeFrame>::new();
data.append(v);
@ -245,7 +245,7 @@ impl HwDecoder {
pub fn decode(&mut self, data: &[u8]) -> ResultType<Vec<HwDecoderImage>> {
match self.decoder.decode(data) {
Ok(v) => Ok(v.iter().map(|f| HwDecoderImage { frame: f }).collect()),
Err(_) => Ok(vec![]),
Err(e) => Err(anyhow!(e)),
}
}
}
@ -274,7 +274,7 @@ impl HwDecoderImage<'_> {
&mut rgb.raw as _,
i420,
HW_STRIDE_ALIGN,
),
)?,
AVPixelFormat::AV_PIX_FMT_YUV420P => {
hw::hw_i420_to(
rgb.fmt(),
@ -287,10 +287,10 @@ impl HwDecoderImage<'_> {
frame.linesize[1] as _,
frame.linesize[2] as _,
&mut rgb.raw as _,
);
return Ok(());
)?;
}
}
Ok(())
}
pub fn bgra(&self, bgra: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {

View File

@ -11,10 +11,10 @@ pub enum Capturer {
}
impl Capturer {
pub fn new(display: Display, yuv: bool) -> io::Result<Capturer> {
pub fn new(display: Display) -> io::Result<Capturer> {
Ok(match display {
Display::X11(d) => Capturer::X11(x11::Capturer::new(d, yuv)?),
Display::WAYLAND(d) => Capturer::WAYLAND(wayland::Capturer::new(d, yuv)?),
Display::X11(d) => Capturer::X11(x11::Capturer::new(d)?),
Display::WAYLAND(d) => Capturer::WAYLAND(wayland::Capturer::new(d)?),
})
}
@ -34,13 +34,6 @@ impl Capturer {
}
impl TraitCapturer for Capturer {
fn set_use_yuv(&mut self, use_yuv: bool) {
match self {
Capturer::X11(d) => d.set_use_yuv(use_yuv),
Capturer::WAYLAND(d) => d.set_use_yuv(use_yuv),
}
}
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
match self {
Capturer::X11(d) => d.frame(timeout),

View File

@ -1,5 +1,8 @@
pub use self::vpxcodec::*;
use hbb_common::message_proto::{video_frame, VideoFrame};
use hbb_common::{
log,
message_proto::{video_frame, Chroma, VideoFrame},
};
use std::slice;
cfg_if! {
@ -96,8 +99,6 @@ pub fn would_block_if_equal(old: &mut Vec<u8>, b: &[u8]) -> std::io::Result<()>
}
pub trait TraitCapturer {
fn set_use_yuv(&mut self, use_yuv: bool);
// Not supported on iOS.
#[cfg(not(any(target_os = "ios")))]
fn frame<'a>(&'a mut self, timeout: std::time::Duration) -> std::io::Result<Frame<'a>>;
@ -108,6 +109,31 @@ pub trait TraitCapturer {
fn set_gdi(&mut self) -> bool;
}
pub trait TraitFrame {
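// A captured frame: raw pixel data plus per-plane byte strides and the pixel format the data is stored in.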
fn data(&self) -> &[u8];
fn stride(&self) -> Vec<usize>;
fn pixfmt(&self) -> Pixfmt;
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Pixfmt {
BGRA,
RGBA,
I420,
NV12,
I444,
}
pub struct EncodeYuvFormat {
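// Input layout an encoder expects: frame dimensions, per-plane strides,
// and byte offsets of the chroma planes within one frame buffer
// (v stays 0 for NV12, whose chroma is a single interleaved UV plane).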
pub pixfmt: Pixfmt,
pub w: usize,
pub h: usize,
pub stride: Vec<usize>,
pub u: usize,
pub v: usize,
}
#[cfg(x11)]
#[inline]
pub fn is_x11() -> bool {
@ -260,6 +286,7 @@ pub trait GoogleImage {
fn height(&self) -> usize;
fn stride(&self) -> Vec<i32>;
fn planes(&self) -> Vec<*mut u8>;
fn chroma(&self) -> Chroma;
fn get_bytes_per_row(w: usize, fmt: ImageFormat, stride: usize) -> usize {
let bytes_per_pixel = match fmt {
ImageFormat::Raw => 3,
@ -278,8 +305,8 @@ pub trait GoogleImage {
let stride = self.stride();
let planes = self.planes();
unsafe {
match rgb.fmt() {
ImageFormat::Raw => {
match (self.chroma(), rgb.fmt()) {
(Chroma::I420, ImageFormat::Raw) => {
super::I420ToRAW(
planes[0],
stride[0],
@ -293,7 +320,7 @@ pub trait GoogleImage {
self.height() as _,
);
}
ImageFormat::ARGB => {
(Chroma::I420, ImageFormat::ARGB) => {
super::I420ToARGB(
planes[0],
stride[0],
@ -307,7 +334,7 @@ pub trait GoogleImage {
self.height() as _,
);
}
ImageFormat::ABGR => {
(Chroma::I420, ImageFormat::ABGR) => {
super::I420ToABGR(
planes[0],
stride[0],
@ -321,6 +348,36 @@ pub trait GoogleImage {
self.height() as _,
);
}
(Chroma::I444, ImageFormat::ARGB) => {
super::I444ToARGB(
planes[0],
stride[0],
planes[1],
stride[1],
planes[2],
stride[2],
rgb.raw.as_mut_ptr(),
bytes_per_row as _,
self.width() as _,
self.height() as _,
);
}
(Chroma::I444, ImageFormat::ABGR) => {
super::I444ToABGR(
planes[0],
stride[0],
planes[1],
stride[1],
planes[2],
stride[2],
rgb.raw.as_mut_ptr(),
bytes_per_row as _,
self.width() as _,
self.height() as _,
);
}
// (Chroma::I444, ImageFormat::Raw): newer libyuv versions have I444ToRAW
_ => log::error!("unsupported chroma: {:?}", self.chroma()),
}
}
}
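
The two new I444 match arms above are the decode-side counterpart of this patch: 4:2:0 stores one chroma sample per 2x2 pixel block, while 4:4:4 keeps chroma at full resolution. A minimal standalone sketch of what that means for plane sizes (plane_sizes is a hypothetical helper, not part of this change):

fn plane_sizes(w: usize, h: usize, i444: bool) -> (usize, usize, usize) {
    // Y is always full resolution; U and V are full size for 4:4:4,
    // quarter size (half width x half height) for 4:2:0.
    let luma = w * h;
    let chroma = if i444 { w * h } else { (w / 2) * (h / 2) };
    (luma, chroma, chroma)
}

fn main() {
    // 1920x1080 at 8 bits: I420 totals 12 bits/pixel, I444 totals 24.
    assert_eq!(plane_sizes(1920, 1080, false), (2_073_600, 518_400, 518_400));
    assert_eq!(plane_sizes(1920, 1080, true), (2_073_600, 2_073_600, 2_073_600));
}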

View File

@ -1,18 +1,16 @@
use crate::quartz;
use crate::{quartz, Pixfmt};
use std::marker::PhantomData;
use std::sync::{Arc, Mutex, TryLockError};
use std::{io, mem, ops};
use std::{io, mem};
pub struct Capturer {
inner: quartz::Capturer,
frame: Arc<Mutex<Option<quartz::Frame>>>,
use_yuv: bool,
i420: Vec<u8>,
saved_raw_data: Vec<u8>, // for faster compare and copy
}
impl Capturer {
pub fn new(display: Display, use_yuv: bool) -> io::Result<Capturer> {
pub fn new(display: Display) -> io::Result<Capturer> {
let frame = Arc::new(Mutex::new(None));
let f = frame.clone();
@ -20,11 +18,7 @@ impl Capturer {
display.0,
display.width(),
display.height(),
if use_yuv {
quartz::PixelFormat::YCbCr420Video
} else {
quartz::PixelFormat::Argb8888
},
quartz::PixelFormat::Argb8888,
Default::default(),
move |inner| {
if let Ok(mut f) = f.lock() {
@ -37,8 +31,6 @@ impl Capturer {
Ok(Capturer {
inner,
frame,
use_yuv,
i420: Vec::new(),
saved_raw_data: Vec::new(),
})
}
@ -53,10 +45,6 @@ impl Capturer {
}
impl crate::TraitCapturer for Capturer {
fn set_use_yuv(&mut self, use_yuv: bool) {
self.use_yuv = use_yuv;
}
fn frame<'a>(&'a mut self, _timeout_ms: std::time::Duration) -> io::Result<Frame<'a>> {
match self.frame.try_lock() {
Ok(mut handle) => {
@ -66,9 +54,7 @@ impl crate::TraitCapturer for Capturer {
match frame {
Some(mut frame) => {
crate::would_block_if_equal(&mut self.saved_raw_data, frame.inner())?;
if self.use_yuv {
frame.nv12_to_i420(self.width(), self.height(), &mut self.i420);
}
frame.surface_to_bgra(self.height());
Ok(Frame(frame, PhantomData))
}
@ -85,11 +71,20 @@ impl crate::TraitCapturer for Capturer {
pub struct Frame<'a>(pub quartz::Frame, PhantomData<&'a [u8]>);
impl<'a> ops::Deref for Frame<'a> {
type Target = [u8];
fn deref(&self) -> &[u8] {
impl<'a> crate::TraitFrame for Frame<'a> {
fn data(&self) -> &[u8] {
&*self.0
}
fn stride(&self) -> Vec<usize> {
let mut v = Vec::new();
v.push(self.0.stride());
v
}
fn pixfmt(&self) -> Pixfmt {
Pixfmt::BGRA
}
}
pub struct Display(quartz::Display);

View File

@ -4,11 +4,11 @@
use hbb_common::anyhow::{anyhow, Context};
use hbb_common::log;
use hbb_common::message_proto::{EncodedVideoFrame, EncodedVideoFrames, VideoFrame};
use hbb_common::message_proto::{Chroma, EncodedVideoFrame, EncodedVideoFrames, VideoFrame};
use hbb_common::ResultType;
use crate::codec::{base_bitrate, codec_thread_num, EncoderApi, Quality};
use crate::{GoogleImage, STRIDE_ALIGN};
use crate::{GoogleImage, Pixfmt, STRIDE_ALIGN};
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
use crate::{generate_call_macro, generate_call_ptr_macro, Error, Result};
@ -39,6 +39,7 @@ pub struct VpxEncoder {
width: usize,
height: usize,
id: VpxVideoCodecId,
i444: bool,
}
pub struct VpxDecoder {
@ -46,7 +47,7 @@ pub struct VpxDecoder {
}
impl EncoderApi for VpxEncoder {
fn new(cfg: crate::codec::EncoderCfg) -> ResultType<Self>
fn new(cfg: crate::codec::EncoderCfg, i444: bool) -> ResultType<Self>
where
Self: Sized,
{
@ -98,6 +99,13 @@ impl EncoderApi for VpxEncoder {
} else {
c.rc_target_bitrate = base_bitrate;
}
// https://chromium.googlesource.com/webm/libvpx/+/refs/heads/main/vp9/common/vp9_enums.h#29
// https://chromium.googlesource.com/webm/libvpx/+/refs/heads/main/vp8/vp8_cx_iface.c#282
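// Profile 0 is 8-bit 4:2:0; VP9 profile 1 adds 8-bit 4:4:4 (and 4:2:2). VP8 has no 4:4:4 profile, so it always stays on profile 0.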
c.g_profile = if i444 && config.codec == VpxVideoCodecId::VP9 {
1
} else {
0
};
/*
The VPX encoder supports two-pass encoding for rate control purposes.
@ -166,6 +174,7 @@ impl EncoderApi for VpxEncoder {
width: config.width as _,
height: config.height as _,
id: config.codec,
i444,
})
}
_ => Err(anyhow!("encoder type mismatch")),
@ -192,8 +201,36 @@ impl EncoderApi for VpxEncoder {
}
}
fn use_yuv(&self) -> bool {
true
fn yuvfmt(&self) -> crate::EncodeYuvFormat {
let mut img = Default::default();
let fmt = if self.i444 {
vpx_img_fmt::VPX_IMG_FMT_I444
} else {
vpx_img_fmt::VPX_IMG_FMT_I420
};
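// vpx_img_wrap with a dummy non-null buffer pointer (0x1) fills in plane pointers and strides without allocating; the plane "pointers" are only used below to derive byte offsets.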
unsafe {
vpx_img_wrap(
&mut img,
fmt,
self.width as _,
self.height as _,
crate::STRIDE_ALIGN as _,
0x1 as _,
);
}
let pixfmt = if self.i444 {
Pixfmt::I444
} else {
Pixfmt::I420
};
crate::EncodeYuvFormat {
pixfmt,
w: img.w as _,
h: img.h as _,
stride: img.stride.map(|s| s as usize).to_vec(),
u: img.planes[1] as usize - img.planes[0] as usize,
v: img.planes[2] as usize - img.planes[0] as usize,
}
}
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
@ -219,14 +256,20 @@ impl EncoderApi for VpxEncoder {
impl VpxEncoder {
pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
if 2 * data.len() < 3 * self.width * self.height {
let bpp = if self.i444 { 24 } else { 12 };
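// 8-bit I444 carries 3 bytes per pixel; I420's 2x2-subsampled chroma halves that to 1.5 bytes (12 bits).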
if data.len() < self.width * self.height * bpp / 8 {
return Err(Error::FailedCall("len not enough".to_string()));
}
let fmt = if self.i444 {
vpx_img_fmt::VPX_IMG_FMT_I444
} else {
vpx_img_fmt::VPX_IMG_FMT_I420
};
let mut image = Default::default();
call_vpx_ptr!(vpx_img_wrap(
&mut image,
vpx_img_fmt::VPX_IMG_FMT_I420,
fmt,
self.width as _,
self.height as _,
stride_align as _,
@ -533,6 +576,13 @@ impl GoogleImage for Image {
fn planes(&self) -> Vec<*mut u8> {
self.inner().planes.iter().map(|p| *p as *mut u8).collect()
}
fn chroma(&self) -> Chroma {
match self.inner().fmt {
vpx_img_fmt::VPX_IMG_FMT_I444 => Chroma::I444,
_ => Chroma::I420,
}
}
}
impl Drop for Image {

View File

@ -2,7 +2,7 @@ use crate::common::{x11::Frame, TraitCapturer};
use crate::wayland::{capturable::*, *};
use std::{io, sync::RwLock, time::Duration};
pub struct Capturer(Display, Box<dyn Recorder>, bool, Vec<u8>);
pub struct Capturer(Display, Box<dyn Recorder>, Vec<u8>);
static mut IS_CURSOR_EMBEDDED: Option<bool> = None;
@ -45,9 +45,9 @@ fn map_err<E: ToString>(err: E) -> io::Error {
}
impl Capturer {
pub fn new(display: Display, yuv: bool) -> io::Result<Capturer> {
pub fn new(display: Display) -> io::Result<Capturer> {
let r = display.0.recorder(false).map_err(map_err)?;
Ok(Capturer(display, r, yuv, Default::default()))
Ok(Capturer(display, r, Default::default()))
}
pub fn width(&self) -> usize {
@ -60,24 +60,10 @@ impl Capturer {
}
impl TraitCapturer for Capturer {
fn set_use_yuv(&mut self, use_yuv: bool) {
self.2 = use_yuv;
}
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
match self.1.capture(timeout.as_millis() as _).map_err(map_err)? {
PixelProvider::BGR0(w, h, x) => Ok(Frame(if self.2 {
crate::common::bgra_to_i420(w as _, h as _, &x, &mut self.3);
&self.3[..]
} else {
x
})),
PixelProvider::RGB0(w, h, x) => Ok(Frame(if self.2 {
crate::common::rgba_to_i420(w as _, h as _, &x, &mut self.3);
&self.3[..]
} else {
x
})),
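// No more capture-side I420 conversion: return the raw buffer tagged with its pixel format and let the encoder path convert.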
PixelProvider::BGR0(_w, h, x) => Ok(Frame::new(x, crate::Pixfmt::BGRA, h)),
PixelProvider::RGB0(_w, h, x) => Ok(Frame::new(x, crate::Pixfmt::RGBA, h)),
PixelProvider::NONE => Err(std::io::ErrorKind::WouldBlock.into()),
_ => Err(map_err("Invalid data")),
}

View File

@ -1,13 +1,13 @@
use crate::{common::TraitCapturer, x11};
use std::{io, ops, time::Duration};
use crate::{common::TraitCapturer, x11, TraitFrame, Pixfmt};
use std::{io, time::Duration};
pub struct Capturer(x11::Capturer);
pub const IS_CURSOR_EMBEDDED: bool = false;
impl Capturer {
pub fn new(display: Display, yuv: bool) -> io::Result<Capturer> {
x11::Capturer::new(display.0, yuv).map(Capturer)
pub fn new(display: Display) -> io::Result<Capturer> {
x11::Capturer::new(display.0).map(Capturer)
}
pub fn width(&self) -> usize {
@ -20,21 +20,37 @@ impl Capturer {
}
impl TraitCapturer for Capturer {
fn set_use_yuv(&mut self, use_yuv: bool) {
self.0.set_use_yuv(use_yuv);
}
fn frame<'a>(&'a mut self, _timeout: Duration) -> io::Result<Frame<'a>> {
Ok(Frame(self.0.frame()?))
Ok(self.0.frame()?)
}
}
pub struct Frame<'a>(pub &'a [u8]);
pub struct Frame<'a> {
pub data: &'a [u8],
pub pixfmt: Pixfmt,
pub stride: Vec<usize>,
}
impl<'a> ops::Deref for Frame<'a> {
type Target = [u8];
fn deref(&self) -> &[u8] {
self.0
impl<'a> Frame<'a> {
pub fn new(data: &'a [u8], pixfmt: Pixfmt, h: usize) -> Self {
let stride = data.len() / h;
let mut v = Vec::new();
v.push(stride);
Self { data, pixfmt, stride: v }
}
}
impl<'a> TraitFrame for Frame<'a> {
fn data(&self) -> &[u8] {
self.data
}
fn stride(&self) -> Vec<usize> {
self.stride.clone()
}
fn pixfmt(&self) -> crate::Pixfmt {
self.pixfmt
}
}
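
With every platform frame now implementing TraitFrame, downstream code can be written once against the trait instead of per-capturer Deref impls. A minimal sketch of a generic consumer, assuming the trait and Pixfmt are exported from the scrap crate as the imports elsewhere in this patch suggest (packed_len is a hypothetical helper):

use scrap::{Pixfmt, TraitFrame};

// Bytes occupied by a packed single-plane frame (BGRA/RGBA), the shape
// produced by the X11, Wayland, quartz and DXGI capturers in this patch.
fn packed_len(f: &impl TraitFrame, height: usize) -> usize {
    debug_assert!(matches!(f.pixfmt(), Pixfmt::BGRA | Pixfmt::RGBA));
    f.stride()[0] * height
}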

View File

@ -160,7 +160,7 @@ impl CapturerGDI {
stride,
self.width,
self.height,
180,
crate::RotationMode::kRotate180,
);
Ok(())
}

View File

@ -245,9 +245,6 @@ pub struct CapturerMag {
rect: RECT,
width: usize,
height: usize,
use_yuv: bool,
data: Vec<u8>,
}
impl Drop for CapturerMag {
@ -262,12 +259,7 @@ impl CapturerMag {
MagInterface::new().is_ok()
}
pub(crate) fn new(
origin: (i32, i32),
width: usize,
height: usize,
use_yuv: bool,
) -> Result<Self> {
pub(crate) fn new(origin: (i32, i32), width: usize, height: usize) -> Result<Self> {
unsafe {
let x = GetSystemMetrics(SM_XVIRTUALSCREEN);
let y = GetSystemMetrics(SM_YVIRTUALSCREEN);
@ -311,8 +303,6 @@ impl CapturerMag {
},
width,
height,
use_yuv,
data: Vec::new(),
};
unsafe {
@ -437,10 +427,6 @@ impl CapturerMag {
Ok(s)
}
pub(crate) fn set_use_yuv(&mut self, use_yuv: bool) {
self.use_yuv = use_yuv;
}
pub(crate) fn exclude(&mut self, cls: &str, name: &str) -> Result<bool> {
let name_c = CString::new(name)?;
unsafe {
@ -579,22 +565,9 @@ impl CapturerMag {
));
}
if self.use_yuv {
self.data.resize(lock.1.len(), 0);
unsafe {
std::ptr::copy_nonoverlapping(&mut lock.1[0], &mut self.data[0], self.data.len());
}
crate::common::bgra_to_i420(
self.width as usize,
self.height as usize,
&self.data,
data,
);
} else {
data.resize(lock.1.len(), 0);
unsafe {
std::ptr::copy_nonoverlapping(&mut lock.1[0], &mut data[0], data.len());
}
data.resize(lock.1.len(), 0);
unsafe {
std::ptr::copy_nonoverlapping(&mut lock.1[0], &mut data[0], data.len());
}
Ok(())
@ -651,7 +624,7 @@ mod tests {
use super::*;
#[test]
fn test() {
let mut capture_mag = CapturerMag::new((0, 0), 1920, 1080, false).unwrap();
let mut capture_mag = CapturerMag::new((0, 0), 1920, 1080).unwrap();
capture_mag.exclude("", "RustDeskPrivacyWindow").unwrap();
std::thread::sleep(std::time::Duration::from_millis(1000 * 10));
let mut data = Vec::new();

View File

@ -20,6 +20,8 @@ use winapi::{
},
};
use crate::RotationMode::*;
pub struct ComPtr<T>(*mut T);
impl<T> ComPtr<T> {
fn is_null(&self) -> bool {
@ -45,8 +47,6 @@ pub struct Capturer {
surface: ComPtr<IDXGISurface>,
width: usize,
height: usize,
use_yuv: bool,
yuv: Vec<u8>,
rotated: Vec<u8>,
gdi_capturer: Option<CapturerGDI>,
gdi_buffer: Vec<u8>,
@ -54,7 +54,7 @@ pub struct Capturer {
}
impl Capturer {
pub fn new(display: Display, use_yuv: bool) -> io::Result<Capturer> {
pub fn new(display: Display) -> io::Result<Capturer> {
let mut device = ptr::null_mut();
let mut context = ptr::null_mut();
let mut duplication = ptr::null_mut();
@ -148,8 +148,6 @@ impl Capturer {
width: display.width() as usize,
height: display.height() as usize,
display,
use_yuv,
yuv: Vec::new(),
rotated: Vec::new(),
gdi_capturer,
gdi_buffer: Vec::new(),
@ -157,10 +155,6 @@ impl Capturer {
})
}
pub fn set_use_yuv(&mut self, use_yuv: bool) {
self.use_yuv = use_yuv;
}
pub fn is_gdi(&self) -> bool {
self.gdi_capturer.is_some()
}
@ -259,10 +253,10 @@ impl Capturer {
self.unmap();
let r = self.load_frame(timeout)?;
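// Map DXGI's rotation to the RotationMode enum (libyuv-style kRotate* constants) consumed by the rotate call below.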
let rotate = match self.display.rotation() {
DXGI_MODE_ROTATION_IDENTITY | DXGI_MODE_ROTATION_UNSPECIFIED => 0,
DXGI_MODE_ROTATION_ROTATE90 => 90,
DXGI_MODE_ROTATION_ROTATE180 => 180,
DXGI_MODE_ROTATION_ROTATE270 => 270,
DXGI_MODE_ROTATION_IDENTITY | DXGI_MODE_ROTATION_UNSPECIFIED => kRotate0,
DXGI_MODE_ROTATION_ROTATE90 => kRotate90,
DXGI_MODE_ROTATION_ROTATE180 => kRotate180,
DXGI_MODE_ROTATION_ROTATE270 => kRotate270,
_ => {
return Err(io::Error::new(
io::ErrorKind::Other,
@ -270,7 +264,7 @@ impl Capturer {
));
}
};
if rotate == 0 {
if rotate == kRotate0 {
slice::from_raw_parts(r.0, r.1 as usize * self.height)
} else {
self.rotated.resize(self.width * self.height * 4, 0);
@ -279,12 +273,12 @@ impl Capturer {
r.1,
self.rotated.as_mut_ptr(),
4 * self.width as i32,
if rotate == 180 {
if rotate == kRotate180 {
self.width
} else {
self.height
} as _,
if rotate != 180 {
if rotate != kRotate180 {
self.width
} else {
self.height
@ -295,19 +289,7 @@ impl Capturer {
}
}
};
Ok({
if self.use_yuv {
crate::common::bgra_to_i420(
self.width as usize,
self.height as usize,
&result,
&mut self.yuv,
);
&self.yuv[..]
} else {
result
}
})
Ok(result)
}
}

View File

@ -5,8 +5,8 @@ use super::ffi::*;
pub struct Frame {
surface: IOSurfaceRef,
inner: &'static [u8],
i420: *mut u8,
i420_len: usize,
bgra: Vec<u8>,
bgra_stride: usize,
}
impl Frame {
@ -24,8 +24,8 @@ impl Frame {
Frame {
surface,
inner,
i420: ptr::null_mut(),
i420_len: 0,
bgra: Vec::new(),
bgra_stride: 0,
}
}
@ -34,23 +34,20 @@ impl Frame {
self.inner
}
pub fn nv12_to_i420<'a>(&'a mut self, w: usize, h: usize, i420: &'a mut Vec<u8>) {
pub fn stride(&self) -> usize {
self.bgra_stride
}
pub fn surface_to_bgra<'a>(&'a mut self, h: usize) {
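// The capturer now requests Argb8888 directly, so this just copies plane 0 of the IOSurface into a contiguous BGRA buffer; the old NV12-to-I420 conversion is gone.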
unsafe {
let plane0 = IOSurfaceGetBaseAddressOfPlane(self.surface, 0);
let stride0 = IOSurfaceGetBytesPerRowOfPlane(self.surface, 0);
let plane1 = IOSurfaceGetBaseAddressOfPlane(self.surface, 1);
let stride1 = IOSurfaceGetBytesPerRowOfPlane(self.surface, 1);
crate::common::nv12_to_i420(
self.bgra_stride = IOSurfaceGetBytesPerRowOfPlane(self.surface, 0);
self.bgra.resize(self.bgra_stride * h, 0);
std::ptr::copy_nonoverlapping(
plane0 as _,
stride0 as _,
plane1 as _,
stride1 as _,
w,
h,
i420,
self.bgra.as_mut_ptr(),
self.bgra_stride * h,
);
self.i420 = i420.as_mut_ptr() as _;
self.i420_len = i420.len();
}
}
}
@ -58,14 +55,7 @@ impl Frame {
impl ops::Deref for Frame {
type Target = [u8];
fn deref<'a>(&'a self) -> &'a [u8] {
if self.i420.is_null() {
self.inner
} else {
unsafe {
let inner = slice::from_raw_parts(self.i420 as *const u8, self.i420_len);
inner
}
}
&self.bgra
}
}

View File

@ -2,6 +2,8 @@ use std::{io, ptr, slice};
use hbb_common::libc;
use crate::Frame;
use super::ffi::*;
use super::Display;
@ -12,13 +14,11 @@ pub struct Capturer {
buffer: *const u8,
size: usize,
use_yuv: bool,
yuv: Vec<u8>,
saved_raw_data: Vec<u8>, // for faster compare and copy
}
impl Capturer {
pub fn new(display: Display, use_yuv: bool) -> io::Result<Capturer> {
pub fn new(display: Display) -> io::Result<Capturer> {
// Calculate dimensions.
let pixel_width = 4;
@ -67,17 +67,11 @@ impl Capturer {
xcbid,
buffer,
size,
use_yuv,
yuv: Vec::new(),
saved_raw_data: Vec::new(),
};
Ok(c)
}
pub fn set_use_yuv(&mut self, use_yuv: bool) {
self.use_yuv = use_yuv;
}
pub fn display(&self) -> &Display {
&self.display
}
@ -103,16 +97,13 @@ impl Capturer {
}
}
pub fn frame<'b>(&'b mut self) -> std::io::Result<&'b [u8]> {
pub fn frame<'b>(&'b mut self) -> std::io::Result<Frame> {
self.get_image();
let result = unsafe { slice::from_raw_parts(self.buffer, self.size) };
crate::would_block_if_equal(&mut self.saved_raw_data, result)?;
Ok(if self.use_yuv {
crate::common::bgra_to_i420(self.display.w(), self.display.h(), &result, &mut self.yuv);
&self.yuv[..]
} else {
result
})
Ok(Frame::new(result, crate::Pixfmt::BGRA, self.display.h()))
}
}

View File

@ -1017,10 +1017,16 @@ impl VideoHandler {
/// Handle a new video frame.
#[inline]
pub fn handle_frame(&mut self, vf: VideoFrame) -> ResultType<bool> {
pub fn handle_frame(
&mut self,
vf: VideoFrame,
chroma: &mut Option<Chroma>,
) -> ResultType<bool> {
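// chroma is an out-parameter: the decoder reports the subsampling (4:2:0 or 4:4:4) of the frame it just decoded.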
match &vf.union {
Some(frame) => {
let res = self.decoder.handle_video_frame(frame, &mut self.rgb);
let res = self
.decoder
.handle_video_frame(frame, &mut self.rgb, chroma);
if self.record {
self.recorder
.lock()
@ -1855,6 +1861,7 @@ pub fn start_video_audio_threads<F, T>(
MediaSender,
Arc<RwLock<HashMap<usize, ArrayQueue<VideoFrame>>>>,
Arc<RwLock<HashMap<usize, usize>>>,
Arc<RwLock<Option<Chroma>>>,
)
where
F: 'static + FnMut(usize, &mut scrap::ImageRgb) + Send,
@ -1866,6 +1873,9 @@ where
let mut video_callback = video_callback;
let fps_map = Arc::new(RwLock::new(HashMap::new()));
let decode_fps_map = fps_map.clone();
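// Shared chroma slot: written by the decode thread below, read by Remote when it reports quality status.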
let chroma = Arc::new(RwLock::new(None));
let chroma_cloned = chroma.clone();
let mut last_chroma = None;
std::thread::spawn(move || {
#[cfg(windows)]
@ -1911,10 +1921,17 @@ where
}
}
if let Some(handler_controller) = handler_controller_map.get_mut(display) {
match handler_controller.handler.handle_frame(vf) {
let mut tmp_chroma = None;
match handler_controller.handler.handle_frame(vf, &mut tmp_chroma) {
Ok(true) => {
video_callback(display, &mut handler_controller.handler.rgb);
// Publish the chroma value to the shared slot only when it actually changes.
if tmp_chroma.is_some() && last_chroma != tmp_chroma {
last_chroma = tmp_chroma;
*chroma.write().unwrap() = tmp_chroma;
}
// fps calculation
// The first frame will be very slow
if handler_controller.skip_beginning < 5 {
@ -1992,6 +2009,7 @@ where
audio_sender,
video_queue_map_cloned,
decode_fps_map,
chroma_cloned,
);
}

View File

@ -1,9 +1,9 @@
use std::collections::HashMap;
use hbb_common::{
get_time,
message_proto::{Message, VoiceCallRequest, VoiceCallResponse},
};
use scrap::CodecFormat;
use std::collections::HashMap;
#[derive(Debug, Default)]
pub struct QualityStatus {
@ -12,6 +12,7 @@ pub struct QualityStatus {
pub delay: Option<i32>,
pub target_bitrate: Option<i32>,
pub codec_format: Option<CodecFormat>,
pub chroma: Option<String>,
}
#[inline]

View File

@ -74,6 +74,7 @@ pub struct Remote<T: InvokeUiSession> {
elevation_requested: bool,
fps_control_map: HashMap<usize, FpsControl>,
decode_fps_map: Arc<RwLock<HashMap<usize, usize>>>,
chroma: Arc<RwLock<Option<Chroma>>>,
}
impl<T: InvokeUiSession> Remote<T> {
@ -86,6 +87,7 @@ impl<T: InvokeUiSession> Remote<T> {
sender: mpsc::UnboundedSender<Data>,
frame_count_map: Arc<RwLock<HashMap<usize, usize>>>,
decode_fps: Arc<RwLock<HashMap<usize, usize>>>,
chroma: Arc<RwLock<Option<Chroma>>>,
) -> Self {
Self {
handler,
@ -111,6 +113,7 @@ impl<T: InvokeUiSession> Remote<T> {
elevation_requested: false,
fps_control_map: Default::default(),
decode_fps_map: decode_fps,
chroma,
}
}
@ -247,9 +250,17 @@ impl<T: InvokeUiSession> Remote<T> {
// Correcting the inaccuracy of status_timer
(k.clone(), (*v as i32) * 1000 / elapsed as i32)
}).collect::<HashMap<usize, i32>>();
let chroma = self.chroma.read().unwrap().clone();
let chroma = match chroma {
Some(Chroma::I444) => "4:4:4",
Some(Chroma::I420) => "4:2:0",
None => "-",
};
let chroma = Some(chroma.to_string());
self.handler.update_quality_status(QualityStatus {
speed: Some(speed),
fps,
chroma,
..Default::default()
});
}

View File

@ -471,6 +471,7 @@ impl InvokeUiSession for FlutterHandler {
"codec_format",
&status.codec_format.map_or(NULL, |it| it.to_string()),
),
("chroma", &status.chroma.map_or(NULL, |it| it.to_string())),
],
);
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", "列表"),
("Virtual display", "虚拟显示器"),
("Plug out all", "拔出所有"),
("True color(4:4:4)", "真彩模式(4:4:4)"),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", "Seznam"),
("Virtual display", "Virtuální obrazovka"),
("Plug out all", "Odpojit všechny"),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", "Liste"),
("Virtual display", "Virtueller Bildschirm"),
("Plug out all", "Alle ausschalten"),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -565,13 +565,13 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("Open in new window", "Abrir en una nueva ventana"),
("Show displays as individual windows", "Mostrar pantallas como ventanas individuales"),
("Use all my displays for the remote session", "Usar todas mis pantallas para la sesión remota"),
("selinux_tip", "SELinux está activado en tu dispositivo, lo que puede hacer que RustDesk no se ejecute correctamente como lado controlado."),
("selinux_tip", ""),
("Change view", ""),
("Big tiles", ""),
("Small tiles", ""),
("List", ""),
("selinux_tip", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", "Tampilan virtual"),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", "Elenco"),
("Virtual display", "Scehrmo virtuale"),
("Plug out all", "Scollega tutto"),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", "Saraksts"),
("Virtual display", "Virtuālais displejs"),
("Plug out all", "Atvienot visu"),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", "Lista"),
("Virtual display", "Witualne ekrany"),
("Plug out all", "Odłącz wszystko"),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", "Список"),
("Virtual display", "Виртуальный дисплей"),
("Plug out all", "Отключить все"),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color(4:4:4)", ""),
].iter().cloned().collect();
}

View File

@ -126,7 +126,7 @@ pub fn is_can_screen_recording(prompt: bool) -> bool {
if !can_record_screen && prompt {
use scrap::{Capturer, Display};
if let Ok(d) = Display::primary() {
Capturer::new(d, true).ok();
Capturer::new(d).ok();
}
}
can_record_screen

View File

@ -1214,7 +1214,7 @@ impl Connection {
fn on_remote_authorized() {
use std::sync::Once;
static ONCE: Once = Once::new();
static _ONCE: Once = Once::new();
#[cfg(any(target_os = "windows", target_os = "linux"))]
if !Config::get_option("allow-remove-wallpaper").is_empty() {
// multi connections set once
@ -1223,7 +1223,7 @@ impl Connection {
match crate::platform::WallPaperRemover::new() {
Ok(remover) => {
*wallpaper = Some(remover);
ONCE.call_once(|| {
_ONCE.call_once(|| {
shutdown_hooks::add_shutdown_hook(shutdown_hook);
});
}

View File

@ -8,7 +8,7 @@ use hbb_common::{
tokio::{self, sync::mpsc},
ResultType,
};
use scrap::{Capturer, Frame, TraitCapturer};
use scrap::{Capturer, Frame, TraitCapturer, TraitFrame};
use shared_memory::*;
use std::{
mem::size_of,
@ -300,7 +300,6 @@ pub mod server {
fn run_capture(shmem: Arc<SharedMemory>) {
let mut c = None;
let mut last_current_display = usize::MAX;
let mut last_use_yuv = false;
let mut last_timeout_ms: i32 = 33;
let mut spf = Duration::from_millis(last_timeout_ms as _);
let mut first_frame_captured = false;
@ -316,14 +315,7 @@ pub mod server {
let para = para_ptr as *const CapturerPara;
let recreate = (*para).recreate;
let current_display = (*para).current_display;
let use_yuv = (*para).use_yuv;
let use_yuv_set = (*para).use_yuv_set;
let timeout_ms = (*para).timeout_ms;
if !use_yuv_set {
c = None;
std::thread::sleep(spf);
continue;
}
if c.is_none() {
let Ok(mut displays) = display_service::try_get_displays() else {
log::error!("Failed to get displays");
@ -338,11 +330,10 @@ pub mod server {
let display = displays.remove(current_display);
display_width = display.width();
display_height = display.height();
match Capturer::new(display, use_yuv) {
match Capturer::new(display) {
Ok(mut v) => {
c = {
last_current_display = current_display;
last_use_yuv = use_yuv;
first_frame_captured = false;
if dxgi_failed_times > MAX_DXGI_FAIL_TIME {
dxgi_failed_times = 0;
@ -353,8 +344,6 @@ pub mod server {
CapturerPara {
recreate: false,
current_display: (*para).current_display,
use_yuv: (*para).use_yuv,
use_yuv_set: (*para).use_yuv_set,
timeout_ms: (*para).timeout_ms,
},
);
@ -368,16 +357,11 @@ pub mod server {
}
}
} else {
if recreate
|| current_display != last_current_display
|| use_yuv != last_use_yuv
{
if recreate || current_display != last_current_display {
log::info!(
"create capturer, display:{}->{}, use_yuv:{}->{}",
"create capturer, display:{}->{}",
last_current_display,
current_display,
last_use_yuv,
use_yuv
);
c = None;
continue;
@ -401,12 +385,12 @@ pub mod server {
utils::set_frame_info(
&shmem,
FrameInfo {
length: f.0.len(),
length: f.data().len(),
width: display_width,
height: display_height,
},
);
shmem.write(ADDR_CAPTURE_FRAME, f.0);
shmem.write(ADDR_CAPTURE_FRAME, f.data());
shmem.write(ADDR_CAPTURE_WOULDBLOCK, &utils::i32_to_vec(TRUE));
utils::increase_counter(shmem.as_ptr().add(ADDR_CAPTURE_FRAME_COUNTER));
first_frame_captured = true;
@ -651,7 +635,7 @@ pub mod client {
}
impl CapturerPortable {
pub fn new(current_display: usize, use_yuv: bool) -> Self
pub fn new(current_display: usize) -> Self
where
Self: Sized,
{
@ -665,8 +649,6 @@ pub mod client {
CapturerPara {
recreate: true,
current_display,
use_yuv,
use_yuv_set: false,
timeout_ms: 33,
},
);
@ -684,26 +666,6 @@ pub mod client {
}
impl TraitCapturer for CapturerPortable {
fn set_use_yuv(&mut self, use_yuv: bool) {
let mut option = SHMEM.lock().unwrap();
if let Some(shmem) = option.as_mut() {
unsafe {
let para_ptr = shmem.as_ptr().add(ADDR_CAPTURER_PARA);
let para = para_ptr as *const CapturerPara;
utils::set_para(
shmem,
CapturerPara {
recreate: (*para).recreate,
current_display: (*para).current_display,
use_yuv,
use_yuv_set: true,
timeout_ms: (*para).timeout_ms,
},
);
}
}
}
fn frame<'a>(&'a mut self, timeout: Duration) -> std::io::Result<Frame<'a>> {
let mut lock = SHMEM.lock().unwrap();
let shmem = lock.as_mut().ok_or(std::io::Error::new(
@ -720,8 +682,6 @@ pub mod client {
CapturerPara {
recreate: (*para).recreate,
current_display: (*para).current_display,
use_yuv: (*para).use_yuv,
use_yuv_set: (*para).use_yuv_set,
timeout_ms: timeout.as_millis() as _,
},
);
@ -744,7 +704,7 @@ pub mod client {
}
let frame_ptr = base.add(ADDR_CAPTURE_FRAME);
let data = slice::from_raw_parts(frame_ptr, (*frame_info).length);
Ok(Frame(data))
Ok(Frame::new(data, self.height))
} else {
let ptr = base.add(ADDR_CAPTURE_WOULDBLOCK);
let wouldblock = utils::ptr_to_i32(ptr);
@ -910,7 +870,6 @@ pub mod client {
pub fn create_capturer(
current_display: usize,
display: scrap::Display,
use_yuv: bool,
portable_service_running: bool,
) -> ResultType<Box<dyn TraitCapturer>> {
if portable_service_running != RUNNING.lock().unwrap().clone() {
@ -919,7 +878,7 @@ pub mod client {
if portable_service_running {
log::info!("Create shared memory capturer");
if current_display == *display_service::PRIMARY_DISPLAY_IDX {
return Ok(Box::new(CapturerPortable::new(current_display, use_yuv)));
return Ok(Box::new(CapturerPortable::new(current_display)));
} else {
bail!(
"Ignore capture display index: {}, the primary display index is: {}",
@ -930,7 +889,7 @@ pub mod client {
} else {
log::debug!("Create capturer dxgi|gdi");
return Ok(Box::new(
Capturer::new(display, use_yuv).with_context(|| "Failed to create capturer")?,
Capturer::new(display).with_context(|| "Failed to create capturer")?,
));
}
}
@ -981,8 +940,6 @@ pub mod client {
pub struct CapturerPara {
recreate: bool,
current_display: usize,
use_yuv: bool,
use_yuv_set: bool,
timeout_ms: i32,
}

View File

@ -42,9 +42,10 @@ use scrap::Capturer;
use scrap::{
aom::AomEncoderConfig,
codec::{Encoder, EncoderCfg, HwEncoderConfig, Quality},
convert_to_yuv,
record::{Recorder, RecorderContext},
vpxcodec::{VpxEncoderConfig, VpxVideoCodecId},
CodecName, Display, TraitCapturer,
CodecName, Display, Frame, TraitCapturer, TraitFrame,
};
#[cfg(windows)]
use std::sync::Once;
@ -171,7 +172,6 @@ pub fn new(idx: usize) -> GenericService {
fn create_capturer(
privacy_mode_id: i32,
display: Display,
use_yuv: bool,
_current: usize,
_portable_service_running: bool,
) -> ResultType<Box<dyn TraitCapturer>> {
@ -182,12 +182,7 @@ fn create_capturer(
if privacy_mode_id > 0 {
#[cfg(windows)]
{
match scrap::CapturerMag::new(
display.origin(),
display.width(),
display.height(),
use_yuv,
) {
match scrap::CapturerMag::new(display.origin(), display.width(), display.height()) {
Ok(mut c1) => {
let mut ok = false;
let check_begin = Instant::now();
@ -236,12 +231,11 @@ fn create_capturer(
return crate::portable_service::client::create_capturer(
_current,
display,
use_yuv,
_portable_service_running,
);
#[cfg(not(windows))]
return Ok(Box::new(
Capturer::new(display, use_yuv).with_context(|| "Failed to create capturer")?,
Capturer::new(display).with_context(|| "Failed to create capturer")?,
));
}
};
@ -265,7 +259,7 @@ pub fn test_create_capturer(
)
} else {
let display = displays.remove(display_idx);
match create_capturer(privacy_mode_id, display, true, display_idx, false) {
match create_capturer(privacy_mode_id, display, display_idx, false) {
Ok(_) => return "".to_owned(),
Err(e) => e,
}
@ -320,11 +314,7 @@ impl DerefMut for CapturerInfo {
}
}
fn get_capturer(
current: usize,
use_yuv: bool,
portable_service_running: bool,
) -> ResultType<CapturerInfo> {
fn get_capturer(current: usize, portable_service_running: bool) -> ResultType<CapturerInfo> {
#[cfg(target_os = "linux")]
{
if !is_x11() {
@ -382,7 +372,6 @@ fn get_capturer(
let capturer = create_capturer(
capturer_privacy_mode_id,
display,
use_yuv,
current,
portable_service_running,
)?;
@ -424,7 +413,7 @@ fn run(vs: VideoService) -> ResultType<()> {
let display_idx = vs.idx;
let sp = vs.sp;
let mut c = get_capturer(display_idx, true, last_portable_service_running)?;
let mut c = get_capturer(display_idx, last_portable_service_running)?;
let mut video_qos = VIDEO_QOS.lock().unwrap();
video_qos.refresh(None);
@ -439,11 +428,11 @@ fn run(vs: VideoService) -> ResultType<()> {
let encoder_cfg = get_encoder_config(&c, quality, last_recording);
let mut encoder;
match Encoder::new(encoder_cfg) {
let use_i444 = Encoder::use_i444(&encoder_cfg);
match Encoder::new(encoder_cfg.clone(), use_i444) {
Ok(x) => encoder = x,
Err(err) => bail!("Failed to create encoder: {}", err),
}
c.set_use_yuv(encoder.use_yuv());
VIDEO_QOS.lock().unwrap().store_bitrate(encoder.bitrate());
if sp.is_option_true(OPTION_REFRESH) {
@ -463,6 +452,8 @@ fn run(vs: VideoService) -> ResultType<()> {
#[cfg(target_os = "linux")]
let mut would_block_count = 0u32;
let mut yuv = Vec::new();
let mut mid_data = Vec::new();
while sp.ok() {
#[cfg(windows)]
@ -493,6 +484,9 @@ fn run(vs: VideoService) -> ResultType<()> {
if last_portable_service_running != crate::portable_service::client::running() {
bail!("SWITCH");
}
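// The negotiated chroma preference can change at runtime (the "True color(4:4:4)" toggle); bail so the service restarts with a matching encoder.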
if Encoder::use_i444(&encoder_cfg) != use_i444 {
bail!("SWITCH");
}
check_privacy_mode_changed(&sp, c.privacy_mode_id)?;
#[cfg(windows)]
{
@ -512,40 +506,23 @@ fn run(vs: VideoService) -> ResultType<()> {
frame_controller.reset();
#[cfg(any(target_os = "android", target_os = "ios"))]
let res = match c.frame(spf) {
Ok(frame) => {
let time = now - start;
let ms = (time.as_secs() * 1000 + time.subsec_millis() as u64) as i64;
match frame {
scrap::Frame::RAW(data) => {
if data.len() != 0 {
let send_conn_ids = handle_one_frame(
display_idx,
&sp,
data,
ms,
&mut encoder,
recorder.clone(),
)?;
frame_controller.set_send(now, send_conn_ids);
}
}
_ => {}
};
Ok(())
}
Err(err) => Err(err),
};
#[cfg(not(any(target_os = "android", target_os = "ios")))]
let res = match c.frame(spf) {
Ok(frame) => {
let time = now - start;
let ms = (time.as_secs() * 1000 + time.subsec_millis() as u64) as i64;
let send_conn_ids =
handle_one_frame(display_idx, &sp, &frame, ms, &mut encoder, recorder.clone())?;
frame_controller.set_send(now, send_conn_ids);
if frame.data().len() != 0 {
let send_conn_ids = handle_one_frame(
display_idx,
&sp,
frame,
&mut yuv,
&mut mid_data,
ms,
&mut encoder,
recorder.clone(),
)?;
frame_controller.set_send(now, send_conn_ids);
}
#[cfg(windows)]
{
try_gdi = 0;
@ -718,7 +695,9 @@ fn check_privacy_mode_changed(sp: &GenericService, privacy_mode_id: i32) -> Resu
fn handle_one_frame(
display: usize,
sp: &GenericService,
frame: &[u8],
frame: Frame,
yuv: &mut Vec<u8>,
mid_data: &mut Vec<u8>,
ms: i64,
encoder: &mut Encoder,
recorder: Arc<Mutex<Option<Recorder>>>,
@ -732,7 +711,8 @@ fn handle_one_frame(
})?;
let mut send_conn_ids: HashSet<i32> = Default::default();
if let Ok(mut vf) = encoder.encode_to_message(frame, ms) {
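// Convert the raw captured frame into the layout the encoder advertised via yuvfmt(); mid_data is reusable scratch for conversions needing an intermediate buffer.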
convert_to_yuv(&frame, encoder.yuvfmt(), yuv, mid_data)?;
if let Ok(mut vf) = encoder.encode_to_message(yuv, ms) {
vf.display = display as _;
let mut msg = Message::new();
msg.set_video_frame(vf);

View File

@ -76,12 +76,6 @@ impl TraitCapturer for CapturerPtr {
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
unsafe { (*self.0).frame(timeout) }
}
fn set_use_yuv(&mut self, use_yuv: bool) {
unsafe {
(*self.0).set_use_yuv(use_yuv);
}
}
}
struct CapDisplayInfo {
@ -192,7 +186,8 @@ pub(super) async fn check_init() -> ResultType<()> {
maxy = max_height;
let capturer = Box::into_raw(Box::new(
Capturer::new(display, true).with_context(|| "Failed to create capturer")?,
Capturer::new(display)
.with_context(|| "Failed to create capturer")?,
));
let capturer = CapturerPtr(capturer);
let cap_display_info = Box::into_raw(Box::new(CapDisplayInfo {

View File

@ -201,6 +201,7 @@ class Header: Reactor.Component {
{keyboard_enabled ? <li #lock-after-session-end .toggle-option><span>{svg_checkmark}</span>{translate('Lock after session end')}</li> : ""}
{keyboard_enabled && pi.platform == "Windows" ? <li #privacy-mode><span>{svg_checkmark}</span>{translate('Privacy mode')}</li> : ""}
{keyboard_enabled && ((is_osx && pi.platform != "Mac OS") || (!is_osx && pi.platform == "Mac OS")) ? <li #allow_swap_key .toggle-option><span>{svg_checkmark}</span>{translate('Swap control-command key')}</li> : ""}
{handler.version_cmp(pi.version, '1.2.4') >= 0 ? <li #i444><span>{svg_checkmark}</span>{translate('True color(4:4:4)')}</li> : ""}
</menu>
</popup>;
}
@ -402,6 +403,8 @@ class Header: Reactor.Component {
togglePrivacyMode(me.id);
} else if (me.id == "show-quality-monitor") {
toggleQualityMonitor(me.id);
} else if (me.id == "i444") {
toggleI444(me.id);
} else if (me.attributes.hasClass("toggle-option")) {
handler.toggle_option(me.id);
toggleMenuState();
@ -476,7 +479,7 @@ function toggleMenuState() {
for (var el in $$(menu#keyboard-options>li)) {
el.attributes.toggleClass("selected", values.indexOf(el.id) >= 0);
}
for (var id in ["show-remote-cursor", "show-quality-monitor", "disable-audio", "enable-file-transfer", "disable-clipboard", "lock-after-session-end", "allow_swap_key"]) {
for (var id in ["show-remote-cursor", "show-quality-monitor", "disable-audio", "enable-file-transfer", "disable-clipboard", "lock-after-session-end", "allow_swap_key", "i444"]) {
var el = self.select('#' + id);
if (el) {
var value = handler.get_toggle_option(id);
@ -563,6 +566,12 @@ function toggleQualityMonitor(name) {
toggleMenuState();
}
function toggleI444(name) {
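// Toggling the option alone is not enough; re-negotiate the preferred codec so the 4:4:4 change takes effect immediately.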
handler.toggle_option(name);
handler.change_prefer_codec();
toggleMenuState();
}
handler.updateBlockInputState = function(input_blocked) {
if (!input_blocked) {
handler.toggle_option("block-input");

View File

@ -131,7 +131,8 @@ impl InvokeUiSession for SciterHandler {
status.target_bitrate.map_or(Value::null(), |it| it.into()),
status
.codec_format
.map_or(Value::null(), |it| it.to_string().into())
.map_or(Value::null(), |it| it.to_string().into()),
status.chroma.map_or(Value::null(), |it| it.into())
),
);
}

View File

@ -510,17 +510,21 @@ class QualityMonitor: Reactor.Component
<div>
Codec: {qualityMonitorData[4]}
</div>
<div>
Chroma: {qualityMonitorData[5]}
</div>
</div>;
}
}
$(#quality-monitor).content(<QualityMonitor />);
handler.updateQualityStatus = function(speed, fps, delay, bitrate, codec_format) {
handler.updateQualityStatus = function(speed, fps, delay, bitrate, codec_format, chroma) {
speed ? qualityMonitorData[0] = speed:null;
fps ? qualityMonitorData[1] = fps:null;
delay ? qualityMonitorData[2] = delay:null;
bitrate ? qualityMonitorData[3] = bitrate:null;
codec_format ? qualityMonitorData[4] = codec_format:null;
chroma ? qualityMonitorData[5] = chroma:null;
qualityMonitor.update();
}

View File

@ -1527,16 +1527,17 @@ pub async fn io_loop<T: InvokeUiSession>(handler: Session<T>, round: u32) {
let frame_count_map: Arc<RwLock<HashMap<usize, usize>>> = Default::default();
let frame_count_map_cl = frame_count_map.clone();
let ui_handler = handler.ui_handler.clone();
let (video_sender, audio_sender, video_queue_map, decode_fps_map) = start_video_audio_threads(
handler.clone(),
move |display: usize, data: &mut scrap::ImageRgb| {
let mut write_lock = frame_count_map_cl.write().unwrap();
let count = write_lock.get(&display).unwrap_or(&0) + 1;
write_lock.insert(display, count);
drop(write_lock);
ui_handler.on_rgba(display, data);
},
);
let (video_sender, audio_sender, video_queue_map, decode_fps_map, chroma) =
start_video_audio_threads(
handler.clone(),
move |display: usize, data: &mut scrap::ImageRgb| {
let mut write_lock = frame_count_map_cl.write().unwrap();
let count = write_lock.get(&display).unwrap_or(&0) + 1;
write_lock.insert(display, count);
drop(write_lock);
ui_handler.on_rgba(display, data);
},
);
let mut remote = Remote::new(
handler,
@ -1547,6 +1548,7 @@ pub async fn io_loop<T: InvokeUiSession>(handler: Session<T>, round: u32) {
sender,
frame_count_map,
decode_fps_map,
chroma,
);
remote.io_loop(&key, &token, round).await;
remote.sync_jobs_status_to_local().await;