Mirror of https://github.com/rustdesk/rustdesk.git (synced 2024-12-04 20:21:35 +08:00)
Commit 12b8cbf3e0
@@ -27,45 +27,44 @@ class DraggableChatWindow extends StatelessWidget {
   @override
   Widget build(BuildContext context) {
     return isIOS
-        ? IOSDraggable (
-            position: position,
-            chatModel: chatModel,
-            width: width,
-            height: height,
-            builder: (context) {
-              return Column(
-                children: [
-                  _buildMobileAppBar(context),
-                  Expanded(
-                    child: ChatPage(chatModel: chatModel),
-                  ),
-                ],
-              );
-            },
-          )
-        : Draggable(
-            checkKeyboard: true,
-            position: position,
-            width: width,
-            height: height,
-            chatModel: chatModel,
-            builder: (context, onPanUpdate) {
-              final child =
-              Scaffold(
-                resizeToAvoidBottomInset: false,
-                appBar: CustomAppBar(
-                  onPanUpdate: onPanUpdate,
-                  appBar: isDesktop
-                      ? _buildDesktopAppBar(context)
-                      : _buildMobileAppBar(context),
-                ),
-                body: ChatPage(chatModel: chatModel),
-              );
-              return Container(
-                  decoration:
-                      BoxDecoration(border: Border.all(color: MyTheme.border)),
-                  child: child);
-            });
+        ? IOSDraggable(
+            position: position,
+            chatModel: chatModel,
+            width: width,
+            height: height,
+            builder: (context) {
+              return Column(
+                children: [
+                  _buildMobileAppBar(context),
+                  Expanded(
+                    child: ChatPage(chatModel: chatModel),
+                  ),
+                ],
+              );
+            },
+          )
+        : Draggable(
+            checkKeyboard: true,
+            position: position,
+            width: width,
+            height: height,
+            chatModel: chatModel,
+            builder: (context, onPanUpdate) {
+              final child = Scaffold(
+                resizeToAvoidBottomInset: false,
+                appBar: CustomAppBar(
+                  onPanUpdate: onPanUpdate,
+                  appBar: isDesktop
+                      ? _buildDesktopAppBar(context)
+                      : _buildMobileAppBar(context),
+                ),
+                body: ChatPage(chatModel: chatModel),
+              );
+              return Container(
+                  decoration:
+                      BoxDecoration(border: Border.all(color: MyTheme.border)),
+                  child: child);
+            });
   }

   Widget _buildMobileAppBar(BuildContext context) {
@@ -354,14 +353,14 @@ class _DraggableState extends State<Draggable> {
 }

 class IOSDraggable extends StatefulWidget {
-  const IOSDraggable({
-    Key? key,
+  const IOSDraggable(
+      {Key? key,
       this.position = Offset.zero,
       this.chatModel,
       required this.width,
       required this.height,
       required this.builder})
       : super(key: key);

   final Offset position;
   final ChatModel? chatModel;
@@ -423,7 +422,7 @@ class _IOSDraggableState extends State<IOSDraggable> {
     _lastBottomHeight = bottomHeight;
   }

   @override
   Widget build(BuildContext context) {
     checkKeyboard();
     return Stack(
@@ -439,12 +438,12 @@ class _IOSDraggableState extends State<IOSDraggable> {
           _chatModel?.setChatWindowPosition(_position);
         },
         child: Material(
-            child:
-                Container(
-              width: _width,
-              height: _height,
-              decoration: BoxDecoration(border: Border.all(color: MyTheme.border)),
+            child: Container(
+              width: _width,
+              height: _height,
+              decoration:
+                  BoxDecoration(border: Border.all(color: MyTheme.border)),
               child: widget.builder(context),
             ),
         ),
       ),
@@ -499,6 +498,7 @@ class QualityMonitor extends StatelessWidget {
                       "${qualityMonitorModel.data.targetBitrate ?? '-'}kb"),
                   _row(
                       "Codec", qualityMonitorModel.data.codecFormat ?? '-'),
+                  _row("Chroma", qualityMonitorModel.data.chroma ?? '-'),
                 ],
               ),
             )
@@ -547,5 +547,22 @@ Future<List<TToggleMenu>> toolbarDisplayToggle(
         child: Text(translate('Use all my displays for the remote session'))));
   }

+  // 444
+  final codec_format = ffi.qualityMonitorModel.data.codecFormat;
+  if (versionCmp(pi.version, "1.2.4") >= 0 &&
+      (codec_format == "AV1" || codec_format == "VP9")) {
+    final option = 'i444';
+    final value =
+        bind.sessionGetToggleOptionSync(sessionId: sessionId, arg: option);
+    v.add(TToggleMenu(
+        value: value,
+        onChanged: (value) async {
+          if (value == null) return;
+          await bind.sessionToggleOption(sessionId: sessionId, value: option);
+          bind.sessionChangePreferCodec(sessionId: sessionId);
+        },
+        child: Text(translate('True color(4:4:4)'))));
+  }
+
   return v;
 }
@@ -1320,6 +1320,7 @@ class _DisplayState extends State<_Display> {
       otherRow('Lock after session end', 'lock_after_session_end'),
       otherRow('Privacy mode', 'privacy_mode'),
       otherRow('Reverse mouse wheel', 'reverse_mouse_wheel'),
+      otherRow('True color(4:4:4)', 'i444'),
     ];
     if (useTextureRender) {
       children.add(otherRow('Show displays as individual windows',
@@ -797,6 +797,7 @@ class __DisplayPageState extends State<_DisplayPage> {
           otherRow('Lock after session end', 'lock_after_session_end'),
           otherRow('Privacy mode', 'privacy_mode'),
           otherRow('Touch mode', 'touch-mode'),
+          otherRow('True color(4:4:4)', 'i444'),
         ],
       ),
     ]),
@@ -1847,6 +1847,7 @@ class QualityMonitorData {
   String? delay;
   String? targetBitrate;
   String? codecFormat;
+  String? chroma;
 }

 class QualityMonitorModel with ChangeNotifier {
@@ -1900,6 +1901,9 @@ class QualityMonitorModel with ChangeNotifier {
         if ((evt['codec_format'] as String).isNotEmpty) {
           _data.codecFormat = evt['codec_format'];
         }
+        if ((evt['chroma'] as String).isNotEmpty) {
+          _data.chroma = evt['chroma'];
+        }
         notifyListeners();
       } catch (e) {
         //
@@ -17,6 +17,11 @@ message YUV {
   int32 stride = 2;
 }

+enum Chroma {
+  I420 = 0;
+  I444 = 1;
+}
+
 message VideoFrame {
   oneof union {
     EncodedVideoFrames vp9s = 6;
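Note (illustration, not part of the diff): a minimal sketch of how the new wire-level Chroma enum can be turned into the label the quality monitor displays; the helper name chroma_label is hypothetical, only the hbb_common::message_proto::Chroma type and its variants come from this commit.

    use hbb_common::message_proto::Chroma;

    // Map the negotiated chroma subsampling to a human-readable label.
    fn chroma_label(chroma: Chroma) -> &'static str {
        match chroma {
            Chroma::I444 => "4:4:4",
            _ => "4:2:0",
        }
    }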
@@ -83,11 +88,20 @@ message Features {
   bool privacy_mode = 1;
 }

+message CodecAbility {
+  bool vp8 = 1;
+  bool vp9 = 2;
+  bool av1 = 3;
+  bool h264 = 4;
+  bool h265 = 5;
+}
+
 message SupportedEncoding {
   bool h264 = 1;
   bool h265 = 2;
   bool vp8 = 3;
   bool av1 = 4;
+  CodecAbility i444 = 5;
 }

 message PeerInfo {
@@ -541,6 +555,8 @@ message SupportedDecoding {
   PreferCodec prefer = 4;
   int32 ability_vp8 = 5;
   int32 ability_av1 = 6;
+  CodecAbility i444 = 7;
+  Chroma prefer_chroma = 8;
 }

 message OptionMessage {
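Note (illustration, not part of the diff): with the two new fields a peer can advertise full-chroma decoding ability and its chroma preference. The construction below mirrors the field pattern used later in the codec changes of this commit; the function name i444_decoding is hypothetical.

    use hbb_common::message_proto::{Chroma, CodecAbility, SupportedDecoding};

    // Advertise VP9/AV1 decoding with I444 support and a preference for full chroma.
    fn i444_decoding() -> SupportedDecoding {
        SupportedDecoding {
            ability_vp8: 1,
            ability_vp9: 1,
            ability_av1: 1,
            i444: Some(CodecAbility {
                vp9: true,
                av1: true,
                ..Default::default()
            })
            .into(),
            prefer_chroma: Chroma::I444.into(),
            ..Default::default()
        }
    }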
@@ -1229,6 +1229,10 @@ impl PeerConfig {
             if !mp.contains_key(key) {
                 mp.insert(key.to_owned(), UserDefaultConfig::read().get(key));
             }
+            key = "i444";
+            if !mp.contains_key(key) {
+                mp.insert(key.to_owned(), UserDefaultConfig::read().get(key));
+            }
         }
     }

@@ -197,6 +197,7 @@ fn main() {
     find_package("libyuv");
     gen_vcpkg_package("libvpx", "vpx_ffi.h", "vpx_ffi.rs", "^[vV].*");
     gen_vcpkg_package("aom", "aom_ffi.h", "aom_ffi.rs", "^(aom|AOM|OBU|AV1).*");
+    gen_vcpkg_package("libyuv", "yuv_ffi.h", "yuv_ffi.rs", ".*");

     // there is problem with cfg(target_os) in build.rs, so use our workaround
     let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap();
@@ -1,13 +1,20 @@
 use docopt::Docopt;
-use hbb_common::env_logger::{init_from_env, Env, DEFAULT_FILTER_ENV};
+use hbb_common::{
+    env_logger::{init_from_env, Env, DEFAULT_FILTER_ENV},
+    log,
+};
 use scrap::{
     aom::{AomDecoder, AomEncoder, AomEncoderConfig},
     codec::{EncoderApi, EncoderCfg, Quality as Q},
-    Capturer, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig,
+    convert_to_yuv, Capturer, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig, VpxEncoder,
+    VpxEncoderConfig,
     VpxVideoCodecId::{self, *},
     STRIDE_ALIGN,
 };
-use std::{io::Write, time::Instant};
+use std::{
+    io::Write,
+    time::{Duration, Instant},
+};

 // cargo run --package scrap --example benchmark --release --features hwcodec

@@ -15,7 +22,7 @@ const USAGE: &'static str = "
 Codec benchmark.

 Usage:
-  benchmark [--count=COUNT] [--quality=QUALITY] [--hw-pixfmt=PIXFMT]
+  benchmark [--count=COUNT] [--quality=QUALITY] [--i444]
   benchmark (-h | --help)

 Options:
@@ -23,24 +30,17 @@ Options:
   --count=COUNT      Capture frame count [default: 100].
   --quality=QUALITY  Video quality [default: Balanced].
                      Valid values: Best, Balanced, Low.
-  --hw-pixfmt=PIXFMT Hardware codec pixfmt. [default: i420]
-                     Valid values: i420, nv12.
+  --i444             I444.
 ";

-#[derive(Debug, serde::Deserialize)]
+#[derive(Debug, serde::Deserialize, Clone, Copy)]
 struct Args {
     flag_count: usize,
     flag_quality: Quality,
-    flag_hw_pixfmt: Pixfmt,
+    flag_i444: bool,
 }

-#[derive(Debug, serde::Deserialize)]
-enum Pixfmt {
-    I420,
-    NV12,
-}
-
-#[derive(Debug, serde::Deserialize)]
+#[derive(Debug, serde::Deserialize, Clone, Copy)]
 enum Quality {
     Best,
     Balanced,
@@ -54,31 +54,6 @@ fn main() {
         .unwrap_or_else(|e| e.exit());
     let quality = args.flag_quality;
     let yuv_count = args.flag_count;
-    let (yuvs, width, height) = capture_yuv(yuv_count);
-    println!(
-        "benchmark {}x{} quality:{:?}k hw_pixfmt:{:?}",
-        width, height, quality, args.flag_hw_pixfmt
-    );
-    let quality = match quality {
-        Quality::Best => Q::Best,
-        Quality::Balanced => Q::Balanced,
-        Quality::Low => Q::Low,
-    };
-    [VP8, VP9].map(|c| test_vpx(c, &yuvs, width, height, quality, yuv_count));
-    test_av1(&yuvs, width, height, quality, yuv_count);
-    #[cfg(feature = "hwcodec")]
-    {
-        use hwcodec::AVPixelFormat;
-        let hw_pixfmt = match args.flag_hw_pixfmt {
-            Pixfmt::I420 => AVPixelFormat::AV_PIX_FMT_YUV420P,
-            Pixfmt::NV12 => AVPixelFormat::AV_PIX_FMT_NV12,
-        };
-        let yuvs = hw::vpx_yuv_to_hw_yuv(yuvs, width, height, hw_pixfmt);
-        hw::test(&yuvs, width, height, quality, yuv_count, hw_pixfmt);
-    }
-}
-
-fn capture_yuv(yuv_count: usize) -> (Vec<Vec<u8>>, usize, usize) {
     let mut index = 0;
     let mut displays = Display::all().unwrap();
     for i in 0..displays.len() {
@@ -88,28 +63,45 @@ fn capture_yuv(yuv_count: usize) -> (Vec<Vec<u8>>, usize, usize) {
         }
     }
     let d = displays.remove(index);
-    let mut c = Capturer::new(d, true).unwrap();
-    let mut v = vec![];
-    loop {
-        if let Ok(frame) = c.frame(std::time::Duration::from_millis(30)) {
-            v.push(frame.0.to_vec());
-            print!("\rcapture {}/{}", v.len(), yuv_count);
-            std::io::stdout().flush().ok();
-            if v.len() == yuv_count {
-                println!();
-                return (v, c.width(), c.height());
-            }
-        }
+    let mut c = Capturer::new(d).unwrap();
+    let width = c.width();
+    let height = c.height();
+
+    println!(
+        "benchmark {}x{} quality:{:?}, i444:{:?}",
+        width, height, quality, args.flag_i444
+    );
+    let quality = match quality {
+        Quality::Best => Q::Best,
+        Quality::Balanced => Q::Balanced,
+        Quality::Low => Q::Low,
+    };
+    [VP8, VP9].map(|codec| {
+        test_vpx(
+            &mut c,
+            codec,
+            width,
+            height,
+            quality,
+            yuv_count,
+            if codec == VP8 { false } else { args.flag_i444 },
+        )
+    });
+    test_av1(&mut c, width, height, quality, yuv_count, args.flag_i444);
+    #[cfg(feature = "hwcodec")]
+    {
+        hw::test(&mut c, width, height, quality, yuv_count);
     }
 }

 fn test_vpx(
+    c: &mut Capturer,
     codec_id: VpxVideoCodecId,
-    yuvs: &Vec<Vec<u8>>,
     width: usize,
     height: usize,
     quality: Q,
     yuv_count: usize,
+    i444: bool,
 ) {
     let config = EncoderCfg::VPX(VpxEncoderConfig {
         width: width as _,
@@ -118,28 +110,53 @@ fn test_vpx(
         codec: codec_id,
         keyframe_interval: None,
     });
-    let mut encoder = VpxEncoder::new(config).unwrap();
+    let mut encoder = VpxEncoder::new(config, i444).unwrap();
     let mut vpxs = vec![];
     let start = Instant::now();
     let mut size = 0;
-    for yuv in yuvs {
-        for ref frame in encoder
-            .encode(start.elapsed().as_millis() as _, yuv, STRIDE_ALIGN)
-            .unwrap()
-        {
-            size += frame.data.len();
-            vpxs.push(frame.data.to_vec());
-        }
-    }
-    for ref frame in encoder.flush().unwrap() {
-        size += frame.data.len();
-        vpxs.push(frame.data.to_vec());
+    let mut yuv = Vec::new();
+    let mut mid_data = Vec::new();
+    let mut counter = 0;
+    let mut time_sum = Duration::ZERO;
+    loop {
+        match c.frame(std::time::Duration::from_millis(30)) {
+            Ok(frame) => {
+                let tmp_timer = Instant::now();
+                convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
+                for ref frame in encoder
+                    .encode(start.elapsed().as_millis() as _, &yuv, STRIDE_ALIGN)
+                    .unwrap()
+                {
+                    size += frame.data.len();
+                    vpxs.push(frame.data.to_vec());
+                    counter += 1;
+                    print!("\r{codec_id:?} {}/{}", counter, yuv_count);
+                    std::io::stdout().flush().ok();
+                }
+                for ref frame in encoder.flush().unwrap() {
+                    size += frame.data.len();
+                    vpxs.push(frame.data.to_vec());
+                    counter += 1;
+                    print!("\r{codec_id:?} {}/{}", counter, yuv_count);
+                    std::io::stdout().flush().ok();
+                }
+                time_sum += tmp_timer.elapsed();
+            }
+            Err(e) => {
+                log::error!("{e:?}");
+            }
+        }
+        if counter >= yuv_count {
+            println!();
+            break;
+        }
     }

     assert_eq!(vpxs.len(), yuv_count);
     println!(
         "{:?} encode: {:?}, {} byte",
         codec_id,
-        start.elapsed() / yuv_count as _,
+        time_sum / yuv_count as _,
         size / yuv_count
     );

@@ -156,30 +173,58 @@ fn test_vpx(
     );
 }

-fn test_av1(yuvs: &Vec<Vec<u8>>, width: usize, height: usize, quality: Q, yuv_count: usize) {
+fn test_av1(
+    c: &mut Capturer,
+    width: usize,
+    height: usize,
+    quality: Q,
+    yuv_count: usize,
+    i444: bool,
+) {
     let config = EncoderCfg::AOM(AomEncoderConfig {
         width: width as _,
         height: height as _,
         quality,
         keyframe_interval: None,
     });
-    let mut encoder = AomEncoder::new(config).unwrap();
+    let mut encoder = AomEncoder::new(config, i444).unwrap();
     let start = Instant::now();
     let mut size = 0;
-    let mut av1s = vec![];
-    for yuv in yuvs {
-        for ref frame in encoder
-            .encode(start.elapsed().as_millis() as _, yuv, STRIDE_ALIGN)
-            .unwrap()
-        {
-            size += frame.data.len();
-            av1s.push(frame.data.to_vec());
-        }
-    }
+    let mut av1s: Vec<Vec<u8>> = vec![];
+    let mut yuv = Vec::new();
+    let mut mid_data = Vec::new();
+    let mut counter = 0;
+    let mut time_sum = Duration::ZERO;
+    loop {
+        match c.frame(std::time::Duration::from_millis(30)) {
+            Ok(frame) => {
+                let tmp_timer = Instant::now();
+                convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
+                for ref frame in encoder
+                    .encode(start.elapsed().as_millis() as _, &yuv, STRIDE_ALIGN)
+                    .unwrap()
+                {
+                    size += frame.data.len();
+                    av1s.push(frame.data.to_vec());
+                    counter += 1;
+                    print!("\rAV1 {}/{}", counter, yuv_count);
+                    std::io::stdout().flush().ok();
+                }
+                time_sum += tmp_timer.elapsed();
+            }
+            Err(e) => {
+                log::error!("{e:?}");
+            }
+        }
+        if counter >= yuv_count {
+            println!();
+            break;
+        }
+    }
     assert_eq!(av1s.len(), yuv_count);
     println!(
         "AV1 encode: {:?}, {} byte",
-        start.elapsed() / yuv_count as _,
+        time_sum / yuv_count as _,
         size / yuv_count
     );
     let mut decoder = AomDecoder::new().unwrap();
@@ -193,165 +238,101 @@ fn test_av1(yuvs: &Vec<Vec<u8>>, width: usize, height: usize, quality: Q, yuv_count: usize) {

 #[cfg(feature = "hwcodec")]
 mod hw {
-    use super::*;
-    use hwcodec::{
-        decode::{DecodeContext, Decoder},
-        encode::{EncodeContext, Encoder},
-        ffmpeg::{ffmpeg_linesize_offset_length, CodecInfo, CodecInfos},
-        AVPixelFormat,
-        Quality::*,
-        RateControl::*,
-    };
+    use hwcodec::ffmpeg::CodecInfo;
     use scrap::{
-        codec::codec_thread_num,
-        convert::{
-            hw::{hw_bgra_to_i420, hw_bgra_to_nv12},
-            i420_to_bgra,
-        },
-        HW_STRIDE_ALIGN,
+        codec::HwEncoderConfig,
+        hwcodec::{HwDecoder, HwEncoder},
     };

-    pub fn test(
-        yuvs: &Vec<Vec<u8>>,
+    use super::*;
+
+    pub fn test(c: &mut Capturer, width: usize, height: usize, quality: Q, yuv_count: usize) {
+        let best = HwEncoder::best();
+        let mut h264s = Vec::new();
+        let mut h265s = Vec::new();
+        if let Some(info) = best.h264 {
+            test_encoder(width, height, quality, info, c, yuv_count, &mut h264s);
+        }
+        if let Some(info) = best.h265 {
+            test_encoder(width, height, quality, info, c, yuv_count, &mut h265s);
+        }
+        let best = HwDecoder::best();
+        if let Some(info) = best.h264 {
+            test_decoder(info, &h264s);
+        }
+        if let Some(info) = best.h265 {
+            test_decoder(info, &h265s);
+        }
+    }
+
+    fn test_encoder(
         width: usize,
         height: usize,
         quality: Q,
+        info: CodecInfo,
+        c: &mut Capturer,
         yuv_count: usize,
-        pixfmt: AVPixelFormat,
+        h26xs: &mut Vec<Vec<u8>>,
     ) {
-        let bitrate = scrap::hwcodec::HwEncoder::convert_quality(quality);
-        let ctx = EncodeContext {
-            name: String::from(""),
-            width: width as _,
-            height: height as _,
-            pixfmt,
-            align: 0,
-            bitrate: bitrate as i32 * 1000,
-            timebase: [1, 30],
-            gop: 60,
-            quality: Quality_Default,
-            rc: RC_DEFAULT,
-            thread_count: codec_thread_num() as _,
-        };
-
-        let encoders = Encoder::available_encoders(ctx.clone());
-        println!("hw encoders: {}", encoders.len());
-        let best = CodecInfo::score(encoders.clone());
-        for info in encoders {
-            test_encoder(info.clone(), ctx.clone(), yuvs, is_best(&best, &info));
-        }
-
-        let (h264s, h265s) = prepare_h26x(best, ctx.clone(), yuvs);
-        assert!(h264s.is_empty() || h264s.len() == yuv_count);
-        assert!(h265s.is_empty() || h265s.len() == yuv_count);
-        let decoders = Decoder::available_decoders();
-        println!("hw decoders: {}", decoders.len());
-        let best = CodecInfo::score(decoders.clone());
-        for info in decoders {
-            let h26xs = if info.name.contains("h264") {
-                &h264s
-            } else {
-                &h265s
-            };
-            if h26xs.len() == yuvs.len() {
-                test_decoder(info.clone(), h26xs, is_best(&best, &info));
-            }
-        }
-    }
-
-    fn test_encoder(info: CodecInfo, ctx: EncodeContext, yuvs: &Vec<Vec<u8>>, best: bool) {
-        let mut ctx = ctx;
-        ctx.name = info.name;
-        let mut encoder = Encoder::new(ctx.clone()).unwrap();
-        let start = Instant::now();
+        let mut encoder = HwEncoder::new(
+            EncoderCfg::HW(HwEncoderConfig {
+                name: info.name.clone(),
+                width,
+                height,
+                quality,
+                keyframe_interval: None,
+            }),
+            false,
+        )
+        .unwrap();
         let mut size = 0;
-        for yuv in yuvs {
-            let frames = encoder.encode(yuv).unwrap();
-            for frame in frames {
-                size += frame.data.len();
+        let mut yuv = Vec::new();
+        let mut mid_data = Vec::new();
+        let mut counter = 0;
+        let mut time_sum = Duration::ZERO;
+        loop {
+            match c.frame(std::time::Duration::from_millis(30)) {
+                Ok(frame) => {
+                    let tmp_timer = Instant::now();
+                    convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
+                    for ref frame in encoder.encode(&yuv).unwrap() {
+                        size += frame.data.len();
+                        h26xs.push(frame.data.to_vec());
+                        counter += 1;
+                        print!("\r{:?} {}/{}", info.name, counter, yuv_count);
+                        std::io::stdout().flush().ok();
+                    }
+                    time_sum += tmp_timer.elapsed();
+                }
+                Err(e) => {
+                    log::error!("{e:?}");
+                }
+            }
+            if counter >= yuv_count {
+                println!();
+                break;
             }
         }
         println!(
-            "{}{}: {:?}, {} byte",
-            if best { "*" } else { "" },
-            ctx.name,
-            start.elapsed() / yuvs.len() as _,
-            size / yuvs.len(),
+            "{}: {:?}, {} byte",
+            info.name,
+            time_sum / yuv_count as u32,
+            size / yuv_count,
         );
     }

-    fn test_decoder(info: CodecInfo, h26xs: &Vec<Vec<u8>>, best: bool) {
-        let ctx = DecodeContext {
-            name: info.name,
-            device_type: info.hwdevice,
-            thread_count: codec_thread_num() as _,
-        };
-
-        let mut decoder = Decoder::new(ctx.clone()).unwrap();
+    fn test_decoder(info: CodecInfo, h26xs: &Vec<Vec<u8>>) {
+        let mut decoder = HwDecoder::new(info.clone()).unwrap();
         let start = Instant::now();
         let mut cnt = 0;
         for h26x in h26xs {
             let _ = decoder.decode(h26x).unwrap();
             cnt += 1;
         }
-        let device = format!("{:?}", ctx.device_type).to_lowercase();
+        let device = format!("{:?}", info.hwdevice).to_lowercase();
         let device = device.split("_").last().unwrap();
-        println!(
-            "{}{} {}: {:?}",
-            if best { "*" } else { "" },
-            ctx.name,
-            device,
-            start.elapsed() / cnt
-        );
-    }
-
-    fn prepare_h26x(
-        best: CodecInfos,
-        ctx: EncodeContext,
-        yuvs: &Vec<Vec<u8>>,
-    ) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) {
-        let f = |info: Option<CodecInfo>| {
-            let mut h26xs = vec![];
-            if let Some(info) = info {
-                let mut ctx = ctx.clone();
-                ctx.name = info.name;
-                let mut encoder = Encoder::new(ctx).unwrap();
-                for yuv in yuvs {
-                    let h26x = encoder.encode(yuv).unwrap();
-                    for frame in h26x {
-                        h26xs.push(frame.data.to_vec());
-                    }
-                }
-            }
-            h26xs
-        };
-        (f(best.h264), f(best.h265))
-    }
-
-    fn is_best(best: &CodecInfos, info: &CodecInfo) -> bool {
-        Some(info.clone()) == best.h264 || Some(info.clone()) == best.h265
-    }
-
-    pub fn vpx_yuv_to_hw_yuv(
-        yuvs: Vec<Vec<u8>>,
-        width: usize,
-        height: usize,
-        pixfmt: AVPixelFormat,
-    ) -> Vec<Vec<u8>> {
-        let yuvs = yuvs;
-        let mut bgra = vec![];
-        let mut v = vec![];
-        let (linesize, offset, length) =
-            ffmpeg_linesize_offset_length(pixfmt, width, height, HW_STRIDE_ALIGN).unwrap();
-        for mut yuv in yuvs {
-            i420_to_bgra(width, height, &yuv, &mut bgra);
-            if pixfmt == AVPixelFormat::AV_PIX_FMT_YUV420P {
-                hw_bgra_to_i420(width, height, &linesize, &offset, length, &bgra, &mut yuv);
-            } else {
-                hw_bgra_to_nv12(width, height, &linesize, &offset, length, &bgra, &mut yuv);
-            }
-            v.push(yuv);
-        }
-        v
+        println!("{} {}: {:?}", info.name, device, start.elapsed() / cnt);
     }
 }
@@ -3,7 +3,7 @@ extern crate scrap;

 use scrap::Display;
 #[cfg(windows)]
-use scrap::{i420_to_rgb, CapturerMag, TraitCapturer};
+use scrap::{CapturerMag, TraitCapturer};
 #[cfg(windows)]
 use std::fs::File;

@@ -24,6 +24,8 @@ fn get_display(i: usize) -> Display {
 fn record(i: usize) {
     use std::time::Duration;

+    use scrap::TraitFrame;
+
     for d in Display::all().unwrap() {
         println!("{:?} {} {}", d.origin(), d.width(), d.height());
     }
@@ -32,9 +34,8 @@ fn record(i: usize) {
     let (w, h) = (display.width(), display.height());

     {
-        let mut capture_mag =
-            CapturerMag::new(display.origin(), display.width(), display.height(), false)
-                .expect("Couldn't begin capture.");
+        let mut capture_mag = CapturerMag::new(display.origin(), display.width(), display.height())
+            .expect("Couldn't begin capture.");
         let wnd_cls = "";
         let wnd_name = "RustDeskPrivacyWindow";
         if false == capture_mag.exclude(wnd_cls, wnd_name).unwrap() {
@@ -43,7 +44,8 @@ fn record(i: usize) {
             println!("Filter window for cls {} name {}", wnd_cls, wnd_name);
         }

-        let frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
+        let captured_frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
+        let frame = captured_frame.data();
         println!("Capture data len: {}, Saving...", frame.len());

         let mut bitflipped = Vec::with_capacity(w * h * 4);
@@ -68,9 +70,8 @@ fn record(i: usize) {
     }

     {
-        let mut capture_mag =
-            CapturerMag::new(display.origin(), display.width(), display.height(), true)
-                .expect("Couldn't begin capture.");
+        let mut capture_mag = CapturerMag::new(display.origin(), display.width(), display.height())
+            .expect("Couldn't begin capture.");
         let wnd_cls = "";
         let wnd_title = "RustDeskPrivacyWindow";
         if false == capture_mag.exclude(wnd_cls, wnd_title).unwrap() {
@@ -79,19 +80,28 @@ fn record(i: usize) {
             println!("Filter window for cls {} title {}", wnd_cls, wnd_title);
         }

-        let buffer = capture_mag.frame(Duration::from_millis(0)).unwrap();
-        println!("Capture data len: {}, Saving...", buffer.len());
+        let frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
+        println!("Capture data len: {}, Saving...", frame.data().len());

-        let mut frame = Default::default();
-        i420_to_rgb(w, h, &buffer, &mut frame);
+        let mut raw = Vec::new();
+        unsafe {
+            scrap::ARGBToRAW(
+                frame.data().as_ptr(),
+                frame.stride()[0] as _,
+                (&mut raw).as_mut_ptr(),
+                (w * 3) as _,
+                w as _,
+                h as _,
+            )
+        };

         let mut bitflipped = Vec::with_capacity(w * h * 4);
-        let stride = frame.len() / h;
+        let stride = raw.len() / h;

         for y in 0..h {
             for x in 0..w {
                 let i = stride * y + 3 * x;
-                bitflipped.extend_from_slice(&[frame[i], frame[i + 1], frame[i + 2], 255]);
+                bitflipped.extend_from_slice(&[raw[i], raw[i + 1], raw[i + 2], 255]);
             }
         }
         let name = format!("capture_mag_{}_2.png", i);
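Note (illustration, not part of the diff): the ARGBToRAW call above converts 4-byte-per-pixel ARGB rows (source stride frame.stride()[0]) into packed 3-byte RGB rows, so the destination needs w * 3 bytes per row. A minimal sketch of that sizing arithmetic, with hypothetical names:

    // Destination size for a packed RGB24 buffer of w x h pixels.
    fn rgb24_len(w: usize, h: usize) -> usize {
        w * 3 * h
    }

    fn main() {
        let (w, h) = (1920usize, 1080usize);
        let raw = vec![0u8; rgb24_len(w, h)];
        assert_eq!(raw.len(), 6_220_800);
    }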
@@ -1,5 +1,7 @@
 use std::time::Duration;

+use scrap::TraitFrame;
+
 extern crate scrap;

 fn main() {
@@ -27,16 +29,16 @@ fn main() {
         .spawn()
         .expect("This example requires ffplay.");

-    let mut capturer = Capturer::new(d, false).unwrap();
+    let mut capturer = Capturer::new(d).unwrap();
     let mut out = child.stdin.unwrap();

     loop {
         match capturer.frame(Duration::from_millis(0)) {
             Ok(frame) => {
                 // Write the frame, removing end-of-row padding.
-                let stride = frame.len() / h;
+                let stride = frame.stride()[0];
                 let rowlen = 4 * w;
-                for row in frame.chunks(stride) {
+                for row in frame.data().chunks(stride) {
                     let row = &row[..rowlen];
                     out.write_all(row).unwrap();
                 }
@@ -17,7 +17,7 @@ use scrap::codec::{EncoderApi, EncoderCfg, Quality as Q};
 use webm::mux;
 use webm::mux::Track;

-use scrap::vpxcodec as vpx_encode;
+use scrap::{convert_to_yuv, vpxcodec as vpx_encode};
 use scrap::{Capturer, Display, TraitCapturer, STRIDE_ALIGN};

 const USAGE: &'static str = "
|
|||||||
Quality::Balanced => Q::Balanced,
|
Quality::Balanced => Q::Balanced,
|
||||||
Quality::Low => Q::Low,
|
Quality::Low => Q::Low,
|
||||||
};
|
};
|
||||||
let mut vpx = vpx_encode::VpxEncoder::new(EncoderCfg::VPX(vpx_encode::VpxEncoderConfig {
|
let mut vpx = vpx_encode::VpxEncoder::new(
|
||||||
width,
|
EncoderCfg::VPX(vpx_encode::VpxEncoderConfig {
|
||||||
height,
|
width,
|
||||||
quality,
|
height,
|
||||||
codec: vpx_codec,
|
quality,
|
||||||
keyframe_interval: None,
|
codec: vpx_codec,
|
||||||
}))
|
keyframe_interval: None,
|
||||||
|
}),
|
||||||
|
false,
|
||||||
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
// Start recording.
|
// Start recording.
|
||||||
@ -136,7 +139,9 @@ fn main() -> io::Result<()> {
|
|||||||
let spf = Duration::from_nanos(1_000_000_000 / args.flag_fps);
|
let spf = Duration::from_nanos(1_000_000_000 / args.flag_fps);
|
||||||
|
|
||||||
// Capturer object is expensive, avoiding to create it frequently.
|
// Capturer object is expensive, avoiding to create it frequently.
|
||||||
let mut c = Capturer::new(d, true).unwrap();
|
let mut c = Capturer::new(d).unwrap();
|
||||||
|
let mut yuv = Vec::new();
|
||||||
|
let mut mid_data = Vec::new();
|
||||||
while !stop.load(Ordering::Acquire) {
|
while !stop.load(Ordering::Acquire) {
|
||||||
let now = Instant::now();
|
let now = Instant::now();
|
||||||
let time = now - start;
|
let time = now - start;
|
||||||
@ -147,8 +152,8 @@ fn main() -> io::Result<()> {
|
|||||||
|
|
||||||
if let Ok(frame) = c.frame(Duration::from_millis(0)) {
|
if let Ok(frame) = c.frame(Duration::from_millis(0)) {
|
||||||
let ms = time.as_secs() * 1000 + time.subsec_millis() as u64;
|
let ms = time.as_secs() * 1000 + time.subsec_millis() as u64;
|
||||||
|
convert_to_yuv(&frame, vpx.yuvfmt(), &mut yuv, &mut mid_data);
|
||||||
for frame in vpx.encode(ms as i64, &frame, STRIDE_ALIGN).unwrap() {
|
for frame in vpx.encode(ms as i64, &yuv, STRIDE_ALIGN).unwrap() {
|
||||||
vt.add_frame(frame.data, frame.pts as u64 * 1_000_000, frame.key);
|
vt.add_frame(frame.data, frame.pts as u64 * 1_000_000, frame.key);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -6,7 +6,7 @@ use std::io::ErrorKind::WouldBlock;
 use std::thread;
 use std::time::Duration;

-use scrap::{i420_to_rgb, Capturer, Display, TraitCapturer};
+use scrap::{Capturer, Display, TraitCapturer, TraitFrame};

 fn main() {
     let n = Display::all().unwrap().len();
@@ -28,14 +28,14 @@ fn record(i: usize) {
     }

     let display = get_display(i);
-    let mut capturer = Capturer::new(display, false).expect("Couldn't begin capture.");
+    let mut capturer = Capturer::new(display).expect("Couldn't begin capture.");
     let (w, h) = (capturer.width(), capturer.height());

     loop {
         // Wait until there's a frame.

-        let buffer = match capturer.frame(Duration::from_millis(0)) {
-            Ok(buffer) => buffer,
+        let frame = match capturer.frame(Duration::from_millis(0)) {
+            Ok(frame) => frame,
             Err(error) => {
                 if error.kind() == WouldBlock {
                     // Keep spinning.
@@ -46,6 +46,7 @@ fn record(i: usize) {
                 }
             }
         };
+        let buffer = frame.data();
         println!("Captured data len: {}, Saving...", buffer.len());

         // Flip the BGRA image into a RGBA image.
|
|||||||
|
|
||||||
drop(capturer);
|
drop(capturer);
|
||||||
let display = get_display(i);
|
let display = get_display(i);
|
||||||
let mut capturer = Capturer::new(display, true).expect("Couldn't begin capture.");
|
let mut capturer = Capturer::new(display).expect("Couldn't begin capture.");
|
||||||
let (w, h) = (capturer.width(), capturer.height());
|
let (w, h) = (capturer.width(), capturer.height());
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
// Wait until there's a frame.
|
// Wait until there's a frame.
|
||||||
|
|
||||||
let buffer = match capturer.frame(Duration::from_millis(0)) {
|
let frame = match capturer.frame(Duration::from_millis(0)) {
|
||||||
Ok(buffer) => buffer,
|
Ok(frame) => frame,
|
||||||
Err(error) => {
|
Err(error) => {
|
||||||
if error.kind() == WouldBlock {
|
if error.kind() == WouldBlock {
|
||||||
// Keep spinning.
|
// Keep spinning.
|
||||||
@@ -95,18 +96,28 @@ fn record(i: usize) {
                 }
             }
         };
+        let buffer = frame.data();
         println!("Captured data len: {}, Saving...", buffer.len());

-        let mut frame = Default::default();
-        i420_to_rgb(w, h, &buffer, &mut frame);
+        let mut raw = Vec::new();
+        unsafe {
+            scrap::ARGBToRAW(
+                buffer.as_ptr(),
+                frame.stride()[0] as _,
+                (&mut raw).as_mut_ptr(),
+                (w * 3) as _,
+                w as _,
+                h as _,
+            )
+        };

         let mut bitflipped = Vec::with_capacity(w * h * 4);
-        let stride = frame.len() / h;
+        let stride = raw.len() / h;

         for y in 0..h {
             for x in 0..w {
                 let i = stride * y + 3 * x;
-                bitflipped.extend_from_slice(&[frame[i], frame[i + 1], frame[i + 2], 255]);
+                bitflipped.extend_from_slice(&[raw[i], raw[i + 1], raw[i + 2], 255]);
             }
         }
         let name = format!("screenshot{}_2.png", i);
libs/scrap/src/bindings/yuv_ffi.h (new file, 6 lines)
@@ -0,0 +1,6 @@
+#include <libyuv/convert.h>
+#include <libyuv/convert_argb.h>
+#include <libyuv/convert_from.h>
+#include <libyuv/convert_from_argb.h>
+#include <libyuv/rotate.h>
+#include <libyuv/rotate_argb.h>
@@ -1,5 +1,5 @@
 use crate::android::ffi::*;
-use crate::rgba_to_i420;
+use crate::Pixfmt;
 use lazy_static::lazy_static;
 use serde_json::Value;
 use std::collections::HashMap;
@@ -12,15 +12,15 @@ lazy_static! {

 pub struct Capturer {
     display: Display,
-    bgra: Vec<u8>,
+    rgba: Vec<u8>,
     saved_raw_data: Vec<u8>, // for faster compare and copy
 }

 impl Capturer {
-    pub fn new(display: Display, _yuv: bool) -> io::Result<Capturer> {
+    pub fn new(display: Display) -> io::Result<Capturer> {
         Ok(Capturer {
             display,
-            bgra: Vec::new(),
+            rgba: Vec::new(),
             saved_raw_data: Vec::new(),
         })
     }
@@ -35,22 +35,47 @@ impl Capturer {
 }

 impl crate::TraitCapturer for Capturer {
-    fn set_use_yuv(&mut self, _use_yuv: bool) {}
-
     fn frame<'a>(&'a mut self, _timeout: Duration) -> io::Result<Frame<'a>> {
         if let Some(buf) = get_video_raw() {
             crate::would_block_if_equal(&mut self.saved_raw_data, buf)?;
-            rgba_to_i420(self.width(), self.height(), buf, &mut self.bgra);
-            Ok(Frame::RAW(&self.bgra))
+            // Is it safe to directly return buf without copy?
+            self.rgba.resize(buf.len(), 0);
+            unsafe {
+                std::ptr::copy_nonoverlapping(buf.as_ptr(), self.rgba.as_mut_ptr(), buf.len())
+            };
+            Ok(Frame::new(&self.rgba, self.height()))
         } else {
             return Err(io::ErrorKind::WouldBlock.into());
         }
     }
 }

-pub enum Frame<'a> {
-    RAW(&'a [u8]),
-    Empty,
+pub struct Frame<'a> {
+    pub data: &'a [u8],
+    pub stride: Vec<usize>,
+}
+
+impl<'a> Frame<'a> {
+    pub fn new(data: &'a [u8], h: usize) -> Self {
+        let stride = data.len() / h;
+        let mut v = Vec::new();
+        v.push(stride);
+        Frame { data, stride: v }
+    }
+}
+
+impl<'a> crate::TraitFrame for Frame<'a> {
+    fn data(&self) -> &[u8] {
+        self.data
+    }
+
+    fn stride(&self) -> Vec<usize> {
+        self.stride.clone()
+    }
+
+    fn pixfmt(&self) -> Pixfmt {
+        Pixfmt::RGBA
+    }
 }

 pub struct Display {
@@ -7,13 +7,14 @@
 include!(concat!(env!("OUT_DIR"), "/aom_ffi.rs"));

 use crate::codec::{base_bitrate, codec_thread_num, Quality};
+use crate::Pixfmt;
 use crate::{codec::EncoderApi, EncodeFrame, STRIDE_ALIGN};
 use crate::{common::GoogleImage, generate_call_macro, generate_call_ptr_macro, Error, Result};
 use hbb_common::{
     anyhow::{anyhow, Context},
     bytes::Bytes,
     log,
-    message_proto::{EncodedVideoFrame, EncodedVideoFrames, VideoFrame},
+    message_proto::{Chroma, EncodedVideoFrame, EncodedVideoFrames, VideoFrame},
     ResultType,
 };
 use std::{ptr, slice};
@@ -52,6 +53,7 @@ pub struct AomEncoder {
     ctx: aom_codec_ctx_t,
     width: usize,
     height: usize,
+    i444: bool,
 }

 // https://webrtc.googlesource.com/src/+/refs/heads/main/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
@@ -95,6 +97,7 @@ mod webrtc {
     pub fn enc_cfg(
         i: *const aom_codec_iface,
         cfg: AomEncoderConfig,
+        i444: bool,
     ) -> ResultType<aom_codec_enc_cfg> {
         let mut c = unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
         call_aom!(aom_codec_enc_config_default(i, &mut c, kUsageProfile));
@@ -139,6 +142,9 @@ mod webrtc {
         c.g_pass = aom_enc_pass::AOM_RC_ONE_PASS; // One-pass rate control
         c.g_lag_in_frames = kLagInFrames; // No look ahead when lag equals 0.

+        // https://aomedia.googlesource.com/aom/+/refs/tags/v3.6.0/av1/common/enums.h#82
+        c.g_profile = if i444 { 1 } else { 0 };
+
         Ok(c)
     }

|
|||||||
}
|
}
|
||||||
|
|
||||||
impl EncoderApi for AomEncoder {
|
impl EncoderApi for AomEncoder {
|
||||||
fn new(cfg: crate::codec::EncoderCfg) -> ResultType<Self>
|
fn new(cfg: crate::codec::EncoderCfg, i444: bool) -> ResultType<Self>
|
||||||
where
|
where
|
||||||
Self: Sized,
|
Self: Sized,
|
||||||
{
|
{
|
||||||
match cfg {
|
match cfg {
|
||||||
crate::codec::EncoderCfg::AOM(config) => {
|
crate::codec::EncoderCfg::AOM(config) => {
|
||||||
let i = call_aom_ptr!(aom_codec_av1_cx());
|
let i = call_aom_ptr!(aom_codec_av1_cx());
|
||||||
let c = webrtc::enc_cfg(i, config)?;
|
let c = webrtc::enc_cfg(i, config, i444)?;
|
||||||
|
|
||||||
let mut ctx = Default::default();
|
let mut ctx = Default::default();
|
||||||
// Flag options: AOM_CODEC_USE_PSNR and AOM_CODEC_USE_HIGHBITDEPTH
|
// Flag options: AOM_CODEC_USE_PSNR and AOM_CODEC_USE_HIGHBITDEPTH
|
||||||
@ -234,6 +240,7 @@ impl EncoderApi for AomEncoder {
|
|||||||
ctx,
|
ctx,
|
||||||
width: config.width as _,
|
width: config.width as _,
|
||||||
height: config.height as _,
|
height: config.height as _,
|
||||||
|
i444,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
_ => Err(anyhow!("encoder type mismatch")),
|
_ => Err(anyhow!("encoder type mismatch")),
|
||||||
@@ -255,8 +262,36 @@ impl EncoderApi for AomEncoder {
         }
     }

-    fn use_yuv(&self) -> bool {
-        true
+    fn yuvfmt(&self) -> crate::EncodeYuvFormat {
+        let mut img = Default::default();
+        let fmt = if self.i444 {
+            aom_img_fmt::AOM_IMG_FMT_I444
+        } else {
+            aom_img_fmt::AOM_IMG_FMT_I420
+        };
+        unsafe {
+            aom_img_wrap(
+                &mut img,
+                fmt,
+                self.width as _,
+                self.height as _,
+                crate::STRIDE_ALIGN as _,
+                0x1 as _,
+            );
+        }
+        let pixfmt = if self.i444 {
+            Pixfmt::I444
+        } else {
+            Pixfmt::I420
+        };
+        crate::EncodeYuvFormat {
+            pixfmt,
+            w: img.w as _,
+            h: img.h as _,
+            stride: img.stride.map(|s| s as usize).to_vec(),
+            u: img.planes[1] as usize - img.planes[0] as usize,
+            v: img.planes[2] as usize - img.planes[0] as usize,
+        }
     }

     fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
@@ -282,14 +317,20 @@ impl EncoderApi for AomEncoder {

 impl AomEncoder {
     pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
-        if 2 * data.len() < 3 * self.width * self.height {
+        let bpp = if self.i444 { 24 } else { 12 };
+        if data.len() < self.width * self.height * bpp / 8 {
             return Err(Error::FailedCall("len not enough".to_string()));
         }
+        let fmt = if self.i444 {
+            aom_img_fmt::AOM_IMG_FMT_I444
+        } else {
+            aom_img_fmt::AOM_IMG_FMT_I420
+        };

         let mut image = Default::default();
         call_aom_ptr!(aom_img_wrap(
             &mut image,
-            aom_img_fmt::AOM_IMG_FMT_I420,
+            fmt,
             self.width as _,
             self.height as _,
             stride_align as _,
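Note (illustration, not part of the diff): the new length check amounts to 12 bits per pixel for I420 (full-resolution Y plus quarter-resolution U and V) versus 24 bits per pixel for I444 (all three planes at full resolution). A small self-contained restatement of that arithmetic:

    // Minimum input length accepted for a w x h frame, matching the check above.
    fn min_yuv_len(width: usize, height: usize, i444: bool) -> usize {
        let bpp = if i444 { 24 } else { 12 };
        width * height * bpp / 8
    }

    fn main() {
        assert_eq!(min_yuv_len(1920, 1080, false), 1920 * 1080 * 3 / 2); // 3_110_400
        assert_eq!(min_yuv_len(1920, 1080, true), 1920 * 1080 * 3); // 6_220_800
    }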
@@ -524,6 +565,13 @@ impl GoogleImage for Image {
     fn planes(&self) -> Vec<*mut u8> {
         self.inner().planes.iter().map(|p| *p as *mut u8).collect()
     }
+
+    fn chroma(&self) -> Chroma {
+        match self.inner().fmt {
+            aom_img_fmt::AOM_IMG_FMT_I444 => Chroma::I444,
+            _ => Chroma::I420,
+        }
+    }
 }

 impl Drop for Image {
@@ -14,7 +14,7 @@ use crate::{
     aom::{self, AomDecoder, AomEncoder, AomEncoderConfig},
     common::GoogleImage,
     vpxcodec::{self, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig, VpxVideoCodecId},
-    CodecName, ImageRgb,
+    CodecName, EncodeYuvFormat, ImageRgb,
 };

 use hbb_common::{
@@ -23,7 +23,7 @@ use hbb_common::{
     config::PeerConfig,
     log,
     message_proto::{
-        supported_decoding::PreferCodec, video_frame, EncodedVideoFrames,
+        supported_decoding::PreferCodec, video_frame, Chroma, CodecAbility, EncodedVideoFrames,
         SupportedDecoding, SupportedEncoding, VideoFrame,
     },
     sysinfo::{System, SystemExt},
@@ -56,13 +56,13 @@ pub enum EncoderCfg {
 }

 pub trait EncoderApi {
-    fn new(cfg: EncoderCfg) -> ResultType<Self>
+    fn new(cfg: EncoderCfg, i444: bool) -> ResultType<Self>
     where
         Self: Sized;

     fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<VideoFrame>;

-    fn use_yuv(&self) -> bool;
+    fn yuvfmt(&self) -> EncodeYuvFormat;

     fn set_quality(&mut self, quality: Quality) -> ResultType<()>;

@@ -107,18 +107,18 @@ pub enum EncodingUpdate {
 }

 impl Encoder {
-    pub fn new(config: EncoderCfg) -> ResultType<Encoder> {
-        log::info!("new encoder:{:?}", config);
+    pub fn new(config: EncoderCfg, i444: bool) -> ResultType<Encoder> {
+        log::info!("new encoder:{config:?}, i444:{i444}");
         match config {
             EncoderCfg::VPX(_) => Ok(Encoder {
-                codec: Box::new(VpxEncoder::new(config)?),
+                codec: Box::new(VpxEncoder::new(config, i444)?),
             }),
             EncoderCfg::AOM(_) => Ok(Encoder {
-                codec: Box::new(AomEncoder::new(config)?),
+                codec: Box::new(AomEncoder::new(config, i444)?),
             }),

             #[cfg(feature = "hwcodec")]
-            EncoderCfg::HW(_) => match HwEncoder::new(config) {
+            EncoderCfg::HW(_) => match HwEncoder::new(config, i444) {
                 Ok(hw) => Ok(Encoder {
                     codec: Box::new(hw),
                 }),
@@ -230,6 +230,12 @@ impl Encoder {
         let mut encoding = SupportedEncoding {
             vp8: true,
             av1: true,
+            i444: Some(CodecAbility {
+                vp9: true,
+                av1: true,
+                ..Default::default()
+            })
+            .into(),
             ..Default::default()
         };
         #[cfg(feature = "hwcodec")]
@@ -240,18 +246,41 @@ impl Encoder {
         }
         encoding
     }
 
+    pub fn use_i444(config: &EncoderCfg) -> bool {
+        let decodings = PEER_DECODINGS.lock().unwrap().clone();
+        let prefer_i444 = decodings
+            .iter()
+            .all(|d| d.1.prefer_chroma == Chroma::I444.into());
+        let i444_useable = match config {
+            EncoderCfg::VPX(vpx) => match vpx.codec {
+                VpxVideoCodecId::VP8 => false,
+                VpxVideoCodecId::VP9 => decodings.iter().all(|d| d.1.i444.vp9),
+            },
+            EncoderCfg::AOM(_) => decodings.iter().all(|d| d.1.i444.av1),
+            EncoderCfg::HW(_) => false,
+        };
+        prefer_i444 && i444_useable && !decodings.is_empty()
+    }
 }
 
 impl Decoder {
     pub fn supported_decodings(id_for_perfer: Option<&str>) -> SupportedDecoding {
+        let (prefer, prefer_chroma) = Self::preference(id_for_perfer);
+
         #[allow(unused_mut)]
         let mut decoding = SupportedDecoding {
             ability_vp8: 1,
             ability_vp9: 1,
             ability_av1: 1,
-            prefer: id_for_perfer
-                .map_or(PreferCodec::Auto, |id| Self::codec_preference(id))
-                .into(),
+            i444: Some(CodecAbility {
+                vp9: true,
+                av1: true,
+                ..Default::default()
+            })
+            .into(),
+            prefer: prefer.into(),
+            prefer_chroma: prefer_chroma.into(),
             ..Default::default()
         };
         #[cfg(feature = "hwcodec")]
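A simplified, runnable sketch of the negotiation rule introduced by `use_i444` above, with plain local types standing in for `PEER_DECODINGS` and the protobuf messages: 4:4:4 is only chosen when every connected peer prefers it and can decode it for the codec in use.

```rust
// Simplified stand-ins for the peer decoding state; the real code reads
// PEER_DECODINGS and protobuf-generated types.
#[derive(Clone, Copy, PartialEq)]
enum Chroma { I420, I444 }

#[derive(Clone, Copy)]
struct PeerDecoding { prefer_chroma: Chroma, i444_vp9: bool, i444_av1: bool }

#[derive(Clone, Copy)]
enum Codec { Vp8, Vp9, Av1, Hw }

fn use_i444(codec: Codec, peers: &[PeerDecoding]) -> bool {
    let prefer_i444 = peers.iter().all(|d| d.prefer_chroma == Chroma::I444);
    let i444_useable = match codec {
        Codec::Vp8 | Codec::Hw => false, // VP8 and the HW pipeline stay 4:2:0
        Codec::Vp9 => peers.iter().all(|d| d.i444_vp9),
        Codec::Av1 => peers.iter().all(|d| d.i444_av1),
    };
    prefer_i444 && i444_useable && !peers.is_empty()
}

fn main() {
    let peer = PeerDecoding { prefer_chroma: Chroma::I444, i444_vp9: true, i444_av1: true };
    assert!(use_i444(Codec::Vp9, &[peer]));
    assert!(!use_i444(Codec::Vp8, &[peer])); // codec cannot carry 4:4:4
    assert!(!use_i444(Codec::Vp9, &[]));     // no peers, fall back to 4:2:0
    println!("ok");
}
```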
@@ -314,31 +343,33 @@ impl Decoder {
         &mut self,
         frame: &video_frame::Union,
         rgb: &mut ImageRgb,
+        chroma: &mut Option<Chroma>,
     ) -> ResultType<bool> {
         match frame {
             video_frame::Union::Vp8s(vp8s) => {
                 if let Some(vp8) = &mut self.vp8 {
-                    Decoder::handle_vpxs_video_frame(vp8, vp8s, rgb)
+                    Decoder::handle_vpxs_video_frame(vp8, vp8s, rgb, chroma)
                 } else {
                     bail!("vp8 decoder not available");
                 }
             }
             video_frame::Union::Vp9s(vp9s) => {
                 if let Some(vp9) = &mut self.vp9 {
-                    Decoder::handle_vpxs_video_frame(vp9, vp9s, rgb)
+                    Decoder::handle_vpxs_video_frame(vp9, vp9s, rgb, chroma)
                 } else {
                     bail!("vp9 decoder not available");
                 }
             }
             video_frame::Union::Av1s(av1s) => {
                 if let Some(av1) = &mut self.av1 {
-                    Decoder::handle_av1s_video_frame(av1, av1s, rgb)
+                    Decoder::handle_av1s_video_frame(av1, av1s, rgb, chroma)
                 } else {
                     bail!("av1 decoder not available");
                 }
             }
             #[cfg(feature = "hwcodec")]
             video_frame::Union::H264s(h264s) => {
+                *chroma = Some(Chroma::I420);
                 if let Some(decoder) = &mut self.hw.h264 {
                     Decoder::handle_hw_video_frame(decoder, h264s, rgb, &mut self.i420)
                 } else {
@@ -347,6 +378,7 @@ impl Decoder {
             }
             #[cfg(feature = "hwcodec")]
             video_frame::Union::H265s(h265s) => {
+                *chroma = Some(Chroma::I420);
                 if let Some(decoder) = &mut self.hw.h265 {
                     Decoder::handle_hw_video_frame(decoder, h265s, rgb, &mut self.i420)
                 } else {
@@ -355,6 +387,7 @@ impl Decoder {
             }
             #[cfg(feature = "mediacodec")]
             video_frame::Union::H264s(h264s) => {
+                *chroma = Some(Chroma::I420);
                 if let Some(decoder) = &mut self.media_codec.h264 {
                     Decoder::handle_mediacodec_video_frame(decoder, h264s, rgb)
                 } else {
@@ -363,6 +396,7 @@ impl Decoder {
             }
             #[cfg(feature = "mediacodec")]
             video_frame::Union::H265s(h265s) => {
+                *chroma = Some(Chroma::I420);
                 if let Some(decoder) = &mut self.media_codec.h265 {
                     Decoder::handle_mediacodec_video_frame(decoder, h265s, rgb)
                 } else {
@@ -378,6 +412,7 @@ impl Decoder {
         decoder: &mut VpxDecoder,
         vpxs: &EncodedVideoFrames,
         rgb: &mut ImageRgb,
+        chroma: &mut Option<Chroma>,
     ) -> ResultType<bool> {
         let mut last_frame = vpxcodec::Image::new();
         for vpx in vpxs.frames.iter() {
@@ -393,6 +428,7 @@ impl Decoder {
         if last_frame.is_null() {
             Ok(false)
         } else {
+            *chroma = Some(last_frame.chroma());
             last_frame.to(rgb);
             Ok(true)
         }
@@ -403,6 +439,7 @@ impl Decoder {
         decoder: &mut AomDecoder,
         av1s: &EncodedVideoFrames,
         rgb: &mut ImageRgb,
+        chroma: &mut Option<Chroma>,
     ) -> ResultType<bool> {
         let mut last_frame = aom::Image::new();
         for av1 in av1s.frames.iter() {
@@ -418,6 +455,7 @@ impl Decoder {
         if last_frame.is_null() {
             Ok(false)
         } else {
+            *chroma = Some(last_frame.chroma());
             last_frame.to(rgb);
             Ok(true)
         }
@@ -457,12 +495,16 @@ impl Decoder {
             return Ok(false);
         }
 
-    fn codec_preference(id: &str) -> PreferCodec {
-        let codec = PeerConfig::load(id)
-            .options
+    fn preference(id: Option<&str>) -> (PreferCodec, Chroma) {
+        let id = id.unwrap_or_default();
+        if id.is_empty() {
+            return (PreferCodec::Auto, Chroma::I420);
+        }
+        let options = PeerConfig::load(id).options;
+        let codec = options
             .get("codec-preference")
             .map_or("".to_owned(), |c| c.to_owned());
-        if codec == "vp8" {
+        let codec = if codec == "vp8" {
             PreferCodec::VP8
         } else if codec == "vp9" {
             PreferCodec::VP9
@@ -474,7 +516,13 @@ impl Decoder {
             PreferCodec::H265
         } else {
             PreferCodec::Auto
-        }
+        };
+        let chroma = if options.get("i444") == Some(&"Y".to_string()) {
+            Chroma::I444
+        } else {
+            Chroma::I420
+        };
+        (codec, chroma)
     }
 }
 
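For illustration, a self-contained sketch of the option lookup done in `preference` above, with a plain `HashMap` standing in for the peer config: the per-peer "i444" option set to "Y" maps to an I444 chroma preference, everything else (or a missing peer id) falls back to I420.

```rust
use std::collections::HashMap;

#[derive(Debug, PartialEq)]
enum Chroma { I420, I444 }

// Stand-in for the peer-config options map; only the "i444" key matters here.
fn chroma_preference(options: &HashMap<String, String>) -> Chroma {
    if options.get("i444").map(|v| v.as_str()) == Some("Y") {
        Chroma::I444
    } else {
        Chroma::I420
    }
}

fn main() {
    let mut opts = HashMap::new();
    assert_eq!(chroma_preference(&opts), Chroma::I420);
    opts.insert("i444".to_owned(), "Y".to_owned());
    assert_eq!(chroma_preference(&opts), Chroma::I444);
    println!("ok");
}
```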
@@ -1,367 +1,25 @@
-use super::vpx::*;
-use std::os::raw::c_int;
+#![allow(non_camel_case_types)]
+#![allow(non_snake_case)]
+#![allow(non_upper_case_globals)]
+#![allow(improper_ctypes)]
+#![allow(dead_code)]
 
-extern "C" {
-    // seems libyuv uses reverse byte order compared with our view
+include!(concat!(env!("OUT_DIR"), "/yuv_ffi.rs"));
 
-    pub fn ARGBRotate(
-        src_argb: *const u8,
-        src_stride_argb: c_int,
-        dst_argb: *mut u8,
-        dst_stride_argb: c_int,
-        src_width: c_int,
-        src_height: c_int,
-        mode: c_int,
-    ) -> c_int;
+#[cfg(not(target_os = "ios"))]
+use crate::Frame;
+use crate::{generate_call_macro, EncodeYuvFormat, TraitFrame};
+use hbb_common::{bail, log, ResultType};
 
-    pub fn ARGBMirror(
-        src_argb: *const u8,
-        src_stride_argb: c_int,
-        dst_argb: *mut u8,
-        dst_stride_argb: c_int,
-        width: c_int,
-        height: c_int,
-    ) -> c_int;
+generate_call_macro!(call_yuv, false);
 
-    pub fn ARGBToI420(
-        src_bgra: *const u8,
-        src_stride_bgra: c_int,
-        dst_y: *mut u8,
-        dst_stride_y: c_int,
-        dst_u: *mut u8,
-        dst_stride_u: c_int,
-        dst_v: *mut u8,
-        dst_stride_v: c_int,
-        width: c_int,
-        height: c_int,
-    ) -> c_int;
-
-    pub fn ABGRToI420(
-        src_rgba: *const u8,
-        src_stride_rgba: c_int,
-        dst_y: *mut u8,
-        dst_stride_y: c_int,
-        dst_u: *mut u8,
-        dst_stride_u: c_int,
-        dst_v: *mut u8,
-        dst_stride_v: c_int,
-        width: c_int,
-        height: c_int,
-    ) -> c_int;
-
-    pub fn ARGBToNV12(
-        src_bgra: *const u8,
-        src_stride_bgra: c_int,
-        dst_y: *mut u8,
-        dst_stride_y: c_int,
-        dst_uv: *mut u8,
-        dst_stride_uv: c_int,
-        width: c_int,
-        height: c_int,
-    ) -> c_int;
-
-    pub fn NV12ToI420(
-        src_y: *const u8,
-        src_stride_y: c_int,
-        src_uv: *const u8,
-        src_stride_uv: c_int,
-        dst_y: *mut u8,
-        dst_stride_y: c_int,
-        dst_u: *mut u8,
-        dst_stride_u: c_int,
-        dst_v: *mut u8,
-        dst_stride_v: c_int,
-        width: c_int,
-        height: c_int,
-    ) -> c_int;
-
-    // I420ToRGB24: RGB little endian (bgr in memory)
-    // I420ToRaw: RGB big endian (rgb in memory) to RGBA.
-    pub fn I420ToRAW(
-        src_y: *const u8,
-        src_stride_y: c_int,
-        src_u: *const u8,
-        src_stride_u: c_int,
-        src_v: *const u8,
-        src_stride_v: c_int,
-        dst_rgba: *mut u8,
-        dst_stride_raw: c_int,
-        width: c_int,
-        height: c_int,
-    ) -> c_int;
-
-    pub fn I420ToARGB(
-        src_y: *const u8,
-        src_stride_y: c_int,
-        src_u: *const u8,
-        src_stride_u: c_int,
-        src_v: *const u8,
-        src_stride_v: c_int,
-        dst_rgba: *mut u8,
-        dst_stride_rgba: c_int,
-        width: c_int,
-        height: c_int,
-    ) -> c_int;
-
-    pub fn I420ToABGR(
-        src_y: *const u8,
-        src_stride_y: c_int,
-        src_u: *const u8,
-        src_stride_u: c_int,
-        src_v: *const u8,
-        src_stride_v: c_int,
-        dst_rgba: *mut u8,
-        dst_stride_rgba: c_int,
-        width: c_int,
-        height: c_int,
-    ) -> c_int;
-
-    pub fn NV12ToARGB(
-        src_y: *const u8,
-        src_stride_y: c_int,
-        src_uv: *const u8,
-        src_stride_uv: c_int,
-        dst_rgba: *mut u8,
-        dst_stride_rgba: c_int,
-        width: c_int,
-        height: c_int,
-    ) -> c_int;
-
-    pub fn NV12ToABGR(
-        src_y: *const u8,
-        src_stride_y: c_int,
-        src_uv: *const u8,
-        src_stride_uv: c_int,
-        dst_rgba: *mut u8,
-        dst_stride_rgba: c_int,
-        width: c_int,
-        height: c_int,
-    ) -> c_int;
-}
-
-// https://github.com/webmproject/libvpx/blob/master/vpx/src/vpx_image.c
-#[inline]
-fn get_vpx_i420_stride(
-    width: usize,
-    height: usize,
-    stride_align: usize,
-) -> (usize, usize, usize, usize, usize, usize) {
-    let mut img = Default::default();
-    unsafe {
-        vpx_img_wrap(
-            &mut img,
-            vpx_img_fmt::VPX_IMG_FMT_I420,
-            width as _,
-            height as _,
-            stride_align as _,
-            0x1 as _,
-        );
-    }
-    (
-        img.w as _,
-        img.h as _,
-        img.stride[0] as _,
-        img.stride[1] as _,
-        img.planes[1] as usize - img.planes[0] as usize,
-        img.planes[2] as usize - img.planes[0] as usize,
-    )
-}
-
-pub fn i420_to_rgb(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
-    let (_, _, src_stride_y, src_stride_uv, u, v) =
-        get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
-    let src_y = src.as_ptr();
-    let src_u = src[u..].as_ptr();
-    let src_v = src[v..].as_ptr();
-    dst.resize(width * height * 3, 0);
-    unsafe {
-        super::I420ToRAW(
-            src_y,
-            src_stride_y as _,
-            src_u,
-            src_stride_uv as _,
-            src_v,
-            src_stride_uv as _,
-            dst.as_mut_ptr(),
-            (width * 3) as _,
-            width as _,
-            height as _,
-        );
-    };
-}
-
-pub fn i420_to_bgra(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
-    let (_, _, src_stride_y, src_stride_uv, u, v) =
-        get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
-    let src_y = src.as_ptr();
-    let src_u = src[u..].as_ptr();
-    let src_v = src[v..].as_ptr();
-    dst.resize(width * height * 4, 0);
-    unsafe {
-        super::I420ToARGB(
-            src_y,
-            src_stride_y as _,
-            src_u,
-            src_stride_uv as _,
-            src_v,
-            src_stride_uv as _,
-            dst.as_mut_ptr(),
-            (width * 3) as _,
-            width as _,
-            height as _,
-        );
-    };
-}
-
-pub fn bgra_to_i420(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
-    let (_, h, dst_stride_y, dst_stride_uv, u, v) =
-        get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
-    dst.resize(h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
-    let dst_y = dst.as_mut_ptr();
-    let dst_u = dst[u..].as_mut_ptr();
-    let dst_v = dst[v..].as_mut_ptr();
-    unsafe {
-        ARGBToI420(
-            src.as_ptr(),
-            (src.len() / height) as _,
-            dst_y,
-            dst_stride_y as _,
-            dst_u,
-            dst_stride_uv as _,
-            dst_v,
-            dst_stride_uv as _,
-            width as _,
-            height as _,
-        );
-    }
-}
-
-pub fn rgba_to_i420(width: usize, height: usize, src: &[u8], dst: &mut Vec<u8>) {
-    let (_, h, dst_stride_y, dst_stride_uv, u, v) =
-        get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
-    dst.resize(h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
-    let dst_y = dst.as_mut_ptr();
-    let dst_u = dst[u..].as_mut_ptr();
-    let dst_v = dst[v..].as_mut_ptr();
-    unsafe {
-        ABGRToI420(
-            src.as_ptr(),
-            (src.len() / height) as _,
-            dst_y,
-            dst_stride_y as _,
-            dst_u,
-            dst_stride_uv as _,
-            dst_v,
-            dst_stride_uv as _,
-            width as _,
-            height as _,
-        );
-    }
-}
-
-pub unsafe fn nv12_to_i420(
-    src_y: *const u8,
-    src_stride_y: c_int,
-    src_uv: *const u8,
-    src_stride_uv: c_int,
-    width: usize,
-    height: usize,
-    dst: &mut Vec<u8>,
-) {
-    let (_, h, dst_stride_y, dst_stride_uv, u, v) =
-        get_vpx_i420_stride(width, height, super::STRIDE_ALIGN);
-    dst.resize(h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
-    let dst_y = dst.as_mut_ptr();
-    let dst_u = dst[u..].as_mut_ptr();
-    let dst_v = dst[v..].as_mut_ptr();
-    NV12ToI420(
-        src_y,
-        src_stride_y,
-        src_uv,
-        src_stride_uv,
-        dst_y,
-        dst_stride_y as _,
-        dst_u,
-        dst_stride_uv as _,
-        dst_v,
-        dst_stride_uv as _,
-        width as _,
-        height as _,
-    );
-}
 
 #[cfg(feature = "hwcodec")]
 pub mod hw {
+    use super::*;
     use crate::ImageFormat;
-    use hbb_common::{anyhow::anyhow, ResultType};
     #[cfg(target_os = "windows")]
     use hwcodec::{ffmpeg::ffmpeg_linesize_offset_length, AVPixelFormat};
 
-    pub fn hw_bgra_to_i420(
-        width: usize,
-        height: usize,
-        stride: &[i32],
-        offset: &[i32],
-        length: i32,
-        src: &[u8],
-        dst: &mut Vec<u8>,
-    ) {
-        let stride_y = stride[0] as usize;
-        let stride_u = stride[1] as usize;
-        let stride_v = stride[2] as usize;
-        let offset_u = offset[0] as usize;
-        let offset_v = offset[1] as usize;
-
-        dst.resize(length as _, 0);
-        let dst_y = dst.as_mut_ptr();
-        let dst_u = dst[offset_u..].as_mut_ptr();
-        let dst_v = dst[offset_v..].as_mut_ptr();
-        unsafe {
-            super::ARGBToI420(
-                src.as_ptr(),
-                (src.len() / height) as _,
-                dst_y,
-                stride_y as _,
-                dst_u,
-                stride_u as _,
-                dst_v,
-                stride_v as _,
-                width as _,
-                height as _,
-            );
-        }
-    }
-
-    pub fn hw_bgra_to_nv12(
-        width: usize,
-        height: usize,
-        stride: &[i32],
-        offset: &[i32],
-        length: i32,
-        src: &[u8],
-        dst: &mut Vec<u8>,
-    ) {
-        let stride_y = stride[0] as usize;
-        let stride_uv = stride[1] as usize;
-        let offset_uv = offset[0] as usize;
-
-        dst.resize(length as _, 0);
-        let dst_y = dst.as_mut_ptr();
-        let dst_uv = dst[offset_uv..].as_mut_ptr();
-        unsafe {
-            super::ARGBToNV12(
-                src.as_ptr(),
-                (src.len() / height) as _,
-                dst_y,
-                stride_y as _,
-                dst_uv,
-                stride_uv as _,
-                width as _,
-                height as _,
-            );
-        }
-    }
-
     #[cfg(target_os = "windows")]
     pub fn hw_nv12_to(
         fmt: ImageFormat,
@@ -386,61 +44,59 @@ pub mod hw {
             let i420_stride_v = linesize_i420[2];
             i420.resize(i420_len as _, 0);
 
-            unsafe {
-                let i420_offset_y = i420.as_ptr().add(0) as _;
-                let i420_offset_u = i420.as_ptr().add(offset_i420[0] as _) as _;
-                let i420_offset_v = i420.as_ptr().add(offset_i420[1] as _) as _;
-                super::NV12ToI420(
-                    src_y.as_ptr(),
-                    nv12_stride_y as _,
-                    src_uv.as_ptr(),
-                    nv12_stride_uv as _,
-                    i420_offset_y,
-                    i420_stride_y,
-                    i420_offset_u,
-                    i420_stride_u,
-                    i420_offset_v,
-                    i420_stride_v,
-                    width as _,
-                    height as _,
-                );
-                match fmt {
-                    ImageFormat::ARGB => {
-                        super::I420ToARGB(
-                            i420_offset_y,
-                            i420_stride_y,
-                            i420_offset_u,
-                            i420_stride_u,
-                            i420_offset_v,
-                            i420_stride_v,
-                            dst.as_mut_ptr(),
-                            (width * 4) as _,
-                            width as _,
-                            height as _,
-                        );
-                    }
-                    ImageFormat::ABGR => {
-                        super::I420ToABGR(
-                            i420_offset_y,
-                            i420_stride_y,
-                            i420_offset_u,
-                            i420_stride_u,
-                            i420_offset_v,
-                            i420_stride_v,
-                            dst.as_mut_ptr(),
-                            (width * 4) as _,
-                            width as _,
-                            height as _,
-                        );
-                    }
-                    _ => {
-                        return Err(anyhow!("unsupported image format"));
-                    }
-                }
-                return Ok(());
-            };
+            let i420_offset_y = unsafe { i420.as_ptr().add(0) as _ };
+            let i420_offset_u = unsafe { i420.as_ptr().add(offset_i420[0] as _) as _ };
+            let i420_offset_v = unsafe { i420.as_ptr().add(offset_i420[1] as _) as _ };
+            call_yuv!(NV12ToI420(
+                src_y.as_ptr(),
+                nv12_stride_y as _,
+                src_uv.as_ptr(),
+                nv12_stride_uv as _,
+                i420_offset_y,
+                i420_stride_y,
+                i420_offset_u,
+                i420_stride_u,
+                i420_offset_v,
+                i420_stride_v,
+                width as _,
+                height as _,
+            ));
+            match fmt {
+                ImageFormat::ARGB => {
+                    call_yuv!(I420ToARGB(
+                        i420_offset_y,
+                        i420_stride_y,
+                        i420_offset_u,
+                        i420_stride_u,
+                        i420_offset_v,
+                        i420_stride_v,
+                        dst.as_mut_ptr(),
+                        (width * 4) as _,
+                        width as _,
+                        height as _,
+                    ));
+                }
+                ImageFormat::ABGR => {
+                    call_yuv!(I420ToABGR(
+                        i420_offset_y,
+                        i420_stride_y,
+                        i420_offset_u,
+                        i420_stride_u,
+                        i420_offset_v,
+                        i420_stride_v,
+                        dst.as_mut_ptr(),
+                        (width * 4) as _,
+                        width as _,
+                        height as _,
+                    ));
+                }
+                _ => {
+                    bail!("unsupported image format");
+                }
+            }
+            return Ok(());
         }
-        return Err(anyhow!("get linesize offset failed"));
+        bail!("get linesize offset failed");
     }
 
     #[cfg(not(target_os = "windows"))]
@@ -457,41 +113,34 @@ pub mod hw {
         _align: usize,
     ) -> ResultType<()> {
         dst.resize(width * height * 4, 0);
-        unsafe {
-            match fmt {
-                ImageFormat::ARGB => {
-                    match super::NV12ToARGB(
-                        src_y.as_ptr(),
-                        src_stride_y as _,
-                        src_uv.as_ptr(),
-                        src_stride_uv as _,
-                        dst.as_mut_ptr(),
-                        (width * 4) as _,
-                        width as _,
-                        height as _,
-                    ) {
-                        0 => Ok(()),
-                        _ => Err(anyhow!("NV12ToARGB failed")),
-                    }
-                }
-                ImageFormat::ABGR => {
-                    match super::NV12ToABGR(
-                        src_y.as_ptr(),
-                        src_stride_y as _,
-                        src_uv.as_ptr(),
-                        src_stride_uv as _,
-                        dst.as_mut_ptr(),
-                        (width * 4) as _,
-                        width as _,
-                        height as _,
-                    ) {
-                        0 => Ok(()),
-                        _ => Err(anyhow!("NV12ToABGR failed")),
-                    }
-                }
-                _ => Err(anyhow!("unsupported image format")),
-            }
-        }
+        match fmt {
+            ImageFormat::ARGB => {
+                call_yuv!(NV12ToARGB(
+                    src_y.as_ptr(),
+                    src_stride_y as _,
+                    src_uv.as_ptr(),
+                    src_stride_uv as _,
+                    dst.as_mut_ptr(),
+                    (width * 4) as _,
+                    width as _,
+                    height as _,
+                ));
+            }
+            ImageFormat::ABGR => {
+                call_yuv!(NV12ToABGR(
+                    src_y.as_ptr(),
+                    src_stride_y as _,
+                    src_uv.as_ptr(),
+                    src_stride_uv as _,
+                    dst.as_mut_ptr(),
+                    (width * 4) as _,
+                    width as _,
+                    height as _,
+                ));
+            }
+            _ => bail!("unsupported image format"),
+        }
+        Ok(())
     }
 
     pub fn hw_i420_to(
@@ -505,43 +154,153 @@ pub mod hw {
         src_stride_u: usize,
         src_stride_v: usize,
         dst: &mut Vec<u8>,
-    ) {
+    ) -> ResultType<()> {
         let src_y = src_y.as_ptr();
         let src_u = src_u.as_ptr();
         let src_v = src_v.as_ptr();
         dst.resize(width * height * 4, 0);
-        unsafe {
-            match fmt {
-                ImageFormat::ARGB => {
-                    super::I420ToARGB(
-                        src_y,
-                        src_stride_y as _,
-                        src_u,
-                        src_stride_u as _,
-                        src_v,
-                        src_stride_v as _,
-                        dst.as_mut_ptr(),
-                        (width * 4) as _,
-                        width as _,
-                        height as _,
-                    );
-                }
-                ImageFormat::ABGR => {
-                    super::I420ToABGR(
-                        src_y,
-                        src_stride_y as _,
-                        src_u,
-                        src_stride_u as _,
-                        src_v,
-                        src_stride_v as _,
-                        dst.as_mut_ptr(),
-                        (width * 4) as _,
-                        width as _,
-                        height as _,
-                    );
-                }
-                _ => {}
-            }
+        match fmt {
+            ImageFormat::ARGB => {
+                call_yuv!(I420ToARGB(
+                    src_y,
+                    src_stride_y as _,
+                    src_u,
+                    src_stride_u as _,
+                    src_v,
+                    src_stride_v as _,
+                    dst.as_mut_ptr(),
+                    (width * 4) as _,
+                    width as _,
+                    height as _,
+                ));
+            }
+            ImageFormat::ABGR => {
+                call_yuv!(I420ToABGR(
+                    src_y,
+                    src_stride_y as _,
+                    src_u,
+                    src_stride_u as _,
+                    src_v,
+                    src_stride_v as _,
+                    dst.as_mut_ptr(),
+                    (width * 4) as _,
+                    width as _,
+                    height as _,
+                ));
+            }
+            _ => bail!("unsupported image format"),
         };
+        Ok(())
     }
 }
+
+#[cfg(not(target_os = "ios"))]
+pub fn convert_to_yuv(
+    captured: &Frame,
+    dst_fmt: EncodeYuvFormat,
+    dst: &mut Vec<u8>,
+    mid_data: &mut Vec<u8>,
+) -> ResultType<()> {
+    let src = captured.data();
+    let src_stride = captured.stride();
+    let captured_pixfmt = captured.pixfmt();
+    if captured_pixfmt == crate::Pixfmt::BGRA || captured_pixfmt == crate::Pixfmt::RGBA {
+        if src.len() < src_stride[0] * dst_fmt.h {
+            bail!(
+                "length not enough: {} < {}",
+                src.len(),
+                src_stride[0] * dst_fmt.h
+            );
+        }
+    }
+    match (captured_pixfmt, dst_fmt.pixfmt) {
+        (crate::Pixfmt::BGRA, crate::Pixfmt::I420) | (crate::Pixfmt::RGBA, crate::Pixfmt::I420) => {
+            let dst_stride_y = dst_fmt.stride[0];
+            let dst_stride_uv = dst_fmt.stride[1];
+            dst.resize(dst_fmt.h * dst_stride_y * 2, 0); // waste some memory to ensure memory safety
+            let dst_y = dst.as_mut_ptr();
+            let dst_u = dst[dst_fmt.u..].as_mut_ptr();
+            let dst_v = dst[dst_fmt.v..].as_mut_ptr();
+            let f = if captured_pixfmt == crate::Pixfmt::BGRA {
+                ARGBToI420
+            } else {
+                ABGRToI420
+            };
+            call_yuv!(f(
+                src.as_ptr(),
+                src_stride[0] as _,
+                dst_y,
+                dst_stride_y as _,
+                dst_u,
+                dst_stride_uv as _,
+                dst_v,
+                dst_stride_uv as _,
+                dst_fmt.w as _,
+                dst_fmt.h as _,
+            ));
+        }
+        (crate::Pixfmt::BGRA, crate::Pixfmt::NV12) | (crate::Pixfmt::RGBA, crate::Pixfmt::NV12) => {
+            let dst_stride_y = dst_fmt.stride[0];
+            let dst_stride_uv = dst_fmt.stride[1];
+            dst.resize(dst_fmt.h * (dst_stride_y + dst_stride_uv / 2), 0);
+            let dst_y = dst.as_mut_ptr();
+            let dst_uv = dst[dst_fmt.u..].as_mut_ptr();
+            let f = if captured_pixfmt == crate::Pixfmt::BGRA {
+                ARGBToNV12
+            } else {
+                ABGRToNV12
+            };
+            call_yuv!(f(
+                src.as_ptr(),
+                src_stride[0] as _,
+                dst_y,
+                dst_stride_y as _,
+                dst_uv,
+                dst_stride_uv as _,
+                dst_fmt.w as _,
+                dst_fmt.h as _,
+            ));
+        }
+        (crate::Pixfmt::BGRA, crate::Pixfmt::I444) | (crate::Pixfmt::RGBA, crate::Pixfmt::I444) => {
+            let dst_stride_y = dst_fmt.stride[0];
+            let dst_stride_u = dst_fmt.stride[1];
+            let dst_stride_v = dst_fmt.stride[2];
+            dst.resize(dst_fmt.h * (dst_stride_y + dst_stride_u + dst_stride_v), 0);
+            let dst_y = dst.as_mut_ptr();
+            let dst_u = dst[dst_fmt.u..].as_mut_ptr();
+            let dst_v = dst[dst_fmt.v..].as_mut_ptr();
+            let src = if captured_pixfmt == crate::Pixfmt::BGRA {
+                src
+            } else {
+                mid_data.resize(src.len(), 0);
+                call_yuv!(ABGRToARGB(
+                    src.as_ptr(),
+                    src_stride[0] as _,
+                    mid_data.as_mut_ptr(),
+                    src_stride[0] as _,
+                    dst_fmt.w as _,
+                    dst_fmt.h as _,
+                ));
+                mid_data
+            };
+            call_yuv!(ARGBToI444(
+                src.as_ptr(),
+                src_stride[0] as _,
+                dst_y,
+                dst_stride_y as _,
+                dst_u,
+                dst_stride_u as _,
+                dst_v,
+                dst_stride_v as _,
+                dst_fmt.w as _,
+                dst_fmt.h as _,
+            ));
+        }
+        _ => {
+            bail!(
+                "convert not support, {captured_pixfmt:?} -> {:?}",
+                dst_fmt.pixfmt
+            );
+        }
+    }
+    Ok(())
+}
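A rough, self-contained sketch of the destination-buffer sizing used by the new `convert_to_yuv` path above (local stand-in types, no libyuv calls): the I420 and NV12 branches deliberately over-allocate, while I444 needs one full-resolution plane per channel.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Pixfmt { I420, NV12, I444 }

// Stand-in for the crate's EncodeYuvFormat: height plus per-plane strides.
struct EncodeYuvFormat { pixfmt: Pixfmt, h: usize, stride: Vec<usize> }

fn dst_len(fmt: &EncodeYuvFormat) -> usize {
    match fmt.pixfmt {
        Pixfmt::I420 => fmt.h * fmt.stride[0] * 2,                         // generous, as in the diff
        Pixfmt::NV12 => fmt.h * (fmt.stride[0] + fmt.stride[1] / 2),
        Pixfmt::I444 => fmt.h * (fmt.stride[0] + fmt.stride[1] + fmt.stride[2]),
    }
}

fn main() {
    let i444 = EncodeYuvFormat { pixfmt: Pixfmt::I444, h: 1080, stride: vec![1920, 1920, 1920] };
    let i420 = EncodeYuvFormat { pixfmt: Pixfmt::I420, h: 1080, stride: vec![1920, 960] };
    println!("i444 buffer: {} bytes", dst_len(&i444)); // 6220800
    println!("i420 buffer: {} bytes", dst_len(&i420)); // 4147200
}
```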
@@ -1,10 +1,9 @@
-use crate::{common::TraitCapturer, dxgi};
+use crate::{common::TraitCapturer, dxgi, Pixfmt};
 use std::{
     io::{
         self,
         ErrorKind::{NotFound, TimedOut, WouldBlock},
     },
-    ops,
     time::Duration,
 };
 
@@ -15,10 +14,10 @@ pub struct Capturer {
 }
 
 impl Capturer {
-    pub fn new(display: Display, yuv: bool) -> io::Result<Capturer> {
+    pub fn new(display: Display) -> io::Result<Capturer> {
         let width = display.width();
         let height = display.height();
-        let inner = dxgi::Capturer::new(display.0, yuv)?;
+        let inner = dxgi::Capturer::new(display.0)?;
         Ok(Capturer {
             inner,
             width,
@@ -40,13 +39,9 @@ impl Capturer {
 }
 
 impl TraitCapturer for Capturer {
-    fn set_use_yuv(&mut self, use_yuv: bool) {
-        self.inner.set_use_yuv(use_yuv);
-    }
-
     fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
         match self.inner.frame(timeout.as_millis() as _) {
-            Ok(frame) => Ok(Frame(frame)),
+            Ok(frame) => Ok(Frame::new(frame, self.height)),
             Err(ref error) if error.kind() == TimedOut => Err(WouldBlock.into()),
             Err(error) => Err(error),
         }
@@ -61,12 +56,31 @@ impl TraitCapturer for Capturer {
     }
 }
 
-pub struct Frame<'a>(pub &'a [u8]);
+pub struct Frame<'a> {
+    data: &'a [u8],
+    stride: Vec<usize>,
+}
 
-impl<'a> ops::Deref for Frame<'a> {
-    type Target = [u8];
-    fn deref(&self) -> &[u8] {
-        self.0
+impl<'a> Frame<'a> {
+    pub fn new(data: &'a [u8], h: usize) -> Self {
+        let stride = data.len() / h;
+        let mut v = Vec::new();
+        v.push(stride);
+        Frame { data, stride: v }
+    }
+}
+
+impl<'a> crate::TraitFrame for Frame<'a> {
+    fn data(&self) -> &[u8] {
+        self.data
+    }
+
+    fn stride(&self) -> Vec<usize> {
+        self.stride.clone()
+    }
+
+    fn pixfmt(&self) -> Pixfmt {
+        Pixfmt::BGRA
     }
 }
 
@@ -134,9 +148,9 @@ impl CapturerMag {
         dxgi::mag::CapturerMag::is_supported()
     }
 
-    pub fn new(origin: (i32, i32), width: usize, height: usize, use_yuv: bool) -> io::Result<Self> {
+    pub fn new(origin: (i32, i32), width: usize, height: usize) -> io::Result<Self> {
         Ok(CapturerMag {
-            inner: dxgi::mag::CapturerMag::new(origin, width, height, use_yuv)?,
+            inner: dxgi::mag::CapturerMag::new(origin, width, height)?,
             data: Vec::new(),
         })
     }
@@ -151,13 +165,9 @@ impl CapturerMag {
 }
 
 impl TraitCapturer for CapturerMag {
-    fn set_use_yuv(&mut self, use_yuv: bool) {
-        self.inner.set_use_yuv(use_yuv)
-    }
-
     fn frame<'a>(&'a mut self, _timeout_ms: Duration) -> io::Result<Frame<'a>> {
         self.inner.frame(&mut self.data)?;
-        Ok(Frame(&self.data))
+        Ok(Frame::new(&self.data, self.inner.get_rect().2))
     }
 
     fn is_gdi(&self) -> bool {
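A tiny sketch of the stride recovery used by the new `Frame::new` above; it assumes a tightly packed BGRA buffer, so bytes-per-row falls out as `data.len() / h`. Types are local stand-ins, not the crate's.

```rust
struct Frame<'a> {
    data: &'a [u8],
    stride: Vec<usize>,
}

impl<'a> Frame<'a> {
    fn new(data: &'a [u8], h: usize) -> Self {
        let stride = data.len() / h; // bytes per row, 4 * width for packed BGRA
        Frame { data, stride: vec![stride] }
    }
}

fn main() {
    let buf = vec![0u8; 4 * 640 * 480]; // packed 640x480 BGRA
    let frame = Frame::new(&buf, 480);
    assert_eq!(frame.stride[0], 2560); // 4 * 640
    assert_eq!(frame.data.len(), frame.stride[0] * 480);
    println!("ok");
}
```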
@ -1,6 +1,6 @@
|
|||||||
use crate::{
|
use crate::{
|
||||||
codec::{base_bitrate, codec_thread_num, EncoderApi, EncoderCfg},
|
codec::{base_bitrate, codec_thread_num, EncoderApi, EncoderCfg},
|
||||||
hw, ImageFormat, ImageRgb, HW_STRIDE_ALIGN,
|
hw, ImageFormat, ImageRgb, Pixfmt, HW_STRIDE_ALIGN,
|
||||||
};
|
};
|
||||||
use hbb_common::{
|
use hbb_common::{
|
||||||
allow_err,
|
allow_err,
|
||||||
@ -31,7 +31,6 @@ const DEFAULT_RC: RateControl = RC_DEFAULT;
|
|||||||
|
|
||||||
pub struct HwEncoder {
|
pub struct HwEncoder {
|
||||||
encoder: Encoder,
|
encoder: Encoder,
|
||||||
yuv: Vec<u8>,
|
|
||||||
pub format: DataFormat,
|
pub format: DataFormat,
|
||||||
pub pixfmt: AVPixelFormat,
|
pub pixfmt: AVPixelFormat,
|
||||||
width: u32,
|
width: u32,
|
||||||
@ -40,7 +39,7 @@ pub struct HwEncoder {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl EncoderApi for HwEncoder {
|
impl EncoderApi for HwEncoder {
|
||||||
fn new(cfg: EncoderCfg) -> ResultType<Self>
|
fn new(cfg: EncoderCfg, _i444: bool) -> ResultType<Self>
|
||||||
where
|
where
|
||||||
Self: Sized,
|
Self: Sized,
|
||||||
{
|
{
|
||||||
@ -78,7 +77,6 @@ impl EncoderApi for HwEncoder {
|
|||||||
match Encoder::new(ctx.clone()) {
|
match Encoder::new(ctx.clone()) {
|
||||||
Ok(encoder) => Ok(HwEncoder {
|
Ok(encoder) => Ok(HwEncoder {
|
||||||
encoder,
|
encoder,
|
||||||
yuv: vec![],
|
|
||||||
format,
|
format,
|
||||||
pixfmt: ctx.pixfmt,
|
pixfmt: ctx.pixfmt,
|
||||||
width: ctx.width as _,
|
width: ctx.width as _,
|
||||||
@ -118,8 +116,31 @@ impl EncoderApi for HwEncoder {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn use_yuv(&self) -> bool {
|
fn yuvfmt(&self) -> crate::EncodeYuvFormat {
|
||||||
false
|
let pixfmt = if self.pixfmt == AVPixelFormat::AV_PIX_FMT_NV12 {
|
||||||
|
Pixfmt::NV12
|
||||||
|
} else {
|
||||||
|
Pixfmt::I420
|
||||||
|
};
|
||||||
|
let stride = self
|
||||||
|
.encoder
|
||||||
|
.linesize
|
||||||
|
.clone()
|
||||||
|
.drain(..)
|
||||||
|
.map(|i| i as usize)
|
||||||
|
.collect();
|
||||||
|
crate::EncodeYuvFormat {
|
||||||
|
pixfmt,
|
||||||
|
w: self.encoder.ctx.width as _,
|
||||||
|
h: self.encoder.ctx.height as _,
|
||||||
|
stride,
|
||||||
|
u: self.encoder.offset[0] as _,
|
||||||
|
v: if pixfmt == Pixfmt::NV12 {
|
||||||
|
0
|
||||||
|
} else {
|
||||||
|
self.encoder.offset[1] as _
|
||||||
|
},
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_quality(&mut self, quality: crate::codec::Quality) -> ResultType<()> {
|
fn set_quality(&mut self, quality: crate::codec::Quality) -> ResultType<()> {
|
||||||
@ -145,29 +166,8 @@ impl HwEncoder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn encode(&mut self, bgra: &[u8]) -> ResultType<Vec<EncodeFrame>> {
|
pub fn encode(&mut self, yuv: &[u8]) -> ResultType<Vec<EncodeFrame>> {
|
||||||
match self.pixfmt {
|
match self.encoder.encode(yuv) {
|
||||||
AVPixelFormat::AV_PIX_FMT_YUV420P => hw::hw_bgra_to_i420(
|
|
||||||
self.encoder.ctx.width as _,
|
|
||||||
self.encoder.ctx.height as _,
|
|
||||||
&self.encoder.linesize,
|
|
||||||
&self.encoder.offset,
|
|
||||||
self.encoder.length,
|
|
||||||
bgra,
|
|
||||||
&mut self.yuv,
|
|
||||||
),
|
|
||||||
AVPixelFormat::AV_PIX_FMT_NV12 => hw::hw_bgra_to_nv12(
|
|
||||||
self.encoder.ctx.width as _,
|
|
||||||
self.encoder.ctx.height as _,
|
|
||||||
&self.encoder.linesize,
|
|
||||||
&self.encoder.offset,
|
|
||||||
self.encoder.length,
|
|
||||||
bgra,
|
|
||||||
&mut self.yuv,
|
|
||||||
),
|
|
||||||
}
|
|
||||||
|
|
||||||
match self.encoder.encode(&self.yuv) {
|
|
||||||
Ok(v) => {
|
Ok(v) => {
|
||||||
let mut data = Vec::<EncodeFrame>::new();
|
let mut data = Vec::<EncodeFrame>::new();
|
||||||
data.append(v);
|
data.append(v);
|
||||||
@ -245,7 +245,7 @@ impl HwDecoder {
|
|||||||
pub fn decode(&mut self, data: &[u8]) -> ResultType<Vec<HwDecoderImage>> {
|
pub fn decode(&mut self, data: &[u8]) -> ResultType<Vec<HwDecoderImage>> {
|
||||||
match self.decoder.decode(data) {
|
match self.decoder.decode(data) {
|
||||||
Ok(v) => Ok(v.iter().map(|f| HwDecoderImage { frame: f }).collect()),
|
Ok(v) => Ok(v.iter().map(|f| HwDecoderImage { frame: f }).collect()),
|
||||||
Err(_) => Ok(vec![]),
|
Err(e) => Err(anyhow!(e)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -274,7 +274,7 @@ impl HwDecoderImage<'_> {
|
|||||||
&mut rgb.raw as _,
|
&mut rgb.raw as _,
|
||||||
i420,
|
i420,
|
||||||
HW_STRIDE_ALIGN,
|
HW_STRIDE_ALIGN,
|
||||||
),
|
)?,
|
||||||
AVPixelFormat::AV_PIX_FMT_YUV420P => {
|
AVPixelFormat::AV_PIX_FMT_YUV420P => {
|
||||||
hw::hw_i420_to(
|
hw::hw_i420_to(
|
||||||
rgb.fmt(),
|
rgb.fmt(),
|
||||||
@ -287,10 +287,10 @@ impl HwDecoderImage<'_> {
|
|||||||
frame.linesize[1] as _,
|
frame.linesize[1] as _,
|
||||||
frame.linesize[2] as _,
|
frame.linesize[2] as _,
|
||||||
&mut rgb.raw as _,
|
&mut rgb.raw as _,
|
||||||
);
|
)?;
|
||||||
return Ok(());
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn bgra(&self, bgra: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
|
pub fn bgra(&self, bgra: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
|
||||||
|
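A local sketch of the pixel-format/offset mapping in `HwEncoder::yuvfmt` above: NV12 carries interleaved UV in a single plane, so only one chroma offset is meaningful and `v` is reported as 0, while planar I420 keeps separate U and V offsets. The types and the sample offsets are illustrative stand-ins.

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Pixfmt { I420, NV12 }

// Given the encoder's plane offsets, return the (u, v) offsets the format exposes.
fn chroma_offsets(pixfmt: Pixfmt, offset: &[usize]) -> (usize, usize) {
    match pixfmt {
        Pixfmt::NV12 => (offset[0], 0),         // single interleaved UV plane
        Pixfmt::I420 => (offset[0], offset[1]), // separate U and V planes
    }
}

fn main() {
    // 1920x1080: Y plane is 2_073_600 bytes; illustrative I420 U plane is 518_400 bytes.
    assert_eq!(chroma_offsets(Pixfmt::NV12, &[2_073_600]), (2_073_600, 0));
    assert_eq!(
        chroma_offsets(Pixfmt::I420, &[2_073_600, 2_592_000]),
        (2_073_600, 2_592_000)
    );
    println!("ok");
}
```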
@@ -11,10 +11,10 @@ pub enum Capturer {
 }
 
 impl Capturer {
-    pub fn new(display: Display, yuv: bool) -> io::Result<Capturer> {
+    pub fn new(display: Display) -> io::Result<Capturer> {
         Ok(match display {
-            Display::X11(d) => Capturer::X11(x11::Capturer::new(d, yuv)?),
-            Display::WAYLAND(d) => Capturer::WAYLAND(wayland::Capturer::new(d, yuv)?),
+            Display::X11(d) => Capturer::X11(x11::Capturer::new(d)?),
+            Display::WAYLAND(d) => Capturer::WAYLAND(wayland::Capturer::new(d)?),
         })
     }
 
@@ -34,13 +34,6 @@ impl Capturer {
 }
 
 impl TraitCapturer for Capturer {
-    fn set_use_yuv(&mut self, use_yuv: bool) {
-        match self {
-            Capturer::X11(d) => d.set_use_yuv(use_yuv),
-            Capturer::WAYLAND(d) => d.set_use_yuv(use_yuv),
-        }
-    }
-
     fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
         match self {
             Capturer::X11(d) => d.frame(timeout),
@@ -1,5 +1,8 @@
 pub use self::vpxcodec::*;
-use hbb_common::message_proto::{video_frame, VideoFrame};
+use hbb_common::{
+    log,
+    message_proto::{video_frame, Chroma, VideoFrame},
+};
 use std::slice;
 
 cfg_if! {
@@ -96,8 +99,6 @@ pub fn would_block_if_equal(old: &mut Vec<u8>, b: &[u8]) -> std::io::Result<()>
 }
 
 pub trait TraitCapturer {
-    fn set_use_yuv(&mut self, use_yuv: bool);
-
     // We doesn't support
     #[cfg(not(any(target_os = "ios")))]
     fn frame<'a>(&'a mut self, timeout: std::time::Duration) -> std::io::Result<Frame<'a>>;
@@ -108,6 +109,31 @@ pub trait TraitCapturer {
     fn set_gdi(&mut self) -> bool;
 }
 
+pub trait TraitFrame {
+    fn data(&self) -> &[u8];
+
+    fn stride(&self) -> Vec<usize>;
+
+    fn pixfmt(&self) -> Pixfmt;
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum Pixfmt {
+    BGRA,
+    RGBA,
+    I420,
+    NV12,
+    I444,
+}
+
+pub struct EncodeYuvFormat {
+    pub pixfmt: Pixfmt,
+    pub w: usize,
+    pub h: usize,
+    pub stride: Vec<usize>,
+    pub u: usize,
+    pub v: usize,
+}
+
 #[cfg(x11)]
 #[inline]
 pub fn is_x11() -> bool {
@@ -260,6 +286,7 @@ pub trait GoogleImage {
     fn height(&self) -> usize;
     fn stride(&self) -> Vec<i32>;
     fn planes(&self) -> Vec<*mut u8>;
+    fn chroma(&self) -> Chroma;
     fn get_bytes_per_row(w: usize, fmt: ImageFormat, stride: usize) -> usize {
         let bytes_per_pixel = match fmt {
             ImageFormat::Raw => 3,
@@ -278,8 +305,8 @@ pub trait GoogleImage {
         let stride = self.stride();
         let planes = self.planes();
         unsafe {
-            match rgb.fmt() {
-                ImageFormat::Raw => {
+            match (self.chroma(), rgb.fmt()) {
+                (Chroma::I420, ImageFormat::Raw) => {
                     super::I420ToRAW(
                         planes[0],
                         stride[0],
@@ -293,7 +320,7 @@ pub trait GoogleImage {
                         self.height() as _,
                     );
                 }
-                ImageFormat::ARGB => {
+                (Chroma::I420, ImageFormat::ARGB) => {
                     super::I420ToARGB(
                         planes[0],
                         stride[0],
@@ -307,7 +334,7 @@ pub trait GoogleImage {
                         self.height() as _,
                     );
                 }
-                ImageFormat::ABGR => {
+                (Chroma::I420, ImageFormat::ABGR) => {
                     super::I420ToABGR(
                         planes[0],
                         stride[0],
@@ -321,6 +348,36 @@ pub trait GoogleImage {
                         self.height() as _,
                     );
                 }
+                (Chroma::I444, ImageFormat::ARGB) => {
+                    super::I444ToARGB(
+                        planes[0],
+                        stride[0],
+                        planes[1],
+                        stride[1],
+                        planes[2],
+                        stride[2],
+                        rgb.raw.as_mut_ptr(),
+                        bytes_per_row as _,
+                        self.width() as _,
+                        self.height() as _,
+                    );
+                }
+                (Chroma::I444, ImageFormat::ABGR) => {
+                    super::I444ToABGR(
+                        planes[0],
+                        stride[0],
+                        planes[1],
+                        stride[1],
+                        planes[2],
+                        stride[2],
+                        rgb.raw.as_mut_ptr(),
+                        bytes_per_row as _,
+                        self.width() as _,
+                        self.height() as _,
+                    );
+                }
+                // (Chroma::I444, ImageFormat::Raw), new version libyuv have I444ToRAW
+                _ => log::error!("unsupported pixfmt:{:?}", self.chroma()),
             }
         }
     }
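A minimal sketch of the frame abstraction introduced above, with the trait and enum re-declared locally so the example stands alone: any capturer that can expose raw bytes, per-plane strides, and a pixel format can feed the conversion pipeline without committing to a concrete frame type.

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum Pixfmt { BGRA, RGBA, I420, NV12, I444 }

trait TraitFrame {
    fn data(&self) -> &[u8];
    fn stride(&self) -> Vec<usize>;
    fn pixfmt(&self) -> Pixfmt;
}

// An owned, in-memory frame used only for this example.
struct OwnedFrame { data: Vec<u8>, stride: Vec<usize>, pixfmt: Pixfmt }

impl TraitFrame for OwnedFrame {
    fn data(&self) -> &[u8] { &self.data }
    fn stride(&self) -> Vec<usize> { self.stride.clone() }
    fn pixfmt(&self) -> Pixfmt { self.pixfmt }
}

fn describe(f: &dyn TraitFrame) -> String {
    format!("{:?}, {} bytes, stride {:?}", f.pixfmt(), f.data().len(), f.stride())
}

fn main() {
    let f = OwnedFrame { data: vec![0; 4 * 8 * 8], stride: vec![32], pixfmt: Pixfmt::BGRA };
    println!("{}", describe(&f)); // BGRA, 256 bytes, stride [32]
}
```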
@@ -1,18 +1,16 @@
-use crate::quartz;
+use crate::{quartz, Pixfmt};
 use std::marker::PhantomData;
 use std::sync::{Arc, Mutex, TryLockError};
-use std::{io, mem, ops};
+use std::{io, mem};
 
 pub struct Capturer {
     inner: quartz::Capturer,
     frame: Arc<Mutex<Option<quartz::Frame>>>,
-    use_yuv: bool,
-    i420: Vec<u8>,
     saved_raw_data: Vec<u8>, // for faster compare and copy
 }
 
 impl Capturer {
-    pub fn new(display: Display, use_yuv: bool) -> io::Result<Capturer> {
+    pub fn new(display: Display) -> io::Result<Capturer> {
         let frame = Arc::new(Mutex::new(None));
 
         let f = frame.clone();
@@ -20,11 +18,7 @@ impl Capturer {
             display.0,
             display.width(),
             display.height(),
-            if use_yuv {
-                quartz::PixelFormat::YCbCr420Video
-            } else {
-                quartz::PixelFormat::Argb8888
-            },
+            quartz::PixelFormat::Argb8888,
             Default::default(),
             move |inner| {
                 if let Ok(mut f) = f.lock() {
@@ -37,8 +31,6 @@ impl Capturer {
         Ok(Capturer {
             inner,
             frame,
-            use_yuv,
-            i420: Vec::new(),
             saved_raw_data: Vec::new(),
         })
     }
@@ -53,10 +45,6 @@ impl Capturer {
 }
 
 impl crate::TraitCapturer for Capturer {
-    fn set_use_yuv(&mut self, use_yuv: bool) {
-        self.use_yuv = use_yuv;
-    }
-
     fn frame<'a>(&'a mut self, _timeout_ms: std::time::Duration) -> io::Result<Frame<'a>> {
         match self.frame.try_lock() {
             Ok(mut handle) => {
@@ -66,9 +54,7 @@ impl crate::TraitCapturer for Capturer {
                 match frame {
                     Some(mut frame) => {
                         crate::would_block_if_equal(&mut self.saved_raw_data, frame.inner())?;
-                        if self.use_yuv {
-                            frame.nv12_to_i420(self.width(), self.height(), &mut self.i420);
-                        }
+                        frame.surface_to_bgra(self.height());
                         Ok(Frame(frame, PhantomData))
                     }
 
@@ -85,11 +71,20 @@ impl crate::TraitCapturer for Capturer {
 
 pub struct Frame<'a>(pub quartz::Frame, PhantomData<&'a [u8]>);
 
-impl<'a> ops::Deref for Frame<'a> {
-    type Target = [u8];
-    fn deref(&self) -> &[u8] {
+impl<'a> crate::TraitFrame for Frame<'a> {
+    fn data(&self) -> &[u8] {
         &*self.0
     }
+
+    fn stride(&self) -> Vec<usize> {
+        let mut v = Vec::new();
+        v.push(self.0.stride());
+        v
+    }
+
+    fn pixfmt(&self) -> Pixfmt {
+        Pixfmt::BGRA
+    }
 }
 
 pub struct Display(quartz::Display);
@@ -4,11 +4,11 @@
 
 use hbb_common::anyhow::{anyhow, Context};
 use hbb_common::log;
-use hbb_common::message_proto::{EncodedVideoFrame, EncodedVideoFrames, VideoFrame};
+use hbb_common::message_proto::{Chroma, EncodedVideoFrame, EncodedVideoFrames, VideoFrame};
 use hbb_common::ResultType;
 
 use crate::codec::{base_bitrate, codec_thread_num, EncoderApi, Quality};
-use crate::{GoogleImage, STRIDE_ALIGN};
+use crate::{GoogleImage, Pixfmt, STRIDE_ALIGN};
 
 use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
 use crate::{generate_call_macro, generate_call_ptr_macro, Error, Result};
@@ -39,6 +39,7 @@ pub struct VpxEncoder {
     width: usize,
     height: usize,
     id: VpxVideoCodecId,
+    i444: bool,
 }
 
 pub struct VpxDecoder {
@@ -46,7 +47,7 @@ pub struct VpxDecoder {
 }
 
 impl EncoderApi for VpxEncoder {
-    fn new(cfg: crate::codec::EncoderCfg) -> ResultType<Self>
+    fn new(cfg: crate::codec::EncoderCfg, i444: bool) -> ResultType<Self>
     where
         Self: Sized,
     {
@@ -98,6 +99,13 @@ impl EncoderApi for VpxEncoder {
             } else {
                 c.rc_target_bitrate = base_bitrate;
             }
+            // https://chromium.googlesource.com/webm/libvpx/+/refs/heads/main/vp9/common/vp9_enums.h#29
+            // https://chromium.googlesource.com/webm/libvpx/+/refs/heads/main/vp8/vp8_cx_iface.c#282
+            c.g_profile = if i444 && config.codec == VpxVideoCodecId::VP9 {
+                1
+            } else {
+                0
+            };
 
             /*
             The VPX encoder supports two-pass encoding for rate control purposes.
@@ -166,6 +174,7 @@ impl EncoderApi for VpxEncoder {
                 width: config.width as _,
                 height: config.height as _,
                 id: config.codec,
+                i444,
             })
         }
         _ => Err(anyhow!("encoder type mismatch")),
@@ -192,8 +201,36 @@ impl EncoderApi for VpxEncoder {
         }
     }
 
-    fn use_yuv(&self) -> bool {
-        true
+    fn yuvfmt(&self) -> crate::EncodeYuvFormat {
+        let mut img = Default::default();
+        let fmt = if self.i444 {
+            vpx_img_fmt::VPX_IMG_FMT_I444
+        } else {
+            vpx_img_fmt::VPX_IMG_FMT_I420
+        };
+        unsafe {
+            vpx_img_wrap(
+                &mut img,
+                fmt,
+                self.width as _,
+                self.height as _,
+                crate::STRIDE_ALIGN as _,
+                0x1 as _,
+            );
+        }
+        let pixfmt = if self.i444 {
+            Pixfmt::I444
+        } else {
+            Pixfmt::I420
+        };
+        crate::EncodeYuvFormat {
+            pixfmt,
+            w: img.w as _,
+            h: img.h as _,
+            stride: img.stride.map(|s| s as usize).to_vec(),
+            u: img.planes[1] as usize - img.planes[0] as usize,
+            v: img.planes[2] as usize - img.planes[0] as usize,
+        }
     }
 
     fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
@@ -219,14 +256,20 @@ impl EncoderApi for VpxEncoder {
 
 impl VpxEncoder {
     pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
-        if 2 * data.len() < 3 * self.width * self.height {
+        let bpp = if self.i444 { 24 } else { 12 };
+        if data.len() < self.width * self.height * bpp / 8 {
             return Err(Error::FailedCall("len not enough".to_string()));
         }
+        let fmt = if self.i444 {
+            vpx_img_fmt::VPX_IMG_FMT_I444
+        } else {
+            vpx_img_fmt::VPX_IMG_FMT_I420
+        };
 
         let mut image = Default::default();
         call_vpx_ptr!(vpx_img_wrap(
             &mut image,
-            vpx_img_fmt::VPX_IMG_FMT_I420,
+            fmt,
             self.width as _,
             self.height as _,
             stride_align as _,
@@ -533,6 +576,13 @@ impl GoogleImage for Image {
     fn planes(&self) -> Vec<*mut u8> {
         self.inner().planes.iter().map(|p| *p as *mut u8).collect()
     }
+
+    fn chroma(&self) -> Chroma {
+        match self.inner().fmt {
+            vpx_img_fmt::VPX_IMG_FMT_I444 => Chroma::I444,
+            _ => Chroma::I420,
+        }
+    }
 }
 
 impl Drop for Image {
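A small sketch of the profile choice made in `VpxEncoder::new` above: libvpx expects `g_profile = 1` for 8-bit 4:4:4 VP9, while VP8 and 4:2:0 VP9 stay on profile 0. The enum is a local stand-in for the crate's `VpxVideoCodecId`.

```rust
#[derive(Clone, Copy, PartialEq)]
enum VpxCodec { VP8, VP9 }

fn vpx_profile(codec: VpxCodec, i444: bool) -> u32 {
    if i444 && codec == VpxCodec::VP9 {
        1 // VP9 profile 1: 8-bit with 4:4:4 / 4:2:2 chroma sampling
    } else {
        0 // VP9 profile 0 / VP8: 8-bit 4:2:0
    }
}

fn main() {
    assert_eq!(vpx_profile(VpxCodec::VP9, true), 1);
    assert_eq!(vpx_profile(VpxCodec::VP9, false), 0);
    assert_eq!(vpx_profile(VpxCodec::VP8, true), 0); // VP8 cannot carry 4:4:4
    println!("ok");
}
```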
@@ -2,7 +2,7 @@ use crate::common::{x11::Frame, TraitCapturer};
 use crate::wayland::{capturable::*, *};
 use std::{io, sync::RwLock, time::Duration};
 
-pub struct Capturer(Display, Box<dyn Recorder>, bool, Vec<u8>);
+pub struct Capturer(Display, Box<dyn Recorder>, Vec<u8>);
 
 static mut IS_CURSOR_EMBEDDED: Option<bool> = None;
 
@@ -45,9 +45,9 @@ fn map_err<E: ToString>(err: E) -> io::Error {
 }
 
 impl Capturer {
-    pub fn new(display: Display, yuv: bool) -> io::Result<Capturer> {
+    pub fn new(display: Display) -> io::Result<Capturer> {
         let r = display.0.recorder(false).map_err(map_err)?;
-        Ok(Capturer(display, r, yuv, Default::default()))
+        Ok(Capturer(display, r, Default::default()))
     }
 
     pub fn width(&self) -> usize {
@@ -60,24 +60,10 @@ impl Capturer {
 }
 
 impl TraitCapturer for Capturer {
-    fn set_use_yuv(&mut self, use_yuv: bool) {
-        self.2 = use_yuv;
-    }
-
     fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
         match self.1.capture(timeout.as_millis() as _).map_err(map_err)? {
-            PixelProvider::BGR0(w, h, x) => Ok(Frame(if self.2 {
-                crate::common::bgra_to_i420(w as _, h as _, &x, &mut self.3);
-                &self.3[..]
-            } else {
-                x
-            })),
-            PixelProvider::RGB0(w, h, x) => Ok(Frame(if self.2 {
-                crate::common::rgba_to_i420(w as _, h as _, &x, &mut self.3);
-                &self.3[..]
-            } else {
-                x
-            })),
+            PixelProvider::BGR0(_w, h, x) => Ok(Frame::new(x, crate::Pixfmt::BGRA, h)),
+            PixelProvider::RGB0(_w, h, x) => Ok(Frame::new(x, crate::Pixfmt::RGBA, h)),
             PixelProvider::NONE => Err(std::io::ErrorKind::WouldBlock.into()),
             _ => Err(map_err("Invalid data")),
         }
@ -1,13 +1,13 @@
use crate::{common::TraitCapturer, x11};
use crate::{common::TraitCapturer, x11, TraitFrame, Pixfmt};
use std::{io, ops, time::Duration};
use std::{io, time::Duration};

pub struct Capturer(x11::Capturer);

pub const IS_CURSOR_EMBEDDED: bool = false;

impl Capturer {
    pub fn new(display: Display, yuv: bool) -> io::Result<Capturer> {
    pub fn new(display: Display) -> io::Result<Capturer> {
        x11::Capturer::new(display.0, yuv).map(Capturer)
        x11::Capturer::new(display.0).map(Capturer)
    }

    pub fn width(&self) -> usize {
@ -20,21 +20,37 @@ impl Capturer {
}

impl TraitCapturer for Capturer {
    fn set_use_yuv(&mut self, use_yuv: bool) {
        self.0.set_use_yuv(use_yuv);
    }

    fn frame<'a>(&'a mut self, _timeout: Duration) -> io::Result<Frame<'a>> {
        Ok(Frame(self.0.frame()?))
        Ok(self.0.frame()?)
    }
}

pub struct Frame<'a>(pub &'a [u8]);

impl<'a> ops::Deref for Frame<'a> {
    type Target = [u8];
    fn deref(&self) -> &[u8] {
        self.0
    }
}

pub struct Frame<'a>{
    pub data: &'a [u8],
    pub pixfmt:Pixfmt,
    pub stride:Vec<usize>,
}

impl<'a> Frame<'a> {
    pub fn new(data:&'a [u8], pixfmt:Pixfmt, h:usize) -> Self {
        let stride = data.len() / h;
        let mut v = Vec::new();
        v.push(stride);
        Self { data, pixfmt, stride: v }
    }
}

impl<'a> TraitFrame for Frame<'a> {
    fn data(&self) -> &[u8] {
        self.data
    }

    fn stride(&self) -> Vec<usize> {
        self.stride.clone()
    }

    fn pixfmt(&self) -> crate::Pixfmt {
        self.pixfmt
    }
}
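
The new Frame wrapper derives its single-plane stride from the buffer length and the height, which assumes rows are tightly packed end to end. A small sketch of that relationship, independent of the patch (the helper name is illustrative only):

// Illustrative only: for a packed single-plane BGRA buffer,
// the stride (bytes per row) is the total length divided by the height.
fn packed_stride(data_len: usize, height: usize) -> usize {
    data_len / height
}

fn main() {
    let (w, h) = (1920usize, 1080usize);
    let data_len = w * h * 4; // 4 bytes per BGRA pixel
    assert_eq!(packed_stride(data_len, h), w * 4);
}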
@ -160,7 +160,7 @@ impl CapturerGDI {
            stride,
            self.width,
            self.height,
            180,
            crate::RotationMode::kRotate180,
        );
        Ok(())
    }
@ -245,9 +245,6 @@ pub struct CapturerMag {
    rect: RECT,
    width: usize,
    height: usize,
    use_yuv: bool,
    data: Vec<u8>,
}

impl Drop for CapturerMag {
@ -262,12 +259,7 @@ impl CapturerMag {
        MagInterface::new().is_ok()
    }

    pub(crate) fn new(
    pub(crate) fn new(origin: (i32, i32), width: usize, height: usize) -> Result<Self> {
        origin: (i32, i32),
        width: usize,
        height: usize,
        use_yuv: bool,
    ) -> Result<Self> {
        unsafe {
            let x = GetSystemMetrics(SM_XVIRTUALSCREEN);
            let y = GetSystemMetrics(SM_YVIRTUALSCREEN);
@ -311,8 +303,6 @@ impl CapturerMag {
            },
            width,
            height,
            use_yuv,
            data: Vec::new(),
        };

        unsafe {
@ -437,10 +427,6 @@ impl CapturerMag {
        Ok(s)
    }

    pub(crate) fn set_use_yuv(&mut self, use_yuv: bool) {
        self.use_yuv = use_yuv;
    }

    pub(crate) fn exclude(&mut self, cls: &str, name: &str) -> Result<bool> {
        let name_c = CString::new(name)?;
        unsafe {
@ -579,22 +565,9 @@ impl CapturerMag {
            ));
        }

        if self.use_yuv {
            self.data.resize(lock.1.len(), 0);
            unsafe {
                std::ptr::copy_nonoverlapping(&mut lock.1[0], &mut self.data[0], self.data.len());
            }
            crate::common::bgra_to_i420(
                self.width as usize,
                self.height as usize,
                &self.data,
                data,
            );
        } else {
            data.resize(lock.1.len(), 0);
            unsafe {
                std::ptr::copy_nonoverlapping(&mut lock.1[0], &mut data[0], data.len());
            }
        }
        data.resize(lock.1.len(), 0);
        unsafe {
            std::ptr::copy_nonoverlapping(&mut lock.1[0], &mut data[0], data.len());
        }

        Ok(())
@ -651,7 +624,7 @@ mod tests {
    use super::*;
    #[test]
    fn test() {
        let mut capture_mag = CapturerMag::new((0, 0), 1920, 1080, false).unwrap();
        let mut capture_mag = CapturerMag::new((0, 0), 1920, 1080).unwrap();
        capture_mag.exclude("", "RustDeskPrivacyWindow").unwrap();
        std::thread::sleep(std::time::Duration::from_millis(1000 * 10));
        let mut data = Vec::new();
@ -20,6 +20,8 @@ use winapi::{
    },
};

use crate::RotationMode::*;

pub struct ComPtr<T>(*mut T);
impl<T> ComPtr<T> {
    fn is_null(&self) -> bool {
@ -45,8 +47,6 @@ pub struct Capturer {
    surface: ComPtr<IDXGISurface>,
    width: usize,
    height: usize,
    use_yuv: bool,
    yuv: Vec<u8>,
    rotated: Vec<u8>,
    gdi_capturer: Option<CapturerGDI>,
    gdi_buffer: Vec<u8>,
@ -54,7 +54,7 @@ pub struct Capturer {
}

impl Capturer {
    pub fn new(display: Display, use_yuv: bool) -> io::Result<Capturer> {
    pub fn new(display: Display) -> io::Result<Capturer> {
        let mut device = ptr::null_mut();
        let mut context = ptr::null_mut();
        let mut duplication = ptr::null_mut();
@ -148,8 +148,6 @@ impl Capturer {
            width: display.width() as usize,
            height: display.height() as usize,
            display,
            use_yuv,
            yuv: Vec::new(),
            rotated: Vec::new(),
            gdi_capturer,
            gdi_buffer: Vec::new(),
@ -157,10 +155,6 @@ impl Capturer {
        })
    }

    pub fn set_use_yuv(&mut self, use_yuv: bool) {
        self.use_yuv = use_yuv;
    }

    pub fn is_gdi(&self) -> bool {
        self.gdi_capturer.is_some()
    }
@ -259,10 +253,10 @@ impl Capturer {
        self.unmap();
        let r = self.load_frame(timeout)?;
        let rotate = match self.display.rotation() {
            DXGI_MODE_ROTATION_IDENTITY | DXGI_MODE_ROTATION_UNSPECIFIED => 0,
            DXGI_MODE_ROTATION_IDENTITY | DXGI_MODE_ROTATION_UNSPECIFIED => kRotate0,
            DXGI_MODE_ROTATION_ROTATE90 => 90,
            DXGI_MODE_ROTATION_ROTATE90 => kRotate90,
            DXGI_MODE_ROTATION_ROTATE180 => 180,
            DXGI_MODE_ROTATION_ROTATE180 => kRotate180,
            DXGI_MODE_ROTATION_ROTATE270 => 270,
            DXGI_MODE_ROTATION_ROTATE270 => kRotate270,
            _ => {
                return Err(io::Error::new(
                    io::ErrorKind::Other,
@ -270,7 +264,7 @@ impl Capturer {
                ));
            }
        };
        if rotate == 0 {
        if rotate == kRotate0 {
            slice::from_raw_parts(r.0, r.1 as usize * self.height)
        } else {
            self.rotated.resize(self.width * self.height * 4, 0);
@ -279,12 +273,12 @@ impl Capturer {
                r.1,
                self.rotated.as_mut_ptr(),
                4 * self.width as i32,
                if rotate == 180 {
                if rotate == kRotate180 {
                    self.width
                } else {
                    self.height
                } as _,
                if rotate != 180 {
                if rotate != kRotate180 {
                    self.width
                } else {
                    self.height
@ -295,19 +289,7 @@ impl Capturer {
                }
            }
        };
        Ok({
            if self.use_yuv {
                crate::common::bgra_to_i420(
                    self.width as usize,
                    self.height as usize,
                    &result,
                    &mut self.yuv,
                );
                &self.yuv[..]
            } else {
                result
            }
        })
        Ok(result)
    }
}
@ -5,8 +5,8 @@ use super::ffi::*;
pub struct Frame {
    surface: IOSurfaceRef,
    inner: &'static [u8],
    i420: *mut u8,
    bgra: Vec<u8>,
    i420_len: usize,
    bgra_stride: usize,
}

impl Frame {
@ -24,8 +24,8 @@ impl Frame {
        Frame {
            surface,
            inner,
            i420: ptr::null_mut(),
            bgra: Vec::new(),
            i420_len: 0,
            bgra_stride: 0,
        }
    }

@ -34,23 +34,20 @@ impl Frame {
        self.inner
    }

    pub fn nv12_to_i420<'a>(&'a mut self, w: usize, h: usize, i420: &'a mut Vec<u8>) {
        unsafe {
            let plane0 = IOSurfaceGetBaseAddressOfPlane(self.surface, 0);
            let stride0 = IOSurfaceGetBytesPerRowOfPlane(self.surface, 0);
            let plane1 = IOSurfaceGetBaseAddressOfPlane(self.surface, 1);
            let stride1 = IOSurfaceGetBytesPerRowOfPlane(self.surface, 1);
            crate::common::nv12_to_i420(
                plane0 as _,
                stride0 as _,
                plane1 as _,
                stride1 as _,
                w,
                h,
                i420,
            );
            self.i420 = i420.as_mut_ptr() as _;
            self.i420_len = i420.len();
        }
    }

    pub fn stride(&self) -> usize {
        self.bgra_stride
    }

    pub fn surface_to_bgra<'a>(&'a mut self, h: usize) {
        unsafe {
            let plane0 = IOSurfaceGetBaseAddressOfPlane(self.surface, 0);
            self.bgra_stride = IOSurfaceGetBytesPerRowOfPlane(self.surface, 0);
            self.bgra.resize(self.bgra_stride * h, 0);
            std::ptr::copy_nonoverlapping(
                plane0 as _,
                self.bgra.as_mut_ptr(),
                self.bgra_stride * h,
            );
        }
    }
}
@ -58,14 +55,7 @@ impl Frame {
impl ops::Deref for Frame {
    type Target = [u8];
    fn deref<'a>(&'a self) -> &'a [u8] {
        if self.i420.is_null() {
            self.inner
        } else {
            unsafe {
                let inner = slice::from_raw_parts(self.i420 as *const u8, self.i420_len);
                inner
            }
        }
        &self.bgra
    }
}
@ -2,6 +2,8 @@ use std::{io, ptr, slice};

use hbb_common::libc;

use crate::Frame;

use super::ffi::*;
use super::Display;

@ -12,13 +14,11 @@ pub struct Capturer {
    buffer: *const u8,

    size: usize,
    use_yuv: bool,
    yuv: Vec<u8>,
    saved_raw_data: Vec<u8>, // for faster compare and copy
}

impl Capturer {
    pub fn new(display: Display, use_yuv: bool) -> io::Result<Capturer> {
    pub fn new(display: Display) -> io::Result<Capturer> {
        // Calculate dimensions.

        let pixel_width = 4;
@ -67,17 +67,11 @@ impl Capturer {
            xcbid,
            buffer,
            size,
            use_yuv,
            yuv: Vec::new(),
            saved_raw_data: Vec::new(),
        };
        Ok(c)
    }

    pub fn set_use_yuv(&mut self, use_yuv: bool) {
        self.use_yuv = use_yuv;
    }

    pub fn display(&self) -> &Display {
        &self.display
    }
@ -103,16 +97,13 @@ impl Capturer {
        }
    }

    pub fn frame<'b>(&'b mut self) -> std::io::Result<&'b [u8]> {
    pub fn frame<'b>(&'b mut self) -> std::io::Result<Frame> {
        self.get_image();
        let result = unsafe { slice::from_raw_parts(self.buffer, self.size) };
        crate::would_block_if_equal(&mut self.saved_raw_data, result)?;
        Ok(if self.use_yuv {
            crate::common::bgra_to_i420(self.display.w(), self.display.h(), &result, &mut self.yuv);
            &self.yuv[..]
        } else {
            result
        })
        Ok(
            Frame::new(result, crate::Pixfmt::BGRA, self.display.h())
        )
    }
}
@ -1017,10 +1017,16 @@ impl VideoHandler {

    /// Handle a new video frame.
    #[inline]
    pub fn handle_frame(&mut self, vf: VideoFrame) -> ResultType<bool> {
    pub fn handle_frame(
        &mut self,
        vf: VideoFrame,
        chroma: &mut Option<Chroma>,
    ) -> ResultType<bool> {
        match &vf.union {
            Some(frame) => {
                let res = self.decoder.handle_video_frame(frame, &mut self.rgb);
                let res = self
                    .decoder
                    .handle_video_frame(frame, &mut self.rgb, chroma);
                if self.record {
                    self.recorder
                        .lock()
@ -1855,6 +1861,7 @@ pub fn start_video_audio_threads<F, T>(
    MediaSender,
    Arc<RwLock<HashMap<usize, ArrayQueue<VideoFrame>>>>,
    Arc<RwLock<HashMap<usize, usize>>>,
    Arc<RwLock<Option<Chroma>>>,
)
where
    F: 'static + FnMut(usize, &mut scrap::ImageRgb) + Send,
@ -1866,6 +1873,9 @@ where
    let mut video_callback = video_callback;
    let fps_map = Arc::new(RwLock::new(HashMap::new()));
    let decode_fps_map = fps_map.clone();
    let chroma = Arc::new(RwLock::new(None));
    let chroma_cloned = chroma.clone();
    let mut last_chroma = None;

    std::thread::spawn(move || {
        #[cfg(windows)]
@ -1911,10 +1921,17 @@ where
            }
        }
        if let Some(handler_controller) = handler_controller_map.get_mut(display) {
            match handler_controller.handler.handle_frame(vf) {
            let mut tmp_chroma = None;
            match handler_controller.handler.handle_frame(vf, &mut tmp_chroma) {
                Ok(true) => {
                    video_callback(display, &mut handler_controller.handler.rgb);

                    // chroma
                    if tmp_chroma.is_some() && last_chroma != tmp_chroma {
                        last_chroma = tmp_chroma;
                        *chroma.write().unwrap() = tmp_chroma;
                    }

                    // fps calculation
                    // The first frame will be very slow
                    if handler_controller.skip_beginning < 5 {
@ -1992,6 +2009,7 @@ where
        audio_sender,
        video_queue_map_cloned,
        decode_fps_map,
        chroma_cloned,
    );
}
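
The decoded chroma is shared with the status reporter through an Arc<RwLock<Option<Chroma>>>: the decode thread publishes it only when it changes, and the session side reads it when it refreshes the quality status. A minimal standalone sketch of that pattern, with simplified stand-in types rather than the patch's real ones:

use std::sync::{Arc, RwLock};

// Simplified stand-in for the real scrap::Chroma enum.
#[derive(Clone, Copy, PartialEq)]
enum Chroma { I420, I444 }

fn main() {
    let chroma: Arc<RwLock<Option<Chroma>>> = Arc::new(RwLock::new(None));
    let chroma_writer = chroma.clone();

    // Decode side: publish only when the value actually changes.
    let mut last = None;
    let decoded = Some(Chroma::I444);
    if decoded.is_some() && last != decoded {
        last = decoded;
        *chroma_writer.write().unwrap() = decoded;
    }

    // Status side: read and map to a display label.
    let label = match *chroma.read().unwrap() {
        Some(Chroma::I444) => "4:4:4",
        Some(Chroma::I420) => "4:2:0",
        None => "-",
    };
    assert_eq!(label, "4:4:4");
    let _ = last;
}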
@ -1,9 +1,9 @@
use std::collections::HashMap;
use hbb_common::{
    get_time,
    message_proto::{Message, VoiceCallRequest, VoiceCallResponse},
};
use scrap::CodecFormat;
use std::collections::HashMap;

#[derive(Debug, Default)]
pub struct QualityStatus {
@ -12,6 +12,7 @@ pub struct QualityStatus {
    pub delay: Option<i32>,
    pub target_bitrate: Option<i32>,
    pub codec_format: Option<CodecFormat>,
    pub chroma: Option<String>,
}

#[inline]
@ -74,6 +74,7 @@ pub struct Remote<T: InvokeUiSession> {
    elevation_requested: bool,
    fps_control_map: HashMap<usize, FpsControl>,
    decode_fps_map: Arc<RwLock<HashMap<usize, usize>>>,
    chroma: Arc<RwLock<Option<Chroma>>>,
}

impl<T: InvokeUiSession> Remote<T> {
@ -86,6 +87,7 @@ impl<T: InvokeUiSession> Remote<T> {
        sender: mpsc::UnboundedSender<Data>,
        frame_count_map: Arc<RwLock<HashMap<usize, usize>>>,
        decode_fps: Arc<RwLock<HashMap<usize, usize>>>,
        chroma: Arc<RwLock<Option<Chroma>>>,
    ) -> Self {
        Self {
            handler,
@ -111,6 +113,7 @@ impl<T: InvokeUiSession> Remote<T> {
            elevation_requested: false,
            fps_control_map: Default::default(),
            decode_fps_map: decode_fps,
            chroma,
        }
    }

@ -247,9 +250,17 @@ impl<T: InvokeUiSession> Remote<T> {
                // Correcting the inaccuracy of status_timer
                (k.clone(), (*v as i32) * 1000 / elapsed as i32)
            }).collect::<HashMap<usize, i32>>();
            let chroma = self.chroma.read().unwrap().clone();
            let chroma = match chroma {
                Some(Chroma::I444) => "4:4:4",
                Some(Chroma::I420) => "4:2:0",
                None => "-",
            };
            let chroma = Some(chroma.to_string());
            self.handler.update_quality_status(QualityStatus {
                speed: Some(speed),
                fps,
                chroma,
                ..Default::default()
            });
        }
@ -471,6 +471,7 @@ impl InvokeUiSession for FlutterHandler {
                "codec_format",
                &status.codec_format.map_or(NULL, |it| it.to_string()),
            ),
            ("chroma", &status.chroma.map_or(NULL, |it| it.to_string())),
        ],
    );
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", "列表"),
        ("Virtual display", "虚拟显示器"),
        ("Plug out all", "拔出所有"),
        ("True color(4:4:4)", "真彩模式(4:4:4)"),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", "Seznam"),
        ("Virtual display", "Virtuální obrazovka"),
        ("Plug out all", "Odpojit všechny"),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", "Liste"),
        ("Virtual display", "Virtueller Bildschirm"),
        ("Plug out all", "Alle ausschalten"),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -565,13 +565,13 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("Open in new window", "Abrir en una nueva ventana"),
        ("Show displays as individual windows", "Mostrar pantallas como ventanas individuales"),
        ("Use all my displays for the remote session", "Usar todas mis pantallas para la sesión remota"),
        ("selinux_tip", "SELinux está activado en tu dispositivo, lo que puede hacer que RustDesk no se ejecute correctamente como lado controlado."),
        ("selinux_tip", ""),
        ("Change view", ""),
        ("Big tiles", ""),
        ("Small tiles", ""),
        ("List", ""),
        ("selinux_tip", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", "Tampilan virtual"),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", "Elenco"),
        ("Virtual display", "Scehrmo virtuale"),
        ("Plug out all", "Scollega tutto"),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", "Saraksts"),
        ("Virtual display", "Virtuālais displejs"),
        ("Plug out all", "Atvienot visu"),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", "Lista"),
        ("Virtual display", "Witualne ekrany"),
        ("Plug out all", "Odłącz wszystko"),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", "Список"),
        ("Virtual display", "Виртуальный дисплей"),
        ("Plug out all", "Отключить все"),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -572,5 +572,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("List", ""),
        ("Virtual display", ""),
        ("Plug out all", ""),
        ("True color(4:4:4)", ""),
    ].iter().cloned().collect();
}
@ -126,7 +126,7 @@ pub fn is_can_screen_recording(prompt: bool) -> bool {
    if !can_record_screen && prompt {
        use scrap::{Capturer, Display};
        if let Ok(d) = Display::primary() {
            Capturer::new(d, true).ok();
            Capturer::new(d).ok();
        }
    }
    can_record_screen
@ -1214,7 +1214,7 @@ impl Connection {

    fn on_remote_authorized() {
        use std::sync::Once;
        static ONCE: Once = Once::new();
        static _ONCE: Once = Once::new();
        #[cfg(any(target_os = "windows", target_os = "linux"))]
        if !Config::get_option("allow-remove-wallpaper").is_empty() {
            // multi connections set once
@ -1223,7 +1223,7 @@ impl Connection {
            match crate::platform::WallPaperRemover::new() {
                Ok(remover) => {
                    *wallpaper = Some(remover);
                    ONCE.call_once(|| {
                    _ONCE.call_once(|| {
                        shutdown_hooks::add_shutdown_hook(shutdown_hook);
                    });
                }
@ -8,7 +8,7 @@ use hbb_common::{
    tokio::{self, sync::mpsc},
    ResultType,
};
use scrap::{Capturer, Frame, TraitCapturer};
use scrap::{Capturer, Frame, TraitCapturer, TraitFrame};
use shared_memory::*;
use std::{
    mem::size_of,
@ -300,7 +300,6 @@ pub mod server {
    fn run_capture(shmem: Arc<SharedMemory>) {
        let mut c = None;
        let mut last_current_display = usize::MAX;
        let mut last_use_yuv = false;
        let mut last_timeout_ms: i32 = 33;
        let mut spf = Duration::from_millis(last_timeout_ms as _);
        let mut first_frame_captured = false;
@ -316,14 +315,7 @@ pub mod server {
            let para = para_ptr as *const CapturerPara;
            let recreate = (*para).recreate;
            let current_display = (*para).current_display;
            let use_yuv = (*para).use_yuv;
            let use_yuv_set = (*para).use_yuv_set;
            let timeout_ms = (*para).timeout_ms;
            if !use_yuv_set {
                c = None;
                std::thread::sleep(spf);
                continue;
            }
            if c.is_none() {
                let Ok(mut displays) = display_service::try_get_displays() else {
                    log::error!("Failed to get displays");
@ -338,11 +330,10 @@ pub mod server {
                let display = displays.remove(current_display);
                display_width = display.width();
                display_height = display.height();
                match Capturer::new(display, use_yuv) {
                match Capturer::new(display) {
                    Ok(mut v) => {
                        c = {
                            last_current_display = current_display;
                            last_use_yuv = use_yuv;
                            first_frame_captured = false;
                            if dxgi_failed_times > MAX_DXGI_FAIL_TIME {
                                dxgi_failed_times = 0;
@ -353,8 +344,6 @@ pub mod server {
                    CapturerPara {
                        recreate: false,
                        current_display: (*para).current_display,
                        use_yuv: (*para).use_yuv,
                        use_yuv_set: (*para).use_yuv_set,
                        timeout_ms: (*para).timeout_ms,
                    },
                );
@ -368,16 +357,11 @@ pub mod server {
                    }
                }
            } else {
                if recreate
                if recreate || current_display != last_current_display {
                    || current_display != last_current_display
                    || use_yuv != last_use_yuv
                {
                    log::info!(
                        "create capturer, display:{}->{}, use_yuv:{}->{}",
                        "create capturer, display:{}->{}",
                        last_current_display,
                        current_display,
                        last_use_yuv,
                        use_yuv
                    );
                    c = None;
                    continue;
@ -401,12 +385,12 @@ pub mod server {
                utils::set_frame_info(
                    &shmem,
                    FrameInfo {
                        length: f.0.len(),
                        length: f.data().len(),
                        width: display_width,
                        height: display_height,
                    },
                );
                shmem.write(ADDR_CAPTURE_FRAME, f.0);
                shmem.write(ADDR_CAPTURE_FRAME, f.data());
                shmem.write(ADDR_CAPTURE_WOULDBLOCK, &utils::i32_to_vec(TRUE));
                utils::increase_counter(shmem.as_ptr().add(ADDR_CAPTURE_FRAME_COUNTER));
                first_frame_captured = true;
@ -651,7 +635,7 @@ pub mod client {
    }

    impl CapturerPortable {
        pub fn new(current_display: usize, use_yuv: bool) -> Self
        pub fn new(current_display: usize) -> Self
        where
            Self: Sized,
        {
@ -665,8 +649,6 @@ pub mod client {
                CapturerPara {
                    recreate: true,
                    current_display,
                    use_yuv,
                    use_yuv_set: false,
                    timeout_ms: 33,
                },
            );
@ -684,26 +666,6 @@ pub mod client {
    }

    impl TraitCapturer for CapturerPortable {
        fn set_use_yuv(&mut self, use_yuv: bool) {
            let mut option = SHMEM.lock().unwrap();
            if let Some(shmem) = option.as_mut() {
                unsafe {
                    let para_ptr = shmem.as_ptr().add(ADDR_CAPTURER_PARA);
                    let para = para_ptr as *const CapturerPara;
                    utils::set_para(
                        shmem,
                        CapturerPara {
                            recreate: (*para).recreate,
                            current_display: (*para).current_display,
                            use_yuv,
                            use_yuv_set: true,
                            timeout_ms: (*para).timeout_ms,
                        },
                    );
                }
            }
        }

        fn frame<'a>(&'a mut self, timeout: Duration) -> std::io::Result<Frame<'a>> {
            let mut lock = SHMEM.lock().unwrap();
            let shmem = lock.as_mut().ok_or(std::io::Error::new(
@ -720,8 +682,6 @@ pub mod client {
                CapturerPara {
                    recreate: (*para).recreate,
                    current_display: (*para).current_display,
                    use_yuv: (*para).use_yuv,
                    use_yuv_set: (*para).use_yuv_set,
                    timeout_ms: timeout.as_millis() as _,
                },
            );
@ -744,7 +704,7 @@ pub mod client {
            }
            let frame_ptr = base.add(ADDR_CAPTURE_FRAME);
            let data = slice::from_raw_parts(frame_ptr, (*frame_info).length);
            Ok(Frame(data))
            Ok(Frame::new(data, self.height))
        } else {
            let ptr = base.add(ADDR_CAPTURE_WOULDBLOCK);
            let wouldblock = utils::ptr_to_i32(ptr);
@ -910,7 +870,6 @@ pub mod client {
    pub fn create_capturer(
        current_display: usize,
        display: scrap::Display,
        use_yuv: bool,
        portable_service_running: bool,
    ) -> ResultType<Box<dyn TraitCapturer>> {
        if portable_service_running != RUNNING.lock().unwrap().clone() {
@ -919,7 +878,7 @@ pub mod client {
        if portable_service_running {
            log::info!("Create shared memory capturer");
            if current_display == *display_service::PRIMARY_DISPLAY_IDX {
                return Ok(Box::new(CapturerPortable::new(current_display, use_yuv)));
                return Ok(Box::new(CapturerPortable::new(current_display)));
            } else {
                bail!(
                    "Ignore capture display index: {}, the primary display index is: {}",
@ -930,7 +889,7 @@ pub mod client {
        } else {
            log::debug!("Create capturer dxgi|gdi");
            return Ok(Box::new(
                Capturer::new(display, use_yuv).with_context(|| "Failed to create capturer")?,
                Capturer::new(display).with_context(|| "Failed to create capturer")?,
            ));
        }
    }
@ -981,8 +940,6 @@ pub mod client {
    pub struct CapturerPara {
        recreate: bool,
        current_display: usize,
        use_yuv: bool,
        use_yuv_set: bool,
        timeout_ms: i32,
    }
@ -42,9 +42,10 @@ use scrap::Capturer;
|
|||||||
use scrap::{
|
use scrap::{
|
||||||
aom::AomEncoderConfig,
|
aom::AomEncoderConfig,
|
||||||
codec::{Encoder, EncoderCfg, HwEncoderConfig, Quality},
|
codec::{Encoder, EncoderCfg, HwEncoderConfig, Quality},
|
||||||
|
convert_to_yuv,
|
||||||
record::{Recorder, RecorderContext},
|
record::{Recorder, RecorderContext},
|
||||||
vpxcodec::{VpxEncoderConfig, VpxVideoCodecId},
|
vpxcodec::{VpxEncoderConfig, VpxVideoCodecId},
|
||||||
CodecName, Display, TraitCapturer,
|
CodecName, Display, Frame, TraitCapturer, TraitFrame,
|
||||||
};
|
};
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
use std::sync::Once;
|
use std::sync::Once;
|
||||||
@ -171,7 +172,6 @@ pub fn new(idx: usize) -> GenericService {
|
|||||||
fn create_capturer(
|
fn create_capturer(
|
||||||
privacy_mode_id: i32,
|
privacy_mode_id: i32,
|
||||||
display: Display,
|
display: Display,
|
||||||
use_yuv: bool,
|
|
||||||
_current: usize,
|
_current: usize,
|
||||||
_portable_service_running: bool,
|
_portable_service_running: bool,
|
||||||
) -> ResultType<Box<dyn TraitCapturer>> {
|
) -> ResultType<Box<dyn TraitCapturer>> {
|
||||||
@ -182,12 +182,7 @@ fn create_capturer(
|
|||||||
if privacy_mode_id > 0 {
|
if privacy_mode_id > 0 {
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
{
|
{
|
||||||
match scrap::CapturerMag::new(
|
match scrap::CapturerMag::new(display.origin(), display.width(), display.height()) {
|
||||||
display.origin(),
|
|
||||||
display.width(),
|
|
||||||
display.height(),
|
|
||||||
use_yuv,
|
|
||||||
) {
|
|
||||||
Ok(mut c1) => {
|
Ok(mut c1) => {
|
||||||
let mut ok = false;
|
let mut ok = false;
|
||||||
let check_begin = Instant::now();
|
let check_begin = Instant::now();
|
||||||
@ -236,12 +231,11 @@ fn create_capturer(
|
|||||||
return crate::portable_service::client::create_capturer(
|
return crate::portable_service::client::create_capturer(
|
||||||
_current,
|
_current,
|
||||||
display,
|
display,
|
||||||
use_yuv,
|
|
||||||
_portable_service_running,
|
_portable_service_running,
|
||||||
);
|
);
|
||||||
#[cfg(not(windows))]
|
#[cfg(not(windows))]
|
||||||
return Ok(Box::new(
|
return Ok(Box::new(
|
||||||
Capturer::new(display, use_yuv).with_context(|| "Failed to create capturer")?,
|
Capturer::new(display).with_context(|| "Failed to create capturer")?,
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -265,7 +259,7 @@ pub fn test_create_capturer(
|
|||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
let display = displays.remove(display_idx);
|
let display = displays.remove(display_idx);
|
||||||
match create_capturer(privacy_mode_id, display, true, display_idx, false) {
|
match create_capturer(privacy_mode_id, display, display_idx, false) {
|
||||||
Ok(_) => return "".to_owned(),
|
Ok(_) => return "".to_owned(),
|
||||||
Err(e) => e,
|
Err(e) => e,
|
||||||
}
|
}
|
||||||
@ -320,11 +314,7 @@ impl DerefMut for CapturerInfo {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_capturer(
|
fn get_capturer(current: usize, portable_service_running: bool) -> ResultType<CapturerInfo> {
|
||||||
current: usize,
|
|
||||||
use_yuv: bool,
|
|
||||||
portable_service_running: bool,
|
|
||||||
) -> ResultType<CapturerInfo> {
|
|
||||||
#[cfg(target_os = "linux")]
|
#[cfg(target_os = "linux")]
|
||||||
{
|
{
|
||||||
if !is_x11() {
|
if !is_x11() {
|
||||||
@ -382,7 +372,6 @@ fn get_capturer(
|
|||||||
let capturer = create_capturer(
|
let capturer = create_capturer(
|
||||||
capturer_privacy_mode_id,
|
capturer_privacy_mode_id,
|
||||||
display,
|
display,
|
||||||
use_yuv,
|
|
||||||
current,
|
current,
|
||||||
portable_service_running,
|
portable_service_running,
|
||||||
)?;
|
)?;
|
||||||
@ -424,7 +413,7 @@ fn run(vs: VideoService) -> ResultType<()> {
|
|||||||
|
|
||||||
let display_idx = vs.idx;
|
let display_idx = vs.idx;
|
||||||
let sp = vs.sp;
|
let sp = vs.sp;
|
||||||
let mut c = get_capturer(display_idx, true, last_portable_service_running)?;
|
let mut c = get_capturer(display_idx, last_portable_service_running)?;
|
||||||
|
|
||||||
let mut video_qos = VIDEO_QOS.lock().unwrap();
|
let mut video_qos = VIDEO_QOS.lock().unwrap();
|
||||||
video_qos.refresh(None);
|
video_qos.refresh(None);
|
||||||
@ -439,11 +428,11 @@ fn run(vs: VideoService) -> ResultType<()> {
|
|||||||
let encoder_cfg = get_encoder_config(&c, quality, last_recording);
|
let encoder_cfg = get_encoder_config(&c, quality, last_recording);
|
||||||
|
|
||||||
let mut encoder;
|
let mut encoder;
|
||||||
match Encoder::new(encoder_cfg) {
|
let use_i444 = Encoder::use_i444(&encoder_cfg);
|
||||||
|
match Encoder::new(encoder_cfg.clone(), use_i444) {
|
||||||
Ok(x) => encoder = x,
|
Ok(x) => encoder = x,
|
||||||
Err(err) => bail!("Failed to create encoder: {}", err),
|
Err(err) => bail!("Failed to create encoder: {}", err),
|
||||||
}
|
}
|
||||||
c.set_use_yuv(encoder.use_yuv());
|
|
||||||
VIDEO_QOS.lock().unwrap().store_bitrate(encoder.bitrate());
|
VIDEO_QOS.lock().unwrap().store_bitrate(encoder.bitrate());
|
||||||
|
|
||||||
if sp.is_option_true(OPTION_REFRESH) {
|
if sp.is_option_true(OPTION_REFRESH) {
|
||||||
@ -463,6 +452,8 @@ fn run(vs: VideoService) -> ResultType<()> {
|
|||||||
|
|
||||||
#[cfg(target_os = "linux")]
|
#[cfg(target_os = "linux")]
|
||||||
let mut would_block_count = 0u32;
|
let mut would_block_count = 0u32;
|
||||||
|
let mut yuv = Vec::new();
|
||||||
|
let mut mid_data = Vec::new();
|
||||||
|
|
||||||
while sp.ok() {
|
while sp.ok() {
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
@ -493,6 +484,9 @@ fn run(vs: VideoService) -> ResultType<()> {
|
|||||||
if last_portable_service_running != crate::portable_service::client::running() {
|
if last_portable_service_running != crate::portable_service::client::running() {
|
||||||
bail!("SWITCH");
|
bail!("SWITCH");
|
||||||
}
|
}
|
||||||
|
if Encoder::use_i444(&encoder_cfg) != use_i444 {
|
||||||
|
bail!("SWITCH");
|
||||||
|
}
|
||||||
check_privacy_mode_changed(&sp, c.privacy_mode_id)?;
|
check_privacy_mode_changed(&sp, c.privacy_mode_id)?;
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
{
|
{
|
||||||
@ -512,40 +506,23 @@ fn run(vs: VideoService) -> ResultType<()> {
|
|||||||
|
|
||||||
frame_controller.reset();
|
frame_controller.reset();
|
||||||
|
|
||||||
#[cfg(any(target_os = "android", target_os = "ios"))]
|
|
||||||
let res = match c.frame(spf) {
|
let res = match c.frame(spf) {
|
||||||
Ok(frame) => {
|
Ok(frame) => {
|
||||||
let time = now - start;
|
let time = now - start;
|
||||||
let ms = (time.as_secs() * 1000 + time.subsec_millis() as u64) as i64;
|
let ms = (time.as_secs() * 1000 + time.subsec_millis() as u64) as i64;
|
||||||
match frame {
|
if frame.data().len() != 0 {
|
||||||
scrap::Frame::RAW(data) => {
|
let send_conn_ids = handle_one_frame(
|
||||||
if data.len() != 0 {
|
display_idx,
|
||||||
let send_conn_ids = handle_one_frame(
|
&sp,
|
||||||
display_idx,
|
frame,
|
||||||
&sp,
|
&mut yuv,
|
||||||
data,
|
&mut mid_data,
|
||||||
ms,
|
ms,
|
||||||
&mut encoder,
|
&mut encoder,
|
||||||
recorder.clone(),
|
recorder.clone(),
|
||||||
)?;
|
)?;
|
||||||
frame_controller.set_send(now, send_conn_ids);
|
frame_controller.set_send(now, send_conn_ids);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
};
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
Err(err) => Err(err),
|
|
||||||
};
|
|
||||||
|
|
||||||
#[cfg(not(any(target_os = "android", target_os = "ios")))]
|
|
||||||
let res = match c.frame(spf) {
|
|
||||||
Ok(frame) => {
|
|
||||||
let time = now - start;
|
|
||||||
let ms = (time.as_secs() * 1000 + time.subsec_millis() as u64) as i64;
|
|
||||||
let send_conn_ids =
|
|
||||||
handle_one_frame(display_idx, &sp, &frame, ms, &mut encoder, recorder.clone())?;
|
|
||||||
frame_controller.set_send(now, send_conn_ids);
|
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
{
|
{
|
||||||
try_gdi = 0;
|
try_gdi = 0;
|
||||||
@@ -718,7 +695,9 @@ fn check_privacy_mode_changed(sp: &GenericService, privacy_mode_id: i32) -> Resu
 fn handle_one_frame(
     display: usize,
     sp: &GenericService,
-    frame: &[u8],
+    frame: Frame,
+    yuv: &mut Vec<u8>,
+    mid_data: &mut Vec<u8>,
     ms: i64,
     encoder: &mut Encoder,
     recorder: Arc<Mutex<Option<Recorder>>>,
@@ -732,7 +711,8 @@ fn handle_one_frame(
     })?;
 
     let mut send_conn_ids: HashSet<i32> = Default::default();
-    if let Ok(mut vf) = encoder.encode_to_message(frame, ms) {
+    convert_to_yuv(&frame, encoder.yuvfmt(), yuv, mid_data)?;
+    if let Ok(mut vf) = encoder.encode_to_message(yuv, ms) {
         vf.display = display as _;
         let mut msg = Message::new();
         msg.set_video_frame(vf);
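handle_one_frame now does the pixel-format conversion itself: convert_to_yuv fills a caller-owned yuv buffer (with mid_data as scratch) in whatever layout encoder.yuvfmt() asks for, and only that converted buffer reaches encode_to_message. Keeping both buffers outside the capture loop avoids reallocating them on every frame. Below is a condensed, self-contained sketch of that flow under stated assumptions; the encoder trait and the conversion stub are illustrative, not scrap's real API.

// Illustrative sketch: convert into reusable buffers first, then encode.
#[derive(Clone, Copy)]
enum YuvFormat {
    I420,
    I444,
}

trait ToyEncoder {
    fn yuvfmt(&self) -> YuvFormat;
    fn encode(&mut self, yuv: &[u8], ms: i64) -> Option<Vec<u8>>;
}

// Stand-in for convert_to_yuv: fills `yuv` with the converted frame and keeps
// `mid` around as scratch space that survives across frames.
fn convert_to_yuv_stub(frame: &[u8], _fmt: YuvFormat, yuv: &mut Vec<u8>, mid: &mut Vec<u8>) {
    mid.clear();
    yuv.clear();
    yuv.extend_from_slice(frame); // the real code does BGRA -> I420/I444 here
}

fn handle_one_frame_sketch(
    frame: &[u8],
    yuv: &mut Vec<u8>,
    mid_data: &mut Vec<u8>,
    ms: i64,
    encoder: &mut dyn ToyEncoder,
) -> Option<Vec<u8>> {
    convert_to_yuv_stub(frame, encoder.yuvfmt(), yuv, mid_data);
    encoder.encode(yuv, ms)
}

struct PassThrough;

impl ToyEncoder for PassThrough {
    fn yuvfmt(&self) -> YuvFormat {
        YuvFormat::I444
    }
    fn encode(&mut self, yuv: &[u8], _ms: i64) -> Option<Vec<u8>> {
        Some(yuv.to_vec())
    }
}

fn main() {
    // Buffers live outside the per-frame call, mirroring the capture loop.
    let (mut yuv, mut mid_data) = (Vec::new(), Vec::new());
    let mut enc = PassThrough;
    let out = handle_one_frame_sketch(&[1, 2, 3], &mut yuv, &mut mid_data, 0, &mut enc);
    assert_eq!(out.unwrap().len(), 3);
}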
@@ -76,12 +76,6 @@ impl TraitCapturer for CapturerPtr {
     fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
         unsafe { (*self.0).frame(timeout) }
     }
-
-    fn set_use_yuv(&mut self, use_yuv: bool) {
-        unsafe {
-            (*self.0).set_use_yuv(use_yuv);
-        }
-    }
 }
 
 struct CapDisplayInfo {
@@ -192,7 +186,8 @@ pub(super) async fn check_init() -> ResultType<()> {
             maxy = max_height;
 
             let capturer = Box::into_raw(Box::new(
-                Capturer::new(display, true).with_context(|| "Failed to create capturer")?,
+                Capturer::new(display)
+                    .with_context(|| "Failed to create capturer")?,
             ));
             let capturer = CapturerPtr(capturer);
             let cap_display_info = Box::into_raw(Box::new(CapDisplayInfo {
@@ -201,6 +201,7 @@ class Header: Reactor.Component {
             {keyboard_enabled ? <li #lock-after-session-end .toggle-option><span>{svg_checkmark}</span>{translate('Lock after session end')}</li> : ""}
             {keyboard_enabled && pi.platform == "Windows" ? <li #privacy-mode><span>{svg_checkmark}</span>{translate('Privacy mode')}</li> : ""}
             {keyboard_enabled && ((is_osx && pi.platform != "Mac OS") || (!is_osx && pi.platform == "Mac OS")) ? <li #allow_swap_key .toggle-option><span>{svg_checkmark}</span>{translate('Swap control-command key')}</li> : ""}
+            {handler.version_cmp(pi.version, '1.2.4') >= 0 ? <li #i444><span>{svg_checkmark}</span>{translate('True color(4:4:4)')}</li> : ""}
         </menu>
     </popup>;
 }
@@ -402,6 +403,8 @@ class Header: Reactor.Component {
             togglePrivacyMode(me.id);
         } else if (me.id == "show-quality-monitor") {
             toggleQualityMonitor(me.id);
+        } else if (me.id == "i444") {
+            toggleI444(me.id);
         } else if (me.attributes.hasClass("toggle-option")) {
             handler.toggle_option(me.id);
             toggleMenuState();
@@ -476,7 +479,7 @@ function toggleMenuState() {
     for (var el in $$(menu#keyboard-options>li)) {
         el.attributes.toggleClass("selected", values.indexOf(el.id) >= 0);
     }
-    for (var id in ["show-remote-cursor", "show-quality-monitor", "disable-audio", "enable-file-transfer", "disable-clipboard", "lock-after-session-end", "allow_swap_key"]) {
+    for (var id in ["show-remote-cursor", "show-quality-monitor", "disable-audio", "enable-file-transfer", "disable-clipboard", "lock-after-session-end", "allow_swap_key", "i444"]) {
         var el = self.select('#' + id);
         if (el) {
             var value = handler.get_toggle_option(id);
@@ -563,6 +566,12 @@ function toggleQualityMonitor(name) {
     toggleMenuState();
 }
 
+function toggleI444(name) {
+    handler.toggle_option(name);
+    handler.change_prefer_codec();
+    toggleMenuState();
+}
+
 handler.updateBlockInputState = function(input_blocked) {
     if (!input_blocked) {
         handler.toggle_option("block-input");
@@ -131,7 +131,8 @@ impl InvokeUiSession for SciterHandler {
                 status.target_bitrate.map_or(Value::null(), |it| it.into()),
                 status
                     .codec_format
-                    .map_or(Value::null(), |it| it.to_string().into())
+                    .map_or(Value::null(), |it| it.to_string().into()),
+                status.chroma.map_or(Value::null(), |it| it.into())
             ),
         );
     }
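The quality-status call keeps its positional shape: optional fields that are absent map to Value::null() rather than being dropped, presumably so the script-side updateQualityStatus(speed, fps, delay, bitrate, codec_format, chroma) arguments stay aligned. A tiny self-contained sketch of that Option-to-null mapping follows; the Value enum here merely stands in for sciter's Value.

// Stand-in for sciter's Value: absent data becomes an explicit Null slot.
#[derive(Debug)]
enum Value {
    Null,
    Str(String),
}

fn main() {
    let codec_format: Option<String> = Some("VP9".into());
    let chroma: Option<&str> = None; // not reported this tick

    let codec_val = codec_format.map_or(Value::Null, Value::Str);
    let chroma_val = chroma.map_or(Value::Null, |it| Value::Str(it.to_string()));

    // Both slots are always present, so positional consumers never shift.
    println!("{:?} {:?}", codec_val, chroma_val);
}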
@@ -510,17 +510,21 @@ class QualityMonitor: Reactor.Component
             <div>
                 Codec: {qualityMonitorData[4]}
             </div>
+            <div>
+                Chroma: {qualityMonitorData[5]}
+            </div>
         </div>;
     }
 }
 
 $(#quality-monitor).content(<QualityMonitor />);
-handler.updateQualityStatus = function(speed, fps, delay, bitrate, codec_format) {
+handler.updateQualityStatus = function(speed, fps, delay, bitrate, codec_format, chroma) {
     speed ? qualityMonitorData[0] = speed:null;
     fps ? qualityMonitorData[1] = fps:null;
     delay ? qualityMonitorData[2] = delay:null;
     bitrate ? qualityMonitorData[3] = bitrate:null;
     codec_format ? qualityMonitorData[4] = codec_format:null;
+    chroma ? qualityMonitorData[5] = chroma:null;
     qualityMonitor.update();
 }
 
@@ -1527,16 +1527,17 @@ pub async fn io_loop<T: InvokeUiSession>(handler: Session<T>, round: u32) {
     let frame_count_map: Arc<RwLock<HashMap<usize, usize>>> = Default::default();
     let frame_count_map_cl = frame_count_map.clone();
     let ui_handler = handler.ui_handler.clone();
-    let (video_sender, audio_sender, video_queue_map, decode_fps_map) = start_video_audio_threads(
-        handler.clone(),
-        move |display: usize, data: &mut scrap::ImageRgb| {
-            let mut write_lock = frame_count_map_cl.write().unwrap();
-            let count = write_lock.get(&display).unwrap_or(&0) + 1;
-            write_lock.insert(display, count);
-            drop(write_lock);
-            ui_handler.on_rgba(display, data);
-        },
-    );
+    let (video_sender, audio_sender, video_queue_map, decode_fps_map, chroma) =
+        start_video_audio_threads(
+            handler.clone(),
+            move |display: usize, data: &mut scrap::ImageRgb| {
+                let mut write_lock = frame_count_map_cl.write().unwrap();
+                let count = write_lock.get(&display).unwrap_or(&0) + 1;
+                write_lock.insert(display, count);
+                drop(write_lock);
+                ui_handler.on_rgba(display, data);
+            },
+        );
 
     let mut remote = Remote::new(
         handler,
@@ -1547,6 +1548,7 @@ pub async fn io_loop<T: InvokeUiSession>(handler: Session<T>, round: u32) {
         sender,
         frame_count_map,
         decode_fps_map,
+        chroma,
     );
     remote.io_loop(&key, &token, round).await;
     remote.sync_jobs_status_to_local().await;
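The thread-spawning helper now hands back one more value, chroma, which the caller threads into Remote::new alongside the frame-count and decode-fps maps; the decode side fills it in and the UI reads it for the quality monitor shown above. Below is a minimal, self-contained sketch of that wiring; the concrete Chroma type and the shared-handle shape are assumptions for illustration, not the project's exact definitions.

// Sketch: a spawning helper returns an extra shared handle the worker fills in
// and a consumer later reads for display.
use std::sync::{Arc, RwLock};
use std::thread;

#[derive(Clone, Copy, Debug)]
enum Chroma {
    I420,
    I444,
}

fn start_video_threads() -> (thread::JoinHandle<()>, Arc<RwLock<Option<Chroma>>>) {
    let chroma = Arc::new(RwLock::new(None));
    let writer = chroma.clone();
    let handle = thread::spawn(move || {
        // the decoder records the subsampling it actually negotiated
        *writer.write().unwrap() = Some(Chroma::I444);
    });
    (handle, chroma)
}

fn main() {
    let (handle, chroma) = start_video_threads();
    handle.join().unwrap();
    // a session/UI consumer (the Remote::new counterpart here) reads it for display
    println!("chroma: {:?}", chroma.read().unwrap());
}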