add zero-copy mode hardware codec for windows (#6778)

Signed-off-by: 21pages <pages21@163.com>
21pages 2024-01-02 16:58:10 +08:00 committed by GitHub
parent f47faa548b
commit 89150317e1
55 changed files with 2540 additions and 429 deletions

View File

@ -107,7 +107,7 @@ jobs:
shell: bash
- name: Build rustdesk
run: python3 .\build.py --portable --hwcodec --flutter --feature IddDriver
run: python3 .\build.py --portable --hwcodec --flutter --gpucodec --feature IddDriver
- name: find Runner.res
# Windows: find Runner.res (compiled from ./flutter/windows/runner/Runner.rc), copy to ./Runner.res
@ -225,7 +225,7 @@ jobs:
python3 res/inline-sciter.py
# Patch sciter x86
sed -i 's/branch = "dyn"/branch = "dyn_x86"/g' ./Cargo.toml
cargo build --features inline --release --bins
cargo build --features inline,gpucodec --release --bins
mkdir -p ./Release
mv ./target/release/rustdesk.exe ./Release/rustdesk.exe
curl -LJ -o ./Release/sciter.dll https://github.com/c-smile/sciter-sdk/raw/master/bin.win/x32/sciter.dll

Cargo.lock generated
View File

@ -115,6 +115,17 @@ dependencies = [
"pkg-config",
]
[[package]]
name = "amf"
version = "0.1.0"
source = "git+https://github.com/21pages/gpucodec#90800ce41bee33cd898ec36a86c2e32a407e3f02"
dependencies = [
"bindgen 0.59.2",
"cc",
"gpu_common",
"log",
]
[[package]]
name = "android-tzdata"
version = "0.1.1"
@ -2645,6 +2656,36 @@ dependencies = [
"system-deps 6.1.2",
]
[[package]]
name = "gpu_common"
version = "0.1.0"
source = "git+https://github.com/21pages/gpucodec#90800ce41bee33cd898ec36a86c2e32a407e3f02"
dependencies = [
"bindgen 0.59.2",
"cc",
"log",
"serde 1.0.190",
"serde_derive",
"serde_json 1.0.107",
]
[[package]]
name = "gpucodec"
version = "0.1.0"
source = "git+https://github.com/21pages/gpucodec#90800ce41bee33cd898ec36a86c2e32a407e3f02"
dependencies = [
"amf",
"bindgen 0.59.2",
"cc",
"gpu_common",
"log",
"nv",
"serde 1.0.190",
"serde_derive",
"serde_json 1.0.107",
"vpl",
]
[[package]]
name = "gstreamer"
version = "0.16.7"
@ -3025,8 +3066,8 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "hwcodec"
version = "0.1.3"
source = "git+https://github.com/21pages/hwcodec?branch=stable#83300549075158e5a3fa6c59ea527af3330e48ff"
version = "0.2.0"
source = "git+https://github.com/21pages/hwcodec?branch=stable#da8aec8e8abb6a5506e027484023e6e2ad1f47eb"
dependencies = [
"bindgen 0.59.2",
"cc",
@ -4065,6 +4106,17 @@ dependencies = [
"libc",
]
[[package]]
name = "nv"
version = "0.1.0"
source = "git+https://github.com/21pages/gpucodec#90800ce41bee33cd898ec36a86c2e32a407e3f02"
dependencies = [
"bindgen 0.59.2",
"cc",
"gpu_common",
"log",
]
[[package]]
name = "objc"
version = "0.2.7"
@ -5534,6 +5586,7 @@ dependencies = [
"cfg-if 1.0.0",
"dbus",
"docopt",
"gpucodec",
"gstreamer",
"gstreamer-app",
"gstreamer-video",
@ -6709,6 +6762,17 @@ dependencies = [
"lazy_static",
]
[[package]]
name = "vpl"
version = "0.1.0"
source = "git+https://github.com/21pages/gpucodec#90800ce41bee33cd898ec36a86c2e32a407e3f02"
dependencies = [
"bindgen 0.59.2",
"cc",
"gpu_common",
"log",
]
[[package]]
name = "vswhom"
version = "0.1.0"

View File

@ -28,6 +28,7 @@ use_dasp = ["dasp"]
flutter = ["flutter_rust_bridge"]
default = ["use_dasp"]
hwcodec = ["scrap/hwcodec"]
gpucodec = ["scrap/gpucodec"]
mediacodec = ["scrap/mediacodec"]
linux_headless = ["pam" ]
virtual_display_driver = ["virtual_display"]

View File

@ -121,6 +121,11 @@ def make_parser():
help='Enable feature hwcodec' + (
'' if windows or osx else ', need libva-dev, libvdpau-dev.')
)
parser.add_argument(
'--gpucodec',
action='store_true',
help='Enable feature gpucodec, only available on windows now.'
)
parser.add_argument(
'--portable',
action='store_true',
@ -274,6 +279,8 @@ def get_features(args):
features.append('virtual_display_driver')
if args.hwcodec:
features.append('hwcodec')
if args.gpucodec:
features.append('gpucodec')
if args.flutter:
features.append('flutter')
features.append('flutter_texture_render')
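
On the Rust side, --gpucodec maps to a Cargo feature of the same name (see the Cargo.toml hunks above), which gates the Windows-only zero-copy paths at compile time. A minimal, hypothetical sketch of that gating pattern, not taken from the patch:

#[cfg(feature = "gpucodec")]
fn texture_encoding_available() -> bool {
    // Compiled only when built with `--features gpucodec` (what --gpucodec adds).
    true
}

#[cfg(not(feature = "gpucodec"))]
fn texture_encoding_available() -> bool {
    // Without the feature, callers stay on the pixel-buffer path.
    false
}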

View File

@ -388,10 +388,12 @@ class _GeneralState extends State<_General> {
}
Widget hwcodec() {
final hwcodec = bind.mainHasHwcodec();
final gpucodec = bind.mainHasGpucodec();
return Offstage(
offstage: !bind.mainHasHwcodec(),
offstage: !(hwcodec || gpucodec),
child: _Card(title: 'Hardware Codec', children: [
_OptionCheckBox(context, 'Enable hardware codec', 'enable-hwcodec'),
_OptionCheckBox(context, 'Enable hardware codec', 'enable-hwcodec')
]),
);
}

View File

@ -79,7 +79,6 @@ class _RemotePageState extends State<RemotePage>
late RxBool _zoomCursor;
late RxBool _remoteCursorMoved;
late RxBool _keyboardEnabled;
final Map<int, RenderTexture> _renderTextures = {};
var _blockableOverlayState = BlockableOverlayState();
@ -212,9 +211,7 @@ class _RemotePageState extends State<RemotePage>
// https://github.com/flutter/flutter/issues/64935
super.dispose();
debugPrint("REMOTE PAGE dispose session $sessionId ${widget.id}");
for (final texture in _renderTextures.values) {
await texture.destroy(closeSession);
}
_ffi.textureModel.onRemotePageDispose(closeSession);
// ensure we leave this session, this is a double check
_ffi.inputModel.enterOrLeave(false);
DesktopMultiWindow.removeListener(this);
@ -429,38 +426,6 @@ class _RemotePageState extends State<RemotePage>
);
}
Map<int, RenderTexture> _updateGetRenderTextures(int curDisplay) {
tryCreateTexture(int idx) {
if (!_renderTextures.containsKey(idx)) {
final renderTexture = RenderTexture();
_renderTextures[idx] = renderTexture;
renderTexture.create(idx, sessionId);
}
}
tryRemoveTexture(int idx) {
if (_renderTextures.containsKey(idx)) {
_renderTextures[idx]!.destroy(true);
_renderTextures.remove(idx);
}
}
if (curDisplay == kAllDisplayValue) {
final displays = _ffi.ffiModel.pi.getCurDisplays();
for (var i = 0; i < displays.length; i++) {
tryCreateTexture(i);
}
} else {
tryCreateTexture(curDisplay);
for (var i = 0; i < _ffi.ffiModel.pi.displays.length; i++) {
if (i != curDisplay) {
tryRemoveTexture(i);
}
}
}
return _renderTextures;
}
Widget getBodyForDesktop(BuildContext context) {
var paints = <Widget>[
MouseRegion(onEnter: (evt) {
@ -475,16 +440,19 @@ class _RemotePageState extends State<RemotePage>
return Obx(
() => _ffi.ffiModel.pi.isSet.isFalse
? Container(color: Colors.transparent)
: Obx(() => ImagePaint(
: Obx(() {
_ffi.textureModel.updateCurrentDisplay(peerDisplay.value);
return ImagePaint(
id: widget.id,
zoomCursor: _zoomCursor,
cursorOverImage: _cursorOverImage,
keyboardEnabled: _keyboardEnabled,
remoteCursorMoved: _remoteCursorMoved,
renderTextures: _updateGetRenderTextures(peerDisplay.value),
listenerBuilder: (child) => _buildRawTouchAndPointerRegion(
child, enterView, leaveView),
)),
ffi: _ffi,
);
}),
);
}))
];
@ -515,22 +483,22 @@ class _RemotePageState extends State<RemotePage>
}
class ImagePaint extends StatefulWidget {
final FFI ffi;
final String id;
final RxBool zoomCursor;
final RxBool cursorOverImage;
final RxBool keyboardEnabled;
final RxBool remoteCursorMoved;
final Map<int, RenderTexture> renderTextures;
final Widget Function(Widget)? listenerBuilder;
ImagePaint(
{Key? key,
required this.ffi,
required this.id,
required this.zoomCursor,
required this.cursorOverImage,
required this.keyboardEnabled,
required this.remoteCursorMoved,
required this.renderTextures,
this.listenerBuilder})
: super(key: key);
@ -548,6 +516,11 @@ class _ImagePaintState extends State<ImagePaint> {
RxBool get remoteCursorMoved => widget.remoteCursorMoved;
Widget Function(Widget)? get listenerBuilder => widget.listenerBuilder;
@override
void initState() {
super.initState();
}
@override
Widget build(BuildContext context) {
final m = Provider.of<ImageModel>(context);
@ -668,10 +641,10 @@ class _ImagePaintState extends State<ImagePaint> {
}
final curDisplay = ffiModel.pi.currentDisplay;
for (var i = 0; i < displays.length; i++) {
final textureId = widget
.renderTextures[curDisplay == kAllDisplayValue ? i : curDisplay]
?.textureId;
if (textureId != null) {
final textureId = widget.ffi.textureModel
.getTextureId(curDisplay == kAllDisplayValue ? i : curDisplay);
if (true) {
// both "textureId.value != -1" and "true" seems ok
children.add(Positioned(
left: (displays[i].x - rect.left) * s + offset.dx,
top: (displays[i].y - rect.top) * s + offset.dy,

View File

@ -151,7 +151,8 @@ class DesktopTabController {
return false;
}
state.update((val) {
val!.selected = index;
if (val != null) {
val.selected = index;
Future.delayed(Duration(milliseconds: 100), (() {
if (val.pageController.hasClients) {
val.pageController.jumpToPage(index);
@ -163,6 +164,7 @@ class DesktopTabController {
.scrollToItem(index, center: false, animate: true);
}
}));
}
});
if (callOnSelected) {
if (state.value.tabs.length > index) {

View File

@ -1,46 +1,241 @@
import 'package:flutter/material.dart';
import 'package:flutter_gpu_texture_renderer/flutter_gpu_texture_renderer.dart';
import 'package:flutter_hbb/consts.dart';
import 'package:flutter_hbb/models/model.dart';
import 'package:get/get.dart';
import 'package:texture_rgba_renderer/texture_rgba_renderer.dart';
import '../../common.dart';
import './platform_model.dart';
final useTextureRender = bind.mainUseTextureRender();
final useTextureRender =
bind.mainHasPixelbufferTextureRender() || bind.mainHasGpuTextureRender();
class RenderTexture {
final RxInt textureId = RxInt(-1);
class _PixelbufferTexture {
int _textureKey = -1;
int _display = 0;
SessionID? _sessionId;
final support = bind.mainHasPixelbufferTextureRender();
bool _destroying = false;
int? _id;
final textureRenderer = TextureRgbaRenderer();
RenderTexture();
int get display => _display;
create(int d, SessionID sessionId) {
if (useTextureRender) {
create(int d, SessionID sessionId, FFI ffi) {
if (support) {
_display = d;
_textureKey = bind.getNextTextureKey();
_sessionId = sessionId;
textureRenderer.createTexture(_textureKey).then((id) async {
_id = id;
if (id != -1) {
ffi.textureModel.setRgbaTextureId(display: d, id: id);
final ptr = await textureRenderer.getTexturePtr(_textureKey);
platformFFI.registerTexture(sessionId, display, ptr);
textureId.value = id;
platformFFI.registerPixelbufferTexture(sessionId, display, ptr);
debugPrint(
"create pixelbuffer texture: peerId: ${ffi.id} display:$_display, textureId:$id");
}
});
}
}
destroy(bool unregisterTexture) async {
if (useTextureRender && _textureKey != -1 && _sessionId != null) {
destroy(bool unregisterTexture, FFI ffi) async {
if (!_destroying && support && _textureKey != -1 && _sessionId != null) {
_destroying = true;
if (unregisterTexture) {
platformFFI.registerTexture(_sessionId!, display, 0);
platformFFI.registerPixelbufferTexture(_sessionId!, display, 0);
// sleep for a while to avoid the texture is used after it's unregistered.
await Future.delayed(Duration(milliseconds: 100));
}
await textureRenderer.closeTexture(_textureKey);
_textureKey = -1;
_destroying = false;
debugPrint(
"destroy pixelbuffer texture: peerId: ${ffi.id} display:$_display, textureId:$_id");
}
}
}
class _GpuTexture {
int _textureId = -1;
SessionID? _sessionId;
final support = bind.mainHasGpuTextureRender();
bool _destroying = false;
int _display = 0;
int? _id;
int? _output;
int get display => _display;
final gpuTextureRenderer = FlutterGpuTextureRenderer();
_GpuTexture();
create(int d, SessionID sessionId, FFI ffi) {
if (support) {
_sessionId = sessionId;
_display = d;
gpuTextureRenderer.registerTexture().then((id) async {
_id = id;
if (id != null) {
_textureId = id;
ffi.textureModel.setGpuTextureId(display: d, id: id);
final output = await gpuTextureRenderer.output(id);
_output = output;
if (output != null) {
platformFFI.registerGpuTexture(sessionId, d, output);
}
debugPrint(
"create gpu texture: peerId: ${ffi.id} display:$_display, textureId:$id, output:$output");
}
}, onError: (err) {
debugPrint("Failed to register gpu texture:$err");
});
}
}
destroy(FFI ffi) async {
// must stop texture render, render unregistered texture cause crash
if (!_destroying && support && _sessionId != null && _textureId != -1) {
_destroying = true;
platformFFI.registerGpuTexture(_sessionId!, _display, 0);
// sleep for a while to avoid the texture is used after it's unregistered.
await Future.delayed(Duration(milliseconds: 100));
await gpuTextureRenderer.unregisterTexture(_textureId);
_textureId = -1;
_destroying = false;
debugPrint(
"destroy gpu texture: peerId: ${ffi.id} display:$_display, textureId:$_id, output:$_output");
}
}
}
class _Control {
RxInt textureID = (-1).obs;
int _rgbaTextureId = -1;
int get rgbaTextureId => _rgbaTextureId;
int _gpuTextureId = -1;
int get gpuTextureId => _gpuTextureId;
bool _isGpuTexture = false;
bool get isGpuTexture => _isGpuTexture;
setTextureType({bool gpuTexture = false}) {
_isGpuTexture = gpuTexture;
textureID.value = _isGpuTexture ? gpuTextureId : rgbaTextureId;
}
setRgbaTextureId(int id) {
_rgbaTextureId = id;
textureID.value = _isGpuTexture ? gpuTextureId : rgbaTextureId;
}
setGpuTextureId(int id) {
_gpuTextureId = id;
textureID.value = _isGpuTexture ? gpuTextureId : rgbaTextureId;
}
}
class TextureModel {
final WeakReference<FFI> parent;
final Map<int, _Control> _control = {};
final Map<int, _PixelbufferTexture> _pixelbufferRenderTextures = {};
final Map<int, _GpuTexture> _gpuRenderTextures = {};
TextureModel(this.parent);
setTextureType({required int display, required bool gpuTexture}) {
debugPrint("setTextureType: display:$display, isGpuTexture:$gpuTexture");
var texture = _control[display];
if (texture == null) {
texture = _Control();
_control[display] = texture;
}
texture.setTextureType(gpuTexture: gpuTexture);
}
setRgbaTextureId({required int display, required int id}) {
var ctl = _control[display];
if (ctl == null) {
ctl = _Control();
_control[display] = ctl;
}
ctl.setRgbaTextureId(id);
}
setGpuTextureId({required int display, required int id}) {
var ctl = _control[display];
if (ctl == null) {
ctl = _Control();
_control[display] = ctl;
}
ctl.setGpuTextureId(id);
}
RxInt getTextureId(int display) {
var ctl = _control[display];
if (ctl == null) {
ctl = _Control();
_control[display] = ctl;
}
return ctl.textureID;
}
updateCurrentDisplay(int curDisplay) {
final ffi = parent.target;
if (ffi == null) return;
tryCreateTexture(int idx) {
if (!_pixelbufferRenderTextures.containsKey(idx)) {
final renderTexture = _PixelbufferTexture();
_pixelbufferRenderTextures[idx] = renderTexture;
renderTexture.create(idx, ffi.sessionId, ffi);
}
if (!_gpuRenderTextures.containsKey(idx)) {
final renderTexture = _GpuTexture();
_gpuRenderTextures[idx] = renderTexture;
renderTexture.create(idx, ffi.sessionId, ffi);
}
}
tryRemoveTexture(int idx) {
_control.remove(idx);
if (_pixelbufferRenderTextures.containsKey(idx)) {
_pixelbufferRenderTextures[idx]!.destroy(true, ffi);
_pixelbufferRenderTextures.remove(idx);
}
if (_gpuRenderTextures.containsKey(idx)) {
_gpuRenderTextures[idx]!.destroy(ffi);
_gpuRenderTextures.remove(idx);
}
}
if (curDisplay == kAllDisplayValue) {
final displays = ffi.ffiModel.pi.getCurDisplays();
for (var i = 0; i < displays.length; i++) {
tryCreateTexture(i);
}
} else {
tryCreateTexture(curDisplay);
for (var i = 0; i < ffi.ffiModel.pi.displays.length; i++) {
if (i != curDisplay) {
tryRemoveTexture(i);
}
}
}
}
onRemotePageDispose(bool closeSession) async {
final ffi = parent.target;
if (ffi == null) return;
for (final texture in _pixelbufferRenderTextures.values) {
await texture.destroy(closeSession, ffi);
}
for (final texture in _gpuRenderTextures.values) {
await texture.destroy(ffi);
}
}
}

View File

@ -2097,6 +2097,7 @@ class FFI {
late final InputModel inputModel; // session
late final ElevationModel elevationModel; // session
late final CmFileModel cmFileModel; // cm
late final TextureModel textureModel; //session
FFI(SessionID? sId) {
sessionId = sId ?? (isDesktop ? Uuid().v4obj() : _constSessionId);
@ -2116,6 +2117,7 @@ class FFI {
inputModel = InputModel(WeakReference(this));
elevationModel = ElevationModel(WeakReference(this));
cmFileModel = CmFileModel(WeakReference(this));
textureModel = TextureModel(WeakReference(this));
}
/// Mobile reuse FFI
@ -2195,6 +2197,9 @@ class FFI {
}
}
final hasPixelBufferTextureRender = bind.mainHasPixelbufferTextureRender();
final hasGpuTextureRender = bind.mainHasGpuTextureRender();
final SimpleWrapper<bool> isToNewWindowNotified = SimpleWrapper(false);
// Preserved for the rgba data.
stream.listen((message) {
@ -2240,7 +2245,9 @@ class FFI {
}
} else if (message is EventToUI_Rgba) {
final display = message.field0;
if (useTextureRender) {
if (hasPixelBufferTextureRender) {
debugPrint("EventToUI_Rgba display:$display");
textureModel.setTextureType(display: display, gpuTexture: false);
onEvent2UIRgba();
} else {
// Fetch the image buffer from rust codes.
@ -2254,6 +2261,13 @@ class FFI {
imageModel.onRgba(display, rgba);
}
}
} else if (message is EventToUI_Texture) {
final display = message.field0;
debugPrint("EventToUI_Texture display:$display");
if (hasGpuTextureRender) {
textureModel.setTextureType(display: display, gpuTexture: true);
onEvent2UIRgba();
}
}
}();
});

View File

@ -99,9 +99,14 @@ class PlatformFFI {
int getRgbaSize(SessionID sessionId, int display) =>
_ffiBind.sessionGetRgbaSize(sessionId: sessionId, display: display);
void nextRgba(SessionID sessionId, int display) => _ffiBind.sessionNextRgba(sessionId: sessionId, display: display);
void registerTexture(SessionID sessionId, int display, int ptr) =>
_ffiBind.sessionRegisterTexture(sessionId: sessionId, display: display, ptr: ptr);
void nextRgba(SessionID sessionId, int display) =>
_ffiBind.sessionNextRgba(sessionId: sessionId, display: display);
void registerPixelbufferTexture(SessionID sessionId, int display, int ptr) =>
_ffiBind.sessionRegisterPixelbufferTexture(
sessionId: sessionId, display: display, ptr: ptr);
void registerGpuTexture(SessionID sessionId, int display, int ptr) =>
_ffiBind.sessionRegisterGpuTexture(
sessionId: sessionId, display: display, ptr: ptr);
/// Init the FFI class, loads the native Rust core library.
Future<void> init(String appType) async {

View File

@ -505,6 +505,15 @@ packages:
url: "https://pub.dev"
source: hosted
version: "0.0.4"
flutter_gpu_texture_renderer:
dependency: "direct main"
description:
path: "."
ref: "3865a99f60a92bea4d95bb5d55cf524b1bcbbf5a"
resolved-ref: "3865a99f60a92bea4d95bb5d55cf524b1bcbbf5a"
url: "https://github.com/21pages/flutter_gpu_texture_renderer"
source: git
version: "0.0.1"
flutter_improved_scrolling:
dependency: "direct main"
description:

View File

@ -92,6 +92,10 @@ dependencies:
texture_rgba_renderer: ^0.0.16
percent_indicator: ^4.2.2
dropdown_button2: ^2.0.0
flutter_gpu_texture_renderer:
git:
url: https://github.com/21pages/flutter_gpu_texture_renderer
ref: 3865a99f60a92bea4d95bb5d55cf524b1bcbbf5a
uuid: ^3.0.7
auto_size_text_field: ^2.2.1
flex_color_picker: ^3.3.0

View File

@ -4,6 +4,7 @@
#include <desktop_multi_window/desktop_multi_window_plugin.h>
#include <texture_rgba_renderer/texture_rgba_renderer_plugin_c_api.h>
#include <flutter_gpu_texture_renderer/flutter_gpu_texture_renderer_plugin_c_api.h>
#include "flutter/generated_plugin_registrant.h"
@ -34,6 +35,8 @@ bool FlutterWindow::OnCreate() {
auto *registry = flutter_view_controller->engine();
TextureRgbaRendererPluginCApiRegisterWithRegistrar(
registry->GetRegistrarForPlugin("TextureRgbaRendererPlugin"));
FlutterGpuTextureRendererPluginCApiRegisterWithRegistrar(
registry->GetRegistrarForPlugin("FlutterGpuTextureRendererPluginCApi"));
});
SetChildContent(flutter_controller_->view()->GetNativeWindow());
return true;

View File

@ -729,6 +729,7 @@ message Misc {
int32 refresh_video_display = 31;
ToggleVirtualDisplay toggle_virtual_display = 32;
TogglePrivacyMode toggle_privacy_mode = 33;
SupportedEncoding supported_encoding = 34;
}
}
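
With the new supported_encoding member of the Misc oneof, the host can push its current encoding capabilities to the peer instead of only answering at connection setup. A hedged sketch of building such a message with the generated protobuf types (the rust-protobuf setter is assumed to follow the usual set_* naming):

use hbb_common::message_proto::{Message, Misc};
use scrap::codec::Encoder;

fn supported_encoding_msg() -> Message {
    let mut misc = Misc::new();
    // Encoder::supported_encoding() aggregates vp8/av1/h264/h265 availability,
    // now including the gpucodec probes (see codec.rs below).
    misc.set_supported_encoding(Encoder::supported_encoding());
    let mut msg = Message::new();
    msg.set_misc(misc);
    msg
}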

View File

@ -492,7 +492,6 @@ impl Config {
suffix: &str,
) -> T {
let file = Self::file_(suffix);
log::debug!("Configuration path: {}", file.display());
let cfg = load_path(file);
if suffix.is_empty() {
log::trace!("{:?}", cfg);
@ -1488,6 +1487,26 @@ impl HwCodecConfig {
}
}
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
pub struct GpucodecConfig {
#[serde(default, deserialize_with = "deserialize_string")]
pub available: String,
}
impl GpucodecConfig {
pub fn load() -> GpucodecConfig {
Config::load_::<GpucodecConfig>("_gpucodec")
}
pub fn store(&self) {
Config::store_(self, "_gpucodec");
}
pub fn clear() {
GpucodecConfig::default().store();
}
}
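
GpucodecConfig mirrors HwCodecConfig just above it: whatever the probing helper finds is serialized into a _gpucodec config file so later sessions can read the result without re-running the driver probe. A small usage sketch, assuming only what this impl exposes (the serialized string comes from gpucodec's Available type, see check_available_gpucodec further down):

use hbb_common::config::GpucodecConfig;

fn cache_probe_result(serialized: String) {
    let mut cfg = GpucodecConfig::load(); // reads the "_gpucodec" config file
    cfg.available = serialized;           // probed encoder/decoder capabilities
    cfg.store();                          // persisted for later connections
}

fn reset_probe_result() {
    // Clearing just writes the default (empty) config back to disk.
    GpucodecConfig::clear();
}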
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
pub struct UserDefaultConfig {
#[serde(default, deserialize_with = "deserialize_hashmap_string_string")]

View File

@ -58,3 +58,6 @@ gstreamer-video = { version = "0.16", optional = true }
[target.'cfg(any(target_os = "windows", target_os = "linux"))'.dependencies]
hwcodec = { git = "https://github.com/21pages/hwcodec", branch = "stable", optional = true }
[target.'cfg(target_os = "windows")'.dependencies]
gpucodec = { git = "https://github.com/21pages/gpucodec", optional = true }

View File

@ -6,8 +6,7 @@ use hbb_common::{
use scrap::{
aom::{AomDecoder, AomEncoder, AomEncoderConfig},
codec::{EncoderApi, EncoderCfg, Quality as Q},
convert_to_yuv, Capturer, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig, VpxEncoder,
VpxEncoderConfig,
Capturer, Display, TraitCapturer, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig,
VpxVideoCodecId::{self, *},
STRIDE_ALIGN,
};
@ -122,7 +121,8 @@ fn test_vpx(
match c.frame(std::time::Duration::from_millis(30)) {
Ok(frame) => {
let tmp_timer = Instant::now();
convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
let frame = frame.to(encoder.yuvfmt(), &mut yuv, &mut mid_data).unwrap();
let yuv = frame.yuv().unwrap();
for ref frame in encoder
.encode(start.elapsed().as_millis() as _, &yuv, STRIDE_ALIGN)
.unwrap()
@ -199,7 +199,8 @@ fn test_av1(
match c.frame(std::time::Duration::from_millis(30)) {
Ok(frame) => {
let tmp_timer = Instant::now();
convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
let frame = frame.to(encoder.yuvfmt(), &mut yuv, &mut mid_data).unwrap();
let yuv = frame.yuv().unwrap();
for ref frame in encoder
.encode(start.elapsed().as_millis() as _, &yuv, STRIDE_ALIGN)
.unwrap()
@ -239,10 +240,7 @@ fn test_av1(
#[cfg(feature = "hwcodec")]
mod hw {
use hwcodec::ffmpeg::CodecInfo;
use scrap::{
codec::HwEncoderConfig,
hwcodec::{HwDecoder, HwEncoder},
};
use scrap::hwcodec::{HwDecoder, HwEncoder, HwEncoderConfig};
use super::*;
@ -295,7 +293,8 @@ mod hw {
match c.frame(std::time::Duration::from_millis(30)) {
Ok(frame) => {
let tmp_timer = Instant::now();
convert_to_yuv(&frame, encoder.yuvfmt(), &mut yuv, &mut mid_data);
let frame = frame.to(encoder.yuvfmt(), &mut yuv, &mut mid_data).unwrap();
let yuv = frame.yuv().unwrap();
for ref frame in encoder.encode(&yuv).unwrap() {
size += frame.data.len();

View File

@ -24,7 +24,7 @@ fn get_display(i: usize) -> Display {
fn record(i: usize) {
use std::time::Duration;
use scrap::TraitFrame;
use scrap::{Frame, TraitPixelBuffer};
for d in Display::all().unwrap() {
println!("{:?} {} {}", d.origin(), d.width(), d.height());
@ -44,8 +44,11 @@ fn record(i: usize) {
println!("Filter window for cls {} name {}", wnd_cls, wnd_name);
}
let captured_frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
let frame = captured_frame.data();
let frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
let Frame::PixelBuffer(frame) = frame else {
return;
};
let frame = frame.data();
println!("Capture data len: {}, Saving...", frame.len());
let mut bitflipped = Vec::with_capacity(w * h * 4);
@ -81,6 +84,9 @@ fn record(i: usize) {
}
let frame = capture_mag.frame(Duration::from_millis(0)).unwrap();
let Frame::PixelBuffer(frame) = frame else {
return;
};
println!("Capture data len: {}, Saving...", frame.data().len());
let mut raw = Vec::new();

View File

@ -1,6 +1,6 @@
use std::time::Duration;
use scrap::TraitFrame;
use scrap::{Frame, TraitPixelBuffer};
extern crate scrap;
@ -36,6 +36,9 @@ fn main() {
match capturer.frame(Duration::from_millis(0)) {
Ok(frame) => {
// Write the frame, removing end-of-row padding.
let Frame::PixelBuffer(frame) = frame else {
return;
};
let stride = frame.stride()[0];
let rowlen = 4 * w;
for row in frame.data().chunks(stride) {

View File

@ -17,7 +17,7 @@ use scrap::codec::{EncoderApi, EncoderCfg, Quality as Q};
use webm::mux;
use webm::mux::Track;
use scrap::{convert_to_yuv, vpxcodec as vpx_encode};
use scrap::vpxcodec as vpx_encode;
use scrap::{Capturer, Display, TraitCapturer, STRIDE_ALIGN};
const USAGE: &'static str = "
@ -152,7 +152,7 @@ fn main() -> io::Result<()> {
if let Ok(frame) = c.frame(Duration::from_millis(0)) {
let ms = time.as_secs() * 1000 + time.subsec_millis() as u64;
convert_to_yuv(&frame, vpx.yuvfmt(), &mut yuv, &mut mid_data);
frame.to(vpx.yuvfmt(), &mut yuv, &mut mid_data).unwrap();
for frame in vpx.encode(ms as i64, &yuv, STRIDE_ALIGN).unwrap() {
vt.add_frame(frame.data, frame.pts as u64 * 1_000_000, frame.key);
}

View File

@ -6,7 +6,7 @@ use std::io::ErrorKind::WouldBlock;
use std::thread;
use std::time::Duration;
use scrap::{Capturer, Display, TraitCapturer, TraitFrame};
use scrap::{Capturer, Display, Frame, TraitCapturer, TraitPixelBuffer};
fn main() {
let n = Display::all().unwrap().len();
@ -46,6 +46,9 @@ fn record(i: usize) {
}
}
};
let Frame::PixelBuffer(frame) = frame else {
return;
};
let buffer = frame.data();
println!("Captured data len: {}, Saving...", buffer.len());
@ -96,6 +99,9 @@ fn record(i: usize) {
}
}
};
let Frame::PixelBuffer(frame) = frame else {
return;
};
let buffer = frame.data();
println!("Captured data len: {}, Saving...", buffer.len());

View File

@ -1,5 +1,5 @@
use crate::android::ffi::*;
use crate::Pixfmt;
use crate::{Frame, Pixfmt};
use lazy_static::lazy_static;
use serde_json::Value;
use std::collections::HashMap;
@ -43,26 +43,30 @@ impl crate::TraitCapturer for Capturer {
unsafe {
std::ptr::copy_nonoverlapping(buf.as_ptr(), self.rgba.as_mut_ptr(), buf.len())
};
Ok(Frame::new(&self.rgba, self.width(), self.height()))
Ok(Frame::PixelBuffer(PixelBuffer::new(
&self.rgba,
self.width(),
self.height(),
)))
} else {
return Err(io::ErrorKind::WouldBlock.into());
}
}
}
pub struct Frame<'a> {
pub struct PixelBuffer<'a> {
data: &'a [u8],
width: usize,
height: usize,
stride: Vec<usize>,
}
impl<'a> Frame<'a> {
impl<'a> PixelBuffer<'a> {
pub fn new(data: &'a [u8], width: usize, height: usize) -> Self {
let stride0 = data.len() / height;
let mut stride = Vec::new();
stride.push(stride0);
Frame {
PixelBuffer {
data,
width,
height,
@ -71,7 +75,7 @@ impl<'a> Frame<'a> {
}
}
impl<'a> crate::TraitFrame for Frame<'a> {
impl<'a> crate::TraitPixelBuffer for PixelBuffer<'a> {
fn data(&self) -> &[u8] {
self.data
}

View File

@ -9,7 +9,7 @@ include!(concat!(env!("OUT_DIR"), "/aom_ffi.rs"));
use crate::codec::{base_bitrate, codec_thread_num, Quality};
use crate::{codec::EncoderApi, EncodeFrame, STRIDE_ALIGN};
use crate::{common::GoogleImage, generate_call_macro, generate_call_ptr_macro, Error, Result};
use crate::{EncodeYuvFormat, Pixfmt};
use crate::{EncodeInput, EncodeYuvFormat, Pixfmt};
use hbb_common::{
anyhow::{anyhow, Context},
bytes::Bytes,
@ -249,10 +249,10 @@ impl EncoderApi for AomEncoder {
}
}
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<VideoFrame> {
fn encode_to_message(&mut self, input: EncodeInput, ms: i64) -> ResultType<VideoFrame> {
let mut frames = Vec::new();
for ref frame in self
.encode(ms, frame, STRIDE_ALIGN)
.encode(ms, input.yuv()?, STRIDE_ALIGN)
.with_context(|| "Failed to encode")?
{
frames.push(Self::create_frame(frame));
@ -268,6 +268,11 @@ impl EncoderApi for AomEncoder {
self.yuvfmt.clone()
}
#[cfg(feature = "gpucodec")]
fn input_texture(&self) -> bool {
false
}
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
let mut c = unsafe { *self.ctx.config.enc.to_owned() };
let (q_min, q_max, b) = Self::convert_quality(quality);
@ -287,6 +292,10 @@ impl EncoderApi for AomEncoder {
let c = unsafe { *self.ctx.config.enc.to_owned() };
c.rc_target_bitrate
}
fn support_abr(&self) -> bool {
true
}
}
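
encode_to_message now receives an EncodeInput instead of a raw &[u8]: the software encoders (vpx/aom/hwcodec) call input.yuv()?, while the GPU encoder pulls a texture pointer out of it. The enum itself is defined outside this excerpt; a plausible sketch of its shape, inferred from those call sites (variant and method names are assumptions):

use std::ffi::c_void;
use hbb_common::{anyhow::bail, ResultType};

pub enum EncodeInput<'a> {
    // CPU path: converted YUV planes for the vpx/aom/hwcodec encoders.
    YUV(&'a [u8]),
    // Zero-copy path: a GPU texture pointer for the gpucodec encoder.
    Texture(*mut c_void),
}

impl<'a> EncodeInput<'a> {
    pub fn yuv(&self) -> ResultType<&'a [u8]> {
        match self {
            Self::YUV(f) => Ok(*f),
            Self::Texture(_) => bail!("expected a YUV buffer, got a texture"),
        }
    }

    pub fn texture(&self) -> ResultType<*mut c_void> {
        match self {
            Self::Texture(t) => Ok(*t),
            Self::YUV(_) => bail!("expected a texture, got a YUV buffer"),
        }
    }
}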
impl AomEncoder {

View File

@ -1,9 +1,12 @@
use std::{
collections::HashMap,
ffi::c_void,
ops::{Deref, DerefMut},
sync::{Arc, Mutex},
};
#[cfg(feature = "gpucodec")]
use crate::gpucodec::*;
#[cfg(feature = "hwcodec")]
use crate::hwcodec::*;
#[cfg(feature = "mediacodec")]
@ -14,7 +17,7 @@ use crate::{
aom::{self, AomDecoder, AomEncoder, AomEncoderConfig},
common::GoogleImage,
vpxcodec::{self, VpxDecoder, VpxDecoderConfig, VpxEncoder, VpxEncoderConfig, VpxVideoCodecId},
CodecName, EncodeYuvFormat, ImageRgb,
CodecName, EncodeInput, EncodeYuvFormat, ImageRgb,
};
use hbb_common::{
@ -30,29 +33,25 @@ use hbb_common::{
tokio::time::Instant,
ResultType,
};
#[cfg(any(feature = "hwcodec", feature = "mediacodec"))]
#[cfg(any(feature = "hwcodec", feature = "mediacodec", feature = "gpucodec"))]
use hbb_common::{config::Config2, lazy_static};
lazy_static::lazy_static! {
static ref PEER_DECODINGS: Arc<Mutex<HashMap<i32, SupportedDecoding>>> = Default::default();
static ref CODEC_NAME: Arc<Mutex<CodecName>> = Arc::new(Mutex::new(CodecName::VP9));
static ref ENCODE_CODEC_NAME: Arc<Mutex<CodecName>> = Arc::new(Mutex::new(CodecName::VP9));
static ref THREAD_LOG_TIME: Arc<Mutex<Option<Instant>>> = Arc::new(Mutex::new(None));
}
#[derive(Debug, Clone)]
pub struct HwEncoderConfig {
pub name: String,
pub width: usize,
pub height: usize,
pub quality: Quality,
pub keyframe_interval: Option<usize>,
}
pub const ENCODE_NEED_SWITCH: &'static str = "ENCODE_NEED_SWITCH";
#[derive(Debug, Clone)]
pub enum EncoderCfg {
VPX(VpxEncoderConfig),
AOM(AomEncoderConfig),
#[cfg(feature = "hwcodec")]
HW(HwEncoderConfig),
#[cfg(feature = "gpucodec")]
GPU(GpuEncoderConfig),
}
pub trait EncoderApi {
@ -60,13 +59,18 @@ pub trait EncoderApi {
where
Self: Sized;
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<VideoFrame>;
fn encode_to_message(&mut self, frame: EncodeInput, ms: i64) -> ResultType<VideoFrame>;
fn yuvfmt(&self) -> EncodeYuvFormat;
#[cfg(feature = "gpucodec")]
fn input_texture(&self) -> bool;
fn set_quality(&mut self, quality: Quality) -> ResultType<()>;
fn bitrate(&self) -> u32;
fn support_abr(&self) -> bool;
}
pub struct Encoder {
@ -93,6 +97,8 @@ pub struct Decoder {
av1: Option<AomDecoder>,
#[cfg(feature = "hwcodec")]
hw: HwDecoders,
#[cfg(feature = "gpucodec")]
gpu: GpuDecoders,
#[cfg(feature = "hwcodec")]
i420: Vec<u8>,
#[cfg(feature = "mediacodec")]
@ -101,9 +107,10 @@ pub struct Decoder {
#[derive(Debug, Clone)]
pub enum EncodingUpdate {
New(SupportedDecoding),
Remove,
NewOnlyVP9,
Update(i32, SupportedDecoding),
Remove(i32),
NewOnlyVP9(i32),
Check,
}
impl Encoder {
@ -123,26 +130,38 @@ impl Encoder {
codec: Box::new(hw),
}),
Err(e) => {
check_config_process();
*CODEC_NAME.lock().unwrap() = CodecName::VP9;
log::error!("new hw encoder failed: {e:?}, clear config");
hbb_common::config::HwCodecConfig::clear();
*ENCODE_CODEC_NAME.lock().unwrap() = CodecName::VP9;
Err(e)
}
},
#[cfg(feature = "gpucodec")]
EncoderCfg::GPU(_) => match GpuEncoder::new(config, i444) {
Ok(tex) => Ok(Encoder {
codec: Box::new(tex),
}),
Err(e) => {
log::error!("new gpu encoder failed: {e:?}, clear config");
hbb_common::config::GpucodecConfig::clear();
*ENCODE_CODEC_NAME.lock().unwrap() = CodecName::VP9;
Err(e)
}
},
#[cfg(not(feature = "hwcodec"))]
_ => Err(anyhow!("unsupported encoder type")),
}
}
pub fn update(id: i32, update: EncodingUpdate) {
pub fn update(update: EncodingUpdate) {
log::info!("update:{:?}", update);
let mut decodings = PEER_DECODINGS.lock().unwrap();
match update {
EncodingUpdate::New(decoding) => {
EncodingUpdate::Update(id, decoding) => {
decodings.insert(id, decoding);
}
EncodingUpdate::Remove => {
EncodingUpdate::Remove(id) => {
decodings.remove(&id);
}
EncodingUpdate::NewOnlyVP9 => {
EncodingUpdate::NewOnlyVP9(id) => {
decodings.insert(
id,
SupportedDecoding {
@ -151,32 +170,51 @@ impl Encoder {
},
);
}
EncodingUpdate::Check => {}
}
let vp8_useable = decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_vp8 > 0);
let av1_useable = decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_av1 > 0);
let _all_support_h264_decoding =
decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_h264 > 0);
let _all_support_h265_decoding =
decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_h265 > 0);
#[allow(unused_mut)]
let mut h264_name = None;
let mut h264gpu_encoding = false;
#[allow(unused_mut)]
let mut h265_name = None;
let mut h265gpu_encoding = false;
#[cfg(feature = "gpucodec")]
if enable_gpucodec_option() {
if _all_support_h264_decoding {
if GpuEncoder::available(CodecName::H264GPU).len() > 0 {
h264gpu_encoding = true;
}
}
if _all_support_h265_decoding {
if GpuEncoder::available(CodecName::H265GPU).len() > 0 {
h265gpu_encoding = true;
}
}
}
#[allow(unused_mut)]
let mut h264hw_encoding = None;
#[allow(unused_mut)]
let mut h265hw_encoding = None;
#[cfg(feature = "hwcodec")]
{
if enable_hwcodec_option() {
let best = HwEncoder::best();
if _all_support_h264_decoding {
h264hw_encoding = best.h264.map_or(None, |c| Some(c.name));
}
if _all_support_h265_decoding {
h265hw_encoding = best.h265.map_or(None, |c| Some(c.name));
}
}
let h264_useable =
decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_h264 > 0);
_all_support_h264_decoding && (h264gpu_encoding || h264hw_encoding.is_some());
let h265_useable =
decodings.len() > 0 && decodings.iter().all(|(_, s)| s.ability_h265 > 0);
if h264_useable {
h264_name = best.h264.map_or(None, |c| Some(c.name));
}
if h265_useable {
h265_name = best.h265.map_or(None, |c| Some(c.name));
}
}
}
let mut name = CODEC_NAME.lock().unwrap();
_all_support_h265_decoding && (h265gpu_encoding || h265hw_encoding.is_some());
let mut name = ENCODE_CODEC_NAME.lock().unwrap();
let mut preference = PreferCodec::Auto;
let preferences: Vec<_> = decodings
.iter()
@ -184,8 +222,8 @@ impl Encoder {
s.prefer == PreferCodec::VP9.into()
|| s.prefer == PreferCodec::VP8.into() && vp8_useable
|| s.prefer == PreferCodec::AV1.into() && av1_useable
|| s.prefer == PreferCodec::H264.into() && h264_name.is_some()
|| s.prefer == PreferCodec::H265.into() && h265_name.is_some()
|| s.prefer == PreferCodec::H264.into() && h264_useable
|| s.prefer == PreferCodec::H265.into() && h265_useable
})
.map(|(_, s)| s.prefer)
.collect();
@ -205,15 +243,34 @@ impl Encoder {
auto_codec = CodecName::VP8
}
match preference {
PreferCodec::VP8 => *name = CodecName::VP8,
PreferCodec::VP9 => *name = CodecName::VP9,
PreferCodec::AV1 => *name = CodecName::AV1,
PreferCodec::H264 => *name = h264_name.map_or(auto_codec, |c| CodecName::H264(c)),
PreferCodec::H265 => *name = h265_name.map_or(auto_codec, |c| CodecName::H265(c)),
PreferCodec::Auto => *name = auto_codec,
*name = match preference {
PreferCodec::VP8 => CodecName::VP8,
PreferCodec::VP9 => CodecName::VP9,
PreferCodec::AV1 => CodecName::AV1,
PreferCodec::H264 => {
if h264gpu_encoding {
CodecName::H264GPU
} else if let Some(v) = h264hw_encoding {
CodecName::H264HW(v)
} else {
auto_codec
}
}
PreferCodec::H265 => {
if h265gpu_encoding {
CodecName::H265GPU
} else if let Some(v) = h265hw_encoding {
CodecName::H265HW(v)
} else {
auto_codec
}
}
PreferCodec::Auto => auto_codec,
};
if decodings.len() > 0 {
log::info!(
"usable: vp8={vp8_useable}, av1={av1_useable}, h264={h264_useable}, h265={h265_useable}",
);
log::info!(
"connection count: {}, used preference: {:?}, encoder: {:?}",
decodings.len(),
@ -221,10 +278,11 @@ impl Encoder {
*name
)
}
}
#[inline]
pub fn negotiated_codec() -> CodecName {
CODEC_NAME.lock().unwrap().clone()
ENCODE_CODEC_NAME.lock().unwrap().clone()
}
pub fn supported_encoding() -> SupportedEncoding {
@ -243,12 +301,52 @@ impl Encoder {
#[cfg(feature = "hwcodec")]
if enable_hwcodec_option() {
let best = HwEncoder::best();
encoding.h264 = best.h264.is_some();
encoding.h265 = best.h265.is_some();
encoding.h264 |= best.h264.is_some();
encoding.h265 |= best.h265.is_some();
}
#[cfg(feature = "gpucodec")]
if enable_gpucodec_option() {
encoding.h264 |= GpuEncoder::available(CodecName::H264GPU).len() > 0;
encoding.h265 |= GpuEncoder::available(CodecName::H265GPU).len() > 0;
}
encoding
}
pub fn set_fallback(config: &EncoderCfg) {
let name = match config {
EncoderCfg::VPX(vpx) => match vpx.codec {
VpxVideoCodecId::VP8 => CodecName::VP8,
VpxVideoCodecId::VP9 => CodecName::VP9,
},
EncoderCfg::AOM(_) => CodecName::AV1,
#[cfg(feature = "hwcodec")]
EncoderCfg::HW(hw) => {
if hw.name.to_lowercase().contains("h264") {
CodecName::H264HW(hw.name.clone())
} else {
CodecName::H265HW(hw.name.clone())
}
}
#[cfg(feature = "gpucodec")]
EncoderCfg::GPU(gpu) => match gpu.feature.data_format {
gpucodec::gpu_common::DataFormat::H264 => CodecName::H264GPU,
gpucodec::gpu_common::DataFormat::H265 => CodecName::H265GPU,
_ => {
log::error!(
"should not reach here, gpucodec not support {:?}",
gpu.feature.data_format
);
return;
}
},
};
let current = ENCODE_CODEC_NAME.lock().unwrap().clone();
if current != name {
log::info!("codec fallback: {:?} -> {:?}", current, name);
*ENCODE_CODEC_NAME.lock().unwrap() = name;
}
}
pub fn use_i444(config: &EncoderCfg) -> bool {
let decodings = PEER_DECODINGS.lock().unwrap().clone();
let prefer_i444 = decodings
@ -260,14 +358,21 @@ impl Encoder {
VpxVideoCodecId::VP9 => decodings.iter().all(|d| d.1.i444.vp9),
},
EncoderCfg::AOM(_) => decodings.iter().all(|d| d.1.i444.av1),
#[cfg(feature = "hwcodec")]
EncoderCfg::HW(_) => false,
#[cfg(feature = "gpucodec")]
EncoderCfg::GPU(_) => false,
};
prefer_i444 && i444_useable && !decodings.is_empty()
}
}
impl Decoder {
pub fn supported_decodings(id_for_perfer: Option<&str>) -> SupportedDecoding {
pub fn supported_decodings(
id_for_perfer: Option<&str>,
_flutter: bool,
_luid: Option<i64>,
) -> SupportedDecoding {
let (prefer, prefer_chroma) = Self::preference(id_for_perfer);
#[allow(unused_mut)]
@ -288,8 +393,21 @@ impl Decoder {
#[cfg(feature = "hwcodec")]
if enable_hwcodec_option() {
let best = HwDecoder::best();
decoding.ability_h264 = if best.h264.is_some() { 1 } else { 0 };
decoding.ability_h265 = if best.h265.is_some() { 1 } else { 0 };
decoding.ability_h264 |= if best.h264.is_some() { 1 } else { 0 };
decoding.ability_h265 |= if best.h265.is_some() { 1 } else { 0 };
}
#[cfg(feature = "gpucodec")]
if enable_gpucodec_option() && _flutter {
decoding.ability_h264 |= if GpuDecoder::available(CodecName::H264GPU, _luid).len() > 0 {
1
} else {
0
};
decoding.ability_h265 |= if GpuDecoder::available(CodecName::H265GPU, _luid).len() > 0 {
1
} else {
0
};
}
#[cfg(feature = "mediacodec")]
if enable_hwcodec_option() {
@ -309,7 +427,33 @@ impl Decoder {
decoding
}
pub fn new() -> Decoder {
pub fn exist_codecs(&self, _flutter: bool) -> CodecAbility {
#[allow(unused_mut)]
let mut ability = CodecAbility {
vp8: self.vp8.is_some(),
vp9: self.vp9.is_some(),
av1: self.av1.is_some(),
..Default::default()
};
#[cfg(feature = "hwcodec")]
{
ability.h264 |= self.hw.h264.is_some();
ability.h265 |= self.hw.h265.is_some();
}
#[cfg(feature = "gpucodec")]
if _flutter {
ability.h264 |= self.gpu.h264.is_some();
ability.h265 |= self.gpu.h265.is_some();
}
#[cfg(feature = "mediacodec")]
{
ability.h264 = self.media_codec.h264.is_some();
ability.h265 = self.media_codec.h265.is_some();
}
ability
}
pub fn new(_luid: Option<i64>) -> Decoder {
let vp8 = VpxDecoder::new(VpxDecoderConfig {
codec: VpxVideoCodecId::VP8,
})
@ -329,6 +473,12 @@ impl Decoder {
} else {
HwDecoders::default()
},
#[cfg(feature = "gpucodec")]
gpu: if enable_gpucodec_option() && _luid.clone().unwrap_or_default() != 0 {
GpuDecoder::new_decoders(_luid)
} else {
GpuDecoders::default()
},
#[cfg(feature = "hwcodec")]
i420: vec![],
#[cfg(feature = "mediacodec")]
@ -345,6 +495,8 @@ impl Decoder {
&mut self,
frame: &video_frame::Union,
rgb: &mut ImageRgb,
_texture: &mut *mut c_void,
_pixelbuffer: &mut bool,
chroma: &mut Option<Chroma>,
) -> ResultType<bool> {
match frame {
@ -369,23 +521,33 @@ impl Decoder {
bail!("av1 decoder not available");
}
}
#[cfg(feature = "hwcodec")]
#[cfg(any(feature = "hwcodec", feature = "gpucodec"))]
video_frame::Union::H264s(h264s) => {
*chroma = Some(Chroma::I420);
if let Some(decoder) = &mut self.hw.h264 {
Decoder::handle_hw_video_frame(decoder, h264s, rgb, &mut self.i420)
} else {
Err(anyhow!("don't support h264!"))
}
#[cfg(feature = "gpucodec")]
if let Some(decoder) = &mut self.gpu.h264 {
*_pixelbuffer = false;
return Decoder::handle_gpu_video_frame(decoder, h264s, _texture);
}
#[cfg(feature = "hwcodec")]
if let Some(decoder) = &mut self.hw.h264 {
return Decoder::handle_hw_video_frame(decoder, h264s, rgb, &mut self.i420);
}
Err(anyhow!("don't support h264!"))
}
#[cfg(any(feature = "hwcodec", feature = "gpucodec"))]
video_frame::Union::H265s(h265s) => {
*chroma = Some(Chroma::I420);
if let Some(decoder) = &mut self.hw.h265 {
Decoder::handle_hw_video_frame(decoder, h265s, rgb, &mut self.i420)
} else {
Err(anyhow!("don't support h265!"))
#[cfg(feature = "gpucodec")]
if let Some(decoder) = &mut self.gpu.h265 {
*_pixelbuffer = false;
return Decoder::handle_gpu_video_frame(decoder, h265s, _texture);
}
#[cfg(feature = "hwcodec")]
if let Some(decoder) = &mut self.hw.h265 {
return Decoder::handle_hw_video_frame(decoder, h265s, rgb, &mut self.i420);
}
Err(anyhow!("don't support h265!"))
}
#[cfg(feature = "mediacodec")]
video_frame::Union::H264s(h264s) => {
@ -483,6 +645,22 @@ impl Decoder {
return Ok(ret);
}
#[cfg(feature = "gpucodec")]
fn handle_gpu_video_frame(
decoder: &mut GpuDecoder,
frames: &EncodedVideoFrames,
texture: &mut *mut c_void,
) -> ResultType<bool> {
let mut ret = false;
for h26x in frames.frames.iter() {
for image in decoder.decode(&h26x.data)? {
*texture = image.frame.texture;
ret = true;
}
}
return Ok(ret);
}
// rgb [in/out] fmt and stride must be set in ImageRgb
#[cfg(feature = "mediacodec")]
fn handle_mediacodec_video_frame(
@ -529,7 +707,14 @@ impl Decoder {
}
#[cfg(any(feature = "hwcodec", feature = "mediacodec"))]
fn enable_hwcodec_option() -> bool {
pub fn enable_hwcodec_option() -> bool {
if let Some(v) = Config2::get().options.get("enable-hwcodec") {
return v != "N";
}
return true; // default is true
}
#[cfg(feature = "gpucodec")]
pub fn enable_gpucodec_option() -> bool {
if let Some(v) = Config2::get().options.get("enable-hwcodec") {
return v != "N";
}
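
One more thread that ties these pieces together: if the GPU encoder starts producing the degenerate tiny frames described in gpucodec.rs below, its encode_to_message bails with ENCODE_NEED_SWITCH so the video service can switch encoders, and Encoder::set_fallback records the codec to retreat to. A hedged sketch of one way a caller might combine the two; the real video-service wiring is not part of this excerpt:

use scrap::codec::{Encoder, EncoderApi, EncoderCfg, ENCODE_NEED_SWITCH};
use scrap::EncodeInput;

fn encode_once(
    encoder: &mut dyn EncoderApi,
    cfg: &EncoderCfg,
    input: EncodeInput,
    ms: i64,
) -> bool {
    match encoder.encode_to_message(input, ms) {
        Ok(_video_frame) => true, // send it over the connection
        Err(e) if e.to_string().contains(ENCODE_NEED_SWITCH) => {
            // GPU encoder misbehaved: record the fallback so the next
            // encoder instance is negotiated without it.
            Encoder::set_fallback(cfg);
            false
        }
        Err(_) => false,
    }
}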

View File

@ -7,8 +7,8 @@
include!(concat!(env!("OUT_DIR"), "/yuv_ffi.rs"));
#[cfg(not(target_os = "ios"))]
use crate::Frame;
use crate::{generate_call_macro, EncodeYuvFormat, TraitFrame};
use crate::PixelBuffer;
use crate::{generate_call_macro, EncodeYuvFormat, TraitPixelBuffer};
use hbb_common::{bail, log, ResultType};
generate_call_macro!(call_yuv, false);
@ -195,7 +195,7 @@ pub mod hw {
}
#[cfg(not(target_os = "ios"))]
pub fn convert_to_yuv(
captured: &Frame,
captured: &PixelBuffer,
dst_fmt: EncodeYuvFormat,
dst: &mut Vec<u8>,
mid_data: &mut Vec<u8>,

View File

@ -1,4 +1,6 @@
use crate::{common::TraitCapturer, dxgi, Pixfmt};
#[cfg(feature = "gpucodec")]
use crate::AdapterDevice;
use crate::{common::TraitCapturer, dxgi, Frame, Pixfmt};
use std::{
io::{
self,
@ -41,7 +43,7 @@ impl Capturer {
impl TraitCapturer for Capturer {
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
match self.inner.frame(timeout.as_millis() as _) {
Ok(frame) => Ok(Frame::new(frame, self.width, self.height)),
Ok(frame) => Ok(frame),
Err(ref error) if error.kind() == TimedOut => Err(WouldBlock.into()),
Err(error) => Err(error),
}
@ -54,21 +56,31 @@ impl TraitCapturer for Capturer {
fn set_gdi(&mut self) -> bool {
self.inner.set_gdi()
}
#[cfg(feature = "gpucodec")]
fn device(&self) -> AdapterDevice {
self.inner.device()
}
pub struct Frame<'a> {
#[cfg(feature = "gpucodec")]
fn set_output_texture(&mut self, texture: bool) {
self.inner.set_output_texture(texture);
}
}
pub struct PixelBuffer<'a> {
data: &'a [u8],
width: usize,
height: usize,
stride: Vec<usize>,
}
impl<'a> Frame<'a> {
impl<'a> PixelBuffer<'a> {
pub fn new(data: &'a [u8], width: usize, height: usize) -> Self {
let stride0 = data.len() / height;
let mut stride = Vec::new();
stride.push(stride0);
Frame {
PixelBuffer {
data,
width,
height,
@ -77,7 +89,7 @@ impl<'a> Frame<'a> {
}
}
impl<'a> crate::TraitFrame for Frame<'a> {
impl<'a> crate::TraitPixelBuffer for PixelBuffer<'a> {
fn data(&self) -> &[u8] {
self.data
}
@ -184,6 +196,11 @@ impl Display {
// https://docs.microsoft.com/en-us/windows/win32/api/wingdi/ns-wingdi-devmodea
self.origin() == (0, 0)
}
#[cfg(feature = "gpucodec")]
pub fn adapter_luid(&self) -> Option<i64> {
self.0.adapter_luid()
}
}
pub struct CapturerMag {
@ -215,11 +232,11 @@ impl CapturerMag {
impl TraitCapturer for CapturerMag {
fn frame<'a>(&'a mut self, _timeout_ms: Duration) -> io::Result<Frame<'a>> {
self.inner.frame(&mut self.data)?;
Ok(Frame::new(
Ok(Frame::PixelBuffer(PixelBuffer::new(
&self.data,
self.inner.get_rect().1,
self.inner.get_rect().2,
))
)))
}
fn is_gdi(&self) -> bool {
@ -229,4 +246,12 @@ impl TraitCapturer for CapturerMag {
fn set_gdi(&mut self) -> bool {
false
}
#[cfg(feature = "gpucodec")]
fn device(&self) -> AdapterDevice {
AdapterDevice::default()
}
#[cfg(feature = "gpucodec")]
fn set_output_texture(&mut self, _texture: bool) {}
}
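
Together with the new adapter_luid accessor, set_output_texture is what flips a capturer into zero-copy mode: the DXGI duplicator keeps each frame as a GPU texture instead of mapping it into system memory. A hedged sketch of the resulting capture branch (set_output_texture and the texture path exist only behind the gpucodec feature; encoder hand-off omitted):

use std::time::Duration;
use scrap::{Frame, TraitCapturer, TraitPixelBuffer};

#[cfg(all(windows, feature = "gpucodec"))]
fn grab_one(capturer: &mut impl TraitCapturer) -> std::io::Result<()> {
    // Ask DXGI desktop duplication to keep frames on the GPU.
    capturer.set_output_texture(true);
    match capturer.frame(Duration::from_millis(30))? {
        // Zero-copy: a raw D3D texture pointer, fed straight to the GPU encoder.
        Frame::Texture(tex) => assert!(!tex.is_null()),
        // Fallback (e.g. GDI capture): plain pixels for the CPU encoders.
        Frame::PixelBuffer(pb) => println!("captured {} bytes", pb.data().len()),
    }
    Ok(())
}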

View File

@ -0,0 +1,451 @@
use std::{
collections::HashMap,
ffi::c_void,
sync::{Arc, Mutex},
};
use crate::{
codec::{base_bitrate, enable_gpucodec_option, EncoderApi, EncoderCfg, Quality},
AdapterDevice, CodecName, EncodeInput, EncodeYuvFormat, Pixfmt,
};
use gpucodec::gpu_common::{
self, Available, DecodeContext, DynamicContext, EncodeContext, FeatureContext, MAX_GOP,
};
use gpucodec::{
decode::{self, DecodeFrame, Decoder},
encode::{self, EncodeFrame, Encoder},
};
use hbb_common::{
allow_err,
anyhow::{anyhow, bail, Context},
bytes::Bytes,
log,
message_proto::{EncodedVideoFrame, EncodedVideoFrames, VideoFrame},
ResultType,
};
const OUTPUT_SHARED_HANDLE: bool = false;
// https://www.reddit.com/r/buildapc/comments/d2m4ny/two_graphics_cards_two_monitors/
// https://www.reddit.com/r/techsupport/comments/t2v9u6/dual_monitor_setup_with_dual_gpu/
// https://cybersided.com/two-monitors-two-gpus/
// https://learn.microsoft.com/en-us/windows/win32/api/d3d12/nf-d3d12-id3d12device-getadapterluid#remarks
lazy_static::lazy_static! {
static ref ENOCDE_NOT_USE: Arc<Mutex<HashMap<usize, bool>>> = Default::default();
}
#[derive(Debug, Clone)]
pub struct GpuEncoderConfig {
pub device: AdapterDevice,
pub width: usize,
pub height: usize,
pub quality: Quality,
pub feature: gpucodec::gpu_common::FeatureContext,
pub keyframe_interval: Option<usize>,
}
pub struct GpuEncoder {
encoder: Encoder,
pub format: gpu_common::DataFormat,
ctx: EncodeContext,
bitrate: u32,
last_frame_len: usize,
same_bad_len_counter: usize,
}
impl EncoderApi for GpuEncoder {
fn new(cfg: EncoderCfg, _i444: bool) -> ResultType<Self>
where
Self: Sized,
{
match cfg {
EncoderCfg::GPU(config) => {
let b = Self::convert_quality(config.quality, &config.feature);
let base_bitrate = base_bitrate(config.width as _, config.height as _);
let mut bitrate = base_bitrate * b / 100;
if base_bitrate <= 0 {
bitrate = base_bitrate;
}
let gop = config.keyframe_interval.unwrap_or(MAX_GOP as _) as i32;
let ctx = EncodeContext {
f: config.feature.clone(),
d: DynamicContext {
device: Some(config.device.device),
width: config.width as _,
height: config.height as _,
kbitrate: bitrate as _,
framerate: 30,
gop,
},
};
match Encoder::new(ctx.clone()) {
Ok(encoder) => Ok(GpuEncoder {
encoder,
ctx,
format: config.feature.data_format,
bitrate,
last_frame_len: 0,
same_bad_len_counter: 0,
}),
Err(_) => Err(anyhow!(format!("Failed to create encoder"))),
}
}
_ => Err(anyhow!("encoder type mismatch")),
}
}
fn encode_to_message(
&mut self,
frame: EncodeInput,
_ms: i64,
) -> ResultType<hbb_common::message_proto::VideoFrame> {
let texture = frame.texture()?;
let mut vf = VideoFrame::new();
let mut frames = Vec::new();
for frame in self.encode(texture).with_context(|| "Failed to encode")? {
frames.push(EncodedVideoFrame {
data: Bytes::from(frame.data),
pts: frame.pts as _,
key: frame.key == 1,
..Default::default()
});
}
if frames.len() > 0 {
// This kind of problem is occurred after a period of time when using AMD encoding,
// the encoding length is fixed at about 40, and the picture is still
const MIN_BAD_LEN: usize = 100;
const MAX_BAD_COUNTER: usize = 30;
let this_frame_len = frames[0].data.len();
if this_frame_len < MIN_BAD_LEN && this_frame_len == self.last_frame_len {
self.same_bad_len_counter += 1;
if self.same_bad_len_counter >= MAX_BAD_COUNTER {
log::info!(
"{} times encoding len is {}, switch",
self.same_bad_len_counter,
self.last_frame_len
);
bail!(crate::codec::ENCODE_NEED_SWITCH);
}
} else {
self.same_bad_len_counter = 0;
}
self.last_frame_len = this_frame_len;
let frames = EncodedVideoFrames {
frames: frames.into(),
..Default::default()
};
match self.format {
gpu_common::DataFormat::H264 => vf.set_h264s(frames),
gpu_common::DataFormat::H265 => vf.set_h265s(frames),
_ => bail!("{:?} not supported", self.format),
}
Ok(vf)
} else {
Err(anyhow!("no valid frame"))
}
}
fn yuvfmt(&self) -> EncodeYuvFormat {
// useless
EncodeYuvFormat {
pixfmt: Pixfmt::BGRA,
w: self.ctx.d.width as _,
h: self.ctx.d.height as _,
stride: Vec::new(),
u: 0,
v: 0,
}
}
#[cfg(feature = "gpucodec")]
fn input_texture(&self) -> bool {
true
}
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
let b = Self::convert_quality(quality, &self.ctx.f);
let bitrate = base_bitrate(self.ctx.d.width as _, self.ctx.d.height as _) * b / 100;
if bitrate > 0 {
if self.encoder.set_bitrate((bitrate) as _).is_ok() {
self.bitrate = bitrate;
}
}
Ok(())
}
fn bitrate(&self) -> u32 {
self.bitrate
}
fn support_abr(&self) -> bool {
self.ctx.f.driver != gpu_common::EncodeDriver::VPL
}
}
impl GpuEncoder {
pub fn try_get(device: &AdapterDevice, name: CodecName) -> Option<FeatureContext> {
let v: Vec<_> = Self::available(name)
.drain(..)
.filter(|e| e.luid == device.luid)
.collect();
if v.len() > 0 {
Some(v[0].clone())
} else {
None
}
}
pub fn available(name: CodecName) -> Vec<FeatureContext> {
let not_use = ENOCDE_NOT_USE.lock().unwrap().clone();
if not_use.values().any(|not_use| *not_use) {
log::info!("currently not use gpucodec encoders: {not_use:?}");
return vec![];
}
let data_format = match name {
CodecName::H264GPU => gpu_common::DataFormat::H264,
CodecName::H265GPU => gpu_common::DataFormat::H265,
_ => return vec![],
};
let Ok(displays) = crate::Display::all() else {
log::error!("failed to get displays");
return vec![];
};
if displays.is_empty() {
log::error!("no display found");
return vec![];
}
let luids = displays
.iter()
.map(|d| d.adapter_luid())
.collect::<Vec<_>>();
let v: Vec<_> = get_available_config()
.map(|c| c.e)
.unwrap_or_default()
.drain(..)
.filter(|c| c.data_format == data_format)
.collect();
if luids
.iter()
.all(|luid| v.iter().any(|f| Some(f.luid) == *luid))
{
v
} else {
log::info!("not all adapters support {data_format:?}, luids = {luids:?}");
vec![]
}
}
pub fn encode(&mut self, texture: *mut c_void) -> ResultType<Vec<EncodeFrame>> {
match self.encoder.encode(texture) {
Ok(v) => {
let mut data = Vec::<EncodeFrame>::new();
data.append(v);
Ok(data)
}
Err(_) => Ok(Vec::<EncodeFrame>::new()),
}
}
pub fn convert_quality(quality: Quality, f: &FeatureContext) -> u32 {
match quality {
Quality::Best => {
if f.driver == gpu_common::EncodeDriver::VPL
&& f.data_format == gpu_common::DataFormat::H264
{
200
} else {
150
}
}
Quality::Balanced => {
if f.driver == gpu_common::EncodeDriver::VPL
&& f.data_format == gpu_common::DataFormat::H264
{
150
} else {
100
}
}
Quality::Low => {
if f.driver == gpu_common::EncodeDriver::VPL
&& f.data_format == gpu_common::DataFormat::H264
{
75
} else {
50
}
}
Quality::Custom(b) => b,
}
}
pub fn set_not_use(display: usize, not_use: bool) {
log::info!("set display#{display} not use gpucodec encode to {not_use}");
ENOCDE_NOT_USE.lock().unwrap().insert(display, not_use);
}
pub fn not_use() -> bool {
ENOCDE_NOT_USE.lock().unwrap().iter().any(|v| *v.1)
}
}
pub struct GpuDecoder {
decoder: Decoder,
}
#[derive(Default)]
pub struct GpuDecoders {
pub h264: Option<GpuDecoder>,
pub h265: Option<GpuDecoder>,
}
impl GpuDecoder {
pub fn try_get(name: CodecName, luid: Option<i64>) -> Option<DecodeContext> {
let v: Vec<_> = Self::available(name, luid);
if v.len() > 0 {
Some(v[0].clone())
} else {
None
}
}
pub fn available(name: CodecName, luid: Option<i64>) -> Vec<DecodeContext> {
let luid = luid.unwrap_or_default();
let data_format = match name {
CodecName::H264GPU => gpu_common::DataFormat::H264,
CodecName::H265GPU => gpu_common::DataFormat::H265,
_ => return vec![],
};
get_available_config()
.map(|c| c.d)
.unwrap_or_default()
.drain(..)
.filter(|c| c.data_format == data_format && c.luid == luid)
.collect()
}
pub fn possible_available_without_check() -> (bool, bool) {
if !enable_gpucodec_option() {
return (false, false);
}
let v = get_available_config().map(|c| c.d).unwrap_or_default();
(
v.iter()
.any(|d| d.data_format == gpu_common::DataFormat::H264),
v.iter()
.any(|d| d.data_format == gpu_common::DataFormat::H265),
)
}
pub fn new_decoders(luid: Option<i64>) -> GpuDecoders {
let mut h264: Option<GpuDecoder> = None;
let mut h265: Option<GpuDecoder> = None;
if let Ok(decoder) = GpuDecoder::new(CodecName::H264GPU, luid) {
h264 = Some(decoder);
}
if let Ok(decoder) = GpuDecoder::new(CodecName::H265GPU, luid) {
h265 = Some(decoder);
}
log::info!(
"new gpu decoders, support h264: {}, h265: {}",
h264.is_some(),
h265.is_some()
);
GpuDecoders { h264, h265 }
}
pub fn new(name: CodecName, luid: Option<i64>) -> ResultType<Self> {
let ctx = Self::try_get(name, luid).ok_or(anyhow!("Failed to get decode context"))?;
match Decoder::new(ctx) {
Ok(decoder) => Ok(Self { decoder }),
Err(_) => Err(anyhow!(format!("Failed to create decoder"))),
}
}
pub fn decode(&mut self, data: &[u8]) -> ResultType<Vec<GpuDecoderImage>> {
match self.decoder.decode(data) {
Ok(v) => Ok(v.iter().map(|f| GpuDecoderImage { frame: f }).collect()),
Err(e) => Err(anyhow!(e)),
}
}
}
pub struct GpuDecoderImage<'a> {
pub frame: &'a DecodeFrame,
}
impl GpuDecoderImage<'_> {}
fn get_available_config() -> ResultType<Available> {
let available = hbb_common::config::GpucodecConfig::load().available;
match Available::deserialize(&available) {
Ok(v) => Ok(v),
Err(_) => Err(anyhow!("Failed to deserialize:{}", available)),
}
}
pub fn check_available_gpucodec() {
let d = DynamicContext {
device: None,
width: 1920,
height: 1080,
kbitrate: 5000,
framerate: 60,
gop: MAX_GOP as _,
};
let encoders = encode::available(d);
let decoders = decode::available(OUTPUT_SHARED_HANDLE);
let available = Available {
e: encoders,
d: decoders,
};
if let Ok(available) = available.serialize() {
let mut config = hbb_common::config::GpucodecConfig::load();
config.available = available;
config.store();
return;
}
log::error!("Failed to serialize gpucodec");
}
pub fn gpucodec_new_check_process() {
use std::sync::Once;
static ONCE: Once = Once::new();
ONCE.call_once(|| {
std::thread::spawn(move || {
// Remove to avoid checking process errors
// But when the program is just started, the configuration file has not been updated, and the new connection will read an empty configuration
hbb_common::config::GpucodecConfig::clear();
if let Ok(exe) = std::env::current_exe() {
let arg = "--check-gpucodec-config";
if let Ok(mut child) = std::process::Command::new(exe).arg(arg).spawn() {
// wait up to 30 seconds
for _ in 0..30 {
std::thread::sleep(std::time::Duration::from_secs(1));
if let Ok(Some(_)) = child.try_wait() {
break;
}
}
allow_err!(child.kill());
std::thread::sleep(std::time::Duration::from_millis(30));
match child.try_wait() {
Ok(Some(status)) => {
log::info!("Check gpucodec config, exit with: {status}")
}
Ok(None) => {
log::info!(
"Check gpucodec config, status not ready yet, let's really wait"
);
let res = child.wait();
log::info!("Check gpucodec config, wait result: {res:?}");
}
Err(e) => {
log::error!("Check gpucodec config, error attempting to wait: {e}")
}
}
}
};
});
});
}
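
On the receive side, Decoder::handle_gpu_video_frame in codec.rs drives this decoder and surfaces a GPU texture instead of RGB pixels; the Flutter layer then displays it through the registered flutter_gpu_texture_renderer output. A hedged sketch of that hand-off using only the GpuDecoder API defined above (the module is compiled only with the gpucodec feature):

use std::ffi::c_void;
use hbb_common::ResultType;
use scrap::gpucodec::GpuDecoder;

// Decode one H.264/H.265 packet and return the last decoded GPU texture,
// mirroring Decoder::handle_gpu_video_frame.
fn decode_to_texture(decoder: &mut GpuDecoder, packet: &[u8]) -> ResultType<*mut c_void> {
    let mut texture: *mut c_void = std::ptr::null_mut();
    for image in decoder.decode(packet)? {
        texture = image.frame.texture;
    }
    Ok(texture)
}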

View File

@ -1,6 +1,6 @@
use crate::{
codec::{base_bitrate, codec_thread_num, EncoderApi, EncoderCfg},
hw, ImageFormat, ImageRgb, Pixfmt, HW_STRIDE_ALIGN,
codec::{base_bitrate, codec_thread_num, EncoderApi, EncoderCfg, Quality as Q},
hw, EncodeInput, ImageFormat, ImageRgb, Pixfmt, HW_STRIDE_ALIGN,
};
use hbb_common::{
allow_err,
@ -29,8 +29,18 @@ const DEFAULT_GOP: i32 = i32::MAX;
const DEFAULT_HW_QUALITY: Quality = Quality_Default;
const DEFAULT_RC: RateControl = RC_DEFAULT;
#[derive(Debug, Clone)]
pub struct HwEncoderConfig {
pub name: String,
pub width: usize,
pub height: usize,
pub quality: Q,
pub keyframe_interval: Option<usize>,
}
pub struct HwEncoder {
encoder: Encoder,
name: String,
pub format: DataFormat,
pub pixfmt: AVPixelFormat,
width: u32,
@ -77,6 +87,7 @@ impl EncoderApi for HwEncoder {
match Encoder::new(ctx.clone()) {
Ok(encoder) => Ok(HwEncoder {
encoder,
name: config.name,
format,
pixfmt: ctx.pixfmt,
width: ctx.width as _,
@ -90,10 +101,13 @@ impl EncoderApi for HwEncoder {
}
}
fn encode_to_message(&mut self, frame: &[u8], _ms: i64) -> ResultType<VideoFrame> {
fn encode_to_message(&mut self, input: EncodeInput, _ms: i64) -> ResultType<VideoFrame> {
let mut vf = VideoFrame::new();
let mut frames = Vec::new();
for frame in self.encode(frame).with_context(|| "Failed to encode")? {
for frame in self
.encode(input.yuv()?)
.with_context(|| "Failed to encode")?
{
frames.push(EncodedVideoFrame {
data: Bytes::from(frame.data),
pts: frame.pts as _,
@ -143,6 +157,11 @@ impl EncoderApi for HwEncoder {
}
}
#[cfg(feature = "gpucodec")]
fn input_texture(&self) -> bool {
false
}
fn set_quality(&mut self, quality: crate::codec::Quality) -> ResultType<()> {
let b = Self::convert_quality(quality);
let bitrate = base_bitrate(self.width as _, self.height as _) * b / 100;
@ -156,6 +175,10 @@ impl EncoderApi for HwEncoder {
fn bitrate(&self) -> u32 {
self.bitrate
}
fn support_abr(&self) -> bool {
!self.name.contains("qsv")
}
}
impl HwEncoder {
@ -226,7 +249,7 @@ impl HwDecoder {
}
}
if fail {
check_config_process();
hwcodec_new_check_process();
}
HwDecoders { h264, h265 }
}
@ -320,7 +343,7 @@ fn get_config(k: &str) -> ResultType<CodecInfos> {
}
}
pub fn check_config() {
pub fn check_available_hwcodec() {
let ctx = EncodeContext {
name: String::from(""),
width: 1920,
@ -357,7 +380,7 @@ pub fn check_config() {
log::error!("Failed to serialize codec info");
}
pub fn check_config_process() {
pub fn hwcodec_new_check_process() {
use std::sync::Once;
let f = || {
// Clear to avoid checking process errors

View File

@ -1,7 +1,10 @@
use crate::common::{
use crate::{
common::{
wayland,
x11::{self, Frame},
x11::{self},
TraitCapturer,
},
Frame,
};
use std::{io, time::Duration};

View File

@ -1,9 +1,10 @@
pub use self::vpxcodec::*;
use hbb_common::{
log,
bail, log,
message_proto::{video_frame, Chroma, VideoFrame},
ResultType,
};
use std::slice;
use std::{ffi::c_void, slice};
cfg_if! {
if #[cfg(quartz)] {
@ -16,8 +17,8 @@ cfg_if! {
mod wayland;
mod x11;
pub use self::linux::*;
pub use self::x11::Frame;
pub use self::wayland::set_map_err;
pub use self::x11::PixelBuffer;
} else {
mod x11;
pub use self::x11::*;
@ -36,6 +37,8 @@ cfg_if! {
pub mod codec;
pub mod convert;
#[cfg(feature = "gpucodec")]
pub mod gpucodec;
#[cfg(feature = "hwcodec")]
pub mod hwcodec;
#[cfg(feature = "mediacodec")]
@ -107,9 +110,32 @@ pub trait TraitCapturer {
fn is_gdi(&self) -> bool;
#[cfg(windows)]
fn set_gdi(&mut self) -> bool;
#[cfg(feature = "gpucodec")]
fn device(&self) -> AdapterDevice;
#[cfg(feature = "gpucodec")]
fn set_output_texture(&mut self, texture: bool);
}
pub trait TraitFrame {
#[derive(Debug, Clone, Copy)]
pub struct AdapterDevice {
pub device: *mut c_void,
pub vendor_id: ::std::os::raw::c_uint,
pub luid: i64,
}
impl Default for AdapterDevice {
fn default() -> Self {
Self {
device: std::ptr::null_mut(),
vendor_id: Default::default(),
luid: Default::default(),
}
}
}
pub trait TraitPixelBuffer {
fn data(&self) -> &[u8];
fn width(&self) -> usize;
@ -120,6 +146,59 @@ pub trait TraitFrame {
fn pixfmt(&self) -> Pixfmt;
}
#[cfg(not(any(target_os = "ios")))]
pub enum Frame<'a> {
PixelBuffer(PixelBuffer<'a>),
Texture(*mut c_void),
}
#[cfg(not(any(target_os = "ios")))]
impl Frame<'_> {
pub fn valid<'a>(&'a self) -> bool {
match self {
Frame::PixelBuffer(pixelbuffer) => !pixelbuffer.data().is_empty(),
Frame::Texture(texture) => !texture.is_null(),
}
}
pub fn to<'a>(
&'a self,
yuvfmt: EncodeYuvFormat,
yuv: &'a mut Vec<u8>,
mid_data: &mut Vec<u8>,
) -> ResultType<EncodeInput> {
match self {
Frame::PixelBuffer(pixelbuffer) => {
convert_to_yuv(&pixelbuffer, yuvfmt, yuv, mid_data)?;
Ok(EncodeInput::YUV(yuv))
}
Frame::Texture(texture) => Ok(EncodeInput::Texture(*texture)),
}
}
}
pub enum EncodeInput<'a> {
YUV(&'a [u8]),
Texture(*mut c_void),
}
impl<'a> EncodeInput<'a> {
pub fn yuv(&self) -> ResultType<&'_ [u8]> {
match self {
Self::YUV(f) => Ok(f),
_ => bail!("not pixelfbuffer frame"),
}
}
pub fn texture(&self) -> ResultType<*mut c_void> {
match self {
Self::Texture(f) => Ok(*f),
_ => bail!("not texture frame"),
}
}
}
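
A minimal usage sketch of EncodeInput, assuming an encoder that reports whether it consumes textures (as the input_texture() methods elsewhere in this diff do); the function and parameter names below are illustrative only:

// Sketch: dispatch on the kind of frame an encoder can consume.
fn feed_encoder_sketch(input: EncodeInput, wants_texture: bool) -> ResultType<()> {
    if wants_texture {
        // zero-copy path: hand the ID3D11Texture2D pointer straight to the GPU encoder
        let _texture: *mut c_void = input.texture()?;
    } else {
        // conventional path: encode from the converted YUV buffer
        let _yuv: &[u8] = input.yuv()?;
    }
    Ok(())
}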
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Pixfmt {
BGRA,
@ -166,8 +245,10 @@ pub enum CodecName {
VP8,
VP9,
AV1,
H264(String),
H265(String),
H264HW(String),
H265HW(String),
H264GPU,
H265GPU,
}
#[derive(PartialEq, Debug, Clone)]
@ -199,8 +280,8 @@ impl From<&CodecName> for CodecFormat {
CodecName::VP8 => Self::VP8,
CodecName::VP9 => Self::VP9,
CodecName::AV1 => Self::AV1,
CodecName::H264(_) => Self::H264,
CodecName::H265(_) => Self::H265,
CodecName::H264HW(_) | CodecName::H264GPU => Self::H264,
CodecName::H265HW(_) | CodecName::H265GPU => Self::H265,
}
}
}

View File

@ -1,4 +1,4 @@
use crate::{quartz, Pixfmt};
use crate::{quartz, Frame, Pixfmt};
use std::marker::PhantomData;
use std::sync::{Arc, Mutex, TryLockError};
use std::{io, mem};
@ -55,12 +55,12 @@ impl crate::TraitCapturer for Capturer {
Some(mut frame) => {
crate::would_block_if_equal(&mut self.saved_raw_data, frame.inner())?;
frame.surface_to_bgra(self.height());
Ok(Frame {
Ok(Frame::PixelBuffer(PixelBuffer {
frame,
data: PhantomData,
width: self.width(),
height: self.height(),
})
}))
}
None => Err(io::ErrorKind::WouldBlock.into()),
@ -74,14 +74,14 @@ impl crate::TraitCapturer for Capturer {
}
}
pub struct Frame<'a> {
pub struct PixelBuffer<'a> {
frame: quartz::Frame,
data: PhantomData<&'a [u8]>,
width: usize,
height: usize,
}
impl<'a> crate::TraitFrame for Frame<'a> {
impl<'a> crate::TraitPixelBuffer for PixelBuffer<'a> {
fn data(&self) -> &[u8] {
&*self.frame
}

View File

@ -8,7 +8,7 @@ use hbb_common::message_proto::{Chroma, EncodedVideoFrame, EncodedVideoFrames, V
use hbb_common::ResultType;
use crate::codec::{base_bitrate, codec_thread_num, EncoderApi, Quality};
use crate::{EncodeYuvFormat, GoogleImage, Pixfmt, STRIDE_ALIGN};
use crate::{EncodeInput, EncodeYuvFormat, GoogleImage, Pixfmt, STRIDE_ALIGN};
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
use crate::{generate_call_macro, generate_call_ptr_macro, Error, Result};
@ -183,10 +183,10 @@ impl EncoderApi for VpxEncoder {
}
}
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<VideoFrame> {
fn encode_to_message(&mut self, input: EncodeInput, ms: i64) -> ResultType<VideoFrame> {
let mut frames = Vec::new();
for ref frame in self
.encode(ms, frame, STRIDE_ALIGN)
.encode(ms, input.yuv()?, STRIDE_ALIGN)
.with_context(|| "Failed to encode")?
{
frames.push(VpxEncoder::create_frame(frame));
@ -207,6 +207,11 @@ impl EncoderApi for VpxEncoder {
self.yuvfmt.clone()
}
#[cfg(feature = "gpucodec")]
fn input_texture(&self) -> bool {
false
}
fn set_quality(&mut self, quality: Quality) -> ResultType<()> {
let mut c = unsafe { *self.ctx.config.enc.to_owned() };
let (q_min, q_max, b) = Self::convert_quality(quality);
@ -226,6 +231,10 @@ impl EncoderApi for VpxEncoder {
let c = unsafe { *self.ctx.config.enc.to_owned() };
c.rc_target_bitrate
}
fn support_abr(&self) -> bool {
true
}
}
impl VpxEncoder {

View File

@ -1,7 +1,11 @@
use crate::common::{x11::Frame, TraitCapturer};
use crate::wayland::{capturable::*, *};
use crate::{
wayland::{capturable::*, *},
Frame, TraitCapturer,
};
use std::{io, sync::RwLock, time::Duration};
use super::x11::PixelBuffer;
pub struct Capturer(Display, Box<dyn Recorder>, Vec<u8>);
@ -39,8 +43,18 @@ impl Capturer {
impl TraitCapturer for Capturer {
fn frame<'a>(&'a mut self, timeout: Duration) -> io::Result<Frame<'a>> {
match self.1.capture(timeout.as_millis() as _).map_err(map_err)? {
PixelProvider::BGR0(w, h, x) => Ok(Frame::new(x, crate::Pixfmt::BGRA, w, h)),
PixelProvider::RGB0(w, h, x) => Ok(Frame::new(x, crate::Pixfmt::RGBA, w,h)),
PixelProvider::BGR0(w, h, x) => Ok(Frame::PixelBuffer(PixelBuffer::new(
x,
crate::Pixfmt::BGRA,
w,
h,
))),
PixelProvider::RGB0(w, h, x) => Ok(Frame::PixelBuffer(PixelBuffer::new(
x,
crate::Pixfmt::RGBA,
w,
h,
))),
PixelProvider::NONE => Err(std::io::ErrorKind::WouldBlock.into()),
_ => Err(map_err("Invalid data")),
}

View File

@ -1,4 +1,4 @@
use crate::{common::TraitCapturer, x11, Pixfmt, TraitFrame};
use crate::{common::TraitCapturer, x11, Frame, Pixfmt, TraitPixelBuffer};
use std::{io, time::Duration};
pub struct Capturer(x11::Capturer);
@ -21,19 +21,26 @@ impl Capturer {
impl TraitCapturer for Capturer {
fn frame<'a>(&'a mut self, _timeout: Duration) -> io::Result<Frame<'a>> {
Ok(self.0.frame()?)
let width = self.width();
let height = self.height();
Ok(Frame::PixelBuffer(PixelBuffer::new(
self.0.frame()?,
Pixfmt::BGRA,
width,
height,
)))
}
}
pub struct Frame<'a> {
pub data: &'a [u8],
pub pixfmt: Pixfmt,
pub width: usize,
pub height: usize,
pub stride: Vec<usize>,
pub struct PixelBuffer<'a> {
data: &'a [u8],
pixfmt: Pixfmt,
width: usize,
height: usize,
stride: Vec<usize>,
}
impl<'a> Frame<'a> {
impl<'a> PixelBuffer<'a> {
pub fn new(data: &'a [u8], pixfmt: Pixfmt, width:usize, height: usize) -> Self {
let stride0 = data.len() / height;
let mut stride = Vec::new();
@ -48,7 +55,7 @@ impl<'a> Frame<'a> {
}
}
impl<'a> TraitFrame for Frame<'a> {
impl<'a> TraitPixelBuffer for PixelBuffer<'a> {
fn data(&self) -> &[u8] {
self.data
}

View File

@ -22,6 +22,9 @@ use winapi::{
use crate::RotationMode::*;
use crate::{AdapterDevice, Frame, PixelBuffer};
use std::ffi::c_void;
pub struct ComPtr<T>(*mut T);
impl<T> ComPtr<T> {
fn is_null(&self) -> bool {
@ -45,12 +48,15 @@ pub struct Capturer {
duplication: ComPtr<IDXGIOutputDuplication>,
fastlane: bool,
surface: ComPtr<IDXGISurface>,
texture: ComPtr<ID3D11Texture2D>,
width: usize,
height: usize,
rotated: Vec<u8>,
gdi_capturer: Option<CapturerGDI>,
gdi_buffer: Vec<u8>,
saved_raw_data: Vec<u8>, // for faster compare and copy
output_texture: bool,
adapter_desc1: DXGI_ADAPTER_DESC1,
}
impl Capturer {
@ -60,12 +66,14 @@ impl Capturer {
let mut duplication = ptr::null_mut();
#[allow(invalid_value)]
let mut desc = unsafe { mem::MaybeUninit::uninit().assume_init() };
#[allow(invalid_value)]
let mut adapter_desc1 = unsafe { mem::MaybeUninit::uninit().assume_init() };
let mut gdi_capturer = None;
let mut res = if display.gdi {
wrap_hresult(1)
} else {
wrap_hresult(unsafe {
let res = wrap_hresult(unsafe {
D3D11CreateDevice(
display.adapter.0 as *mut _,
D3D_DRIVER_TYPE_UNKNOWN,
@ -78,7 +86,12 @@ impl Capturer {
ptr::null_mut(),
&mut context,
)
})
});
if res.is_ok() {
wrap_hresult(unsafe { (*display.adapter.0).GetDesc1(&mut adapter_desc1) })
} else {
res
}
};
let device = ComPtr(device);
let context = ComPtr(context);
@ -145,6 +158,7 @@ impl Capturer {
duplication: ComPtr(duplication),
fastlane: desc.DesktopImageInSystemMemory == TRUE,
surface: ComPtr(ptr::null_mut()),
texture: ComPtr(ptr::null_mut()),
width: display.width() as usize,
height: display.height() as usize,
display,
@ -152,6 +166,8 @@ impl Capturer {
gdi_capturer,
gdi_buffer: Vec::new(),
saved_raw_data: Vec::new(),
output_texture: false,
adapter_desc1,
})
}
@ -169,6 +185,11 @@ impl Capturer {
self.gdi_capturer.take();
}
#[cfg(feature = "gpucodec")]
pub fn set_output_texture(&mut self, texture: bool) {
self.output_texture = texture;
}
unsafe fn load_frame(&mut self, timeout: UINT) -> io::Result<(*const u8, i32)> {
let mut frame = ptr::null_mut();
#[allow(invalid_value)]
@ -230,7 +251,21 @@ impl Capturer {
Ok(surface)
}
pub fn frame<'a>(&'a mut self, timeout: UINT) -> io::Result<&'a [u8]> {
pub fn frame<'a>(&'a mut self, timeout: UINT) -> io::Result<Frame<'a>> {
if self.output_texture {
Ok(Frame::Texture(self.get_texture(timeout)?))
} else {
let width = self.width;
let height = self.height;
Ok(Frame::PixelBuffer(PixelBuffer::new(
self.get_pixelbuffer(timeout)?,
width,
height,
)))
}
}
fn get_pixelbuffer<'a>(&'a mut self, timeout: UINT) -> io::Result<&'a [u8]> {
unsafe {
// Release last frame.
// No error checking needed because we don't care.
@ -293,6 +328,34 @@ impl Capturer {
}
}
fn get_texture(&mut self, timeout: UINT) -> io::Result<*mut c_void> {
unsafe {
if self.duplication.0.is_null() {
return Err(std::io::ErrorKind::AddrNotAvailable.into());
}
(*self.duplication.0).ReleaseFrame();
let mut frame = ptr::null_mut();
#[allow(invalid_value)]
let mut info = mem::MaybeUninit::uninit().assume_init();
wrap_hresult((*self.duplication.0).AcquireNextFrame(timeout, &mut info, &mut frame))?;
let frame = ComPtr(frame);
if info.AccumulatedFrames == 0 || *info.LastPresentTime.QuadPart() == 0 {
return Err(std::io::ErrorKind::WouldBlock.into());
}
let mut texture: *mut ID3D11Texture2D = ptr::null_mut();
(*frame.0).QueryInterface(
&IID_ID3D11Texture2D,
&mut texture as *mut *mut _ as *mut *mut _,
);
let texture = ComPtr(texture);
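// Store the ComPtr on self so the raw texture pointer returned below outlives this call; it is replaced on the next frame.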
self.texture = texture;
Ok(self.texture.0 as *mut c_void)
}
}
fn unmap(&self) {
unsafe {
(*self.duplication.0).ReleaseFrame();
@ -305,6 +368,15 @@ impl Capturer {
}
}
}
pub fn device(&self) -> AdapterDevice {
AdapterDevice {
device: self.device.0 as _,
vendor_id: self.adapter_desc1.VendorId,
luid: ((self.adapter_desc1.AdapterLuid.HighPart as i64) << 32)
| self.adapter_desc1.AdapterLuid.LowPart as i64,
}
}
}
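
Both device() above and Display::adapter_luid() below pack the DXGI adapter LUID into a single i64 the same way; a small worked sketch of the round trip (the helper names are illustrative, not part of the code):

// HighPart is an i32, LowPart a u32 (winapi LUID); the low half is zero-extended.
fn pack_luid(high: i32, low: u32) -> i64 {
    ((high as i64) << 32) | low as i64
}
fn unpack_luid(luid: i64) -> (i32, u32) {
    ((luid >> 32) as i32, luid as u32)
}
// e.g. pack_luid(1, 0x2AF3) == 0x0000_0001_0000_2AF3, and unpack_luid round-trips it.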
impl Drop for Capturer {
@ -547,6 +619,22 @@ impl Display {
self.desc.DesktopCoordinates.top,
)
}
#[cfg(feature = "gpucodec")]
pub fn adapter_luid(&self) -> Option<i64> {
unsafe {
if !self.adapter.is_null() {
#[allow(invalid_value)]
let mut adapter_desc1 = mem::MaybeUninit::uninit().assume_init();
if wrap_hresult((*self.adapter.0).GetDesc1(&mut adapter_desc1)).is_ok() {
let luid = ((adapter_desc1.AdapterLuid.HighPart as i64) << 32)
| adapter_desc1.AdapterLuid.LowPart as i64;
return Some(luid);
}
}
None
}
}
}
fn wrap_hresult(x: HRESULT) -> io::Result<()> {

View File

@ -1,11 +1,7 @@
use std::{io, ptr, slice};
use hbb_common::libc;
use crate::Frame;
use super::ffi::*;
use super::Display;
use hbb_common::libc;
use std::{io, ptr, slice};
pub struct Capturer {
display: Display,
@ -97,13 +93,11 @@ impl Capturer {
}
}
pub fn frame<'b>(&'b mut self) -> std::io::Result<Frame> {
pub fn frame<'b>(&'b mut self) -> std::io::Result<&'b [u8]> {
self.get_image();
let result = unsafe { slice::from_raw_parts(self.buffer, self.size) };
crate::would_block_if_equal(&mut self.saved_raw_data, result)?;
Ok(
Frame::new(result, crate::Pixfmt::BGRA, self.display.w(), self.display.h())
)
Ok(result)
}
}

View File

@ -1,5 +1,6 @@
use std::{
collections::HashMap,
ffi::c_void,
net::SocketAddr,
ops::Deref,
str::FromStr,
@ -155,7 +156,7 @@ pub fn get_key_state(key: enigo::Key) -> bool {
cfg_if::cfg_if! {
if #[cfg(target_os = "android")] {
use hbb_common::libc::{c_float, c_int, c_void};
use hbb_common::libc::{c_float, c_int};
type Oboe = *mut c_void;
extern "C" {
fn create_oboe_player(channels: c_int, sample_rate: c_int) -> Oboe;
@ -1020,6 +1021,7 @@ impl AudioHandler {
pub struct VideoHandler {
decoder: Decoder,
pub rgb: ImageRgb,
pub texture: *mut c_void,
recorder: Arc<Mutex<Option<Recorder>>>,
record: bool,
_display: usize, // useful for debug
@ -1028,10 +1030,16 @@ pub struct VideoHandler {
impl VideoHandler {
/// Create a new video handler.
pub fn new(_display: usize) -> Self {
#[cfg(all(feature = "gpucodec", feature = "flutter"))]
let luid = crate::flutter::get_adapter_luid();
#[cfg(not(all(feature = "gpucodec", feature = "flutter")))]
let luid = Default::default();
println!("new session_get_adapter_luid: {:?}", luid);
log::info!("new video handler for display #{_display}");
VideoHandler {
decoder: Decoder::new(),
decoder: Decoder::new(luid),
rgb: ImageRgb::new(ImageFormat::ARGB, crate::DST_STRIDE_RGBA),
texture: std::ptr::null_mut(),
recorder: Default::default(),
record: false,
_display,
@ -1043,13 +1051,18 @@ impl VideoHandler {
pub fn handle_frame(
&mut self,
vf: VideoFrame,
pixelbuffer: &mut bool,
chroma: &mut Option<Chroma>,
) -> ResultType<bool> {
match &vf.union {
Some(frame) => {
let res = self
.decoder
.handle_video_frame(frame, &mut self.rgb, chroma);
let res = self.decoder.handle_video_frame(
frame,
&mut self.rgb,
&mut self.texture,
pixelbuffer,
chroma,
);
if self.record {
self.recorder
.lock()
@ -1065,7 +1078,11 @@ impl VideoHandler {
/// Reset the decoder.
pub fn reset(&mut self) {
self.decoder = Decoder::new();
#[cfg(all(feature = "flutter", feature = "gpucodec"))]
let luid = crate::flutter::get_adapter_luid();
#[cfg(not(all(feature = "flutter", feature = "gpucodec")))]
let luid = None;
self.decoder = Decoder::new(luid);
}
/// Start or stop screen record.
@ -1113,6 +1130,7 @@ pub struct LoginConfigHandler {
pub save_ab_password_to_recent: bool, // true: connected with ab password
pub other_server: Option<(String, String, String)>,
pub custom_fps: Arc<Mutex<Option<usize>>>,
pub adapter_luid: Option<i64>,
}
impl Deref for LoginConfigHandler {
@ -1136,6 +1154,7 @@ impl LoginConfigHandler {
conn_type: ConnType,
switch_uuid: Option<String>,
mut force_relay: bool,
adapter_luid: Option<i64>,
) {
let mut id = id;
if id.contains("@") {
@ -1197,6 +1216,7 @@ impl LoginConfigHandler {
self.direct = None;
self.received = false;
self.switch_uuid = switch_uuid;
self.adapter_luid = adapter_luid;
}
/// Check if the client should auto login.
@ -1536,7 +1556,11 @@ impl LoginConfigHandler {
n += 1;
}
msg.supported_decoding =
hbb_common::protobuf::MessageField::some(Decoder::supported_decodings(Some(&self.id)));
hbb_common::protobuf::MessageField::some(Decoder::supported_decodings(
Some(&self.id),
cfg!(feature = "flutter"),
self.adapter_luid,
));
n += 1;
if n > 0 {
@ -1842,6 +1866,7 @@ impl LoginConfigHandler {
// no matter if change, for update file time
self.save_config(config);
self.supported_encoding = pi.encoding.clone().unwrap_or_default();
log::info!("peer info supported_encoding:{:?}", self.supported_encoding);
}
pub fn get_remote_dir(&self) -> String {
@ -1915,8 +1940,12 @@ impl LoginConfigHandler {
msg_out
}
pub fn change_prefer_codec(&self) -> Message {
let decoding = scrap::codec::Decoder::supported_decodings(Some(&self.id));
pub fn update_supported_decodings(&self) -> Message {
let decoding = scrap::codec::Decoder::supported_decodings(
Some(&self.id),
cfg!(feature = "flutter"),
self.adapter_luid,
);
let mut misc = Misc::new();
misc.set_option(OptionMessage {
supported_decoding: hbb_common::protobuf::MessageField::some(decoding),
@ -1927,6 +1956,44 @@ impl LoginConfigHandler {
msg_out
}
fn real_supported_decodings(
&self,
handler_controller_map: &Vec<VideoHandlerController>,
) -> Data {
let abilities: Vec<CodecAbility> = handler_controller_map
.iter()
.map(|h| h.handler.decoder.exist_codecs(cfg!(feature = "flutter")))
.collect();
let all = |ability: fn(&CodecAbility) -> bool| -> i32 {
if abilities.iter().all(|d| ability(d)) {
1
} else {
0
}
};
let decoding = scrap::codec::Decoder::supported_decodings(
Some(&self.id),
cfg!(feature = "flutter"),
self.adapter_luid,
);
let decoding = SupportedDecoding {
ability_vp8: all(|e| e.vp8),
ability_vp9: all(|e| e.vp9),
ability_av1: all(|e| e.av1),
ability_h264: all(|e| e.h264),
ability_h265: all(|e| e.h265),
..decoding
};
let mut misc = Misc::new();
misc.set_option(OptionMessage {
supported_decoding: hbb_common::protobuf::MessageField::some(decoding),
..Default::default()
});
let mut msg_out = Message::new();
msg_out.set_misc(misc);
Data::Message(msg_out)
}
pub fn restart_remote_device(&self) -> Message {
let mut misc = Misc::new();
misc.set_restart_remote_device(true);
@ -1972,13 +2039,14 @@ pub fn start_video_audio_threads<F, T>(
Arc<RwLock<Option<Chroma>>>,
)
where
F: 'static + FnMut(usize, &mut scrap::ImageRgb) + Send,
F: 'static + FnMut(usize, &mut scrap::ImageRgb, *mut c_void, bool) + Send,
T: InvokeUiSession,
{
let (video_sender, video_receiver) = mpsc::channel::<MediaData>();
let video_queue_map: Arc<RwLock<HashMap<usize, ArrayQueue<VideoFrame>>>> = Default::default();
let video_queue_map_cloned = video_queue_map.clone();
let mut video_callback = video_callback;
let fps_map = Arc::new(RwLock::new(HashMap::new()));
let decode_fps_map = fps_map.clone();
let chroma = Arc::new(RwLock::new(None));
@ -2018,6 +2086,7 @@ where
};
let display = vf.display as usize;
let start = std::time::Instant::now();
let mut created_new_handler = false;
if handler_controller_map.len() <= display {
for _i in handler_controller_map.len()..=display {
handler_controller_map.push(VideoHandlerController {
@ -2026,13 +2095,33 @@ where
duration: std::time::Duration::ZERO,
skip_beginning: 0,
});
created_new_handler = true;
}
}
if created_new_handler {
session.send(
session
.lc
.read()
.unwrap()
.real_supported_decodings(&handler_controller_map),
);
}
if let Some(handler_controller) = handler_controller_map.get_mut(display) {
let mut pixelbuffer = true;
let mut tmp_chroma = None;
match handler_controller.handler.handle_frame(vf, &mut tmp_chroma) {
match handler_controller.handler.handle_frame(
vf,
&mut pixelbuffer,
&mut tmp_chroma,
) {
Ok(true) => {
video_callback(display, &mut handler_controller.handler.rgb);
video_callback(
display,
&mut handler_controller.handler.rgb,
handler_controller.handler.texture,
pixelbuffer,
);
// chroma
if tmp_chroma.is_some() && last_chroma != tmp_chroma {
@ -2085,6 +2174,13 @@ where
MediaData::Reset(display) => {
if let Some(handler_controler) = handler_controller_map.get_mut(display) {
handler_controler.handler.reset();
session.send(
session
.lc
.read()
.unwrap()
.real_supported_decodings(&handler_controller_map),
);
}
}
MediaData::RecordScreen(start, display, w, h, id) => {

View File

@ -1495,6 +1495,11 @@ impl<T: InvokeUiSession> Remote<T> {
};
self.handler.msgbox("custom-nocancel", &name, &p.msg, "");
}
Some(misc::Union::SupportedEncoding(e)) => {
log::info!("update supported encoding:{:?}", e);
self.handler.lc.write().unwrap().supported_encoding = e;
}
_ => {}
},
Some(message::Union::TestDelay(t)) => {

View File

@ -420,7 +420,11 @@ pub fn core_main() -> Option<Vec<String>> {
return None;
} else if args[0] == "--check-hwcodec-config" {
#[cfg(feature = "hwcodec")]
scrap::hwcodec::check_config();
scrap::hwcodec::check_available_hwcodec();
return None;
} else if args[0] == "--check-gpucodec-config" {
#[cfg(feature = "gpucodec")]
scrap::gpucodec::check_available_gpucodec();
return None;
} else if args[0] == "--cm" {
// call connection manager to establish connections

View File

@ -4,20 +4,20 @@ use crate::{
ui_session_interface::{io_loop, InvokeUiSession, Session},
};
use flutter_rust_bridge::StreamSink;
#[cfg(any(feature = "flutter_texture_render", feature = "gpucodec"))]
use hbb_common::dlopen::{
symbor::{Library, Symbol},
Error as LibError,
};
use hbb_common::{
anyhow::anyhow, bail, config::LocalConfig, get_version_number, log, message_proto::*,
rendezvous_proto::ConnType, ResultType,
};
#[cfg(feature = "flutter_texture_render")]
use hbb_common::{
dlopen::{
symbor::{Library, Symbol},
Error as LibError,
},
libc::c_void,
};
use serde_json::json;
#[cfg(any(feature = "flutter_texture_render", feature = "gpucodec"))]
use std::os::raw::c_void;
use std::{
collections::HashMap,
ffi::CString,
@ -62,6 +62,11 @@ lazy_static::lazy_static! {
pub static ref TEXTURE_RGBA_RENDERER_PLUGIN: Result<Library, LibError> = Library::open_self();
}
#[cfg(all(target_os = "windows", feature = "gpucodec"))]
lazy_static::lazy_static! {
pub static ref TEXTURE_GPU_RENDERER_PLUGIN: Result<Library, LibError> = Library::open("flutter_gpu_texture_renderer_plugin.dll");
}
/// FFI for rustdesk core's main entry.
/// Return true if the app should continue running with UI(possibly Flutter), false if the app should exit.
#[cfg(not(windows))]
@ -151,19 +156,29 @@ pub unsafe extern "C" fn free_c_args(ptr: *mut *mut c_char, len: c_int) {
#[derive(Default)]
struct SessionHandler {
event_stream: Option<StreamSink<EventToUI>>,
#[cfg(feature = "flutter_texture_render")]
notify_rendered: bool,
#[cfg(feature = "flutter_texture_render")]
#[cfg(any(feature = "flutter_texture_render", feature = "gpucodec"))]
renderer: VideoRenderer,
}
#[cfg(feature = "flutter_texture_render")]
#[cfg(any(feature = "flutter_texture_render", feature = "gpucodec"))]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum RenderType {
PixelBuffer,
#[cfg(feature = "gpucodec")]
Texture,
}
#[derive(Default, Clone)]
pub struct FlutterHandler {
// ui session id -> display handler data
session_handlers: Arc<RwLock<HashMap<SessionID, SessionHandler>>>,
#[cfg(not(feature = "flutter_texture_render"))]
display_rgbas: Arc<RwLock<HashMap<usize, RgbaData>>>,
peer_info: Arc<RwLock<PeerInfo>>,
#[cfg(feature = "plugin_framework")]
#[cfg(any(
not(feature = "flutter_texture_render"),
all(feature = "flutter_texture_render", feature = "plugin_framework")
))]
#[cfg(not(any(target_os = "android", target_os = "ios")))]
hooks: Arc<RwLock<HashMap<String, SessionHook>>>,
}
@ -177,16 +192,6 @@ struct RgbaData {
valid: bool,
}
#[cfg(not(feature = "flutter_texture_render"))]
#[derive(Default, Clone)]
pub struct FlutterHandler {
session_handlers: Arc<RwLock<HashMap<SessionID, SessionHandler>>>,
display_rgbas: Arc<RwLock<HashMap<usize, RgbaData>>>,
peer_info: Arc<RwLock<PeerInfo>>,
#[cfg(not(any(target_os = "android", target_os = "ios")))]
hooks: Arc<RwLock<HashMap<String, SessionHook>>>,
}
#[cfg(feature = "flutter_texture_render")]
pub type FlutterRgbaRendererPluginOnRgba = unsafe extern "C" fn(
texture_rgba: *mut c_void,
@ -197,28 +202,44 @@ pub type FlutterRgbaRendererPluginOnRgba = unsafe extern "C" fn(
dst_rgba_stride: c_int,
);
#[cfg(feature = "gpucodec")]
pub type FlutterGpuTextureRendererPluginCApiSetTexture =
unsafe extern "C" fn(output: *mut c_void, texture: *mut c_void);
#[cfg(feature = "gpucodec")]
pub type FlutterGpuTextureRendererPluginCApiGetAdapterLuid = unsafe extern "C" fn() -> i64;
#[cfg(feature = "flutter_texture_render")]
pub(super) type TextureRgbaPtr = usize;
#[cfg(feature = "flutter_texture_render")]
#[cfg(any(feature = "flutter_texture_render", feature = "gpucodec"))]
struct DisplaySessionInfo {
// TextureRgba pointer in flutter native.
#[cfg(feature = "flutter_texture_render")]
texture_rgba_ptr: TextureRgbaPtr,
#[cfg(feature = "flutter_texture_render")]
size: (usize, usize),
#[cfg(feature = "gpucodec")]
gpu_output_ptr: usize,
notify_render_type: Option<RenderType>,
}
// Video Texture Renderer in Flutter
#[cfg(feature = "flutter_texture_render")]
#[cfg(any(feature = "flutter_texture_render", feature = "gpucodec"))]
#[derive(Clone)]
struct VideoRenderer {
is_support_multi_ui_session: bool,
map_display_sessions: Arc<RwLock<HashMap<usize, DisplaySessionInfo>>>,
#[cfg(feature = "flutter_texture_render")]
on_rgba_func: Option<Symbol<'static, FlutterRgbaRendererPluginOnRgba>>,
#[cfg(feature = "gpucodec")]
on_texture_func: Option<Symbol<'static, FlutterGpuTextureRendererPluginCApiSetTexture>>,
}
#[cfg(feature = "flutter_texture_render")]
#[cfg(any(feature = "flutter_texture_render", feature = "gpucodec"))]
impl Default for VideoRenderer {
fn default() -> Self {
#[cfg(feature = "flutter_texture_render")]
let on_rgba_func = match &*TEXTURE_RGBA_RENDERER_PLUGIN {
Ok(lib) => {
let find_sym_res = unsafe {
@ -237,33 +258,64 @@ impl Default for VideoRenderer {
None
}
};
#[cfg(feature = "gpucodec")]
let on_texture_func = match &*TEXTURE_GPU_RENDERER_PLUGIN {
Ok(lib) => {
let find_sym_res = unsafe {
lib.symbol::<FlutterGpuTextureRendererPluginCApiSetTexture>(
"FlutterGpuTextureRendererPluginCApiSetTexture",
)
};
match find_sym_res {
Ok(sym) => Some(sym),
Err(e) => {
log::error!("Failed to find symbol FlutterGpuTextureRendererPluginCApiSetTexture, {e}");
None
}
}
}
Err(e) => {
log::error!("Failed to load texture gpu renderer plugin, {e}");
None
}
};
Self {
map_display_sessions: Default::default(),
is_support_multi_ui_session: false,
#[cfg(feature = "flutter_texture_render")]
on_rgba_func,
#[cfg(feature = "gpucodec")]
on_texture_func,
}
}
}
#[cfg(feature = "flutter_texture_render")]
#[cfg(any(feature = "flutter_texture_render", feature = "gpucodec"))]
impl VideoRenderer {
#[inline]
#[cfg(feature = "flutter_texture_render")]
fn set_size(&mut self, display: usize, width: usize, height: usize) {
let mut sessions_lock = self.map_display_sessions.write().unwrap();
if let Some(info) = sessions_lock.get_mut(&display) {
info.size = (width, height);
info.notify_render_type = None;
} else {
sessions_lock.insert(
display,
DisplaySessionInfo {
texture_rgba_ptr: usize::default(),
size: (width, height),
#[cfg(feature = "gpucodec")]
gpu_output_ptr: usize::default(),
notify_render_type: None,
},
);
}
}
fn register_texture(&self, display: usize, ptr: usize) {
#[cfg(feature = "flutter_texture_render")]
fn register_pixelbuffer_texture(&self, display: usize, ptr: usize) {
let mut sessions_lock = self.map_display_sessions.write().unwrap();
if ptr == 0 {
sessions_lock.remove(&display);
@ -273,6 +325,7 @@ impl VideoRenderer {
log::error!("unreachable, texture_rgba_ptr is not null and not equal to ptr");
}
info.texture_rgba_ptr = ptr as _;
info.notify_render_type = None;
} else {
if ptr != 0 {
sessions_lock.insert(
@ -280,6 +333,9 @@ impl VideoRenderer {
DisplaySessionInfo {
texture_rgba_ptr: ptr as _,
size: (0, 0),
#[cfg(feature = "gpucodec")]
gpu_output_ptr: usize::default(),
notify_render_type: None,
},
);
}
@ -287,18 +343,49 @@ impl VideoRenderer {
}
}
pub fn on_rgba(&self, display: usize, rgba: &scrap::ImageRgb) {
let read_lock = self.map_display_sessions.read().unwrap();
let opt_info = if !self.is_support_multi_ui_session {
read_lock.values().next()
#[cfg(feature = "gpucodec")]
pub fn register_gpu_output(&self, display: usize, ptr: usize) {
let mut sessions_lock = self.map_display_sessions.write().unwrap();
if ptr == 0 {
sessions_lock.remove(&display);
} else {
read_lock.get(&display)
if let Some(info) = sessions_lock.get_mut(&display) {
if info.gpu_output_ptr != 0 && info.gpu_output_ptr != ptr {
log::error!("unreachable, gpu_output_ptr is not null and not equal to ptr");
}
info.gpu_output_ptr = ptr as _;
info.notify_render_type = None;
} else {
if ptr != 0 {
sessions_lock.insert(
display,
DisplaySessionInfo {
#[cfg(feature = "flutter_texture_render")]
texture_rgba_ptr: 0,
#[cfg(feature = "flutter_texture_render")]
size: (0, 0),
gpu_output_ptr: ptr,
notify_render_type: None,
},
);
}
}
}
}
#[cfg(feature = "flutter_texture_render")]
pub fn on_rgba(&self, display: usize, rgba: &scrap::ImageRgb) -> bool {
let mut write_lock = self.map_display_sessions.write().unwrap();
let opt_info = if !self.is_support_multi_ui_session {
write_lock.values_mut().next()
} else {
write_lock.get_mut(&display)
};
let Some(info) = opt_info else {
return;
return false;
};
if info.texture_rgba_ptr == usize::default() {
return;
return false;
}
// It is also Ok to skip this check.
@ -310,7 +397,7 @@ impl VideoRenderer {
rgba.w,
rgba.h
);
return;
return false;
}
if let Some(func) = &self.on_rgba_func {
unsafe {
@ -324,14 +411,53 @@ impl VideoRenderer {
)
};
}
if info.notify_render_type != Some(RenderType::PixelBuffer) {
info.notify_render_type = Some(RenderType::PixelBuffer);
true
} else {
false
}
}
#[cfg(feature = "gpucodec")]
pub fn on_texture(&self, display: usize, texture: *mut c_void) -> bool {
let mut write_lock = self.map_display_sessions.write().unwrap();
let opt_info = if !self.is_support_multi_ui_session {
write_lock.values_mut().next()
} else {
write_lock.get_mut(&display)
};
let Some(info) = opt_info else {
return false;
};
if info.gpu_output_ptr == usize::default() {
return false;
}
if let Some(func) = &self.on_texture_func {
unsafe { func(info.gpu_output_ptr as _, texture) };
}
if info.notify_render_type != Some(RenderType::Texture) {
info.notify_render_type = Some(RenderType::Texture);
true
} else {
false
}
}
pub fn reset_all_display_render_type(&self) {
let mut write_lock = self.map_display_sessions.write().unwrap();
write_lock
.values_mut()
.for_each(|v| v.notify_render_type = None);
}
}
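
Roughly, the bool returned by on_rgba/on_texture is a "render type changed" flag: the first frame after switching between the pixel-buffer and texture paths (or after reset_all_display_render_type) makes FlutterHandler push an EventToUI so the Dart side can bind the right renderer, while later frames of the same type are drawn without an event. A simplified sketch, with an illustrative helper name:

// Simplified from FlutterHandler::on_rgba / on_texture below.
fn maybe_notify_sketch(render_type_changed: bool, display: usize, stream: &StreamSink<EventToUI>) {
    if render_type_changed {
        // tells Flutter which widget/texture to show for this display
        stream.add(EventToUI::Texture(display));
    }
}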
impl SessionHandler {
pub fn on_waiting_for_image_dialog_show(&mut self) {
pub fn on_waiting_for_image_dialog_show(&self) {
#[cfg(any(feature = "flutter_texture_render"))]
{
self.notify_rendered = false;
self.renderer.reset_all_display_render_type();
}
// rgba array render will notify every frame
}
@ -623,24 +749,25 @@ impl InvokeUiSession for FlutterHandler {
#[inline]
#[cfg(feature = "flutter_texture_render")]
fn on_rgba(&self, display: usize, rgba: &mut scrap::ImageRgb) {
let mut try_notify_sessions = Vec::new();
for (id, session) in self.session_handlers.read().unwrap().iter() {
session.renderer.on_rgba(display, rgba);
if !session.notify_rendered {
try_notify_sessions.push(id.clone());
}
}
if try_notify_sessions.len() > 0 {
let mut write_lock = self.session_handlers.write().unwrap();
for id in try_notify_sessions.iter() {
if let Some(session) = write_lock.get_mut(id) {
for (_, session) in self.session_handlers.read().unwrap().iter() {
if session.renderer.on_rgba(display, rgba) {
if let Some(stream) = &session.event_stream {
stream.add(EventToUI::Rgba(display));
session.notify_rendered = true;
}
}
}
}
#[inline]
#[cfg(feature = "gpucodec")]
fn on_texture(&self, display: usize, texture: *mut c_void) {
for (_, session) in self.session_handlers.read().unwrap().iter() {
if session.renderer.on_texture(display, texture) {
if let Some(stream) = &session.event_stream {
stream.add(EventToUI::Texture(display));
}
}
}
}
fn set_peer_info(&self, pi: &PeerInfo) {
@ -876,11 +1003,19 @@ pub fn session_add(
Some(switch_uuid.to_string())
};
session
.lc
.write()
.unwrap()
.initialize(id.to_owned(), conn_type, switch_uuid, force_relay);
#[cfg(feature = "gpucodec")]
let adapter_luid = get_adapter_luid();
#[cfg(not(feature = "gpucodec"))]
let adapter_luid = None;
session.lc.write().unwrap().initialize(
id.to_owned(),
conn_type,
switch_uuid,
force_relay,
adapter_luid,
);
let session = Arc::new(session.clone());
sessions::insert_session(session_id.to_owned(), conn_type, session.clone());
@ -1217,7 +1352,6 @@ pub fn session_set_size(_session_id: SessionID, _display: usize, _width: usize,
.unwrap()
.get_mut(&_session_id)
{
h.notify_rendered = false;
h.renderer.set_size(_display, _width, _height);
break;
}
@ -1225,7 +1359,7 @@ pub fn session_set_size(_session_id: SessionID, _display: usize, _width: usize,
}
#[inline]
pub fn session_register_texture(_session_id: SessionID, _display: usize, _ptr: usize) {
pub fn session_register_pixelbuffer_texture(_session_id: SessionID, _display: usize, _ptr: usize) {
#[cfg(feature = "flutter_texture_render")]
for s in sessions::get_sessions() {
if let Some(h) = s
@ -1235,12 +1369,58 @@ pub fn session_register_texture(_session_id: SessionID, _display: usize, _ptr: u
.unwrap()
.get(&_session_id)
{
h.renderer.register_texture(_display, _ptr);
h.renderer.register_pixelbuffer_texture(_display, _ptr);
break;
}
}
}
#[inline]
pub fn session_register_gpu_texture(_session_id: SessionID, _display: usize, _output_ptr: usize) {
#[cfg(feature = "gpucodec")]
for s in sessions::get_sessions() {
if let Some(h) = s
.ui_handler
.session_handlers
.read()
.unwrap()
.get(&_session_id)
{
h.renderer.register_gpu_output(_display, _output_ptr);
break;
}
}
}
#[cfg(feature = "gpucodec")]
pub fn get_adapter_luid() -> Option<i64> {
let get_adapter_luid_func = match &*TEXTURE_GPU_RENDERER_PLUGIN {
Ok(lib) => {
let find_sym_res = unsafe {
lib.symbol::<FlutterGpuTextureRendererPluginCApiGetAdapterLuid>(
"FlutterGpuTextureRendererPluginCApiGetAdapterLuid",
)
};
match find_sym_res {
Ok(sym) => Some(sym),
Err(e) => {
log::error!("Failed to find symbol FlutterGpuTextureRendererPluginCApiGetAdapterLuid, {e}");
None
}
}
}
Err(e) => {
log::error!("Failed to load texture gpu renderer plugin, {e}");
None
}
};
let adapter_luid = match get_adapter_luid_func {
Some(get_adapter_luid_func) => unsafe { Some(get_adapter_luid_func()) },
None => Default::default(),
};
return adapter_luid;
}
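
The LUID fetched here is presumably what ties the two halves of the zero-copy path together: the GPU decoder is created on the same adapter that the Flutter GPU texture renderer presents from (see VideoHandler::new and session_add in this diff). A rough sketch of that flow, with an illustrative function name:

// Rough sketch; the real call sites are VideoHandler::new / LoginConfigHandler::initialize.
fn decoder_on_renderer_adapter_sketch() {
    let luid = get_adapter_luid(); // adapter used by flutter_gpu_texture_renderer
    let _decoder = scrap::codec::Decoder::new(luid); // decode to textures on that same adapter
}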
#[inline]
pub fn push_session_event(session_id: &SessionID, name: &str, event: Vec<(&str, &str)>) {
if let Some(s) = sessions::get_session_by_session_id(session_id) {

View File

@ -77,6 +77,7 @@ pub fn stop_global_event_stream(app_type: String) {
pub enum EventToUI {
Event(String),
Rgba(usize),
Texture(usize),
}
pub fn host_stop_system_key_propagate(_stopped: bool) {
@ -1268,6 +1269,10 @@ pub fn main_has_hwcodec() -> SyncReturn<bool> {
SyncReturn(has_hwcodec())
}
pub fn main_has_gpucodec() -> SyncReturn<bool> {
SyncReturn(has_gpucodec())
}
pub fn main_supported_hwdecodings() -> SyncReturn<String> {
let decoding = supported_hwdecodings();
let msg = HashMap::from([("h264", decoding.0), ("h265", decoding.1)]);
@ -1584,12 +1589,22 @@ pub fn session_next_rgba(session_id: SessionID, display: usize) -> SyncReturn<()
SyncReturn(super::flutter::session_next_rgba(session_id, display))
}
pub fn session_register_texture(
pub fn session_register_pixelbuffer_texture(
session_id: SessionID,
display: usize,
ptr: usize,
) -> SyncReturn<()> {
SyncReturn(super::flutter::session_register_texture(
SyncReturn(super::flutter::session_register_pixelbuffer_texture(
session_id, display, ptr,
))
}
pub fn session_register_gpu_texture(
session_id: SessionID,
display: usize,
ptr: usize,
) -> SyncReturn<()> {
SyncReturn(super::flutter::session_register_gpu_texture(
session_id, display, ptr,
))
}
@ -1729,15 +1744,8 @@ pub fn main_hide_docker() -> SyncReturn<bool> {
SyncReturn(true)
}
pub fn main_use_texture_render() -> SyncReturn<bool> {
#[cfg(not(feature = "flutter_texture_render"))]
{
SyncReturn(false)
}
#[cfg(feature = "flutter_texture_render")]
{
SyncReturn(true)
}
pub fn main_has_pixelbuffer_texture_render() -> SyncReturn<bool> {
SyncReturn(cfg!(feature = "flutter_texture_render"))
}
pub fn main_has_file_clipboard() -> SyncReturn<bool> {
@ -1751,6 +1759,10 @@ pub fn main_has_file_clipboard() -> SyncReturn<bool> {
SyncReturn(ret)
}
pub fn main_has_gpu_texture_render() -> SyncReturn<bool> {
SyncReturn(cfg!(feature = "gpucodec"))
}
pub fn cm_init() {
#[cfg(not(any(target_os = "android", target_os = "ios")))]
crate::flutter::connection_manager::cm_init();

View File

@ -346,7 +346,7 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("Dark", "黑暗"),
("Light", "明亮"),
("Follow System", "跟随系统"),
("Enable hardware codec", "使硬件编解码"),
("Enable hardware codec", "使硬件编解码"),
("Unlock Security Settings", "解锁安全设置"),
("Enable audio", "允许传输音频"),
("Unlock Network Settings", "解锁网络设置"),

View File

@ -1,67 +1,254 @@
lazy_static::lazy_static! {
pub static ref T: std::collections::HashMap<&'static str, &'static str> =
[
("Status", ""),
("Your Desktop", ""),
("desk_tip", "Sinu töölauale saab selle ID ja parooliga ligi pääseda."),
("Password", ""),
("Ready", ""),
("Established", ""),
("connecting_status", "RustDeski võrguga ühendumine..."),
("Enable service", ""),
("Start service", ""),
("Service is running", ""),
("Service is not running", ""),
("not_ready_status", "Pole valmis. Palun kontrolli oma ühendust"),
("Control Remote Desktop", ""),
("Transfer file", ""),
("Connect", ""),
("Recent sessions", ""),
("Address book", ""),
("Confirmation", ""),
("TCP tunneling", ""),
("Remove", ""),
("Refresh random password", ""),
("Set your own password", ""),
("Enable keyboard/mouse", ""),
("Enable clipboard", ""),
("Enable file transfer", ""),
("Enable TCP tunneling", ""),
("IP Whitelisting", ""),
("ID/Relay Server", "ID-/releeserver"),
("Import server config", ""),
("Export Server Config", ""),
("Import server configuration successfully", ""),
("Export server configuration successfully", ""),
("Invalid server configuration", ""),
("Clipboard is empty", ""),
("Stop service", ""),
("Change ID", ""),
("Your new ID", ""),
("length %min% to %max%", ""),
("starts with a letter", ""),
("allowed characters", ""),
("id_change_tip", "Lubatud on vaid a-z, A-Z, 0-9 ja _ (alakriips) tähemärgid. Esimene täht peab olema a-z või A-Z. Pikkus vahemikus 6-16."),
("Website", ""),
("About", ""),
("Slogan_tip", "Loodud südamega selles kaootilises maailmas!"),
("Privacy Statement", ""),
("Mute", ""),
("Build Date", "Ehituskuupäev"),
("Version", ""),
("Home", ""),
("Audio Input", "Helisisend"),
("Enhancements", ""),
("Hardware Codec", "Riistvarakoodek"),
("Adaptive bitrate", ""),
("ID Server", "ID-server"),
("Relay Server", "Releeserver"),
("API Server", "Rakendusliidese server"),
("invalid_http", "peab algama: http:// või https://"),
("Invalid IP", ""),
("Invalid format", ""),
("server_not_support", "Pole veel serveri poolt toetatud"),
("Not available", ""),
("Too frequent", ""),
("Cancel", ""),
("Skip", ""),
("Close", ""),
("Retry", ""),
("OK", ""),
("Password Required", "Parool on nõutud"),
("Please enter your password", ""),
("Remember password", ""),
("Wrong Password", "Vale parool"),
("Do you want to enter again?", ""),
("Connection Error", "Ühenduse viga"),
("Error", ""),
("Reset by the peer", ""),
("Connecting...", ""),
("Connection in progress. Please wait.", ""),
("Please try 1 minute later", ""),
("Login Error", "Sisselogimise viga"),
("Successful", ""),
("Connected, waiting for image...", ""),
("Name", ""),
("Type", ""),
("Modified", ""),
("Size", ""),
("Show Hidden Files", "Kuva peidetud faile"),
("Receive", ""),
("Send", ""),
("Refresh File", "Värskenda faili"),
("Local", ""),
("Remote", ""),
("Remote Computer", "Kaugarvuti"),
("Local Computer", "Kohalik arvuti"),
("Confirm Delete", "Kinnita kustutamist"),
("Delete", ""),
("Properties", ""),
("Multi Select", "Mitmikvalik"),
("Select All", "Vali kõik"),
("Unselect All", "Tühista kõigi valik"),
("Empty Directory", "Tühi kaust"),
("Not an empty directory", ""),
("Are you sure you want to delete this file?", ""),
("Are you sure you want to delete this empty directory?", ""),
("Are you sure you want to delete the file of this directory?", ""),
("Do this for all conflicts", ""),
("This is irreversible!", ""),
("Deleting", ""),
("files", ""),
("Waiting", ""),
("Finished", ""),
("Speed", ""),
("Custom Image Quality", "Kohandatud pildikvaliteet"),
("Privacy mode", ""),
("Block user input", ""),
("Unblock user input", ""),
("Adjust Window", "Kohanda akent"),
("Original", ""),
("Shrink", ""),
("Stretch", ""),
("Scrollbar", ""),
("ScrollAuto", ""),
("Good image quality", ""),
("Balanced", ""),
("Optimize reaction time", ""),
("Custom", ""),
("Show remote cursor", ""),
("Show quality monitor", ""),
("Disable clipboard", ""),
("Lock after session end", ""),
("Insert", ""),
("Insert Lock", "Sisesta lukk"),
("Refresh", ""),
("ID does not exist", ""),
("Failed to connect to rendezvous server", ""),
("Please try later", ""),
("Remote desktop is offline", ""),
("Key mismatch", ""),
("Timeout", ""),
("Failed to connect to relay server", ""),
("Failed to connect via rendezvous server", ""),
("Failed to connect via relay server", ""),
("Failed to make direct connection to remote desktop", ""),
("Set Password", "Määra parool"),
("OS Password", "Opsüsteemi parool"),
("install_tip", "Kasutajakonto kontrolli (UAC) tõttu ei saa RustDesk mõnel juhul korralikult kaugjuhtimispoolena töötada. Kontrolli vältimiseks palun klõpsa alloleval nupul, et RustDesk oma süsteemi paigaldada."),
("Click to upgrade", ""),
("Click to download", ""),
("Click to update", ""),
("Configure", ""),
("config_acc", "Töölaua kaugjuhtimiseks tuleb RustDeskile anda \"juurdepääsetavuse\" õigused."),
("config_screen", "Töölaua kaugjuhtimiseks tuleb RustDeskile anda \"ekraanisalvestuse\" õigused."),
("Installing ...", ""),
("Install", ""),
("Installation", ""),
("Installation Path", "Paigaldustee"),
("Create start menu shortcuts", ""),
("Create desktop icon", ""),
("agreement_tip", "Paigalduse alustamisel nõustud litsentsilepinguga."),
("Accept and Install", "Nõustu ja paigalda"),
("End-user license agreement", ""),
("Generating ...", ""),
("Your installation is lower version.", ""),
("not_close_tcp_tip", "Ara sulge seda akent, kuni kasutad tunnelit"),
("Listening ...", ""),
("Remote Host", "Kaughost"),
("Remote Port", "Kaugport"),
("Action", ""),
("Add", ""),
("Local Port", "Kohalik port"),
("Local Address", "Kohalik aadress"),
("Change Local Port", "Muuda kohalikku porti"),
("setup_server_tip", "Kiirema ühenduse jaoks palun seadista oma server"),
("Too short, at least 6 characters.", ""),
("The confirmation is not identical.", ""),
("Permissions", ""),
("Accept", ""),
("Dismiss", ""),
("Disconnect", ""),
("Enable file copy and paste", ""),
("Connected", ""),
("Direct and encrypted connection", ""),
("Relayed and encrypted connection", ""),
("Direct and unencrypted connection", ""),
("Relayed and unencrypted connection", ""),
("Enter Remote ID", "Sisesta kaug-ID"),
("Enter your password", ""),
("Logging in...", ""),
("Enable RDP session sharing", ""),
("Auto Login", "Logi automaatselt sisse (Kehtib vaid valiku \"lukusta pärast seansi lõppu\" lubamisel)"),
("Enable direct IP access", ""),
("Rename", ""),
("Space", ""),
("Create desktop shortcut", ""),
("Change Path", "Muuda failiteed"),
("Create Folder", "Loo kaust"),
("Please enter the folder name", ""),
("Fix it", ""),
("Warning", ""),
("Login screen using Wayland is not supported", ""),
("Reboot required", ""),
("Unsupported display server", ""),
("x11 expected", ""),
("Port", ""),
("Settings", ""),
("Username", ""),
("Invalid port", ""),
("Closed manually by the peer", ""),
("Enable remote configuration modification", ""),
("Run without install", ""),
("Connect via relay", ""),
("Always connect via relay", ""),
("whitelist_tip", "Ainult lubamisloendis IP saab mulle ligi"),
("Login", ""),
("Verify", ""),
("Remember me", ""),
("Trust this device", ""),
("Verification code", ""),
("verification_tip", "Registreeritud e-posti aadressile on saadetud kinnituskood, sisselogimise jätkamiseks sisesta kinnituskood."),
("Logout", ""),
("Tags", ""),
("Search ID", ""),
("whitelist_sep", "Eraldatud koma, semikooloni, tühikute või uue reaga"),
("Add ID", ""),
("Add Tag", "Lisa silt"),
("Unselect all tags", ""),
("Network error", ""),
("Username missed", ""),
("Password missed", ""),
("Wrong credentials", "Vale kasutajanimi või parool"),
("The verification code is incorrect or has expired", ""),
("Edit Tag", "Muuda silti"),
("Forget Password", "Unusta parool"),
("Favorites", ""),
("Add to Favorites", "Lisa lemmikutesse"),
("Remove from Favorites", "Eemalda lemmikutest"),
("Empty", ""),
("Invalid folder name", ""),
("Socks5 Proxy", "Socks5 proksi"),
("Hostname", ""),
("Discovered", ""),
("install_daemon_tip", "Süsteemikäivitusel käivitamiseks tuleb paigaldada süsteemiteenus."),
("Remote ID", ""),
("Paste", ""),
("Paste here?", ""),
("Are you sure to close the connection?", "Kas soovid kindlasti ühenduse sulgeda?"),
("Download new version", ""),
("Touch mode", ""),
("Mouse mode", ""),
("One-Finger Tap", "Ühe sõrme koputus"),
("Left Mouse", "Vasak hiireklahv"),
("One-Long Tap", "Üks pikk koputus"),
@ -76,13 +263,23 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("Canvas Move", "Lõuendi liigutus"),
("Pinch to Zoom", "Näpistus-suum"),
("Canvas Zoom", "Lõuendi suum"),
("Reset canvas", ""),
("No permission of file transfer", ""),
("Note", ""),
("Connection", ""),
("Share Screen", "Jaga ekraani"),
("Chat", ""),
("Total", ""),
("items", ""),
("Selected", ""),
("Screen Capture", "Ekraanisalvestus"),
("Input Control", "Sisendjuhtimine"),
("Audio Capture", "Helisalvestus"),
("File Connection", "Failiühendus"),
("Screen Connection", "Kuvaühendus"),
("Do you accept?", ""),
("Open System Setting", "Ava süsteemisätted"),
("How to get Android input permission?", ""),
("android_input_permission_tip1", "Selleks, et kaugseade saaks sinu Androidi seadet juhtida hiire või puute abil, pead andma RustDeskile \"juurdepääsetavuse\" loa."),
("android_input_permission_tip2", "Palun mine järgmisele süsteemiseadete lehele, leia ja sisesta [Paigaldatud teenused], lülita teenus [RustDesk Input] sisse."),
("android_new_connection_tip", "Saabunud on uus juhtimistaotlus, mis soovib sinu praegust seadet juhtida."),
@ -91,15 +288,46 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("android_version_audio_tip", "Kasutatav Androidi versioon ei toeta helisalvestust, palun täienda Android 10 või uuemale versioonile."),
("android_start_service_tip", "Koputa [Alusta teenust] või anna [Ekraanisalvestuse] luba, et ekraanijagamisteenust alustada."),
("android_permission_may_not_change_tip", "Loodud ühenduste õigused ei pruugi muutuda enne taasühendamist koheselt."),
("doc_mac_permission", "https://rustdesk.com/docs/en/manual/mac/#enable-permissions"),
("Account", ""),
("Overwrite", ""),
("This file exists, skip or overwrite this file?", ""),
("Quit", ""),
("Help", ""),
("Failed", ""),
("Succeeded", ""),
("Someone turns on privacy mode, exit", ""),
("Unsupported", ""),
("Peer denied", ""),
("Please install plugins", ""),
("Peer exit", ""),
("Failed to turn off", ""),
("Turned off", ""),
("Language", ""),
("Keep RustDesk background service", ""),
("Ignore Battery Optimizations", "Ignoreeri akuoptimeerimisi"),
("android_open_battery_optimizations_tip", "Kui soovid selle funktsiooni keelata, palun mine järgmisele RustDeski rakenduse seadete lehele, leia ja sisesta [Aku], eemalda linnuke valikult [Piiramata]"),
("Start on boot", ""),
("Start the screen sharing service on boot, requires special permissions", ""),
("Connection not allowed", ""),
("Legacy mode", ""),
("Map mode", ""),
("Translate mode", ""),
("Use permanent password", ""),
("Use both passwords", ""),
("Set permanent password", ""),
("Enable remote restart", ""),
("Restart remote device", ""),
("Are you sure you want to restart", ""),
("Restarting remote device", ""),
("remote_restarting_tip", "Kaugseade taaskäivitub, palun sulge see sõnumikast ja ühendu mõne aja pärast uuesti püsiva parooliga."),
("Copied", ""),
("Exit Fullscreen", "Lahku täisekraanist"),
("Fullscreen", ""),
("Mobile Actions", "Mobiilitegevused"),
("Select Monitor", "Vali kuvar"),
("Control Actions", "Juhtimistegevused"),
("Display Settings", "Kuvasätted"),
("Ratio", ""),
("Image Quality", "Pildikvaliteet"),
("Scroll Style", "Kerimisstiil"),
("Show Toolbar", "Kuva tööriistariba"),
@ -108,51 +336,138 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("Relay Connection", "Releeühendus"),
("Secure Connection", "Turvaline ühendus"),
("Insecure Connection", "Ebaturvaline ühendus"),
("Scale original", ""),
("Scale adaptive", ""),
("General", ""),
("Security", ""),
("Theme", ""),
("Dark Theme", "Tume teema"),
("Light Theme", "Hele teema"),
("Dark", ""),
("Light", ""),
("Follow System", "Järgi süsteemi"),
("Enable hardware codec", ""),
("Unlock Security Settings", "Lukusta lahti turvasätted"),
("Enable audio", ""),
("Unlock Network Settings", "Lukusta lahti võrgusätted"),
("Server", ""),
("Direct IP Access", "Otsene IP-ligipääs"),
("Proxy", ""),
("Apply", ""),
("Disconnect all devices?", ""),
("Clear", ""),
("Audio Input Device", "Heli sisendseade"),
("Use IP Whitelisting", "Kasuta IP-lubamisloendit"),
("Network", ""),
("Pin Toolbar", "Kinnita tööriistariba"),
("Unpin Toolbar", "Eemalda tööriistariba kinnitus"),
("Recording", ""),
("Directory", ""),
("Automatically record incoming sessions", ""),
("Change", ""),
("Start session recording", ""),
("Stop session recording", ""),
("Enable recording session", ""),
("Enable LAN discovery", ""),
("Deny LAN discovery", ""),
("Write a message", ""),
("Prompt", ""),
("Please wait for confirmation of UAC...", ""),
("elevated_foreground_window_tip", "Kaugtöölaua praegune aken nõuab töötamiseks kõrgemaid õigusi, mistõttu ei saa see ajutiselt hiirt ja klaviatuuri kasutada. Võid kaugkasutajal paluda minimeerida praegune aken või klõpsata ühenduse haldamise aknas kõrgendatud loa nuppu. Selle probleemi vältimiseks on soovitatav kaugseadmesse tarkvara paigaldada."),
("Disconnected", ""),
("Other", ""),
("Confirm before closing multiple tabs", ""),
("Keyboard Settings", "Klaviatuurisätted"),
("Full Access", "Täielik ligipääs"),
("Screen Share", "Ekraanijagamine"),
("Wayland requires Ubuntu 21.04 or higher version.", ""),
("Wayland requires higher version of linux distro. Please try X11 desktop or change your OS.", ""),
("JumpLink", "Kuva"),
("Please Select the screen to be shared(Operate on the peer side).", "Palun vali jagatav ekraan (tegutse partneri poolel)."),
("Show RustDesk", ""),
("This PC", ""),
("or", ""),
("Continue with", ""),
("Elevate", ""),
("Zoom cursor", ""),
("Accept sessions via password", ""),
("Accept sessions via click", ""),
("Accept sessions via both", ""),
("Please wait for the remote side to accept your session request...", ""),
("One-time Password", "Ühekordne parool"),
("Use one-time password", ""),
("One-time password length", ""),
("Request access to your device", ""),
("Hide connection management window", ""),
("hide_cm_tip", "Luba varjamine ainult siis, kui parooliga seansse võetakse vastu ning kasutatakse püsivat parooli."),
("wayland_experiment_tip", "Waylandi tugi on katsetusjärgus, järelvalveta juurdepääsu vajadusel palun kasuta X11."),
("Right click to select tabs", ""),
("Skipped", ""),
("Add to address book", ""),
("Group", ""),
("Search", ""),
("Closed manually by web console", ""),
("Local keyboard type", ""),
("Select local keyboard type", ""),
("software_render_tip", "Kui kasutad Linuxis Nvidia graafikakaarti ja kaugaken sulgub kohe pärast ühendamist, võib aidata üleminek avatud lähtekoodiga Nouveau draiverile ja valida tarkvaraline renderdamise. Vajalik on tarkvara taaskäivitamine."),
("Always use software rendering", ""),
("config_input", "Kaugtöölaua klaviatuuriga juhtimiseks pead andma RustDeskile \"sisendi jälgimise\" õigused."),
("config_microphone", "Kaugelt rääkimiseks pead andma RustDeskile \"heli salvestamise\" õigused."),
("request_elevation_tip", "Sa võid kõrgendatud õigusi taotleda ka siis, kui keegi on kaugpoolel."),
("Wait", ""),
("Elevation Error", "Kõrgendatud õiguste viga"),
("Ask the remote user for authentication", ""),
("Choose this if the remote account is administrator", ""),
("Transmit the username and password of administrator", ""),
("still_click_uac_tip", "Kaugkasutaja peab siiski ise vajutama käitatud RustDeski kasutajakonto kontrollis (UAC) OK-nuppu."),
("Request Elevation", "Taotle kõrgendatud õigusi"),
("wait_accept_uac_tip", "Palun oota, kuni kaugkasutaja nõustub UAC-dialoogiga (kasutajakonto kontroll)."),
("Elevate successfully", ""),
("uppercase", ""),
("lowercase", ""),
("digit", ""),
("special character", ""),
("length>=8", ""),
("Weak", ""),
("Medium", ""),
("Strong", ""),
("Switch Sides", "Vaheta pooli"),
("Please confirm if you want to share your desktop?", ""),
("Display", ""),
("Default View Style", "Vaikimisi kuvastiil"),
("Default Scroll Style", "Vaikimisi kerimisstiil"),
("Default Image Quality", "Vaikimisi pildikvaliteet"),
("Default Codec", "Vaikimisi koodek"),
("Bitrate", ""),
("FPS", ""),
("Auto", ""),
("Other Default Options", "Teised vaikevalikud"),
("Voice call", ""),
("Text chat", ""),
("Stop voice call", ""),
("relay_hint_tip", "Otseühendust ei pruugi olla võimalik luua; võid proovida ühendust relee kaudu. Lisaks, kui soovid esimesel katsel kasutada releed, võid lisada ID-le järelliite \"/r\" või valida viimaste seansside kaardil - kui see on olemas - valiku \"Ühenda alati relee kaudu\"."),
("Reconnect", ""),
("Codec", ""),
("Resolution", ""),
("No transfers in progress", ""),
("Set one-time password length", ""),
("install_cert_tip", "Paigalda RustDesk sertifikaat"),
("confirm_install_cert_tip", "See on RustDeski testimise sertifikaat, mida võib usaldada. Sertifikaati kasutatakse vajadusel RustDeski draiverite usaldamiseks ja paigaldamiseks."),
("RDP Settings", "RDP seaded"),
("Sort by", ""),
("New Connection", "Uus ühendus"),
("Restore", ""),
("Minimize", ""),
("Maximize", ""),
("Your Device", "Sinu seade"),
("empty_recent_tip", "Ups, hiljutised seansid puuduvad!\nAeg uus planeerida."),
("empty_favorite_tip", "Ei ole veel ühtegi lemmikpartnerit?\nLeia keegi, kellega suhelda ja lisa ta oma lemmikute hulka!"),
("empty_lan_tip", "Oh ei, tundub, et me pole veel ühtegi partnerit avastanud."),
("empty_address_book_tip", "Oh ei, tundub et sinu aadressiraamatus ei ole hetkel ühtegi partnerit."),
("eg: admin", ""),
("Empty Username", "Tühi kasutajanimi"),
("Empty Password", "Tühi parool"),
("Me", ""),
("identical_file_tip", "See fail on partneri omaga identne."),
("show_monitors_tip", "Kuva kuvarid tööriistaribal"),
("View Mode", "Kuvarežiim"),
@ -167,21 +482,49 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("xorg_not_found_text_tip", "Palun paigalda Xorg"),
("no_desktop_title_tip", "Töölaud pole saadaval"),
("no_desktop_text_tip", "Palun paigalda GNOME Desktop"),
("No need to elevate", ""),
("System Sound", "Süsteemiheli"),
("Default", ""),
("New RDP", ""),
("Fingerprint", ""),
("Copy Fingerprint", "Kopeeri sõrmejälg"),
("no fingerprints", "Sõrmejäljed puuduvad"),
("Select a peer", ""),
("Select peers", ""),
("Plugins", ""),
("Uninstall", ""),
("Update", ""),
("Enable", ""),
("Disable", ""),
("Options", ""),
("resolution_original_tip", "Originaalne eraldusvõime"),
("resolution_fit_local_tip", "Ühita kohaliku eraldusvõimega"),
("resolution_custom_tip", "Kohandatud eraldusvõime"),
("Collapse toolbar", ""),
("Accept and Elevate", "Luba kõrgemate õigustega"),
("accept_and_elevate_btn_tooltip", "Võta ühendus vastu ja anna kõrgemad UAC-õigused (kasutajakonto kontroll)."),
("clipboard_wait_response_timeout_tip", "Koopia vastuse ootamisel tekkis ajalõpp."),
("Incoming connection", ""),
("Outgoing connection", ""),
("Exit", ""),
("Open", ""),
("logout_tip", "Kas soovid kindlasti välja logida?"),
("Service", ""),
("Start", ""),
("Stop", ""),
("exceed_max_devices", "Oled saavutanud hallatavate seadmete maksimaalse arvu."),
("Sync with recent sessions", ""),
("Sort tags", ""),
("Open connection in new tab", ""),
("Move tab to new window", ""),
("Can not be empty", ""),
("Already exists", ""),
("Change Password", "Vaheta parooli"),
("Refresh Password", "Värskenda parool"),
("ID", ""),
("Grid View", "Ruudustikuvaade"),
("List View", "Loendivaade"),
("Select", ""),
("Toggle Tags", "Lülita silte"),
("pull_ab_failed_tip", "Aadressiraamatu värskendamine ebaõnnestus"),
("push_ab_failed_tip", "Aadressiraamatu sünkroonimine serveriga ebaõnnestus"),
@ -190,25 +533,50 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
("Primary Color", "Põhivärv"),
("HSV Color", "HSV-värv"),
("Installation Successful!", "Paigaldus oli edukas!"),
("Installation failed!", ""),
("Reverse mouse wheel", ""),
("{} sessions", ""),
("scam_title", "Võid olla KELMUSE ohver!"),
("scam_text1", "Kui räägid telefoniga kellegagi, keda EI TUNNE ja EI USALDA, kes on palunud sul RustDeski kasutada ja teenus käivitada, ära jätka ning lõpeta kõne koheselt."),
("scam_text2", "Tõenäoliselt on tegemist petturiga, kes üritab sinu raha või muid privaatseid andmeid varastada."),
("Don't show again", ""),
("I Agree", ""),
("Decline", ""),
("Timeout in minutes", ""),
("auto_disconnect_option_tip", "Sissetulevate seansside automaatne sulgemine kasutaja mitteaktiivsuse korral"),
("Connection failed due to inactivity", "Mitteaktiivsuse tõttu automaatselt lahti ühendatud"),
("Check for software update on startup", ""),
("upgrade_rustdesk_server_pro_to_{}_tip", "Palun täienda RustDesk Server Pro versioonile {} või uuem!"),
("pull_group_failed_tip", "Grupi värskendamine ebaõnnestus"),
("doc_fix_wayland", "https://rustdesk.com/docs/en/manual/linux/#x11-required"),
("Filter by intersection", ""),
("Remove wallpaper during incoming sessions", ""),
("Test", ""),
("display_is_plugged_out_msg", "See kuvar on välja lülitatud, lülita esmasele kuvarile."),
("No displays", ""),
("elevated_switch_display_msg", "Lülita ümber esmasele kuvarile, sest kõrgendatud kasutajarežiimis ei toetata mitut kuvarit."),
("Open in new window", ""),
("Show displays as individual windows", ""),
("Use all my displays for the remote session", ""),
("selinux_tip", "SELinux on su seadmes lubatud, mis võib RustDeskil takistada juhitud poolel toimimist."),
("Change view", ""),
("Big tiles", ""),
("Small tiles", ""),
("List", ""),
("Virtual display", ""),
("Plug out all", ""),
("True color (4:4:4)", ""),
("Enable blocking user input", ""),
("id_input_tip", "Võid sisestada ID, otsese IP või domeeni koos pordiga (<domeen>:<port>).\nKui soovid juurdepääsu seadmele mõnes teises serveris, lisa palun serveri aadress (<id>@<serveri_aadress>?key=<võtme_väärtus>), näiteks,\n9123456234@192.168.16.1:21117?key=5Qbwsde3unUcJBtrx9ZkvUmwFNoExHzpryHuPUdqlWM=.\nKui soovid juurdepääsu seadmele avalikus serveris, sisesta \"<id>@public\", avaliku serveri puhul ei ole võtit vaja."),
("privacy_mode_impl_mag_tip", "Režiim 1"),
("privacy_mode_impl_virtual_display_tip", "Režiim 2"),
("Enter privacy mode", ""),
("Exit privacy mode", ""),
("idd_not_support_under_win10_2004_tip", "Kaudse kuvari draiver ei ole toetatud. Vajalik on Windows 10, versioon 2004 või uuem."),
("switch_display_elevated_connections_tip", "Mitme ühenduse korral ei toetata kõrgendatud kasutajarežiimil üleminekut muule kui primaarsele kuvale. Kui soovid juhtida mitut ekraani, palun proovi uuesti pärast paigaldamist."),
("input_source_1_tip", "Sisendallikas 1"),
("input_source_2_tip", "Sisendallikas 2"),
("capture_display_elevated_connections_tip", "Mitme ekraani jäädvustamine ei ole kõrgendatud kasutajarežiimis toetatud. Kui soovid juhtida mitut ekraani, palun proovi uuesti pärast paigaldamist."),
("Swap control-command key", ""),
("swap-left-right-mouse", "Vaheta vasak ja parem hiirenupp"),
].iter().cloned().collect();
}

View File

@ -432,7 +432,9 @@ pub async fn start_server(is_server: bool) {
log::info!("XAUTHORITY={:?}", std::env::var("XAUTHORITY"));
}
#[cfg(feature = "hwcodec")]
scrap::hwcodec::check_config_process();
scrap::hwcodec::hwcodec_new_check_process();
#[cfg(feature = "gpucodec")]
scrap::gpucodec::gpucodec_new_check_process();
#[cfg(windows)]
hbb_common::platform::windows::start_cpu_performance_monitor();
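Both probes are feature-gated, so builds without --hwcodec or --gpucodec compile them out entirely. A minimal sketch of that gating pattern, using stand-in function bodies rather than the real scrap helpers:

// Hedged sketch: hypothetical stand-ins for the real scrap::hwcodec /
// scrap::gpucodec check helpers, showing only the feature gating.
fn run_codec_checks() {
    // Compiled only when the build enables `--features hwcodec`.
    #[cfg(feature = "hwcodec")]
    hwcodec_new_check_process();

    // Compiled only when the build enables `--features gpucodec`.
    #[cfg(feature = "gpucodec")]
    gpucodec_new_check_process();
}

#[cfg(feature = "hwcodec")]
fn hwcodec_new_check_process() { /* probe hardware encoders in a helper process */ }

#[cfg(feature = "gpucodec")]
fn gpucodec_new_check_process() { /* probe GPU encoders in a helper process */ }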

View File

@ -233,6 +233,8 @@ pub struct Connection {
auto_disconnect_timer: Option<(Instant, u64)>,
authed_conn_id: Option<self::raii::AuthedConnID>,
file_remove_log_control: FileRemoveLogControl,
#[cfg(feature = "gpucodec")]
supported_encoding_flag: (bool, Option<bool>),
}
impl ConnInner {
@ -377,6 +379,8 @@ impl Connection {
auto_disconnect_timer: None,
authed_conn_id: None,
file_remove_log_control: FileRemoveLogControl::new(id),
#[cfg(feature = "gpucodec")]
supported_encoding_flag: (false, None),
};
let addr = hbb_common::try_into_v4(addr);
if !conn.on_open(addr).await {
@ -657,6 +661,8 @@ impl Connection {
}
}
conn.file_remove_log_control.on_timer().drain(..).map(|x| conn.send_to_cm(x)).count();
#[cfg(feature = "gpucodec")]
conn.update_supported_encoding();
}
_ = test_delay_timer.tick() => {
if last_recv_time.elapsed() >= SEC30 {
@ -1112,7 +1118,9 @@ impl Connection {
pi.platform_additions = serde_json::to_string(&platform_additions).unwrap_or("".into());
}
pi.encoding = Some(scrap::codec::Encoder::supported_encoding()).into();
let supported_encoding = scrap::codec::Encoder::supported_encoding();
log::info!("peer info supported_encoding: {:?}", supported_encoding);
pi.encoding = Some(supported_encoding).into();
if self.port_forward_socket.is_some() {
let mut msg_out = Message::new();
@ -1454,23 +1462,15 @@ impl Connection {
}
fn update_codec_on_login(&self) {
use scrap::codec::{Encoder, EncodingUpdate::*};
if let Some(o) = self.lr.clone().option.as_ref() {
if let Some(q) = o.supported_decoding.clone().take() {
scrap::codec::Encoder::update(
self.inner.id(),
scrap::codec::EncodingUpdate::New(q),
);
Encoder::update(Update(self.inner.id(), q));
} else {
scrap::codec::Encoder::update(
self.inner.id(),
scrap::codec::EncodingUpdate::NewOnlyVP9,
);
Encoder::update(NewOnlyVP9(self.inner.id()));
}
} else {
scrap::codec::Encoder::update(
self.inner.id(),
scrap::codec::EncodingUpdate::NewOnlyVP9,
);
Encoder::update(NewOnlyVP9(self.inner.id()));
}
}
@ -2537,7 +2537,7 @@ impl Connection {
.user_custom_fps(self.inner.id(), o.custom_fps as _);
}
if let Some(q) = o.supported_decoding.clone().take() {
scrap::codec::Encoder::update(self.inner.id(), scrap::codec::EncodingUpdate::New(q));
scrap::codec::Encoder::update(scrap::codec::EncodingUpdate::Update(self.inner.id(), q));
}
if let Ok(q) = o.lock_after_session_end.enum_value() {
if q != BoolOption::NotSet {
@ -2904,6 +2904,24 @@ impl Connection {
.as_mut()
.map(|t| t.0 = Instant::now());
}
#[cfg(feature = "gpucodec")]
fn update_supported_encoding(&mut self) {
let not_use = Some(scrap::gpucodec::GpuEncoder::not_use());
if !self.authorized
|| self.supported_encoding_flag.0 && self.supported_encoding_flag.1 == not_use
{
return;
}
let mut misc: Misc = Misc::new();
let supported_encoding = scrap::codec::Encoder::supported_encoding();
log::info!("update supported encoding: {:?}", supported_encoding);
misc.set_supported_encoding(supported_encoding);
let mut msg = Message::new();
msg.set_misc(misc);
self.inner.send(msg.into());
self.supported_encoding_flag = (true, not_use);
}
}
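The supported_encoding_flag tuple keeps this message from being re-sent on every timer tick: it records whether an update already went out and what GpuEncoder::not_use() looked like at that moment, so a new Misc message is only pushed after authorization and when the GPU-usability state changes. A hedged sketch of that guard with simplified names (not the actual Connection type):

// Hedged sketch of the resend guard: (sent at least once, `not_use`
// value observed at the last send).
struct EncodingFlag {
    sent: bool,
    last_not_use: Option<bool>,
}

impl EncodingFlag {
    fn should_send(&self, authorized: bool, not_use: bool) -> bool {
        if !authorized {
            return false;
        }
        // Re-send only if nothing was sent yet, or the GPU "not usable"
        // state changed since the last send.
        !(self.sent && self.last_not_use == Some(not_use))
    }

    fn mark_sent(&mut self, not_use: bool) {
        self.sent = true;
        self.last_not_use = Some(not_use);
    }
}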
pub fn insert_switch_sides_uuid(id: String, uuid: uuid::Uuid) {
@ -3367,7 +3385,7 @@ mod raii {
impl Drop for AuthedConnID {
fn drop(&mut self) {
if self.1 == AuthConnType::Remote {
scrap::codec::Encoder::update(self.0, scrap::codec::EncodingUpdate::Remove);
scrap::codec::Encoder::update(scrap::codec::EncodingUpdate::Remove(self.0));
}
AUTHED_CONNS.lock().unwrap().retain(|&c| c.0 != self.0);
let remote_count = AUTHED_CONNS

View File

@ -188,6 +188,11 @@ fn check_get_displays_changed_msg() -> Option<Message> {
get_displays_msg()
}
pub fn check_displays_changed() -> ResultType<()> {
check_update_displays(&try_get_displays()?);
Ok(())
}
fn get_displays_msg() -> Option<Message> {
let displays = SYNC_DISPLAYS.lock().unwrap().get_update_sync_displays()?;
Some(displays_to_msg(displays))

View File

@ -8,7 +8,9 @@ use hbb_common::{
tokio::{self, sync::mpsc},
ResultType,
};
use scrap::{Capturer, Frame, TraitCapturer, TraitFrame};
#[cfg(feature = "gpucodec")]
use scrap::AdapterDevice;
use scrap::{Capturer, Frame, TraitCapturer, TraitPixelBuffer};
use shared_memory::*;
use std::{
mem::size_of,
@ -381,7 +383,8 @@ pub mod server {
}
}
match c.as_mut().map(|f| f.frame(spf)) {
Some(Ok(f)) => {
Some(Ok(f)) => match f {
Frame::PixelBuffer(f) => {
utils::set_frame_info(
&shmem,
FrameInfo {
@ -396,6 +399,10 @@ pub mod server {
first_frame_captured = true;
dxgi_failed_times = 0;
}
Frame::Texture(_) => {
// should not happen
// The portable service never requests texture output from the capturer,
// so only pixel-buffer frames reach this point.
}
},
Some(Err(e)) => {
if e.kind() != std::io::ErrorKind::WouldBlock {
// DXGI_ERROR_INVALID_CALL after each success on Microsoft GPU driver
@ -510,11 +517,10 @@ pub mod server {
// functions called in main process.
pub mod client {
use hbb_common::{anyhow::Context, message_proto::PointerDeviceEvent};
use crate::display_service;
use super::*;
use crate::display_service;
use hbb_common::{anyhow::Context, message_proto::PointerDeviceEvent};
use scrap::PixelBuffer;
lazy_static::lazy_static! {
static ref RUNNING: Arc<Mutex<bool>> = Default::default();
@ -705,7 +711,11 @@ pub mod client {
}
let frame_ptr = base.add(ADDR_CAPTURE_FRAME);
let data = slice::from_raw_parts(frame_ptr, (*frame_info).length);
Ok(Frame::new(data, self.width, self.height))
Ok(Frame::PixelBuffer(PixelBuffer::new(
data,
self.width,
self.height,
)))
} else {
let ptr = base.add(ADDR_CAPTURE_WOULDBLOCK);
let wouldblock = utils::ptr_to_i32(ptr);
@ -732,6 +742,14 @@ pub mod client {
fn set_gdi(&mut self) -> bool {
true
}
#[cfg(feature = "gpucodec")]
fn device(&self) -> AdapterDevice {
AdapterDevice::default()
}
#[cfg(feature = "gpucodec")]
fn set_output_texture(&mut self, _texture: bool) {}
}
pub(super) fn start_ipc_server() -> mpsc::UnboundedSender<Data> {
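In the portable (elevated) service the captured frame travels through shared memory, so only the CPU PixelBuffer arm carries data; the Texture arm exists to satisfy the new frame enum and is never produced here. An illustrative sketch of the shape being matched, with stand-in types rather than the real scrap ones:

// Hedged sketch of the frame enum the capture loop matches on.
enum SketchFrame<'a> {
    PixelBuffer(&'a [u8]),           // CPU memory, copied through shared memory
    Texture(*mut std::ffi::c_void),  // GPU texture, consumed by gpucodec only
}

fn forward(frame: SketchFrame) {
    match frame {
        SketchFrame::PixelBuffer(data) => {
            // Copy into shared memory for the unprivileged main process.
            let _ = data.len();
        }
        SketchFrame::Texture(_) => {
            // The portable service never asks for texture output, so this
            // arm is intentionally a no-op.
        }
    }
}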

View File

@ -43,6 +43,7 @@ pub struct VideoQoS {
quality: Quality,
users: HashMap<i32, UserData>,
bitrate_store: u32,
support_abr: HashMap<usize, bool>,
}
#[derive(PartialEq, Debug, Clone, Copy)]
@ -80,6 +81,7 @@ impl Default for VideoQoS {
quality: Default::default(),
users: Default::default(),
bitrate_store: 0,
support_abr: Default::default(),
}
}
}
@ -118,8 +120,8 @@ impl VideoQoS {
self.users.iter().any(|u| u.1.record)
}
pub fn abr_enabled() -> bool {
"N" != Config::get_option("enable-abr")
pub fn set_support_abr(&mut self, display_idx: usize, support: bool) {
self.support_abr.insert(display_idx, support);
}
pub fn refresh(&mut self, typ: Option<RefreshType>) {
@ -176,7 +178,9 @@ impl VideoQoS {
let mut quality = latest_quality;
// network delay
if Self::abr_enabled() && typ != Some(RefreshType::SetImageQuality) {
let abr_enabled =
Config::get_option("enable-abr") != "N" && self.support_abr.iter().all(|e| *e.1);
if abr_enabled && typ != Some(RefreshType::SetImageQuality) {
// max delay
let delay = self
.users
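Adaptive bitrate is now gated per display as well as by the global option: every running video service must report encoder support before the delay-based quality adjustment kicks in. A small sketch of that gate, with enable_abr_option standing in for the real config lookup:

use std::collections::HashMap;

// Hedged sketch of the per-display ABR gate.
fn abr_enabled(enable_abr_option: &str, support_abr: &HashMap<usize, bool>) -> bool {
    // ABR runs only when the user did not disable it and every active
    // display's encoder reported support for it.
    enable_abr_option != "N" && support_abr.values().all(|s| *s)
}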

View File

@ -42,15 +42,18 @@ use hbb_common::{
Mutex as TokioMutex,
},
};
#[cfg(feature = "gpucodec")]
use scrap::gpucodec::{GpuEncoder, GpuEncoderConfig};
#[cfg(feature = "hwcodec")]
use scrap::hwcodec::{HwEncoder, HwEncoderConfig};
#[cfg(not(windows))]
use scrap::Capturer;
use scrap::{
aom::AomEncoderConfig,
codec::{Encoder, EncoderCfg, HwEncoderConfig, Quality},
convert_to_yuv,
codec::{Encoder, EncoderCfg, EncodingUpdate, Quality},
record::{Recorder, RecorderContext},
vpxcodec::{VpxEncoderConfig, VpxVideoCodecId},
CodecName, Display, Frame, TraitCapturer, TraitFrame,
CodecName, Display, Frame, TraitCapturer,
};
#[cfg(windows)]
use std::sync::Once;
@ -363,6 +366,7 @@ fn get_capturer(current: usize, portable_service_running: bool) -> ResultType<Ca
}
fn run(vs: VideoService) -> ResultType<()> {
let _raii = Raii::new(vs.idx);
// Wayland only support one video capturer for now. It is ok to call ensure_inited() here.
// Wayland only supports one video capturer for now, so it is ok to call ensure_inited() here.
//
// ensure_inited() is needed because clear() may be called.
@ -391,21 +395,33 @@ fn run(vs: VideoService) -> ResultType<()> {
video_qos.refresh(None);
let mut spf;
let mut quality = video_qos.quality();
let abr = VideoQoS::abr_enabled();
log::info!("initial quality: {quality:?}, abr enabled: {abr}");
let codec_name = Encoder::negotiated_codec();
let recorder = get_recorder(c.width, c.height, &codec_name);
let last_recording = recorder.lock().unwrap().is_some() || video_qos.record();
let record_incoming = !Config::get_option("allow-auto-record-incoming").is_empty();
let client_record = video_qos.record();
drop(video_qos);
let encoder_cfg = get_encoder_config(&c, quality, last_recording);
let encoder_cfg = get_encoder_config(
&c,
display_idx,
quality,
client_record || record_incoming,
last_portable_service_running,
);
Encoder::set_fallback(&encoder_cfg);
let codec_name = Encoder::negotiated_codec();
let recorder = get_recorder(c.width, c.height, &codec_name, record_incoming);
let mut encoder;
let use_i444 = Encoder::use_i444(&encoder_cfg);
match Encoder::new(encoder_cfg.clone(), use_i444) {
Ok(x) => encoder = x,
Err(err) => bail!("Failed to create encoder: {}", err),
}
#[cfg(feature = "gpucodec")]
c.set_output_texture(encoder.input_texture());
VIDEO_QOS.lock().unwrap().store_bitrate(encoder.bitrate());
VIDEO_QOS
.lock()
.unwrap()
.set_support_abr(display_idx, encoder.support_abr());
log::info!("initial quality: {quality:?}");
if sp.is_option_true(OPTION_REFRESH) {
sp.set_option_bool(OPTION_REFRESH, false);
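This is where the zero-copy path is wired up: the capturer is switched to texture output only when the negotiated encoder can consume GPU textures directly; otherwise frames keep flowing as CPU pixel buffers and are converted to YUV as before. A hedged sketch of that hand-off, with illustrative trait names:

// Hedged sketch of the capturer/encoder wiring; traits are stand-ins.
trait TextureCapturer {
    fn set_output_texture(&mut self, texture: bool);
}

trait VideoEncoder {
    // True for the gpucodec path, false for VPX/AOM/hwcodec RAM encoders.
    fn input_texture(&self) -> bool;
}

fn wire_capture<C: TextureCapturer, E: VideoEncoder>(cap: &mut C, enc: &E) {
    // When this is false the capturer keeps producing CPU pixel buffers
    // and the frame is converted to YUV before encoding, as before.
    cap.set_output_texture(enc.input_texture());
}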
@ -439,8 +455,7 @@ fn run(vs: VideoService) -> ResultType<()> {
allow_err!(encoder.set_quality(quality));
video_qos.store_bitrate(encoder.bitrate());
}
let recording = recorder.lock().unwrap().is_some() || video_qos.record();
if recording != last_recording {
if client_record != video_qos.record() {
bail!("SWITCH");
}
drop(video_qos);
@ -459,6 +474,11 @@ fn run(vs: VideoService) -> ResultType<()> {
if Encoder::use_i444(&encoder_cfg) != use_i444 {
bail!("SWITCH");
}
#[cfg(all(windows, feature = "gpucodec"))]
if c.is_gdi() && (codec_name == CodecName::H264GPU || codec_name == CodecName::H265GPU) {
log::info!("changed to gdi when using gpucodec");
bail!("SWITCH");
}
check_privacy_mode_changed(&sp, c.privacy_mode_id)?;
#[cfg(windows)]
{
@ -482,7 +502,7 @@ fn run(vs: VideoService) -> ResultType<()> {
Ok(frame) => {
let time = now - start;
let ms = (time.as_secs() * 1000 + time.subsec_millis() as u64) as i64;
if frame.data().len() != 0 {
if frame.valid() {
let send_conn_ids = handle_one_frame(
display_idx,
&sp,
@ -533,6 +553,8 @@ fn run(vs: VideoService) -> ResultType<()> {
}
}
Err(err) => {
// Get display information again immediately after error.
// Get display information again immediately after an error.
crate::display_service::check_displays_changed().ok();
// This check may be redundant, but it is better to be safe.
// The previous check in `sp.is_option_true(OPTION_REFRESH)` block may be enough.
// The previous check in the `sp.is_option_true(OPTION_REFRESH)` block may be enough.
try_broadcast_display_changed(&sp, display_idx, &c)?;
@ -576,33 +598,88 @@ fn run(vs: VideoService) -> ResultType<()> {
Ok(())
}
fn get_encoder_config(c: &CapturerInfo, quality: Quality, recording: bool) -> EncoderCfg {
struct Raii(usize);
impl Raii {
fn new(display_idx: usize) -> Self {
Raii(display_idx)
}
}
impl Drop for Raii {
fn drop(&mut self) {
#[cfg(feature = "gpucodec")]
GpuEncoder::set_not_use(self.0, false);
VIDEO_QOS.lock().unwrap().set_support_abr(self.0, true);
}
}
fn get_encoder_config(
c: &CapturerInfo,
_display_idx: usize,
quality: Quality,
record: bool,
_portable_service: bool,
) -> EncoderCfg {
#[cfg(all(windows, feature = "gpucodec"))]
if _portable_service || c.is_gdi() {
log::info!("gdi:{}, portable:{}", c.is_gdi(), _portable_service);
GpuEncoder::set_not_use(_display_idx, true);
}
#[cfg(feature = "gpucodec")]
Encoder::update(EncodingUpdate::Check);
// https://www.wowza.com/community/t/the-correct-keyframe-interval-in-obs-studio/95162
let keyframe_interval = if recording { Some(240) } else { None };
match Encoder::negotiated_codec() {
scrap::CodecName::H264(name) | scrap::CodecName::H265(name) => {
EncoderCfg::HW(HwEncoderConfig {
name,
let keyframe_interval = if record { Some(240) } else { None };
let negotiated_codec = Encoder::negotiated_codec();
match negotiated_codec.clone() {
CodecName::H264GPU | CodecName::H265GPU => {
#[cfg(feature = "gpucodec")]
if let Some(feature) = GpuEncoder::try_get(&c.device(), negotiated_codec.clone()) {
EncoderCfg::GPU(GpuEncoderConfig {
device: c.device(),
width: c.width,
height: c.height,
quality,
feature,
keyframe_interval,
})
} else {
handle_hw_encoder(
negotiated_codec.clone(),
c.width,
c.height,
quality as _,
keyframe_interval,
)
}
name @ (scrap::CodecName::VP8 | scrap::CodecName::VP9) => {
EncoderCfg::VPX(VpxEncoderConfig {
#[cfg(not(feature = "gpucodec"))]
handle_hw_encoder(
negotiated_codec.clone(),
c.width,
c.height,
quality as _,
keyframe_interval,
)
}
CodecName::H264HW(_name) | CodecName::H265HW(_name) => handle_hw_encoder(
negotiated_codec.clone(),
c.width,
c.height,
quality as _,
keyframe_interval,
),
name @ (CodecName::VP8 | CodecName::VP9) => EncoderCfg::VPX(VpxEncoderConfig {
width: c.width as _,
height: c.height as _,
quality,
codec: if name == scrap::CodecName::VP8 {
codec: if name == CodecName::VP8 {
VpxVideoCodecId::VP8
} else {
VpxVideoCodecId::VP9
},
keyframe_interval,
})
}
scrap::CodecName::AV1 => EncoderCfg::AOM(AomEncoderConfig {
}),
CodecName::AV1 => EncoderCfg::AOM(AomEncoderConfig {
width: c.width as _,
height: c.height as _,
quality,
@ -611,12 +688,81 @@ fn get_encoder_config(c: &CapturerInfo, quality: Quality, recording: bool) -> En
}
}
fn handle_hw_encoder(
_name: CodecName,
width: usize,
height: usize,
quality: Quality,
keyframe_interval: Option<usize>,
) -> EncoderCfg {
let f = || {
#[cfg(feature = "hwcodec")]
match _name {
CodecName::H264GPU | CodecName::H265GPU => {
if !scrap::codec::enable_hwcodec_option() {
return Err(());
}
let is_h265 = _name == CodecName::H265GPU;
let best = HwEncoder::best();
if let Some(h264) = best.h264 {
if !is_h265 {
return Ok(EncoderCfg::HW(HwEncoderConfig {
name: h264.name,
width,
height,
quality,
keyframe_interval,
}));
}
}
if let Some(h265) = best.h265 {
if is_h265 {
return Ok(EncoderCfg::HW(HwEncoderConfig {
name: h265.name,
width,
height,
quality,
keyframe_interval,
}));
}
}
}
CodecName::H264HW(name) | CodecName::H265HW(name) => {
return Ok(EncoderCfg::HW(HwEncoderConfig {
name,
width,
height,
quality,
keyframe_interval,
}));
}
_ => {
return Err(());
}
};
Err(())
};
match f() {
Ok(cfg) => cfg,
_ => EncoderCfg::VPX(VpxEncoderConfig {
width: width as _,
height: height as _,
quality,
codec: VpxVideoCodecId::VP9,
keyframe_interval,
}),
}
}
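handle_hw_encoder is the fallback tier below the GPU texture encoder: it tries the RAM-based hwcodec encoder for the negotiated H.264/H.265 codec and drops to software VP9 when nothing else applies. The selection order, reduced to a sketch with hypothetical names:

// Hedged sketch of the encoder fallback order implied above.
enum ChosenEncoder {
    Gpu,     // zero-copy texture path (gpucodec)
    HwRam,   // RAM-based hardware path (hwcodec)
    SoftVp9, // software safety net, always available
}

fn choose(gpu_ok: bool, hw_ok: bool) -> ChosenEncoder {
    if gpu_ok {
        ChosenEncoder::Gpu
    } else if hw_ok {
        ChosenEncoder::HwRam
    } else {
        ChosenEncoder::SoftVp9
    }
}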
fn get_recorder(
width: usize,
height: usize,
codec_name: &CodecName,
record_incoming: bool,
) -> Arc<Mutex<Option<Recorder>>> {
let recorder = if !Config::get_option("allow-auto-record-incoming").is_empty() {
let recorder = if record_incoming {
use crate::hbbs_http::record_upload;
let tx = if record_upload::is_enable() {
@ -678,9 +824,10 @@ fn handle_one_frame(
Ok(())
})?;
let frame = frame.to(encoder.yuvfmt(), yuv, mid_data)?;
let mut send_conn_ids: HashSet<i32> = Default::default();
convert_to_yuv(&frame, encoder.yuvfmt(), yuv, mid_data)?;
if let Ok(mut vf) = encoder.encode_to_message(yuv, ms) {
match encoder.encode_to_message(frame, ms) {
Ok(mut vf) => {
vf.display = display as _;
let mut msg = Message::new();
msg.set_video_frame(vf);
@ -691,6 +838,13 @@ fn handle_one_frame(
.map(|r| r.write_message(&msg));
send_conn_ids = sp.send_video_frame(msg);
}
Err(e) => match e.to_string().as_str() {
scrap::codec::ENCODE_NEED_SWITCH => {
bail!("SWITCH");
}
_ => {}
},
}
Ok(send_conn_ids)
}
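Encoding errors are now surfaced to the capture loop, and the ENCODE_NEED_SWITCH case makes the loop tear itself down with bail!("SWITCH") so the service restarts with a renegotiated encoder. A self-contained sketch of that sentinel-error handshake (the constant's real value lives in scrap::codec and is only mirrored by name here):

// Hedged sketch: a local stand-in for scrap::codec::ENCODE_NEED_SWITCH.
const ENCODE_NEED_SWITCH: &str = "ENCODE_NEED_SWITCH";

fn encode_step(frame_ok: bool) -> Result<Vec<u8>, String> {
    if frame_ok {
        Ok(vec![0u8; 16]) // pretend this is an encoded video frame
    } else {
        Err(ENCODE_NEED_SWITCH.to_string()) // ask the caller to rebuild the encoder
    }
}

fn capture_loop() -> Result<(), String> {
    loop {
        match encode_step(false) {
            Ok(_bytes) => { /* broadcast encoded frame to subscribers */ }
            Err(e) if e == ENCODE_NEED_SWITCH => {
                // Bubble up so the service restarts with a renegotiated
                // codec, mirroring the `bail!("SWITCH")` above.
                return Err("SWITCH".to_string());
            }
            Err(_) => { /* other errors: log and keep capturing */ }
        }
    }
}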

View File

@ -191,8 +191,7 @@ pub(super) async fn check_init() -> ResultType<()> {
maxy = max_height;
let capturer = Box::into_raw(Box::new(
Capturer::new(display)
.with_context(|| "Failed to create capturer")?,
Capturer::new(display).with_context(|| "Failed to create capturer")?,
));
let capturer = CapturerPtr(capturer);
let cap_display_info = Box::into_raw(Box::new(CapDisplayInfo {

View File

@ -580,6 +580,10 @@ impl UI {
has_hwcodec()
}
fn has_gpucodec(&self) -> bool {
has_gpucodec()
}
fn get_langs(&self) -> String {
get_langs()
}
@ -680,6 +684,7 @@ impl sciter::EventHandler for UI {
fn get_lan_peers();
fn get_uuid();
fn has_hwcodec();
fn has_gpucodec();
fn get_langs();
fn default_video_save_directory();
fn handle_relay_id(String);

View File

@ -210,12 +210,13 @@ class Enhancements: Reactor.Component {
function render() {
var has_hwcodec = handler.has_hwcodec();
var has_gpucodec = handler.has_gpucodec();
var support_remove_wallpaper = handler.support_remove_wallpaper();
var me = this;
self.timer(1ms, function() { me.toggleMenuState() });
return <li>{translate('Enhancements')}
<menu #enhancements-menu>
{has_hwcodec ? <li #enable-hwcodec><span>{svg_checkmark}</span>{translate("Hardware Codec")} (beta)</li> : ""}
{(has_hwcodec || has_gpucodec) ? <li #enable-hwcodec><span>{svg_checkmark}</span>{translate("Enable hardware codec")}</li> : ""}
<li #enable-abr><span>{svg_checkmark}</span>{translate("Adaptive bitrate")} (beta)</li>
<li #screen-recording>{translate("Recording")}</li>
{support_remove_wallpaper ? <li #allow-remove-wallpaper><span>{svg_checkmark}</span>{translate("Remove wallpaper during incoming sessions")}</li> : ""}

View File

@ -505,7 +505,7 @@ impl SciterSession {
.lc
.write()
.unwrap()
.initialize(id, conn_type, None, force_relay);
.initialize(id, conn_type, None, force_relay, None);
Self(session)
}

View File

@ -830,11 +830,29 @@ pub fn has_hwcodec() -> bool {
return true;
}
#[inline]
pub fn has_gpucodec() -> bool {
cfg!(feature = "gpucodec")
}
#[cfg(feature = "flutter")]
#[inline]
pub fn supported_hwdecodings() -> (bool, bool) {
let decoding = scrap::codec::Decoder::supported_decodings(None);
(decoding.ability_h264 > 0, decoding.ability_h265 > 0)
let decoding = scrap::codec::Decoder::supported_decodings(None, true, None);
#[allow(unused_mut)]
let (mut h264, mut h265) = (decoding.ability_h264 > 0, decoding.ability_h265 > 0);
#[cfg(feature = "gpucodec")]
{
// supported_decodings check runtime luid
// supported_decodings checks the runtime LUID; use the probe that skips that check here.
let gpu = scrap::gpucodec::GpuDecoder::possible_available_without_check();
if gpu.0 {
h264 = true;
}
if gpu.1 {
h265 = true;
}
}
(h264, h265)
}
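For the Flutter UI the advertised hardware decodings become the union of the hwcodec abilities and the GPU decoder probe that skips the runtime LUID check. A small sketch of that merge with illustrative parameters:

// Hedged sketch of how the decoder abilities are OR-ed together.
fn merge_decodings(
    ability_h264: i32,
    ability_h265: i32,
    gpu_probe: Option<(bool, bool)>, // (h264, h265), present only with `gpucodec`
) -> (bool, bool) {
    let (mut h264, mut h265) = (ability_h264 > 0, ability_h265 > 0);
    if let Some((gpu_h264, gpu_h265)) = gpu_probe {
        h264 |= gpu_h264;
        h265 |= gpu_h265;
    }
    (h264, h265)
}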
#[cfg(not(any(target_os = "android", target_os = "ios")))]

View File

@ -6,6 +6,7 @@ use bytes::Bytes;
use rdev::{Event, EventType::*, KeyCode};
use std::{
collections::HashMap,
ffi::c_void,
ops::{Deref, DerefMut},
str::FromStr,
sync::{Arc, Mutex, RwLock},
@ -435,7 +436,9 @@ impl<T: InvokeUiSession> Session<T> {
}
pub fn alternative_codecs(&self) -> (bool, bool, bool, bool) {
let decoder = scrap::codec::Decoder::supported_decodings(None);
let luid = self.lc.read().unwrap().adapter_luid;
let decoder =
scrap::codec::Decoder::supported_decodings(None, cfg!(feature = "flutter"), luid);
let mut vp8 = decoder.ability_vp8 > 0;
let mut av1 = decoder.ability_av1 > 0;
let mut h264 = decoder.ability_h264 > 0;
@ -449,7 +452,7 @@ impl<T: InvokeUiSession> Session<T> {
}
pub fn change_prefer_codec(&self) {
let msg = self.lc.write().unwrap().change_prefer_codec();
let msg = self.lc.write().unwrap().update_supported_decodings();
self.send(Data::Message(msg));
}
@ -1286,6 +1289,8 @@ pub trait InvokeUiSession: Send + Sync + Clone + 'static + Sized + Default {
fn on_voice_call_incoming(&self);
fn get_rgba(&self, display: usize) -> *const u8;
fn next_rgba(&self, display: usize);
#[cfg(all(feature = "gpucodec", feature = "flutter"))]
fn on_texture(&self, display: usize, texture: *mut c_void);
}
impl<T: InvokeUiSession> Deref for Session<T> {
@ -1567,12 +1572,20 @@ pub async fn io_loop<T: InvokeUiSession>(handler: Session<T>, round: u32) {
let (video_sender, audio_sender, video_queue_map, decode_fps_map, chroma) =
start_video_audio_threads(
handler.clone(),
move |display: usize, data: &mut scrap::ImageRgb| {
move |display: usize,
data: &mut scrap::ImageRgb,
_texture: *mut c_void,
pixelbuffer: bool| {
let mut write_lock = frame_count_map_cl.write().unwrap();
let count = write_lock.get(&display).unwrap_or(&0) + 1;
write_lock.insert(display, count);
drop(write_lock);
if pixelbuffer {
ui_handler.on_rgba(display, data);
} else {
#[cfg(all(feature = "gpucodec", feature = "flutter"))]
ui_handler.on_texture(display, _texture);
}
},
);
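The frame callback now carries both an RGBA buffer and a raw texture pointer plus a flag saying which one is valid, so CPU-decoded frames still go through on_rgba while gpucodec output is handed to the texture renderer. A hedged sketch of that dispatch, with an illustrative trait:

use std::ffi::c_void;

// Hedged sketch of the dual render path in the frame callback.
trait RenderSink {
    fn on_rgba(&self, display: usize, rgba: &mut Vec<u8>);
    fn on_texture(&self, display: usize, texture: *mut c_void);
}

fn deliver<S: RenderSink>(
    sink: &S,
    display: usize,
    rgba: &mut Vec<u8>,
    texture: *mut c_void,
    pixelbuffer: bool,
) {
    if pixelbuffer {
        sink.on_rgba(display, rgba); // decoded on CPU, copied into UI memory
    } else {
        sink.on_texture(display, texture); // GPU texture shared with the renderer
    }
}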