Merge branch 'hwcodec' into master

21pages 2022-06-14 11:46:03 +08:00
commit 847c4acb07
17 changed files with 1782 additions and 638 deletions

Cargo.lock generated
View File

@@ -2231,6 +2231,19 @@ version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "hwcodec"
version = "0.1.0"
source = "git+https://github.com/21pages/hwcodec#504363444b6fcd93f0a9b2f21d80744306f15819"
dependencies = [
"bindgen",
"cc",
"log",
"serde 1.0.137",
"serde_derive",
"serde_json 1.0.81",
]
[[package]]
name = "hyper"
version = "0.14.19"
@@ -4255,6 +4268,8 @@ dependencies = [
"gstreamer",
"gstreamer-app",
"gstreamer-video",
"hbb_common",
"hwcodec",
"jni",
"lazy_static",
"libc",

View File

@@ -24,6 +24,7 @@ appimage = []
use_samplerate = ["samplerate"]
use_rubato = ["rubato"]
use_dasp = ["dasp"]
hwcodec = ["scrap/hwcodec"]
default = ["use_dasp"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@@ -9,6 +9,26 @@ message VP9 {
message VP9s { repeated VP9 frames = 1; }
message H264 {
bytes data = 1;
bool key = 2;
int64 pts = 3;
}
message H264s {
repeated H264 h264s = 1;
}
message H265 {
bytes data = 1;
bool key = 2;
int64 pts = 3;
}
message H265s {
repeated H265 h265s = 1;
}
message RGB { bool compress = 1; }
// plane data is sent directly as binary so the web client can use it as an ArrayBuffer
@@ -22,6 +42,8 @@ message VideoFrame {
VP9s vp9s = 6;
RGB rgb = 7;
YUV yuv = 8;
H264s h264s = 10;
H265s h265s = 11;
}
int64 timestamp = 9;
}
@@ -430,6 +452,14 @@ enum ImageQuality {
Best = 4;
}
message VideoCodecState {
int32 ScoreVpx = 1;
bool H264 = 2;
int32 ScoreH264 = 3;
bool H265 = 4;
int32 ScoreH265 = 5;
}
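VideoCodecState lets each peer advertise which hardware decoders it has, plus a relative score for each; the encoding side sums these with its own encoder scores to pick a codec. A minimal sketch of how a client might fill it, mirroring the Decoder logic later in this commit (the hw probe result is an assumption here):
let mut state = VideoCodecState::default();
state.ScoreVpx = 90; // software VP9 baseline (SCORE_VPX)
state.H264 = hw.h264.is_some(); // assumed HwDecoders probe result
state.ScoreH264 = hw.h264.as_ref().map_or(0, |d| d.info.score);
state.H265 = hw.h265.is_some();
state.ScoreH265 = hw.h265.as_ref().map_or(0, |d| d.info.score);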
message OptionMessage {
enum BoolOption {
NotSet = 0;
@@ -445,6 +475,7 @@ message OptionMessage {
BoolOption disable_audio = 7;
BoolOption disable_clipboard = 8;
BoolOption enable_file_transfer = 9;
VideoCodecState video_codec_state = 10;
}
message TestDelay {

View File

@@ -35,6 +35,7 @@ lazy_static::lazy_static! {
static ref CONFIG: Arc<RwLock<Config>> = Arc::new(RwLock::new(Config::load()));
static ref CONFIG2: Arc<RwLock<Config2>> = Arc::new(RwLock::new(Config2::load()));
static ref LOCAL_CONFIG: Arc<RwLock<LocalConfig>> = Arc::new(RwLock::new(LocalConfig::load()));
static ref HWCODEC_CONFIG: Arc<RwLock<HwCodecConfig>> = Arc::new(RwLock::new(HwCodecConfig::load()));
pub static ref ONLINE: Arc<Mutex<HashMap<String, i64>>> = Default::default();
pub static ref PROD_RENDEZVOUS_SERVER: Arc<RwLock<String>> = Default::default();
pub static ref APP_NAME: Arc<RwLock<String>> = Arc::new(RwLock::new("RustDesk".to_owned()));
@@ -881,6 +882,43 @@ impl LanPeers {
}
}
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
pub struct HwCodecConfig {
#[serde(default)]
options: HashMap<String, String>,
}
impl HwCodecConfig {
fn load() -> HwCodecConfig {
Config::load_::<HwCodecConfig>("_hwcodec")
}
fn store(&self) {
Config::store_(self, "_hwcodec");
}
pub fn get_option(k: &str) -> String {
if let Some(v) = HWCODEC_CONFIG.read().unwrap().options.get(k) {
v.clone()
} else {
"".to_owned()
}
}
pub fn set_option(k: String, v: String) {
let mut config = HWCODEC_CONFIG.write().unwrap();
let v2 = if v.is_empty() { None } else { Some(&v) };
if v2 != config.options.get(&k) {
if v2.is_none() {
config.options.remove(&k);
} else {
config.options.insert(k, v);
}
config.store();
}
}
}
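A short usage sketch of the new store (the key is the one scrap's hwcodec module uses below; the payload here is illustrative):
// get_option returns "" when the key is unset; set_option persists only on change.
let serialized = String::from("{\"h264\":null,\"h265\":null}"); // illustrative payload
HwCodecConfig::set_option("bestHwEncoders".to_owned(), serialized);
let cached = HwCodecConfig::get_option("bestHwEncoders");
assert!(!cached.is_empty());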
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -18,6 +18,7 @@ cfg-if = "1.0"
libc = "0.2"
num_cpus = "1.13"
lazy_static = "1.4"
hbb_common = { path = "../hbb_common" }
[dependencies.winapi]
version = "0.3"
@@ -48,3 +49,6 @@ tracing = { version = "0.1", optional = true }
gstreamer = { version = "0.16", optional = true }
gstreamer-app = { version = "0.16", features = ["v1_10"], optional = true }
gstreamer-video = { version = "0.16", optional = true }
[target.'cfg(target_os = "windows")'.dependencies]
hwcodec = { git = "https://github.com/21pages/hwcodec", optional = true }

View File

@@ -13,10 +13,11 @@ use std::time::{Duration, Instant};
use std::{io, thread};
use docopt::Docopt;
use scrap::codec::{EncoderApi, EncoderCfg};
use webm::mux;
use webm::mux::Track;
use scrap::codec as vpx_encode;
use scrap::vpxcodec as vpx_encode;
use scrap::{Capturer, Display, STRIDE_ALIGN};
const USAGE: &'static str = "
@@ -89,27 +90,25 @@ fn main() -> io::Result<()> {
mux::Segment::new(mux::Writer::new(out)).expect("Could not initialize the multiplexer.");
let (vpx_codec, mux_codec) = match args.flag_codec {
Codec::Vp8 => (vpx_encode::VideoCodecId::VP8, mux::VideoCodecId::VP8),
Codec::Vp9 => (vpx_encode::VideoCodecId::VP9, mux::VideoCodecId::VP9),
Codec::Vp8 => (vpx_encode::VpxVideoCodecId::VP8, mux::VideoCodecId::VP8),
Codec::Vp9 => (vpx_encode::VpxVideoCodecId::VP9, mux::VideoCodecId::VP9),
};
let mut vt = webm.add_video_track(width, height, None, mux_codec);
// Setup the encoder.
let mut vpx = vpx_encode::Encoder::new(
&vpx_encode::Config {
width,
height,
timebase: [1, 1000],
bitrate: args.flag_bv,
codec: vpx_codec,
rc_min_quantizer: 0,
rc_max_quantizer: 0,
speed: 6,
},
0,
)
let mut vpx = vpx_encode::VpxEncoder::new(EncoderCfg::VPX(vpx_encode::VpxEncoderConfig {
width,
height,
timebase: [1, 1000],
bitrate: args.flag_bv,
codec: vpx_codec,
rc_min_quantizer: 0,
rc_max_quantizer: 0,
speed: 6,
num_threads: 0,
}))
.unwrap();
// Start recording.

View File

@@ -1,536 +1,328 @@
// https://github.com/astraw/vpx-encode
// https://github.com/astraw/env-libvpx-sys
// https://github.com/rust-av/vpx-rs/blob/master/src/decoder.rs
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
use std::os::raw::{c_int, c_uint};
use std::{ptr, slice};
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum VideoCodecId {
VP8,
VP9,
}
impl Default for VideoCodecId {
fn default() -> VideoCodecId {
VideoCodecId::VP9
}
}
pub struct Encoder {
ctx: vpx_codec_ctx_t,
width: usize,
height: usize,
}
pub struct Decoder {
ctx: vpx_codec_ctx_t,
}
#[derive(Debug)]
pub enum Error {
FailedCall(String),
BadPtr(String),
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
write!(f, "{:?}", self)
}
}
impl std::error::Error for Error {}
pub type Result<T> = std::result::Result<T, Error>;
macro_rules! call_vpx {
($x:expr) => {{
let result = unsafe { $x }; // original expression
let result_int = unsafe { std::mem::transmute::<_, i32>(result) };
if result_int != 0 {
return Err(Error::FailedCall(format!(
"errcode={} {}:{}:{}:{}",
result_int,
module_path!(),
file!(),
line!(),
column!()
))
.into());
}
result
}};
}
macro_rules! call_vpx_ptr {
($x:expr) => {{
let result = unsafe { $x }; // original expression
let result_int = unsafe { std::mem::transmute::<_, isize>(result) };
if result_int == 0 {
return Err(Error::BadPtr(format!(
"errcode={} {}:{}:{}:{}",
result_int,
module_path!(),
file!(),
line!(),
column!()
))
.into());
}
result
}};
}
impl Encoder {
pub fn new(config: &Config, num_threads: u32) -> Result<Self> {
let i;
if cfg!(feature = "VP8") {
i = match config.codec {
VideoCodecId::VP8 => call_vpx_ptr!(vpx_codec_vp8_cx()),
VideoCodecId::VP9 => call_vpx_ptr!(vpx_codec_vp9_cx()),
};
} else {
i = call_vpx_ptr!(vpx_codec_vp9_cx());
}
let mut c = unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
call_vpx!(vpx_codec_enc_config_default(i, &mut c, 0));
// https://www.webmproject.org/docs/encoder-parameters/
// default: c.rc_min_quantizer = 0, c.rc_max_quantizer = 63
// try rc_resize_allowed later
c.g_w = config.width;
c.g_h = config.height;
c.g_timebase.num = config.timebase[0];
c.g_timebase.den = config.timebase[1];
c.rc_target_bitrate = config.bitrate;
c.rc_undershoot_pct = 95;
c.rc_dropframe_thresh = 25;
if config.rc_min_quantizer > 0 {
c.rc_min_quantizer = config.rc_min_quantizer;
}
if config.rc_max_quantizer > 0 {
c.rc_max_quantizer = config.rc_max_quantizer;
}
let mut speed = config.speed;
if speed <= 0 {
speed = 6;
}
c.g_threads = if num_threads == 0 {
num_cpus::get() as _
} else {
num_threads
};
c.g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT;
// https://developers.google.com/media/vp9/bitrate-modes/
// Constant Bitrate mode (CBR) is recommended for live streaming with VP9.
c.rc_end_usage = vpx_rc_mode::VPX_CBR;
// c.kf_min_dist = 0;
// c.kf_max_dist = 999999;
c.kf_mode = vpx_kf_mode::VPX_KF_DISABLED; // reduce bandwidth a lot
/*
The VPX encoder supports two-pass encoding for rate-control purposes;
two-pass encoding yields the best PSNR at a given bitrate.
*/
let mut ctx = Default::default();
call_vpx!(vpx_codec_enc_init_ver(
&mut ctx,
i,
&c,
0,
VPX_ENCODER_ABI_VERSION as _
));
if config.codec == VideoCodecId::VP9 {
// set encoder internal speed settings
// in ffmpeg, it is --speed option
/*
set to 0 or a positive value 1-16, the codec will try to adapt its
complexity depending on the time it spends encoding. Increasing this
number will make the speed go up and the quality go down.
Negative values mean strict enforcement of this
while positive values are adaptive
*/
/* https://developers.google.com/media/vp9/live-encoding
Speed 5 to 8 should be used for live / real-time encoding.
Lower numbers (5 or 6) are higher quality but require more CPU power.
Higher numbers (7 or 8) will be lower quality but more manageable for lower latency
use cases and also for lower CPU power devices such as mobile.
*/
call_vpx!(vpx_codec_control_(&mut ctx, VP8E_SET_CPUUSED as _, speed,));
// set row level multi-threading
/*
as some people in comments and below have already commented,
more recent versions of libvpx support -row-mt 1 to enable tile row
multi-threading. This can increase the number of tiles by up to 4x in VP9
(since the max number of tile rows is 4, regardless of video height).
To enable this, use -tile-rows N where N is the number of tile rows in
log2 units (so -tile-rows 1 means 2 tile rows and -tile-rows 2 means 4 tile
rows). The total number of active threads will then be equal to
$tile_rows * $tile_columns
*/
call_vpx!(vpx_codec_control_(
&mut ctx,
VP9E_SET_ROW_MT as _,
1 as c_int
));
call_vpx!(vpx_codec_control_(
&mut ctx,
VP9E_SET_TILE_COLUMNS as _,
4 as c_int
));
}
Ok(Self {
ctx,
width: config.width as _,
height: config.height as _,
})
}
pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
assert!(2 * data.len() >= 3 * self.width * self.height);
let mut image = Default::default();
call_vpx_ptr!(vpx_img_wrap(
&mut image,
vpx_img_fmt::VPX_IMG_FMT_I420,
self.width as _,
self.height as _,
stride_align as _,
data.as_ptr() as _,
));
call_vpx!(vpx_codec_encode(
&mut self.ctx,
&image,
pts as _,
1, // Duration
0, // Flags
VPX_DL_REALTIME as _,
));
Ok(EncodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
/// Notify the encoder to return any pending packets
pub fn flush(&mut self) -> Result<EncodeFrames> {
call_vpx!(vpx_codec_encode(
&mut self.ctx,
ptr::null(),
-1, // PTS
1, // Duration
0, // Flags
VPX_DL_REALTIME as _,
));
Ok(EncodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
}
impl Drop for Encoder {
fn drop(&mut self) {
unsafe {
let result = vpx_codec_destroy(&mut self.ctx);
if result != VPX_CODEC_OK {
panic!("failed to destroy vpx codec");
}
}
}
}
#[derive(Clone, Copy, Debug)]
pub struct EncodeFrame<'a> {
/// Compressed data.
pub data: &'a [u8],
/// Whether the frame is a keyframe.
pub key: bool,
/// Presentation timestamp (in timebase units).
pub pts: i64,
}
#[derive(Clone, Copy, Debug)]
pub struct Config {
/// The width (in pixels).
pub width: c_uint,
/// The height (in pixels).
pub height: c_uint,
/// The timebase numerator and denominator (in seconds).
pub timebase: [c_int; 2],
/// The target bitrate (in kilobits per second).
pub bitrate: c_uint,
/// The codec
pub codec: VideoCodecId,
pub rc_min_quantizer: u32,
pub rc_max_quantizer: u32,
pub speed: i32,
}
pub struct EncodeFrames<'a> {
ctx: &'a mut vpx_codec_ctx_t,
iter: vpx_codec_iter_t,
}
impl<'a> Iterator for EncodeFrames<'a> {
type Item = EncodeFrame<'a>;
fn next(&mut self) -> Option<Self::Item> {
loop {
unsafe {
let pkt = vpx_codec_get_cx_data(self.ctx, &mut self.iter);
if pkt.is_null() {
return None;
} else if (*pkt).kind == vpx_codec_cx_pkt_kind::VPX_CODEC_CX_FRAME_PKT {
let f = &(*pkt).data.frame;
return Some(Self::Item {
data: slice::from_raw_parts(f.buf as _, f.sz as _),
key: (f.flags & VPX_FRAME_IS_KEY) != 0,
pts: f.pts,
});
} else {
// Ignore the packet.
}
}
}
}
}
impl Decoder {
/// Create a new decoder
///
/// # Errors
///
/// The function may fail if the underlying libvpx does not provide
/// the VP9 decoder.
pub fn new(codec: VideoCodecId, num_threads: u32) -> Result<Self> {
// This is sound because `vpx_codec_ctx` is a repr(C) struct without any field that can
// cause UB if uninitialized.
let i;
if cfg!(feature = "VP8") {
i = match codec {
VideoCodecId::VP8 => call_vpx_ptr!(vpx_codec_vp8_dx()),
VideoCodecId::VP9 => call_vpx_ptr!(vpx_codec_vp9_dx()),
};
} else {
i = call_vpx_ptr!(vpx_codec_vp9_dx());
}
let mut ctx = Default::default();
let cfg = vpx_codec_dec_cfg_t {
threads: if num_threads == 0 {
num_cpus::get() as _
} else {
num_threads
},
w: 0,
h: 0,
};
/*
unsafe {
println!("{}", vpx_codec_get_caps(i));
}
*/
call_vpx!(vpx_codec_dec_init_ver(
&mut ctx,
i,
&cfg,
0,
VPX_DECODER_ABI_VERSION as _,
));
Ok(Self { ctx })
}
pub fn decode2rgb(&mut self, data: &[u8], rgba: bool) -> Result<Vec<u8>> {
let mut img = Image::new();
for frame in self.decode(data)? {
drop(img);
img = frame;
}
for frame in self.flush()? {
drop(img);
img = frame;
}
if img.is_null() {
Ok(Vec::new())
} else {
let mut out = Default::default();
img.rgb(1, rgba, &mut out);
Ok(out)
}
}
/// Feed some compressed data to the decoder
///
/// The `data` slice is sent to the decoder
///
/// It matches a call to `vpx_codec_decode`.
pub fn decode(&mut self, data: &[u8]) -> Result<DecodeFrames> {
call_vpx!(vpx_codec_decode(
&mut self.ctx,
data.as_ptr(),
data.len() as _,
ptr::null_mut(),
0,
));
Ok(DecodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
/// Notify the decoder to return any pending frame
pub fn flush(&mut self) -> Result<DecodeFrames> {
call_vpx!(vpx_codec_decode(
&mut self.ctx,
ptr::null(),
0,
ptr::null_mut(),
0
));
Ok(DecodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
}
impl Drop for Decoder {
fn drop(&mut self) {
unsafe {
let result = vpx_codec_destroy(&mut self.ctx);
if result != VPX_CODEC_OK {
panic!("failed to destroy vpx codec");
}
}
}
}
pub struct DecodeFrames<'a> {
ctx: &'a mut vpx_codec_ctx_t,
iter: vpx_codec_iter_t,
}
impl<'a> Iterator for DecodeFrames<'a> {
type Item = Image;
fn next(&mut self) -> Option<Self::Item> {
let img = unsafe { vpx_codec_get_frame(self.ctx, &mut self.iter) };
if img.is_null() {
return None;
} else {
return Some(Image(img));
}
}
}
// https://chromium.googlesource.com/webm/libvpx/+/bali/vpx/src/vpx_image.c
pub struct Image(*mut vpx_image_t);
impl Image {
#[inline]
pub fn new() -> Self {
Self(std::ptr::null_mut())
}
#[inline]
pub fn is_null(&self) -> bool {
self.0.is_null()
}
#[inline]
pub fn width(&self) -> usize {
self.inner().d_w as _
}
#[inline]
pub fn height(&self) -> usize {
self.inner().d_h as _
}
#[inline]
pub fn format(&self) -> vpx_img_fmt_t {
// VPX_IMG_FMT_I420
self.inner().fmt
}
#[inline]
pub fn inner(&self) -> &vpx_image_t {
unsafe { &*self.0 }
}
#[inline]
pub fn stride(&self, iplane: usize) -> i32 {
self.inner().stride[iplane]
}
pub fn rgb(&self, stride_align: usize, rgba: bool, dst: &mut Vec<u8>) {
let h = self.height();
let mut w = self.width();
let bps = if rgba { 4 } else { 3 };
w = (w + stride_align - 1) & !(stride_align - 1);
dst.resize(h * w * bps, 0);
let img = self.inner();
unsafe {
if rgba {
super::I420ToARGB(
img.planes[0],
img.stride[0],
img.planes[1],
img.stride[1],
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
(w * bps) as _,
self.width() as _,
self.height() as _,
);
} else {
super::I420ToRAW(
img.planes[0],
img.stride[0],
img.planes[1],
img.stride[1],
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
(w * bps) as _,
self.width() as _,
self.height() as _,
);
}
}
}
#[inline]
pub fn data(&self) -> (&[u8], &[u8], &[u8]) {
unsafe {
let img = self.inner();
let h = (img.d_h as usize + 1) & !1;
let n = img.stride[0] as usize * h;
let y = slice::from_raw_parts(img.planes[0], n);
let n = img.stride[1] as usize * (h >> 1);
let u = slice::from_raw_parts(img.planes[1], n);
let v = slice::from_raw_parts(img.planes[2], n);
(y, u, v)
}
}
}
impl Drop for Image {
fn drop(&mut self) {
if !self.0.is_null() {
unsafe { vpx_img_free(self.0) };
}
}
}
unsafe impl Send for vpx_codec_ctx_t {}
use std::ops::{Deref, DerefMut};
#[cfg(feature = "hwcodec")]
use std::{
collections::HashMap,
sync::{Arc, Mutex},
};
#[cfg(feature = "hwcodec")]
use crate::hwcodec::*;
use crate::vpxcodec::*;
use hbb_common::{
anyhow::anyhow,
log,
message_proto::{video_frame, Message, VP9s, VideoCodecState},
ResultType,
};
#[cfg(feature = "hwcodec")]
use hbb_common::{
lazy_static,
message_proto::{H264s, H265s},
};
#[cfg(feature = "hwcodec")]
lazy_static::lazy_static! {
static ref PEER_DECODER_STATES: Arc<Mutex<HashMap<i32, VideoCodecState>>> = Default::default();
static ref MY_DECODER_STATE: Arc<Mutex<VideoCodecState>> = Default::default();
}
const SCORE_VPX: i32 = 90;
#[derive(Debug, Clone)]
pub struct HwEncoderConfig {
pub codec_name: String,
pub width: usize,
pub height: usize,
pub bitrate_ratio: i32,
}
#[derive(Debug, Clone)]
pub enum EncoderCfg {
VPX(VpxEncoderConfig),
HW(HwEncoderConfig),
}
pub trait EncoderApi {
fn new(cfg: EncoderCfg) -> ResultType<Self>
where
Self: Sized;
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<Message>;
fn use_yuv(&self) -> bool;
}
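VpxEncoder and HwEncoder both implement this trait; purely as an illustration of the contract (not part of the commit), a do-nothing backend would look like:
struct NullEncoder;
impl EncoderApi for NullEncoder {
    fn new(_cfg: EncoderCfg) -> ResultType<Self> {
        Ok(NullEncoder)
    }
    fn encode_to_message(&mut self, _frame: &[u8], _ms: i64) -> ResultType<Message> {
        Err(anyhow!("NullEncoder never produces a frame"))
    }
    fn use_yuv(&self) -> bool {
        true // ask the capturer to hand over I420 instead of BGRA
    }
}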
pub struct DecoderCfg {
pub vpx: VpxDecoderConfig,
}
pub struct Encoder {
pub codec: Box<dyn EncoderApi>,
}
impl Deref for Encoder {
type Target = Box<dyn EncoderApi>;
fn deref(&self) -> &Self::Target {
&self.codec
}
}
impl DerefMut for Encoder {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.codec
}
}
pub struct Decoder {
vpx: VpxDecoder,
#[cfg(feature = "hwcodec")]
hw: HwDecoders,
#[cfg(feature = "hwcodec")]
i420: Vec<u8>,
}
#[derive(Debug, Clone)]
pub enum EncoderUpdate {
State(VideoCodecState),
Remove,
DisableHwIfNotExist,
}
impl Encoder {
pub fn new(config: EncoderCfg) -> ResultType<Encoder> {
log::info!("new encoder:{:?}", config);
match config {
EncoderCfg::VPX(_) => Ok(Encoder {
codec: Box::new(VpxEncoder::new(config)?),
}),
#[cfg(feature = "hwcodec")]
EncoderCfg::HW(_) => match HwEncoder::new(config) {
Ok(hw) => Ok(Encoder {
codec: Box::new(hw),
}),
Err(e) => {
HwEncoder::best(true, true);
Err(e)
}
},
#[cfg(not(feature = "hwcodec"))]
_ => Err(anyhow!("unsupported encoder type")),
}
}
// TODO
pub fn update_video_encoder(id: i32, update: EncoderUpdate) {
log::info!("encoder update: {:?}", update);
#[cfg(feature = "hwcodec")]
{
let mut states = PEER_DECODER_STATES.lock().unwrap();
match update {
EncoderUpdate::State(state) => {
states.insert(id, state);
}
EncoderUpdate::Remove => {
states.remove(&id);
}
EncoderUpdate::DisableHwIfNotExist => {
if !states.contains_key(&id) {
states.insert(id, VideoCodecState::default());
}
}
}
let current_encoder_name = HwEncoder::current_name();
if states.len() > 0 {
let (best, _) = HwEncoder::best(false, true);
let enabled_h264 =
best.h264.is_some() && states.len() > 0 && states.iter().all(|(_, s)| s.H264);
let enabled_h265 =
best.h265.is_some() && states.len() > 0 && states.iter().all(|(_, s)| s.H265);
// score encoder
let mut score_vpx = SCORE_VPX;
let mut score_h264 = best.h264.as_ref().map_or(0, |c| c.score);
let mut score_h265 = best.h265.as_ref().map_or(0, |c| c.score);
// score decoder
score_vpx += states.iter().map(|s| s.1.ScoreVpx).sum::<i32>();
if enabled_h264 {
score_h264 += states.iter().map(|s| s.1.ScoreH264).sum::<i32>();
}
if enabled_h265 {
score_h265 += states.iter().map(|s| s.1.ScoreH265).sum::<i32>();
}
if enabled_h265 && score_h265 >= score_vpx && score_h265 >= score_h264 {
*current_encoder_name.lock().unwrap() = Some(best.h265.unwrap().name);
} else if enabled_h264 && score_h264 >= score_vpx && score_h264 >= score_h265 {
*current_encoder_name.lock().unwrap() = Some(best.h264.unwrap().name);
} else {
*current_encoder_name.lock().unwrap() = None;
}
log::info!(
"connection count:{}, h264:{}, h265:{}, score: vpx({}), h264({}), h265({}), set current encoder name {:?}",
states.len(),
enabled_h264,
enabled_h265,
score_vpx,
score_h264,
score_h265,
current_encoder_name.lock().unwrap()
)
} else {
*current_encoder_name.lock().unwrap() = None;
}
}
#[cfg(not(feature = "hwcodec"))]
{
let _ = id;
let _ = update;
}
}
#[inline]
pub fn current_hw_encoder_name() -> Option<String> {
#[cfg(feature = "hwcodec")]
return HwEncoder::current_name().lock().unwrap().clone();
#[cfg(not(feature = "hwcodec"))]
return None;
}
}
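The selection above is a plain sum-and-compare: each candidate's encoder score is added to the decoder scores reported by every connected peer, and the highest total wins, with VP9 as the fallback. A worked example with assumed numbers and one peer connected:
// encoder side: vpx = 90 (SCORE_VPX), h264 = 94, h265 = 95 (assumed)
// peer reports: ScoreVpx = 90, ScoreH264 = 92, ScoreH265 = 93 (assumed)
let score_vpx = 90 + 90; // 180
let score_h264 = 94 + 92; // 186
let score_h265 = 95 + 93; // 188
// h265 is enabled and 188 >= 180 && 188 >= 186, so the H265 encoder is picked;
// had the peer reported H265 = false, h264 would win with 186 >= 180.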
#[cfg(feature = "hwcodec")]
impl Drop for Decoder {
fn drop(&mut self) {
*MY_DECODER_STATE.lock().unwrap() = VideoCodecState {
ScoreVpx: SCORE_VPX,
..Default::default()
};
}
}
impl Decoder {
pub fn video_codec_state() -> VideoCodecState {
// video_codec_state is maintained by the creation and destruction of the Decoder.
// It is guaranteed to be read only after a Decoder has been created.
#[cfg(feature = "hwcodec")]
return MY_DECODER_STATE.lock().unwrap().clone();
#[cfg(not(feature = "hwcodec"))]
VideoCodecState {
ScoreVpx: SCORE_VPX,
..Default::default()
}
}
pub fn new(config: DecoderCfg) -> Decoder {
let vpx = VpxDecoder::new(config.vpx).unwrap();
let decoder = Decoder {
vpx,
#[cfg(feature = "hwcodec")]
hw: HwDecoder::new_decoders(),
#[cfg(feature = "hwcodec")]
i420: vec![],
};
#[cfg(feature = "hwcodec")]
{
let mut state = MY_DECODER_STATE.lock().unwrap();
state.ScoreVpx = SCORE_VPX;
state.H264 = decoder.hw.h264.is_some();
state.ScoreH264 = decoder.hw.h264.as_ref().map_or(0, |d| d.info.score);
state.H265 = decoder.hw.h265.is_some();
state.ScoreH265 = decoder.hw.h265.as_ref().map_or(0, |d| d.info.score);
}
decoder
}
pub fn handle_video_frame(
&mut self,
frame: &video_frame::Union,
rgb: &mut Vec<u8>,
) -> ResultType<bool> {
match frame {
video_frame::Union::vp9s(vp9s) => {
Decoder::handle_vp9s_video_frame(&mut self.vpx, vp9s, rgb)
}
#[cfg(feature = "hwcodec")]
video_frame::Union::h264s(h264s) => {
if let Some(decoder) = &mut self.hw.h264 {
Decoder::handle_h264s_video_frame(decoder, h264s, rgb, &mut self.i420)
} else {
Err(anyhow!("don't support h264!"))
}
}
#[cfg(feature = "hwcodec")]
video_frame::Union::h265s(h265s) => {
if let Some(decoder) = &mut self.hw.h265 {
Decoder::handle_h265s_video_frame(decoder, h265s, rgb, &mut self.i420)
} else {
Err(anyhow!("don't support h265!"))
}
}
_ => Err(anyhow!("unsupported video frame type!")),
}
}
fn handle_vp9s_video_frame(
decoder: &mut VpxDecoder,
vp9s: &VP9s,
rgb: &mut Vec<u8>,
) -> ResultType<bool> {
let mut last_frame = Image::new();
for vp9 in vp9s.frames.iter() {
for frame in decoder.decode(&vp9.data)? {
drop(last_frame);
last_frame = frame;
}
}
for frame in decoder.flush()? {
drop(last_frame);
last_frame = frame;
}
if last_frame.is_null() {
Ok(false)
} else {
last_frame.rgb(1, true, rgb);
Ok(true)
}
}
#[cfg(feature = "hwcodec")]
fn handle_h264s_video_frame(
decoder: &mut HwDecoder,
h264s: &H264s,
rgb: &mut Vec<u8>,
i420: &mut Vec<u8>,
) -> ResultType<bool> {
let mut ret = false;
for h264 in h264s.h264s.iter() {
for image in decoder.decode(&h264.data)? {
// TODO: just process the last frame
if image.bgra(rgb, i420).is_ok() {
ret = true;
}
}
}
return Ok(ret);
}
#[cfg(feature = "hwcodec")]
fn handle_h265s_video_frame(
decoder: &mut HwDecoder,
h265s: &H265s,
rgb: &mut Vec<u8>,
i420: &mut Vec<u8>,
) -> ResultType<bool> {
let mut ret = false;
for h265 in h265s.h265s.iter() {
for image in decoder.decode(&h265.data)? {
// TODO: just process the last frame
if image.bgra(rgb, i420).is_ok() {
ret = true;
}
}
}
return Ok(ret);
}
}
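Putting the pieces together, a hedged sketch of the client-side decode path (the incoming VideoFrame vf is assumed to come off the network):
let mut decoder = Decoder::new(DecoderCfg {
    vpx: VpxDecoderConfig {
        codec: VpxVideoCodecId::VP9,
        num_threads: 1,
    },
});
let mut rgb = Vec::new();
if let Some(frame) = &vf.union {
    // dispatches to VP9, H264 or H265 depending on the frame variant
    let _ = decoder.handle_video_frame(frame, &mut rgb);
}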

View File

@@ -49,6 +49,17 @@ extern "C" {
height: c_int,
) -> c_int;
pub fn ARGBToNV12(
src_bgra: *const u8,
src_stride_bgra: c_int,
dst_y: *mut u8,
dst_stride_y: c_int,
dst_uv: *mut u8,
dst_stride_uv: c_int,
width: c_int,
height: c_int,
) -> c_int;
pub fn NV12ToI420(
src_y: *const u8,
src_stride_y: c_int,
@@ -91,6 +102,17 @@
width: c_int,
height: c_int,
) -> c_int;
pub fn NV12ToARGB(
src_y: *const u8,
src_stride_y: c_int,
src_uv: *const u8,
src_stride_uv: c_int,
dst_rgba: *mut u8,
dst_stride_rgba: c_int,
width: c_int,
height: c_int,
) -> c_int;
}
// https://github.com/webmproject/libvpx/blob/master/vpx/src/vpx_image.c
@@ -220,3 +242,192 @@ pub unsafe fn nv12_to_i420(
height as _,
);
}
#[cfg(feature = "hwcodec")]
pub mod hw {
use hbb_common::{anyhow::anyhow, ResultType};
use hwcodec::{ffmpeg::ffmpeg_linesize_offset_length, AVPixelFormat};
pub fn hw_bgra_to_i420(
width: usize,
height: usize,
stride: &[i32],
offset: &[i32],
length: i32,
src: &[u8],
dst: &mut Vec<u8>,
) {
let stride_y = stride[0] as usize;
let stride_u = stride[1] as usize;
let stride_v = stride[2] as usize;
let offset_u = offset[0] as usize;
let offset_v = offset[1] as usize;
dst.resize(length as _, 0);
let dst_y = dst.as_mut_ptr();
let dst_u = dst[offset_u..].as_mut_ptr();
let dst_v = dst[offset_v..].as_mut_ptr();
unsafe {
super::ARGBToI420(
src.as_ptr(),
(src.len() / height) as _,
dst_y,
stride_y as _,
dst_u,
stride_u as _,
dst_v,
stride_v as _,
width as _,
height as _,
);
}
}
pub fn hw_bgra_to_nv12(
width: usize,
height: usize,
stride: &[i32],
offset: &[i32],
length: i32,
src: &[u8],
dst: &mut Vec<u8>,
) {
let stride_y = stride[0] as usize;
let stride_uv = stride[1] as usize;
let offset_uv = offset[0] as usize;
dst.resize(length as _, 0);
let dst_y = dst.as_mut_ptr();
let dst_uv = dst[offset_uv..].as_mut_ptr();
unsafe {
super::ARGBToNV12(
src.as_ptr(),
(src.len() / height) as _,
dst_y,
stride_y as _,
dst_uv,
stride_uv as _,
width as _,
height as _,
);
}
}
#[cfg(target_os = "windows")]
pub fn hw_nv12_to_bgra(
width: usize,
height: usize,
src_y: &[u8],
src_uv: &[u8],
src_stride_y: usize,
src_stride_uv: usize,
dst: &mut Vec<u8>,
i420: &mut Vec<u8>,
align: usize,
) -> ResultType<()> {
let nv12_stride_y = src_stride_y;
let nv12_stride_uv = src_stride_uv;
if let Ok((linesize_i420, offset_i420, i420_len)) =
ffmpeg_linesize_offset_length(AVPixelFormat::AV_PIX_FMT_YUV420P, width, height, align)
{
dst.resize(width * height * 4, 0);
let i420_stride_y = linesize_i420[0];
let i420_stride_u = linesize_i420[1];
let i420_stride_v = linesize_i420[2];
i420.resize(i420_len as _, 0);
unsafe {
let i420_offset_y = i420.as_ptr().add(0) as _;
let i420_offset_u = i420.as_ptr().add(offset_i420[0] as _) as _;
let i420_offset_v = i420.as_ptr().add(offset_i420[1] as _) as _;
super::NV12ToI420(
src_y.as_ptr(),
nv12_stride_y as _,
src_uv.as_ptr(),
nv12_stride_uv as _,
i420_offset_y,
i420_stride_y,
i420_offset_u,
i420_stride_u,
i420_offset_v,
i420_stride_v,
width as _,
height as _,
);
super::I420ToARGB(
i420_offset_y,
i420_stride_y,
i420_offset_u,
i420_stride_u,
i420_offset_v,
i420_stride_v,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
);
return Ok(());
};
}
return Err(anyhow!("get linesize offset failed"));
}
#[cfg(not(target_os = "windows"))]
pub fn hw_nv12_to_bgra(
width: usize,
height: usize,
src_y: &[u8],
src_uv: &[u8],
src_stride_y: usize,
src_stride_uv: usize,
dst: &mut Vec<u8>,
) -> ResultType<()> {
dst.resize(width * height * 4, 0);
unsafe {
match super::NV12ToARGB(
src_y.as_ptr(),
src_stride_y as _,
src_uv.as_ptr(),
src_stride_uv as _,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
) {
0 => Ok(()),
_ => Err(anyhow!("NV12ToARGB failed")),
}
}
}
pub fn hw_i420_to_bgra(
width: usize,
height: usize,
src_y: &[u8],
src_u: &[u8],
src_v: &[u8],
src_stride_y: usize,
src_stride_u: usize,
src_stride_v: usize,
dst: &mut Vec<u8>,
) {
let src_y = src_y.as_ptr();
let src_u = src_u.as_ptr();
let src_v = src_v.as_ptr();
dst.resize(width * height * 4, 0);
unsafe {
super::I420ToARGB(
src_y,
src_stride_y as _,
src_u,
src_stride_u as _,
src_v,
src_stride_v as _,
dst.as_mut_ptr(),
(width * 4) as _,
width as _,
height as _,
);
};
}
}
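These helpers take ffmpeg-style linesize/offset tables rather than assuming tightly packed planes. For reference, a packed, unaligned I420 buffer lays out as follows (plain arithmetic; ffmpeg_linesize_offset_length may return larger strides and offsets when align > 0):
let (w, h) = (1920usize, 1080usize);
let offset_u = w * h;                        // Y plane ends at 2_073_600
let offset_v = offset_u + (w / 2) * (h / 2); // U plane is 518_400 bytes, V starts at 2_592_000
let length = w * h * 3 / 2;                  // total buffer: 3_110_400 bytes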

View File

@@ -0,0 +1,380 @@
use crate::{
codec::{EncoderApi, EncoderCfg},
hw, HW_STRIDE_ALIGN,
};
use hbb_common::{
anyhow::{anyhow, Context},
config::HwCodecConfig,
lazy_static, log,
message_proto::{H264s, H265s, Message, VideoFrame, H264, H265},
ResultType,
};
use hwcodec::{
decode::{DecodeContext, DecodeFrame, Decoder},
encode::{EncodeContext, EncodeFrame, Encoder},
ffmpeg::{CodecInfo, CodecInfos, DataFormat},
AVPixelFormat,
Quality::{self, *},
RateContorl::{self, *},
};
use std::{
collections::HashMap,
sync::{Arc, Mutex},
};
lazy_static::lazy_static! {
static ref HW_ENCODER_NAME: Arc<Mutex<Option<String>>> = Default::default();
}
const CFG_KEY_ENCODER: &str = "bestHwEncoders";
const CFG_KEY_DECODER: &str = "bestHwDecoders";
const DEFAULT_PIXFMT: AVPixelFormat = AVPixelFormat::AV_PIX_FMT_YUV420P;
const DEFAULT_TIME_BASE: [i32; 2] = [1, 30];
const DEFAULT_GOP: i32 = 60;
const DEFAULT_HW_QUALITY: Quality = Quality_Default;
const DEFAULT_RC: RateContorl = RC_DEFAULT;
pub struct HwEncoder {
encoder: Encoder,
yuv: Vec<u8>,
pub format: DataFormat,
pub pixfmt: AVPixelFormat,
}
impl EncoderApi for HwEncoder {
fn new(cfg: EncoderCfg) -> ResultType<Self>
where
Self: Sized,
{
match cfg {
EncoderCfg::HW(config) => {
let (bitrate, timebase, gop, quality, rc) =
HwEncoder::convert_quality(&config.codec_name, config.bitrate_ratio);
let ctx = EncodeContext {
name: config.codec_name.clone(),
width: config.width as _,
height: config.height as _,
pixfmt: DEFAULT_PIXFMT,
align: HW_STRIDE_ALIGN as _,
bitrate,
timebase,
gop,
quality,
rc,
};
let format = match Encoder::format_from_name(config.codec_name.clone()) {
Ok(format) => format,
Err(_) => {
return Err(anyhow!(format!(
"failed to get format from name:{}",
config.codec_name
)))
}
};
match Encoder::new(ctx.clone()) {
Ok(encoder) => Ok(HwEncoder {
encoder,
yuv: vec![],
format,
pixfmt: ctx.pixfmt,
}),
Err(_) => Err(anyhow!(format!("Failed to create encoder"))),
}
}
_ => Err(anyhow!("encoder type mismatch")),
}
}
fn encode_to_message(
&mut self,
frame: &[u8],
_ms: i64,
) -> ResultType<hbb_common::message_proto::Message> {
let mut msg_out = Message::new();
let mut vf = VideoFrame::new();
match self.format {
DataFormat::H264 => {
let mut h264s = Vec::new();
for frame in self.encode(frame).with_context(|| "Failed to encode")? {
h264s.push(H264 {
data: frame.data,
pts: frame.pts as _,
..Default::default()
});
}
if h264s.len() > 0 {
vf.set_h264s(H264s {
h264s: h264s.into(),
..Default::default()
});
msg_out.set_video_frame(vf);
Ok(msg_out)
} else {
Err(anyhow!("no valid frame"))
}
}
DataFormat::H265 => {
let mut h265s = Vec::new();
for frame in self.encode(frame).with_context(|| "Failed to encode")? {
h265s.push(H265 {
data: frame.data,
pts: frame.pts,
..Default::default()
});
}
if h265s.len() > 0 {
vf.set_h265s(H265s {
h265s,
..Default::default()
});
msg_out.set_video_frame(vf);
Ok(msg_out)
} else {
Err(anyhow!("no valid frame"))
}
}
}
}
fn use_yuv(&self) -> bool {
false
}
}
impl HwEncoder {
/// Get the best available encoders.
///
/// # Parameters
/// `force_reset`: force a refresh of the cached config.
/// `write`: write the probed result to the config file.
///
/// # Returns
/// `CodecInfos`: the encoder infos.
/// `bool`: whether the config was refreshed.
pub fn best(force_reset: bool, write: bool) -> (CodecInfos, bool) {
let config = get_config(CFG_KEY_ENCODER);
if !force_reset && config.is_ok() {
(config.unwrap(), false)
} else {
let ctx = EncodeContext {
name: String::from(""),
width: 1920,
height: 1080,
pixfmt: DEFAULT_PIXFMT,
align: HW_STRIDE_ALIGN as _,
bitrate: 0,
timebase: DEFAULT_TIME_BASE,
gop: DEFAULT_GOP,
quality: DEFAULT_HW_QUALITY,
rc: DEFAULT_RC,
};
let encoders = CodecInfo::score(Encoder::avaliable_encoders(ctx));
if write {
let _ = set_config(CFG_KEY_ENCODER, &encoders)
.map_err(|e| log::error!("{:?}", e))
.ok();
}
(encoders, true)
}
}
pub fn current_name() -> Arc<Mutex<Option<String>>> {
HW_ENCODER_NAME.clone()
}
pub fn encode(&mut self, bgra: &[u8]) -> ResultType<Vec<EncodeFrame>> {
match self.pixfmt {
AVPixelFormat::AV_PIX_FMT_YUV420P => hw::hw_bgra_to_i420(
self.encoder.ctx.width as _,
self.encoder.ctx.height as _,
&self.encoder.linesize,
&self.encoder.offset,
self.encoder.length,
bgra,
&mut self.yuv,
),
AVPixelFormat::AV_PIX_FMT_NV12 => hw::hw_bgra_to_nv12(
self.encoder.ctx.width as _,
self.encoder.ctx.height as _,
&self.encoder.linesize,
&self.encoder.offset,
self.encoder.length,
bgra,
&mut self.yuv,
),
}
match self.encoder.encode(&self.yuv) {
Ok(v) => {
let mut data = Vec::<EncodeFrame>::new();
data.append(v);
Ok(data)
}
Err(_) => Ok(Vec::<EncodeFrame>::new()),
}
}
fn convert_quality(
name: &str,
bitrate_ratio: i32,
) -> (i32, [i32; 2], i32, Quality, RateContorl) {
// TODO
let mut bitrate = if name.contains("qsv") {
1_000_000
} else {
2_000_000
};
if bitrate_ratio > 0 && bitrate_ratio <= 200 {
bitrate = bitrate * bitrate_ratio / 100;
};
(
bitrate,
DEFAULT_TIME_BASE,
DEFAULT_GOP,
DEFAULT_HW_QUALITY,
DEFAULT_RC,
)
}
}
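convert_quality currently fixes everything except the bitrate, which is scaled by the user-selected ratio; a worked example with assumed inputs:
let base = 1_000_000; // codec name contains "qsv"; all others default to 2_000_000
let bitrate_ratio = 150; // user picked 150%
let bitrate = base * bitrate_ratio / 100; // 1_500_000 bps
// ratios outside (0, 200] leave the base bitrate unchanged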
pub struct HwDecoder {
decoder: Decoder,
pub info: CodecInfo,
}
pub struct HwDecoders {
pub h264: Option<HwDecoder>,
pub h265: Option<HwDecoder>,
}
impl HwDecoder {
/// See HwEncoder::best
fn best(force_reset: bool, write: bool) -> (CodecInfos, bool) {
let config = get_config(CFG_KEY_DECODER);
if !force_reset && config.is_ok() {
(config.unwrap(), false)
} else {
let decoders = CodecInfo::score(Decoder::avaliable_decoders());
if write {
set_config(CFG_KEY_DECODER, &decoders)
.map_err(|e| log::error!("{:?}", e))
.ok();
}
(decoders, true)
}
}
pub fn new_decoders() -> HwDecoders {
let (best, _) = HwDecoder::best(false, true);
let mut h264: Option<HwDecoder> = None;
let mut h265: Option<HwDecoder> = None;
let mut fail = false;
if let Some(info) = best.h264 {
h264 = HwDecoder::new(info).ok();
if h264.is_none() {
fail = true;
}
}
if let Some(info) = best.h265 {
h265 = HwDecoder::new(info).ok();
if h265.is_none() {
fail = true;
}
}
if fail {
HwDecoder::best(true, true);
}
HwDecoders { h264, h265 }
}
pub fn new(info: CodecInfo) -> ResultType<Self> {
let ctx = DecodeContext {
name: info.name.clone(),
device_type: info.hwdevice.clone(),
};
match Decoder::new(ctx) {
Ok(decoder) => Ok(HwDecoder { decoder, info }),
Err(_) => Err(anyhow!(format!("Failed to create decoder"))),
}
}
pub fn decode(&mut self, data: &[u8]) -> ResultType<Vec<HwDecoderImage>> {
match self.decoder.decode(data) {
Ok(v) => Ok(v.iter().map(|f| HwDecoderImage { frame: f }).collect()),
Err(_) => Ok(vec![]),
}
}
}
pub struct HwDecoderImage<'a> {
frame: &'a DecodeFrame,
}
impl HwDecoderImage<'_> {
pub fn bgra(&self, bgra: &mut Vec<u8>, i420: &mut Vec<u8>) -> ResultType<()> {
let frame = self.frame;
match frame.pixfmt {
AVPixelFormat::AV_PIX_FMT_NV12 => hw::hw_nv12_to_bgra(
frame.width as _,
frame.height as _,
&frame.data[0],
&frame.data[1],
frame.linesize[0] as _,
frame.linesize[1] as _,
bgra,
i420,
HW_STRIDE_ALIGN,
),
AVPixelFormat::AV_PIX_FMT_YUV420P => {
hw::hw_i420_to_bgra(
frame.width as _,
frame.height as _,
&frame.data[0],
&frame.data[1],
&frame.data[2],
frame.linesize[0] as _,
frame.linesize[1] as _,
frame.linesize[2] as _,
bgra,
);
return Ok(());
}
}
}
}
fn get_config(k: &str) -> ResultType<CodecInfos> {
let v = HwCodecConfig::get_option(k);
match CodecInfos::deserialize(&v) {
Ok(v) => Ok(v),
Err(_) => Err(anyhow!("Failed to get config:{}", k)),
}
}
fn set_config(k: &str, v: &CodecInfos) -> ResultType<()> {
match v.serialize() {
Ok(v) => {
HwCodecConfig::set_option(k.to_owned(), v);
Ok(())
}
Err(_) => Err(anyhow!("Failed to set config:{}", k)),
}
}
pub fn check_config() -> Option<HashMap<String, String>> {
let (encoders, update_encoders) = HwEncoder::best(false, false);
let (decoders, update_decoders) = HwDecoder::best(false, false);
if update_encoders || update_decoders {
if let Ok(encoders) = encoders.serialize() {
if let Ok(decoders) = decoders.serialize() {
return Some(HashMap::from([
(CFG_KEY_ENCODER.to_owned(), encoders),
(CFG_KEY_DECODER.to_owned(), decoders),
]));
}
}
log::error!("Failed to serialize codec info");
}
None
}

View File

@@ -1,4 +1,4 @@
pub use self::codec::*;
pub use self::vpxcodec::*;
cfg_if! {
if #[cfg(quartz)] {
@@ -29,8 +29,12 @@ cfg_if! {
pub mod codec;
mod convert;
#[cfg(feature = "hwcodec")]
pub mod hwcodec;
pub mod vpxcodec;
pub use self::convert::*;
pub const STRIDE_ALIGN: usize = 64; // commonly used by libvpx vpx_img_alloc callers
pub const HW_STRIDE_ALIGN: usize = 0; // recommended by av_frame_get_buffer
mod vpx;
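STRIDE_ALIGN rounds each plane row up to a multiple of 64 bytes, matching libvpx's vpx_img_alloc; the rounding formula used throughout this commit is:
let w = 1366usize;
let aligned = (w + STRIDE_ALIGN - 1) & !(STRIDE_ALIGN - 1); // 1408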

View File

@@ -0,0 +1,606 @@
// https://github.com/astraw/vpx-encode
// https://github.com/astraw/env-libvpx-sys
// https://github.com/rust-av/vpx-rs/blob/master/src/decoder.rs
use hbb_common::anyhow::{anyhow, Context};
use hbb_common::message_proto::{Message, VP9s, VideoFrame, VP9};
use hbb_common::ResultType;
use crate::codec::EncoderApi;
use crate::STRIDE_ALIGN;
use super::vpx::{vp8e_enc_control_id::*, vpx_codec_err_t::*, *};
use std::os::raw::{c_int, c_uint};
use std::{ptr, slice};
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum VpxVideoCodecId {
VP8,
VP9,
}
impl Default for VpxVideoCodecId {
fn default() -> VpxVideoCodecId {
VpxVideoCodecId::VP9
}
}
pub struct VpxEncoder {
ctx: vpx_codec_ctx_t,
width: usize,
height: usize,
}
pub struct VpxDecoder {
ctx: vpx_codec_ctx_t,
}
#[derive(Debug)]
pub enum Error {
FailedCall(String),
BadPtr(String),
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
write!(f, "{:?}", self)
}
}
impl std::error::Error for Error {}
pub type Result<T> = std::result::Result<T, Error>;
macro_rules! call_vpx {
($x:expr) => {{
let result = unsafe { $x }; // original expression
let result_int = unsafe { std::mem::transmute::<_, i32>(result) };
if result_int != 0 {
return Err(Error::FailedCall(format!(
"errcode={} {}:{}:{}:{}",
result_int,
module_path!(),
file!(),
line!(),
column!()
))
.into());
}
result
}};
}
macro_rules! call_vpx_ptr {
($x:expr) => {{
let result = unsafe { $x }; // original expression
let result_int = unsafe { std::mem::transmute::<_, isize>(result) };
if result_int == 0 {
return Err(Error::BadPtr(format!(
"errcode={} {}:{}:{}:{}",
result_int,
module_path!(),
file!(),
line!(),
column!()
))
.into());
}
result
}};
}
impl EncoderApi for VpxEncoder {
fn new(cfg: crate::codec::EncoderCfg) -> ResultType<Self>
where
Self: Sized,
{
match cfg {
crate::codec::EncoderCfg::VPX(config) => {
let i;
if cfg!(feature = "VP8") {
i = match config.codec {
VpxVideoCodecId::VP8 => call_vpx_ptr!(vpx_codec_vp8_cx()),
VpxVideoCodecId::VP9 => call_vpx_ptr!(vpx_codec_vp9_cx()),
};
} else {
i = call_vpx_ptr!(vpx_codec_vp9_cx());
}
let mut c = unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
call_vpx!(vpx_codec_enc_config_default(i, &mut c, 0));
// https://www.webmproject.org/docs/encoder-parameters/
// default: c.rc_min_quantizer = 0, c.rc_max_quantizer = 63
// try rc_resize_allowed later
c.g_w = config.width;
c.g_h = config.height;
c.g_timebase.num = config.timebase[0];
c.g_timebase.den = config.timebase[1];
c.rc_target_bitrate = config.bitrate;
c.rc_undershoot_pct = 95;
c.rc_dropframe_thresh = 25;
if config.rc_min_quantizer > 0 {
c.rc_min_quantizer = config.rc_min_quantizer;
}
if config.rc_max_quantizer > 0 {
c.rc_max_quantizer = config.rc_max_quantizer;
}
let mut speed = config.speed;
if speed <= 0 {
speed = 6;
}
c.g_threads = if config.num_threads == 0 {
num_cpus::get() as _
} else {
config.num_threads
};
c.g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT;
// https://developers.google.com/media/vp9/bitrate-modes/
// Constant Bitrate mode (CBR) is recommended for live streaming with VP9.
c.rc_end_usage = vpx_rc_mode::VPX_CBR;
// c.kf_min_dist = 0;
// c.kf_max_dist = 999999;
c.kf_mode = vpx_kf_mode::VPX_KF_DISABLED; // reduce bandwidth a lot
/*
The VPX encoder supports two-pass encoding for rate-control purposes;
two-pass encoding yields the best PSNR at a given bitrate.
*/
let mut ctx = Default::default();
call_vpx!(vpx_codec_enc_init_ver(
&mut ctx,
i,
&c,
0,
VPX_ENCODER_ABI_VERSION as _
));
if config.codec == VpxVideoCodecId::VP9 {
// set encoder internal speed settings
// in ffmpeg, it is --speed option
/*
set to 0 or a positive value 1-16, the codec will try to adapt its
complexity depending on the time it spends encoding. Increasing this
number will make the speed go up and the quality go down.
Negative values mean strict enforcement of this
while positive values are adaptive
*/
/* https://developers.google.com/media/vp9/live-encoding
Speed 5 to 8 should be used for live / real-time encoding.
Lower numbers (5 or 6) are higher quality but require more CPU power.
Higher numbers (7 or 8) will be lower quality but more manageable for lower latency
use cases and also for lower CPU power devices such as mobile.
*/
call_vpx!(vpx_codec_control_(&mut ctx, VP8E_SET_CPUUSED as _, speed,));
// set row level multi-threading
/*
as some people in comments and below have already commented,
more recent versions of libvpx support -row-mt 1 to enable tile row
multi-threading. This can increase the number of tiles by up to 4x in VP9
(since the max number of tile rows is 4, regardless of video height).
To enable this, use -tile-rows N where N is the number of tile rows in
log2 units (so -tile-rows 1 means 2 tile rows and -tile-rows 2 means 4 tile
rows). The total number of active threads will then be equal to
$tile_rows * $tile_columns
*/
call_vpx!(vpx_codec_control_(
&mut ctx,
VP9E_SET_ROW_MT as _,
1 as c_int
));
call_vpx!(vpx_codec_control_(
&mut ctx,
VP9E_SET_TILE_COLUMNS as _,
4 as c_int
));
}
Ok(Self {
ctx,
width: config.width as _,
height: config.height as _,
})
}
_ => Err(anyhow!("encoder type mismatch")),
}
}
fn encode_to_message(&mut self, frame: &[u8], ms: i64) -> ResultType<Message> {
let mut frames = Vec::new();
for ref frame in self
.encode(ms, frame, STRIDE_ALIGN)
.with_context(|| "Failed to encode")?
{
frames.push(VpxEncoder::create_frame(frame));
}
for ref frame in self.flush().with_context(|| "Failed to flush")? {
frames.push(VpxEncoder::create_frame(frame));
}
// to-do: flush periodically, e.g. 1 second
if frames.len() > 0 {
Ok(VpxEncoder::create_msg(frames))
} else {
Err(anyhow!("no valid frame"))
}
}
fn use_yuv(&self) -> bool {
true
}
}
impl VpxEncoder {
pub fn encode(&mut self, pts: i64, data: &[u8], stride_align: usize) -> Result<EncodeFrames> {
assert!(2 * data.len() >= 3 * self.width * self.height);
let mut image = Default::default();
call_vpx_ptr!(vpx_img_wrap(
&mut image,
vpx_img_fmt::VPX_IMG_FMT_I420,
self.width as _,
self.height as _,
stride_align as _,
data.as_ptr() as _,
));
call_vpx!(vpx_codec_encode(
&mut self.ctx,
&image,
pts as _,
1, // Duration
0, // Flags
VPX_DL_REALTIME as _,
));
Ok(EncodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
/// Notify the encoder to return any pending packets
pub fn flush(&mut self) -> Result<EncodeFrames> {
call_vpx!(vpx_codec_encode(
&mut self.ctx,
ptr::null(),
-1, // PTS
1, // Duration
0, // Flags
VPX_DL_REALTIME as _,
));
Ok(EncodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
#[inline]
fn create_msg(vp9s: Vec<VP9>) -> Message {
let mut msg_out = Message::new();
let mut vf = VideoFrame::new();
vf.set_vp9s(VP9s {
frames: vp9s.into(),
..Default::default()
});
msg_out.set_video_frame(vf);
msg_out
}
#[inline]
fn create_frame(frame: &EncodeFrame) -> VP9 {
VP9 {
data: frame.data.to_vec(),
key: frame.key,
pts: frame.pts,
..Default::default()
}
}
}
impl Drop for VpxEncoder {
fn drop(&mut self) {
unsafe {
let result = vpx_codec_destroy(&mut self.ctx);
if result != VPX_CODEC_OK {
panic!("failed to destroy vpx codec");
}
}
}
}
#[derive(Clone, Copy, Debug)]
pub struct EncodeFrame<'a> {
/// Compressed data.
pub data: &'a [u8],
/// Whether the frame is a keyframe.
pub key: bool,
/// Presentation timestamp (in timebase units).
pub pts: i64,
}
#[derive(Clone, Copy, Debug)]
pub struct VpxEncoderConfig {
/// The width (in pixels).
pub width: c_uint,
/// The height (in pixels).
pub height: c_uint,
/// The timebase numerator and denominator (in seconds).
pub timebase: [c_int; 2],
/// The target bitrate (in kilobits per second).
pub bitrate: c_uint,
/// The codec
pub codec: VpxVideoCodecId,
pub rc_min_quantizer: u32,
pub rc_max_quantizer: u32,
pub speed: i32,
pub num_threads: u32,
}
#[derive(Clone, Copy, Debug)]
pub struct VpxDecoderConfig {
pub codec: VpxVideoCodecId,
pub num_threads: u32,
}
pub struct EncodeFrames<'a> {
ctx: &'a mut vpx_codec_ctx_t,
iter: vpx_codec_iter_t,
}
impl<'a> Iterator for EncodeFrames<'a> {
type Item = EncodeFrame<'a>;
fn next(&mut self) -> Option<Self::Item> {
loop {
unsafe {
let pkt = vpx_codec_get_cx_data(self.ctx, &mut self.iter);
if pkt.is_null() {
return None;
} else if (*pkt).kind == vpx_codec_cx_pkt_kind::VPX_CODEC_CX_FRAME_PKT {
let f = &(*pkt).data.frame;
return Some(Self::Item {
data: slice::from_raw_parts(f.buf as _, f.sz as _),
key: (f.flags & VPX_FRAME_IS_KEY) != 0,
pts: f.pts,
});
} else {
// Ignore the packet.
}
}
}
}
}
impl VpxDecoder {
/// Create a new decoder
///
/// # Errors
///
/// The function may fail if the underlying libvpx does not provide
/// the VP9 decoder.
pub fn new(config: VpxDecoderConfig) -> Result<Self> {
// This is sound because `vpx_codec_ctx` is a repr(C) struct without any field that can
// cause UB if uninitialized.
let i;
if cfg!(feature = "VP8") {
i = match config.codec {
VpxVideoCodecId::VP8 => call_vpx_ptr!(vpx_codec_vp8_dx()),
VpxVideoCodecId::VP9 => call_vpx_ptr!(vpx_codec_vp9_dx()),
};
} else {
i = call_vpx_ptr!(vpx_codec_vp9_dx());
}
let mut ctx = Default::default();
let cfg = vpx_codec_dec_cfg_t {
threads: if config.num_threads == 0 {
num_cpus::get() as _
} else {
config.num_threads
},
w: 0,
h: 0,
};
/*
unsafe {
println!("{}", vpx_codec_get_caps(i));
}
*/
call_vpx!(vpx_codec_dec_init_ver(
&mut ctx,
i,
&cfg,
0,
VPX_DECODER_ABI_VERSION as _,
));
Ok(Self { ctx })
}
pub fn decode2rgb(&mut self, data: &[u8], rgba: bool) -> Result<Vec<u8>> {
let mut img = Image::new();
for frame in self.decode(data)? {
drop(img);
img = frame;
}
for frame in self.flush()? {
drop(img);
img = frame;
}
if img.is_null() {
Ok(Vec::new())
} else {
let mut out = Default::default();
img.rgb(1, rgba, &mut out);
Ok(out)
}
}
/// Feed some compressed data to the decoder
///
/// The `data` slice is sent to the decoder
///
/// It matches a call to `vpx_codec_decode`.
pub fn decode(&mut self, data: &[u8]) -> Result<DecodeFrames> {
call_vpx!(vpx_codec_decode(
&mut self.ctx,
data.as_ptr(),
data.len() as _,
ptr::null_mut(),
0,
));
Ok(DecodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
/// Notify the decoder to return any pending frame
pub fn flush(&mut self) -> Result<DecodeFrames> {
call_vpx!(vpx_codec_decode(
&mut self.ctx,
ptr::null(),
0,
ptr::null_mut(),
0
));
Ok(DecodeFrames {
ctx: &mut self.ctx,
iter: ptr::null(),
})
}
}
impl Drop for VpxDecoder {
fn drop(&mut self) {
unsafe {
let result = vpx_codec_destroy(&mut self.ctx);
if result != VPX_CODEC_OK {
panic!("failed to destroy vpx codec");
}
}
}
}
pub struct DecodeFrames<'a> {
ctx: &'a mut vpx_codec_ctx_t,
iter: vpx_codec_iter_t,
}
impl<'a> Iterator for DecodeFrames<'a> {
type Item = Image;
fn next(&mut self) -> Option<Self::Item> {
let img = unsafe { vpx_codec_get_frame(self.ctx, &mut self.iter) };
if img.is_null() {
return None;
} else {
return Some(Image(img));
}
}
}
// https://chromium.googlesource.com/webm/libvpx/+/bali/vpx/src/vpx_image.c
pub struct Image(*mut vpx_image_t);
impl Image {
#[inline]
pub fn new() -> Self {
Self(std::ptr::null_mut())
}
#[inline]
pub fn is_null(&self) -> bool {
self.0.is_null()
}
#[inline]
pub fn width(&self) -> usize {
self.inner().d_w as _
}
#[inline]
pub fn height(&self) -> usize {
self.inner().d_h as _
}
#[inline]
pub fn format(&self) -> vpx_img_fmt_t {
// VPX_IMG_FMT_I420
self.inner().fmt
}
#[inline]
pub fn inner(&self) -> &vpx_image_t {
unsafe { &*self.0 }
}
#[inline]
pub fn stride(&self, iplane: usize) -> i32 {
self.inner().stride[iplane]
}
pub fn rgb(&self, stride_align: usize, rgba: bool, dst: &mut Vec<u8>) {
let h = self.height();
let mut w = self.width();
let bps = if rgba { 4 } else { 3 };
w = (w + stride_align - 1) & !(stride_align - 1);
dst.resize(h * w * bps, 0);
let img = self.inner();
unsafe {
if rgba {
super::I420ToARGB(
img.planes[0],
img.stride[0],
img.planes[1],
img.stride[1],
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
(w * bps) as _,
self.width() as _,
self.height() as _,
);
} else {
super::I420ToRAW(
img.planes[0],
img.stride[0],
img.planes[1],
img.stride[1],
img.planes[2],
img.stride[2],
dst.as_mut_ptr(),
(w * bps) as _,
self.width() as _,
self.height() as _,
);
}
}
}
#[inline]
pub fn data(&self) -> (&[u8], &[u8], &[u8]) {
unsafe {
let img = self.inner();
let h = (img.d_h as usize + 1) & !1;
let n = img.stride[0] as usize * h;
let y = slice::from_raw_parts(img.planes[0], n);
let n = img.stride[1] as usize * (h >> 1);
let u = slice::from_raw_parts(img.planes[1], n);
let v = slice::from_raw_parts(img.planes[2], n);
(y, u, v)
}
}
}
impl Drop for Image {
fn drop(&mut self) {
if !self.0.is_null() {
unsafe { vpx_img_free(self.0) };
}
}
}
unsafe impl Send for vpx_codec_ctx_t {}

View File

@@ -282,7 +282,11 @@ impl CapturerMag {
let y = GetSystemMetrics(SM_YVIRTUALSCREEN);
let w = GetSystemMetrics(SM_CXVIRTUALSCREEN);
let h = GetSystemMetrics(SM_CYVIRTUALSCREEN);
if !(origin.0 == x as _ && origin.1 == y as _ && width == w as _ && height == h as _) {
if !(origin.0 == x as i32
&& origin.1 == y as i32
&& width == w as usize
&& height == h as usize)
{
return Err(Error::new(
ErrorKind::Other,
format!(
@@ -510,10 +514,10 @@ impl CapturerMag {
let y = GetSystemMetrics(SM_YVIRTUALSCREEN);
let w = GetSystemMetrics(SM_CXVIRTUALSCREEN);
let h = GetSystemMetrics(SM_CYVIRTUALSCREEN);
if !(self.rect.left == x as _
&& self.rect.top == y as _
&& self.rect.right == (x + w) as _
&& self.rect.bottom == (y + h) as _)
if !(self.rect.left == x as i32
&& self.rect.top == y as i32
&& self.rect.right == (x + w) as i32
&& self.rect.bottom == (y + h) as i32)
{
return Err(Error::new(
ErrorKind::Other,

View File

@@ -12,6 +12,11 @@ use cpal::{
Device, Host, StreamConfig,
};
use magnum_opus::{Channels::*, Decoder as AudioDecoder};
use scrap::{
codec::{Decoder, DecoderCfg},
VpxDecoderConfig, VpxVideoCodecId,
};
use sha2::{Digest, Sha256};
use uuid::Uuid;
@@ -30,7 +35,6 @@ use hbb_common::{
tokio::time::Duration,
AddrMangle, ResultType, Stream,
};
use scrap::{Decoder, Image, VideoCodecId};
pub use super::lang::*;
pub mod file_trait;
@@ -717,7 +721,12 @@ pub struct VideoHandler {
impl VideoHandler {
pub fn new(latency_controller: Arc<Mutex<LatencyController>>) -> Self {
VideoHandler {
decoder: Decoder::new(VideoCodecId::VP9, (num_cpus::get() / 2) as _).unwrap(),
decoder: Decoder::new(DecoderCfg {
vpx: VpxDecoderConfig {
codec: VpxVideoCodecId::VP9,
num_threads: (num_cpus::get() / 2) as _,
},
}),
latency_controller,
rgb: Default::default(),
}
@@ -731,33 +740,18 @@ impl VideoHandler {
.update_video(vf.timestamp);
}
match &vf.union {
Some(video_frame::Union::vp9s(vp9s)) => self.handle_vp9s(vp9s),
Some(frame) => self.decoder.handle_video_frame(frame, &mut self.rgb),
_ => Ok(false),
}
}
pub fn handle_vp9s(&mut self, vp9s: &VP9s) -> ResultType<bool> {
let mut last_frame = Image::new();
for vp9 in vp9s.frames.iter() {
for frame in self.decoder.decode(&vp9.data)? {
drop(last_frame);
last_frame = frame;
}
}
for frame in self.decoder.flush()? {
drop(last_frame);
last_frame = frame;
}
if last_frame.is_null() {
Ok(false)
} else {
last_frame.rgb(1, true, &mut self.rgb);
Ok(true)
}
}
pub fn reset(&mut self) {
self.decoder = Decoder::new(VideoCodecId::VP9, 1).unwrap();
self.decoder = Decoder::new(DecoderCfg {
vpx: VpxDecoderConfig {
codec: VpxVideoCodecId::VP9,
num_threads: 1,
},
});
}
}
@@ -952,6 +946,11 @@ impl LoginConfigHandler {
msg.disable_clipboard = BoolOption::Yes.into();
n += 1;
}
// TODO: add option
let state = Decoder::video_codec_state();
msg.video_codec_state = hbb_common::protobuf::MessageField::some(state);
n += 1;
if n > 0 {
Some(msg)
} else {
@@ -1170,9 +1169,11 @@ where
let latency_controller = LatencyController::new();
let latency_controller_cl = latency_controller.clone();
// Create video_handler outside the thread below to ensure the handler exists before the client starts.
// The first creation takes a few tenths of a second; later ones take tens of milliseconds.
let mut video_handler = VideoHandler::new(latency_controller);
std::thread::spawn(move || {
let mut video_handler = VideoHandler::new(latency_controller);
loop {
if let Ok(data) = video_receiver.recv() {
match data {

View File

@@ -1,6 +1,8 @@
use crate::rendezvous_mediator::RendezvousMediator;
#[cfg(not(any(target_os = "android", target_os = "ios")))]
pub use clipboard::ClipbaordFile;
#[cfg(feature = "hwcodec")]
use hbb_common::config::HwCodecConfig;
use hbb_common::{
allow_err, bail, bytes,
bytes_codec::BytesCodec,
@@ -73,7 +75,7 @@ pub enum FS {
WriteOffset {
id: i32,
file_num: i32,
offset_blk: u32
offset_blk: u32,
},
CheckDigest {
id: i32,
@@ -127,6 +129,8 @@ pub enum Data {
ClipbaordFile(ClipbaordFile),
ClipboardFileEnabled(bool),
PrivacyModeState((i32, PrivacyModeState)),
#[cfg(feature = "hwcodec")]
HwCodecConfig(Option<HashMap<String, String>>),
}
#[tokio::main(flavor = "current_thread")]
@@ -336,6 +340,12 @@ async fn handle(data: Data, stream: &mut Connection) {
.await
);
}
#[cfg(feature = "hwcodec")]
Data::HwCodecConfig(Some(config)) => {
for (k, v) in config {
HwCodecConfig::set_option(k, v);
}
}
_ => {}
}
}
@@ -635,3 +645,20 @@ pub async fn set_socks(value: config::Socks5Server) -> ResultType<()> {
.await?;
Ok(())
}
#[cfg(feature = "hwcodec")]
#[tokio::main]
pub async fn check_hwcodec_config() {
if let Some(config) = scrap::hwcodec::check_config() {
match connect(1000, "").await {
Ok(mut conn) => {
if conn.send(&Data::HwCodecConfig(Some(config))).await.is_err() {
log::error!("Failed to send hwcodec config by ipc");
}
}
Err(err) => {
log::info!("Failed to connect ipc: {:?}", err);
}
}
}
}

View File

@@ -70,6 +70,15 @@ fn main() {
}
if args.is_empty() {
std::thread::spawn(move || start_server(false));
#[cfg(feature = "hwcodec")]
if let Ok(exe) = std::env::current_exe() {
std::thread::spawn(move || {
std::process::Command::new(exe)
.arg("--check-hwcodec-config")
.status()
.ok()
});
}
} else {
#[cfg(windows)]
{
@@ -109,6 +118,9 @@ fn main() {
} else if args[0] == "--extract" {
#[cfg(feature = "with_rc")]
hbb_common::allow_err!(crate::rc::extract_resources(&args[1]));
} else if args[0] == "--check-hwcodec-config" {
#[cfg(feature = "hwcodec")]
ipc::check_hwcodec_config();
return;
}
}
@@ -197,7 +209,7 @@ fn main() {
.about("RustDesk command line tool")
.args_from_usage(&args)
.get_matches();
use hbb_common::{env_logger::*, config::LocalConfig};
use hbb_common::{config::LocalConfig, env_logger::*};
init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info"));
if let Some(p) = matches.value_of("port-forward") {
let options: Vec<String> = p.split(":").map(|x| x.to_owned()).collect();
@@ -225,6 +237,13 @@ fn main() {
}
let key = matches.value_of("key").unwrap_or("").to_owned();
let token = LocalConfig::get_option("access_token");
cli::start_one_port_forward(options[0].clone(), port, remote_host, remote_port, key, token);
cli::start_one_port_forward(
options[0].clone(),
port,
remote_host,
remote_port,
key,
token,
);
}
}

View File

@@ -396,6 +396,7 @@ impl Connection {
video_service::notify_video_frame_feched(id, None);
video_service::update_test_latency(id, 0);
video_service::update_image_quality(id, None);
scrap::codec::Encoder::update_video_encoder(id, scrap::codec::EncoderUpdate::Remove);
if let Err(err) = conn.try_port_forward_loop(&mut rx_from_cm).await {
conn.on_close(&err.to_string(), false);
}
@@ -780,6 +781,22 @@ impl Connection {
if let Some(message::Union::login_request(lr)) = msg.union {
if let Some(o) = lr.option.as_ref() {
self.update_option(o).await;
if let Some(q) = o.video_codec_state.clone().take() {
scrap::codec::Encoder::update_video_encoder(
self.inner.id(),
scrap::codec::EncoderUpdate::State(q),
);
} else {
scrap::codec::Encoder::update_video_encoder(
self.inner.id(),
scrap::codec::EncoderUpdate::DisableHwIfNotExist,
);
}
} else {
scrap::codec::Encoder::update_video_encoder(
self.inner.id(),
scrap::codec::EncoderUpdate::DisableHwIfNotExist,
);
}
self.video_ack_required = lr.video_ack_required;
if self.authorized {

View File

@@ -23,7 +23,11 @@ use hbb_common::tokio::sync::{
mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
Mutex as TokioMutex,
};
use scrap::{Capturer, Config, Display, EncodeFrame, Encoder, Frame, VideoCodecId, STRIDE_ALIGN};
use scrap::{
codec::{Encoder, EncoderCfg, HwEncoderConfig},
vpxcodec::{VpxEncoderConfig, VpxVideoCodecId},
Capturer, Display, Frame,
};
use std::{
collections::HashSet,
io::{ErrorKind::WouldBlock, Result},
@@ -201,9 +205,11 @@ fn check_display_changed(
}
// The Capturer object is expensive, so avoid creating it frequently.
fn create_capturer(privacy_mode_id: i32, display: Display) -> ResultType<Box<dyn TraitCapturer>> {
let use_yuv = true;
fn create_capturer(
privacy_mode_id: i32,
display: Display,
use_yuv: bool,
) -> ResultType<Box<dyn TraitCapturer>> {
#[cfg(not(windows))]
let c: Option<Box<dyn TraitCapturer>> = None;
#[cfg(windows)]
@@ -292,7 +298,7 @@ pub fn test_create_capturer(privacy_mode_id: i32, timeout_millis: u64) -> bool {
let test_begin = Instant::now();
while test_begin.elapsed().as_millis() < timeout_millis as _ {
if let Ok((_, _, display)) = get_current_display() {
if let Ok(_) = create_capturer(privacy_mode_id, display) {
if let Ok(_) = create_capturer(privacy_mode_id, display, true) {
return true;
}
}
@@ -336,6 +342,36 @@ fn run(sp: GenericService) -> ResultType<()> {
num_cpus::get(),
);
let q = get_image_quality();
let (bitrate, rc_min_quantizer, rc_max_quantizer, speed) = get_quality(width, height, q);
log::info!("bitrate={}, rc_min_quantizer={}", bitrate, rc_min_quantizer);
let encoder_cfg = match Encoder::current_hw_encoder_name() {
Some(codec_name) => EncoderCfg::HW(HwEncoderConfig {
codec_name,
width,
height,
bitrate_ratio: q >> 8,
}),
None => EncoderCfg::VPX(VpxEncoderConfig {
width: width as _,
height: height as _,
timebase: [1, 1000], // Output timestamp precision
bitrate,
codec: VpxVideoCodecId::VP9,
rc_min_quantizer,
rc_max_quantizer,
speed,
num_threads: (num_cpus::get() / 2) as _,
}),
};
let mut encoder;
match Encoder::new(encoder_cfg) {
Ok(x) => encoder = x,
Err(err) => bail!("Failed to create encoder: {}", err),
}
let privacy_mode_id = *PRIVACY_MODE_CONN_ID.lock().unwrap();
#[cfg(not(windows))]
let captuerer_privacy_mode_id = privacy_mode_id;
@@ -355,26 +391,7 @@
} else {
log::info!("In privacy mode, the peer side cannot watch the screen");
}
let mut c = create_capturer(captuerer_privacy_mode_id, display)?;
let q = get_image_quality();
let (bitrate, rc_min_quantizer, rc_max_quantizer, speed) = get_quality(width, height, q);
log::info!("bitrate={}, rc_min_quantizer={}", bitrate, rc_min_quantizer);
let cfg = Config {
width: width as _,
height: height as _,
timebase: [1, 1000], // Output timestamp precision
bitrate,
codec: VideoCodecId::VP9,
rc_min_quantizer,
rc_max_quantizer,
speed,
};
let mut vpx;
match Encoder::new(&cfg, (num_cpus::get() / 2) as _) {
Ok(x) => vpx = x,
Err(err) => bail!("Failed to create encoder: {}", err),
}
let mut c = create_capturer(captuerer_privacy_mode_id, display, encoder.use_yuv())?;
if *SWITCH.lock().unwrap() {
log::debug!("Broadcasting display switch");
@@ -464,7 +481,7 @@
Ok(frame) => {
let time = now - start;
let ms = (time.as_secs() * 1000 + time.subsec_millis() as u64) as i64;
let send_conn_ids = handle_one_frame(&sp, &frame, ms, &mut vpx)?;
let send_conn_ids = handle_one_frame(&sp, &frame, ms, &mut encoder)?;
frame_controller.set_send(now, send_conn_ids);
#[cfg(windows)]
{
@@ -531,7 +548,6 @@
Ok(())
}
#[inline]
fn check_privacy_mode_changed(sp: &GenericService, privacy_mode_id: i32) -> ResultType<()> {
let privacy_mode_id_2 = *PRIVACY_MODE_CONN_ID.lock().unwrap();
if privacy_mode_id != privacy_mode_id_2 {
@@ -547,6 +563,7 @@ fn check_privacy_mode_changed(sp: &GenericService, privacy_mode_id: i32) -> Resu
}
#[inline]
#[cfg(any(target_os = "android", target_os = "ios"))]
fn create_msg(vp9s: Vec<VP9>) -> Message {
let mut msg_out = Message::new();
let mut vf = VideoFrame::new();
@@ -559,22 +576,12 @@ fn create_msg(vp9s: Vec<VP9>) -> Message {
msg_out
}
#[inline]
fn create_frame(frame: &EncodeFrame) -> VP9 {
VP9 {
data: frame.data.to_vec(),
key: frame.key,
pts: frame.pts,
..Default::default()
}
}
#[inline]
fn handle_one_frame(
sp: &GenericService,
frame: &[u8],
ms: i64,
vpx: &mut Encoder,
encoder: &mut Encoder,
) -> ResultType<HashSet<i32>> {
sp.snapshot(|sps| {
// so that new and old subscribers share the same encoder after a switch
@@ -585,20 +592,8 @@
})?;
let mut send_conn_ids: HashSet<i32> = Default::default();
let mut frames = Vec::new();
for ref frame in vpx
.encode(ms, frame, STRIDE_ALIGN)
.with_context(|| "Failed to encode")?
{
frames.push(create_frame(frame));
}
for ref frame in vpx.flush().with_context(|| "Failed to flush")? {
frames.push(create_frame(frame));
}
// to-do: flush periodically, e.g. 1 second
if frames.len() > 0 {
send_conn_ids = sp.send_video_frame(create_msg(frames));
if let Ok(msg) = encoder.encode_to_message(frame, ms) {
send_conn_ids = sp.send_video_frame(msg);
}
Ok(send_conn_ids)
}