Added Y, UV accessors to cv::MediaFrame

This commit is contained in:
Anastasiya Pronina 2021-03-15 18:49:43 +03:00
parent 94f00cf096
commit b0d66e06f4
6 changed files with 533 additions and 102 deletions

View File

@ -20,6 +20,19 @@ G_API_OP(GBGR, <GMat(GFrame)>, "org.opencv.streaming.BGR")
static GMatDesc outMeta(const GFrameDesc& in) { return GMatDesc{CV_8U, 3, in.size}; }
};
// Operation declaration: extract the Y (luma) plane of a media frame as a GMat.
G_API_OP(GY, <GMat(GFrame)>, "org.opencv.streaming.Y") {
static GMatDesc outMeta(const GFrameDesc& frameDesc) {
// Full frame resolution, 8-bit single channel; the trailing 'false'
// presumably clears GMatDesc's planar flag (interleaved layout) -- verify against GMatDesc.
return GMatDesc { CV_8U, 1, frameDesc.size , false };
}
};
// Operation declaration: extract the interleaved UV (chroma) plane of a media frame.
G_API_OP(GUV, <GMat(GFrame)>, "org.opencv.streaming.UV") {
static GMatDesc outMeta(const GFrameDesc& frameDesc) {
// NV12 chroma is subsampled 2x in both dimensions: half width, half height,
// 2 channels (U and V interleaved).
return GMatDesc { CV_8U, 2, cv::Size(frameDesc.size.width / 2, frameDesc.size.height / 2),
false };
}
};
/** @brief Gets bgr plane from input frame
@note Function textual ID is "org.opencv.streaming.BGR"
@ -29,6 +42,25 @@ G_API_OP(GBGR, <GMat(GFrame)>, "org.opencv.streaming.BGR")
*/
GAPI_EXPORTS cv::GMat BGR(const cv::GFrame& in);
/** @brief Extracts Y plane from media frame.
Output image is 8-bit 1-channel image of @ref CV_8UC1.
@note Function textual ID is "org.opencv.streaming.Y"
@param frame input media frame.
*/
GAPI_EXPORTS GMat Y(const cv::GFrame& frame);
/** @brief Extracts UV plane from media frame.
Output image is 8-bit 2-channel image of @ref CV_8UC2.
@note Function textual ID is "org.opencv.streaming.UV"
@param frame input media frame.
*/
GAPI_EXPORTS GMat UV(const cv::GFrame& frame);
} // namespace streaming
//! @addtogroup gapi_transform

View File

@ -78,3 +78,11 @@ cv::GMat cv::gapi::streaming::desync(const cv::GMat &g) {
cv::GMat cv::gapi::streaming::BGR(const cv::GFrame& in) {
return cv::gapi::streaming::GBGR::on(in);
}
cv::GMat cv::gapi::streaming::Y(const cv::GFrame& in){
return cv::gapi::streaming::GY::on(in);
}
cv::GMat cv::gapi::streaming::UV(const cv::GFrame& in){
return cv::gapi::streaming::GUV::on(in);
}

View File

@ -58,32 +58,51 @@ namespace gimpl {
struct Data;
struct RcDesc;
struct GAPI_EXPORTS RMatMediaAdapterBGR final: public cv::RMat::Adapter
struct GAPI_EXPORTS RMatMediaFrameAdapter final: public cv::RMat::Adapter
{
explicit RMatMediaAdapterBGR(const cv::MediaFrame& frame) : m_frame(frame) { };
using MapDescF = std::function<cv::GMatDesc(const GFrameDesc&)>;
using MapDataF = std::function<cv::Mat(const GFrameDesc&, const cv::MediaFrame::View&)>;
RMatMediaFrameAdapter(const cv::MediaFrame& frame,
const MapDescF& frameDescToMatDesc,
const MapDataF& frameViewToMat) :
m_frame(frame),
m_frameDesc(frame.desc()),
m_frameDescToMatDesc(frameDescToMatDesc),
m_frameViewToMat(frameViewToMat)
{ }
virtual cv::RMat::View access(cv::RMat::Access a) override
{
auto view = m_frame.access(a == cv::RMat::Access::W ? cv::MediaFrame::Access::W
: cv::MediaFrame::Access::R);
auto ptr = reinterpret_cast<uchar*>(view.ptr[0]);
auto stride = view.stride[0];
auto rmatToFrameAccess = [](cv::RMat::Access rmatAccess) {
switch(rmatAccess) {
case cv::RMat::Access::R:
return cv::MediaFrame::Access::R;
case cv::RMat::Access::W:
return cv::MediaFrame::Access::W;
default:
cv::util::throw_error(std::logic_error("cv::RMat::Access::R or "
"cv::RMat::Access::W can only be mapped to cv::MediaFrame::Access!"));
}
};
std::shared_ptr<cv::MediaFrame::View> view_ptr =
std::make_shared<cv::MediaFrame::View>(std::move(view));
auto callback = [view_ptr]() mutable { view_ptr.reset(); };
auto fv = m_frame.access(rmatToFrameAccess(a));
return cv::RMat::View(desc(), ptr, stride, callback);
auto fvHolder = std::make_shared<cv::MediaFrame::View>(std::move(fv));
auto callback = [fvHolder]() mutable { fvHolder.reset(); };
return asView(m_frameViewToMat(m_frame.desc(), *fvHolder), callback);
}
virtual cv::GMatDesc desc() const override
{
const auto& desc = m_frame.desc();
GAPI_Assert(desc.fmt == cv::MediaFormat::BGR);
return cv::GMatDesc{CV_8U, 3, desc.size};
return m_frameDescToMatDesc(m_frameDesc);
}
cv::MediaFrame m_frame;
cv::GFrameDesc m_frameDesc;
MapDescF m_frameDescToMatDesc;
MapDataF m_frameViewToMat;
};

View File

@ -4,6 +4,8 @@
//
// Copyright (C) 2020 Intel Corporation
#include <mutex>
#if !defined(GAPI_STANDALONE)
#include <opencv2/imgproc.hpp>
#endif // !defined(GAPI_STANDALONE)
@ -11,6 +13,7 @@
#include <opencv2/gapi/util/throw.hpp> // throw_error
#include <opencv2/gapi/streaming/format.hpp> // kernels
#include "logger.hpp"
#include "api/gbackend_priv.hpp"
#include "backends/common/gbackend.hpp"
@ -197,16 +200,47 @@ cv::gapi::GKernelPackage cv::gimpl::streaming::kernels()
#if !defined(GAPI_STANDALONE)
// Common base for the BGR/Y/UV accessor actors. Handles the island I/O protocol
// (pull one input, propagate end-of-stream, forward metadata, post the output);
// derived classes only implement the format-specific extractRMat().
class GAccessorActorBase : public cv::gapi::streaming::IActor {
public:
explicit GAccessorActorBase(const cv::GCompileArgs&) {}
virtual void run(cv::gimpl::GIslandExecutable::IInput &in,
cv::gimpl::GIslandExecutable::IOutput &out) override {
const auto in_msg = in.get();
// Forward stream termination downstream untouched.
if (cv::util::holds_alternative<cv::gimpl::EndOfStream>(in_msg))
{
out.post(cv::gimpl::EndOfStream{});
return;
}
const cv::GRunArgs &in_args = cv::util::get<cv::GRunArgs>(in_msg);
// Accessor operations are strictly one-input (a single GFrame).
GAPI_Assert(in_args.size() == 1u);
auto frame = cv::util::get<cv::MediaFrame>(in_args[0]);
cv::GRunArgP out_arg = out.get(0);
auto& rmat = *cv::util::get<cv::RMat*>(out_arg);
// Format-specific work supplied by the derived actor.
extractRMat(frame, rmat);
// Keep the input's metadata (e.g. seq_id/timestamp) attached to the result.
out.meta(out_arg, in_args[0].meta);
out.post(std::move(out_arg));
}
// Fill 'rmat' with the requested plane/conversion of 'frame'.
virtual void extractRMat(const cv::MediaFrame& frame, cv::RMat& rmat) = 0;
protected:
// Guards the one-time "costly conversion" warning emitted by derived actors.
std::once_flag m_warnFlag;
};
struct GOCVBGR: public cv::detail::KernelTag
{
using API = cv::gapi::streaming::GBGR;
static cv::gapi::GBackend backend() { return cv::gapi::streaming::backend(); }
class Actor final: public cv::gapi::streaming::IActor {
public:
explicit Actor(const cv::GCompileArgs&) {}
virtual void run(cv::gimpl::GIslandExecutable::IInput &in,
cv::gimpl::GIslandExecutable::IOutput&out) override;
class Actor final: public GAccessorActorBase
{
public:
using GAccessorActorBase::GAccessorActorBase;
virtual void extractRMat(const cv::MediaFrame& frame, cv::RMat& rmat) override;
};
static cv::gapi::streaming::IActor::Ptr create(const cv::GCompileArgs& args)
@ -216,49 +250,173 @@ struct GOCVBGR: public cv::detail::KernelTag
static cv::gapi::streaming::GStreamingKernel kernel() { return {&create}; };
};
void GOCVBGR::Actor::run(cv::gimpl::GIslandExecutable::IInput &in,
cv::gimpl::GIslandExecutable::IOutput &out)
void GOCVBGR::Actor::extractRMat(const cv::MediaFrame& frame, cv::RMat& rmat)
{
const auto in_msg = in.get();
if (cv::util::holds_alternative<cv::gimpl::EndOfStream>(in_msg))
{
out.post(cv::gimpl::EndOfStream{});
return;
}
const cv::GRunArgs &in_args = cv::util::get<cv::GRunArgs>(in_msg);
GAPI_Assert(in_args.size() == 1u);
cv::GRunArgP out_arg = out.get(0);
auto frame = cv::util::get<cv::MediaFrame>(in_args[0]);
const auto& desc = frame.desc();
auto& rmat = *cv::util::get<cv::RMat*>(out_arg);
switch (desc.fmt)
{
case cv::MediaFormat::BGR:
rmat = cv::make_rmat<cv::gimpl::RMatMediaAdapterBGR>(frame);
{
rmat = cv::make_rmat<cv::gimpl::RMatMediaFrameAdapter>(frame,
[](const cv::GFrameDesc& d){ return cv::GMatDesc(CV_8U, 3, d.size); },
[](const cv::GFrameDesc& d, const cv::MediaFrame::View& v){
return cv::Mat(d.size, CV_8UC3, v.ptr[0], v.stride[0]);
});
break;
}
case cv::MediaFormat::NV12:
{
cv::Mat bgr;
auto view = frame.access(cv::MediaFrame::Access::R);
cv::Mat y_plane (desc.size, CV_8UC1, view.ptr[0], view.stride[0]);
cv::Mat uv_plane(desc.size / 2, CV_8UC2, view.ptr[1], view.stride[1]);
cv::cvtColorTwoPlane(y_plane, uv_plane, bgr, cv::COLOR_YUV2BGR_NV12);
rmat = cv::make_rmat<cv::gimpl::RMatAdapter>(bgr);
break;
}
{
std::call_once(m_warnFlag,
[](){
GAPI_LOG_WARNING(NULL, "\nOn-the-fly conversion from NV12 to BGR will happen.\n"
"Conversion may cost a lot for images with high resolution.\n"
"To retrieve cv::Mat-s from NV12 cv::MediaFrame for free, you may use "
"cv::gapi::streaming::Y and cv::gapi::streaming::UV accessors.\n");
});
cv::Mat bgr;
auto view = frame.access(cv::MediaFrame::Access::R);
cv::Mat y_plane (desc.size, CV_8UC1, view.ptr[0], view.stride[0]);
cv::Mat uv_plane(desc.size / 2, CV_8UC2, view.ptr[1], view.stride[1]);
cv::cvtColorTwoPlane(y_plane, uv_plane, bgr, cv::COLOR_YUV2BGR_NV12);
rmat = cv::make_rmat<cv::gimpl::RMatAdapter>(bgr);
break;
}
default:
cv::util::throw_error(
std::logic_error("Unsupported MediaFormat for cv::gapi::streaming::BGR"));
}
out.post(std::move(out_arg));
}
// CPU kernel implementing cv::gapi::streaming::Y (the GY operation).
struct GOCVY: public cv::detail::KernelTag
{
using API = cv::gapi::streaming::GY;
static cv::gapi::GBackend backend() { return cv::gapi::streaming::backend(); }
// Per-frame worker; the actual plane extraction lives in extractRMat().
class Actor final: public GAccessorActorBase
{
public:
using GAccessorActorBase::GAccessorActorBase;
virtual void extractRMat(const cv::MediaFrame& frame, cv::RMat& rmat) override;
};
static cv::gapi::streaming::IActor::Ptr create(const cv::GCompileArgs& args)
{
return cv::gapi::streaming::IActor::Ptr(new Actor(args));
}
static cv::gapi::streaming::GStreamingKernel kernel() { return {&create}; };
};
// Produces the Y (luma) plane of 'frame' as an RMat.
void GOCVY::Actor::extractRMat(const cv::MediaFrame& frame, cv::RMat& rmat)
{
const auto& desc = frame.desc();
switch (desc.fmt)
{
case cv::MediaFormat::BGR:
{
// BGR frames carry no Y plane: convert on the fly (costly) and warn once.
std::call_once(m_warnFlag,
[](){
GAPI_LOG_WARNING(NULL, "\nOn-the-fly conversion from BGR to NV12 Y plane will "
"happen.\n"
"Conversion may cost a lot for images with high resolution.\n"
"To retrieve cv::Mat from BGR cv::MediaFrame for free, you may use "
"cv::gapi::streaming::BGR accessor.\n");
});
auto view = frame.access(cv::MediaFrame::Access::R);
cv::Mat tmp_bgr(desc.size, CV_8UC3, view.ptr[0], view.stride[0]);
cv::Mat yuv;
cvtColor(tmp_bgr, yuv, cv::COLOR_BGR2YUV_I420);
// I420 layout: the first desc.size.height rows are the Y plane.
// rowRange() shares refcounted storage with 'yuv', so the data outlives this scope.
rmat = cv::make_rmat<cv::gimpl::RMatAdapter>(yuv.rowRange(0, desc.size.height));
break;
}
case cv::MediaFormat::NV12:
{
// NV12 already stores Y as plane 0: wrap it zero-copy via the frame adapter.
rmat = cv::make_rmat<cv::gimpl::RMatMediaFrameAdapter>(frame,
[](const cv::GFrameDesc& d){ return cv::GMatDesc(CV_8U, 1, d.size); },
[](const cv::GFrameDesc& d, const cv::MediaFrame::View& v){
return cv::Mat(d.size, CV_8UC1, v.ptr[0], v.stride[0]);
});
break;
}
default:
cv::util::throw_error(
std::logic_error("Unsupported MediaFormat for cv::gapi::streaming::Y"));
}
}
// CPU kernel implementing cv::gapi::streaming::UV (the GUV operation).
struct GOCVUV: public cv::detail::KernelTag
{
using API = cv::gapi::streaming::GUV;
static cv::gapi::GBackend backend() { return cv::gapi::streaming::backend(); }
// Per-frame worker; the actual plane extraction lives in extractRMat().
class Actor final: public GAccessorActorBase
{
public:
using GAccessorActorBase::GAccessorActorBase;
virtual void extractRMat(const cv::MediaFrame& frame, cv::RMat& rmat) override;
};
static cv::gapi::streaming::IActor::Ptr create(const cv::GCompileArgs& args)
{
return cv::gapi::streaming::IActor::Ptr(new Actor(args));
}
static cv::gapi::streaming::GStreamingKernel kernel() { return {&create}; };
};
// Produces the interleaved UV (chroma) plane of 'frame' as an RMat
// (half width, half height, CV_8UC2 -- NV12-style layout).
void GOCVUV::Actor::extractRMat(const cv::MediaFrame& frame, cv::RMat& rmat)
{
const auto& desc = frame.desc();
switch (desc.fmt)
{
case cv::MediaFormat::BGR:
{
// BGR frames carry no UV plane: convert on the fly (costly) and warn once.
std::call_once(m_warnFlag,
[](){
GAPI_LOG_WARNING(NULL, "\nOn-the-fly conversion from BGR to NV12 UV plane will "
"happen.\n"
"Conversion may cost a lot for images with high resolution.\n"
"To retrieve cv::Mat from BGR cv::MediaFrame for free, you may use "
"cv::gapi::streaming::BGR accessor.\n");
});
auto view = frame.access(cv::MediaFrame::Access::R);
cv::Mat tmp_bgr(desc.size, CV_8UC3, view.ptr[0], view.stride[0]);
cv::Mat yuv;
cvtColor(tmp_bgr, yuv, cv::COLOR_BGR2YUV_I420);
cv::Mat uv;
// I420 stores planar U then planar V after the H rows of Y, each plane
// occupying H/4 rows of full width W (= (H/2)*(W/2) samples).
std::vector<int> dims = { desc.size.height / 2,
desc.size.width / 2 };
auto start = desc.size.height;
auto range_h = desc.size.height / 4;
// Reshape each quarter-plane to (H/2)x(W/2), then interleave U,V into CV_8UC2.
std::vector<cv::Mat> uv_planes = {
yuv.rowRange(start, start + range_h).reshape(0, dims),
yuv.rowRange(start + range_h, start + range_h * 2).reshape(0, dims)
};
cv::merge(uv_planes, uv);
rmat = cv::make_rmat<cv::gimpl::RMatAdapter>(uv);
break;
}
case cv::MediaFormat::NV12:
{
// NV12 already stores interleaved UV as plane 1: wrap it zero-copy.
rmat = cv::make_rmat<cv::gimpl::RMatMediaFrameAdapter>(frame,
[](const cv::GFrameDesc& d){ return cv::GMatDesc(CV_8U, 2, d.size / 2); },
[](const cv::GFrameDesc& d, const cv::MediaFrame::View& v){
return cv::Mat(d.size / 2, CV_8UC2, v.ptr[1], v.stride[1]);
});
break;
}
default:
cv::util::throw_error(
std::logic_error("Unsupported MediaFormat for cv::gapi::streaming::UV"));
}
}
cv::gapi::GKernelPackage cv::gapi::streaming::kernels()
{
return cv::gapi::kernels<GOCVBGR>();
return cv::gapi::kernels<GOCVBGR, GOCVY, GOCVUV>();
}
#else

View File

@ -58,7 +58,7 @@ namespace
return o;
}
inline void initTestDataPath()
inline bool initTestDataPathSilent()
{
#ifndef WINRT
static bool initialized = false;
@ -66,15 +66,32 @@ namespace
{
// Since G-API has no own test data (yet), it is taken from the common space
const char* testDataPath = getenv("OPENCV_TEST_DATA_PATH");
GAPI_Assert(testDataPath != nullptr &&
"OPENCV_TEST_DATA_PATH environment variable is either not set or set incorrectly.");
cvtest::addDataSearchPath(testDataPath);
initialized = true;
if (testDataPath != nullptr) {
cvtest::addDataSearchPath(testDataPath);
initialized = true;
}
}
return initialized;
#endif // WINRT
}
// Initializes the test-data search path; asserts (hard failure) when the
// OPENCV_TEST_DATA_PATH environment variable is missing or invalid.
inline void initTestDataPath()
{
    const bool ok = initTestDataPathSilent();
    GAPI_Assert(ok && "OPENCV_TEST_DATA_PATH environment variable is either not set or set incorrectly.");
}
// Initializes the test-data search path; marks the current test as skipped
// (rather than failed) when the test data cannot be located.
inline void initTestDataPathOrSkip()
{
    if (!initTestDataPathSilent())
        throw cvtest::SkipTestException("Can't find test data");
}
template <typename T> inline void initPointRandU(cv::RNG &rng, cv::Point_<T>& pt)
{
GAPI_Assert(std::is_integral<T>::value);

View File

@ -7,6 +7,8 @@
#include "../test_precomp.hpp"
#include "../common/gapi_tests_common.hpp"
#include <thread> // sleep_for (Delay)
#include <opencv2/gapi/cpu/core.hpp>
@ -140,7 +142,7 @@ public:
TestMediaNV12(cv::Mat y, cv::Mat uv) : m_y(y), m_uv(uv) {
}
cv::GFrameDesc meta() const override {
return cv::GFrameDesc{cv::MediaFormat::NV12, cv::Size(m_y.cols, m_y.rows)};
return cv::GFrameDesc{cv::MediaFormat::NV12, m_y.size()};
}
cv::MediaFrame::View access(cv::MediaFrame::Access) override {
cv::MediaFrame::View::Ptrs pp = {
@ -199,24 +201,24 @@ class NV12Source : public cv::gapi::wip::GCaptureSource {
public:
explicit NV12Source(const std::string& pipeline)
: cv::gapi::wip::GCaptureSource(pipeline) {
}
bool pull(cv::gapi::wip::Data& data) {
if (cv::gapi::wip::GCaptureSource::pull(data)) {
cv::Mat bgr = cv::util::get<cv::Mat>(data);
cv::Mat y, uv;
cvtBGR2NV12(bgr, y, uv);
data = cv::MediaFrame::Create<TestMediaNV12>(y, uv);
return true;
}
return false;
}
GMetaArg descr_of() const override {
return cv::GMetaArg{cv::GFrameDesc{cv::MediaFormat::NV12,
cv::util::get<cv::GMatDesc>(
cv::gapi::wip::GCaptureSource::descr_of()).size}};
}
bool pull(cv::gapi::wip::Data& data) {
if (cv::gapi::wip::GCaptureSource::pull(data)) {
cv::Mat bgr = cv::util::get<cv::Mat>(data);
cv::Mat y, uv;
cvtBGR2NV12(bgr, y, uv);
data = cv::MediaFrame::Create<TestMediaNV12>(y, uv);
return true;
}
return false;
}
GMetaArg descr_of() const override {
return cv::GMetaArg{cv::GFrameDesc{cv::MediaFormat::NV12,
cv::util::get<cv::GMatDesc>(
cv::gapi::wip::GCaptureSource::descr_of()).size}};
}
};
} // anonymous namespace
@ -1819,82 +1821,277 @@ TEST(GAPI_Streaming, Reshape)
}
}
TEST(GAPI_Streaming, AccessBGRFromBGRFrame)
namespace {
// Which test stream source to build: plain BGR frames or NV12 media frames.
enum class TestSourceType {
BGR,
NV12
};
// Builds a stream source of the requested type over 'pipeline' (a video path),
// skipping the calling test if the source cannot be created (e.g. codec missing).
cv::gapi::wip::IStreamSource::Ptr createTestSource(TestSourceType sourceType,
const std::string& pipeline) {
// NOTE(review): this assert duplicates the switch's default branch below;
// in release builds only the SkipTestException path is active.
assert(sourceType == TestSourceType::BGR || sourceType == TestSourceType::NV12);
cv::gapi::wip::IStreamSource::Ptr ptr { };
switch (sourceType) {
case TestSourceType::BGR: {
try {
ptr = cv::gapi::wip::make_src<BGRSource>(pipeline);
}
catch(...) {
throw SkipTestException(std::string("BGRSource for '") + pipeline +
"' couldn't be created!");
}
break;
}
case TestSourceType::NV12: {
try {
ptr = cv::gapi::wip::make_src<NV12Source>(pipeline);
}
catch(...) {
throw SkipTestException(std::string("NV12Source for '") + pipeline +
"' couldn't be created!");
}
break;
}
default: {
throw SkipTestException("Incorrect type of source! "
"Something went wrong in the test!");
}
}
return ptr;
}
} // anonymous namespace
// Parameterized fixture for accessor accuracy tests. Tuple elements:
//   0: relative video file path under the test-data root,
//   1: accessor under test (GFrame -> GMat, e.g. streaming::BGR/Y/UV),
//   2: which test source wraps the video (BGR or NV12),
//   3: reference conversion from a captured BGR frame to the expected output.
struct GAPI_Accessors_In_Streaming : public TestWithParam<
std::tuple<std::string,
std::function<cv::GMat(const cv::GFrame&)>,
TestSourceType,
std::function<cv::Mat(const cv::Mat&)>>
>
{ };
TEST_P(GAPI_Accessors_In_Streaming, AccuracyTest)
{
initTestDataPath();
std::string filepath = findDataFile("cv/video/768x576.avi");
std::string filepath;
std::function<cv::GMat(const cv::GFrame&)> accessor;
TestSourceType sourceType = TestSourceType::BGR;
std::function<cv::Mat(const cv::Mat&)> fromBGR;
std::tie(filepath, accessor, sourceType, fromBGR) = GetParam();
initTestDataPathOrSkip();
const std::string& absFilePath = findDataFile(filepath, false);
cv::GFrame in;
auto out = cv::gapi::streaming::BGR(in);
cv::GMat out = accessor(in);
cv::GComputation comp(cv::GIn(in), cv::GOut(out));
auto cc = comp.compileStreaming();
try {
cc.setSource<BGRSource>(filepath);
} catch(...) {
throw SkipTestException("Video file can not be opened");
}
auto src = createTestSource(sourceType, absFilePath);
cc.setSource(src);
cv::VideoCapture cap;
cap.open(filepath);
cap.open(absFilePath);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
cv::Mat ocv_mat, gapi_mat;
cv::Mat cap_mat, ocv_mat, gapi_mat;
std::size_t num_frames = 0u;
std::size_t max_frames = 10u;
cc.start();
while (cc.pull(cv::gout(gapi_mat)) && num_frames < max_frames)
while (num_frames < max_frames && cc.pull(cv::gout(gapi_mat)))
{
num_frames++;
cap >> ocv_mat;
cap >> cap_mat;
ocv_mat = fromBGR(cap_mat);
EXPECT_EQ(0, cvtest::norm(ocv_mat, gapi_mat, NORM_INF));
}
cc.stop();
}
TEST(GAPI_Streaming, AccessBGRFromNV12Frame)
// Instantiations covering each accessor (BGR/Y/UV) against each source format
// (BGR/NV12). The last lambda converts a captured BGR frame into the value the
// accessor is expected to produce, mirroring the kernels' conversion paths.
INSTANTIATE_TEST_CASE_P(AccessBGRFromBGRFrame, GAPI_Accessors_In_Streaming,
Combine(Values("cv/video/768x576.avi"),
Values(cv::gapi::streaming::BGR),
Values(TestSourceType::BGR),
Values([](const cv::Mat& bgr){ return bgr; })
)
);
INSTANTIATE_TEST_CASE_P(AccessBGRFromNV12Frame, GAPI_Accessors_In_Streaming,
Combine(Values("cv/video/768x576.avi"),
Values(cv::gapi::streaming::BGR),
Values(TestSourceType::NV12),
Values([](const cv::Mat& bgr)
{
cv::Mat y, uv, out_bgr;
cvtBGR2NV12(bgr, y, uv);
cv::cvtColorTwoPlane(y, uv, out_bgr,
cv::COLOR_YUV2BGR_NV12);
return out_bgr;
})
)
);
INSTANTIATE_TEST_CASE_P(AccessYFromNV12Frame, GAPI_Accessors_In_Streaming,
Combine(Values("cv/video/768x576.avi"),
Values(cv::gapi::streaming::Y),
Values(TestSourceType::NV12),
Values([](const cv::Mat& bgr)
{
cv::Mat y, uv;
cvtBGR2NV12(bgr, y, uv);
return y;
})
)
);
INSTANTIATE_TEST_CASE_P(AccessYFromBGRFrame, GAPI_Accessors_In_Streaming,
Combine(Values("cv/video/768x576.avi"),
Values(cv::gapi::streaming::Y),
Values(TestSourceType::BGR),
Values([](const cv::Mat& bgr)
{
cv::Mat y, uv;
cvtBGR2NV12(bgr, y, uv);
return y;
})
)
);
INSTANTIATE_TEST_CASE_P(AccessUVFromNV12Frame, GAPI_Accessors_In_Streaming,
Combine(Values("cv/video/768x576.avi"),
Values(cv::gapi::streaming::UV),
Values(TestSourceType::NV12),
Values([](const cv::Mat& bgr)
{
cv::Mat y, uv;
cvtBGR2NV12(bgr, y, uv);
return uv;
})
)
);
INSTANTIATE_TEST_CASE_P(AccessUVFromBGRFrame, GAPI_Accessors_In_Streaming,
Combine(Values("cv/video/768x576.avi"),
Values(cv::gapi::streaming::UV),
Values(TestSourceType::BGR),
Values([](const cv::Mat& bgr)
{
cv::Mat y, uv;
cvtBGR2NV12(bgr, y, uv);
return uv;
})
)
);
// Parameterized fixture for metadata-propagation tests (seq_id/timestamp
// through accessor + resize). Tuple layout matches GAPI_Accessors_In_Streaming:
// file path, accessor under test, source type, reference BGR->expected conversion.
struct GAPI_Accessors_Meta_In_Streaming : public TestWithParam<
std::tuple<std::string,
std::function<cv::GMat(const cv::GFrame&)>,
TestSourceType,
std::function<cv::Mat(const cv::Mat&)>>
>
{ };
TEST_P(GAPI_Accessors_Meta_In_Streaming, AccuracyTest)
{
initTestDataPath();
std::string filepath = findDataFile("cv/video/768x576.avi");
std::string filepath;
std::function<cv::GMat(const cv::GFrame&)> accessor;
TestSourceType sourceType = TestSourceType::BGR;
std::function<cv::Mat(const cv::Mat&)> fromBGR;
std::tie(filepath, accessor, sourceType, fromBGR) = GetParam();
initTestDataPathOrSkip();
const std::string& absFilePath = findDataFile(filepath, false);
cv::GFrame in;
auto out = cv::gapi::streaming::BGR(in);
cv::GComputation comp(cv::GIn(in), cv::GOut(out));
cv::GMat gmat = accessor(in);
cv::GMat resized = cv::gapi::resize(gmat, cv::Size(1920, 1080));
cv::GOpaque<int64_t> outId = cv::gapi::streaming::seq_id(resized);
cv::GOpaque<int64_t> outTs = cv::gapi::streaming::timestamp(resized);
cv::GComputation comp(cv::GIn(in), cv::GOut(resized, outId, outTs));
auto cc = comp.compileStreaming();
try {
cc.setSource<NV12Source>(filepath);
} catch(...) {
throw SkipTestException("Video file can not be opened");
}
auto src = createTestSource(sourceType, absFilePath);
cc.setSource(src);
cv::VideoCapture cap;
cap.open(filepath);
cap.open(absFilePath);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
cv::Mat ocv_mat, gapi_mat;
cv::Mat cap_mat, req_mat, ocv_mat, gapi_mat;
int64_t seq_id = 0, timestamp = 0;
std::set<int64_t> all_seq_ids;
std::vector<int64_t> all_timestamps;
std::size_t num_frames = 0u;
std::size_t max_frames = 10u;
cc.start();
while (cc.pull(cv::gout(gapi_mat)) && num_frames < max_frames)
while (num_frames < max_frames && cc.pull(cv::gout(gapi_mat, seq_id, timestamp)))
{
num_frames++;
cap >> ocv_mat;
cv::Mat y, uv;
cvtBGR2NV12(ocv_mat, y, uv);
cv::cvtColorTwoPlane(y, uv, ocv_mat, cv::COLOR_YUV2BGR_NV12);
cap >> cap_mat;
req_mat = fromBGR(cap_mat);
cv::resize(req_mat, ocv_mat, cv::Size(1920, 1080));
EXPECT_EQ(0, cvtest::norm(ocv_mat, gapi_mat, NORM_INF));
all_seq_ids.insert(seq_id);
all_timestamps.push_back(timestamp);
}
cc.stop();
EXPECT_EQ(all_seq_ids.begin(), all_seq_ids.find(0L));
auto last_elem_it = --all_seq_ids.end();
EXPECT_EQ(last_elem_it, all_seq_ids.find(int64_t(max_frames - 1L)));
EXPECT_EQ(max_frames, all_seq_ids.size());
EXPECT_EQ(max_frames, all_timestamps.size());
EXPECT_TRUE(std::is_sorted(all_timestamps.begin(), all_timestamps.end()));
}
// Metadata-propagation instantiations: one per accessor, each paired with the
// source format it reads zero-copy (BGR from BGR, Y/UV from NV12).
INSTANTIATE_TEST_CASE_P(BGRAccessorMeta, GAPI_Accessors_Meta_In_Streaming,
Combine(Values("cv/video/768x576.avi"),
Values(cv::gapi::streaming::BGR),
Values(TestSourceType::BGR),
Values([](const cv::Mat& bgr) { return bgr; })
)
);
INSTANTIATE_TEST_CASE_P(YAccessorMeta, GAPI_Accessors_Meta_In_Streaming,
Combine(Values("cv/video/768x576.avi"),
Values(cv::gapi::streaming::Y),
Values(TestSourceType::NV12),
Values([](const cv::Mat& bgr)
{
cv::Mat y, uv;
cvtBGR2NV12(bgr, y, uv);
return y;
})
)
);
INSTANTIATE_TEST_CASE_P(UVAccessorMeta, GAPI_Accessors_Meta_In_Streaming,
Combine(Values("cv/video/768x576.avi"),
Values(cv::gapi::streaming::UV),
Values(TestSourceType::NV12),
Values([](const cv::Mat& bgr)
{
cv::Mat y, uv;
cvtBGR2NV12(bgr, y, uv);
return uv;
})
)
);
TEST(GAPI_Streaming, TestPythonAPI)
{
cv::Size sz(200, 200);