Merge pull request #21511 from dbudniko:dbudniko/gapi_media_format_gray

G-API media format gray
Dmitry Budnikov 2022-02-03 15:20:21 +03:00 committed by GitHub
parent 1605d1d24d
commit 4d0148b417
7 changed files with 393 additions and 6 deletions

View File

@@ -86,6 +86,7 @@ enum class MediaFormat: int
{
BGR = 0,
NV12,
GRAY,
};
/**
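For orientation: GRAY denotes a single-plane frame of 8-bit luma samples, so only plane 0 of a cv::MediaFrame::View is expected to carry data. A minimal descriptor sketch (illustrative only; the header paths and the 640x480 size are assumptions, not part of the patch):

#include <opencv2/gapi/gframe.hpp>  // cv::GFrameDesc (assumed header)
#include <opencv2/gapi/media.hpp>   // cv::MediaFormat (assumed header)

int main() {
    // A GRAY frame: one 8-bit plane, no chroma planes.
    cv::GFrameDesc desc{cv::MediaFormat::GRAY, cv::Size(640, 480)};
    return desc.fmt == cv::MediaFormat::GRAY ? 0 : 1;
}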

View File

@@ -44,6 +44,7 @@ std::ostream& operator<<(std::ostream& os, const cv::GFrameDesc &d) {
switch (d.fmt) {
case MediaFormat::BGR: os << "BGR"; break;
case MediaFormat::NV12: os << "NV12"; break;
case MediaFormat::GRAY: os << "GRAY"; break;
default: GAPI_Assert(false && "Invalid media format");
}
os << ' ' << d.size << ']';
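For illustration, the formatter above can be exercised directly, assuming the operator<< overload for cv::GFrameDesc is reachable through the public G-API headers (an assumption, as is the frame size used here):

#include <iostream>
#include <opencv2/gapi/gframe.hpp>
#include <opencv2/gapi/media.hpp>

int main() {
    cv::GFrameDesc desc{cv::MediaFormat::GRAY, cv::Size(320, 240)};
    std::cout << desc << std::endl;  // prints the format tag followed by the size, e.g. "[GRAY ...]"
    return 0;
}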

View File

@@ -182,6 +182,10 @@ inline IE::Blob::Ptr wrapIE(const cv::MediaFrame::View& view,
auto uv_plane = cv::Mat(desc.size / 2, CV_8UC2, view.ptr[1], view.stride[1]);
return cv::gapi::ie::util::to_ie(y_plane, uv_plane);
}
case cv::MediaFormat::GRAY: {
auto gray = cv::Mat(desc.size, CV_8UC1, view.ptr[0], view.stride[0]);
return wrapIE(gray, cv::gapi::ie::TraitAs::IMAGE);
}
default:
GAPI_Assert(false && "Unsupported media format for IE backend");
}
@@ -837,6 +841,9 @@ static void configureInputInfo(const IE::InputInfo::Ptr& ii, const cv::GMetaArg
case cv::MediaFormat::BGR:
// NB: Do nothing
break;
case cv::MediaFormat::GRAY:
// NB: Do nothing
break;
default:
GAPI_Assert(false && "Unsupported media format for IE backend");
}
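With these two branches, a GRAY cv::MediaFrame can be fed to OpenVINO inference through a cv::GFrame input: plane 0 is wrapped as a regular image blob, and configureInputInfo() leaves the input layout untouched, just as for BGR. A hedged user-side sketch (the network tag, model paths, and header set are illustrative assumptions):

#include <opencv2/gapi.hpp>
#include <opencv2/gapi/gframe.hpp>
#include <opencv2/gapi/infer.hpp>
#include <opencv2/gapi/infer/ie.hpp>

G_API_NET(GrayNet, <cv::GMat(cv::GMat)>, "example.gray-net");

cv::GComputation buildInferGraph() {
    cv::GFrame in;                                // bound to a GRAY cv::MediaFrame at run time
    cv::GMat out = cv::gapi::infer<GrayNet>(in);  // the IE backend wraps plane 0 via wrapIE()
    return cv::GComputation(cv::GIn(in), cv::GOut(out));
}

// Usage (paths are placeholders):
//   auto net = cv::gapi::ie::Params<GrayNet>{"model.xml", "model.bin", "CPU"};
//   buildInferGraph().apply(cv::gin(gray_frame), cv::gout(result),
//                           cv::compile_args(cv::gapi::networks(net)));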

View File

@@ -282,6 +282,23 @@ void GOCVBGR::Actor::extractRMat(const cv::MediaFrame& frame, cv::RMat& rmat)
rmat = cv::make_rmat<cv::gimpl::RMatOnMat>(bgr);
break;
}
case cv::MediaFormat::GRAY:
{
std::call_once(m_warnFlag,
[]() {
GAPI_LOG_WARNING(NULL, "\nOn-the-fly conversion from GRAY to BGR will happen.\n"
"Conversion may cost a lot for images with high resolution.\n"
"To retrieve cv::Mat from GRAY cv::MediaFrame for free, you may use "
"cv::gapi::streaming::Y.\n");
});
cv::Mat bgr;
auto view = frame.access(cv::MediaFrame::Access::R);
cv::Mat gray(desc.size, CV_8UC1, view.ptr[0], view.stride[0]);
cv::cvtColor(gray, bgr, cv::COLOR_GRAY2BGR);
rmat = cv::make_rmat<cv::gimpl::RMatOnMat>(bgr);
break;
}
default:
cv::util::throw_error(
std::logic_error("Unsupported MediaFormat for cv::gapi::streaming::BGR"));
@@ -339,6 +356,15 @@ void GOCVY::Actor::extractRMat(const cv::MediaFrame& frame, cv::RMat& rmat)
});
break;
}
case cv::MediaFormat::GRAY:
{
rmat = cv::make_rmat<cv::gimpl::RMatMediaFrameAdapter>(frame,
[](const cv::GFrameDesc& d) { return cv::GMatDesc(CV_8U, 1, d.size); },
[](const cv::GFrameDesc& d, const cv::MediaFrame::View& v) {
return cv::Mat(d.size, CV_8UC1, v.ptr[0], v.stride[0]);
});
break;
}
default:
cv::util::throw_error(
std::logic_error("Unsupported MediaFormat for cv::gapi::streaming::Y"));
@@ -408,6 +434,12 @@ void GOCVUV::Actor::extractRMat(const cv::MediaFrame& frame, cv::RMat& rmat)
});
break;
}
case cv::MediaFormat::GRAY:
{
cv::Mat uv(desc.size / 2, CV_8UC2, cv::Scalar::all(127));
rmat = cv::make_rmat<cv::gimpl::RMatOnMat>(uv);
break;
}
default:
cv::util::throw_error(
std::logic_error("Unsupported MediaFormat for cv::gapi::streaming::UV"));
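Taken together, these kernels define what the streaming accessors yield for a GRAY frame: Y() returns the luma plane without copying, BGR() performs the on-the-fly conversion warned about above, and UV() produces a synthetic neutral-chroma plane filled with 127. A hedged user-side sketch (the accessor header path is an assumption):

#include <opencv2/gapi.hpp>
#include <opencv2/gapi/gframe.hpp>
#include <opencv2/gapi/streaming/format.hpp>  // assumed location of BGR()/Y()/UV()

cv::GComputation buildAccessorGraph() {
    cv::GFrame in;
    cv::GMat y   = cv::gapi::streaming::Y(in);    // zero-copy view of plane 0
    cv::GMat bgr = cv::gapi::streaming::BGR(in);  // GRAY -> BGR conversion on every frame
    cv::GMat uv  = cv::gapi::streaming::UV(in);   // constant CV_8UC2 plane of 127s
    return cv::GComputation(cv::GIn(in), cv::GOut(y, bgr, uv));
}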

View File

@@ -1701,6 +1701,25 @@ namespace {
};
};
namespace {
class TestMediaGray final : public cv::MediaFrame::IAdapter {
cv::Mat m_mat;
public:
explicit TestMediaGray(cv::Mat m)
: m_mat(m) {
}
cv::GFrameDesc meta() const override {
return cv::GFrameDesc{ cv::MediaFormat::GRAY, cv::Size(m_mat.cols, m_mat.rows) };
}
cv::MediaFrame::View access(cv::MediaFrame::Access) override {
cv::MediaFrame::View::Ptrs pp = { m_mat.ptr(), nullptr, nullptr, nullptr };
cv::MediaFrame::View::Strides ss = { m_mat.step, 0u, 0u, 0u };
return cv::MediaFrame::View(std::move(pp), std::move(ss));
}
};
};
TEST_P(SizeMFTest, ParseTest)
{
cv::Size out_sz;
@@ -1715,6 +1734,20 @@ TEST_P(SizeMFTest, ParseTest)
EXPECT_EQ(sz, out_sz);
}
TEST_P(SizeMFTest, ParseGrayTest)
{
cv::Size out_sz;
cv::Mat gray = cv::Mat::eye(sz.height, sz.width, CV_8UC1);
cv::MediaFrame frame = cv::MediaFrame::Create<TestMediaGray>(gray);
cv::GFrame in;
auto out = cv::gapi::streaming::size(in);
cv::GComputation c(cv::GIn(in), cv::GOut(out));
c.apply(cv::gin(frame), cv::gout(out_sz), getCompileArgs());
EXPECT_EQ(sz, out_sz);
}
} // opencv_test
#endif //OPENCV_GAPI_CORE_TESTS_INL_HPP
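The new test checks that cv::gapi::streaming::size() works on a GFrame backed by a GRAY adapter; the accessor reports the frame size as a GOpaque<cv::Size> regardless of the media format. A minimal standalone sketch (the header chosen for size() is an assumption):

#include <opencv2/gapi.hpp>
#include <opencv2/gapi/core.hpp>     // assumed location of cv::gapi::streaming::size()
#include <opencv2/gapi/gframe.hpp>
#include <opencv2/gapi/gopaque.hpp>

cv::GComputation buildSizeGraph() {
    cv::GFrame in;
    cv::GOpaque<cv::Size> sz = cv::gapi::streaming::size(in);
    return cv::GComputation(cv::GIn(in), cv::GOut(sz));
}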

View File

@@ -29,6 +29,23 @@ GAPI_OCV_KERNEL(OCVBlurFrame, GBlurFrame) {
}
};
G_API_OP(GBlurFrameGray, <GMat(GFrame)>, "test.blur_frame_gray") {
static GMatDesc outMeta(GFrameDesc in) {
return cv::GMatDesc(CV_8U, 1, in.size);
}
};
GAPI_OCV_KERNEL(OCVBlurFrameGray, GBlurFrameGray) {
static void run(const cv::MediaFrame & in, cv::Mat & out) {
GAPI_Assert(in.desc().fmt == cv::MediaFormat::GRAY);
cv::MediaFrame::View view = in.access(cv::MediaFrame::Access::R);
cv::blur(cv::Mat(in.desc().size, CV_8UC1, view.ptr[0], view.stride[0]),
out,
cv::Size{ 3,3 });
}
};
////////////////////////////////////////////////////////////////////////////////
// cv::MediaFrame tests
namespace {
@@ -70,6 +87,26 @@ public:
return cv::MediaFrame::View(std::move(pp), std::move(ss));
}
};
class TestMediaGray final : public cv::MediaFrame::IAdapter {
cv::Mat m_mat;
using Cb = cv::MediaFrame::View::Callback;
Cb m_cb;
public:
explicit TestMediaGray(cv::Mat m, Cb cb = []() {})
: m_mat(m), m_cb(cb) {
}
cv::GFrameDesc meta() const override {
return cv::GFrameDesc{ cv::MediaFormat::GRAY, cv::Size(m_mat.cols, m_mat.rows) };
}
cv::MediaFrame::View access(cv::MediaFrame::Access) override {
cv::MediaFrame::View::Ptrs pp = { m_mat.ptr(), nullptr, nullptr, nullptr };
cv::MediaFrame::View::Strides ss = { m_mat.step, 0u, 0u, 0u };
return cv::MediaFrame::View(std::move(pp), std::move(ss), Cb{ m_cb });
}
};
} // anonymous namespace
struct MediaFrame_Test: public ::testing::Test {
@@ -120,6 +157,49 @@ TEST_F(MediaFrame_BGR, Input) {
EXPECT_EQ(0, cvtest::norm(out_mat_ocv, out_mat_gapi, NORM_INF));
}
struct MediaFrame_Gray : public MediaFrame_Test {
M gray;
MediaFrame_Gray()
: gray(M::eye(240, 320, CV_8UC1)) {
cv::randn(gray, cv::Scalar::all(127.0f), cv::Scalar::all(40.f));
frame = MF::Create<TestMediaGray>(gray);
}
};
TEST_F(MediaFrame_Gray, Meta) {
auto meta = frame.desc();
EXPECT_EQ(cv::MediaFormat::GRAY, meta.fmt);
EXPECT_EQ(cv::Size(320, 240), meta.size);
}
TEST_F(MediaFrame_Gray, Access) {
cv::MediaFrame::View view1 = frame.access(cv::MediaFrame::Access::R);
EXPECT_EQ(gray.ptr(), view1.ptr[0]);
EXPECT_EQ(gray.step, view1.stride[0]);
cv::MediaFrame::View view2 = frame.access(cv::MediaFrame::Access::R);
EXPECT_EQ(gray.ptr(), view2.ptr[0]);
EXPECT_EQ(gray.step, view2.stride[0]);
}
TEST_F(MediaFrame_Gray, Input) {
// Run the OpenCV code
cv::Mat out_mat_ocv, out_mat_gapi;
cv::blur(gray, out_mat_ocv, cv::Size{ 3,3 });
// Run the G-API code
cv::GFrame in;
cv::GMat out = GBlurFrameGray::on(in);
cv::GComputation(cv::GIn(in), cv::GOut(out))
.apply(cv::gin(frame),
cv::gout(out_mat_gapi),
cv::compile_args(cv::gapi::kernels<OCVBlurFrameGray>()));
// Compare
EXPECT_EQ(0, cvtest::norm(out_mat_ocv, out_mat_gapi, NORM_INF));
}
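As the fixture above shows, mapping a GRAY frame to a cv::Mat needs only plane 0, but the view's stride should be forwarded so that row padding (if the producer uses any) is respected. A small helper sketch (the function name is illustrative, not from the patch):

#include <opencv2/core.hpp>
#include <opencv2/gapi/media.hpp>

cv::Mat grayFrameToMat(const cv::MediaFrame& frame) {
    const cv::GFrameDesc d = frame.desc();
    CV_Assert(d.fmt == cv::MediaFormat::GRAY);
    cv::MediaFrame::View view = frame.access(cv::MediaFrame::Access::R);
    // The Mat below aliases the view's memory; clone so the copy outlives the view.
    return cv::Mat(d.size, CV_8UC1, view.ptr[0], view.stride[0]).clone();
}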
struct MediaFrame_NV12: public MediaFrame_Test {
cv::Size sz;
cv::Mat buf, y, uv;

View File

@@ -164,6 +164,26 @@ public:
}
};
class TestMediaGRAY final : public cv::MediaFrame::IAdapter {
cv::Mat m_mat;
using Cb = cv::MediaFrame::View::Callback;
Cb m_cb;
public:
explicit TestMediaGRAY(cv::Mat m, Cb cb = []() {})
: m_mat(m), m_cb(cb) {
}
cv::GFrameDesc meta() const override {
return cv::GFrameDesc{ cv::MediaFormat::GRAY, cv::Size(m_mat.cols, m_mat.rows) };
}
cv::MediaFrame::View access(cv::MediaFrame::Access) override {
cv::MediaFrame::View::Ptrs pp = { m_mat.ptr(), nullptr, nullptr, nullptr };
cv::MediaFrame::View::Strides ss = { m_mat.step, 0u, 0u, 0u };
return cv::MediaFrame::View(std::move(pp), std::move(ss), Cb{ m_cb });
}
};
class BGRSource : public cv::gapi::wip::GCaptureSource {
public:
explicit BGRSource(const std::string& pipeline)
@@ -230,6 +250,31 @@ public:
}
};
class GRAYSource : public cv::gapi::wip::GCaptureSource {
public:
explicit GRAYSource(const std::string& pipeline)
: cv::gapi::wip::GCaptureSource(pipeline) {
}
bool pull(cv::gapi::wip::Data& data) {
if (cv::gapi::wip::GCaptureSource::pull(data)) {
cv::Mat bgr = cv::util::get<cv::Mat>(data);
cv::Mat gray;
cvtColor(bgr, gray, cv::COLOR_BGR2GRAY);
data = cv::MediaFrame::Create<TestMediaGRAY>(gray);
return true;
}
return false;
}
GMetaArg descr_of() const override {
return cv::GMetaArg{ cv::GFrameDesc{cv::MediaFormat::GRAY,
cv::util::get<cv::GMatDesc>(
cv::gapi::wip::GCaptureSource::descr_of()).size} };
}
};
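A hedged sketch of how a source like GRAYSource above plugs into a streaming pipeline; the accessor header and the video path are assumptions, and the class itself must be visible at the call site:

#include <string>
#include <opencv2/gapi.hpp>
#include <opencv2/gapi/gframe.hpp>
#include <opencv2/gapi/gstreaming.hpp>
#include <opencv2/gapi/streaming/format.hpp>  // assumed location of streaming::Y()

void runGrayPipeline(const std::string& video_path) {
    cv::GFrame in;
    cv::GMat luma = cv::gapi::streaming::Y(in);  // free for GRAY frames, no conversion
    auto pipe = cv::GComputation(cv::GIn(in), cv::GOut(luma)).compileStreaming();
    pipe.setSource<GRAYSource>(video_path);      // GRAYSource as defined above
    pipe.start();
    cv::Mat frame;
    while (pipe.pull(cv::gout(frame))) {
        // each pulled Mat is a CV_8UC1 luma image
    }
}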
void checkPullOverload(const cv::Mat& ref,
const bool has_output,
cv::util::variant<cv::GRunArgs, cv::GOptRunArgs>& args) {
@@ -1789,6 +1834,46 @@ TEST(GAPI_Streaming, CopyFrame)
}
}
TEST(GAPI_Streaming, CopyFrameGray)
{
std::string filepath = findDataFile("cv/video/768x576.avi");
cv::GFrame in;
auto out = cv::gapi::copy(in);
cv::GComputation comp(cv::GIn(in), cv::GOut(out));
auto cc = comp.compileStreaming();
try {
cc.setSource<GRAYSource>(filepath);
}
catch (...) {
throw SkipTestException("Video file can not be opened");
}
cv::VideoCapture cap;
cap.open(filepath);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
cv::MediaFrame frame;
cv::Mat ocv_mat;
std::size_t num_frames = 0u;
std::size_t max_frames = 10u;
cc.start();
while (cc.pull(cv::gout(frame)) && num_frames < max_frames)
{
auto view = frame.access(cv::MediaFrame::Access::R);
cv::Mat gapi_mat(frame.desc().size, CV_8UC1, view.ptr[0]);
num_frames++;
cap >> ocv_mat;
cv::Mat gray;
cvtColor(ocv_mat, gray, cv::COLOR_BGR2GRAY);
EXPECT_EQ(0, cvtest::norm(gray, gapi_mat, NORM_INF));
}
}
TEST(GAPI_Streaming, CopyMat)
{
std::string filepath = findDataFile("cv/video/768x576.avi");
@@ -1892,23 +1977,97 @@ TEST(GAPI_Streaming, Reshape)
}
}
TEST(GAPI_Streaming, ReshapeGray)
{
std::string filepath = findDataFile("cv/video/768x576.avi");
cv::GFrame in;
auto out = cv::gapi::copy(in);
cv::GComputation comp(cv::GIn(in), cv::GOut(out));
auto cc = comp.compileStreaming();
try {
cc.setSource<GRAYSource>(filepath);
}
catch (...) {
throw SkipTestException("Video file can not be opened");
}
cv::VideoCapture cap;
cap.open(filepath);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
cv::MediaFrame frame;
cv::Mat ocv_mat;
std::size_t num_frames = 0u;
std::size_t max_frames = 10u;
cc.start();
while (cc.pull(cv::gout(frame)) && num_frames < max_frames)
{
auto view = frame.access(cv::MediaFrame::Access::R);
cv::Mat gapi_mat(frame.desc().size, CV_8UC1, view.ptr[0]);
num_frames++;
cap >> ocv_mat;
cv::Mat gray;
cvtColor(ocv_mat, gray, cv::COLOR_BGR2GRAY);
EXPECT_EQ(0, cvtest::norm(gray, gapi_mat, NORM_INF));
}
// Reshape the graph meta
filepath = findDataFile("cv/video/1920x1080.avi");
cc.stop();
try {
cc.setSource<GRAYSource>(filepath);
}
catch (...) {
throw SkipTestException("Video file can not be opened");
}
cap.open(filepath);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
cv::MediaFrame frame2;
cv::Mat ocv_mat2;
num_frames = 0u;
cc.start();
while (cc.pull(cv::gout(frame2)) && num_frames < max_frames)
{
auto view = frame2.access(cv::MediaFrame::Access::R);
cv::Mat gapi_mat(frame2.desc().size, CV_8UC1, view.ptr[0]);
num_frames++;
cap >> ocv_mat2;
cv::Mat gray;
cvtColor(ocv_mat2, gray, cv::COLOR_BGR2GRAY);
EXPECT_EQ(0, cvtest::norm(gray, gapi_mat, NORM_INF));
}
}
namespace {
enum class TestSourceType {
BGR,
NV12,
GRAY
};
std::ostream& operator<<(std::ostream& os, TestSourceType a) {
os << "Source:";
switch (a) {
case TestSourceType::BGR: return os << "BGR";
case TestSourceType::NV12: return os << "NV12";
case TestSourceType::GRAY: return os << "GRAY";
default: CV_Assert(false && "unknown TestSourceType");
}
}
cv::gapi::wip::IStreamSource::Ptr createTestSource(TestSourceType sourceType,
const std::string& pipeline) {
assert(sourceType == TestSourceType::BGR || sourceType == TestSourceType::NV12 || sourceType == TestSourceType::GRAY);
cv::gapi::wip::IStreamSource::Ptr ptr { };
@@ -1933,6 +2092,16 @@ namespace {
}
break;
}
case TestSourceType::GRAY: {
try {
ptr = cv::gapi::wip::make_src<GRAYSource>(pipeline);
}
catch (...) {
throw SkipTestException(std::string("GRAYSource for '") + pipeline +
"' couldn't be created!");
}
break;
}
default: {
throw SkipTestException("Incorrect type of source! "
"Something went wrong in the test!");
@@ -2000,6 +2169,25 @@ namespace {
cvtBGR2NV12(bgr, y, uv);
return uv;
} },
{ std::make_pair(TestSourceType::GRAY, TestAccessType::BGR),
[](const cv::Mat& bgr) {
cv::Mat gray;
cv::cvtColor(bgr, gray, cv::COLOR_BGR2GRAY);
cv::Mat out_bgr;
cv::cvtColor(gray, out_bgr, cv::COLOR_GRAY2BGR);
return out_bgr;
} },
{ std::make_pair(TestSourceType::GRAY, TestAccessType::Y),
[](const cv::Mat& bgr) {
cv::Mat gray;
cv::cvtColor(bgr, gray, cv::COLOR_BGR2GRAY);
return gray;
} },
{ std::make_pair(TestSourceType::GRAY, TestAccessType::UV),
[](const cv::Mat& bgr) {
cv::Mat uv(bgr.size() / 2, CV_8UC2, cv::Scalar::all(127));
return uv;
} },
};
} // anonymous namespace
@@ -2007,6 +2195,7 @@ struct GAPI_Accessors_In_Streaming : public TestWithParam<
std::tuple<std::string,TestSourceType,TestAccessType>>
{ };
TEST_P(GAPI_Accessors_In_Streaming, AccuracyTest)
{
std::string filepath{};
@@ -2050,10 +2239,11 @@ TEST_P(GAPI_Accessors_In_Streaming, AccuracyTest)
INSTANTIATE_TEST_CASE_P(TestAccessor, GAPI_Accessors_In_Streaming,
Combine(Values("cv/video/768x576.avi"),
Values(TestSourceType::BGR, TestSourceType::NV12, TestSourceType::GRAY),
Values(TestAccessType::BGR, TestAccessType::Y, TestAccessType::UV)
));
struct GAPI_Accessors_Meta_In_Streaming : public TestWithParam<
std::tuple<std::string,TestSourceType,TestAccessType>>
{ };
@@ -2120,7 +2310,7 @@ TEST_P(GAPI_Accessors_Meta_In_Streaming, AccuracyTest)
INSTANTIATE_TEST_CASE_P(AccessorMeta, GAPI_Accessors_Meta_In_Streaming,
Combine(Values("cv/video/768x576.avi"),
Values(TestSourceType::BGR, TestSourceType::NV12, TestSourceType::GRAY),
Values(TestAccessType::BGR, TestAccessType::Y, TestAccessType::UV)
));
@@ -2232,7 +2422,7 @@ TEST(GAPI_Streaming, TestDesyncRMat) {
cv::optional<cv::RMat> out_desync;
cv::optional<cv::RMat> out_rmat;
while (true) {
// Initially it threw "bad variant access" since there was
// no RMat handling in wrap_opt_arg
EXPECT_NO_THROW(pipe.pull(cv::gout(out_desync, out_rmat)));
if (out_rmat) break;
@@ -2273,11 +2463,54 @@ TEST(GAPI_Streaming, TestDesyncMediaFrame) {
cv::optional<cv::MediaFrame> out_desync;
cv::optional<cv::MediaFrame> out_frame;
while (true) {
// Initially it threw "bad variant access" since there was
// no MediaFrame handling in wrap_opt_arg
EXPECT_NO_THROW(pipe.pull(cv::gout(out_desync, out_frame)));
if (out_frame) break;
}
}
G_API_OP(GTestBlurGray, <GFrame(GFrame)>, "test.blur_gray") {
static GFrameDesc outMeta(GFrameDesc d) { return d; }
};
GAPI_OCV_KERNEL(GOcvTestBlurGray, GTestBlurGray) {
static void run(const cv::MediaFrame & in, cv::MediaFrame & out) {
auto d = in.desc();
GAPI_Assert(d.fmt == cv::MediaFormat::GRAY);
auto view = in.access(cv::MediaFrame::Access::R);
cv::Mat mat(d.size, CV_8UC1, view.ptr[0]);
cv::Mat blurred;
cv::blur(mat, blurred, cv::Size{ 3,3 });
out = cv::MediaFrame::Create<TestMediaGRAY>(blurred);
}
};
TEST(GAPI_Streaming, TestDesyncMediaFrameGray) {
cv::GFrame in;
auto blurred = GTestBlurGray::on(in);
auto desynced = cv::gapi::streaming::desync(blurred);
auto out = GTestBlurGray::on(blurred);
auto pipe = cv::GComputation(cv::GIn(in), cv::GOut(desynced, out))
.compileStreaming(cv::compile_args(cv::gapi::kernels<GOcvTestBlurGray>()));
std::string filepath = findDataFile("cv/video/768x576.avi");
try {
pipe.setSource<GRAYSource>(filepath);
}
catch (...) {
throw SkipTestException("Video file can not be opened");
}
pipe.start();
cv::optional<cv::MediaFrame> out_desync;
cv::optional<cv::MediaFrame> out_frame;
while (true) {
// Initially it threw "bad variant access" since there was
// no MediaFrame handling in wrap_opt_arg
EXPECT_NO_THROW(pipe.pull(cv::gout(out_desync, out_frame)));
if (out_frame) break;
}
}
} // namespace opencv_test