Merge pull request #27193 from mshabunin:fix-v4l-size

videoio: fixed V4L frame size for non-BGR output
This commit is contained in:
Alexander Smorkalov 2025-04-03 09:49:36 +03:00 committed by GitHub
commit d49dee83bf
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 86 additions and 20 deletions

View File

@ -428,6 +428,7 @@ struct CvCaptureCAM_V4L CV_FINAL : public IVideoCapture
bool tryIoctl(unsigned long ioctlCode, void *parameter, bool failIfBusy = true, int attempts = 10) const; bool tryIoctl(unsigned long ioctlCode, void *parameter, bool failIfBusy = true, int attempts = 10) const;
bool controlInfo(int property_id, __u32 &v4l2id, cv::Range &range) const; bool controlInfo(int property_id, __u32 &v4l2id, cv::Range &range) const;
bool icvControl(__u32 v4l2id, int &value, bool isSet) const; bool icvControl(__u32 v4l2id, int &value, bool isSet) const;
void initFrameNonBGR();
bool icvSetFrameSize(int _width, int _height); bool icvSetFrameSize(int _width, int _height);
bool v4l2_reset(); bool v4l2_reset();
@ -1427,18 +1428,13 @@ void CvCaptureCAM_V4L::convertToRgb(const Buffer &currentBuffer)
imageSize = cv::Size(form.fmt.pix.width, form.fmt.pix.height); imageSize = cv::Size(form.fmt.pix.width, form.fmt.pix.height);
} }
// Not found conversion
switch (palette)
{
case V4L2_PIX_FMT_YUV411P:
frame.create(imageSize, CV_8UC3); frame.create(imageSize, CV_8UC3);
switch (palette) {
case V4L2_PIX_FMT_YUV411P:
yuv411p_to_rgb24(imageSize.width, imageSize.height, start, frame.data); yuv411p_to_rgb24(imageSize.width, imageSize.height, start, frame.data);
return; return;
default:
break;
}
// Converted by cvtColor or imdecode
switch (palette) {
case V4L2_PIX_FMT_YVU420: case V4L2_PIX_FMT_YVU420:
cv::cvtColor(cv::Mat(imageSize.height * 3 / 2, imageSize.width, CV_8U, start), frame, cv::cvtColor(cv::Mat(imageSize.height * 3 / 2, imageSize.width, CV_8U, start), frame,
COLOR_YUV2BGR_YV12); COLOR_YUV2BGR_YV12);
@ -1537,15 +1533,15 @@ void CvCaptureCAM_V4L::convertToRgb(const Buffer &currentBuffer)
} }
case V4L2_PIX_FMT_GREY: case V4L2_PIX_FMT_GREY:
cv::cvtColor(cv::Mat(imageSize, CV_8UC1, start), frame, COLOR_GRAY2BGR); cv::cvtColor(cv::Mat(imageSize, CV_8UC1, start), frame, COLOR_GRAY2BGR);
break; return;
case V4L2_PIX_FMT_XBGR32: case V4L2_PIX_FMT_XBGR32:
case V4L2_PIX_FMT_ABGR32: case V4L2_PIX_FMT_ABGR32:
cv::cvtColor(cv::Mat(imageSize, CV_8UC4, start), frame, COLOR_BGRA2BGR); cv::cvtColor(cv::Mat(imageSize, CV_8UC4, start), frame, COLOR_BGRA2BGR);
break; return;
case V4L2_PIX_FMT_BGR24: case V4L2_PIX_FMT_BGR24:
default: default:
Mat(1, currentBuffer.bytesused, CV_8U, start).copyTo(frame); Mat(1, currentBuffer.bytesused, CV_8U, start).reshape(frame.channels(), frame.rows).copyTo(frame);
break; return;
} }
} }
@ -1914,11 +1910,13 @@ bool CvCaptureCAM_V4L::setProperty( int property_id, double _value )
} }
case cv::CAP_PROP_FOURCC: case cv::CAP_PROP_FOURCC:
{ {
if (palette == static_cast<__u32>(value)) __u32 new_palette = static_cast<__u32>(_value);
if (palette == new_palette)
return true; return true;
__u32 old_palette = palette; __u32 old_palette = palette;
palette = static_cast<__u32>(value); palette = new_palette;
if (v4l2_reset()) if (v4l2_reset())
return true; return true;
@ -2033,12 +2031,80 @@ bool CvCaptureCAM_V4L::streaming(bool startStream)
return startStream; return startStream;
} }
// Pre-allocates `frame` with the cv::Mat size/type that matches the raw
// (non-BGR-converted) layout of the current V4L2 pixel format, so the caller
// can copy the capture buffer straight into `frame` without conversion.
// NOTE(review): assumes `form` already holds the negotiated capture format
// (i.e. VIDIOC_G_FMT/VIDIOC_S_FMT has been performed) — confirm against callers.
void CvCaptureCAM_V4L::initFrameNonBGR()
{
Size size;
// Frame dimensions live in different union members for multiplanar vs
// single-planar capture types; guard the unsigned->int narrowing either way.
if (V4L2_TYPE_IS_MULTIPLANAR(type)) {
CV_Assert(form.fmt.pix_mp.width <= (uint)std::numeric_limits<int>::max());
CV_Assert(form.fmt.pix_mp.height <= (uint)std::numeric_limits<int>::max());
size = Size{(int)form.fmt.pix_mp.width, (int)form.fmt.pix_mp.height};
} else {
CV_Assert(form.fmt.pix.width <= (uint)std::numeric_limits<int>::max());
CV_Assert(form.fmt.pix.height <= (uint)std::numeric_limits<int>::max());
size = Size{(int)form.fmt.pix.width, (int)form.fmt.pix.height};
}
// Map the V4L2 pixel format to the element type (and adjusted geometry)
// of the raw buffer as it will be handed back to the user.
int image_type = CV_8UC3;
switch (palette) {
case V4L2_PIX_FMT_BGR24:
case V4L2_PIX_FMT_RGB24:
// Packed 3-byte-per-pixel RGB/BGR.
image_type = CV_8UC3;
break;
case V4L2_PIX_FMT_XBGR32:
case V4L2_PIX_FMT_ABGR32:
// Packed 4-byte-per-pixel with alpha/padding channel.
image_type = CV_8UC4;
break;
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_UYVY:
// Packed 4:2:2 YUV: 2 bytes per pixel on average.
image_type = CV_8UC2;
break;
case V4L2_PIX_FMT_YVU420:
case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_NV12:
case V4L2_PIX_FMT_NV21:
// Planar/semi-planar 4:2:0: expose as a single-channel Mat whose
// height covers the luma plane plus the half-size chroma planes.
image_type = CV_8UC1;
size.height = size.height * 3 / 2; // "1.5" channels
break;
case V4L2_PIX_FMT_Y16:
case V4L2_PIX_FMT_Y16_BE:
case V4L2_PIX_FMT_Y12:
case V4L2_PIX_FMT_Y10:
// Greyscale formats carried in 16-bit containers (Y10/Y12 are
// presumably stored 16-bit-aligned per V4L2 — confirm against spec).
image_type = CV_16UC1;
break;
case V4L2_PIX_FMT_GREY:
image_type = CV_8UC1;
break;
default:
// Unknown (possibly compressed) format: expose the raw payload as a
// 1-row byte buffer sized to the actual data length.
image_type = CV_8UC1;
if(bufferIndex < 0)
// No frame dequeued yet — fall back to the allocated buffer length.
size = Size(buffers[MAX_V4L_BUFFERS].memories[MEMORY_ORIG].length, 1);
else {
__u32 bytesused = 0;
if (V4L2_TYPE_IS_MULTIPLANAR(type)) {
__u32 data_offset;
// Sum payload across planes, skipping each plane's header offset.
for (unsigned char n_planes = 0; n_planes < num_planes; n_planes++) {
data_offset = buffers[bufferIndex].planes[n_planes].data_offset;
bytesused += buffers[bufferIndex].planes[n_planes].bytesused - data_offset;
}
} else {
bytesused = buffers[bufferIndex].buffer.bytesused;
}
size = Size(bytesused, 1);
}
break;
}
// Mat::create is a no-op when size/type already match, so this is cheap
// to call per-frame.
frame.create(size, image_type);
}
bool CvCaptureCAM_V4L::retrieveFrame(int, OutputArray ret) bool CvCaptureCAM_V4L::retrieveFrame(int, OutputArray ret)
{ {
havePendingFrame = false; // unlock .grab() havePendingFrame = false; // unlock .grab()
if (bufferIndex < 0) if (bufferIndex < 0)
{
frame.copyTo(ret); frame.copyTo(ret);
return true;
}
/* Now get what has already been captured as a IplImage return */ /* Now get what has already been captured as a IplImage return */
const Buffer &currentBuffer = buffers[bufferIndex]; const Buffer &currentBuffer = buffers[bufferIndex];
@ -2068,8 +2134,8 @@ bool CvCaptureCAM_V4L::retrieveFrame(int, OutputArray ret)
std::min(currentBuffer.memories[n_planes].length, (size_t)cur_plane.bytesused)); std::min(currentBuffer.memories[n_planes].length, (size_t)cur_plane.bytesused));
} }
} else { } else {
const Size sz(std::min(buffers[MAX_V4L_BUFFERS].memories[MEMORY_ORIG].length, (size_t)currentBuffer.buffer.bytesused), 1); initFrameNonBGR();
frame = Mat(sz, CV_8U, currentBuffer.memories[MEMORY_ORIG].start); Mat(frame.size(), frame.type(), currentBuffer.memories[MEMORY_ORIG].start).copyTo(frame);
} }
} }
//Revert buffer to the queue //Revert buffer to the queue

View File

@ -116,7 +116,7 @@ TEST_P(videoio_v4l2, formats)
EXPECT_EQ(3, img.channels()); EXPECT_EQ(3, img.channels());
EXPECT_EQ(CV_8U, img.depth()); EXPECT_EQ(CV_8U, img.depth());
#ifdef DUMP_CAMERA_FRAME #ifdef DUMP_CAMERA_FRAME
std::string img_name = "frame_" + fourccToString(params.pixel_format); std::string img_name = "frame_" + fourccToStringSafe(params.pixel_format);
// V4L2 flag for big-endian formats // V4L2 flag for big-endian formats
if(params.pixel_format & (1 << 31)) if(params.pixel_format & (1 << 31))
img_name += "-BE"; img_name += "-BE";
@ -147,8 +147,8 @@ vector<Format_Channels_Depth> all_params = {
{ V4L2_PIX_FMT_Y10, 1, CV_16U, 1.f, 1.f }, { V4L2_PIX_FMT_Y10, 1, CV_16U, 1.f, 1.f },
{ V4L2_PIX_FMT_GREY, 1, CV_8U, 1.f, 1.f }, { V4L2_PIX_FMT_GREY, 1, CV_8U, 1.f, 1.f },
{ V4L2_PIX_FMT_BGR24, 3, CV_8U, 1.f, 1.f }, { V4L2_PIX_FMT_BGR24, 3, CV_8U, 1.f, 1.f },
{ V4L2_PIX_FMT_XBGR32, 3, CV_8U, 1.f, 1.f }, { V4L2_PIX_FMT_XBGR32, 4, CV_8U, 1.f, 1.f },
{ V4L2_PIX_FMT_ABGR32, 3, CV_8U, 1.f, 1.f }, { V4L2_PIX_FMT_ABGR32, 4, CV_8U, 1.f, 1.f },
}; };
inline static std::string param_printer(const testing::TestParamInfo<videoio_v4l2::ParamType>& info) inline static std::string param_printer(const testing::TestParamInfo<videoio_v4l2::ParamType>& info)