Mirror of https://github.com/opencv/opencv.git
Add support for changing fourcc and support mono formats (e.g. Y8, Y16)
commit 13ede34516
parent e5d7f446d6
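
Before the diff itself, a rough usage sketch: the change lets callers pick a fourcc (including the 16-bit mono Y16 format) through the DirectShow backend (cap_dshow.cpp) and keep the raw data by turning off RGB conversion. The snippet below is illustrative only and not part of the patch; the device index and the camera actually offering a Y16 mode are assumptions.

    // Illustrative sketch only (not from the patch): request Y16 from a
    // DirectShow camera and keep the raw 16-bit frames.
    #include <opencv2/core.hpp>
    #include <opencv2/videoio.hpp>
    #include <iostream>

    int main()
    {
        cv::VideoCapture cap(0, cv::CAP_DSHOW);          // assumed device index
        if (!cap.isOpened())
            return 1;

        cap.set(cv::CAP_PROP_FOURCC, cv::VideoWriter::fourcc('Y', '1', '6', ' '));
        cap.set(cv::CAP_PROP_CONVERT_RGB, 0);            // skip Y16 -> BGR conversion

        cv::Mat frame;
        if (cap.read(frame))
            std::cout << "type=" << frame.type()         // CV_16UC1 expected here
                      << " size=" << frame.cols << "x" << frame.rows << std::endl;
        return 0;
    }
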
@@ -141,6 +141,10 @@ DEFINE_GUID(MEDIASUBTYPE_Y8, 0x20203859, 0x0000, 0x0010, 0x80, 0x00,
     0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
 DEFINE_GUID(MEDIASUBTYPE_Y800, 0x30303859, 0x0000, 0x0010, 0x80, 0x00,
     0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
+DEFINE_GUID(MEDIASUBTYPE_Y16, 0x20363159, 0x0000, 0x0010, 0x80, 0x00,
+    0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71);
+DEFINE_GUID(MEDIASUBTYPE_BY8, 0x20385942, 0x0000, 0x0010, 0x80, 0x00,
+    0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
 
 DEFINE_GUID(CLSID_CaptureGraphBuilder2,0xbf87b6e1,0x8c27,0x11d0,0xb3,0xf0,0x00,0xaa,0x00,0x37,0x61,0xc5);
 DEFINE_GUID(CLSID_FilterGraph,0xe436ebb3,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
@@ -333,7 +337,7 @@ static void DebugPrintOut(const char *format, ...)
 //videoInput defines
 #define VI_VERSION 0.1995
 #define VI_MAX_CAMERAS 20
-#define VI_NUM_TYPES 20 //MGB
+#define VI_NUM_TYPES 22 //MGB
 #define VI_NUM_FORMATS 18 //DON'T TOUCH
 
 //defines for setPhyCon - tuner is not as well supported as composite and s-video
@@ -427,6 +431,7 @@ class videoDevice{
         bool setupStarted;
         bool specificFormat;
         bool autoReconnect;
+        bool convertRGB;
         int nFramesForReconnect;
         unsigned long nFramesRunning;
         int connection;
@@ -522,6 +527,10 @@ class videoInput{
         int getFourcc(int deviceID) const;
         double getFPS(int deviceID) const;
 
+        // RGB conversion setting
+        bool getConvertRGB(int deviceID);
+        bool setConvertRGB(int deviceID, bool enable);
+
         //completely stops and frees a device
         void stopDevice(int deviceID);
 
@@ -539,11 +548,13 @@ class videoInput{
 
         int property_window_count(int device_idx);
 
+        GUID getMediasubtype(int deviceID);
+
     private:
         void setPhyCon(int deviceID, int conn);
         void setAttemptCaptureSize(int deviceID, int w, int h,GUID mediaType=MEDIASUBTYPE_RGB24);
         bool setup(int deviceID);
-        void processPixels(unsigned char * src, unsigned char * dst, int width, int height, bool bRGB, bool bFlip);
+        void processPixels(unsigned char * src, unsigned char * dst, int width, int height, bool bRGB, bool bFlip, int bytesperpixel = 3);
         int start(int deviceID, videoDevice * VD);
         int getDeviceCount();
         void getMediaSubtypeAsString(GUID type, char * typeAsString);
@@ -586,6 +597,24 @@ class videoInput{
 
 /////////////////////////// HANDY FUNCTIONS /////////////////////////////
 
+//Included by e-con
+//Checks whether the current formattype is single byte format
+//Eg: MEDIASUBTYPE_Y800, MEDIASUBTYPE_Y8, MEDIASUBTYPE_GREY
+static bool checkSingleByteFormat(GUID formatType)
+{
+
+    if (formatType == MEDIASUBTYPE_Y800 ||
+        formatType == MEDIASUBTYPE_Y8 ||
+        formatType == MEDIASUBTYPE_GREY)
+    {
+        return true;
+    }
+    else
+    {
+        return false;
+    }
+}
+
 static void MyFreeMediaType(AM_MEDIA_TYPE& mt){
     if (mt.cbFormat != 0)
     {
@@ -761,6 +790,7 @@ videoDevice::videoDevice(){
     setupStarted = false;
     specificFormat = false;
     autoReconnect = false;
+    convertRGB = true;
     requestedFrameTime = -1;
 
     pBuffer = 0;
@@ -788,7 +818,20 @@ void videoDevice::setSize(int w, int h){
     {
         width = w;
         height = h;
-        videoSize = w*h*3;
+
+        if (checkSingleByteFormat(pAmMediaType->subtype))
+        {
+            videoSize = w * h;
+        }
+        else if (pAmMediaType->subtype == MEDIASUBTYPE_Y16)
+        {
+            videoSize = w * h * 2;
+        }
+        else
+        {
+            videoSize = w * h * 3;
+        }
+
         sizeSet = true;
         pixels = new unsigned char[videoSize];
         pBuffer = new char[videoSize];
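
A small aside on the sizing rule introduced above (illustration only, reusing the checkSingleByteFormat helper and the GUIDs defined earlier in this file): the buffer size is just bytes-per-pixel times the frame area.

    // Sketch only: the bytes-per-pixel rule behind the new setSize() logic.
    static int bytesPerPixelFor(const GUID & subtype)
    {
        if (checkSingleByteFormat(subtype))  return 1;  // Y8 / Y800 / GREY
        if (subtype == MEDIASUBTYPE_Y16)     return 2;  // 16-bit mono
        return 3;                                       // RGB24 and formats converted to it
    }
    // e.g. 640x480: Y8 -> 307200 bytes, Y16 -> 614400 bytes, RGB24 -> 921600 bytes.
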
@@ -1060,6 +1103,8 @@ videoInput::videoInput(){
     mediaSubtypes[17] = MEDIASUBTYPE_Y8;
     mediaSubtypes[18] = MEDIASUBTYPE_GREY;
     mediaSubtypes[19] = MEDIASUBTYPE_I420;
+    mediaSubtypes[20] = MEDIASUBTYPE_BY8;
+    mediaSubtypes[21] = MEDIASUBTYPE_Y16;
 
     //The video formats we support
     formatTypes[VI_NTSC_M] = AnalogVideo_NTSC_M;
@@ -1181,6 +1226,9 @@ bool videoInput::setupDeviceFourcc(int deviceNumber, int w, int h,int fourcc){
         GUID *mediaType = getMediaSubtypeFromFourcc(fourcc);
         if ( mediaType ) {
             setAttemptCaptureSize(deviceNumber,w,h,*mediaType);
+        } else {
+            DebugPrintOut("SETUP: Unknown GUID \n");
+            return false;
         }
     } else {
         setAttemptCaptureSize(deviceNumber,w,h);
@@ -1448,6 +1496,37 @@ int videoInput::getSize(int id) const
 
 }
 
+// ----------------------------------------------------------------------
+//
+//
+// ----------------------------------------------------------------------
+
+bool videoInput::getConvertRGB(int id)
+{
+    if (isDeviceSetup(id))
+    {
+        return VDList[id]->convertRGB;
+    }
+    else
+    {
+        return false;
+    }
+
+}
+
+bool videoInput::setConvertRGB(int id, bool enable)
+{
+    if (isDeviceSetup(id))
+    {
+        VDList[id]->convertRGB = enable;
+        return true;
+    }
+    else
+    {
+        return false;
+    }
+}
+
 
 // ----------------------------------------------------------------------
 // Uses a supplied buffer
@@ -1472,7 +1551,24 @@ bool videoInput::getPixels(int id, unsigned char * dstBuffer, bool flipRedAndBlu
             int height = VDList[id]->height;
             int width = VDList[id]->width;
 
-            processPixels(src, dst, width, height, flipRedAndBlue, flipImage);
+            // Conditional processing for 8/16-bit images (e-Con systems)
+            if (checkSingleByteFormat(VDList[id]->pAmMediaType->subtype))
+            {
+                memcpy(dst, src, width * height);
+            }
+            else if (VDList[id]->pAmMediaType->subtype == MEDIASUBTYPE_Y16)
+            {
+                if (!VDList[id]->convertRGB) {
+                    memcpy(dst, src, width * height * 2);
+                }
+                else {
+                    processPixels(src, dst, width, height, flipRedAndBlue, flipImage, 2);
+                }
+            }
+            else
+            {
+                processPixels(src, dst, width, height, flipRedAndBlue, flipImage);
+            }
             VDList[id]->sgCallback->newFrame = false;
 
             LeaveCriticalSection(&VDList[id]->sgCallback->critSection);
@@ -2106,62 +2202,81 @@ bool videoInput::setup(int deviceNumber){
 // You have any combination of those.
 // ----------------------------------------------------------------------
 
-void videoInput::processPixels(unsigned char * src, unsigned char * dst, int width, int height, bool bRGB, bool bFlip){
-
-    int widthInBytes = width * 3;
-    int numBytes = widthInBytes * height;
-
-    if(!bRGB){
-
-        //int x = 0;
-        //int y = 0;
-
-        if(bFlip){
-            for(int y = 0; y < height; y++){
-                memcpy(dst + (y * widthInBytes), src + ( (height -y -1) * widthInBytes), widthInBytes);
-            }
-
-        }else{
-            memcpy(dst, src, numBytes);
-        }
-    }else{
-        if(bFlip){
-
-            int x = 0;
-            int y = (height - 1) * widthInBytes;
-            src += y;
-
-            for(int i = 0; i < numBytes; i+=3){
-                if(x >= width){
-                    x = 0;
-                    src -= widthInBytes*2;
-                }
-
-                *dst = *(src+2);
-                dst++;
-
-                *dst = *(src+1);
-                dst++;
-
-                *dst = *src;
-                dst++;
-
-                src+=3;
-                x++;
-            }
-        }else{
-            for(int i = 0; i < numBytes; i+=3){
-                *dst = *(src+2);
-                dst++;
-
-                *dst = *(src+1);
-                dst++;
-
-                *dst = *src;
-                dst++;
-
-                src+=3;
-            }
-        }
-    }
-}
+void videoInput::processPixels(unsigned char * src, unsigned char * dst, int width, int height, bool bRGB, bool bFlip, int bytesperpixel){
+
+    int widthInBytes = width * bytesperpixel;
+    int numBytes = widthInBytes * height;
+
+    if (bytesperpixel == 2) {
+        for (int i = 0; i < width*height; i++) {
+            if (bytesperpixel == 2) {
+                *dst = (uint8_t) (*((uint16_t*) src) >> 8);
+                dst++;
+
+                *dst = (uint8_t) (*((uint16_t*)src) >> 8);
+                dst++;
+
+                *dst = (uint8_t) (*((uint16_t*)src) >> 8);
+                dst++;
+
+                src += 2;
+            }
+        }
+    }
+    else
+    {
+        if(!bRGB){
+
+            //int x = 0;
+            //int y = 0;
+
+            if(bFlip){
+                for(int y = 0; y < height; y++){
+                    memcpy(dst + (y * widthInBytes), src + ( (height -y -1) * widthInBytes), widthInBytes);
+                }
+
+            }else{
+                memcpy(dst, src, numBytes);
+            }
+        }else{
+            if(bFlip){
+
+                int x = 0;
+                int y = (height - 1) * widthInBytes;
+                src += y;
+
+                for(int i = 0; i < numBytes; i+=3){
+                    if(x >= width){
+                        x = 0;
+                        src -= widthInBytes*2;
+                    }
+
+                    *dst = *(src+2);
+                    dst++;
+
+                    *dst = *(src+1);
+                    dst++;
+
+                    *dst = *src;
+                    dst++;
+
+                    src+=3;
+                    x++;
+                }
+            }
+            else{
+                for(int i = 0; i < numBytes; i+=3){
+                    *dst = *(src+2);
+                    dst++;
+
+                    *dst = *(src+1);
+                    dst++;
+
+                    *dst = *src;
+                    dst++;
+
+                    src+=3;
+                }
+            }
+        }
+    }
+}
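
When RGB conversion stays enabled for a Y16 stream, the bytesperpixel == 2 path above keeps only the high byte of each 16-bit sample and repeats it across the three output channels. Roughly, per pixel (a sketch, not code from the patch):

    // Sketch of the per-pixel Y16 -> 8-bit mapping used above; assumes one
    // little-endian 16-bit sample at src and three output bytes at dst.
    #include <cstdint>
    #include <cstring>

    static void y16SampleToBGR(const unsigned char * src, unsigned char * dst)
    {
        std::uint16_t sample;
        std::memcpy(&sample, src, sizeof(sample));                        // read the 16-bit value
        const std::uint8_t gray = static_cast<std::uint8_t>(sample >> 8); // keep the high byte
        dst[0] = dst[1] = dst[2] = gray;                                  // B = G = R
    }
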
@@ -2192,6 +2307,8 @@ void videoInput::getMediaSubtypeAsString(GUID type, char * typeAsString){
     else if(type == MEDIASUBTYPE_Y8) sprintf(tmpStr, "Y8");
     else if(type == MEDIASUBTYPE_GREY) sprintf(tmpStr, "GREY");
     else if(type == MEDIASUBTYPE_I420) sprintf(tmpStr, "I420");
+    else if (type == MEDIASUBTYPE_BY8) sprintf(tmpStr, "BY8");
+    else if (type == MEDIASUBTYPE_Y16) sprintf(tmpStr, "Y16");
     else sprintf(tmpStr, "OTHER");
 
     memcpy(typeAsString, tmpStr, sizeof(char)*8);
@@ -2333,6 +2450,10 @@ void videoInput::getCameraPropertyAsString(int prop, char * propertyAsString){
     memcpy(propertyAsString, tmpStr, sizeof(char)*16);
 }
 
+GUID videoInput::getMediasubtype(int deviceID)
+{
+    return VDList[deviceID]->pAmMediaType->subtype;
+}
 
 //-------------------------------------------------------------------------------------------
 static void findClosestSizeAndSubtype(videoDevice * VD, int widthIn, int heightIn, int &widthOut, int &heightOut, GUID & mediatypeOut){
@@ -2720,7 +2841,17 @@ int videoInput::start(int deviceID, videoDevice *VD){
     ZeroMemory(&mt,sizeof(AM_MEDIA_TYPE));
 
     mt.majortype = MEDIATYPE_Video;
-    mt.subtype = MEDIASUBTYPE_RGB24;
+
+    // Disable format conversion if using 8/16-bit data (e-Con systems)
+    if (checkSingleByteFormat(VD->pAmMediaType->subtype) || (VD->pAmMediaType->subtype == MEDIASUBTYPE_Y16)) {
+        DebugPrintOut("SETUP: Not converting frames to RGB.\n");
+        mt.subtype = VD->pAmMediaType->subtype;
+    }
+    else
+    {
+        DebugPrintOut("SETUP: Converting frames to RGB.\n");
+        mt.subtype = MEDIASUBTYPE_RGB24; //Making it RGB24, does conversion from YUV to RGB
+    }
     mt.formattype = FORMAT_VideoInfo;
 
     //VD->pAmMediaType->subtype = VD->videoType;
@@ -3261,12 +3392,19 @@ bool VideoCapture_DShow::setProperty(int propIdx, double propVal)
 
     case CV_CAP_PROP_FOURCC:
         m_fourcc = (int)(unsigned long)(propVal);
+        m_width = (int)getProperty(CAP_PROP_FRAME_WIDTH);
+        m_height = (int)getProperty(CAP_PROP_FRAME_HEIGHT);
+
         if (-1 == m_fourcc)
         {
            // following cvCreateVideo usage will pop up caprturepindialog here if fourcc=-1
            // TODO - how to create a capture pin dialog
         }
-        handled = true;
+        else
+        {
+            handled = true;
+        }
+
         break;
 
     case CV_CAP_PROP_FPS:
@@ -3295,6 +3433,12 @@ bool VideoCapture_DShow::setProperty(int propIdx, double propVal)
         }
         return g_VI.setVideoSettingCamera(m_index, CameraControl_Focus, currentFocus, enabled ? CameraControl_Flags_Auto | CameraControl_Flags_Manual : CameraControl_Flags_Manual, enabled ? true : false);
     }
+
+    case CV_CAP_PROP_CONVERT_RGB:
+    {
+        return g_VI.setConvertRGB(m_index, cvRound(propVal) == 1);
+    }
+
     }
 
     if (handled)
@@ -3302,7 +3446,7 @@ bool VideoCapture_DShow::setProperty(int propIdx, double propVal)
     // a stream setting
     if (m_width > 0 && m_height > 0)
     {
-        if (m_width != g_VI.getWidth(m_index) || m_height != g_VI.getHeight(m_index) )//|| fourcc != VI.getFourcc(index) )
+        if (m_width != g_VI.getWidth(m_index) || m_height != g_VI.getHeight(m_index) || m_fourcc != g_VI.getFourcc(m_index) )
         {
             int fps = static_cast<int>(g_VI.getFPS(m_index));
             g_VI.stopDevice(m_index);
@@ -3313,10 +3457,14 @@ bool VideoCapture_DShow::setProperty(int propIdx, double propVal)
             bool success = g_VI.isDeviceSetup(m_index);
             if (success)
             {
+                DebugPrintOut("SETUP: Updated FourCC\n");
                 m_widthSet = m_width;
                 m_heightSet = m_height;
                 m_width = m_height = m_fourcc = -1;
             }
+            else {
+                DebugPrintOut("SETUP: Couldn't update FourCC\n");
+            }
             return success;
         }
         return true;
@@ -3366,7 +3514,18 @@ bool VideoCapture_DShow::grabFrame()
 }
 bool VideoCapture_DShow::retrieveFrame(int, OutputArray frame)
 {
-    frame.create(Size(g_VI.getWidth(m_index), g_VI.getHeight(m_index)), CV_8UC3);
+    int w = g_VI.getWidth(m_index), h = g_VI.getHeight(m_index);
+    bool convertRGB = g_VI.getConvertRGB(m_index);
+
+    // Set suitable output matrix type (e-Con systems)
+    if (checkSingleByteFormat(g_VI.getMediasubtype(m_index))){
+        frame.create(Size(w, h), CV_8UC1);
+    } else if (g_VI.getMediasubtype(m_index) == MEDIASUBTYPE_Y16 && !convertRGB) {
+        frame.create(Size(w, h), CV_16UC1);
+    } else {
+        frame.create(Size(w, h), CV_8UC3);
+    }
+
     cv::Mat mat = frame.getMat();
     return g_VI.getPixels(m_index, mat.ptr(), false, true );
 }
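
Tying this back to the usage sketch near the top: the Mat type produced by retrieveFrame now depends on the negotiated media subtype and on the convert-RGB flag. A small illustrative helper (not part of the patch) that names the three cases handled above:

    // Illustration only: describe which output type the DShow backend chose.
    #include <opencv2/core.hpp>

    static const char * describeFrameType(const cv::Mat & frame)
    {
        switch (frame.type())
        {
        case CV_8UC1:  return "8-bit mono (Y8 / Y800 / GREY)";
        case CV_16UC1: return "16-bit mono (Y16 with CAP_PROP_CONVERT_RGB off)";
        case CV_8UC3:  return "BGR (default conversion path)";
        default:       return "unexpected type";
        }
    }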