Merge pull request #4047 from MSOpenTech:videoio-refactor-contrib

Vadim Pisarevsky 2015-05-23 09:02:10 +00:00
commit f5be8f6c77
9 changed files with 134 additions and 134 deletions

View File

@ -54,9 +54,12 @@ CV_EXPORTS void winrt_startMessageLoop(void callback(Args...), Args... args);
/** @brief
@note
Sets the reporter method for the HighguiAssist singleton. Starts the main OpenCV code as
an async thread in WinRT. See VideoCapture for an example of the callback implementation.
Starts (1) the frame-grabbing loop and (2) the message loop.
1. The function passed as an argument must implement the common OCV frame-reading
pattern (see the cv::VideoCapture documentation) AND call cv::winrt_imshow().
2. The message-processing loop is required to overcome WinRT container and type
conversion restrictions; OCV provides a default implementation.
Here is how the class can be used:
@code
void cvMain()
{

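A minimal sketch of the callback pattern described in note (1) above, since the @code example is cut off by the diff context. The device index, loop shape, and frame handling below are illustrative assumptions, not the header's actual example:

    // hedged sketch of an OpenCV "main" callback for WinRT
    void cvMain()
    {
        cv::VideoCapture cam;
        cam.open(0);                    // device init is forwarded to the UI thread (async)

        cv::Mat frame;
        for (;;)
        {
            // common OCV frame-reading pattern (see cv::VideoCapture)
            if (!cam.read(frame) || frame.empty())
                continue;               // grabber may not be started yet

            // ... per-frame processing would go here ...

            cv::winrt_imshow();         // hand the current frame to the XAML image element
        }
    }

    // started from the application, e.g.: cv::winrt_startMessageLoop(cvMain);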
View File

@ -729,7 +729,7 @@ VideoCapture& VideoCapture::operator >> (Mat& image)
bridge.bIsFrameNew = false;
// needed here because setting the Mat 'image' directly is not allowed through OutputArray in read()
Mat m(bridge.height, bridge.width, CV_8UC3, p);
Mat m(bridge.getHeight(), bridge.getWidth(), CV_8UC3, p);
image = m;
}
}
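For context on the change above: constructing a cv::Mat from a raw pointer, as done with p and the bridge's getters, only wraps the existing buffer; no pixels are copied and the Mat does not take ownership. A standalone sketch of that behaviour, with made-up sizes and a stand-in buffer:

    // hedged sketch: wrapping an existing pixel buffer in a Mat header (no copy)
    #include <opencv2/core.hpp>
    #include <vector>

    int main()
    {
        const int width = 640, height = 480;                    // illustrative sizes
        std::vector<unsigned char> buffer(width * height * 3);  // stand-in for the bridge's input buffer

        // the Mat only wraps 'buffer', just like Mat m(bridge.getHeight(), bridge.getWidth(), CV_8UC3, p)
        cv::Mat m(height, width, CV_8UC3, buffer.data());

        cv::Mat image = m;   // shallow copy: 'image' still refers to 'buffer'
        return 0;
    }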

View File

@ -227,7 +227,6 @@ HRESULT MediaStreamSink::IsMediaTypeSupported(__in IMFMediaType *mediaType, __de
HRESULT hr = ExceptionBoundary([this, mediaType, closestMediaType, &supported]()
{
auto lock = _lock.LockExclusive();
HRESULT hr = S_OK;
if (closestMediaType != nullptr)
{
@ -281,7 +280,6 @@ HRESULT MediaStreamSink::SetCurrentMediaType(__in IMFMediaType *mediaType)
return ExceptionBoundary([this, mediaType]()
{
auto lock = _lock.LockExclusive();
HRESULT hr = S_OK;
CHKNULL(mediaType);

View File

@ -45,7 +45,7 @@ using namespace ::std;
/***************************** VideoioBridge class ******************************/
// non-blocking
void VideoioBridge::requestForUIthreadAsync(int action, int widthp, int heightp)
void VideoioBridge::requestForUIthreadAsync(int action)
{
reporter.report(action);
}
@ -80,10 +80,79 @@ void VideoioBridge::allocateOutputBuffers()
backOutputBuffer = ref new WriteableBitmap(width, height);
}
// performed on UI thread
void VideoioBridge::allocateBuffers(int width, int height)
{
// allocate input Mats (bgra8 = CV_8UC4, RGB24 = CV_8UC3)
frontInputMat.create(height, width, CV_8UC3);
backInputMat.create(height, width, CV_8UC3);
frontInputPtr = frontInputMat.ptr(0);
backInputPtr = backInputMat.ptr(0);
allocateOutputBuffers();
}
// performed on UI thread
bool VideoioBridge::openCamera()
{
// buffers must be alloc'd on the UI thread
allocateBuffers(width, height);
// nb. video capture device init must be done on the UI thread
if (!Video::getInstance().isStarted())
{
Video::getInstance().initGrabber(deviceIndex, width, height);
return true;
}
return false;
}
// nb. performed on the UI thread
void VideoioBridge::updateFrameContainer()
{
// copy output Mat to WBM
Video::getInstance().CopyOutput();
// set XAML image element with image WBM
cvImage->Source = backOutputBuffer;
}
void VideoioBridge::imshow()
{
VideoioBridge::getInstance().swapOutputBuffers();
VideoioBridge::getInstance().requestForUIthreadAsync(cv::UPDATE_IMAGE_ELEMENT);
swapOutputBuffers();
requestForUIthreadAsync(cv::UPDATE_IMAGE_ELEMENT);
}
int VideoioBridge::getDeviceIndex()
{
return deviceIndex;
}
void VideoioBridge::setDeviceIndex(int index)
{
deviceIndex = index;
}
int VideoioBridge::getWidth()
{
return width;
}
int VideoioBridge::getHeight()
{
return height;
}
void VideoioBridge::setWidth(int _width)
{
width = _width;
}
void VideoioBridge::setHeight(int _height)
{
height = _height;
}
// end
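requestForUIthreadAsync() above stays non-blocking because it only calls reporter.report(action); the action code reaches the UI thread through the progress channel of a WinRT async operation. A sketch of that marshalling mechanism, assuming the handler is subscribed from the XAML UI thread; the function names below are illustrative and not the code path used by winrt_startMessageLoop:

    // hedged sketch: delivering an int action code to the UI thread via PPL progress
    #include <ppltasks.h>

    using namespace Concurrency;
    using namespace Windows::Foundation;

    IAsyncActionWithProgress<int>^ startBackgroundLoop()
    {
        return create_async([](progress_reporter<int> reporter)
        {
            // background thread: non-blocking "request" to the UI side
            reporter.report(1 /* e.g. OPEN_CAMERA */);
        });
    }

    void subscribeFromUiThread()   // call this on the XAML UI thread
    {
        auto op = startBackgroundLoop();
        op->Progress = ref new AsyncActionProgressHandler<int>(
            [](IAsyncActionWithProgress<int>^, int action)
        {
            // raised in the apartment that subscribed, i.e. the UI thread,
            // which is what lets the handler touch XAML elements safely;
            // dispatch on 'action' here (OPEN_CAMERA, CLOSE_CAMERA, UPDATE_IMAGE_ELEMENT)
            (void)action;
        });
    }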

View File

@ -50,19 +50,27 @@ public:
static VideoioBridge& getInstance();
// call after initialization
void setReporter(Concurrency::progress_reporter<int> pr) { reporter = pr; }
// to be called from cvMain via cap_winrt on bg thread - non-blocking (async)
void requestForUIthreadAsync( int action, int width=0, int height=0 );
void requestForUIthreadAsync(int action);
// TODO: modify in window.cpp: void cv::imshow( const String& winname, InputArray _img )
void imshow(/*cv::InputArray matToShow*/); // shows Mat in the cvImage element
void swapInputBuffers();
void allocateOutputBuffers();
void swapOutputBuffers();
void updateFrameContainer();
bool openCamera();
void allocateBuffers(int width, int height);
int getDeviceIndex();
void setDeviceIndex(int index);
int getWidth();
void setWidth(int width);
int getHeight();
void setHeight(int height);
int deviceIndex, width, height;
std::atomic<bool> bIsFrameNew;
std::mutex inputBufferMutex; // input is double buffered
unsigned char * frontInputPtr; // OpenCV reads this
@ -93,4 +101,17 @@ private:
std::atomic<bool> deviceReady;
Concurrency::progress_reporter<int> reporter;
// Mats are wrapped by this singleton class; the current design does not
// support more than one capture device simultaneously.
//
// nb. inputBufferMutex alone could not guarantee that the OpenCV Mats were
// ready to accept data on the UI thread (memory access exceptions were thrown
// even though the buffer address was valid).
// Therefore the Mats are also allocated on the UI thread, before the video
// device is initialized.
cv::Mat frontInputMat;
cv::Mat backInputMat;
int deviceIndex, width, height;
};
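The inputBufferMutex / frontInputPtr / backInputPtr members above implement a classic double-buffer handoff: the producer fills the back buffer, the pointers are swapped under the lock, and OpenCV always reads a complete frame from the front. A generic, self-contained sketch of that pattern, with plain std::vector buffers standing in for the Mats:

    // hedged sketch of the double-buffering idea behind frontInputPtr/backInputPtr
    #include <algorithm>
    #include <atomic>
    #include <cstddef>
    #include <mutex>
    #include <vector>

    struct DoubleBuffer
    {
        std::vector<unsigned char> front, back;   // front: consumer reads, back: producer writes
        std::mutex swapMutex;
        std::atomic<bool> frameNew{false};

        explicit DoubleBuffer(size_t bytes) : front(bytes), back(bytes) {}

        void produce(const unsigned char* src, size_t bytes)   // bytes <= buffer size
        {
            std::copy(src, src + bytes, back.begin());
            std::lock_guard<std::mutex> lock(swapMutex);
            front.swap(back);                     // cheap pointer swap, no pixel copy
            frameNew = true;
        }

        bool consume(std::vector<unsigned char>& dst)
        {
            if (!frameNew.exchange(false)) return false;   // no new frame yet
            std::lock_guard<std::mutex> lock(swapMutex);
            dst = front;                          // copy out the completed frame
            return true;
        }
    };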

View File

@ -43,23 +43,9 @@ using namespace Microsoft::WRL;
using namespace ::std;
// nb. VideoCapture_WinRT is not a singleton, so the Mats are made file statics;
// the current design does not support more than one capture device
// simultaneously.
// nb. inputBufferMutex alone could not guarantee that the OpenCV Mats were
// ready to accept data on the UI thread (memory access exceptions were thrown
// even though the buffer address was valid).
// Therefore the Mats are also allocated on the UI thread before the video
// device is initialized.
static cv::Mat frontInputMat;
static cv::Mat backInputMat;
namespace cv {
/***************************** exported control functions ******************************/
/******************************* exported API functions **************************************/
template <typename ...Args>
void winrt_startMessageLoop(std::function<void(Args...)>&& callback, Args... args)
@ -80,13 +66,13 @@ namespace cv {
switch (action)
{
case OPEN_CAMERA:
winrt_openCamera();
VideoioBridge::getInstance().openCamera();
break;
case CLOSE_CAMERA:
winrt_closeGrabber();
Video::getInstance().closeGrabber();
break;
case UPDATE_IMAGE_ELEMENT:
winrt_updateFrameContainer();
VideoioBridge::getInstance().updateFrameContainer();
break;
}
});
@ -98,7 +84,8 @@ namespace cv {
winrt_startMessageLoop(std::function<void(Args...)>(callback), args...);
}
void winrt_onVisibilityChanged(bool visible) {
void winrt_onVisibilityChanged(bool visible)
{
if (visible)
{
VideoioBridge& bridge = VideoioBridge::getInstance();
@ -108,99 +95,34 @@ namespace cv {
{
if (Video::getInstance().isStarted()) return;
int device = bridge.deviceIndex;
int width = bridge.width;
int height = bridge.height;
int device = bridge.getDeviceIndex();
int width = bridge.getWidth();
int height = bridge.getHeight();
winrt_initGrabber(device, width, height);
Video::getInstance().initGrabber(device, width, height);
}
} else
{
//grabberStarted = false;
winrt_closeGrabber();
Video::getInstance().closeGrabber();
}
}
void winrt_imshow() {
void winrt_imshow()
{
VideoioBridge::getInstance().imshow();
}
void winrt_setFrameContainer(::Windows::UI::Xaml::Controls::Image^ image) {
void winrt_setFrameContainer(::Windows::UI::Xaml::Controls::Image^ image)
{
VideoioBridge::getInstance().cvImage = image;
}
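The exported functions above are the whole surface a WinRT application needs to touch; a hedged sketch of the application-side wiring (the frameImage element, the free functions, and the public header path are assumptions for illustration):

    // hedged sketch of application-side wiring for the exported API above
    #include <opencv2/videoio/cap_winrt.hpp>   // public WinRT video I/O header (assumed path)

    void cvMain();   // the background OpenCV loop (see the callback sketch earlier)

    void startVideo(Windows::UI::Xaml::Controls::Image^ frameImage)
    {
        cv::winrt_setFrameContainer(frameImage);   // XAML Image element that will show frames
        cv::winrt_startMessageLoop(cvMain);        // starts frame grabbing + message loop
    }

    void onVisibilityChanged(Platform::Object^ /*sender*/,
                             Windows::UI::Core::VisibilityChangedEventArgs^ args)
    {
        cv::winrt_onVisibilityChanged(args->Visible);   // lets OpenCV stop/restart the grabber
    }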
/********************************* Internal helpers ************************************/
void winrt_updateFrameContainer()
{
// copy output Mat to WBM
winrt_copyOutput();
// set XAML image element with image WBM
VideoioBridge::getInstance().cvImage->Source = VideoioBridge::getInstance().backOutputBuffer;
}
// performed on UI thread
bool winrt_openCamera()
{
VideoioBridge& bridge = VideoioBridge::getInstance();
int device = bridge.deviceIndex;
int width = bridge.width;
int height = bridge.height;
// buffers must be alloc'd on the UI thread
winrt_allocateBuffers(width, height);
// nb. video capture device init must be done on the UI thread
if (!Video::getInstance().isStarted())
{
winrt_initGrabber(device, width, height);
return true;
}
return false;
}
// performed on UI thread
void winrt_allocateBuffers(int width, int height)
{
VideoioBridge& bridge = VideoioBridge::getInstance();
// allocate input Mats (bgra8 = CV_8UC4, RGB24 = CV_8UC3)
frontInputMat.create(height, width, CV_8UC3);
backInputMat.create(height, width, CV_8UC3);
bridge.frontInputPtr = frontInputMat.ptr(0);
bridge.backInputPtr = backInputMat.ptr(0);
bridge.allocateOutputBuffers();
}
// non-blocking
bool winrt_initGrabber(int device, int w, int h) {
// nb. the Video class is not exported outside of this DLL because the
// CaptureFrameGrabber ref class, as written in the header, does not mix
// well with pure C++ classes
return Video::getInstance().initGrabber(device, w, h);
}
void winrt_closeGrabber() {
Video::getInstance().closeGrabber();
}
// nb. performed on the UI thread
void winrt_copyOutput() {
Video::getInstance().CopyOutput();
}
/********************************* VideoCapture_WinRT class ****************************/
VideoCapture_WinRT::VideoCapture_WinRT(int device) : started(false)
{
VideoioBridge::getInstance().deviceIndex = device;
VideoioBridge::getInstance().setDeviceIndex(device);
}
bool VideoCapture_WinRT::isOpened() const
@ -240,14 +162,13 @@ namespace cv {
if (width == 0) width = 640;
if (height == 0) height = 480;
VideoioBridge::getInstance().width = width;
VideoioBridge::getInstance().height = height;
VideoioBridge::getInstance().setWidth(width);
VideoioBridge::getInstance().setHeight(height);
// nb. Mats will be alloc'd on UI thread
// request device init on UI thread - this does not block, and is async
VideoioBridge::getInstance().requestForUIthreadAsync(OPEN_CAMERA,
outArray.size().width, outArray.size().height);
VideoioBridge::getInstance().requestForUIthreadAsync(OPEN_CAMERA);
started = true;
return false;

View File

@ -42,17 +42,6 @@
namespace cv {
/******************* Internal helpers **************************************/
void winrt_updateFrameContainer();
bool winrt_openCamera();
bool winrt_initGrabber(int device, int w, int h);
void winrt_closeGrabber();
void winrt_copyOutput();
void winrt_allocateBuffers(int width, int height);
/******************* VideoCapture_WinRT class ******************************/
class VideoCapture_WinRT : public IVideoCapture
{
public:

View File

@ -78,7 +78,7 @@ void Video::closeGrabber() {
bGrabberInitInProgress = false;
}
// non-blocking
bool Video::initGrabber(int device, int w, int h) {
// already started?
if (bGrabberInited || bGrabberInitInProgress) return false;
@ -124,7 +124,7 @@ bool Video::initGrabber(int device, int w, int h) {
// for 24 bpp
props->Subtype = MediaEncodingSubtypes::Rgb24; bytesPerPixel = 3;
// format used by XAML & WBM (for testing)
// XAML & WBM use BGRA8, so it would look like
// props->Subtype = MediaEncodingSubtypes::Bgra8; bytesPerPixel = 4;
props->Width = width;
@ -282,22 +282,20 @@ bool Video::listDevicesTask() {
auto settings = ref new MediaCaptureInitializationSettings();
//vector <int> devices;
create_task(DeviceInformation::FindAllAsync(DeviceClass::VideoCapture))
.then([this, &ready](task<DeviceInformationCollection^> findTask)
{
m_devices = findTask.get();
for (size_t i = 0; i < m_devices->Size; i++)
{
// ofVideoDevice deviceInfo;
auto d = m_devices->GetAt(i);
//deviceInfo.bAvailable = true;
//deviceInfo.deviceName = PlatformStringToString(d->Name);
//deviceInfo.hardwareName = deviceInfo.deviceName;
// devices.push_back(deviceInfo);
}
// TODO: collect device data
// for (size_t i = 0; i < m_devices->Size; i++)
// {
// .. deviceInfo;
// auto d = m_devices->GetAt(i);
// deviceInfo.bAvailable = true;
// deviceInfo.deviceName = PlatformStringToString(d->Name);
// deviceInfo.hardwareName = deviceInfo.deviceName;
// }
ready = true;
});
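The TODO above drops the per-device data that the old commented-out block gathered; a sketch of what such a collection could look like, kept outside the Video class for clarity; the deviceNames vector and the blocking wait() are choices made only for this standalone example:

    // hedged sketch: enumerating capture devices and keeping their display names
    #include <string>
    #include <vector>
    #include <ppltasks.h>

    using namespace Concurrency;
    using namespace Windows::Devices::Enumeration;

    std::vector<std::wstring> listVideoDeviceNames()
    {
        std::vector<std::wstring> deviceNames;
        create_task(DeviceInformation::FindAllAsync(DeviceClass::VideoCapture))
            .then([&deviceNames](DeviceInformationCollection^ devices)
        {
            for (unsigned int i = 0; i < devices->Size; i++)
            {
                auto d = devices->GetAt(i);               // DeviceInformation^
                deviceNames.push_back(d->Name->Data());   // camera display name
            }
        }).wait();   // blocking only in this standalone sketch; the real code just sets 'ready'
        return deviceNames;
    }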

View File

@ -34,6 +34,7 @@
class Video {
public:
// non-blocking
bool initGrabber(int device, int w, int h);
void closeGrabber();
bool isStarted();