opencv/modules/videoio/src/cap_avfoundation.mm


/*
* cap_avfoundation.mm
* For iOS video I/O
* by Xiaochao Yang on 06/15/11, modified from
* cap_qtkit.mm by Nicholas Butko for the Mac OS version.
* Copyright 2011. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "precomp.hpp"
#include "opencv2/imgproc.hpp"
#include <iostream>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/NSException.h>
/********************** Declaration of class headers ************************/
/*****************************************************************************
*
* CaptureDelegate Declaration.
*
* CaptureDelegate is notified on a separate thread by the OS whenever there
* is a new frame. When "updateImage" is called from the main thread, it
* copies this new frame into an IplImage, but only if this frame has not
* been copied before. When "getOutput" is called from the main thread,
* it gives the last copied IplImage.
*
*****************************************************************************/
#define DISABLE_AUTO_RESTART 999
@interface CaptureDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
{
int newFrame;
CVImageBufferRef mCurrentImageBuffer;
char* imagedata;
IplImage* image;
char* bgr_imagedata;
IplImage* bgr_image;
IplImage* bgr_image_r90;
size_t currSize;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection;
- (int)updateImage;
- (IplImage*)getOutput;
@end
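/*
A minimal sketch (not part of the build) of how this delegate is driven,
assuming a session configured as in CvCaptureCAM::startCaptureDevice below;
"videoOutput" and "queue" are hypothetical placeholders:

CaptureDelegate* delegate = [[CaptureDelegate alloc] init];
[videoOutput setSampleBufferDelegate:delegate queue:queue]; // OS thread stores frames via captureOutput:...
if ([delegate updateImage]) {               // main thread: copy the newest frame, if any
IplImage* frame = [delegate getOutput];     // last copied frame: BGR, rotated 90 degrees
}
*/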
/*****************************************************************************
*
* CvCaptureCAM Declaration.
*
* CvCaptureCAM is the instantiation of a capture source for cameras.
*
*****************************************************************************/
class CvCaptureCAM : public CvCapture {
public:
CvCaptureCAM(int cameraNum = -1) ;
~CvCaptureCAM();
virtual bool grabFrame();
virtual IplImage* retrieveFrame(int);
virtual IplImage* queryFrame();
virtual double getProperty(int property_id) const;
virtual bool setProperty(int property_id, double value);
virtual int didStart();
private:
AVCaptureSession *mCaptureSession;
AVCaptureDeviceInput *mCaptureDeviceInput;
AVCaptureVideoDataOutput *mCaptureDecompressedVideoOutput;
AVCaptureDevice *mCaptureDevice;
CaptureDelegate *capture;
int startCaptureDevice(int cameraNum);
void stopCaptureDevice();
void setWidthHeight();
bool grabFrame(double timeOut);
int camNum;
int width;
int height;
int settingWidth;
int settingHeight;
int started;
int disableAutoRestart;
};
/*****************************************************************************
*
* CvCaptureFile Declaration.
*
* CvCaptureFile is the instantiation of a capture source for video files.
*
*****************************************************************************/
class CvCaptureFile : public CvCapture {
public:
CvCaptureFile(const char* filename) ;
~CvCaptureFile();
virtual bool grabFrame();
virtual IplImage* retrieveFrame(int);
virtual IplImage* queryFrame();
virtual double getProperty(int property_id) const;
virtual bool setProperty(int property_id, double value);
virtual int didStart();
private:
AVAssetReader *mMovieReader;
char* imagedata;
IplImage* image;
char* bgr_imagedata;
IplImage* bgr_image;
size_t currSize;
IplImage* retrieveFramePixelBuffer();
double getFPS();
int movieWidth;
int movieHeight;
double movieFPS;
double currentFPS;
double movieDuration;
int changedPos;
int started;
};
/*****************************************************************************
*
* CvVideoWriter_AVFoundation Declaration.
*
* CvVideoWriter_AVFoundation is the instantiation of a video output class
* for writing video files.
*
*****************************************************************************/
class CvVideoWriter_AVFoundation : public CvVideoWriter{
public:
CvVideoWriter_AVFoundation(const char* filename, int fourcc,
double fps, CvSize frame_size,
int is_color=1);
~CvVideoWriter_AVFoundation();
bool writeFrame(const IplImage* image);
int getCaptureDomain() const CV_OVERRIDE { return cv::CAP_AVFOUNDATION; }
private:
IplImage* argbimage;
AVAssetWriter *mMovieWriter;
AVAssetWriterInput* mMovieWriterInput;
AVAssetWriterInputPixelBufferAdaptor* mMovieWriterAdaptor;
NSString* path;
NSString* codec;
NSString* fileType;
double movieFPS;
CvSize movieSize;
int movieColor;
unsigned long frameCount;
};
/****************** Implementation of interface functions ********************/
CvCapture* cvCreateFileCapture_AVFoundation(const char* filename) {
CvCaptureFile *retval = new CvCaptureFile(filename);
if(retval->didStart())
return retval;
delete retval;
return NULL;
}
CvCapture* cvCreateCameraCapture_AVFoundation(int index ) {
CvCapture* retval = new CvCaptureCAM(index);
if (!((CvCaptureCAM *)retval)->didStart())
cvReleaseCapture(&retval);
return retval;
}
CvVideoWriter* cvCreateVideoWriter_AVFoundation(const char* filename, int fourcc,
double fps, CvSize frame_size,
int is_color) {
return new CvVideoWriter_AVFoundation(filename, fourcc, fps, frame_size,is_color);
}
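/*
A hypothetical client-side sketch (illustrative only) of how the factories
above are reached through OpenCV's C API; camera index 0 is an assumption:

CvCapture* cap = cvCreateCameraCapture(0); // dispatches to cvCreateCameraCapture_AVFoundation
if (cap) {
IplImage* frame = cvQueryFrame(cap);       // grab + retrieve; the frame is owned by the capture
cvReleaseCapture(&cap);
}
*/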
/********************** Implementation of Classes ****************************/
/*****************************************************************************
*
* CvCaptureCAM Implementation.
*
* CvCaptureCAM is the instantiation of a capture source for cameras.
*
*****************************************************************************/
CvCaptureCAM::CvCaptureCAM(int cameraNum) {
mCaptureSession = nil;
mCaptureDeviceInput = nil;
mCaptureDecompressedVideoOutput = nil;
capture = nil;
width = 0;
height = 0;
settingWidth = 0;
settingHeight = 0;
disableAutoRestart = 0;
camNum = cameraNum;
if (!startCaptureDevice(camNum)) {
std::cout << "Warning, camera failed to properly initialize!" << std::endl;
started = 0;
} else {
started = 1;
}
}
CvCaptureCAM::~CvCaptureCAM() {
stopCaptureDevice();
//cout << "Cleaned up camera." << endl;
}
int CvCaptureCAM::didStart() {
return started;
}
bool CvCaptureCAM::grabFrame() {
return grabFrame(5);
}
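// Polls the delegate roughly every 5 ms, servicing the current run loop so
// that the AVFoundation callback can deliver frames, until updateImage
// reports a new frame or timeOut (in seconds) elapses. Returns true if a
// frame arrived in time.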
bool CvCaptureCAM::grabFrame(double timeOut) {
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
double sleepTime = 0.005;
double total = 0;
NSDate *loopUntil = [NSDate dateWithTimeIntervalSinceNow:sleepTime];
while (![capture updateImage] && (total += sleepTime)<=timeOut &&
[[NSRunLoop currentRunLoop] runMode: NSDefaultRunLoopMode
beforeDate:loopUntil])
loopUntil = [NSDate dateWithTimeIntervalSinceNow:sleepTime];
[localpool drain];
return total <= timeOut;
}
IplImage* CvCaptureCAM::retrieveFrame(int) {
return [capture getOutput];
}
IplImage* CvCaptureCAM::queryFrame() {
while (!grabFrame()) {
std::cout << "WARNING: Couldn't grab new frame from camera!!!" << std::endl;
/*
cout << "Attempting to restart camera; set capture property DISABLE_AUTO_RESTART to disable." << endl;
stopCaptureDevice();
startCaptureDevice(camNum);
*/
}
return retrieveFrame(0);
}
void CvCaptureCAM::stopCaptureDevice() {
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
[mCaptureSession stopRunning];
[mCaptureSession release];
[mCaptureDeviceInput release];
[mCaptureDecompressedVideoOutput release];
[capture release];
[localpool drain];
}
int CvCaptureCAM::startCaptureDevice(int cameraNum) {
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
capture = [[CaptureDelegate alloc] init];
AVCaptureDevice *device;
NSArray* devices = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]
arrayByAddingObjectsFromArray:[AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed]];
if ([devices count] == 0) {
std::cout << "AV Foundation didn't find any attached Video Input Devices!" << std::endl;
[localpool drain];
return 0;
}
if (cameraNum >= 0) {
camNum = cameraNum % [devices count];
if (camNum != cameraNum) {
std::cout << "Warning: Max Camera Num is " << [devices count]-1 << "; Using camera " << camNum << std::endl;
}
device = [devices objectAtIndex:camNum];
} else {
device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] ;
}
mCaptureDevice = device;
//int success;
NSError* error;
if (device) {
mCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error] ;
mCaptureSession = [[AVCaptureSession alloc] init] ;
/*
success = [mCaptureSession addInput:mCaptureDeviceInput];
if (!success) {
cout << "AV Foundation failed to start capture session with opened Capture Device" << endl;
[localpool drain];
return 0;
}
*/
mCaptureDecompressedVideoOutput = [[AVCaptureVideoDataOutput alloc] init];
dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
[mCaptureDecompressedVideoOutput setSampleBufferDelegate:capture queue:queue];
dispatch_release(queue);
NSDictionary *pixelBufferOptions ;
if (width > 0 && height > 0) {
pixelBufferOptions = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:1.0*width], (id)kCVPixelBufferWidthKey,
[NSNumber numberWithDouble:1.0*height], (id)kCVPixelBufferHeightKey,
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],
(id)kCVPixelBufferPixelFormatTypeKey,
nil];
} else {
pixelBufferOptions = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],
(id)kCVPixelBufferPixelFormatTypeKey,
nil];
}
//TODO: add a new interface for setting the fps and capture resolution.
[mCaptureDecompressedVideoOutput setVideoSettings:pixelBufferOptions];
mCaptureDecompressedVideoOutput.alwaysDiscardsLateVideoFrames = YES;
#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
mCaptureDecompressedVideoOutput.minFrameDuration = CMTimeMake(1, 30);
#endif
//Slow. 1280x720 for the iPhone4 / iPod back camera; 640x480 for the front camera.
//mCaptureSession.sessionPreset = AVCaptureSessionPresetHigh; // fps ~= 5 slow for OpenCV
mCaptureSession.sessionPreset = AVCaptureSessionPresetMedium; //480*360
if (width == 0 ) width = 480;
if (height == 0 ) height = 360;
[mCaptureSession addInput:mCaptureDeviceInput];
[mCaptureSession addOutput:mCaptureDecompressedVideoOutput];
/*
// Does not work! This is the preferred way (hardware acceleration) to change pixel buffer orientation.
// I'm now using cvTranspose and cvFlip instead, which takes CPU cycles.
AVCaptureConnection *connection = [[mCaptureDecompressedVideoOutput connections] objectAtIndex:0];
if([connection isVideoOrientationSupported]) {
//NSLog(@"Setting pixel buffer orientation");
connection.videoOrientation = AVCaptureVideoOrientationPortrait;
}
*/
[mCaptureSession startRunning];
grabFrame(60);
[localpool drain];
return 1;
}
[localpool drain];
return 0;
}
void CvCaptureCAM::setWidthHeight() {
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
NSDictionary* pixelBufferOptions = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:1.0*width], (id)kCVPixelBufferWidthKey,
[NSNumber numberWithDouble:1.0*height], (id)kCVPixelBufferHeightKey,
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],
(id)kCVPixelBufferPixelFormatTypeKey,
nil];
[mCaptureDecompressedVideoOutput setVideoSettings:pixelBufferOptions];
grabFrame(60);
[localpool drain];
}
// These macros were added to the headers in videoio_c.h:
/*
#define CV_CAP_PROP_IOS_DEVICE_FOCUS 9001
#define CV_CAP_PROP_IOS_DEVICE_EXPOSURE 9002
#define CV_CAP_PROP_IOS_DEVICE_FLASH 9003
#define CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE 9004
#define CV_CAP_PROP_IOS_DEVICE_TORCH 9005
*/
/*
// All available settings are taken from iOS API
enum {
AVCaptureFlashModeOff = 0,
AVCaptureFlashModeOn = 1,
AVCaptureFlashModeAuto = 2
};
typedef NSInteger AVCaptureFlashMode;
enum {
AVCaptureTorchModeOff = 0,
AVCaptureTorchModeOn = 1,
AVCaptureTorchModeAuto = 2
};
typedef NSInteger AVCaptureTorchMode;
enum {
AVCaptureFocusModeLocked = 0,
AVCaptureFocusModeAutoFocus = 1,
AVCaptureFocusModeContinuousAutoFocus = 2,
};
typedef NSInteger AVCaptureFocusMode;
enum {
AVCaptureExposureModeLocked = 0,
AVCaptureExposureModeAutoExpose = 1,
AVCaptureExposureModeContinuousAutoExposure = 2,
};
typedef NSInteger AVCaptureExposureMode;
enum {
AVCaptureWhiteBalanceModeLocked = 0,
AVCaptureWhiteBalanceModeAutoWhiteBalance = 1,
AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance = 2,
};
typedef NSInteger AVCaptureWhiteBalanceMode;
*/
double CvCaptureCAM::getProperty(int property_id) const{
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
/*
NSArray* connections = [mCaptureDeviceInput connections];
QTFormatDescription* format = [[connections objectAtIndex:0] formatDescription];
NSSize s1 = [[format attributeForKey:QTFormatDescriptionVideoCleanApertureDisplaySizeAttribute] sizeValue];
*/
NSArray* ports = mCaptureDeviceInput.ports;
CMFormatDescriptionRef format = [[ports objectAtIndex:0] formatDescription];
CGSize s1 = CMVideoFormatDescriptionGetPresentationDimensions(format, YES, YES);
int w=(int)s1.width, h=(int)s1.height;
[localpool drain];
switch (property_id) {
case CV_CAP_PROP_FRAME_WIDTH:
return w;
case CV_CAP_PROP_FRAME_HEIGHT:
return h;
case CV_CAP_PROP_IOS_DEVICE_FOCUS:
return mCaptureDevice.focusMode;
case CV_CAP_PROP_IOS_DEVICE_EXPOSURE:
return mCaptureDevice.exposureMode;
case CV_CAP_PROP_IOS_DEVICE_FLASH:
return mCaptureDevice.flashMode;
case CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE:
return mCaptureDevice.whiteBalanceMode;
case CV_CAP_PROP_IOS_DEVICE_TORCH:
return mCaptureDevice.torchMode;
default:
return 0;
}
}
bool CvCaptureCAM::setProperty(int property_id, double value) {
switch (property_id) {
case CV_CAP_PROP_FRAME_WIDTH:
width = value;
settingWidth = 1;
if (settingWidth && settingHeight) {
setWidthHeight();
settingWidth =0;
settingHeight = 0;
}
return true;
case CV_CAP_PROP_FRAME_HEIGHT:
height = value;
settingHeight = 1;
if (settingWidth && settingHeight) {
setWidthHeight();
settingWidth =0;
settingHeight = 0;
}
return true;
case CV_CAP_PROP_IOS_DEVICE_FOCUS:
if ([mCaptureDevice isFocusModeSupported:(AVCaptureFocusMode)value]){
NSError* error = nil;
[mCaptureDevice lockForConfiguration:&error];
if (error) return false;
[mCaptureDevice setFocusMode:(AVCaptureFocusMode)value];
[mCaptureDevice unlockForConfiguration];
//NSLog(@"Focus set");
return true;
}else {
return false;
}
case CV_CAP_PROP_IOS_DEVICE_EXPOSURE:
if ([mCaptureDevice isExposureModeSupported:(AVCaptureExposureMode)value]){
NSError* error = nil;
[mCaptureDevice lockForConfiguration:&error];
if (error) return false;
[mCaptureDevice setExposureMode:(AVCaptureExposureMode)value];
[mCaptureDevice unlockForConfiguration];
//NSLog(@"Exposure set");
return true;
}else {
return false;
}
case CV_CAP_PROP_IOS_DEVICE_FLASH:
if ( [mCaptureDevice hasFlash] && [mCaptureDevice isFlashModeSupported:(AVCaptureFlashMode)value]){
NSError* error = nil;
[mCaptureDevice lockForConfiguration:&error];
if (error) return false;
[mCaptureDevice setFlashMode:(AVCaptureFlashMode)value];
[mCaptureDevice unlockForConfiguration];
//NSLog(@"Flash mode set");
return true;
}else {
return false;
}
case CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE:
if ([mCaptureDevice isWhiteBalanceModeSupported:(AVCaptureWhiteBalanceMode)value]){
NSError* error = nil;
[mCaptureDevice lockForConfiguration:&error];
if (error) return false;
[mCaptureDevice setWhiteBalanceMode:(AVCaptureWhiteBalanceMode)value];
[mCaptureDevice unlockForConfiguration];
//NSLog(@"White balance set");
return true;
}else {
return false;
}
case CV_CAP_PROP_IOS_DEVICE_TORCH:
if ([mCaptureDevice hasFlash] && [mCaptureDevice isTorchModeSupported:(AVCaptureTorchMode)value]){
NSError* error = nil;
[mCaptureDevice lockForConfiguration:&error];
if (error) return false;
[mCaptureDevice setTorchMode:(AVCaptureTorchMode)value];
[mCaptureDevice unlockForConfiguration];
//NSLog(@"Torch mode set");
return true;
}else {
return false;
}
case DISABLE_AUTO_RESTART:
disableAutoRestart = value;
return true;
default:
return false;
}
}
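/*
Note that a new capture size only takes effect once BOTH width and height have
been set: setWidthHeight() runs on the second of the two calls. A hypothetical
client-side sketch via OpenCV's C API (the values are placeholders):

cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH,  640);
cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, 480); // triggers setWidthHeight()
*/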
/*****************************************************************************
*
* CaptureDelegate Implementation.
*
* CaptureDelegate is notified on a separate thread by the OS whenever there
* is a new frame. When "updateImage" is called from the main thread, it
* copies this new frame into an IplImage, but only if this frame has not
* been copied before. When "getOutput" is called from the main thread,
* it gives the last copied IplImage.
*
*****************************************************************************/
@implementation CaptureDelegate
- (id)init {
self = [super init];
newFrame = 0;
imagedata = NULL;
bgr_imagedata = NULL;
currSize = 0;
image = NULL;
bgr_image = NULL;
bgr_image_r90 = NULL;
return self;
}
-(void)dealloc {
if (imagedata != NULL) free(imagedata);
if (bgr_imagedata != NULL) free(bgr_imagedata);
cvReleaseImage(&image);
cvReleaseImage(&bgr_image);
cvReleaseImage(&bgr_image_r90);
[super dealloc];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection{
// Setting the orientation on the connection failed, so it stays commented out:
// connection.videoOrientation = AVCaptureVideoOrientationPortrait;
(void)captureOutput;
(void)connection;
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVBufferRetain(imageBuffer);
CVImageBufferRef imageBufferToRelease = mCurrentImageBuffer;
@synchronized (self) {
mCurrentImageBuffer = imageBuffer;
newFrame = 1;
}
CVBufferRelease(imageBufferToRelease);
}
-(IplImage*) getOutput {
//return bgr_image;
return bgr_image_r90;
}
-(int) updateImage {
if (newFrame==0) return 0;
CVPixelBufferRef pixels;
@synchronized (self){
pixels = CVBufferRetain(mCurrentImageBuffer);
newFrame = 0;
}
CVPixelBufferLockBaseAddress(pixels, 0);
uint32_t* baseaddress = (uint32_t*)CVPixelBufferGetBaseAddress(pixels);
size_t width = CVPixelBufferGetWidth(pixels);
size_t height = CVPixelBufferGetHeight(pixels);
size_t rowBytes = CVPixelBufferGetBytesPerRow(pixels);
if (rowBytes != 0) {
if (currSize != rowBytes*height*sizeof(char)) {
currSize = rowBytes*height*sizeof(char);
if (imagedata != NULL) free(imagedata);
if (bgr_imagedata != NULL) free(bgr_imagedata);
imagedata = (char*)malloc(currSize);
bgr_imagedata = (char*)malloc(currSize);
}
memcpy(imagedata, baseaddress, currSize);
if (image == NULL) {
image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 4);
}
image->width = (int)width;
image->height = (int)height;
image->nChannels = 4;
image->depth = IPL_DEPTH_8U;
image->widthStep = (int)rowBytes;
image->imageData = imagedata;
image->imageSize = (int)currSize;
if (bgr_image == NULL) {
bgr_image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 3);
}
bgr_image->width = (int)width;
bgr_image->height = (int)height;
bgr_image->nChannels = 3;
bgr_image->depth = IPL_DEPTH_8U;
bgr_image->widthStep = (int)rowBytes;
bgr_image->imageData = bgr_imagedata;
bgr_image->imageSize = (int)currSize;
cvCvtColor(image, bgr_image, CV_BGRA2BGR);
// The image taken from the buffer is incorrectly rotated, so it is fixed here
// with cvTranspose + cvFlip. There should be an option in the iOS API to rotate
// the buffer output orientation; iOS provides hardware-accelerated rotation
// through the AVCaptureConnection class, but I can't get it to work.
if (bgr_image_r90 == NULL){
bgr_image_r90 = cvCreateImage(cvSize((int)height, (int)width), IPL_DEPTH_8U, 3);
}
cvTranspose(bgr_image, bgr_image_r90);
cvFlip(bgr_image_r90, NULL, 1);
}
CVPixelBufferUnlockBaseAddress(pixels, 0);
CVBufferRelease(pixels);
return 1;
}
@end
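/*
Why cvTranspose + cvFlip amounts to a rotation in updateImage above: for a
width x height source, cvTranspose sends pixel (x,y) to (y,x) in a
height x width destination, and cvFlip with flip_mode=1 then sends (x,y) to
(height-1-x, y); composed, source pixel (x,y) lands at (height-1-y, x),
which is a 90-degree clockwise rotation.
*/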
/*****************************************************************************
*
* CvCaptureFile Implementation.
*
* CvCaptureFile is the instantiation of a capture source for video files.
*
*****************************************************************************/
CvCaptureFile::CvCaptureFile(const char* filename) {
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
mMovieReader = nil;
image = NULL;
bgr_image = NULL;
imagedata = NULL;
bgr_imagedata = NULL;
currSize = 0;
movieWidth = 0;
movieHeight = 0;
movieFPS = 0;
currentFPS = 0;
movieDuration = 0;
changedPos = 0;
started = 0;
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:
[NSURL fileURLWithPath: [NSString stringWithUTF8String:filename]]
options:nil];
AVAssetTrack* videoTrack = nil;
NSArray* tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if ([tracks count] == 1)
{
videoTrack = [tracks objectAtIndex:0];
movieWidth = videoTrack.naturalSize.width;
movieHeight = videoTrack.naturalSize.height;
movieFPS = videoTrack.nominalFrameRate;
currentFPS = movieFPS; //Debugging!! Should be getFPS();
// Duration in ms. Note: duration.value/duration.timescale is integer
// division, so this truncates to whole seconds before scaling to ms.
movieDuration = videoTrack.timeRange.duration.value/videoTrack.timeRange.duration.timescale * 1000;
started = 1;
NSError* error = nil;
mMovieReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
if (error)
NSLog(@"%@", [error localizedDescription]);
NSDictionary* videoSettings =
[NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[mMovieReader addOutput:[AVAssetReaderTrackOutput
assetReaderTrackOutputWithTrack:videoTrack
outputSettings:videoSettings]];
[mMovieReader startReading];
}
/*
// Asynchronously open the video in another thread. Always fail.
[asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:
^{
// The completion block goes here.
dispatch_async(dispatch_get_main_queue(),
^{
AVAssetTrack* ::videoTrack = nil;
NSArray* ::tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if ([tracks count] == 1)
{
videoTrack = [tracks objectAtIndex:0];
movieWidth = videoTrack.naturalSize.width;
movieHeight = videoTrack.naturalSize.height;
movieFPS = videoTrack.nominalFrameRate;
currentFPS = movieFPS; //Debugging !! should be getFPS();
//Debugging. need to be checked
movieDuration = videoTrack.timeRange.duration.value/videoTrack.timeRange.duration.timescale * 1000;
started = 1;
NSError* ::error = nil;
// mMovieReader is a member variable
mMovieReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
if (error)
NSLog(@"%@", [error localizedDescription]);
NSDictionary* ::videoSettings =
[NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[mMovieReader addOutput:[AVAssetReaderTrackOutput
assetReaderTrackOutputWithTrack:videoTrack
outputSettings:videoSettings]];
[mMovieReader startReading];
}
});
}];
*/
[localpool drain];
}
CvCaptureFile::~CvCaptureFile() {
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
if (imagedata != NULL) free(imagedata);
if (bgr_imagedata != NULL) free(bgr_imagedata);
cvReleaseImage(&image);
cvReleaseImage(&bgr_image);
[mMovieReader release];
[localpool drain];
}
int CvCaptureFile::didStart() {
return started;
}
bool CvCaptureFile::grabFrame() {
// Nothing to do here; all of the work happens in retrieveFramePixelBuffer().
currentFPS = movieFPS;
return true;
/*
double t1 = getProperty(CV_CAP_PROP_POS_MSEC);
[mCaptureSession stepForward];
double t2 = getProperty(CV_CAP_PROP_POS_MSEC);
if (t2>t1 && !changedPos) {
currentFPS = 1000.0/(t2-t1);
} else {
currentFPS = movieFPS;
}
changedPos = 0;
*/
}
IplImage* CvCaptureFile::retrieveFramePixelBuffer() {
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
if (mMovieReader.status != AVAssetReaderStatusReading){
[localpool drain]; // drain the pool on this early return too, so it is not leaked
return NULL;
}
AVAssetReaderOutput * output = [mMovieReader.outputs objectAtIndex:0];
CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
if (!sampleBuffer) {
[localpool drain];
return NULL;
}
CVPixelBufferRef frame = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferRef pixels = CVBufferRetain(frame);
CVPixelBufferLockBaseAddress(pixels, 0);
uint32_t* baseaddress = (uint32_t*)CVPixelBufferGetBaseAddress(pixels);
size_t width = CVPixelBufferGetWidth(pixels);
size_t height = CVPixelBufferGetHeight(pixels);
size_t rowBytes = CVPixelBufferGetBytesPerRow(pixels);
if (rowBytes != 0) {
if (currSize != rowBytes*height*sizeof(char)) {
currSize = rowBytes*height*sizeof(char);
if (imagedata != NULL) free(imagedata);
if (bgr_imagedata != NULL) free(bgr_imagedata);
imagedata = (char*)malloc(currSize);
bgr_imagedata = (char*)malloc(currSize);
}
memcpy(imagedata, baseaddress, currSize);
if (image == NULL) {
image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 4);
}
image->width = (int)width;
image->height = (int)height;
image->nChannels = 4;
image->depth = IPL_DEPTH_8U;
image->widthStep = (int)rowBytes;
image->imageData = imagedata;
image->imageSize = (int)currSize;
if (bgr_image == NULL) {
bgr_image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 3);
}
bgr_image->width = (int)width;
bgr_image->height = (int)height;
bgr_image->nChannels = 3;
bgr_image->depth = IPL_DEPTH_8U;
bgr_image->widthStep = (int)rowBytes;
bgr_image->imageData = bgr_imagedata;
bgr_image->imageSize = (int)currSize;
cvCvtColor(image, bgr_image, CV_BGRA2BGR);
}
CVPixelBufferUnlockBaseAddress(pixels, 0);
CVBufferRelease(pixels);
CMSampleBufferInvalidate(sampleBuffer);
CFRelease(sampleBuffer);
[localpool drain];
return bgr_image;
}
IplImage* CvCaptureFile::retrieveFrame(int) {
return retrieveFramePixelBuffer();
}
IplImage* CvCaptureFile::queryFrame() {
grabFrame();
return retrieveFrame(0);
}
double CvCaptureFile::getFPS() {
/*
if (mCaptureSession == nil) return 0;
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
double now = getProperty(CV_CAP_PROP_POS_MSEC);
double retval = 0;
if (now == 0) {
[mCaptureSession stepForward];
double t2 = getProperty(CV_CAP_PROP_POS_MSEC);
[mCaptureSession stepBackward];
retval = 1000.0 / (t2-now);
} else {
[mCaptureSession stepBackward];
double t2 = getProperty(CV_CAP_PROP_POS_MSEC);
[mCaptureSession stepForward];
retval = 1000.0 / (now-t2);
}
[localpool drain];
return retval;
*/
return 30.0; //TODO: Debugging
}
double CvCaptureFile::getProperty(int /*property_id*/) const{
/*
if (mCaptureSession == nil) return 0;
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
double retval;
QTTime t;
switch (property_id) {
case CV_CAP_PROP_POS_MSEC:
[[mCaptureSession attributeForKey:QTMovieCurrentTimeAttribute] getValue:&t];
retval = t.timeValue * 1000.0 / t.timeScale;
break;
case CV_CAP_PROP_POS_FRAMES:
retval = movieFPS * getProperty(CV_CAP_PROP_POS_MSEC) / 1000;
break;
case CV_CAP_PROP_POS_AVI_RATIO:
retval = (getProperty(CV_CAP_PROP_POS_MSEC)) / (movieDuration );
break;
case CV_CAP_PROP_FRAME_WIDTH:
retval = movieWidth;
break;
case CV_CAP_PROP_FRAME_HEIGHT:
retval = movieHeight;
break;
case CV_CAP_PROP_FPS:
retval = currentFPS;
break;
case CV_CAP_PROP_FOURCC:
default:
retval = 0;
}
[localpool drain];
return retval;
*/
return 1.0; //Debugging
}
bool CvCaptureFile::setProperty(int /*property_id*/, double /*value*/) {
/*
if (mCaptureSession == nil) return false;
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
bool retval = false;
QTTime t;
double ms;
switch (property_id) {
case CV_CAP_PROP_POS_MSEC:
[[mCaptureSession attributeForKey:QTMovieCurrentTimeAttribute] getValue:&t];
t.timeValue = value * t.timeScale / 1000;
[mCaptureSession setCurrentTime:t];
changedPos = 1;
retval = true;
break;
case CV_CAP_PROP_POS_FRAMES:
ms = (value*1000.0 -5)/ currentFPS;
retval = setProperty(CV_CAP_PROP_POS_MSEC, ms);
break;
case CV_CAP_PROP_POS_AVI_RATIO:
ms = value * movieDuration;
retval = setProperty(CV_CAP_PROP_POS_MSEC, ms);
break;
case CV_CAP_PROP_FRAME_WIDTH:
//retval = movieWidth;
break;
case CV_CAP_PROP_FRAME_HEIGHT:
//retval = movieHeight;
break;
case CV_CAP_PROP_FPS:
//etval = currentFPS;
break;
case CV_CAP_PROP_FOURCC:
default:
retval = false;
}
[localpool drain];
return retval;
*/
return true;
}
/*****************************************************************************
*
* CvVideoWriter Implementation.
*
* CvVideoWriter is the instantiation of a video output class
*
*****************************************************************************/
CvVideoWriter_AVFoundation::CvVideoWriter_AVFoundation(const char* filename, int fourcc,
double fps, CvSize frame_size,
int is_color) {
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
frameCount = 0;
movieFPS = fps;
movieSize = frame_size;
movieColor = is_color;
argbimage = cvCreateImage(movieSize, IPL_DEPTH_8U, 4);
path = [[[NSString stringWithCString:filename encoding:NSASCIIStringEncoding] stringByExpandingTildeInPath] retain];
/*
AVFileTypeQuickTimeMovie
UTI for the QuickTime movie file format.
The value of this UTI is com.apple.quicktime-movie. Files are identified with the .mov and .qt extensions.
AVFileTypeMPEG4
UTI for the MPEG-4 file format.
The value of this UTI is public.mpeg-4. Files are identified with the .mp4 extension.
AVFileTypeAppleM4V
UTI for the iTunes video file format.
The value of this UTI is com.apple.mpeg-4-video. Files are identified with the .m4v extension.
AVFileType3GPP
UTI for the 3GPP file format.
The value of this UTI is public.3gpp. Files are identified with the .3gp, .3gpp, and .sdv extensions.
*/
NSString *fileExt =[[[path pathExtension] lowercaseString] copy];
if ([fileExt isEqualToString:@"mov"] || [fileExt isEqualToString:@"qt"]){
fileType = [AVFileTypeQuickTimeMovie copy];
}else if ([fileExt isEqualToString:@"mp4"]){
fileType = [AVFileTypeMPEG4 copy];
}else if ([fileExt isEqualToString:@"m4v"]){
fileType = [AVFileTypeAppleM4V copy];
#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
}else if ([fileExt isEqualToString:@"3gp"] || [fileExt isEqualToString:@"3gpp"] || [fileExt isEqualToString:@"sdv"] ){
fileType = [AVFileType3GPP copy];
#endif
} else{
fileType = [AVFileTypeMPEG4 copy]; //default mp4
}
[fileExt release];
char cc[5];
cc[0] = fourcc & 255;
cc[1] = (fourcc >> 8) & 255;
cc[2] = (fourcc >> 16) & 255;
cc[3] = (fourcc >> 24) & 255;
cc[4] = 0;
int cc2 = CV_FOURCC(cc[0], cc[1], cc[2], cc[3]);
if (cc2!=fourcc) {
std::cout << "WARNING: Didn't properly encode FourCC. Expected " << fourcc
<< " but got " << cc2 << "." << std::endl;
//exception;
}
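// For reference, CV_FOURCC packs four characters little-endian:
// CV_FOURCC(c1,c2,c3,c4) == (c1&255) | (c2&255)<<8 | (c3&255)<<16 | (c4&255)<<24,
// so CV_FOURCC('M','J','P','G') == 0x47504A4D; the shifts above just reverse that packing.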
// Two codecs are supported: AVVideoCodecH264 and AVVideoCodecJPEG.
// On the iPhone 3G, H264 is not supported.
if (fourcc == CV_FOURCC('J','P','E','G') || fourcc == CV_FOURCC('j','p','e','g') ||
fourcc == CV_FOURCC('M','J','P','G') || fourcc == CV_FOURCC('m','j','p','g') ){
codec = [AVVideoCodecJPEG copy]; // Use JPEG codec if specified, otherwise H264
}else if(fourcc == CV_FOURCC('H','2','6','4') || fourcc == CV_FOURCC('a','v','c','1')){
codec = [AVVideoCodecH264 copy];
}else{
codec = [AVVideoCodecH264 copy]; // default canonical H264.
}
//NSLog(@"Path: %@", path);
NSError *error = nil;
// Make sure the file does not already exist. Necessary to overwrite??
/*
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:path]){
[fileManager removeItemAtPath:path error:&error];
}
*/
// Wire the writer:
// Supported file types:
// AVFileTypeQuickTimeMovie AVFileTypeMPEG4 AVFileTypeAppleM4V AVFileType3GPP
mMovieWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
fileType:fileType
error:&error];
//NSParameterAssert(mMovieWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
codec, AVVideoCodecKey,
[NSNumber numberWithInt:movieSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:movieSize.height], AVVideoHeightKey,
nil];
mMovieWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
//NSParameterAssert(mMovieWriterInput);
//NSParameterAssert([mMovieWriter canAddInput:mMovieWriterInput]);
[mMovieWriter addInput:mMovieWriterInput];
mMovieWriterAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:mMovieWriterInput sourcePixelBufferAttributes:nil];
//Start a session:
[mMovieWriter startWriting];
[mMovieWriter startSessionAtSourceTime:kCMTimeZero];
if(mMovieWriter.status == AVAssetWriterStatusFailed){
NSLog(@"%@", [mMovieWriter.error localizedDescription]);
// TODO: error handling, cleanup. Throw exception?
// return;
}
[localpool drain];
}
CvVideoWriter_AVFoundation::~CvVideoWriter_AVFoundation() {
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
[mMovieWriterInput markAsFinished];
[mMovieWriter finishWriting];
[mMovieWriter release];
[mMovieWriterInput release];
[mMovieWriterAdaptor release];
[path release];
[codec release];
[fileType release];
cvReleaseImage(&argbimage);
[localpool drain];
}
bool CvVideoWriter_AVFoundation::writeFrame(const IplImage* iplimage) {
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
// writer status check
if (![mMovieWriterInput isReadyForMoreMediaData] || mMovieWriter.status != AVAssetWriterStatusWriting ) {
NSLog(@"[mMovieWriterInput isReadyForMoreMediaData] Not ready for media data or ...");
NSLog(@"mMovieWriter.status: %d. Error: %@", (int)mMovieWriter.status, [mMovieWriter.error localizedDescription]);
[localpool drain];
return false;
}
BOOL success = FALSE;
if (iplimage->height!=movieSize.height || iplimage->width!=movieSize.width){
std::cout<<"Frame size does not match video size."<<std::endl;
[localpool drain];
return false;
}
if (movieColor) {
//assert(iplimage->nChannels == 3);
cvCvtColor(iplimage, argbimage, CV_BGR2BGRA);
}else{
//assert(iplimage->nChannels == 1);
cvCvtColor(iplimage, argbimage, CV_GRAY2BGRA);
}
//IplImage -> CGImage conversion
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
NSData *nsData = [NSData dataWithBytes:argbimage->imageData length:argbimage->imageSize];
CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)nsData);
CGImageRef cgImage = CGImageCreate(argbimage->width, argbimage->height,
argbimage->depth, argbimage->depth * argbimage->nChannels, argbimage->widthStep,
colorSpace, kCGImageAlphaLast|kCGBitmapByteOrderDefault,
provider, NULL, false, kCGRenderingIntentDefault);
//CGImage -> CVPixelBufferRef conversion
CVPixelBufferRef pixelBuffer = NULL;
CFDataRef cfData = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));
int status = CVPixelBufferCreateWithBytes(NULL,
movieSize.width,
movieSize.height,
kCVPixelFormatType_32BGRA,
(void*)CFDataGetBytePtr(cfData),
CGImageGetBytesPerRow(cgImage),
NULL,
0,
NULL,
&pixelBuffer);
if(status == kCVReturnSuccess){
success = [mMovieWriterAdaptor appendPixelBuffer:pixelBuffer
withPresentationTime:CMTimeMake(frameCount, movieFPS)];
}
//cleanup
CFRelease(cfData);
CVPixelBufferRelease(pixelBuffer);
CGImageRelease(cgImage);
CGDataProviderRelease(provider);
CGColorSpaceRelease(colorSpace);
[localpool drain];
if (success) {
frameCount ++;
//NSLog(@"Frame #%d", frameCount);
return true;
}else{
NSLog(@"Frame appendPixelBuffer failed.");
return false;
}
}
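/*
A hypothetical client-side sketch (illustrative only) of driving this writer
through OpenCV's C API; the file name, codec, fps and size are placeholders:

CvVideoWriter* writer = cvCreateVideoWriter("out.mov",
CV_FOURCC('H','2','6','4'), 30.0, cvSize(640, 480), 1);
cvWriteFrame(writer, frame);   // frame: a 3-channel BGR IplImage matching cvSize(640, 480)
cvReleaseVideoWriter(&writer); // finishes the movie via the destructor above
*/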