Merge branch 'android_camera'

Andrey Kamaev 2012-08-24 16:44:52 +04:00
commit 42a4ed6ebf
34 changed files with 1358 additions and 1451 deletions

View File

@@ -39,7 +39,7 @@ using namespace std;
static inline cv::Point2f centerRect(const cv::Rect& r)
{
return cv::Point2f(r.x+((float)r.width)/2, r.y+((float)r.height)/2);
};
}
static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
{
@@ -50,7 +50,7 @@ static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
int y=cvRound(m.y - height/2);
return cv::Rect(x, y, cvRound(width), cvRound(height));
};
}
namespace cv
{
@@ -66,6 +66,7 @@ class cv::DetectionBasedTracker::SeparateDetectionWork
bool run();
void stop();
void resetTracking();
inline bool isWorking()
{
return (stateThread==STATE_THREAD_WORKING_SLEEPING) || (stateThread==STATE_THREAD_WORKING_WITH_IMAGE);
@@ -171,33 +172,33 @@ bool cv::DetectionBasedTracker::SeparateDetectionWork::run()
}
#ifdef __GNUC__
#define CATCH_ALL_AND_LOG(_block) \
do { \
#define CATCH_ALL_AND_LOG(_block) \
do { \
try { \
_block; \
break; \
} \
catch(cv::Exception& e) { \
LOGE0("\n %s: ERROR: OpenCV Exception caught: \n'%s'\n\n", __func__, e.what()); \
LOGE0("\n %s: ERROR: OpenCV Exception caught: \n'%s'\n\n", __func__, e.what()); \
} catch(std::exception& e) { \
LOGE0("\n %s: ERROR: Exception caught: \n'%s'\n\n", __func__, e.what()); \
LOGE0("\n %s: ERROR: Exception caught: \n'%s'\n\n", __func__, e.what()); \
} catch(...) { \
LOGE0("\n %s: ERROR: UNKNOWN Exception caught\n\n", __func__); \
LOGE0("\n %s: ERROR: UNKNOWN Exception caught\n\n", __func__); \
} \
} while(0)
#else
#define CATCH_ALL_AND_LOG(_block) \
do { \
#define CATCH_ALL_AND_LOG(_block) \
do { \
try { \
_block; \
break; \
} \
catch(cv::Exception& e) { \
LOGE0("\n ERROR: OpenCV Exception caught: \n'%s'\n\n", e.what()); \
LOGE0("\n ERROR: OpenCV Exception caught: \n'%s'\n\n", e.what()); \
} catch(std::exception& e) { \
LOGE0("\n ERROR: Exception caught: \n'%s'\n\n", e.what()); \
LOGE0("\n ERROR: Exception caught: \n'%s'\n\n", e.what()); \
} catch(...) { \
LOGE0("\n ERROR: UNKNOWN Exception caught\n\n"); \
LOGE0("\n ERROR: UNKNOWN Exception caught\n\n"); \
} \
} while(0)
#endif
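The macro above uses the classic do { ... } while(0) construction so that a multi-statement try/catch block behaves as a single statement and composes safely with an unbraced if/else; the break simply leaves the do/while once _block has finished without throwing. A simplified usage sketch (the stripped-down logger and the wrapped call are stand-ins, not code from this commit):

    #include <cstdio>
    #include <exception>

    #define LOGE0(...) fprintf(stderr, __VA_ARGS__)            // stand-in logger

    #define CATCH_ALL_AND_LOG(_block) \
        do { \
            try { \
                _block; \
                break; \
            } catch (const std::exception& e) { \
                LOGE0("ERROR: Exception caught: '%s'\n", e.what()); \
            } catch (...) { \
                LOGE0("ERROR: UNKNOWN Exception caught\n"); \
            } \
        } while (0)

    void doDetection();                                        // hypothetical work function

    void workcycle()
    {
        // The whole body is passed as one macro argument; braces keep it a single unit.
        CATCH_ALL_AND_LOG({ doDetection(); });
    }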
@@ -298,8 +299,8 @@ void cv::DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
break;
}
int64 t2_detect=getTickCount();
int64 dt_detect=t2_detect-t1_detect;
int64 t2_detect = getTickCount();
int64 dt_detect = t2_detect-t1_detect;
double dt_detect_ms=((double)dt_detect)/freq * 1000.0;
LOGI("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- objects num==%d, t_ms=%.4f", (int)objects.size(), dt_detect_ms);
@@ -378,26 +379,26 @@ bool cv::DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingT
{
static double freq = getTickFrequency();
bool shouldCommunicateWithDetectingThread=(stateThread==STATE_THREAD_WORKING_SLEEPING);
bool shouldCommunicateWithDetectingThread = (stateThread==STATE_THREAD_WORKING_SLEEPING);
LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: shouldCommunicateWithDetectingThread=%d", (shouldCommunicateWithDetectingThread?1:0));
if (!shouldCommunicateWithDetectingThread) {
return false;
}
bool shouldHandleResult=false;
bool shouldHandleResult = false;
pthread_mutex_lock(&mutex);
if (isObjectDetectingReady) {
shouldHandleResult=true;
rectsWhereRegions=resultDetect;
rectsWhereRegions = resultDetect;
isObjectDetectingReady=false;
double lastBigDetectionDuration=1000.0 * (((double)(getTickCount() - timeWhenDetectingThreadStartedWork )) / freq);
double lastBigDetectionDuration = 1000.0 * (((double)(getTickCount() - timeWhenDetectingThreadStartedWork )) / freq);
LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: lastBigDetectionDuration=%f ms", (double)lastBigDetectionDuration);
}
bool shouldSendNewDataToWorkThread=true;
bool shouldSendNewDataToWorkThread = true;
if (timeWhenDetectingThreadStartedWork > 0) {
double time_from_previous_launch_in_ms=1000.0 * (((double)(getTickCount() - timeWhenDetectingThreadStartedWork )) / freq); //the same formula as for lastBigDetectionDuration
shouldSendNewDataToWorkThread = (time_from_previous_launch_in_ms >= detectionBasedTracker.parameters.minDetectionPeriod);
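The elapsed-time expressions in this hunk (lastBigDetectionDuration, time_from_previous_launch_in_ms) all use the same tick-count idiom; a minimal sketch of it in isolation:

    #include <opencv2/core/core.hpp>

    // Milliseconds elapsed since t_start, using OpenCV's tick counter.
    static double elapsedMs(int64 t_start)
    {
        static const double freq = cv::getTickFrequency();   // ticks per second
        return 1000.0 * (double)(cv::getTickCount() - t_start) / freq;
    }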
@@ -496,24 +497,24 @@ void DetectionBasedTracker::process(const Mat& imageGray)
LOGD("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
} else {
LOGD("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
for(size_t i=0; i < trackedObjects.size(); i++) {
int n=trackedObjects[i].lastPositions.size();
for(size_t i = 0; i < trackedObjects.size(); i++) {
int n = trackedObjects[i].lastPositions.size();
CV_Assert(n > 0);
Rect r=trackedObjects[i].lastPositions[n-1];
if(r.area()==0) {
Rect r = trackedObjects[i].lastPositions[n-1];
if(r.area() == 0) {
LOGE("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
continue;
}
//correction by speed of rectangle
if (n > 1) {
Point2f center=centerRect(r);
Point2f center_prev=centerRect(trackedObjects[i].lastPositions[n-2]);
Point2f shift=(center - center_prev) * innerParameters.coeffObjectSpeedUsingInPrediction;
Point2f center = centerRect(r);
Point2f center_prev = centerRect(trackedObjects[i].lastPositions[n-2]);
Point2f shift = (center - center_prev) * innerParameters.coeffObjectSpeedUsingInPrediction;
r.x+=cvRound(shift.x);
r.y+=cvRound(shift.y);
r.x += cvRound(shift.x);
r.y += cvRound(shift.y);
}
@@ -526,7 +527,7 @@ void DetectionBasedTracker::process(const Mat& imageGray)
LOGD("DetectionBasedTracker::process: rectsWhereRegions.size()=%d", (int)rectsWhereRegions.size());
for(size_t i=0; i < rectsWhereRegions.size(); i++) {
Rect r=rectsWhereRegions[i];
Rect r = rectsWhereRegions[i];
detectInRegion(imageDetect, r, detectedObjectsInRegions);
}
@@ -799,8 +800,9 @@ Rect cv::DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
void cv::DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector<Rect>& detectedObjectsInRegions)
{
Rect r0(Point(), img.size());
Rect r1=scale_rect(r, innerParameters.coeffTrackingWindowSize);
r1=r1 & r0;
Rect r1 = scale_rect(r, innerParameters.coeffTrackingWindowSize);
r1 = r1 & r0;
if ( (r1.width <=0) || (r1.height <= 0) ) {
LOGD("DetectionBasedTracker::detectInRegion: Empty intersection");
return;
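Taken together, the helpers and the speed correction in the hunks above implement a simple "predict, widen, clip" search window. A standalone C++ sketch of that logic; predictSearchWindow is an illustration, and the two coefficients are placeholders for the innerParameters values, not guaranteed defaults:

    #include <opencv2/core/core.hpp>
    #include <vector>

    static cv::Point2f centerRect(const cv::Rect& r)
    {
        return cv::Point2f(r.x + ((float)r.width)/2, r.y + ((float)r.height)/2);
    }

    static cv::Rect scale_rect(const cv::Rect& r, float scale)
    {
        cv::Point2f m = centerRect(r);
        float width  = r.width  * scale;
        float height = r.height * scale;
        int x = cvRound(m.x - width/2);
        int y = cvRound(m.y - height/2);
        return cv::Rect(x, y, cvRound(width), cvRound(height));
    }

    // Predict where to search next: shift the last position by the object's
    // apparent speed, widen the window, and clip it to the image, mirroring
    // process() and detectInRegion() above. Coefficient values are placeholders.
    static cv::Rect predictSearchWindow(const std::vector<cv::Rect>& lastPositions,
                                        cv::Size imageSize,
                                        float speedCoeff  = 0.8f,
                                        float windowCoeff = 2.0f)
    {
        CV_Assert(!lastPositions.empty());
        cv::Rect r = lastPositions.back();
        size_t n = lastPositions.size();
        if (n > 1) {
            cv::Point2f shift = (centerRect(r) - centerRect(lastPositions[n-2])) * speedCoeff;
            r.x += cvRound(shift.x);
            r.y += cvRound(shift.y);
        }
        return scale_rect(r, windowCoeff) & cv::Rect(cv::Point(), imageSize);
    }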

View File

@@ -48,12 +48,15 @@
#include <android/log.h>
#include <camera_activity.hpp>
#if !defined(LOGD) && !defined(LOGI) && !defined(LOGE)
//#if !defined(LOGD) && !defined(LOGI) && !defined(LOGE)
#undef LOGD
#undef LOGE
#undef LOGI
#define LOG_TAG "CV_CAP"
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__))
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
#endif
//#endif
class HighguiAndroidCameraActivity;
@@ -86,8 +89,8 @@ protected:
//raw from camera
int m_width;
int m_height;
unsigned char *m_frameYUV420;
unsigned char *m_frameYUV420next;
cv::Mat m_frameYUV420;
cv::Mat m_frameYUV420next;
enum YUVformat
{
@@ -115,9 +118,9 @@ private:
bool m_hasColor;
enum CvCapture_Android_DataState {
CVCAPTURE_ANDROID_STATE_NO_FRAME=0,
CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED,
CVCAPTURE_ANDROID_STATE_HAS_FRAME_GRABBED
CVCAPTURE_ANDROID_STATE_NO_FRAME=0,
CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED,
CVCAPTURE_ANDROID_STATE_HAS_FRAME_GRABBED
};
volatile CvCapture_Android_DataState m_dataState;
@@ -189,8 +192,8 @@ CvCapture_Android::CvCapture_Android(int cameraId)
m_height = 0;
m_activity = 0;
m_isOpened = false;
m_frameYUV420 = 0;
m_frameYUV420next = 0;
// m_frameYUV420 = 0;
// m_frameYUV420next = 0;
m_hasGray = false;
m_hasColor = false;
m_dataState = CVCAPTURE_ANDROID_STATE_NO_FRAME;
@@ -231,20 +234,19 @@ CvCapture_Android::~CvCapture_Android()
{
((HighguiAndroidCameraActivity*)m_activity)->LogFramesRate();
pthread_mutex_lock(&m_nextFrameMutex);
pthread_mutex_lock(&m_nextFrameMutex);
// unsigned char *tmp1=m_frameYUV420;
// unsigned char *tmp2=m_frameYUV420next;
// m_frameYUV420 = 0;
// m_frameYUV420next = 0;
// delete tmp1;
// delete tmp2;
unsigned char *tmp1=m_frameYUV420;
unsigned char *tmp2=m_frameYUV420next;
m_frameYUV420 = 0;
m_frameYUV420next = 0;
delete tmp1;
delete tmp2;
m_dataState=CVCAPTURE_ANDROID_STATE_NO_FRAME;
pthread_cond_broadcast(&m_nextFrameCond);
m_dataState=CVCAPTURE_ANDROID_STATE_NO_FRAME;
pthread_cond_broadcast(&m_nextFrameCond);
pthread_mutex_unlock(&m_nextFrameMutex);
pthread_mutex_unlock(&m_nextFrameMutex);
//m_activity->disconnect() will be automatically called inside destructor;
delete m_activity;
@@ -257,7 +259,7 @@ CvCapture_Android::~CvCapture_Android()
double CvCapture_Android::getProperty( int propIdx )
{
switch ( propIdx )
switch ( propIdx )
{
case CV_CAP_PROP_FRAME_WIDTH:
return (double)m_activity->getFrameWidth();
@@ -308,7 +310,7 @@ bool CvCapture_Android::setProperty( int propIdx, double propValue )
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT, propValue);
break;
case CV_CAP_PROP_AUTOGRAB:
m_shouldAutoGrab=(propValue != 0);
m_shouldAutoGrab=(propValue != 0);
break;
case CV_CAP_PROP_EXPOSURE:
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_EXPOSURE, propValue);
@@ -327,13 +329,13 @@ bool CvCapture_Android::setProperty( int propIdx, double propValue )
break;
default:
CV_Error( CV_StsOutOfRange, "Failed attempt to SET unsupported camera property." );
return false;
return false;
}
if (propIdx != CV_CAP_PROP_AUTOGRAB) {// property for highgui class CvCapture_Android only
m_CameraParamsChanged = true;
}
res = true;
if (propIdx != CV_CAP_PROP_AUTOGRAB) {// property for highgui class CvCapture_Android only
m_CameraParamsChanged = true;
}
res = true;
}
return res;
@@ -342,8 +344,8 @@ bool CvCapture_Android::setProperty( int propIdx, double propValue )
bool CvCapture_Android::grabFrame()
{
if( !isOpened() ) {
LOGE("CvCapture_Android::grabFrame(): camera is not opened");
return false;
LOGE("CvCapture_Android::grabFrame(): camera is not opened");
return false;
}
bool res=false;
@@ -352,38 +354,38 @@ bool CvCapture_Android::grabFrame()
{
m_activity->applyProperties();
m_CameraParamsChanged = false;
m_dataState= CVCAPTURE_ANDROID_STATE_NO_FRAME;//we will wait new frame
m_dataState= CVCAPTURE_ANDROID_STATE_NO_FRAME;//we will wait new frame
}
if (m_dataState!=CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED) {
m_waitingNextFrame = true;
pthread_cond_wait(&m_nextFrameCond, &m_nextFrameMutex);
if (m_dataState!=CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED)
{
m_waitingNextFrame = true;
pthread_cond_wait(&m_nextFrameCond, &m_nextFrameMutex);
}
if (m_dataState == CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED) {
//LOGD("CvCapture_Android::grabFrame: get new frame");
//swap current and new frames
unsigned char* tmp = m_frameYUV420;
m_frameYUV420 = m_frameYUV420next;
m_frameYUV420next = tmp;
if (m_dataState == CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED)
{
//LOGD("CvCapture_Android::grabFrame: get new frame");
//swap current and new frames
cv::swap(m_frameYUV420, m_frameYUV420next);
//discard cached frames
m_hasGray = false;
m_hasColor = false;
//discard cached frames
m_hasGray = false;
m_hasColor = false;
m_dataState=CVCAPTURE_ANDROID_STATE_HAS_FRAME_GRABBED;
m_framesGrabbed++;
m_dataState=CVCAPTURE_ANDROID_STATE_HAS_FRAME_GRABBED;
m_framesGrabbed++;
res=true;
res=true;
} else {
LOGE("CvCapture_Android::grabFrame: NO new frame");
LOGE("CvCapture_Android::grabFrame: NO new frame");
}
int res_unlock=pthread_mutex_unlock(&m_nextFrameMutex);
if (res_unlock) {
LOGE("Error in CvCapture_Android::grabFrame: pthread_mutex_unlock returned %d --- probably, this object has been destroyed", res_unlock);
return false;
LOGE("Error in CvCapture_Android::grabFrame: pthread_mutex_unlock returned %d --- probably, this object has been destroyed", res_unlock);
return false;
}
return res;
@@ -393,7 +395,8 @@ IplImage* CvCapture_Android::retrieveFrame( int outputType )
{
IplImage* image = NULL;
unsigned char *current_frameYUV420=m_frameYUV420;
cv::Mat m_frameYUV420_ref = m_frameYUV420;
unsigned char *current_frameYUV420=m_frameYUV420_ref.ptr();
//Attention! all the operations in this function below should occupy less time than the period between two frames from camera
if (NULL != current_frameYUV420)
{
@@ -456,19 +459,10 @@ void CvCapture_Android::setFrame(const void* buffer, int bufferSize)
prepareCacheForYUV(width, height);
//copy data
memcpy(m_frameYUV420next, buffer, bufferSize);
//LOGD("CvCapture_Android::setFrame -- memcpy is done");
#if 0 //moved this part of code into grabFrame
//swap current and new frames
unsigned char* tmp = m_frameYUV420;
m_frameYUV420 = m_frameYUV420next;
m_frameYUV420next = tmp;
//discard cached frames
m_hasGray = false;
m_hasColor = false;
#endif
cv::Mat m_frameYUV420next_ref = m_frameYUV420next;
memcpy(m_frameYUV420next_ref.ptr(), buffer, bufferSize);
// LOGD("CvCapture_Android::setFrame -- memcpy is done");
// ((HighguiAndroidCameraActivity*)m_activity)->LogFramesRate();
m_dataState = CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED;
m_waitingNextFrame = false;//set flag that no more frames required at this moment
@@ -482,17 +476,22 @@ void CvCapture_Android::prepareCacheForYUV(int width, int height)
LOGD("CvCapture_Android::prepareCacheForYUV: Changing size of buffers: from width=%d height=%d to width=%d height=%d", m_width, m_height, width, height);
m_width = width;
m_height = height;
/*
unsigned char *tmp = m_frameYUV420next;
m_frameYUV420next = new unsigned char [width * height * 3 / 2];
if (tmp != NULL) {
delete[] tmp;
}
if (tmp != NULL)
{
delete[] tmp;
}
tmp = m_frameYUV420;
m_frameYUV420 = new unsigned char [width * height * 3 / 2];
if (tmp != NULL) {
delete[] tmp;
}
if (tmp != NULL)
{
delete[] tmp;
}*/
m_frameYUV420.create(height * 3 / 2, width, CV_8UC1);
m_frameYUV420next.create(height * 3 / 2, width, CV_8UC1);
}
}
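The net effect of the capture changes above is that the two YUV420 frame slots become cv::Mat buffers: prepareCacheForYUV() sizes them with Mat::create() (height*3/2 rows of width bytes), setFrame() memcpy's the camera buffer into the "next" slot, and grabFrame() publishes it with an O(1) cv::swap() of Mat headers instead of juggling raw pointers. A condensed sketch of that scheme, with simplified locking and hypothetical names:

    #include <opencv2/core/core.hpp>
    #include <cstring>
    #include <pthread.h>

    struct YuvDoubleBuffer                  // condensed stand-in for CvCapture_Android
    {
        cv::Mat current;                    // m_frameYUV420
        cv::Mat next;                       // m_frameYUV420next
        pthread_mutex_t mutex;

        YuvDoubleBuffer()  { pthread_mutex_init(&mutex, NULL); }
        ~YuvDoubleBuffer() { pthread_mutex_destroy(&mutex); }

        // prepareCacheForYUV(): YUV420sp takes width x height*3/2 bytes;
        // create() reallocates only when the size actually changes.
        void prepare(int width, int height)
        {
            current.create(height * 3 / 2, width, CV_8UC1);
            next.create(height * 3 / 2, width, CV_8UC1);
        }

        // setFrame(): camera callback copies the incoming buffer into the spare slot.
        void setFrame(const void* buffer, size_t bufferSize)
        {
            pthread_mutex_lock(&mutex);
            std::memcpy(next.ptr(), buffer, bufferSize);   // assumes bufferSize fits the Mat
            pthread_mutex_unlock(&mutex);
        }

        // grabFrame(): swap Mat headers, no pixel copy, so retrieve can read `current`.
        void grab()
        {
            pthread_mutex_lock(&mutex);
            cv::swap(current, next);
            pthread_mutex_unlock(&mutex);
        }
    };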

View File

@@ -14,7 +14,7 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
private static final String TAG = "Sample-15puzzle::SurfaceView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;
@@ -29,28 +29,27 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
return false;
}
}
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Failed to open native camera");
return false;
}
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
@@ -77,7 +76,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);

View File

@@ -12,109 +12,98 @@ import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
/** Activity class implements LoaderCallbackInterface to handle OpenCV initialization status **/
public class puzzle15Activity extends Activity
{
private static final String TAG = "Sample::Activity";
private static final String TAG = "Sample::Activity";
private MenuItem mItemNewGame;
private MenuItem mItemToggleNumbers;
private puzzle15View mView = null;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new puzzle15View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public puzzle15Activity()
{
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new puzzle15View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public puzzle15Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
if (null != mView)
mView.releaseCamera();
}
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( mView!=null && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState)
{
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "onCreateOptionsMenu");

View File

@ -18,7 +18,7 @@ import android.view.View;
import android.view.View.OnTouchListener;
public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
private Mat mRgba;
private Mat mRgba;
private Mat mRgba15;
private Mat[] mCells;
private Mat[] mCells15;
@@ -45,13 +45,13 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) {
// initialize Mat before usage
mRgba = new Mat();
}
super.surfaceCreated(holder);
}
super.surfaceCreated(holder);
}
public static void shuffle(int[] array) {
for (int i = array.length; i > 1; i--) {
@@ -83,11 +83,11 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
return sum % 2 == 0;
}
private void createPuzzle(int cols, int rows) {
private void createPuzzle(int cols, int rows, int type) {
mCells = new Mat[gridArea];
mCells15 = new Mat[gridArea];
mRgba15 = new Mat(rows, cols, mRgba.type());
mRgba15 = new Mat(rows, cols, type);
mIndexses = new int[gridArea];
for (int i = 0; i < gridSize; i++) {
@@ -117,12 +117,16 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
int cols = mRgba.cols();
int rows = mRgba.rows();
rows = rows - rows%4;
cols = cols - cols%4;
rows = rows - rows%4;
cols = cols - cols%4;
if (mCells == null)
createPuzzle(cols, rows);
createPuzzle(cols, rows, mRgba.type());
else if(mRgba15.cols() != cols || mRgba15.rows() != rows) {
releaseMats();
createPuzzle(cols, rows, mRgba.type());
}
// copy shuffled tiles
for (int i = 0; i < gridArea; i++) {
@@ -141,10 +145,10 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
drawGrid(cols, rows);
Bitmap bmp = Bitmap.createBitmap(cols, rows, Bitmap.Config.ARGB_8888);
try {
Utils.matToBitmap(mRgba15, bmp);
Utils.matToBitmap(mRgba15, bmp);
return bmp;
} catch(Exception e) {
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
return null;
}
@@ -162,32 +166,38 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
super.run();
synchronized (this) {
// Explicitly deallocate Mats
if (mCells != null) {
for (Mat m : mCells)
m.release();
}
if (mCells15 != null) {
for (Mat m : mCells15)
m.release();
}
releaseMats();
if (mRgba != null)
mRgba.release();
if (mRgba15 != null)
mRgba15.release();
mRgba = null;
mRgba15 = null;
mCells = null;
mCells15 = null;
mIndexses = null;
}
}
private void releaseMats() {
// Explicitly deallocate Mats
if (mCells != null) {
for (Mat m : mCells)
m.release();
}
if (mCells15 != null) {
for (Mat m : mCells15)
m.release();
}
if (mRgba15 != null)
mRgba15.release();
mRgba15 = null;
mCells = null;
mCells15 = null;
mIndexses = null;
}
public boolean onTouch(View v, MotionEvent event) {
if(mRgba==null) return false;
int cols = mRgba.cols();
int cols = mRgba.cols();
int rows = mRgba.rows();
float xoffset = (getWidth() - cols) / 2;
float yoffset = (getHeight() - rows) / 2;
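For context, createPuzzle(cols, rows, type) above now takes the frame type explicitly so the display Mat can be allocated to match the camera frame, and the frame size is rounded down to a multiple of four so the 4x4 grid divides it evenly. An illustrative C++ sketch of the tile-composition idea (the sample itself is Java; the function, names, and grid constant are assumptions):

    #include <opencv2/core/core.hpp>
    #include <vector>

    // Copy grid tiles from `frame` into `display` in the order given by `order`,
    // roughly the way processFrame() assembles the shuffled puzzle from its cells.
    void composePuzzle(const cv::Mat& frame, cv::Mat& display,
                       const std::vector<int>& order, int grid = 4)
    {
        // Caller is expected to have trimmed rows/cols to a multiple of `grid`,
        // as the sample does with rows - rows%4 and cols - cols%4.
        display.create(frame.rows, frame.cols, frame.type());
        int tw = frame.cols / grid, th = frame.rows / grid;
        for (int i = 0; i < grid * grid; i++) {
            cv::Rect src((order[i] % grid) * tw, (order[i] / grid) * th, tw, th);
            cv::Rect dst((i % grid) * tw,        (i / grid) * th,        tw, th);
            frame(src).copyTo(display(dst));
        }
    }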

View File

@@ -10,101 +10,90 @@ import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
public class ColorBlobDetectionActivity extends Activity {
private static final String TAG = "Example/ColorBlobDetection";
private ColorBlobDetectionView mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new ColorBlobDetectionView(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public ColorBlobDetectionActivity()
{
Log.i(TAG, "Instantiated new " + this.getClass());
}
private static final String TAG = "Sample-ColorBlobDetection::Activity";
private ColorBlobDetectionView mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new ColorBlobDetectionView(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public ColorBlobDetectionActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
if (null != mView)
mView.releaseCamera();
}
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( (null != mView) && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
}

View File

@@ -23,127 +23,121 @@ import android.view.View.OnTouchListener;
public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchListener {
private Mat mRgba;
private Mat mRgba;
private boolean mIsColorSelected = false;
private Scalar mBlobColorRgba = new Scalar(255);
private Scalar mBlobColorHsv = new Scalar(255);
private ColorBlobDetector mDetector = new ColorBlobDetector();
private Mat mSpectrum = new Mat();
private static Size SPECTRUM_SIZE = new Size(200, 32);
private boolean mIsColorSelected = false;
private Scalar mBlobColorRgba = new Scalar(255);
private Scalar mBlobColorHsv = new Scalar(255);
private ColorBlobDetector mDetector = new ColorBlobDetector();
private Mat mSpectrum = new Mat();
private static Size SPECTRUM_SIZE = new Size(200, 32);
// Logcat tag
private static final String TAG = "Example/ColorBlobDetection";
private static final Scalar CONTOUR_COLOR = new Scalar(255,0,0,255);
public ColorBlobDetectionView(Context context)
{
// Logcat tag
private static final String TAG = "Sample-ColorBlobDetection::View";
private static final Scalar CONTOUR_COLOR = new Scalar(255,0,0,255);
public ColorBlobDetectionView(Context context) {
super(context);
setOnTouchListener(this);
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) {
// initialize Mat before usage
mRgba = new Mat();
}
super.surfaceCreated(holder);
}
public boolean onTouch(View v, MotionEvent event)
{
public boolean onTouch(View v, MotionEvent event) {
int cols = mRgba.cols();
int rows = mRgba.rows();
int xOffset = (getWidth() - cols) / 2;
int yOffset = (getHeight() - rows) / 2;
int x = (int)event.getX() - xOffset;
int y = (int)event.getY() - yOffset;
Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
Rect touchedRect = new Rect();
touchedRect.x = (x>4) ? x-4 : 0;
touchedRect.y = (y>4) ? y-4 : 0;
touchedRect.width = (x+4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
touchedRect.height = (y+4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
Mat touchedRegionRgba = mRgba.submat(touchedRect);
Mat touchedRegionHsv = new Mat();
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
// Calculate average color of touched region
mBlobColorHsv = Core.sumElems(touchedRegionHsv);
int pointCount = touchedRect.width*touchedRect.height;
for (int i = 0; i < mBlobColorHsv.val.length; i++)
{
mBlobColorHsv.val[i] /= pointCount;
}
mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
mIsColorSelected = true;
return false; // don't need subsequent touch events
}
mBlobColorHsv.val[i] /= pointCount;
mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
mIsColorSelected = true;
return false; // don't need subsequent touch events
}
@Override
protected Bitmap processFrame(VideoCapture capture) {
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
@Override
protected Bitmap processFrame(VideoCapture capture) {
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
if (mIsColorSelected)
{
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
if (mIsColorSelected) {
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.e(TAG, "Contours count: " + contours.size());
Core.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(2, 34, 2, 34);
colorLabel.setTo(mBlobColorRgba);
Mat spectrumLabel = mRgba.submat(2, 2 + mSpectrum.rows(), 38, 38 + mSpectrum.cols());
mSpectrum.copyTo(spectrumLabel);
}
try {
Utils.matToBitmap(mRgba, bmp);
Utils.matToBitmap(mRgba, bmp);
} catch(Exception e) {
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
bmp = null;
}
return bmp;
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor)
{
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
}
@Override
public void run() {
super.run();
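The onTouch handler above picks the target color by averaging the HSV values of a small patch around the touch point (sumElems over a submat divided by the pixel count). A hedged C++ analogue of that step; the patch radius of 4 follows the Java code, and the caller is assumed to have checked the point lies inside the image:

    #include <opencv2/core/core.hpp>
    #include <opencv2/imgproc/imgproc.hpp>
    #include <algorithm>

    // Average HSV color of a small patch around p, clipped to the image borders.
    cv::Scalar averagePatchHsv(const cv::Mat& rgba, cv::Point p, int radius = 4)
    {
        cv::Rect patch;
        patch.x = std::max(p.x - radius, 0);
        patch.y = std::max(p.y - radius, 0);
        patch.width  = std::min(p.x + radius, rgba.cols) - patch.x;
        patch.height = std::min(p.y + radius, rgba.rows) - patch.y;

        cv::Mat hsv;
        cv::cvtColor(rgba(patch), hsv, CV_RGB2HSV_FULL);   // alpha channel is ignored
        cv::Scalar s = cv::sum(hsv);                       // per-channel sums
        double n = (double)patch.area();
        return cv::Scalar(s[0]/n, s[1]/n, s[2]/n, s[3]/n); // per-channel means
    }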

View File

@@ -11,66 +11,58 @@ import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
public class ColorBlobDetector
{
public void setColorRadius(Scalar radius)
{
mColorRadius = radius;
}
public void setHsvColor(Scalar hsvColor)
{
double minH = (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0]-mColorRadius.val[0] : 0;
double maxH = (hsvColor.val[0]+mColorRadius.val[0] <= 255) ? hsvColor.val[0]+mColorRadius.val[0] : 255;
public class ColorBlobDetector {
public void setColorRadius(Scalar radius) {
mColorRadius = radius;
}
mLowerBound.val[0] = minH;
mUpperBound.val[0] = maxH;
public void setHsvColor(Scalar hsvColor) {
double minH = (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0]-mColorRadius.val[0] : 0;
double maxH = (hsvColor.val[0]+mColorRadius.val[0] <= 255) ? hsvColor.val[0]+mColorRadius.val[0] : 255;
mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1];
mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1];
mLowerBound.val[0] = minH;
mUpperBound.val[0] = maxH;
mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2];
mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2];
mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1];
mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1];
mLowerBound.val[3] = 0;
mUpperBound.val[3] = 255;
mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2];
mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2];
Mat spectrumHsv = new Mat(1, (int)(maxH-minH), CvType.CV_8UC3);
mLowerBound.val[3] = 0;
mUpperBound.val[3] = 255;
for (int j = 0; j < maxH-minH; j++)
{
byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255};
spectrumHsv.put(0, j, tmp);
}
Mat spectrumHsv = new Mat(1, (int)(maxH-minH), CvType.CV_8UC3);
Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
for (int j = 0; j < maxH-minH; j++) {
byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255};
spectrumHsv.put(0, j, tmp);
}
}
public Mat getSpectrum()
{
return mSpectrum;
}
public void setMinContourArea(double area)
{
mMinContourArea = area;
}
public void process(Mat rgbaImage)
{
Mat pyrDownMat = new Mat();
Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
}
Imgproc.pyrDown(rgbaImage, pyrDownMat);
Imgproc.pyrDown(pyrDownMat, pyrDownMat);
public Mat getSpectrum() {
return mSpectrum;
}
Mat hsvMat = new Mat();
Imgproc.cvtColor(pyrDownMat, hsvMat, Imgproc.COLOR_RGB2HSV_FULL);
public void setMinContourArea(double area) {
mMinContourArea = area;
}
Mat Mask = new Mat();
Core.inRange(hsvMat, mLowerBound, mUpperBound, Mask);
Mat dilatedMask = new Mat();
Imgproc.dilate(Mask, dilatedMask, new Mat());
public void process(Mat rgbaImage) {
Mat pyrDownMat = new Mat();
Imgproc.pyrDown(rgbaImage, pyrDownMat);
Imgproc.pyrDown(pyrDownMat, pyrDownMat);
Mat hsvMat = new Mat();
Imgproc.cvtColor(pyrDownMat, hsvMat, Imgproc.COLOR_RGB2HSV_FULL);
Mat Mask = new Mat();
Core.inRange(hsvMat, mLowerBound, mUpperBound, Mask);
Mat dilatedMask = new Mat();
Imgproc.dilate(Mask, dilatedMask, new Mat());
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Mat hierarchy = new Mat();
@@ -80,40 +72,36 @@ public class ColorBlobDetector
// Find max contour area
double maxArea = 0;
Iterator<MatOfPoint> each = contours.iterator();
while (each.hasNext())
{
MatOfPoint wrapper = each.next();
double area = Imgproc.contourArea(wrapper);
if (area > maxArea)
maxArea = area;
while (each.hasNext()) {
MatOfPoint wrapper = each.next();
double area = Imgproc.contourArea(wrapper);
if (area > maxArea)
maxArea = area;
}
// Filter contours by area and resize to fit the original image size
mContours.clear();
each = contours.iterator();
while (each.hasNext())
{
MatOfPoint contour = each.next();
if (Imgproc.contourArea(contour) > mMinContourArea*maxArea)
{
Core.multiply(contour, new Scalar(4,4), contour);
mContours.add(contour);
}
while (each.hasNext()) {
MatOfPoint contour = each.next();
if (Imgproc.contourArea(contour) > mMinContourArea*maxArea) {
Core.multiply(contour, new Scalar(4,4), contour);
mContours.add(contour);
}
}
}
}
public List<MatOfPoint> getContours()
{
return mContours;
}
public List<MatOfPoint> getContours() {
return mContours;
}
// Lower and Upper bounds for range checking in HSV color space
private Scalar mLowerBound = new Scalar(0);
private Scalar mUpperBound = new Scalar(0);
// Minimum contour area in percent for contours filtering
private static double mMinContourArea = 0.1;
// Color radius for range checking in HSV color space
private Scalar mColorRadius = new Scalar(25,50,50,0);
private Mat mSpectrum = new Mat();
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
// Lower and Upper bounds for range checking in HSV color space
private Scalar mLowerBound = new Scalar(0);
private Scalar mUpperBound = new Scalar(0);
// Minimum contour area in percent for contours filtering
private static double mMinContourArea = 0.1;
// Color radius for range checking in HSV color space
private Scalar mColorRadius = new Scalar(25,50,50,0);
private Mat mSpectrum = new Mat();
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
}
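Since the Java detector above is a thin wrapper over OpenCV calls, the same pipeline reads naturally in C++: downsample twice with pyrDown, convert to full-range HSV, inRange around the picked color, dilate, then keep contours whose area exceeds a fraction of the largest one and scale them back up by 4. A hedged sketch of that pipeline; the function is an illustration, with defaults following the fields at the bottom of the class:

    #include <opencv2/core/core.hpp>
    #include <opencv2/imgproc/imgproc.hpp>
    #include <algorithm>
    #include <vector>

    // C++ analogue of ColorBlobDetector.process() + getContours(), OpenCV 2.4-era API.
    void detectColorBlobs(const cv::Mat& rgba, const cv::Scalar& hsvColor,
                          std::vector<std::vector<cv::Point> >& blobs,
                          cv::Scalar radius = cv::Scalar(25, 50, 50, 0),
                          double minAreaFraction = 0.1)
    {
        cv::Mat down1, down;
        cv::pyrDown(rgba, down1);                          // two pyrDown calls:
        cv::pyrDown(down1, down);                          // work on a 4x smaller image

        cv::Mat hsv;
        cv::cvtColor(down, hsv, CV_RGB2HSV_FULL);

        cv::Scalar lower(std::max(hsvColor[0] - radius[0], 0.0),
                         hsvColor[1] - radius[1], hsvColor[2] - radius[2], 0);
        cv::Scalar upper(std::min(hsvColor[0] + radius[0], 255.0),
                         hsvColor[1] + radius[1], hsvColor[2] + radius[2], 255);

        cv::Mat mask;
        cv::inRange(hsv, lower, upper, mask);
        cv::dilate(mask, mask, cv::Mat());

        std::vector<std::vector<cv::Point> > contours;
        cv::findContours(mask, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);

        double maxArea = 0;
        for (size_t i = 0; i < contours.size(); i++)
            maxArea = std::max(maxArea, cv::contourArea(contours[i]));

        blobs.clear();
        for (size_t i = 0; i < contours.size(); i++) {
            if (cv::contourArea(contours[i]) > minAreaFraction * maxArea) {
                for (size_t j = 0; j < contours[i].size(); j++)
                    contours[i][j] *= 4;                   // undo the downsampling
                blobs.push_back(contours[i]);
            }
        }
    }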

View File

@@ -29,30 +29,29 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
return false;
}
}
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Failed to open native camera");
return false;
}
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
List<Size> sizes = mCamera.getSupportedPreviewSizes();
@@ -77,7 +76,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
@@ -111,12 +110,12 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
bmp = processFrame(mCamera);
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null);
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();

View File

@@ -9,7 +9,7 @@ include ../../sdk/native/jni/OpenCV.mk
LOCAL_SRC_FILES := DetectionBasedTracker_jni.cpp
LOCAL_C_INCLUDES += $(LOCAL_PATH)
LOCAL_LDLIBS += -llog -ldl
LOCAL_LDLIBS += -llog -ldl
LOCAL_MODULE := detection_based_tracker

View File

@@ -22,26 +22,26 @@ class CascadeDetectorAdapter: public DetectionBasedTracker::IDetector
{
public:
CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector):
IDetector(),
Detector(detector)
IDetector(),
Detector(detector)
{
LOGD("CascadeDetectorAdapter::Detect::Detect");
CV_Assert(!detector.empty());
LOGD("CascadeDetectorAdapter::Detect::Detect");
CV_Assert(!detector.empty());
}
void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects)
{
LOGD("CascadeDetectorAdapter::Detect: begin");
LOGD("CascadeDetectorAdapter::Detect: scaleFactor=%.2f, minNeighbours=%d, minObjSize=(%dx%d), maxObjSize=(%dx%d)", scaleFactor, minNeighbours, minObjSize.width, minObjSize.height, maxObjSize.width, maxObjSize.height);
Detector->detectMultiScale(Image, objects, scaleFactor, minNeighbours, 0, minObjSize, maxObjSize);
LOGD("CascadeDetectorAdapter::Detect: end");
LOGD("CascadeDetectorAdapter::Detect: begin");
LOGD("CascadeDetectorAdapter::Detect: scaleFactor=%.2f, minNeighbours=%d, minObjSize=(%dx%d), maxObjSize=(%dx%d)", scaleFactor, minNeighbours, minObjSize.width, minObjSize.height, maxObjSize.width, maxObjSize.height);
Detector->detectMultiScale(Image, objects, scaleFactor, minNeighbours, 0, minObjSize, maxObjSize);
LOGD("CascadeDetectorAdapter::Detect: end");
}
virtual ~CascadeDetectorAdapter()
{
LOGD("CascadeDetectorAdapter::Detect::~Detect");
LOGD("CascadeDetectorAdapter::Detect::~Detect");
}
private:
CascadeDetectorAdapter();
cv::Ptr<cv::CascadeClassifier> Detector;
@@ -51,56 +51,58 @@ struct DetectorAgregator
{
cv::Ptr<CascadeDetectorAdapter> mainDetector;
cv::Ptr<CascadeDetectorAdapter> trackingDetector;
cv::Ptr<DetectionBasedTracker> tracker;
DetectorAgregator(cv::Ptr<CascadeDetectorAdapter>& _mainDetector, cv::Ptr<CascadeDetectorAdapter>& _trackingDetector):
mainDetector(_mainDetector),
trackingDetector(_trackingDetector)
mainDetector(_mainDetector),
trackingDetector(_trackingDetector)
{
CV_Assert(!_mainDetector.empty());
CV_Assert(!_trackingDetector.empty());
DetectionBasedTracker::Parameters DetectorParams;
tracker = new DetectionBasedTracker(mainDetector.ptr<DetectionBasedTracker::IDetector>(), trackingDetector.ptr<DetectionBasedTracker::IDetector>(), DetectorParams);
CV_Assert(!_mainDetector.empty());
CV_Assert(!_trackingDetector.empty());
DetectionBasedTracker::Parameters DetectorParams;
tracker = new DetectionBasedTracker(mainDetector.ptr<DetectionBasedTracker::IDetector>(), trackingDetector.ptr<DetectionBasedTracker::IDetector>(), DetectorParams);
}
};
JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject
(JNIEnv * jenv, jclass, jstring jFileName, jint faceSize)
{
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject enter");
const char* jnamestr = jenv->GetStringUTFChars(jFileName, NULL);
string stdFileName(jnamestr);
jlong result = 0;
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject");
try
{
cv::Ptr<CascadeDetectorAdapter> mainDetector = new CascadeDetectorAdapter(new CascadeClassifier(stdFileName));
cv::Ptr<CascadeDetectorAdapter> trackingDetector = new CascadeDetectorAdapter(new CascadeClassifier(stdFileName));
result = (jlong)new DetectorAgregator(mainDetector, trackingDetector);
if (faceSize > 0)
{
mainDetector->setMinObjectSize(Size(faceSize, faceSize));
//trackingDetector->setMinObjectSize(Size(faceSize, faceSize));
}
cv::Ptr<CascadeDetectorAdapter> mainDetector = new CascadeDetectorAdapter(new CascadeClassifier(stdFileName));
cv::Ptr<CascadeDetectorAdapter> trackingDetector = new CascadeDetectorAdapter(new CascadeClassifier(stdFileName));
result = (jlong)new DetectorAgregator(mainDetector, trackingDetector);
if (faceSize > 0)
{
mainDetector->setMinObjectSize(Size(faceSize, faceSize));
//trackingDetector->setMinObjectSize(Size(faceSize, faceSize));
}
}
catch(cv::Exception e)
{
LOGD("nativeCreateObject catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
LOGD("nativeCreateObject catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeCreateObject catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject(...)}");
return 0;
catch (...)
{
LOGD("nativeCreateObject catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject(...)}");
return 0;
}
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject exit");
return result;
}
@@ -108,104 +110,110 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDe
(JNIEnv * jenv, jclass, jlong thiz)
{
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject");
try
{
((DetectorAgregator*)thiz)->tracker->stop();
delete (DetectorAgregator*)thiz;
if(thiz != 0)
{
((DetectorAgregator*)thiz)->tracker->stop();
delete (DetectorAgregator*)thiz;
}
}
catch(cv::Exception e)
{
LOGD("nativeestroyObject catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
LOGD("nativeestroyObject catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeDestroyObject catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject(...)}");
LOGD("nativeDestroyObject catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject(...)}");
}
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject exit");
}
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart
(JNIEnv * jenv, jclass, jlong thiz)
{
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart");
try
{
((DetectorAgregator*)thiz)->tracker->run();
((DetectorAgregator*)thiz)->tracker->run();
}
catch(cv::Exception e)
{
LOGD("nativeStart catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
LOGD("nativeStart catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeStart catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart(...)}");
LOGD("nativeStart catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart(...)}");
}
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart exit");
}
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop
(JNIEnv * jenv, jclass, jlong thiz)
{
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop");
try
{
((DetectorAgregator*)thiz)->tracker->stop();
((DetectorAgregator*)thiz)->tracker->stop();
}
catch(cv::Exception e)
{
LOGD("nativeStop catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
LOGD("nativeStop catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeStop catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop(...)}");
LOGD("nativeStop catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop(...)}");
}
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop exit");
}
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize
(JNIEnv * jenv, jclass, jlong thiz, jint faceSize)
{
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize -- BEGIN");
try
{
if (faceSize > 0)
{
((DetectorAgregator*)thiz)->mainDetector->setMinObjectSize(Size(faceSize, faceSize));
//((DetectorAgregator*)thiz)->trackingDetector->setMinObjectSize(Size(faceSize, faceSize));
}
if (faceSize > 0)
{
((DetectorAgregator*)thiz)->mainDetector->setMinObjectSize(Size(faceSize, faceSize));
//((DetectorAgregator*)thiz)->trackingDetector->setMinObjectSize(Size(faceSize, faceSize));
}
}
catch(cv::Exception e)
{
LOGD("nativeStop catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
LOGD("nativeStop catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeSetFaceSize catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize(...)}");
LOGD("nativeSetFaceSize catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize(...)}");
}
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize -- END");
}
@@ -215,27 +223,27 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDe
(JNIEnv * jenv, jclass, jlong thiz, jlong imageGray, jlong faces)
{
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect");
try
{
vector<Rect> RectFaces;
((DetectorAgregator*)thiz)->tracker->process(*((Mat*)imageGray));
((DetectorAgregator*)thiz)->tracker->getObjects(RectFaces);
*((Mat*)faces) = Mat(RectFaces, true);
vector<Rect> RectFaces;
((DetectorAgregator*)thiz)->tracker->process(*((Mat*)imageGray));
((DetectorAgregator*)thiz)->tracker->getObjects(RectFaces);
*((Mat*)faces) = Mat(RectFaces, true);
}
catch(cv::Exception e)
{
LOGD("nativeCreateObject catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
LOGD("nativeCreateObject catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeDetect catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect(...)}");
LOGD("nativeDetect catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect(...)}");
}
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect END");
}
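Each native entry point above follows the same convention: the jlong handle is the address of the DetectorAgregator created in nativeCreateObject, and every C++ exception is converted into a Java exception via FindClass/ThrowNew. A hedged sketch of that pattern with the repeated rethrow boilerplate factored into a helper (the helper, the NativeTracker stand-in, and the Example class name are illustrations, not part of this commit):

    #include <jni.h>
    #include <opencv2/core/core.hpp>

    // Look up a Java exception class and throw the given message; fall back to
    // java.lang.Exception when org.opencv.core.CvException is unavailable.
    static void throwJavaException(JNIEnv* jenv, const char* message)
    {
        jclass je = jenv->FindClass("org/opencv/core/CvException");
        if (!je)
            je = jenv->FindClass("java/lang/Exception");
        jenv->ThrowNew(je, message);
    }

    struct NativeTracker { void run() {} };    // stand-in for DetectorAgregator

    extern "C" JNIEXPORT void JNICALL
    Java_org_opencv_samples_fd_Example_nativeStart(JNIEnv* jenv, jclass, jlong thiz)
    {
        try {
            ((NativeTracker*)thiz)->run();     // the jlong carries the native pointer
        } catch (const cv::Exception& e) {
            throwJavaException(jenv, e.what());
        } catch (...) {
            throwJavaException(jenv, "Unknown exception in native code");
        }
    }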

View File

@@ -46,7 +46,7 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSt
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize
(JNIEnv *, jclass, jlong, jint);
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeDetect

View File

@@ -4,49 +4,42 @@ import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
public class DetectionBasedTracker
{
public DetectionBasedTracker(String cascadeName, int minFaceSize)
{
mNativeObj = nativeCreateObject(cascadeName, minFaceSize);
}
public void start()
{
nativeStart(mNativeObj);
}
public void stop()
{
nativeStop(mNativeObj);
}
public void setMinFaceSize(int size)
{
nativeSetFaceSize(mNativeObj, size);
}
public void detect(Mat imageGray, MatOfRect faces)
{
nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr());
}
public void release()
{
nativeDestroyObject(mNativeObj);
mNativeObj = 0;
}
private long mNativeObj = 0;
private static native long nativeCreateObject(String cascadeName, int minFaceSize);
private static native void nativeDestroyObject(long thiz);
private static native void nativeStart(long thiz);
private static native void nativeStop(long thiz);
private static native void nativeSetFaceSize(long thiz, int size);
private static native void nativeDetect(long thiz, long inputImage, long faces);
static
{
System.loadLibrary("detection_based_tracker");
}
{
public DetectionBasedTracker(String cascadeName, int minFaceSize) {
mNativeObj = nativeCreateObject(cascadeName, minFaceSize);
}
public void start() {
nativeStart(mNativeObj);
}
public void stop() {
nativeStop(mNativeObj);
}
public void setMinFaceSize(int size) {
nativeSetFaceSize(mNativeObj, size);
}
public void detect(Mat imageGray, MatOfRect faces) {
nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr());
}
public void release() {
nativeDestroyObject(mNativeObj);
mNativeObj = 0;
}
private long mNativeObj = 0;
private static native long nativeCreateObject(String cascadeName, int minFaceSize);
private static native void nativeDestroyObject(long thiz);
private static native void nativeStart(long thiz);
private static native void nativeStop(long thiz);
private static native void nativeSetFaceSize(long thiz, int size);
private static native void nativeDetect(long thiz, long inputImage, long faces);
static {
System.loadLibrary("detection_based_tracker");
}
}

View File

@@ -12,74 +12,72 @@ import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
public class FdActivity extends Activity {
private static final String TAG = "Sample::Activity";
private static final String TAG = "Sample-FD::Activity";
private MenuItem mItemFace50;
private MenuItem mItemFace40;
private MenuItem mItemFace30;
private MenuItem mItemFace20;
private MenuItem mItemType;
private int mDetectorType = 0;
private String[] mDetectorName;
private FdView mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Load native libs after OpenCV initialization
System.loadLibrary("detection_based_tracker");
// Create and set View
mView = new FdView(mAppContext);
mView.setDetectorType(mDetectorType);
mView.setMinFaceSize(0.2f);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public FdActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
@ -88,31 +86,25 @@ public class FdActivity extends Activity {
mDetectorName[FdView.NATIVE_DETECTOR] = "Native (tracking)";
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
if (mView != null)
mView.releaseCamera();
}
if (mView != null)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( mView != null && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
@ -120,12 +112,7 @@ public class FdActivity extends Activity {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
@Override
@ -136,7 +123,6 @@ public class FdActivity extends Activity {
mItemFace30 = menu.add("Face size 30%");
mItemFace20 = menu.add("Face size 20%");
mItemType = menu.add(mDetectorName[mDetectorType]);
return true;
}
@ -146,16 +132,16 @@ public class FdActivity extends Activity {
if (item == mItemFace50)
mView.setMinFaceSize(0.5f);
else if (item == mItemFace40)
mView.setMinFaceSize(0.4f);
else if (item == mItemFace30)
mView.setMinFaceSize(0.3f);
else if (item == mItemFace20)
mView.setMinFaceSize(0.2f);
else if (item == mItemType)
{
mDetectorType = (mDetectorType + 1) % mDetectorName.length;
item.setTitle(mDetectorName[mDetectorType]);
mView.setDetectorType(mDetectorType);
}
return true;
}
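
The activity changes above mainly move OpenCVLoader.initAsync() out of onCreate() and into onResume(), so the manager is re-contacted every time the activity returns to the foreground. Stripped of the sample specifics, the pattern is roughly the following sketch (class name, tag, and callback field name are placeholders):

import android.app.Activity;
import android.util.Log;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

public class AsyncInitSketch extends Activity {
    private static final String TAG = "AsyncInitSketch";

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS) {
                // Native libraries that link against OpenCV, and any views that
                // need it, may only be created from this point on.
                Log.i(TAG, "OpenCV loaded successfully");
            } else {
                super.onManagerConnected(status);
            }
        }
    };

    @Override
    protected void onResume() {
        super.onResume();
        // Re-request initialization each time the activity comes to the foreground.
        if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mLoaderCallback))
            Log.e(TAG, "Cannot connect to OpenCV Manager");
    }
}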

View File

@ -22,7 +22,7 @@ import android.util.Log;
import android.view.SurfaceHolder;
class FdView extends SampleCvViewBase {
private static final String TAG = "Sample::FdView";
private static final String TAG = "Sample-FD::View";
private Mat mRgba;
private Mat mGray;
private File mCascadeFile;
@ -30,38 +30,32 @@ class FdView extends SampleCvViewBase {
private DetectionBasedTracker mNativeDetector;
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
private int mDetectorType = JAVA_DETECTOR;
private float mRelativeFaceSize = 0;
private int mAbsoluteFaceSize = 0;
public void setMinFaceSize(float faceSize) {
mRelativeFaceSize = faceSize;
mAbsoluteFaceSize = 0;
}
public void setDetectorType(int type)
{
if (mDetectorType != type)
{
mDetectorType = type;
if (type == NATIVE_DETECTOR)
{
Log.i(TAG, "Detection Based Tracker enabled");
mNativeDetector.start();
}
else
{
Log.i(TAG, "Cascade detector enabled");
mNativeDetector.stop();
}
}
public void setDetectorType(int type) {
if (mDetectorType != type) {
mDetectorType = type;
if (type == NATIVE_DETECTOR) {
Log.i(TAG, "Detection Based Tracker enabled");
mNativeDetector.start();
} else {
Log.i(TAG, "Cascade detector enabled");
mNativeDetector.stop();
}
}
}
public FdView(Context context) {
@ -89,7 +83,7 @@ class FdView extends SampleCvViewBase {
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
cascadeDir.delete();
} catch (IOException e) {
@ -99,7 +93,7 @@ class FdView extends SampleCvViewBase {
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) {
// initialize Mats before usage
mGray = new Mat();
@ -107,41 +101,36 @@ class FdView extends SampleCvViewBase {
}
super.surfaceCreated(holder);
}
}
@Override
protected Bitmap processFrame(VideoCapture capture) {
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
if (mAbsoluteFaceSize == 0)
{
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0);
{
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
}
MatOfRect faces = new MatOfRect();
if (mDetectorType == JAVA_DETECTOR)
{
if (mJavaDetector != null)
mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2 // TODO: objdetect.CV_HAAR_SCALE_IMAGE
, new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
if (mDetectorType == JAVA_DETECTOR) {
if (mJavaDetector != null)
mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
}
else if (mDetectorType == NATIVE_DETECTOR)
{
if (mNativeDetector != null)
mNativeDetector.detect(mGray, faces);
else if (mDetectorType == NATIVE_DETECTOR) {
if (mNativeDetector != null)
mNativeDetector.detect(mGray, faces);
}
else
{
Log.e(TAG, "Detection method is not selected!");
else {
Log.e(TAG, "Detection method is not selected!");
}
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++)
Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
@ -149,13 +138,13 @@ class FdView extends SampleCvViewBase {
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
try {
Utils.matToBitmap(mRgba, bmp);
} catch(Exception e) {
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
bmp = null;
}
return bmp;
}
@ -170,9 +159,9 @@ class FdView extends SampleCvViewBase {
if (mGray != null)
mGray.release();
if (mCascadeFile != null)
mCascadeFile.delete();
if (mNativeDetector != null)
mNativeDetector.release();
mRgba = null;
mGray = null;
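
The FdView hunk above only shows the tail of the cascade-loading code ("Loaded cascade classifier from ..."). For context, the sample's approach is to copy the classifier out of the APK resources into app-private storage and build the detector from that file; the same absolute path is what it passes to the DetectionBasedTracker constructor. A rough sketch of that step, with the resource id and file name as placeholders:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import android.content.Context;
import org.opencv.objdetect.CascadeClassifier;

public class CascadeLoaderSketch {
    // rawResId would be something like R.raw.lbpcascade_frontalface in the real sample.
    public static CascadeClassifier load(Context context, int rawResId) throws IOException {
        InputStream is = context.getResources().openRawResource(rawResId);
        File cascadeDir = context.getDir("cascade", Context.MODE_PRIVATE);
        File cascadeFile = new File(cascadeDir, "cascade.xml");
        FileOutputStream os = new FileOutputStream(cascadeFile);
        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1)
            os.write(buffer, 0, bytesRead);
        is.close();
        os.close();

        // An empty classifier means the file was not a valid cascade.
        CascadeClassifier detector = new CascadeClassifier(cascadeFile.getAbsolutePath());
        if (detector.empty())
            throw new IOException("Failed to load cascade from " + cascadeFile.getAbsolutePath());
        return detector;
    }
}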

View File

@ -31,28 +31,27 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
return false;
}
}
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
Log.e(TAG, "Failed to open native camera");
releaseCamera();
return false;
}
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
@ -79,7 +78,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
@ -93,6 +92,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
Log.i(TAG, "surfaceDestroyed2");
}
protected abstract Bitmap processFrame(VideoCapture capture);
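
This base class only exposes the abstract processFrame(VideoCapture) hook in the hunks above; the run loop that grabs frames and pushes the returned Bitmap to the SurfaceHolder is not shown here. A simplified, non-verbatim sketch of that contract (member names follow the sample, but this is not its exact code):

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.view.SurfaceHolder;
import org.opencv.highgui.VideoCapture;

public abstract class FrameLoopSketch implements Runnable {
    protected VideoCapture mCamera;          // opened elsewhere, e.g. in openCamera()
    protected SurfaceHolder mHolder;         // provided by the SurfaceView
    protected volatile boolean mThreadRun;

    protected abstract Bitmap processFrame(VideoCapture capture);

    public void run() {
        mThreadRun = true;
        while (mThreadRun) {
            Bitmap bmp = null;
            synchronized (this) {
                if (mCamera == null || !mCamera.grab())
                    break;                       // camera released or grab failed
                bmp = processFrame(mCamera);     // subclass retrieves and converts the frame
            }
            if (bmp != null) {
                Canvas canvas = mHolder.lockCanvas();
                if (canvas != null) {
                    // Center the frame on the surface, as the sample's loop does.
                    canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2,
                                           (canvas.getHeight() - bmp.getHeight()) / 2, null);
                    mHolder.unlockCanvasAndPost(canvas);
                }
                bmp.recycle();
            }
        }
    }
}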

View File

@ -12,10 +12,11 @@ import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
public class ImageManipulationsActivity extends Activity {
private static final String TAG = "Sample-ImageManipulations::Activity";
private static final String TAG = "Sample-ImageManipulations::Activity";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_HIST = 1;
@ -36,85 +37,79 @@ public class ImageManipulationsActivity extends Activity {
private MenuItem mItemPreviewPosterize;
public static int viewMode = VIEW_MODE_RGBA;
private ImageManipulationsView mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new ImageManipulationsView(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public ImageManipulationsActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
if (null != mView)
mView.releaseCamera();
}
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( (null != mView) && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
@ -122,12 +117,7 @@ public class ImageManipulationsActivity extends Activity {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
@Override

View File

@ -24,7 +24,7 @@ class ImageManipulationsView extends SampleCvViewBase {
private Size mSize0;
private Size mSizeRgba;
private Size mSizeRgbaInner;
private Mat mRgba;
private Mat mGray;
private Mat mIntermediateMat;
@ -55,7 +55,7 @@ class ImageManipulationsView extends SampleCvViewBase {
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) {
// initialize Mats before usage
mGray = new Mat();
@ -71,11 +71,11 @@ class ImageManipulationsView extends SampleCvViewBase {
mMat0 = new Mat();
mColorsRGB = new Scalar[] { new Scalar(200, 0, 0, 255), new Scalar(0, 200, 0, 255), new Scalar(0, 0, 200, 255) };
mColorsHue = new Scalar[] {
new Scalar(255, 0, 0, 255), new Scalar(255, 60, 0, 255), new Scalar(255, 120, 0, 255), new Scalar(255, 180, 0, 255), new Scalar(255, 240, 0, 255),
new Scalar(215, 213, 0, 255), new Scalar(150, 255, 0, 255), new Scalar(85, 255, 0, 255), new Scalar(20, 255, 0, 255), new Scalar(0, 255, 30, 255),
new Scalar(0, 255, 85, 255), new Scalar(0, 255, 150, 255), new Scalar(0, 255, 215, 255), new Scalar(0, 234, 255, 255), new Scalar(0, 170, 255, 255),
new Scalar(0, 120, 255, 255), new Scalar(0, 60, 255, 255), new Scalar(0, 0, 255, 255), new Scalar(64, 0, 255, 255), new Scalar(120, 0, 255, 255),
new Scalar(180, 0, 255, 255), new Scalar(255, 0, 255, 255), new Scalar(255, 0, 215, 255), new Scalar(255, 0, 85, 255), new Scalar(255, 0, 0, 255)
};
mWhilte = Scalar.all(255);
mP1 = new Point();
@ -83,13 +83,13 @@ class ImageManipulationsView extends SampleCvViewBase {
}
super.surfaceCreated(holder);
}
}
private void CreateAuxiliaryMats() {
if (mRgba.empty())
return;
mSizeRgba = mRgba.size();
int rows = (int) mSizeRgba.height;
int cols = (int) mSizeRgba.width;
@ -102,7 +102,7 @@ class ImageManipulationsView extends SampleCvViewBase {
if (mRgbaInnerWindow == null)
mRgbaInnerWindow = mRgba.submat(top, top + height, left, left + width);
mSizeRgbaInner = mRgbaInnerWindow.size();
if (mGrayInnerWindow == null && !mGray.empty())
mGrayInnerWindow = mGray.submat(top, top + height, left, left + width);
@ -134,38 +134,38 @@ class ImageManipulationsView extends SampleCvViewBase {
int offset = (int) ((mSizeRgba.width - (5*mHistSizeNum + 4*10)*thikness)/2);
// RGB
for(int c=0; c<3; c++) {
Imgproc.calcHist(Arrays.asList(mRgba), mChannels[c], mMat0, mHist, mHistSize, mRanges);
Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
mHist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thikness;
mP1.y = mSizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h];
Core.line(mRgba, mP1, mP2, mColorsRGB[c], thikness);
}
}
// Value and Hue
Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL);
// Value
Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[2], mMat0, mHist, mHistSize, mRanges);
Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
mHist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (3 * (mHistSizeNum + 10) + h) * thikness;
mP1.y = mSizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h];
Core.line(mRgba, mP1, mP2, mWhilte, thikness);
}
// Hue
Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[0], mMat0, mHist, mHistSize, mRanges);
Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
mHist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (4 * (mHistSizeNum + 10) + h) * thikness;
mP1.y = mSizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h];
Core.line(mRgba, mP1, mP2, mColorsHue[h], thikness);
}
break;
case ImageManipulationsActivity.VIEW_MODE_CANNY:
@ -231,10 +231,10 @@ class ImageManipulationsView extends SampleCvViewBase {
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
try {
Utils.matToBitmap(mRgba, bmp);
return bmp;
} catch(Exception e) {
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
return null;
}
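
The histogram drawing above repeats the same three steps per channel: calcHist into a single-column Mat, normalize it to the panel height, and draw one line per bin. Distilled to its core, and with method and variable names chosen here purely for illustration, the per-channel computation is roughly:

import java.util.Arrays;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfInt;
import org.opencv.imgproc.Imgproc;

public class HistogramSketch {
    // Returns histSize bin heights for one channel of an 8-bit image,
    // scaled so the largest bin equals maxHeight (like mSizeRgba.height/2 above).
    public static float[] channelHistogram(Mat image, int channel, int histSize, float maxHeight) {
        Mat hist = new Mat();
        Imgproc.calcHist(Arrays.asList(image),
                         new MatOfInt(channel),        // which channel to histogram
                         new Mat(),                    // no mask
                         hist,
                         new MatOfInt(histSize),       // number of bins
                         new MatOfFloat(0f, 256f));    // value range for 8-bit data
        Core.normalize(hist, hist, maxHeight, 0, Core.NORM_INF);
        float[] bins = new float[histSize];
        hist.get(0, 0, bins);                          // one bin height per entry
        return bins;
    }
}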

View File

@ -31,28 +31,27 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
return false;
}
}
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Failed to open native camera");
return false;
}
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
@ -79,7 +78,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);

View File

@ -10,8 +10,8 @@ import android.view.MenuItem;
import android.view.Window;
public class Sample0Base extends Activity {
private static final String TAG = "Sample::Activity";
private static final String TAG = "Sample::Activity";
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
@ -22,31 +22,31 @@ public class Sample0Base extends Activity {
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
mView.releaseCamera();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
@ -68,9 +68,9 @@ public class Sample0Base extends Activity {
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
if (item == mItemPreviewRGBA)
mView.setViewMode(Sample0View.VIEW_MODE_RGBA);
else if (item == mItemPreviewGray)
mView.setViewMode(Sample0View.VIEW_MODE_GRAY);
return true;
}
}

View File

@ -5,17 +5,17 @@ import android.graphics.Bitmap;
import android.util.Log;
class Sample0View extends SampleViewBase {
private static final String TAG = "Sample0View";
int mSize;
int[] mRGBA;
private Bitmap mBitmap;
private static final String TAG = "Sample0View";
int mSize;
int[] mRGBA;
private Bitmap mBitmap;
private int mViewMode;
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;
public Sample0View(Context context) {
super(context);
mSize = 0;
@ -25,7 +25,7 @@ class Sample0View extends SampleViewBase {
@Override
protected Bitmap processFrame(byte[] data) {
int frameSize = getFrameWidth() * getFrameHeight();
int[] rgba = mRGBA;
final int view_mode = mViewMode;
@ -35,15 +35,15 @@ class Sample0View extends SampleViewBase {
rgba[i] = 0xff000000 + (y << 16) + (y << 8) + y;
}
} else if (view_mode == VIEW_MODE_RGBA) {
for (int i = 0; i < getFrameHeight(); i++)
for (int i = 0; i < getFrameHeight(); i++) {
for (int j = 0; j < getFrameWidth(); j++) {
int index = i * getFrameWidth() + j;
int supply_index = frameSize + (i >> 1) * getFrameWidth() + (j & ~1);
int y = (0xff & ((int) data[index]));
int u = (0xff & ((int) data[supply_index + 0]));
int v = (0xff & ((int) data[supply_index + 1]));
y = y < 16 ? 16 : y;
float y_conv = 1.164f * (y - 16);
int r = Math.round(y_conv + 1.596f * (v - 128));
int g = Math.round(y_conv - 0.813f * (v - 128) - 0.391f * (u - 128));
@ -55,35 +55,36 @@ class Sample0View extends SampleViewBase {
rgba[i * getFrameWidth() + j] = 0xff000000 + (b << 16) + (g << 8) + r;
}
}
}
mBitmap.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
return mBitmap;
}
@Override
protected void onPreviewStarted(int previewWidth, int previewHeight) {
Log.i(TAG, "onPreviewStarted("+previewWidth+", "+previewHeight+")");
/* Create a bitmap that will be used through to calculate the image to */
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
mRGBA = new int[previewWidth * previewHeight];
}
@Override
protected void onPreviewStopped() {
Log.i(TAG, "onPreviewStopped");
if(mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
if(mRGBA != null) {
mRGBA = null;
}
}
public void setViewMode(int viewMode) {
Log.i(TAG, "setViewMode("+viewMode+")");
mViewMode = viewMode;
}
}
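
The RGBA branch of Sample0View converts NV21 preview data by hand. For a single pixel, the arithmetic used by the sample is roughly the following sketch; the 2.018f blue coefficient is the usual BT.601 companion of the coefficients visible in the hunk above (the sample's own blue line falls outside the shown context), and the method and parameter names are illustrative:

public class Nv21PixelSketch {
    // Converts one NV21 pixel at (row, col) into the packed int layout this sample
    // feeds to Bitmap.setPixels: 0xff000000 + (b << 16) + (g << 8) + r.
    public static int toColor(byte[] data, int width, int height, int row, int col) {
        int frameSize = width * height;
        int index = row * width + col;
        int supplyIndex = frameSize + (row >> 1) * width + (col & ~1);  // interleaved chroma plane

        int y = 0xff & data[index];
        int u = 0xff & data[supplyIndex + 0];
        int v = 0xff & data[supplyIndex + 1];

        y = y < 16 ? 16 : y;
        float yConv = 1.164f * (y - 16);
        int r = Math.round(yConv + 1.596f * (v - 128));
        int g = Math.round(yConv - 0.813f * (v - 128) - 0.391f * (u - 128));
        int b = Math.round(yConv + 2.018f * (u - 128));   // assumed standard coefficient

        // Clamp to the valid 8-bit range before packing.
        r = Math.min(255, Math.max(0, r));
        g = Math.min(255, Math.max(0, g));
        b = Math.min(255, Math.max(0, b));
        return 0xff000000 + (b << 16) + (g << 8) + r;
    }
}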

View File

@ -25,6 +25,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
private byte[] mFrame;
private boolean mThreadRun;
private byte[] mBuffer;
private SurfaceTexture mSf;
public SampleViewBase(Context context) {
@ -43,39 +44,40 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
public void setPreview() throws IOException {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
mCamera.setPreviewTexture( new SurfaceTexture(10) );
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSf = new SurfaceTexture(10);
mCamera.setPreviewTexture( mSf );
}
else
mCamera.setPreviewDisplay(null);
mCamera.setPreviewDisplay(null);
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open();
if(mCamera == null) {
Log.e(TAG, "Can't open camera!");
return false;
Log.e(TAG, "Can't open camera!");
return false;
}
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
System.arraycopy(data, 0, mFrame, 0, data.length);
SampleViewBase.this.notify();
}
camera.addCallbackBuffer(mBuffer);
}
});
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
mThreadRun = false;
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
@ -84,7 +86,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
onPreviewStopped();
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
@ -107,15 +109,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
@ -125,21 +127,21 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */
mCamera.startPreview();
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
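
The substantive change in this base class is that the SurfaceTexture handed to setPreviewTexture() is now kept in the mSf field instead of being created inline, so it cannot be collected while the preview is running. In isolation, the pattern is roughly the sketch below (the texture name 10 follows the sample; class and field names here are placeholders):

import java.io.IOException;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.Build;

public class PreviewTargetSketch {
    private Camera mCamera;                 // assumed to be opened elsewhere
    private SurfaceTexture mSurfaceTexture; // hard reference kept for the preview's lifetime

    public void attachPreviewTarget() throws IOException {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            // Keep a reference; a throwaway SurfaceTexture could be reclaimed
            // while the camera still renders into it.
            mSurfaceTexture = new SurfaceTexture(10);
            mCamera.setPreviewTexture(mSurfaceTexture);
        } else {
            // Pre-Honeycomb devices accept a null display when frames are
            // consumed through the preview callback only.
            mCamera.setPreviewDisplay(null);
        }
    }
}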

View File

@ -12,6 +12,7 @@ import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
public class Sample1Java extends Activity {
private static final String TAG = "Sample::Activity";
@ -22,81 +23,76 @@ public class Sample1Java extends Activity {
private Sample1View mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new Sample1View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Sample1Java() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
if (null != mView)
mView.releaseCamera();
}
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( (null != mView) && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
@ -104,12 +100,7 @@ public class Sample1Java extends Activity {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
@Override
@ -125,11 +116,11 @@ public class Sample1Java extends Activity {
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
if (item == mItemPreviewRGBA) {
mView.setViewMode(Sample1View.VIEW_MODE_RGBA);
} else if (item == mItemPreviewGray) {
mView.setViewMode(Sample1View.VIEW_MODE_GRAY);
} else if (item == mItemPreviewCanny) {
mView.setViewMode(Sample1View.VIEW_MODE_CANNY);
}
return true;
}

View File

@ -22,35 +22,35 @@ class Sample1View extends SampleViewBase {
private Mat mRgba;
private Mat mGraySubmat;
private Mat mIntermediateMat;
private Bitmap mBitmap;
private int mViewMode;
public Sample1View(Context context) {
super(context);
mViewMode = VIEW_MODE_RGBA;
}
@Override
protected void onPreviewStarted(int previewWidth, int previewHeight) {
synchronized (this) {
// initialize Mats before usage
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
mRgba = new Mat();
mIntermediateMat = new Mat();
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
}
}
@Override
protected void onPreviewStopped() {
if(mBitmap != null) {
mBitmap.recycle();
}
synchronized (this) {
// Explicitly deallocate Mats
if (mYuv != null)
mYuv.release();
@ -101,7 +101,7 @@ class Sample1View extends SampleViewBase {
}
public void setViewMode(int viewMode) {
mViewMode = viewMode;
}
}
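
Sample1View takes the Mat route instead of per-pixel Java arithmetic: the NV21 buffer is copied into a CV_8UC1 Mat of height*3/2 rows and converted with a single cvtColor call, as the mYuv/mGraySubmat allocation above suggests. A condensed sketch of that conversion, assuming the preview format is NV21 (YUV420sp) and using the same COLOR_YUV420sp2RGB code with a 4-channel destination:

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

public class YuvToRgbaSketch {
    // Wraps an NV21 preview frame in a Mat and converts it to a 4-channel image.
    public static Mat convert(byte[] nv21, int width, int height) {
        Mat yuv = new Mat(height + height / 2, width, CvType.CV_8UC1);
        yuv.put(0, 0, nv21);                // copy the whole YUV buffer into the Mat
        Mat rgba = new Mat();
        Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV420sp2RGB, 4);
        yuv.release();
        return rgba;
    }
}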

View File

@ -25,6 +25,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
private byte[] mFrame;
private boolean mThreadRun;
private byte[] mBuffer;
private SurfaceTexture mSf;
public SampleViewBase(Context context) {
@ -43,38 +44,40 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
public void setPreview() throws IOException {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
mCamera.setPreviewTexture( new SurfaceTexture(10) );
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSf = new SurfaceTexture(10);
mCamera.setPreviewTexture( mSf );
}
else
mCamera.setPreviewDisplay(null);
}
mCamera.setPreviewDisplay(null);
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open();
if(mCamera == null) {
Log.e(TAG, "Can't open camera!");
return false;
Log.e(TAG, "Can't open camera!");
return false;
}
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
System.arraycopy(data, 0, mFrame, 0, data.length);
SampleViewBase.this.notify();
}
camera.addCallbackBuffer(mBuffer);
}
});
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
mThreadRun = false;
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
@ -83,7 +86,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
onPreviewStopped();
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
@ -106,15 +109,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
@ -124,21 +127,21 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */
mCamera.startPreview();
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);

View File

@ -12,6 +12,7 @@ import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
public class Sample2NativeCamera extends Activity {
private static final String TAG = "Sample::Activity";
@ -25,85 +26,79 @@ public class Sample2NativeCamera extends Activity {
private MenuItem mItemPreviewCanny;
public static int viewMode = VIEW_MODE_RGBA;
private Sample2View mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new Sample2View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Sample2NativeCamera() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
if (null != mView)
mView.releaseCamera();
}
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if((null != mView) && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
@ -111,11 +106,7 @@ public class Sample2NativeCamera extends Activity {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
@Override

View File

@ -31,7 +31,7 @@ class Sample2View extends SampleCvViewBase {
mRgba = new Mat();
mIntermediateMat = new Mat();
}
super.surfaceCreated(holder);
}
@ -50,16 +50,16 @@ class Sample2View extends SampleCvViewBase {
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
break;
}
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
try {
Utils.matToBitmap(mRgba, bmp);
return bmp;
} catch(Exception e) {
Log.e("org.opencv.samples.tutorial2", "Utils.matToBitmap() throws an exception: " + e.getMessage());
Log.e("org.opencv.samples.tutorial2", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
return null;
}
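
Putting the visible pieces of Sample2View's Canny branch together, the per-frame flow is: retrieve the grey frame from the native camera, run Canny, expand the edge map back to a 4-channel image, and convert to a Bitmap, recycling it if the conversion fails. A condensed sketch with thresholds 80/100 as in the sample (the Mats are assumed to be pre-allocated by the caller, mirroring the view's members):

import android.graphics.Bitmap;
import android.util.Log;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import org.opencv.imgproc.Imgproc;

public class CannyFrameSketch {
    // Converts one already-grabbed frame to an edge bitmap; returns null on failure.
    public static Bitmap cannyFrame(VideoCapture capture, Mat gray, Mat edges, Mat rgba) {
        capture.retrieve(gray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
        Imgproc.Canny(gray, edges, 80, 100);
        Imgproc.cvtColor(edges, rgba, Imgproc.COLOR_GRAY2BGRA, 4);

        Bitmap bmp = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
        try {
            Utils.matToBitmap(rgba, bmp);
            return bmp;
        } catch (Exception e) {
            Log.e("CannyFrameSketch", "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmp.recycle();
            return null;
        }
    }
}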

View File

@ -29,28 +29,27 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
return false;
}
}
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Failed to open native camera");
return false;
}
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
@ -77,7 +76,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
@ -115,8 +114,8 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth()-bmp.getWidth()) / 2, (canvas.getHeight()-bmp.getHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();

View File

@ -10,91 +10,86 @@ import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
public class Sample3Native extends Activity {
private static final String TAG = "Sample::Activity";
private Sample3View mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("native_sample");
// Create and set View
mView = new Sample3View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Sample3Native() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
if (null != mView)
mView.releaseCamera();
}
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if((null != mView) && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
@ -102,10 +97,6 @@ public class Sample3Native extends Activity {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
}

View File

@ -4,30 +4,30 @@ import android.content.Context;
import android.graphics.Bitmap;
class Sample3View extends SampleViewBase {
private int mFrameSize;
private Bitmap mBitmap;
private int[] mRGBA;
public Sample3View(Context context) {
super(context);
}
@Override
protected void onPreviewStarted(int previewWidtd, int previewHeight) {
mFrameSize = previewWidtd * previewHeight;
mRGBA = new int[mFrameSize];
mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);
}
@Override
protected void onPreviewStopped() {
if(mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
mRGBA = null;
}
@Override
protected Bitmap processFrame(byte[] data) {
@ -35,7 +35,7 @@ class Sample3View extends SampleViewBase {
FindFeatures(getFrameWidth(), getFrameHeight(), data, rgba);
Bitmap bmp = mBitmap;
bmp.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
return bmp;
}
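
Sample3View delegates the per-frame work to a native FindFeatures routine and only packs the result into a Bitmap on the Java side. The wiring for such a native hook looks roughly like the sketch below; the class here is illustrative, while the parameter order and the libnative_sample.so name follow the hunks above (the activity loads that library from its loader callback once OpenCV is initialized):

import android.graphics.Bitmap;

public class NativeFeaturesSketch {
    // Implemented in the sample's jni/ sources and exported from libnative_sample.so;
    // fills rgba with the preview frame plus feature markers.
    public static native void FindFeatures(int width, int height, byte[] yuv, int[] rgba);

    // Mirrors Sample3View.processFrame(): let native code fill the RGBA buffer,
    // then push it into a pre-allocated Bitmap of the same size.
    public static Bitmap toBitmap(byte[] yuv, int width, int height, int[] rgba, Bitmap bmp) {
        FindFeatures(width, height, yuv, rgba);
        bmp.setPixels(rgba, 0 /* offset */, width /* stride */, 0, 0, width, height);
        return bmp;
    }
}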

View File

@ -24,6 +24,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
private byte[] mFrame;
private boolean mThreadRun;
private byte[] mBuffer;
private SurfaceTexture mSf;
public SampleViewBase(Context context) {
@ -42,38 +43,40 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
public void setPreview() throws IOException {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        mSf = new SurfaceTexture(10);
        mCamera.setPreviewTexture( mSf );
    }
    else
        mCamera.setPreviewDisplay(null);
}
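// Keeping the SurfaceTexture in the mSf member appears intended to hold a strong
// reference for as long as the preview runs, so the dummy texture is not
// garbage-collected while the camera is still rendering into it.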
public boolean openCamera() {
Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open();
if(mCamera == null) {
Log.e(TAG, "Can't open camera!");
return false;
}
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
System.arraycopy(data, 0, mFrame, 0, data.length);
SampleViewBase.this.notify();
}
camera.addCallbackBuffer(mBuffer);
}
});
return true;
}
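// setPreviewCallbackWithBuffer() delivers frames into the pre-registered buffer: each
// frame is copied into mFrame under the view lock, the processing thread is woken with
// notify(), and mBuffer is handed back via addCallbackBuffer() so no per-frame
// allocation takes place.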
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
mThreadRun = false;
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
@@ -82,7 +85,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
onPreviewStopped();
}
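// releaseCamera() first clears mThreadRun so the worker loop can exit, then stops and
// releases the camera under the view lock before onPreviewStopped() lets the subclass
// free its per-frame resources.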
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
@@ -105,15 +108,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
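// Continuous-video autofocus is optional: it is requested only when the device lists
// FOCUS_MODE_CONTINUOUS_VIDEO among its supported modes, and the parameters are then
// applied with setParameters().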
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
@@ -123,21 +126,21 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
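// mBuffer is the array the camera fills and mFrame is the copy the processing thread
// reads; registering mBuffer here gives the camera its first (and only) callback buffer.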
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);

try {
    setPreview();
} catch (IOException e) {
    Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}

/* Now we can start a preview */
mCamera.startPreview();
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);

View File

@@ -12,6 +12,7 @@ import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
public class Sample4Mixed extends Activity {
private static final String TAG = "Sample::Activity";
@@ -20,103 +21,91 @@ public class Sample4Mixed extends Activity {
private MenuItem mItemPreviewGray;
private MenuItem mItemPreviewCanny;
private MenuItem mItemPreviewFeatures;
private Sample4View mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("mixed_sample");
// Create and set View
mView = new Sample4View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
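// System.loadLibrary("mixed_sample") has to stay inside the SUCCESS branch: the sample's
// JNI library links against the OpenCV native libraries supplied by OpenCV Manager, so
// loading it before initialization completes would typically end in an UnsatisfiedLinkError.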
public Sample4Mixed() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
    Log.i(TAG, "onPause");
    if (null != mView)
        mView.releaseCamera();
    super.onPause();
}
@Override
protected void onResume() {
    Log.i(TAG, "onResume");
    super.onResume();

    Log.i(TAG, "Trying to load OpenCV library");
    if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
    {
        Log.e(TAG, "Cannot connect to OpenCV Manager");
    }
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.i(TAG, "onCreate");
requestWindowFeature(Window.FEATURE_NO_TITLE);
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
public boolean onCreateOptionsMenu(Menu menu) {
@@ -131,13 +120,13 @@ public class Sample4Mixed extends Activity {
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
if (item == mItemPreviewRGBA) {
    mView.setViewMode(Sample4View.VIEW_MODE_RGBA);
} else if (item == mItemPreviewGray) {
    mView.setViewMode(Sample4View.VIEW_MODE_GRAY);
} else if (item == mItemPreviewCanny) {
    mView.setViewMode(Sample4View.VIEW_MODE_CANNY);
} else if (item == mItemPreviewFeatures) {
    mView.setViewMode(Sample4View.VIEW_MODE_FEATURES);
}
return true;
}
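// The menu handler only flips the view mode; the new value is expected to be picked up by
// the frame-processing loop on the next preview frame, so switching modes does not require
// restarting the camera.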

View File

@@ -15,39 +15,39 @@ class Sample4View extends SampleViewBase {
public static final int VIEW_MODE_GRAY = 1;
public static final int VIEW_MODE_CANNY = 2;
public static final int VIEW_MODE_FEATURES = 5;
private Mat mYuv;
private Mat mRgba;
private Mat mGraySubmat;
private Mat mIntermediateMat;
private int mViewMode;
private Bitmap mBitmap;
public Sample4View(Context context) {
super(context);
}
@Override
protected void onPreviewStarted(int previewWidtd, int previewHeight) {
    // initialize Mats before usage
    mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
    mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
    mRgba = new Mat();
    mIntermediateMat = new Mat();
    mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);
}
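// mYuv has getFrameHeight() * 3 / 2 rows because NV21 preview frames store the full-resolution
// Y plane first, followed by interleaved VU samples at half height; mGraySubmat therefore
// aliases the Y plane of mYuv as a grayscale image without copying any data.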
@Override
protected void onPreviewStopped() {
    if (mBitmap != null) {
        mBitmap.recycle();
        mBitmap = null;
    }
// Explicitly deallocate Mats
if (mYuv != null)
mYuv.release();
@@ -62,8 +62,8 @@ class Sample4View extends SampleViewBase {
mRgba = null;
mGraySubmat = null;
mIntermediateMat = null;
}
@Override
@@ -105,6 +105,6 @@ class Sample4View extends SampleViewBase {
public native void FindFeatures(long matAddrGr, long matAddrRgba);
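// Unlike the array-based FindFeatures() in Sample3View, this variant passes the native
// addresses (long) of the gray and RGBA Mats to JNI, so the C++ side can wrap and modify
// the existing cv::Mat buffers directly.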
public void setViewMode(int viewMode) {
mViewMode = viewMode;
}
}

View File

@@ -24,6 +24,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
private byte[] mFrame;
private boolean mThreadRun;
private byte[] mBuffer;
private SurfaceTexture mSf;
public SampleViewBase(Context context) {
@@ -42,38 +43,40 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
public void setPreview() throws IOException {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        mSf = new SurfaceTexture(10);
        mCamera.setPreviewTexture( mSf );
    }
    else
        mCamera.setPreviewDisplay(null);
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open();
if(mCamera == null) {
Log.e(TAG, "Can't open camera!");
return false;
}
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
System.arraycopy(data, 0, mFrame, 0, data.length);
SampleViewBase.this.notify();
}
camera.addCallbackBuffer(mBuffer);
}
});
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
mThreadRun = false;
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
@@ -82,7 +85,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
onPreviewStopped();
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
@@ -105,15 +108,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
@@ -123,21 +126,21 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);

try {
    setPreview();
} catch (IOException e) {
    Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}

/* Now we can start a preview */
mCamera.startPreview();
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);