Android samples are updated: the camera is now released in onPause() and opened in onResume(), an error message is shown when the camera fails to open, plus minor fixes in code and resources

Andrey Pavlenko 2012-05-17 14:51:04 +00:00
parent 0ba3236ce0
commit 5855c4905e
31 changed files with 820 additions and 427 deletions
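For orientation, every sample Activity in this commit gains the same onPause()/onResume() handling; a condensed sketch is shown below (SampleActivity and SampleView are placeholder names standing in for the concrete activity/view pairs such as FdActivity/FdView visible in the diffs that follow):

import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;

public class SampleActivity extends Activity {
    private static final String TAG = "Sample::Activity";
    private SampleView mView;   // the activity now keeps a reference to its view

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "onCreate");
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        mView = new SampleView(this);   // stored so lifecycle callbacks can reach the camera
        setContentView(mView);
    }

    @Override
    protected void onPause() {
        Log.i(TAG, "onPause");
        super.onPause();
        mView.releaseCamera();          // free the camera whenever the activity leaves the foreground
    }

    @Override
    protected void onResume() {
        Log.i(TAG, "onResume");
        super.onResume();
        if (!mView.openCamera()) {      // reacquire on return; show a fatal dialog on failure
            AlertDialog ad = new AlertDialog.Builder(this).create();
            ad.setCancelable(false);    // this blocks the 'BACK' button
            ad.setMessage("Fatal error: can't open camera!");
            ad.setButton("OK", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int which) {
                    dialog.dismiss();
                    finish();           // the sample cannot run without a camera
                }
            });
            ad.show();
        }
    }
}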

View File

@ -26,13 +26,36 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Log.i(TAG, "Instantiated new " + this.getClass());
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceCreated");
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
return false;
}
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "before mCamera.getSupportedPreviewSizes()");
List<Size> sizes = mCamera.getSupportedPreviewSizes();
Log.i(TAG, "after mCamera.getSupportedPreviewSizes()");
int mFrameWidth = width;
int mFrameHeight = height;
@ -52,28 +75,22 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
}
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
if (mCamera != null) {
synchronized (this) {
mCamera.release();
mCamera = null;
}
}
releaseCamera();
}
protected abstract Bitmap processFrame(VideoCapture capture);

View File

@ -1,6 +1,8 @@
package org.opencv.samples.puzzle15;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
@ -18,6 +20,31 @@ public class puzzle15Activity extends Activity {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
mView.releaseCamera();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {

View File

@ -18,7 +18,7 @@ import android.view.View;
import android.view.View.OnTouchListener;
public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
private Mat mRgba;
private Mat mRgba;
private Mat mRgba15;
private Mat[] mCells;
private Mat[] mCells15;
@ -45,13 +45,13 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
}
@Override
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
super.surfaceChanged(_holder, format, width, height);
public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) {
// initialize Mat before usage
mRgba = new Mat();
}
}
super.surfaceCreated(holder);
}
public static void shuffle(int[] array) {
for (int i = array.length; i > 1; i--) {
@ -185,7 +185,9 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
}
public boolean onTouch(View v, MotionEvent event) {
int cols = mRgba.cols();
if(mRgba==null) return false;
int cols = mRgba.cols();
int rows = mRgba.rows();
float xoffset = (getWidth() - cols) / 2;
float yoffset = (getHeight() - rows) / 2;

View File

@ -1,6 +1,8 @@
package org.opencv.samples.colorblobdetect;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
@ -15,6 +17,31 @@ public class ColorBlobDetectionActivity extends Activity {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
mView.releaseCamera();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {

View File

@ -56,12 +56,13 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
}
@Override
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
super.surfaceChanged(_holder, format, width, height);
public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) {
// initialize Mat before usage
mRgba = new Mat();
}
super.surfaceCreated(holder);
}
public boolean onTouch(View v, MotionEvent event)

View File

@ -26,13 +26,36 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Log.i(TAG, "Instantiated new " + this.getClass());
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceCreated");
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
return false;
}
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "before mCamera.getSupportedPreviewSizes()");
List<Size> sizes = mCamera.getSupportedPreviewSizes();
Log.i(TAG, "after mCamera.getSupportedPreviewSizes()");
int mFrameWidth = width;
int mFrameHeight = height;
@ -52,28 +75,22 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
}
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
if (mCamera != null) {
synchronized (this) {
mCamera.release();
mCamera = null;
}
}
releaseCamera();
}
protected abstract Bitmap processFrame(VideoCapture capture);

View File

@ -1,6 +1,8 @@
package org.opencv.samples.fd;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
@ -14,6 +16,8 @@ public class FdActivity extends Activity {
private MenuItem mItemFace40;
private MenuItem mItemFace30;
private MenuItem mItemFace20;
private FdView mView;
public static float minFaceSize = 0.5f;
@ -21,13 +25,39 @@ public class FdActivity extends Activity {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
mView.releaseCamera();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(new FdView(this));
mView = new FdView(this);
setContentView(mView);
}
@Override

View File

@ -62,17 +62,17 @@ class FdView extends SampleCvViewBase {
}
@Override
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
super.surfaceChanged(_holder, format, width, height);
public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) {
// initialize Mats before usage
mGray = new Mat();
mRgba = new Mat();
}
}
@Override
super.surfaceCreated(holder);
}
@Override
protected Bitmap processFrame(VideoCapture capture) {
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

View File

@ -28,13 +28,36 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Log.i(TAG, "Instantiated new " + this.getClass());
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceCreated");
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
return false;
}
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "before mCamera.getSupportedPreviewSizes()");
List<Size> sizes = mCamera.getSupportedPreviewSizes();
Log.i(TAG, "after mCamera.getSupportedPreviewSizes()");
int mFrameWidth = width;
int mFrameHeight = height;
@ -54,28 +77,22 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
}
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
if (mCamera != null) {
synchronized (this) {
mCamera.release();
mCamera = null;
}
}
releaseCamera();
}
protected abstract Bitmap processFrame(VideoCapture capture);

View File

@ -1,6 +1,8 @@
package org.opencv.samples.imagemanipulations;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
@ -8,7 +10,8 @@ import android.view.MenuItem;
import android.view.Window;
public class ImageManipulationsActivity extends Activity {
private static final String TAG = "Sample::Activity";
private static final String TAG = "Sample-ImageManipulations::Activity";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_HIST = 1;
@ -29,18 +32,46 @@ public class ImageManipulationsActivity extends Activity {
private MenuItem mItemPreviewPosterize;
public static int viewMode = VIEW_MODE_RGBA;
private ImageManipulationsView mView;
public ImageManipulationsActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
mView.releaseCamera();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(new ImageManipulationsView(this));
mView = new ImageManipulationsView(this);
setContentView(mView);
}
@Override

View File

@ -55,9 +55,7 @@ class ImageManipulationsView extends SampleCvViewBase {
}
@Override
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
super.surfaceChanged(_holder, format, width, height);
public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) {
// initialize Mats before usage
mGray = new Mat();
@ -83,9 +81,11 @@ class ImageManipulationsView extends SampleCvViewBase {
mP1 = new Point();
mP2 = new Point();
}
}
private void CreateAuxiliaryMats() {
super.surfaceCreated(holder);
}
private void CreateAuxiliaryMats() {
if (mRgba.empty())
return;

View File

@ -14,7 +14,7 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
private static final String TAG = "Sample-ImageManipulations::SurfaceView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;
@ -28,13 +28,36 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Log.i(TAG, "Instantiated new " + this.getClass());
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceCreated");
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
return false;
}
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "before mCamera.getSupportedPreviewSizes()");
List<Size> sizes = mCamera.getSupportedPreviewSizes();
Log.i(TAG, "after mCamera.getSupportedPreviewSizes()");
int mFrameWidth = width;
int mFrameHeight = height;
@ -54,28 +77,22 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
}
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
if (mCamera != null) {
synchronized (this) {
mCamera.release();
mCamera = null;
}
}
releaseCamera();
}
protected abstract Bitmap processFrame(VideoCapture capture);
@ -88,8 +105,10 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Bitmap bmp = null;
synchronized (this) {
if (mCamera == null)
if (mCamera == null) {
Log.i(TAG, "mCamera == null");
break;
}
if (!mCamera.grab()) {
Log.e(TAG, "mCamera.grab() failed");

View File

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">Tutorial 1 Basic - 0. Android Camera</string>
<string name="app_name">Tutorial 0 (Basic) - Android Camera</string>
</resources>

View File

@ -1,6 +1,8 @@
package org.opencv.samples.tutorial0;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
@ -19,7 +21,32 @@ public class Sample0Base extends Activity {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
mView.releaseCamera();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");

View File

@ -63,6 +63,7 @@ class Sample0View extends SampleViewBase {
@Override
protected void onPreviewStared(int previewWidth, int previewHeight) {
Log.i(TAG, "onPreviewStared("+previewWidth+", "+previewHeight+")");
/* Create a bitmap that will be used through to calculate the image to */
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
mRGBA = new int[previewWidth * previewHeight];
@ -70,12 +71,19 @@ class Sample0View extends SampleViewBase {
@Override
protected void onPreviewStopped() {
mBitmap.recycle();
mBitmap = null;
mRGBA = null;
Log.i(TAG, "onPreviewStopped");
if(mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
if(mRGBA != null) {
mRGBA = null;
}
}
public void setViewMode(int viewMode) {
Log.i(TAG, "setViewMode("+viewMode+")");
mViewMode = viewMode;
}
}

View File

@ -50,62 +50,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceCreated");
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be coppied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
}
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
public boolean openCamera() {
Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open();
if(mCamera == null) {
Log.e(TAG, "Can't open camera!");
return false;
}
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
@ -116,15 +68,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
camera.addCallbackBuffer(mBuffer);
}
});
(new Thread(this)).start();
return true;
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
mThreadRun = false;
if (mCamera != null) {
synchronized (this) {
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
@ -133,6 +84,76 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
onPreviewStopped();
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);
@ -175,5 +196,6 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
}
}
Log.i(TAG, "Finishing processing thread");
}
}

View File

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">Tutorial 1 Basic - 1. Add OpenCV</string>
<string name="app_name">Tutorial 1 (Basic) - Add OpenCV</string>
</resources>

View File

@ -1,6 +1,8 @@
package org.opencv.samples.tutorial1;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
@ -19,6 +21,31 @@ public class Sample1Java extends Activity {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
mView.releaseCamera();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {

View File

@ -2,16 +2,15 @@ package org.opencv.samples.tutorial1;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.CvType;
import org.opencv.imgproc.Imgproc;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import android.view.SurfaceHolder;
class Sample1View extends SampleViewBase {
@ -94,7 +93,7 @@ class Sample1View extends SampleViewBase {
try {
Utils.matToBitmap(mRgba, bmp);
} catch(Exception e) {
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
Log.e("org.opencv.samples.tutorial1", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
bmp = null;
}

View File

@ -49,63 +49,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mCamera.setPreviewDisplay(null);
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceCreated");
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be coppied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
}
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
public boolean openCamera() {
Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open();
if(mCamera == null) {
Log.e(TAG, "Can't open camera!");
return false;
}
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
@ -116,15 +67,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
camera.addCallbackBuffer(mBuffer);
}
});
(new Thread(this)).start();
return true;
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
mThreadRun = false;
if (mCamera != null) {
synchronized (this) {
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
@ -133,6 +83,76 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
onPreviewStopped();
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);

View File

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">Tutorial 1 Basic - 2. Use OpenCV Camera</string>
<string name="app_name">Tutorial 2 (Basic) - Use OpenCV Camera</string>
</resources>

View File

@ -1,6 +1,8 @@
package org.opencv.samples.tutorial2;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
@ -19,18 +21,46 @@ public class Sample2NativeCamera extends Activity {
private MenuItem mItemPreviewCanny;
public static int viewMode = VIEW_MODE_RGBA;
private Sample2View mView;
public Sample2NativeCamera() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
mView.releaseCamera();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(new Sample2View(this));
mView = new Sample2View(this);
setContentView(mView);
}
@Override

View File

@ -1,12 +1,8 @@
package org.opencv.samples.tutorial2;
import java.util.ArrayList;
import java.util.List;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.highgui.Highgui;
@ -22,36 +18,25 @@ class Sample2View extends SampleCvViewBase {
private Mat mRgba;
private Mat mGray;
private Mat mIntermediateMat;
private Mat mIntermediateMat2;
private Mat mEmpty;
private Scalar lo, hi;
private Scalar bl, wh;
public Sample2View(Context context) {
super(context);
}
@Override
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
super.surfaceChanged(_holder, format, width, height);
public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) {
// initialize Mats before usage
mGray = new Mat();
mRgba = new Mat();
mIntermediateMat = new Mat();
mIntermediateMat2 = new Mat();
mEmpty = new Mat();
lo = new Scalar(85, 100, 30);
hi = new Scalar(130, 255, 255);
bl = new Scalar(0, 0, 0, 255);
wh = new Scalar(255, 255, 255, 255);
}
super.surfaceCreated(holder);
}
@Override
protected Bitmap processFrame(VideoCapture capture) {
/**/
switch (Sample2NativeCamera.viewMode) {
case Sample2NativeCamera.VIEW_MODE_GRAY:
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
@ -62,36 +47,11 @@ class Sample2View extends SampleCvViewBase {
Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3, 2, new Scalar(255, 0, 0, 255), 3);
break;
case Sample2NativeCamera.VIEW_MODE_CANNY:
/*capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
*/
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL);
Core.inRange(mIntermediateMat, lo, hi, mIntermediateMat2); // green
Imgproc.dilate(mIntermediateMat2, mIntermediateMat2, mEmpty);
//
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Mat hierarchy = new Mat();
Imgproc.findContours(mIntermediateMat2, contours, hierarchy,Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
Log.d("processFrame", "contours.size()" + contours.size());
double maxArea = 0;
int indexMaxArea = -1;
for (int i = 0; i < contours.size(); i++) {
double s = Imgproc.contourArea(contours.get(i));
if(s > maxArea){
indexMaxArea = i;
maxArea = s;
}
}
mRgba.setTo(bl);
Imgproc.drawContours(mRgba, contours, indexMaxArea, wh);
//
//Imgproc.cvtColor(mIntermediateMat2, mRgba, Imgproc.COLOR_GRAY2RGBA);
break;
}
/**/
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
@ -99,7 +59,7 @@ class Sample2View extends SampleCvViewBase {
Utils.matToBitmap(mRgba, bmp);
return bmp;
} catch(Exception e) {
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
Log.e("org.opencv.samples.tutorial2", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
return null;
}
@ -118,9 +78,6 @@ class Sample2View extends SampleCvViewBase {
if (mIntermediateMat != null)
mIntermediateMat.release();
if (mIntermediateMat2 != null)
mIntermediateMat2.release();
mRgba = null;
mGray = null;
mIntermediateMat = null;

View File

@ -26,13 +26,36 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Log.i(TAG, "Instantiated new " + this.getClass());
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceCreated");
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
return false;
}
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "before mCamera.getSupportedPreviewSizes()");
List<Size> sizes = mCamera.getSupportedPreviewSizes();
Log.i(TAG, "after mCamera.getSupportedPreviewSizes()");
int mFrameWidth = width;
int mFrameHeight = height;
@ -52,28 +75,22 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCamera.isOpened()) {
(new Thread(this)).start();
} else {
mCamera.release();
mCamera = null;
Log.e(TAG, "Failed to open native camera");
}
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
if (mCamera != null) {
synchronized (this) {
mCamera.release();
mCamera = null;
}
}
releaseCamera();
}
protected abstract Bitmap processFrame(VideoCapture capture);

View File

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">Tutorial 2 Advanced - 1. Add Native OpenCV</string>
<string name="app_name">Tutorial 3 (Advanced) - Add Native OpenCV</string>
</resources>

View File

@ -1,23 +1,52 @@
package org.opencv.samples.tutorial3;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
public class Sample3Native extends Activity {
private static final String TAG = "Sample::Activity";
private Sample3View mView;
public Sample3Native() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
mView.releaseCamera();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(new Sample3View(this));
mView = new Sample3View(this);
setContentView(mView);
}
}

View File

@ -2,7 +2,6 @@ package org.opencv.samples.tutorial3;
import java.io.IOException;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
@ -49,62 +48,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mCamera.setPreviewDisplay(null);
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceCreated");
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be coppied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
}
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
public boolean openCamera() {
Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open();
if(mCamera == null) {
Log.e(TAG, "Can't open camera!");
return false;
}
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
@ -115,15 +66,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
camera.addCallbackBuffer(mBuffer);
}
});
(new Thread(this)).start();
return true;
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
mThreadRun = false;
if (mCamera != null) {
synchronized (this) {
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
@ -132,6 +82,77 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
onPreviewStopped();
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);

View File

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">Tutorial 2 Advanced - 2. Mix Java+Native OpenCV</string>
<string name="app_name">Tutorial 4 (Advanced) - Mix Java+Native OpenCV</string>
</resources>

View File

@ -1,6 +1,8 @@
package org.opencv.samples.tutorial4;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
@ -21,6 +23,31 @@ public class Sample4Mixed extends Activity {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
super.onPause();
mView.releaseCamera();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {

View File

@ -1,14 +1,13 @@
package org.opencv.samples.tutorial4;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import android.view.SurfaceHolder;
class Sample4View extends SampleViewBase {

View File

@ -2,7 +2,6 @@ package org.opencv.samples.tutorial4;
import java.io.IOException;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
@ -49,62 +48,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mCamera.setPreviewDisplay(null);
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceCreated");
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be coppied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
}
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
public boolean openCamera() {
Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open();
if(mCamera == null) {
Log.e(TAG, "Can't open camera!");
return false;
}
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
@ -115,15 +66,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
camera.addCallbackBuffer(mBuffer);
}
});
(new Thread(this)).start();
return true;
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
mThreadRun = false;
if (mCamera != null) {
synchronized (this) {
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
@ -132,6 +82,78 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
onPreviewStopped();
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);
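The commit leaves the abstract processFrame(byte[] data) hook unchanged. For context, a hypothetical minimal subclass (not part of this commit) could implement it by rendering the Y plane of the NV21 preview frame as grayscale; the constructor signature and the onPreviewStared()/onPreviewStopped()/getFrameWidth()/getFrameHeight() members are taken from the concrete views and base-class calls visible above:

import android.content.Context;
import android.graphics.Bitmap;

class GraySampleView extends SampleViewBase {
    private Bitmap mBitmap;
    private int[] mPixels;

    public GraySampleView(Context context) {
        super(context);
    }

    @Override
    protected void onPreviewStared(int previewWidth, int previewHeight) {
        mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
        mPixels = new int[previewWidth * previewHeight];
    }

    @Override
    protected void onPreviewStopped() {
        if (mBitmap != null) {
            mBitmap.recycle();   // the bitmap is owned here, as the base-class comment requires
            mBitmap = null;
        }
        mPixels = null;
    }

    @Override
    protected Bitmap processFrame(byte[] data) {
        if (mBitmap == null || mPixels == null)
            return null;
        int w = getFrameWidth(), h = getFrameHeight();
        for (int i = 0; i < w * h; i++) {    // NV21: the first w*h bytes are the luma plane
            int y = data[i] & 0xff;
            mPixels[i] = 0xff000000 | (y << 16) | (y << 8) | y;
        }
        mBitmap.setPixels(mPixels, 0, w, 0, 0, w, h);
        return mBitmap;
    }
}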