Tutorial3 ported to the new framework.
commit 31763bdcf0
parent 50500e5923
@@ -8,28 +8,16 @@ using namespace std;
 using namespace cv;
 
 extern "C" {
-JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial3_Sample3View_FindFeatures(JNIEnv* env, jobject, jint width, jint height, jbyteArray yuv, jintArray bgra)
+JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial3_Sample3Native_FindFeatures(JNIEnv*, jobject, jlong addrGray, jlong addrRgba)
 {
-    jbyte* _yuv = env->GetByteArrayElements(yuv, 0);
-    jint* _bgra = env->GetIntArrayElements(bgra, 0);
-
-    Mat myuv(height + height/2, width, CV_8UC1, (unsigned char *)_yuv);
-    Mat mbgra(height, width, CV_8UC4, (unsigned char *)_bgra);
-    Mat mgray(height, width, CV_8UC1, (unsigned char *)_yuv);
-
-    //Please make attention about BGRA byte order
-    //ARGB stored in java as int array becomes BGRA at native level
-    cvtColor(myuv, mbgra, CV_YUV420sp2BGR, 4);
-
+    Mat* pMatGr=(Mat*)addrGray;
+    Mat* pMatRgb=(Mat*)addrRgba;
     vector<KeyPoint> v;
 
     FastFeatureDetector detector(50);
-    detector.detect(mgray, v);
+    detector.detect(*pMatGr, v);
     for( size_t i = 0; i < v.size(); i++ )
-        circle(mbgra, Point(v[i].pt.x, v[i].pt.y), 10, Scalar(0,0,255,255));
-
-    env->ReleaseIntArrayElements(bgra, _bgra, 0);
-    env->ReleaseByteArrayElements(yuv, _yuv, 0);
+        circle(*pMatRgb, Point(v[i].pt.x, v[i].pt.y), 10, Scalar(255,0,0,255));
 }
 
 }
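For reference, a minimal sketch of how the Java side is expected to drive the new native signature: the Mat objects stay on the Java side and only their native addresses cross the JNI boundary via getNativeObjAddr(), matching the FindFeatures(long, long) declaration added to Sample3Native.java later in this diff. The local names gray and rgba (and the height/width values) are illustrative only, not part of the commit:

    // Illustrative allocation; in the sample the Mats are created in onCameraViewStarted()
    Mat gray = new Mat(height, width, CvType.CV_8UC1);
    Mat rgba = new Mat(height, width, CvType.CV_8UC4);
    // Pass the underlying cv::Mat pointers as jlong values to the native function
    FindFeatures(gray.getNativeObjAddr(), rgba.getNativeObjAddr());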
@@ -0,0 +1,11 @@
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:tools="http://schemas.android.com/tools"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent" >
+
+    <org.opencv.framework.OpenCvJavaCameraView
+        android:layout_width="fill_parent"
+        android:layout_height="fill_parent"
+        android:id="@+id/tutorial4_activity_surface_view" />
+
+</LinearLayout>
@@ -3,21 +3,26 @@ package org.opencv.samples.tutorial3;
 import org.opencv.android.BaseLoaderCallback;
 import org.opencv.android.LoaderCallbackInterface;
 import org.opencv.android.OpenCVLoader;
+import org.opencv.core.CvType;
+import org.opencv.core.Mat;
+import org.opencv.framework.OpenCvJavaCameraView;
+import org.opencv.framework.OpenCvCameraBridgeViewBase.CvCameraViewListener;
+import org.opencv.imgproc.Imgproc;
 
 import android.app.Activity;
-import android.app.AlertDialog;
-import android.content.DialogInterface;
 import android.os.Bundle;
 import android.util.Log;
 import android.view.Window;
 import android.view.WindowManager;
 
-public class Sample3Native extends Activity {
+public class Sample3Native extends Activity implements CvCameraViewListener {
     private static final String TAG = "OCVSample::Activity";
 
-    private Sample3View mView;
+    private Mat mRgba;
+    private Mat mGrayMat;
+    private OpenCvJavaCameraView mOpenCvCameraView;
 
-    private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
+    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
         @Override
         public void onManagerConnected(int status) {
             switch (status) {
@@ -28,37 +33,7 @@ public class Sample3Native extends Activity {
                     // Load native library after(!) OpenCV initialization
                     System.loadLibrary("native_sample");
 
-                    // Create and set View
-                    mView = new Sample3View(mAppContext);
-                    setContentView(mView);
-                    // Check native OpenCV camera
-                    if( !mView.openCamera() ) {
-                        AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
-                        ad.setCancelable(false); // This blocks the 'BACK' button
-                        ad.setMessage("Fatal error: can't open camera!");
-                        ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
-                            public void onClick(DialogInterface dialog, int which) {
-                                dialog.dismiss();
-                                finish();
-                            }
-                        });
-                        ad.show();
-                    }
-                } break;
-                /** OpenCV loader cannot start Google Play **/
-                case LoaderCallbackInterface.MARKET_ERROR:
-                {
-                    Log.d(TAG, "Google Play service is not accessible!");
-                    AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
-                    MarketErrorMessage.setTitle("OpenCV Manager");
-                    MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
-                    MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
-                    MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
-                        public void onClick(DialogInterface dialog, int which) {
-                            finish();
-                        }
-                    });
-                    MarketErrorMessage.show();
+                    mOpenCvCameraView.enableView();
                 } break;
                 default:
                 {
@@ -72,25 +47,6 @@ public class Sample3Native extends Activity {
         Log.i(TAG, "Instantiated new " + this.getClass());
     }
 
-    @Override
-    protected void onPause() {
-        Log.i(TAG, "called onPause");
-        if (null != mView)
-            mView.releaseCamera();
-        super.onPause();
-    }
-
-    @Override
-    protected void onResume() {
-        Log.i(TAG, "called onResume");
-        super.onResume();
-
-        Log.i(TAG, "Trying to load OpenCV library");
-        if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
-            Log.e(TAG, "Cannot connect to OpenCV Manager");
-        }
-    }
-
     /** Called when the activity is first created. */
     @Override
     public void onCreate(Bundle savedInstanceState) {
@@ -98,5 +54,49 @@ public class Sample3Native extends Activity {
         super.onCreate(savedInstanceState);
         requestWindowFeature(Window.FEATURE_NO_TITLE);
         getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+
+        setContentView(R.layout.tutorial3_surface_view);
+
+        mOpenCvCameraView = (OpenCvJavaCameraView)findViewById(R.id.tutorial4_activity_surface_view);
+        mOpenCvCameraView.setCvCameraViewListener(this);
     }
+
+    @Override
+    public void onPause()
+    {
+        mOpenCvCameraView.disableView();
+        super.onPause();
+    }
+
+    @Override
+    public void onResume()
+    {
+        super.onResume();
+        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mLoaderCallback);
+    }
+
+    public void onDestroy() {
+        super.onDestroy();
+        mOpenCvCameraView.disableView();
+    }
+
+    public void onCameraViewStarted(int width, int height) {
+        mRgba = new Mat(height, width, CvType.CV_8UC4);
+        mGrayMat = new Mat(height, width, CvType.CV_8UC1);
+    }
+
+    public void onCameraViewStopped() {
+        mRgba.release();
+        mGrayMat.release();
+    }
+
+    public Mat onCameraFrame(Mat inputFrame) {
+        inputFrame.copyTo(mRgba);
+        Imgproc.cvtColor(mRgba, mGrayMat, Imgproc.COLOR_RGBA2GRAY);
+        FindFeatures(mGrayMat.getNativeObjAddr(), mRgba.getNativeObjAddr());
+
+        return mRgba;
+    }
+
+    public native void FindFeatures(long matAddrGr, long matAddrRgba);
 }
@@ -1,49 +0,0 @@
-package org.opencv.samples.tutorial3;
-
-import android.content.Context;
-import android.graphics.Bitmap;
-import android.util.Log;
-
-class Sample3View extends SampleViewBase {
-    private static final String TAG = "OCVSample::View";
-
-    private int mFrameSize;
-    private Bitmap mBitmap;
-    private int[] mRGBA;
-
-    public Sample3View(Context context) {
-        super(context);
-        Log.i(TAG, "Instantiated new " + this.getClass());
-    }
-
-    @Override
-    protected void onPreviewStarted(int previewWidth, int previewHeight) {
-        Log.i(TAG, "called onPreviewStarted("+previewWidth+", "+previewHeight+")");
-
-        mFrameSize = previewWidth * previewHeight;
-        mRGBA = new int[mFrameSize];
-        mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
-    }
-
-    @Override
-    protected void onPreviewStopped() {
-        if(mBitmap != null) {
-            mBitmap.recycle();
-            mBitmap = null;
-        }
-        mRGBA = null;
-    }
-
-    @Override
-    protected Bitmap processFrame(byte[] data) {
-        int[] rgba = mRGBA;
-
-        FindFeatures(getFrameWidth(), getFrameHeight(), data, rgba);
-
-        Bitmap bmp = mBitmap;
-        bmp.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
-        return bmp;
-    }
-
-    public native void FindFeatures(int width, int height, byte yuv[], int[] rgba);
-}
@@ -1,229 +0,0 @@
-package org.opencv.samples.tutorial3;
-
-import java.io.IOException;
-import java.util.List;
-import android.content.Context;
-import android.graphics.Bitmap;
-import android.graphics.Canvas;
-import android.graphics.ImageFormat;
-import android.graphics.SurfaceTexture;
-import android.hardware.Camera;
-import android.hardware.Camera.PreviewCallback;
-import android.os.Build;
-import android.util.Log;
-import android.view.SurfaceHolder;
-import android.view.SurfaceView;
-
-public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
-    private static final String TAG = "OCVSample::BaseView";
-
-    private Camera mCamera;
-    private SurfaceHolder mHolder;
-    private int mFrameWidth;
-    private int mFrameHeight;
-    private byte[] mFrame;
-    private volatile boolean mThreadRun;
-    private byte[] mBuffer;
-    private SurfaceTexture mSf;
-
-
-    public SampleViewBase(Context context) {
-        super(context);
-        mHolder = getHolder();
-        mHolder.addCallback(this);
-        Log.i(TAG, "Instantiated new " + this.getClass());
-    }
-
-    public int getFrameWidth() {
-        return mFrameWidth;
-    }
-
-    public int getFrameHeight() {
-        return mFrameHeight;
-    }
-
-    public void setPreview() throws IOException {
-        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
-            mSf = new SurfaceTexture(10);
-            mCamera.setPreviewTexture( mSf );
-        }
-        else
-            mCamera.setPreviewDisplay(null);
-    }
-
-    public boolean openCamera() {
-        Log.i(TAG, "Opening Camera");
-        mCamera = null;
-
-        try {
-            mCamera = Camera.open();
-        }
-        catch (Exception e){
-            Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
-        }
-
-        if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
-            for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
-                try {
-                    mCamera = Camera.open(camIdx);
-                }
-                catch (RuntimeException e) {
-                    Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
-                }
-            }
-        }
-
-        if(mCamera == null) {
-            Log.e(TAG, "Can't open any camera");
-            return false;
-        }
-
-        mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
-            public void onPreviewFrame(byte[] data, Camera camera) {
-                synchronized (SampleViewBase.this) {
-                    System.arraycopy(data, 0, mFrame, 0, data.length);
-                    SampleViewBase.this.notify();
-                }
-                camera.addCallbackBuffer(mBuffer);
-            }
-        });
-
-        return true;
-    }
-
-    public void releaseCamera() {
-        Log.i(TAG, "Releasing Camera");
-        mThreadRun = false;
-        synchronized (this) {
-            if (mCamera != null) {
-                mCamera.stopPreview();
-                mCamera.setPreviewCallback(null);
-                mCamera.release();
-                mCamera = null;
-            }
-        }
-        onPreviewStopped();
-    }
-
-    public synchronized void setupCamera(int width, int height) {
-        if (mCamera != null) {
-            Log.i(TAG, "Setup Camera - " + width + "x" + height);
-            Camera.Parameters params = mCamera.getParameters();
-            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
-            mFrameWidth = width;
-            mFrameHeight = height;
-
-            // selecting optimal camera preview size
-            {
-                int minDiff = Integer.MAX_VALUE;
-                for (Camera.Size size : sizes) {
-                    if (Math.abs(size.height - height) < minDiff) {
-                        mFrameWidth = size.width;
-                        mFrameHeight = size.height;
-                        minDiff = Math.abs(size.height - height);
-                    }
-                }
-            }
-
-            params.setPreviewSize(getFrameWidth(), getFrameHeight());
-
-            List<String> FocusModes = params.getSupportedFocusModes();
-            if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
-            {
-                params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
-            }
-
-            mCamera.setParameters(params);
-
-            /* Now allocate the buffer */
-            params = mCamera.getParameters();
-            int size = params.getPreviewSize().width * params.getPreviewSize().height;
-            size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
-            mBuffer = new byte[size];
-            /* The buffer where the current frame will be copied */
-            mFrame = new byte [size];
-            mCamera.addCallbackBuffer(mBuffer);
-
-            /* Notify that the preview is about to be started and deliver preview size */
-            onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
-
-            try {
-                setPreview();
-            } catch (IOException e) {
-                Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
-            }
-
-            /* Now we can start a preview */
-            mCamera.startPreview();
-        }
-    }
-
-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        Log.i(TAG, "called surfaceChanged");
-        // stop preview before making changes
-        try {
-            mCamera.stopPreview();
-        } catch (Exception e){
-            // ignore: tried to stop a non-existent preview
-        }
-
-        // start preview with new settings
-        setupCamera(width, height);
-    }
-
-    public void surfaceCreated(SurfaceHolder holder) {
-        Log.i(TAG, "called surfaceCreated");
-        (new Thread(this)).start();
-    }
-
-    public void surfaceDestroyed(SurfaceHolder holder) {
-        Log.i(TAG, "called surfaceDestroyed");
-    }
-
-    /* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
-    protected abstract Bitmap processFrame(byte[] data);
-
-    /**
-     * This method is called when the preview process is being started. It is called before the first frame delivered and processFrame is called
-     * It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing.
-     * @param previewWidth - the width of the preview frames that will be delivered via processFrame
-     * @param previewHeight - the height of the preview frames that will be delivered via processFrame
-     */
-    protected abstract void onPreviewStarted(int previewWidtd, int previewHeight);
-
-    /**
-     * This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed.
-     * If the Bitmap object returned via processFrame is cached - it is a good time to recycle it.
-     * Any other resources used during the preview can be released.
-     */
-    protected abstract void onPreviewStopped();
-
-    public void run() {
-        mThreadRun = true;
-        Log.i(TAG, "Started processing thread");
-        while (mThreadRun) {
-            Bitmap bmp = null;
-
-            synchronized (this) {
-                try {
-                    this.wait();
-                    if (!mThreadRun)
-                        break;
-                    bmp = processFrame(mFrame);
-                } catch (InterruptedException e) {
-                    e.printStackTrace();
-                }
-            }
-
-            if (bmp != null) {
-                Canvas canvas = mHolder.lockCanvas();
-                if (canvas != null) {
-                    canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
-                    canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
-                    mHolder.unlockCanvasAndPost(canvas);
-                }
-            }
-        }
-        Log.i(TAG, "Finished processing thread");
-    }
-}