Migrate Android Face Detection sample to DNN.

Alexander Smorkalov 2023-11-27 15:38:19 +03:00
parent 1c4f441507
commit 7b1a5fb3de
15 changed files with 288 additions and 2185 deletions

View File

@@ -62,7 +62,9 @@ Net readNet(const String& _framework, const std::vector<uchar>& bufferModel,
                const std::vector<uchar>& bufferConfig)
 {
     String framework = toLowerCase(_framework);
-    if (framework == "caffe")
+    if (framework == "onnx")
+        return readNetFromONNX(bufferModel);
+    else if (framework == "caffe")
         return readNetFromCaffe(bufferConfig, bufferModel);
     else if (framework == "tensorflow")
         return readNetFromTensorflow(bufferModel, bufferConfig);
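For orientation, here is a minimal sketch (not part of this commit) of how the new "onnx" branch is reached through the buffer-based readNet() overload; the helper function and file path are illustrative only:

    #include <fstream>
    #include <iterator>
    #include <vector>
    #include <opencv2/dnn.hpp>

    // Hypothetical helper: read a whole file into a byte buffer.
    static std::vector<uchar> readFileBytes(const std::string& path)
    {
        std::ifstream f(path, std::ios::binary);
        return std::vector<uchar>((std::istreambuf_iterator<char>(f)),
                                  std::istreambuf_iterator<char>());
    }

    int main()
    {
        std::vector<uchar> model = readFileBytes("face_detection_yunet_2023mar.onnx");
        // The config buffer argument defaults to an empty vector, which is
        // what ONNX models expect.
        cv::dnn::Net net = cv::dnn::readNet("onnx", model);
        return net.empty() ? 1 : 0;
    }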

View File

@@ -71,7 +71,7 @@ public:
      */
     CV_WRAP virtual int detect(InputArray image, OutputArray faces) = 0;

-    /** @brief Creates an instance of this class with given parameters
+    /** @brief Creates an instance of face detector class with given parameters
      *
      *  @param model the path to the requested model
      *  @param config the path to the config file for compatibility, which is not required for ONNX models
@@ -90,6 +90,29 @@
                                               int top_k = 5000,
                                               int backend_id = 0,
                                               int target_id = 0);
+
+    /** @overload
+     *
+     *  @param framework Name of the origin framework
+     *  @param bufferModel A buffer with the content of the binary file storing the weights
+     *  @param bufferConfig A buffer with the content of the text file containing the network configuration
+     *  @param input_size the size of the input image
+     *  @param score_threshold the threshold to filter out bounding boxes of score smaller than the given value
+     *  @param nms_threshold the threshold to suppress bounding boxes of IoU bigger than the given value
+     *  @param top_k keep top K bboxes before NMS
+     *  @param backend_id the id of the backend
+     *  @param target_id the id of the target device
+     */
+    CV_WRAP static Ptr<FaceDetectorYN> create(const String& framework,
+                                              const std::vector<uchar>& bufferModel,
+                                              const std::vector<uchar>& bufferConfig,
+                                              const Size& input_size,
+                                              float score_threshold = 0.9f,
+                                              float nms_threshold = 0.3f,
+                                              int top_k = 5000,
+                                              int backend_id = 0,
+                                              int target_id = 0);
 };

 /** @brief DNN-based face recognizer
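A hedged usage sketch for the new buffer-based overload (file names and the 320x320 input size are illustrative; only the create() overload itself comes from this commit):

    #include <fstream>
    #include <iterator>
    #include <vector>
    #include <opencv2/objdetect.hpp>
    #include <opencv2/imgcodecs.hpp>

    int main()
    {
        // Load the ONNX model into memory instead of passing a file path.
        std::ifstream f("face_detection_yunet_2023mar.onnx", std::ios::binary);
        std::vector<uchar> model((std::istreambuf_iterator<char>(f)),
                                 std::istreambuf_iterator<char>());
        std::vector<uchar> config;  // stays empty for ONNX models

        cv::Ptr<cv::FaceDetectorYN> detector = cv::FaceDetectorYN::create(
            "onnx", model, config, cv::Size(320, 320));

        cv::Mat image = cv::imread("face.jpg");
        detector->setInputSize(image.size());

        cv::Mat faces;
        detector->detect(image, faces);  // one 15-float row per detected face
        return faces.rows;
    }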

View File

@@ -48,6 +48,35 @@ public:
         topK = top_k;
     }

+    FaceDetectorYNImpl(const String& framework,
+                       const std::vector<uchar>& bufferModel,
+                       const std::vector<uchar>& bufferConfig,
+                       const Size& input_size,
+                       float score_threshold,
+                       float nms_threshold,
+                       int top_k,
+                       int backend_id,
+                       int target_id)
+        : divisor(32),
+          strides({8, 16, 32})
+    {
+        net = dnn::readNet(framework, bufferModel, bufferConfig);
+        CV_Assert(!net.empty());
+
+        net.setPreferableBackend(backend_id);
+        net.setPreferableTarget(target_id);
+
+        inputW = input_size.width;
+        inputH = input_size.height;
+        padW = (int((inputW - 1) / divisor) + 1) * divisor;
+        padH = (int((inputH - 1) / divisor) + 1) * divisor;
+
+        scoreThreshold = score_threshold;
+        nmsThreshold = nms_threshold;
+        topK = top_k;
+    }
+
     void setInputSize(const Size& input_size) override
     {
         inputW = input_size.width;
@@ -264,4 +293,22 @@ Ptr<FaceDetectorYN> FaceDetectorYN::create(const String& model,
 #endif
 }

+Ptr<FaceDetectorYN> FaceDetectorYN::create(const String& framework,
+                                           const std::vector<uchar>& bufferModel,
+                                           const std::vector<uchar>& bufferConfig,
+                                           const Size& input_size,
+                                           const float score_threshold,
+                                           const float nms_threshold,
+                                           const int top_k,
+                                           const int backend_id,
+                                           const int target_id)
+{
+#ifdef HAVE_OPENCV_DNN
+    return makePtr<FaceDetectorYNImpl>(framework, bufferModel, bufferConfig, input_size, score_threshold, nms_threshold, top_k, backend_id, target_id);
+#else
+    CV_UNUSED(framework); CV_UNUSED(bufferModel); CV_UNUSED(bufferConfig); CV_UNUSED(input_size); CV_UNUSED(score_threshold); CV_UNUSED(nms_threshold); CV_UNUSED(top_k); CV_UNUSED(backend_id); CV_UNUSED(target_id);
+    CV_Error(cv::Error::StsNotImplemented, "cv::FaceDetectorYN requires enabled 'dnn' module.");
+#endif
+}
+
 } // namespace cv
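The padW/padH arithmetic in the new constructor rounds the requested input size up to the nearest multiple of divisor (32, the largest of the YuNet strides). A quick check of the formula with illustrative numbers:

    // padX = (int((x - 1) / 32) + 1) * 32 rounds x up to a multiple of 32
    int divisor = 32;
    int padW = (int((320 - 1) / divisor) + 1) * divisor;  // 320 -> 320
    int padH = (int((180 - 1) / divisor) + 1) * divisor;  // 180 -> 192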

View File

@@ -1,12 +1,16 @@
 set(sample example-face-detection)

-if(BUILD_FAT_JAVA_LIB)
-  set(native_deps opencv_java)
-else()
-  set(native_deps opencv_objdetect)
-endif()
+ocv_download(FILENAME "face_detection_yunet_2023mar.onnx"
+             HASH "4ae92eeb150c82ce15ac80738b3b8167"
+             URL
+               "${OPENCV_FACE_DETECT_YN_URL}"
+               "$ENV{OPENCV_FACE_DETECT_YN_URL}"
+               "https://media.githubusercontent.com/media/opencv/opencv_zoo/main/models/face_detection_yunet/face_detection_yunet_2023mar.onnx"
+             DESTINATION_DIR "${CMAKE_CURRENT_LIST_DIR}/res/raw"
+             ID OPENCV_FACE_DETECT_YN
+             STATUS res)

-add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS "${OPENCV_ANDROID_LIB_DIR}" SDK_TARGET 11 "${ANDROID_SDK_TARGET}" NATIVE_DEPS ${native_deps})
+add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS "${OPENCV_ANDROID_LIB_DIR}" SDK_TARGET 11 "${ANDROID_SDK_TARGET}")

 if(TARGET ${sample})
   add_dependencies(opencv_android_examples ${sample})
 endif()
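In effect the YuNet model is now fetched at configure time into the sample's res/raw directory, with OPENCV_FACE_DETECT_YN_URL (as a CMake variable or environment variable) consulted as a mirror before falling back to the opencv_zoo URL.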

View File

@@ -9,20 +9,6 @@ android {
         targetSdkVersion @ANDROID_TARGET_SDK_VERSION@
         versionCode 301
         versionName "3.01"
-
-        externalNativeBuild {
-            cmake {
-                if (gradle.opencv_source == "sdk_path") {
-                    arguments "-DOpenCV_DIR=" + project(':opencv').projectDir + "/@ANDROID_PROJECT_JNI_PATH@",
-                              "-DOPENCV_FROM_SDK=TRUE"@OPENCV_ANDROID_CMAKE_EXTRA_ARGS@
-                } else {
-                    arguments "-DOPENCV_VERSION_MAJOR=@OPENCV_VERSION_MAJOR@",
-                              "-DOPENCV_FROM_SDK=FALSE"@OPENCV_ANDROID_CMAKE_EXTRA_ARGS@
-                }
-                targets "detection_based_tracker"
-            }
-        }
     }

     buildTypes {
         release {
@@ -38,16 +24,6 @@ android {
             manifest.srcFile '@ANDROID_SAMPLE_MANIFEST_PATH@'
         }
     }
-
-    externalNativeBuild {
-        cmake {
-            path '@ANDROID_SAMPLE_JNI_PATH@/CMakeLists.txt'
-        }
-    }
-
-    buildFeatures {
-        if (gradle.opencv_source == "maven_local" || gradle.opencv_source == "maven_cenral") {
-            prefab true
-        }
-    }
 }
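Since detection now goes through the Java FaceDetectorYN binding, the sample builds no native code, so the externalNativeBuild and prefab configuration are no longer needed.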
dependencies {

View File

@@ -9,7 +9,7 @@
     <activity
         android:exported="true"
-        android:name="FdActivity"
+        android:name="FaceDetectActivity"
         android:label="@string/app_name"
         android:screenOrientation="landscape"
         android:configChanges="keyboardHidden|orientation">

View File

@ -1,23 +0,0 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
#OPENCV_INSTALL_MODULES:=off
#OPENCV_LIB_TYPE:=SHARED
ifdef OPENCV_ANDROID_SDK
ifneq ("","$(wildcard $(OPENCV_ANDROID_SDK)/OpenCV.mk)")
include ${OPENCV_ANDROID_SDK}/OpenCV.mk
else
include ${OPENCV_ANDROID_SDK}/sdk/native/jni/OpenCV.mk
endif
else
include ../../sdk/native/jni/OpenCV.mk
endif
LOCAL_SRC_FILES := DetectionBasedTracker_jni.cpp
LOCAL_C_INCLUDES += $(LOCAL_PATH)
LOCAL_LDLIBS += -llog -ldl
LOCAL_MODULE := detection_based_tracker
include $(BUILD_SHARED_LIBRARY)

View File

@ -1,4 +0,0 @@
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
APP_ABI := armeabi-v7a
APP_PLATFORM := android-8

View File

@ -1,23 +0,0 @@
cmake_minimum_required(VERSION 3.6)
set(target detection_based_tracker)
project(${target} CXX)
if (OPENCV_FROM_SDK)
message(STATUS "Using OpenCV from local SDK")
set(ANDROID_OPENCV_COMPONENTS "opencv_java" CACHE STRING "")
else()
message(STATUS "Using OpenCV from AAR (Maven repo)")
set(ANDROID_OPENCV_COMPONENTS "OpenCV::opencv_java${OPENCV_VERSION_MAJOR}" CACHE STRING "")
endif()
message(STATUS "ANDROID_ABI=${ANDROID_ABI}")
find_package(OpenCV REQUIRED COMPONENTS ${ANDROID_OPENCV_COMPONENTS})
file(GLOB srcs *.cpp *.c)
file(GLOB hdrs *.hpp *.h)
include_directories("${CMAKE_CURRENT_LIST_DIR}")
add_library(${target} SHARED ${srcs} ${hdrs})
find_library(log_lib log)
target_link_libraries(${target} ${ANDROID_OPENCV_COMPONENTS} ${log_lib})

View File

@ -1,251 +0,0 @@
#include <DetectionBasedTracker_jni.h>
#include <opencv2/core.hpp>
#include <opencv2/objdetect.hpp>
#include <string>
#include <vector>
#include <android/log.h>
#define LOG_TAG "FaceDetection/DetectionBasedTracker"
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
using namespace std;
using namespace cv;
inline void vector_Rect_to_Mat(vector<Rect>& v_rect, Mat& mat)
{
mat = Mat(v_rect, true);
}
class CascadeDetectorAdapter: public DetectionBasedTracker::IDetector
{
public:
CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector):
IDetector(),
Detector(detector)
{
LOGD("CascadeDetectorAdapter::Detect::Detect");
CV_Assert(detector);
}
void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects)
{
LOGD("CascadeDetectorAdapter::Detect: begin");
LOGD("CascadeDetectorAdapter::Detect: scaleFactor=%.2f, minNeighbours=%d, minObjSize=(%dx%d), maxObjSize=(%dx%d)", scaleFactor, minNeighbours, minObjSize.width, minObjSize.height, maxObjSize.width, maxObjSize.height);
Detector->detectMultiScale(Image, objects, scaleFactor, minNeighbours, 0, minObjSize, maxObjSize);
LOGD("CascadeDetectorAdapter::Detect: end");
}
virtual ~CascadeDetectorAdapter()
{
LOGD("CascadeDetectorAdapter::Detect::~Detect");
}
private:
CascadeDetectorAdapter();
cv::Ptr<cv::CascadeClassifier> Detector;
};
struct DetectorAgregator
{
cv::Ptr<CascadeDetectorAdapter> mainDetector;
cv::Ptr<CascadeDetectorAdapter> trackingDetector;
cv::Ptr<DetectionBasedTracker> tracker;
DetectorAgregator(cv::Ptr<CascadeDetectorAdapter>& _mainDetector, cv::Ptr<CascadeDetectorAdapter>& _trackingDetector):
mainDetector(_mainDetector),
trackingDetector(_trackingDetector)
{
CV_Assert(_mainDetector);
CV_Assert(_trackingDetector);
DetectionBasedTracker::Parameters DetectorParams;
tracker = makePtr<DetectionBasedTracker>(mainDetector, trackingDetector, DetectorParams);
}
};
JNIEXPORT jlong JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeCreateObject
(JNIEnv * jenv, jclass, jstring jFileName, jint faceSize)
{
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeCreateObject enter");
const char* jnamestr = jenv->GetStringUTFChars(jFileName, NULL);
string stdFileName(jnamestr);
jlong result = 0;
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeCreateObject");
try
{
cv::Ptr<CascadeDetectorAdapter> mainDetector = makePtr<CascadeDetectorAdapter>(
makePtr<CascadeClassifier>(stdFileName));
cv::Ptr<CascadeDetectorAdapter> trackingDetector = makePtr<CascadeDetectorAdapter>(
makePtr<CascadeClassifier>(stdFileName));
result = (jlong)new DetectorAgregator(mainDetector, trackingDetector);
if (faceSize > 0)
{
mainDetector->setMinObjectSize(Size(faceSize, faceSize));
//trackingDetector->setMinObjectSize(Size(faceSize, faceSize));
}
}
catch(const cv::Exception& e)
{
LOGD("nativeCreateObject caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeCreateObject caught unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code of DetectionBasedTracker.nativeCreateObject()");
return 0;
}
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeCreateObject exit");
return result;
}
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDestroyObject
(JNIEnv * jenv, jclass, jlong thiz)
{
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDestroyObject");
try
{
if(thiz != 0)
{
((DetectorAgregator*)thiz)->tracker->stop();
delete (DetectorAgregator*)thiz;
}
}
catch(const cv::Exception& e)
{
LOGD("nativeestroyObject caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeDestroyObject caught unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code of DetectionBasedTracker.nativeDestroyObject()");
}
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDestroyObject exit");
}
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStart
(JNIEnv * jenv, jclass, jlong thiz)
{
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStart");
try
{
((DetectorAgregator*)thiz)->tracker->run();
}
catch(const cv::Exception& e)
{
LOGD("nativeStart caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeStart caught unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code of DetectionBasedTracker.nativeStart()");
}
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStart exit");
}
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStop
(JNIEnv * jenv, jclass, jlong thiz)
{
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStop");
try
{
((DetectorAgregator*)thiz)->tracker->stop();
}
catch(const cv::Exception& e)
{
LOGD("nativeStop caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeStop caught unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code of DetectionBasedTracker.nativeStop()");
}
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStop exit");
}
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeSetFaceSize
(JNIEnv * jenv, jclass, jlong thiz, jint faceSize)
{
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeSetFaceSize -- BEGIN");
try
{
if (faceSize > 0)
{
((DetectorAgregator*)thiz)->mainDetector->setMinObjectSize(Size(faceSize, faceSize));
//((DetectorAgregator*)thiz)->trackingDetector->setMinObjectSize(Size(faceSize, faceSize));
}
}
catch(const cv::Exception& e)
{
LOGD("nativeStop caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeSetFaceSize caught unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code of DetectionBasedTracker.nativeSetFaceSize()");
}
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeSetFaceSize -- END");
}
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect
(JNIEnv * jenv, jclass, jlong thiz, jlong imageGray, jlong faces)
{
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect");
try
{
vector<Rect> RectFaces;
((DetectorAgregator*)thiz)->tracker->process(*((Mat*)imageGray));
((DetectorAgregator*)thiz)->tracker->getObjects(RectFaces);
*((Mat*)faces) = Mat(RectFaces, true);
}
catch(const cv::Exception& e)
{
LOGD("nativeCreateObject caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeDetect caught unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code DetectionBasedTracker.nativeDetect()");
}
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect END");
}

View File

@ -1,61 +0,0 @@
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class org_opencv_samples_fd_DetectionBasedTracker */
#ifndef _Included_org_opencv_samples_fd_DetectionBasedTracker
#define _Included_org_opencv_samples_fd_DetectionBasedTracker
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeCreateObject
* Signature: (Ljava/lang/String;F)J
*/
JNIEXPORT jlong JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeCreateObject
(JNIEnv *, jclass, jstring, jint);
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeDestroyObject
* Signature: (J)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDestroyObject
(JNIEnv *, jclass, jlong);
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeStart
* Signature: (J)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStart
(JNIEnv *, jclass, jlong);
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeStop
* Signature: (J)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStop
(JNIEnv *, jclass, jlong);
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeSetFaceSize
* Signature: (JI)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeSetFaceSize
(JNIEnv *, jclass, jlong, jint);
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeDetect
* Signature: (JJJ)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect
(JNIEnv *, jclass, jlong, jlong, jlong);
#ifdef __cplusplus
}
#endif
#endif

View File

@ -1,41 +0,0 @@
package org.opencv.samples.facedetect;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
public class DetectionBasedTracker
{
public DetectionBasedTracker(String cascadeName, int minFaceSize) {
mNativeObj = nativeCreateObject(cascadeName, minFaceSize);
}
public void start() {
nativeStart(mNativeObj);
}
public void stop() {
nativeStop(mNativeObj);
}
public void setMinFaceSize(int size) {
nativeSetFaceSize(mNativeObj, size);
}
public void detect(Mat imageGray, MatOfRect faces) {
nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr());
}
public void release() {
nativeDestroyObject(mNativeObj);
mNativeObj = 0;
}
private long mNativeObj = 0;
private static native long nativeCreateObject(String cascadeName, int minFaceSize);
private static native void nativeDestroyObject(long thiz);
private static native void nativeStart(long thiz);
private static native void nativeStop(long thiz);
private static native void nativeSetFaceSize(long thiz, int size);
private static native void nativeDetect(long thiz, long inputImage, long faces);
}

View File

@@ -0,0 +1,203 @@
package org.opencv.samples.facedetect;
import java.lang.Math;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import org.opencv.android.CameraActivity;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.objdetect.FaceDetectorYN;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.WindowManager;
import android.widget.Toast;
public class FaceDetectActivity extends CameraActivity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private static final Scalar BOX_COLOR = new Scalar(0, 255, 0);
private static final Scalar RIGHT_EYE_COLOR = new Scalar(255, 0, 0);
private static final Scalar LEFT_EYE_COLOR = new Scalar(0, 0, 255);
private static final Scalar NOSE_TIP_COLOR = new Scalar(0, 255, 0);
private static final Scalar MOUTH_RIGHT_COLOR = new Scalar(255, 0, 255);
private static final Scalar MOUTH_LEFT_COLOR = new Scalar(0, 255, 255);
private Mat mRgba;
private Mat mBgr;
private Mat mBgrScaled;
private Size mInputSize = null;
private float mScale = 2.f;
private MatOfByte mModelBuffer;
private MatOfByte mConfigBuffer;
private FaceDetectorYN mFaceDetector;
private Mat mFaces;
private CameraBridgeViewBase mOpenCvCameraView;
public FaceDetectActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
if (OpenCVLoader.initLocal()) {
Log.i(TAG, "OpenCV loaded successfully");
} else {
Log.e(TAG, "OpenCV initialization failed!");
(Toast.makeText(this, "OpenCV initialization failed!", Toast.LENGTH_LONG)).show();
return;
}
byte[] buffer;
try {
// load cascade file from application resources
InputStream is = getResources().openRawResource(R.raw.face_detection_yunet_2023mar);
int size = is.available();
buffer = new byte[size];
int bytesRead = is.read(buffer);
is.close();
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Failed to ONNX model from resources! Exception thrown: " + e);
(Toast.makeText(this, "Failed to ONNX model from resources!", Toast.LENGTH_LONG)).show();
return;
}
mModelBuffer = new MatOfByte(buffer);
mConfigBuffer = new MatOfByte();
mFaceDetector = FaceDetectorYN.create("onnx", mModelBuffer, mConfigBuffer, new Size(320, 320));
if (mFaceDetector == null) {
Log.e(TAG, "Failed to create FaceDetectorYN!");
(Toast.makeText(this, "Failed to create FaceDetectorYN!", Toast.LENGTH_LONG)).show();
return;
} else
Log.i(TAG, "FaceDetectorYN initialized successfully!");
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.face_detect_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (mOpenCvCameraView != null)
mOpenCvCameraView.enableView();
}
@Override
protected List<? extends CameraBridgeViewBase> getCameraViewList() {
return Collections.singletonList(mOpenCvCameraView);
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat();
mBgr = new Mat();
mBgrScaled = new Mat();
mFaces = new Mat();
}
public void onCameraViewStopped() {
mRgba.release();
mBgr.release();
mBgrScaled.release();
mFaces.release();
}
public void visualize(Mat rgba, Mat faces) {
int thickness = 2;
float[] faceData = new float[faces.cols() * faces.channels()];
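// Each row of `faces` holds 15 floats per detection: the bounding box
// (x, y, w, h), five landmark points as (x, y) pairs (right eye, left eye,
// nose tip, right mouth corner, left mouth corner), and the confidence score.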
for (int i = 0; i < faces.rows(); i++)
{
faces.get(i, 0, faceData);
Log.d(TAG, "Detected face (" + faceData[0] + ", " + faceData[1] + ", " +
faceData[2] + ", " + faceData[3] + ")");
// Draw bounding box
Imgproc.rectangle(rgba, new Rect(Math.round(mScale*faceData[0]), Math.round(mScale*faceData[1]),
Math.round(mScale*faceData[2]), Math.round(mScale*faceData[3])),
BOX_COLOR, thickness);
// Draw landmarks
Imgproc.circle(rgba, new Point(Math.round(mScale*faceData[4]), Math.round(mScale*faceData[5])),
2, RIGHT_EYE_COLOR, thickness);
Imgproc.circle(rgba, new Point(Math.round(mScale*faceData[6]), Math.round(mScale*faceData[7])),
2, LEFT_EYE_COLOR, thickness);
Imgproc.circle(rgba, new Point(Math.round(mScale*faceData[8]), Math.round(mScale*faceData[9])),
2, NOSE_TIP_COLOR, thickness);
Imgproc.circle(rgba, new Point(Math.round(mScale*faceData[10]), Math.round(mScale*faceData[11])),
2, MOUTH_RIGHT_COLOR, thickness);
Imgproc.circle(rgba, new Point(Math.round(mScale*faceData[12]), Math.round(mScale*faceData[13])),
2, MOUTH_LEFT_COLOR, thickness);
}
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
if (mInputSize == null) {
mInputSize = new Size(Math.round(mRgba.cols()/mScale), Math.round(mRgba.rows()/mScale));
mFaceDetector.setInputSize(mInputSize);
}
Imgproc.cvtColor(mRgba, mBgr, Imgproc.COLOR_RGBA2BGR);
Imgproc.resize(mBgr, mBgrScaled, mInputSize);
if (mFaceDetector != null) {
int status = mFaceDetector.detect(mBgrScaled, mFaces);
Log.d(TAG, "Detector returned status " + status);
visualize(mRgba, mFaces);
}
return mRgba;
}
}
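Note that detection runs on a downscaled BGR copy of the frame (mScale = 2), which is why visualize() multiplies every reported coordinate back by mScale before drawing on the full-resolution RGBA image.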

View File

@ -1,244 +0,0 @@
package org.opencv.samples.facedetect;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import org.opencv.android.CameraActivity;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.WindowManager;
import android.widget.Toast;
public class FdActivity extends CameraActivity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
private MenuItem mItemFace50;
private MenuItem mItemFace40;
private MenuItem mItemFace30;
private MenuItem mItemFace20;
private MenuItem mItemType;
private Mat mRgba;
private Mat mGray;
private File mCascadeFile;
private CascadeClassifier mJavaDetector;
private DetectionBasedTracker mNativeDetector;
private int mDetectorType = JAVA_DETECTOR;
private String[] mDetectorName;
private float mRelativeFaceSize = 0.2f;
private int mAbsoluteFaceSize = 0;
private CameraBridgeViewBase mOpenCvCameraView;
public FdActivity() {
mDetectorName = new String[2];
mDetectorName[JAVA_DETECTOR] = "Java";
mDetectorName[NATIVE_DETECTOR] = "Native (tracking)";
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
if (OpenCVLoader.initLocal()) {
Log.i(TAG, "OpenCV loaded successfully");
} else {
Log.e(TAG, "OpenCV initialization failed!");
(Toast.makeText(this, "OpenCV initialization failed!", Toast.LENGTH_LONG)).show();
return;
}
// Load native library after(!) OpenCV initialization
System.loadLibrary("detection_based_tracker");
try {
// load cascade file from application resources
InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
FileOutputStream os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
if (mJavaDetector.empty()) {
Log.e(TAG, "Failed to load cascade classifier");
mJavaDetector = null;
} else
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
cascadeDir.delete();
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.face_detect_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (mOpenCvCameraView != null)
mOpenCvCameraView.enableView();
}
@Override
protected List<? extends CameraBridgeViewBase> getCameraViewList() {
return Collections.singletonList(mOpenCvCameraView);
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mGray = new Mat();
mRgba = new Mat();
}
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
mGray = inputFrame.gray();
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
}
MatOfRect faces = new MatOfRect();
if (mDetectorType == JAVA_DETECTOR) {
if (mJavaDetector != null)
mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
}
else if (mDetectorType == NATIVE_DETECTOR) {
if (mNativeDetector != null)
mNativeDetector.detect(mGray, faces);
}
else {
Log.e(TAG, "Detection method is not selected!");
}
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++)
Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
return mRgba;
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "called onCreateOptionsMenu");
mItemFace50 = menu.add("Face size 50%");
mItemFace40 = menu.add("Face size 40%");
mItemFace30 = menu.add("Face size 30%");
mItemFace20 = menu.add("Face size 20%");
mItemType = menu.add(mDetectorName[mDetectorType]);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemFace50)
setMinFaceSize(0.5f);
else if (item == mItemFace40)
setMinFaceSize(0.4f);
else if (item == mItemFace30)
setMinFaceSize(0.3f);
else if (item == mItemFace20)
setMinFaceSize(0.2f);
else if (item == mItemType) {
int tmpDetectorType = (mDetectorType + 1) % mDetectorName.length;
item.setTitle(mDetectorName[tmpDetectorType]);
setDetectorType(tmpDetectorType);
}
return true;
}
private void setMinFaceSize(float faceSize) {
mRelativeFaceSize = faceSize;
mAbsoluteFaceSize = 0;
}
private void setDetectorType(int type) {
if (mDetectorType != type) {
mDetectorType = type;
if (type == NATIVE_DETECTOR) {
Log.i(TAG, "Detection Based Tracker enabled");
mNativeDetector.start();
} else {
Log.i(TAG, "Cascade detector enabled");
mNativeDetector.stop();
}
}
}
}