commit 3d9c78f4aa (parent 16ae6fa6ad)
Author: Vadim Pisarevsky
Date: 2010-09-14 18:25:09 +00:00

added build scripts for Android, contributed by Ethan Rublee. The scripts are not quite usable yet.

60 changed files with 5296 additions and 0 deletions

android/Makefile
@@ -0,0 +1,76 @@
# The path to the NDK. For now this requires the CrystaX r4 NDK, which adds
# support for the C++ standard library.
ifndef ANDROID_NDK_ROOT
$(error ANDROID_NDK_ROOT not defined)
endif
ANDROID_NDK_BASE = $(ANDROID_NDK_ROOT)
ifndef PROJECT_PATH
$(info PROJECT_PATH defaulting to this directory)
PROJECT_PATH=.
endif
#define OPENCV_ROOT when calling this makefile
ifndef OPENCV_ROOT
$(error Please define OPENCV_ROOT with something like the command \
make OPENCV_ROOT=<opencv>)
endif
$(info OPENCV_ROOT = $(OPENCV_ROOT))
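# Example invocation (a sketch; the paths are illustrative):
#   make OPENCV_ROOT=~/android-opencv/opencv ANDROID_NDK_ROOT=~/android-ndk-r4-crystax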
# The name of the native library
LIBNAME = libandroid-opencv.so
# Find all the C++ sources in the native folder
SOURCES = $(wildcard jni/*.cpp)
HEADERS = $(wildcard jni/*.h)
SWIG_IS = $(wildcard jni/*.i)
ANDROID_MKS = $(wildcard jni/*.mk)
SWIG_MAIN = jni/android-cv.i
SWIG_JAVA_DIR = src/com/opencv/jni
SWIG_JAVA_OUT = $(wildcard $(SWIG_JAVA_DIR)/*.java)
SWIG_C_DIR = jni/gen
SWIG_C_OUT = $(SWIG_C_DIR)/android_cv_wrap.cpp
# The real native library stripped of symbols
LIB = libs/armeabi-v7a/$(LIBNAME) libs/armeabi/$(LIBNAME)
all: $(LIB)
#calls the ndk-build script, passing it OPENCV_ROOT and PROJECT_PATH
$(LIB): $(SWIG_C_OUT) $(SOURCES) $(HEADERS) $(ANDROID_MKS)
$(ANDROID_NDK_BASE)/ndk-build OPENCV_ROOT=$(OPENCV_ROOT) \
PROJECT_PATH=$(PROJECT_PATH) V=$(V) $(NDK_FLAGS)
#this creates the swig wrappers
$(SWIG_C_OUT): $(SWIG_IS)
make clean-swig &&\
mkdir -p $(SWIG_C_DIR) &&\
mkdir -p $(SWIG_JAVA_DIR) &&\
swig -java -c++ -package "com.opencv.jni" \
-outdir $(SWIG_JAVA_DIR) \
-o $(SWIG_C_OUT) $(SWIG_MAIN)
#clean targets
.PHONY: clean clean-swig cleanall nogdb
nogdb: $(LIB)
rm -f libs/armeabi*/gdb*
#this deletes the generated swig java and the generated c wrapper
clean-swig:
rm -f $(SWIG_JAVA_OUT) $(SWIG_C_OUT)
#does clean-swig and then uses the ndk-build clean
clean: clean-swig
$(ANDROID_NDK_BASE)/ndk-build OPENCV_ROOT=$(OPENCV_ROOT) \
PROJECT_PATH=$(PROJECT_PATH) clean V=$(V) $(NDK_FLAGS)

android/README.android.txt
@@ -0,0 +1,109 @@
#summary OpenCV build instructions
#labels Featured
Using the NDK, OpenCV may be built for the Android platform.
The OpenCV port is in svn/trunk/opencv. It is essentially a snapshot of the OpenCV trunk - rev 3096. In the future this will be made compatible with trunk, but for
simplicity we are freezing OpenCV until this works consistently.
= Prerequisites =
Use the CrystaX NDK r4 - http://crystax.net/android/ndk.php
The CrystaX NDK supports the STL, exceptions, and RTTI. OpenCV will not build with the standard Android NDK!
= The way =
Using {{{r4}}} of the NDK, cd to the top level of opencv and run
{{{ndk-build NDK_APPLICATION_MK=Application.mk}}}
or run build.sh, which is just that line above (this assumes the ndk directory is on your path).
This has the advantage of storing the libraries locally in the opencv folder, and is now the preferred method for building OpenCV. The libraries will all be built as static libs which may be linked to from an external NDK project (see samples).
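For example, a complete build session might look like this (a sketch; it assumes the CrystaX {{{ndk-build}}} is on your PATH and you start from the opencv checkout):
{{{
cd opencv
ndk-build NDK_APPLICATION_MK=Application.mk -j4
# the static libs land under obj/local/armeabi and obj/local/armeabi-v7a
# (older CrystaX builds used bin/ndk/local/<abi> - see libs.mk, which searches both)
}}}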
== Using opencv in your applications ==
See the samples directory.
Two convenience makefiles have been created, one for the libraries and one for the includes. They expect OPENCV_ROOT and OPENCV_LIBS_DIR
to be defined before they are included.
A sample Android.mk file for CVCamera, which requires OpenCV, follows:
{{{
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
#pass in OPENCV_ROOT or define it here
#OPENCV_ROOT := ~/android-opencv/opencv
#OPENCV_LIBS_DIR := $(OPENCV_ROOT)/bin/ndk/local/armeabi
#define OPENCV_INCLUDES
include $(OPENCV_ROOT)/includes.mk
#define OPENCV_LIBS
include $(OPENCV_ROOT)/libs.mk
LOCAL_LDLIBS += $(OPENCV_LIBS) -llog
LOCAL_C_INCLUDES += $(OPENCV_INCLUDES)
LOCAL_MODULE := cvcamera
LOCAL_SRC_FILES := android_cv_wrap.cpp image_pool.cpp yuv420sp2rgb.c Processor.cpp
include $(BUILD_SHARED_LIBRARY)
}}}
= old way (not supported) =
In {{{<ndk-dir>/apps}}}, make a link to opencv:
{{{
cd android-ndk-r4-crystax/apps
ln -s ~/android-opencv/opencv/
cd ..
make APP=opencv -j4
}}}
This should build everything as static libs. These libs will be located in the {{{android-ndk-r4-crystax/out/apps/opencv/armeabi}}}
folder.
Now, in your own NDK project, do the following:
try building the samples/hello-jni project.
A sample Android.mk:
{{{
LOCAL_PATH := $(call my-dir)
OpenCV_Root := apps/opencv
OpenCVInclude := $(OpenCV_Root)/include/opencv $(OpenCV_Root)/3rdparty/include/ \
$(OpenCV_Root)/modules/core/include/ $(OpenCV_Root)/modules/highgui/include/ \
$(OpenCV_Root)/modules/imgproc/include $(OpenCV_Root)/modules/ml/include \
$(OpenCV_Root)/modules/features2d/include \
$(OpenCV_Root)/modules/legacy/include \
$(OpenCV_Root)/modules/calib3d/include \
$(OpenCV_Root)/modules/objdetect/include \
$(OpenCV_Root)/modules/contrib/include \
$(OpenCV_Root)/modules/video/include
include $(CLEAR_VARS)
LOCAL_MODULE := my-project
LOCAL_SRC_FILES := test-opencv.cpp
LOCAL_LDLIBS := -L$(NDK_APP_OUT)/opencv/armeabi -lcalib3d -lfeatures2d \
-lobjdetect -lvideo -limgproc -lhighgui -lcore -llegacy -lml -lopencv_lapack -lflann \
-lzlib -L$(SYSROOT)/usr/lib -lstdc++ -lgcc -lsupc++ -lc -ldl
LOCAL_C_INCLUDES := $(OpenCVInclude)
include $(BUILD_SHARED_LIBRARY)
}}}
The LOCAL_LDLIBS are very picky. {{{-L$(NDK_APP_OUT)/opencv/armeabi}}} is where the ndk builds opencv, usually in {{{<ndk>/out/apps/opencv/armeabi}}}. You can navigate there and see the static libraries that were built.
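For example, to check that the archives are actually there (an illustrative path):
{{{
ls android-ndk-r4-crystax/out/apps/opencv/armeabi/*.a
}}}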

android/clean.sh
@@ -0,0 +1 @@
make OPENCV_ROOT=../ clean

android/includes.mk
@@ -0,0 +1,16 @@
ifndef OPENCV_ROOT
$(error Please define OPENCV_ROOT to point to the root folder of opencv)
endif
OPENCV_INCLUDES := $(OPENCV_ROOT)/3rdparty/include/ \
$(OPENCV_ROOT)/modules/core/include/ $(OPENCV_ROOT)/modules/highgui/include/ \
$(OPENCV_ROOT)/modules/imgproc/include $(OPENCV_ROOT)/modules/ml/include \
$(OPENCV_ROOT)/modules/features2d/include \
$(OPENCV_ROOT)/modules/legacy/include \
$(OPENCV_ROOT)/modules/calib3d/include \
$(OPENCV_ROOT)/modules/objdetect/include \
$(OPENCV_ROOT)/modules/contrib/include \
$(OPENCV_ROOT)/modules/video/include
ANDROID_OPENCV_INCLUDES := $(OPENCV_ROOT)/android/jni
#$(info the opencv includes are here: $(OPENCV_INCLUDES) )
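#Usage: include this file from an Android.mk and add $(OPENCV_INCLUDES)
#(and, for the wrapper library, $(ANDROID_OPENCV_INCLUDES)) to LOCAL_C_INCLUDES.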

android/libs.mk
@@ -0,0 +1,17 @@
#ifndef OPENCV_LIBS_DIR
#$(error please define to something like: OPENCV_LIBS_DIR=$(OPENCV_ROOT)/bin/ndk/local/armeabi )
#endif
#$(info the opencv libs are here: $(OPENCV_LIBS_DIR) )
#the newest NDK from CrystaX stores the libs in the obj folder, so search both locations
OPENCV_LIB_DIRS := -L$(OPENCV_ROOT)/bin/ndk/local/armeabi-v7a -L$(OPENCV_ROOT)/bin/ndk/local/armeabi -L$(OPENCV_ROOT)/obj/local/armeabi-v7a -L$(OPENCV_ROOT)/obj/local/armeabi
#The order of linking is very important: modules that depend on other modules
#must come first. Some entries here may still be out of order.
OPENCV_LIBS := $(OPENCV_LIB_DIRS) -lfeatures2d -lcalib3d -limgproc -lobjdetect \
-lvideo -lhighgui -lml -llegacy -lcore -lopencv_lapack -lflann \
-lzlib -lpng -ljpeg -ljasper
ANDROID_OPENCV_LIBS := -L$(OPENCV_ROOT)/android/libs/armeabi -L$(OPENCV_ROOT)/android/libs/armeabi-v7a -landroid-opencv
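#Usage: include this file from an Android.mk and add $(OPENCV_LIBS) (and, for
#the wrapper library, $(ANDROID_OPENCV_LIBS)) to LOCAL_LDLIBS.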

android/opencv/3rdparty/Android.mk
@@ -0,0 +1,3 @@
LOCAL_PATH := $(call my-dir)
include $(call all-subdir-makefiles)

android/opencv/3rdparty/flann/Android.mk
@@ -0,0 +1,14 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := flann
MODULE_ROOT := ${OpenCV_Root}/3rdparty/flann
sources := $(wildcard $(MODULE_ROOT)/*.cpp)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := $(OpenCV_Root)/3rdparty/include/ $(MODULE_ROOT)
include $(BUILD_STATIC_LIBRARY)

android/opencv/3rdparty/lapack/Android.mk
@@ -0,0 +1,13 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := opencv_lapack
MODULE_PATH := ${OpenCV_Root}/3rdparty/lapack
sources := $(wildcard $(MODULE_PATH)/*.c)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := $(OpenCV_Root)/3rdparty/include
include $(BUILD_STATIC_LIBRARY)

android/opencv/3rdparty/libjasper/Android.mk
@@ -0,0 +1,15 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := jasper
MODULE_PATH := $(OpenCV_Root)/3rdparty/libjasper
sources := $(wildcard ${MODULE_PATH}/*.c)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := $(OpenCV_Root)/3rdparty/include/
LOCAL_CFLAGS += -DEXCLUDE_MIF_SUPPORT -DEXCLUDE_PNM_SUPPORT -DEXCLUDE_BMP_SUPPORT -DEXCLUDE_RAS_SUPPORT -DEXCLUDE_JPG_SUPPORT -DEXCLUDE_PGX_SUPPORT
include $(BUILD_STATIC_LIBRARY)

android/opencv/3rdparty/libjpeg/Android.mk
@@ -0,0 +1,13 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := jpeg
MODULE_PATH := $(OpenCV_Root)/3rdparty/libjpeg
sources := $(wildcard ${MODULE_PATH}/*.c)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := $(OpenCV_Root)/3rdparty/include/ $(MODULE_PATH)
include $(BUILD_STATIC_LIBRARY)

android/opencv/3rdparty/libpng/Android.mk
@@ -0,0 +1,13 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := png
MODULE_PATH := $(OpenCV_Root)/3rdparty/libpng
sources := $(wildcard $(MODULE_PATH)/*.c)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := $(OpenCV_Root)/3rdparty/include/
include $(BUILD_STATIC_LIBRARY)

android/opencv/3rdparty/zlib/Android.mk
@@ -0,0 +1,13 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := zlib
MODULE_PATH := $(OpenCV_Root)/3rdparty/zlib
sources := $(wildcard $(MODULE_PATH)/*.c)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := $(OpenCV_Root)/3rdparty/include/ $(MODULE_PATH)
include $(BUILD_STATIC_LIBRARY)

android/opencv/Android.mk
@@ -0,0 +1,15 @@
LOCAL_PATH := $(call my-dir)
OpenCV_Root := $(LOCAL_PATH)/../..
OpenCVInclude := $(LOCAL_PATH) $(OpenCV_Root)/include/opencv $(OpenCV_Root)/3rdparty/include/ \
$(OpenCV_Root)/modules/core/include/ $(OpenCV_Root)/modules/highgui/include/ \
$(OpenCV_Root)/modules/imgproc/include $(OpenCV_Root)/modules/ml/include \
$(OpenCV_Root)/modules/features2d/include \
$(OpenCV_Root)/modules/legacy/include \
$(OpenCV_Root)/modules/calib3d/include \
$(OpenCV_Root)/modules/objdetect/include \
$(OpenCV_Root)/modules/video/include \
$(OpenCV_Root)/modules/contrib/include
include 3rdparty/Android.mk
include modules/Android.mk

android/opencv/AndroidManifest.xml
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.opencv"
android:versionCode="2"
android:versionName="2.2">
</manifest>

android/opencv/Application.mk
@@ -0,0 +1,6 @@
APP_BUILD_SCRIPT := $(call my-dir)/Android.mk
APP_PROJECT_PATH := $(call my-dir)
# The ARMv7 build is significantly faster due to the use of the hardware FPU
APP_ABI := armeabi armeabi-v7a
APP_MODULES := opencv_lapack flann zlib core imgproc ml highgui features2d \
legacy objdetect calib3d video contrib png jpeg jasper

android/opencv/cvconfig.h
@@ -0,0 +1,158 @@
/* Define to one of `_getb67', `GETB67', `getb67' for Cray-2 and Cray-YMP
systems. This function is required for `alloca.c' support on those systems.
*/
/* #undef CRAY_STACKSEG_END */
/* Define to 1 if using `alloca.c'. */
/* #undef C_ALLOCA */
/* Define to 1 if you have `alloca', as a function or macro. */
/* #undef HAVE_ALLOCA */
/* Define to 1 if you have <alloca.h> and it should be used (not on Ultrix).
*/
#define HAVE_ALLOCA_H 1
/* V4L capturing support */
//#define HAVE_CAMV4L
/* V4L2 capturing support */
//#define HAVE_CAMV4L2
/* Carbon windowing environment */
/* #undef HAVE_CARBON */
/* IEEE1394 capturing support */
/* #undef HAVE_DC1394 */
/* libdc1394 0.9.4 or 0.9.5 */
/* #undef HAVE_DC1394_095 */
/* IEEE1394 capturing support - libdc1394 v2.x */
//#define HAVE_DC1394_2
/* ffmpeg in Gentoo */
/* #undef HAVE_GENTOO_FFMPEG */
/* FFMpeg video library */
/* #undef HAVE_FFMPEG */
/* ffmpeg's libswscale */
/* #undef HAVE_FFMPEG_SWSCALE */
/* GStreamer multimedia framework */
/* #undef HAVE_GSTREAMER */
/* GStreamer with gstappsink & gstappsrc */
/* #undef HAVE_GSTREAMER_APP */
/* GTK+ 2.0 Thread support */
//#define HAVE_GTHREAD
/* GTK+ 2.x toolkit */
//#define HAVE_GTK
/* OpenEXR codec */
/* #undef HAVE_ILMIMF */
/* Apple ImageIO Framework */
/* #undef HAVE_IMAGEIO */
/* Define to 1 if you have the <inttypes.h> header file. */
/* #undef HAVE_INTTYPES_H */
/* JPEG-2000 codec */
#define HAVE_JASPER
/* IJG JPEG codec */
#define HAVE_JPEG
/* Define to 1 if you have the `dl' library (-ldl). */
/* #undef HAVE_LIBDL */
/* Define to 1 if you have the `gomp' library (-lgomp). */
/* #undef HAVE_LIBGOMP */
/* Define to 1 if you have the `m' library (-lm). */
/* #undef HAVE_LIBM */
/* libpng/png.h needs to be included */
#undef HAVE_LIBPNG_PNG_H
/* Define to 1 if you have the `pthread' library (-lpthread). */
//#define HAVE_LIBPTHREAD 1
/* Define to 1 if you have the `lrint' function. */
/* #undef HAVE_LRINT */
/* PNG codec */
#define HAVE_PNG
/* Define to 1 if you have the `png_get_valid' function. */
/* #undef HAVE_PNG_GET_VALID */
/* png.h needs to be included */
#define HAVE_PNG_H
/* Define to 1 if you have the `png_set_tRNS_to_alpha' function. */
/* #undef HAVE_PNG_SET_TRNS_TO_ALPHA */
/* QuickTime video libraries */
/* #undef HAVE_QUICKTIME */
/* TIFF codec */
/* #undef HAVE_TIFF */
/* Unicap video capture library */
/* #undef HAVE_UNICAP */
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Xine video library */
/* #undef HAVE_XINE */
/* LZ77 compression/decompression library (used for PNG) */
/* #undef HAVE_ZLIB */
/* Intel Integrated Performance Primitives */
/* #undef HAVE_IPP */
/* OpenCV compiled as static or dynamic libs */
//#define OPENCV_BUILD_SHARED_LIB
/* Name of package */
#define PACKAGE "opencv"
/* Define to the address where bug reports for this package should be sent. */
//#define PACKAGE_BUGREPORT "opencvlibrary-devel@lists.sourceforge.net"
/* Define to the full name of this package. */
#define PACKAGE_NAME "opencv"
/* Define to the full name and version of this package. */
#define PACKAGE_STRING "opencv 2.1.0"
/* Define to the one symbol short name of this package. */
#define PACKAGE_TARNAME "opencv"
/* Define to the version of this package. */
#define PACKAGE_VERSION "2.1.0"
/* If using the C implementation of alloca, define if you know the
direction of stack growth for your system; otherwise it will be
automatically deduced at runtime.
STACK_DIRECTION > 0 => grows toward higher addresses
STACK_DIRECTION < 0 => grows toward lower addresses
STACK_DIRECTION = 0 => direction of growth unknown */
/* #undef STACK_DIRECTION */
/* Version number of package */
#define VERSION "2.1.0"
/* Define to 1 if your processor stores words with the most significant byte
first (like Motorola and SPARC, unlike Intel and VAX). */
/* #undef WORDS_BIGENDIAN */
/* Intel Threading Building Blocks */
/* #undef HAVE_TBB */

android/opencv/default.properties
@@ -0,0 +1,12 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
android.library=true
# Project target.
target=android-7

android/opencv/jni/Android.mk
@@ -0,0 +1,31 @@
# date: Summer, 2010
# author: Ethan Rublee
# contact: ethan.rublee@gmail.com
#
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
#pass in OPENCV_ROOT or define it here
#OPENCV_ROOT := ~/android-opencv/opencv
ifndef OPENCV_ROOT
OPENCV_ROOT := /home/ethan/workspace/googlecode_android_opencv/opencv
endif
#OPENCV_LIBS_DIR := $(OPENCV_ROOT)/bin/ndk/local/armeabi
#define OPENCV_INCLUDES
include $(OPENCV_ROOT)/includes.mk
#define OPENCV_LIBS
include $(OPENCV_ROOT)/libs.mk
LOCAL_LDLIBS += $(OPENCV_LIBS) -llog -lGLESv2
LOCAL_C_INCLUDES += $(OPENCV_INCLUDES)
LOCAL_MODULE := android-opencv
LOCAL_SRC_FILES := gen/android_cv_wrap.cpp image_pool.cpp yuv420sp2rgb.c gl_code.cpp Calibration.cpp
include $(BUILD_SHARED_LIBRARY)

android/opencv/jni/Application.mk
@@ -0,0 +1,4 @@
# The ARMv7 build is significantly faster due to the use of the hardware FPU
APP_ABI := armeabi armeabi-v7a
APP_BUILD_SCRIPT := $(call my-dir)/Android.mk
APP_PROJECT_PATH := $(PROJECT_PATH)

android/opencv/jni/Calibration.cpp
@@ -0,0 +1,261 @@
/*
* Processor.cpp
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#include "Calibration.h"
#include <sys/stat.h>
using namespace cv;
Calibration::Calibration():patternsize(6,8)
{
}
Calibration::~Calibration() {
}
namespace
{
double computeReprojectionErrors(
const vector<vector<Point3f> >& objectPoints, const vector<vector<
Point2f> >& imagePoints, const vector<Mat>& rvecs,
const vector<Mat>& tvecs, const Mat& cameraMatrix,
const Mat& distCoeffs, vector<float>& perViewErrors) {
vector<Point2f> imagePoints2;
int i, totalPoints = 0;
double totalErr = 0, err;
perViewErrors.resize(objectPoints.size());
for (i = 0; i < (int) objectPoints.size(); i++) {
projectPoints(Mat(objectPoints[i]), rvecs[i], tvecs[i], cameraMatrix,
distCoeffs, imagePoints2);
err = norm(Mat(imagePoints[i]), Mat(imagePoints2), CV_L1 );
int n = (int) objectPoints[i].size();
perViewErrors[i] = err / n;
totalErr += err;
totalPoints += n;
}
return totalErr / totalPoints;
}
void calcChessboardCorners(Size boardSize, float squareSize, vector<
Point3f>& corners) {
corners.resize(0);
for (int i = 0; i < boardSize.height; i++)
for (int j = 0; j < boardSize.width; j++)
corners.push_back(Point3f(float(j * squareSize), float(i
* squareSize), 0));
}
/**from opencv/samples/cpp/calibration.cpp
*
*/
bool runCalibration(vector<vector<Point2f> > imagePoints,
Size imageSize, Size boardSize, float squareSize, float aspectRatio,
int flags, Mat& cameraMatrix, Mat& distCoeffs, vector<Mat>& rvecs,
vector<Mat>& tvecs, vector<float>& reprojErrs, double& totalAvgErr) {
cameraMatrix = Mat::eye(3, 3, CV_64F);
if (flags & CV_CALIB_FIX_ASPECT_RATIO)
cameraMatrix.at<double> (0, 0) = aspectRatio;
distCoeffs = Mat::zeros(5, 1, CV_64F);
vector<vector<Point3f> > objectPoints(1);
calcChessboardCorners(boardSize, squareSize, objectPoints[0]);
for (size_t i = 1; i < imagePoints.size(); i++)
objectPoints.push_back(objectPoints[0]);
calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix,
distCoeffs, rvecs, tvecs, flags);
bool ok = checkRange(cameraMatrix, CV_CHECK_QUIET ) && checkRange(
distCoeffs, CV_CHECK_QUIET );
totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, rvecs,
tvecs, cameraMatrix, distCoeffs, reprojErrs);
return ok;
}
void saveCameraParams(const string& filename, Size imageSize, Size boardSize,
float squareSize, float aspectRatio, int flags,
const Mat& cameraMatrix, const Mat& distCoeffs,
const vector<Mat>& rvecs, const vector<Mat>& tvecs,
const vector<float>& reprojErrs,
const vector<vector<Point2f> >& imagePoints, double totalAvgErr) {
FileStorage fs(filename, FileStorage::WRITE);
time_t t;
time(&t);
struct tm *t2 = localtime(&t);
char buf[1024];
strftime(buf, sizeof(buf) - 1, "%c", t2);
fs << "calibration_time" << buf;
if (!rvecs.empty() || !reprojErrs.empty())
fs << "nframes" << (int) std::max(rvecs.size(), reprojErrs.size());
fs << "image_width" << imageSize.width;
fs << "image_height" << imageSize.height;
fs << "board_width" << boardSize.width;
fs << "board_height" << boardSize.height;
fs << "squareSize" << squareSize;
if (flags & CV_CALIB_FIX_ASPECT_RATIO)
fs << "aspectRatio" << aspectRatio;
if (flags != 0) {
sprintf(buf, "flags: %s%s%s%s",
flags & CV_CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess"
: "",
flags & CV_CALIB_FIX_ASPECT_RATIO ? "+fix_aspectRatio" : "",
flags & CV_CALIB_FIX_PRINCIPAL_POINT ? "+fix_principal_point"
: "",
flags & CV_CALIB_ZERO_TANGENT_DIST ? "+zero_tangent_dist" : "");
cvWriteComment(*fs, buf, 0);
}
fs << "flags" << flags;
fs << "camera_matrix" << cameraMatrix;
fs << "distortion_coefficients" << distCoeffs;
fs << "avg_reprojection_error" << totalAvgErr;
if (!reprojErrs.empty())
fs << "per_view_reprojection_errors" << Mat(reprojErrs);
if (!rvecs.empty() && !tvecs.empty()) {
Mat bigmat(rvecs.size(), 6, CV_32F);
for (size_t i = 0; i < rvecs.size(); i++) {
Mat r = bigmat(Range(i, i + 1), Range(0, 3));
Mat t = bigmat(Range(i, i + 1), Range(3, 6));
rvecs[i].copyTo(r);
tvecs[i].copyTo(t);
}
cvWriteComment(
*fs,
"a set of 6-tuples (rotation vector + translation vector) for each view",
0);
fs << "extrinsic_parameters" << bigmat;
}
if (!imagePoints.empty()) {
Mat imagePtMat(imagePoints.size(), imagePoints[0].size(), CV_32FC2);
for (size_t i = 0; i < imagePoints.size(); i++) {
Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols);
Mat(imagePoints[i]).copyTo(r);
}
fs << "image_points" << imagePtMat;
}
}
}//anon namespace
bool Calibration::detectAndDrawChessboard(int idx,image_pool* pool) {
Mat grey;
pool->getGrey(idx, grey);
if (grey.empty())
return false;
vector<Point2f> corners;
IplImage iplgrey = grey;
if (!cvCheckChessboard(&iplgrey, patternsize))
return false;
bool patternfound = findChessboardCorners(grey, patternsize, corners);
Mat * img = pool->getImage(idx);
if (corners.size() < 1)
return false;
cornerSubPix(grey, corners, Size(11, 11), Size(-1, -1), TermCriteria(
CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
if(patternfound)
imagepoints.push_back(corners);
drawChessboardCorners(*img, patternsize, Mat(corners), patternfound);
imgsize = grey.size();
return patternfound;
}
void Calibration::drawText(int i, image_pool* pool, const char* ctext){
// Use "y" to show that the baseLine is about
string text = ctext;
int fontFace = FONT_HERSHEY_COMPLEX_SMALL;
double fontScale = .8;
int thickness = 1; // was .5, which truncates to 0; the thickness must be a positive integer
Mat img = *pool->getImage(i);
int baseline=0;
Size textSize = getTextSize(text, fontFace,
fontScale, thickness, &baseline);
baseline += thickness;
// center the text
Point textOrg((img.cols - textSize.width)/2,
(img.rows - textSize.height *2));
// draw the box
rectangle(img, textOrg + Point(0, baseline),
textOrg + Point(textSize.width, -textSize.height),
Scalar(0,0,255),CV_FILLED);
// ... and the baseline first
line(img, textOrg + Point(0, thickness),
textOrg + Point(textSize.width, thickness),
Scalar(0, 0, 255));
// then put the text itself
putText(img, text, textOrg, fontFace, fontScale,
Scalar::all(255), thickness, 8);
}
void Calibration::resetChess() {
imagepoints.clear();
}
void Calibration::calibrate(const char* filename) {
vector<Mat> rvecs, tvecs;
vector<float> reprojErrs;
double totalAvgErr = 0;
int flags = 0;
flags |= CV_CALIB_FIX_PRINCIPAL_POINT | CV_CALIB_FIX_ASPECT_RATIO;
bool writeExtrinsics = true;
bool writePoints = true;
bool ok = runCalibration(imagepoints, imgsize, patternsize, 1.f, 1.f,
flags, K, distortion, rvecs, tvecs, reprojErrs, totalAvgErr);
if (ok){
saveCameraParams(filename, imgsize, patternsize, 1.f,
1.f, flags, K, distortion, writeExtrinsics ? rvecs
: vector<Mat> (), writeExtrinsics ? tvecs
: vector<Mat> (), writeExtrinsics ? reprojErrs
: vector<float> (), writePoints ? imagepoints : vector<
vector<Point2f> > (), totalAvgErr);
}
}
int Calibration::getNumberDetectedChessboards() {
return imagepoints.size();
}

android/opencv/jni/Calibration.h
@@ -0,0 +1,59 @@
/*
* Processor.h
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#ifndef PROCESSOR_H_
#define PROCESSOR_H_
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <vector>
#include "image_pool.h"
#define DETECT_FAST 0
#define DETECT_STAR 1
#define DETECT_SURF 2
class Calibration {
std::vector<cv::KeyPoint> keypoints;
vector<vector<Point2f> > imagepoints;
cv::Mat K;
cv::Mat distortion;
cv::Size imgsize;
public:
cv::Size patternsize;
Calibration();
virtual ~Calibration();
bool detectAndDrawChessboard(int idx, image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
};
#endif /* PROCESSOR_H_ */

android/opencv/jni/Calibration.i
@@ -0,0 +1,28 @@
/*
* include the headers required by the generated cpp code
*/
%{
#include "Calibration.h"
#include "image_pool.h"
using namespace cv;
%}
class Calibration {
public:
Size patternsize;
Calibration();
virtual ~Calibration();
bool detectAndDrawChessboard(int idx, image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
};

@@ -0,0 +1,6 @@
%feature("director") Mat;
%feature("director") glcamera;
%feature("director") image_pool;
%typemap("javapackage") Mat, Mat *, Mat & "com.opencv.jni";
%typemap("javapackage") glcamera, glcamera *, glcamera & "com.opencv.jni";
%typemap("javapackage") image_pool, image_pool *, image_pool & "com.opencv.jni";

android/opencv/jni/android-cv.i
@@ -0,0 +1,57 @@
/* File : android-cv.i
import this file, and make sure to add the System.loadlibrary("android-opencv")
before loading any lib that depends on this.
*/
%module opencv
%{
#include "image_pool.h"
#include "glcamera.h"
using namespace cv;
%}
#ifndef SWIGIMPORTED
%include "various.i"
%include "typemaps.i"
%include "arrays_java.i"
#endif
/**
* Make all the swig pointers public, so that
* external libraries can refer to these, otherwise they default to
* protected...
*/
%typemap(javabody) SWIGTYPE %{
private long swigCPtr;
protected boolean swigCMemOwn;
public $javaclassname(long cPtr, boolean cMemoryOwn) {
swigCMemOwn = cMemoryOwn;
swigCPtr = cPtr;
}
public static long getCPtr($javaclassname obj) {
return (obj == null) ? 0 : obj.swigCPtr;
}
%}
%pragma(java) jniclasscode=%{
static {
try {
//load the library, make sure that libandroid-opencv.so is in your <project>/libs/armeabi directory
//so that android sdk automatically installs it along with the app.
System.loadLibrary("android-opencv");
} catch (UnsatisfiedLinkError e) {
//badness
throw e;
}
}
%}
%include "cv.i"
%include "glcamera.i"
%include "image_pool.i"
%include "Calibration.i"

android/opencv/jni/buffers.i
@@ -0,0 +1,165 @@
/*
* These typemaps provide support for sharing data between JNI and JVM code
* using NIO direct buffers. It is the responsibility of the JVM code to
* allocate a direct buffer of the appropriate size.
*
* Example use:
* Wrapping:
* %include "buffers.i"
* %apply int* BUFF {int* buffer}
* int read_foo_int(int* buffer);
*
* Java:
* IntBuffer buffer = IntBuffer.allocateDirect(nInts*4).order(ByteOrder.nativeOrder()).asIntBuffer();
* Example.read_foo_int(buffer);
*
* The following typemaps are defined:
* void* BUFF <--> javax.nio.Buffer
* char* BUFF <--> javax.nio.ByteBuffer
* char* CBUFF <--> javax.nio.CharBuffer
* unsigned char* INBUFF/OUTBUFF <--> javax.nio.ShortBuffer
* short* BUFF <--> javax.nio.ShortBuffer
* unsigned short* INBUFF/OUTBUFF <--> javax.nio.IntBuffer
* int* BUFF <--> javax.nio.IntBuffer
* unsigned int* INBUFF/OUTBUFF <--> javax.nio.LongBuffer
* long* BUFF <--> javax.nio.IntBuffer
* unsigned long* INBUFF/OUTBUF <--> javax.nio.LongBuffer
* long long* BUFF <--> javax.nio.LongBuffer
* float* BUFF <--> javax.nio.FloatBuffer
* double* BUFF <--> javax.nio.DoubleBuffer
*
* Note the potential for data loss in the conversion from
* the C type 'unsigned long' to the signed Java long type.
* Hopefully, I can implement a workaround with BigNumber in the future.
*
* The use of ByteBuffer vs CharBuffer for the char* type should
* depend on the type of data. In general you'll probably
* want to use CharBuffer for actual text data.
*/
/*
* This macro is used to define the nio buffers for primitive types.
*/
%define NIO_BUFFER_TYPEMAP(CTYPE, LABEL, BUFFERTYPE)
%typemap(jni) CTYPE* LABEL "jobject"
%typemap(jtype) CTYPE* LABEL "BUFFERTYPE"
%typemap(jstype) CTYPE* LABEL "BUFFERTYPE"
%typemap(javain,
pre=" assert $javainput.isDirect() : \"Buffer must be allocated direct.\";") CTYPE* LABEL "$javainput"
%typemap(javaout) CTYPE* LABEL {
return $jnicall;
}
%typemap(in) CTYPE* LABEL {
$1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* LABEL {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* LABEL ""
%enddef
NIO_BUFFER_TYPEMAP(void, BUFF, java.nio.Buffer);
NIO_BUFFER_TYPEMAP(char, BUFF, java.nio.ByteBuffer);
NIO_BUFFER_TYPEMAP(char, CBUFF, java.nio.CharBuffer);
/*NIO_BUFFER_TYPEMAP(unsigned char, BUFF, java.nio.ShortBuffer);*/
NIO_BUFFER_TYPEMAP(short, BUFF, java.nio.ShortBuffer);
NIO_BUFFER_TYPEMAP(unsigned short, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(int, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(unsigned int, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(long, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(unsigned long, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(long long, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(float, BUFF, java.nio.FloatBuffer);
NIO_BUFFER_TYPEMAP(double, BUFF, java.nio.DoubleBuffer);
#undef NIO_BUFFER_TYPEMAP
%define UNSIGNED_NIO_BUFFER_TYPEMAP(CTYPE, BSIZE, BUFFERTYPE, PACKFCN, UNPACKFCN)
%typemap(jni) CTYPE* INBUFF "jobject"
%typemap(jtype) CTYPE* INBUFF "java.nio.ByteBuffer"
%typemap(jstype) CTYPE* INBUFF "BUFFERTYPE"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = PACKFCN($javainput, true);") CTYPE* INBUFF "tmp$javainput"
%typemap(javaout) CTYPE* INBUFF {
return $jnicall;
}
%typemap(in) CTYPE* INBUFF {
$1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* INBUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* INBUFF ""
%typemap(jni) CTYPE* OUTBUFF "jobject"
%typemap(jtype) CTYPE* OUTBUFF "java.nio.ByteBuffer"
%typemap(jstype) CTYPE* OUTBUFF "BUFFERTYPE"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = java.nio.ByteBuffer.allocateDirect($javainput.capacity()*BSIZE).order($javainput.order());",
post=" UNPACKFCN(tmp$javainput, $javainput);") CTYPE* OUTBUFF "tmp$javainput"
%typemap(javaout) CTYPE* OUTBUFF {
return $jnicall;
}
%typemap(in) CTYPE* OUTBUFF {
$1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* OUTBUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* OUTBUFF ""
%enddef
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned char, 1, java.nio.ShortBuffer, permafrost.hdf.libhdf.BufferUtils.packUChar, permafrost.hdf.libhdf.BufferUtils.unpackUChar);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned short, 2, java.nio.IntBuffer, permafrost.hdf.libhdf.BufferUtils.packUShort, permafrost.hdf.libhdf.BufferUtils.unpackUShort);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned int, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned long, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
/*
%typemap(jni) unsigned char* BUFF "jobject"
%typemap(jtype) unsigned char* BUFF "java.nio.ByteBuffer"
%typemap(jstype) unsigned char* BUFF "java.nio.ShortBuffer"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = permafrost.hdf.libhdf.BufferUtils.packUChar($javainput, true);",
post=" permafrost.hdf.libhdf.BufferUtils.unpackUChar(tmp$javainput, $javainput);") unsigned char* BUFF "tmp$javainput"
%typemap(javaout) unsigned char* BUFF {
return $jnicall;
}
%typemap(in) unsigned char* BUFF {
$1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) unsigned char* BUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) unsigned char* BUFF ""
*/
#undef UNSIGNED_NIO_BUFFER_TYPEMAP

android/opencv/jni/cv.i
@@ -0,0 +1,59 @@
%typemap(javaimports) Mat "
/** Wrapper for the OpenCV Mat object. Good for passing around as a pointer to a Mat.
*/"
%typemap(javaimports) Size "
/** Wrapper for the OpenCV Size object. Good for setting dimensions of cv::Mat...
*/"
class Mat {
public:
%immutable;
int rows;
int cols;
};
class Size{
public:
Size(int width,int height);
int width;
int height;
};
template<class _Tp> class Ptr
{
public:
//! empty constructor
Ptr();
//! take ownership of the pointer. The associated reference counter is allocated and set to 1
Ptr(_Tp* _obj);
//! calls release()
~Ptr();
//! copy constructor. Copies the members and calls addref()
Ptr(const Ptr& ptr);
//! copy operator. Calls ptr.addref() and release() before copying the members
// Ptr& operator = (const Ptr& ptr);
//! increments the reference counter
void addref();
//! decrements the reference counter. If it reaches 0, delete_obj() is called
void release();
//! deletes the object. Override if needed
void delete_obj();
//! returns true iff obj==NULL
bool empty() const;
//! helper operators making "Ptr<T> ptr" use very similar to "T* ptr".
_Tp* operator -> ();
// const _Tp* operator -> () const;
// operator _Tp* ();
// operator const _Tp*() const;
protected:
_Tp* obj; //< the object pointer.
int* refcount; //< the associated reference counter
};
%template(PtrMat) Ptr<Mat>;
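/* From Java, the wrapped smart pointer behaves like a plain reference; a
 * hypothetical sketch (the pool comes from image_pool.i, names illustrative):
 *   PtrMat img = pool.getImage(0); // reference-counted cv::Mat
 */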

(file diff suppressed because it is too large)

android/opencv/jni/gl_code.cpp
@@ -0,0 +1,309 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// OpenGL ES 2.0 code
#include <jni.h>
#include <android/log.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <stdint.h>
#include "glcamera.h"
#include "image_pool.h"
using namespace cv;
#define LOG_TAG "libandroid-opencv"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
static void printGLString(const char *name, GLenum s) {
const char *v = (const char *) glGetString(s);
LOGI("GL %s = %s\n", name, v);
}
static void checkGlError(const char* op) {
for (GLint error = glGetError(); error; error = glGetError()) {
LOGI("after %s() glError (0x%x)\n", op, error);
}
}
static const char gVertexShader[] = "attribute vec4 a_position; \n"
"attribute vec2 a_texCoord; \n"
"varying vec2 v_texCoord; \n"
"void main() \n"
"{ \n"
" gl_Position = a_position; \n"
" v_texCoord = a_texCoord; \n"
"} \n";
static const char gFragmentShader[] =
"precision mediump float; \n"
"varying vec2 v_texCoord; \n"
"uniform sampler2D s_texture; \n"
"void main() \n"
"{ \n"
" gl_FragColor = texture2D( s_texture, v_texCoord );\n"
"} \n";
const GLfloat gTriangleVertices[] = { 0.0f, 0.5f, -0.5f, -0.5f, 0.5f, -0.5f };
GLubyte testpixels[4 * 3] = { 255, 0, 0, // Red
0, 255, 0, // Green
0, 0, 255, // Blue
255, 255, 0 // Yellow
};
GLuint glcamera::createSimpleTexture2D(GLuint _textureid, GLubyte* pixels,
int width, int height, int channels) {
// Bind the texture
glActiveTexture(GL_TEXTURE0);
checkGlError("glActiveTexture");
// Bind the texture object
glBindTexture(GL_TEXTURE_2D, _textureid);
checkGlError("glBindTexture");
GLenum format;
switch (channels) {
case 3:
format = GL_RGB;
break;
case 1:
format = GL_LUMINANCE;
break;
case 4:
format = GL_RGBA;
break;
default:
format = GL_RGB; // guard against an unhandled channel count leaving format uninitialized
break;
}
// Load the texture
glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format,
GL_UNSIGNED_BYTE, pixels);
checkGlError("glTexImage2D");
// Set the filtering mode
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST );
return _textureid;
}
GLuint glcamera::loadShader(GLenum shaderType, const char* pSource) {
GLuint shader = glCreateShader(shaderType);
if (shader) {
glShaderSource(shader, 1, &pSource, NULL);
glCompileShader(shader);
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled) {
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
if (infoLen) {
char* buf = (char*) malloc(infoLen);
if (buf) {
glGetShaderInfoLog(shader, infoLen, NULL, buf);
LOGE("Could not compile shader %d:\n%s\n",
shaderType, buf);
free(buf);
}
glDeleteShader(shader);
shader = 0;
}
}
}
return shader;
}
GLuint glcamera::createProgram(const char* pVertexSource,
const char* pFragmentSource) {
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
if (!vertexShader) {
return 0;
}
GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
if (!pixelShader) {
return 0;
}
GLuint program = glCreateProgram();
if (program) {
glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (linkStatus != GL_TRUE) {
GLint bufLength = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
if (bufLength) {
char* buf = (char*) malloc(bufLength);
if (buf) {
glGetProgramInfoLog(program, bufLength, NULL, buf);
LOGE("Could not link program:\n%s\n", buf);
free(buf);
}
}
glDeleteProgram(program);
program = 0;
}
}
return program;
}
//GLuint textureID;
bool glcamera::setupGraphics(int w, int h) {
printGLString("Version", GL_VERSION);
printGLString("Vendor", GL_VENDOR);
printGLString("Renderer", GL_RENDERER);
printGLString("Extensions", GL_EXTENSIONS);
LOGI("setupGraphics(%d, %d)", w, h);
gProgram = createProgram(gVertexShader, gFragmentShader);
if (!gProgram) {
LOGE("Could not create program.");
return false;
}
gvPositionHandle = glGetAttribLocation(gProgram, "a_position");
gvTexCoordHandle = glGetAttribLocation(gProgram, "a_texCoord");
gvSamplerHandle = glGetAttribLocation(gProgram, "s_texture");
// Use tightly packed data
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
// Generate a texture object
glGenTextures(1, &textureID);
textureID = createSimpleTexture2D(textureID, testpixels, 2, 2, 3);
checkGlError("glGetAttribLocation");
LOGI("glGetAttribLocation(\"vPosition\") = %d\n",
gvPositionHandle);
glViewport(0, 0, w, h);
checkGlError("glViewport");
return true;
}
void glcamera::renderFrame() {
GLfloat vVertices[] = { -1.0f, 1.0f, 0.0f, // Position 0
0.0f, 0.0f, // TexCoord 0
-1.0f, -1.0f, 0.0f, // Position 1
0.0f, 1.0f, // TexCoord 1
1.0f, -1.0f, 0.0f, // Position 2
1.0f, 1.0f, // TexCoord 2
1.0f, 1.0f, 0.0f, // Position 3
1.0f, 0.0f // TexCoord 3
};
GLushort indices[] = { 0, 1, 2, 0, 2, 3 };
GLsizei stride = 5 * sizeof(GLfloat); // 3 for position, 2 for texture
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
checkGlError("glClearColor");
glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
checkGlError("glClear");
glUseProgram(gProgram);
checkGlError("glUseProgram");
// Load the vertex position
glVertexAttribPointer(gvPositionHandle, 3, GL_FLOAT, GL_FALSE, stride,
vVertices);
// Load the texture coordinate
glVertexAttribPointer(gvTexCoordHandle, 2, GL_FLOAT, GL_FALSE, stride,
&vVertices[3]);
glEnableVertexAttribArray(gvPositionHandle);
glEnableVertexAttribArray(gvTexCoordHandle);
// Bind the texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureID);
// Set the sampler texture unit to 0
glUniform1i(gvSamplerHandle, 0);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);
//checkGlError("glVertexAttribPointer");
//glEnableVertexAttribArray(gvPositionHandle);
//checkGlError("glEnableVertexAttribArray");
//glDrawArrays(GL_TRIANGLES, 0, 3);
//checkGlError("glDrawArrays");
}
void glcamera::init(int width, int height) {
newimage = false;
nimg = Mat();
setupGraphics(width, height);
}
void glcamera::step() {
if (newimage && !nimg.empty()) {
textureID = createSimpleTexture2D(textureID,
nimg.ptr<unsigned char> (0), nimg.rows, nimg.cols,
nimg.channels());
newimage = false;
}
renderFrame();
}
void glcamera::setTextureImage(Ptr<Mat> img) {
//int p2 = (int)(std::log(img->size().width)/0.69315);
int sz = 256;//std::pow(2,p2);
Size size(sz, sz);
// fx and fy must be given explicitly (0 here) so that INTER_NEAREST is taken
// as the interpolation flag rather than as fx
resize(*img, nimg, size, 0, 0, cv::INTER_NEAREST);
newimage = true;
}
void glcamera::drawMatToGL(int idx, image_pool* pool) {
Ptr<Mat> img = pool->getImage(idx);
if (img.empty())
return; //no image at input_idx!
setTextureImage(img);
}
glcamera::glcamera():newimage(false) {
LOGI("glcamera constructor");
}
glcamera::~glcamera() {
LOGI("glcamera destructor");
}

android/opencv/jni/glcamera.h
@@ -0,0 +1,40 @@
#ifndef GLCAMERA_H_
#define GLCAMERA_H_
#include <opencv2/core/core.hpp>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include "image_pool.h"
class glcamera {
Mat nimg;
bool newimage;
GLuint textureID;
GLuint gProgram;
GLuint gvPositionHandle;
GLuint gvTexCoordHandle;
GLuint gvSamplerHandle;
public:
glcamera();
~glcamera();
void init(int width, int height);
void step();
void drawMatToGL(int idx, image_pool* pool);
void setTextureImage(Ptr<Mat> img);
private:
GLuint createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width,
int height, int channels);
GLuint loadShader(GLenum shaderType, const char* pSource);
GLuint
createProgram(const char* pVertexSource,
const char* pFragmentSource);
bool setupGraphics(int w, int h);
void renderFrame();
};
#endif

android/opencv/jni/glcamera.i
@@ -0,0 +1,43 @@
%typemap(javaimports) glcamera "
/** a class for doing the native rendering of images
this class renders using GL2 es, the native ndk version
This class is used by the GL2CameraViewer to do the rendering,
and is inspired by the gl2 example in the ndk samples
*/"
%javamethodmodifiers glcamera::init"
/** should be called from onSurfaceChanged by the GLSurfaceView that is using this
 * as the drawing engine
 * @param width the width of the surface view that this will be drawing to
 * @param height the height of the surface view that this will be drawing to
 *
 */
public";
%javamethodmodifiers glcamera::step"
/** should be called by GLSurfaceView.Renderer in the onDrawFrame method, as it
handles the rendering of the opengl scene, and requires that the opengl context be
valid.
*
*/
public";
%javamethodmodifiers glcamera::drawMatToGL"
/** copies an image from a pool and queues it for drawing in opengl.
 * This transforms the image to a power-of-two texture size.
* @param idx the image index to copy
* @param pool the image_pool to look up the image from
*
*/
public";
class glcamera {
public:
void init(int width, int height);
void step();
void drawMatToGL(int idx, image_pool* pool);
};
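/* A hypothetical sketch of the Java side (names are illustrative):
 *   public void onSurfaceChanged(GL10 gl, int w, int h) { mGlCamera.init(w, h); }
 *   public void onDrawFrame(GL10 gl)                    { mGlCamera.step(); }
 */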

android/opencv/jni/image_pool.cpp
@@ -0,0 +1,86 @@
#include "image_pool.h"
#include "yuv420sp2rgb.h"
#include <android/log.h>
#include <opencv2/imgproc/imgproc.hpp>
JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv * env,
jclass thiz, jlong ppool, jobject _jpool, jbyteArray jbuffer,
jint jidx, jint jwidth, jint jheight, jboolean jgrey) {
image_pool *pool = (image_pool *) ppool;
Ptr<Mat> mat = pool->getYUV(jidx);
if (mat.empty() || mat->cols != jwidth || mat->rows != jheight * 2) {
//pool->deleteGrey(jidx);
mat = new Mat(jheight * 2, jwidth, CV_8UC1);
}
jsize sz = env->GetArrayLength(jbuffer);
uchar* buff = mat->ptr<uchar> (0);
env->GetByteArrayRegion(jbuffer, 0, sz, (jbyte*) buff);
pool->addYUVMat(jidx, mat);
Ptr<Mat> color = pool->getImage(jidx);
if (color.empty() || color->cols != jwidth || color->rows != jheight) {
//pool->deleteImage(jidx);
color = new Mat(jheight, jwidth, CV_8UC3);
}
if (!jgrey) {
//cvtColor(*mat,*color, CV_YCrCb2RGB) doesn't work here, unfortunately -
//it expects packed per-pixel YCrCb, not this planar YUV420sp (NV21) layout
color_convert_common(buff, buff + jwidth * jheight, jwidth, jheight,
color->ptr<uchar> (0), false);
}
if (jgrey) {
Mat grey;
pool->getGrey(jidx, grey);
cvtColor(grey, *color, CV_GRAY2RGB);
}
pool->addImage(jidx, color);
}
image_pool::image_pool() {
}
image_pool::~image_pool() {
__android_log_print(ANDROID_LOG_INFO, "image_pool", "destructor called");
}
cv::Ptr<Mat> image_pool::getImage(int i) {
return imagesmap[i];
}
void image_pool::getGrey(int i, Mat & grey) {
cv::Ptr<Mat> tm = yuvImagesMap[i];
if (tm.empty())
return;
grey = (*tm)(Range(0, tm->rows / 2), Range::all());
}
cv::Ptr<Mat> image_pool::getYUV(int i) {
return yuvImagesMap[i];
}
void image_pool::addYUVMat(int i, cv::Ptr<Mat> mat) {
yuvImagesMap[i] = mat;
}
void image_pool::addImage(int i, cv::Ptr<Mat> mat) {
imagesmap[i] = mat;
}

android/opencv/jni/image_pool.h
@@ -0,0 +1,62 @@
#ifndef IMAGE_POOL_H
#define IMAGE_POOL_H
#include <opencv2/core/core.hpp>
#include <jni.h>
#include <map>
using namespace cv;
#ifdef __cplusplus
extern "C" {
#endif
//
//JNIEXPORT jobject JNICALL Java_com_opencv_jni_opencvJNI_getBitmapBuffer(
// JNIEnv *jenv, jclass jcls, jlong jarg1, jobject jarg1_);
JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool
(JNIEnv *, jclass, jlong, jobject, jbyteArray, jint, jint, jint, jboolean);
#ifdef __cplusplus
}
#endif
//bool yuv2mat2(char *data, int size, int width, int height, bool grey, Mat& mat);
class image_pool {
std::map<int, Ptr< Mat> > imagesmap;
std::map<int, Ptr< Mat> > yuvImagesMap;
//uchar * mbuffer;
//int length;
public:
image_pool();
~image_pool();
cv::Ptr<Mat> getImage(int i);
void getGrey(int i, Mat & grey);
cv::Ptr<Mat> getYUV(int i);
int getCount(){
return imagesmap.size();
}
void addImage(int i, Ptr< Mat> mat);
/** this function stores the given matrix in the yuvImagesMap. Also,
* after this call getGrey will work, as the grey image is just the top
* half of the YUV mat.
*
* \param i index to store yuv image at
* \param mat the yuv matrix to store
*/
void addYUVMat(int i, Ptr< Mat> mat);
int addYUV(uchar* buffer, int size, int width, int height, bool grey,int idx);
void getBitmap(int * outintarray, int size, int idx);
};
#endif

android/opencv/jni/image_pool.i
@@ -0,0 +1,58 @@
%typemap(javaimports) image_pool "
/** image_pool is used for keeping track of a pool of native images. It stores images as cv::Mat's and
references them by an index. It allows one to get a pointer to an underlying mat, and handles memory deletion.*/"
%javamethodmodifiers image_pool::getImage"
/** gets a pointer to a stored image, by an index. If the index is new, returns a null pointer
* @param idx the index in the pool that is associated with a cv::Mat
* @return the pointer to a cv::Mat, null pointer if the given idx is novel
*/
public";
%javamethodmodifiers image_pool::deleteImage"
/** deletes the image from the pool
* @param idx the index in the pool that is associated with a cv::Mat
*/
public";
%javamethodmodifiers addYUVtoPool"
/** adds a YUV420sp image to the pool
 * @param idx the index in the pool that is associated with a cv::Mat
 */
public";
%include "various.i"
%apply (char* BYTE) { (char *data)}; //byte[] to char*
%native (addYUVtoPool) void addYUVtoPool(image_pool* pool, char* data,int idx, int width, int height, bool grey);
%feature("director") image_pool;
class image_pool {
public:
image_pool();
~image_pool();
Ptr<Mat> getImage(int i);
void addImage(int i, Ptr< Mat> mat);
};

@@ -0,0 +1,36 @@
/*
 * int *INTARRAY typemaps.
 * These are input typemaps for mapping a Java int[] array to a C int array.
 * Note that as a Java array is used and thus passed by reference, the C routine
 * can return data to Java via the parameter.
 *
 * Example usage wrapping:
 *   void foo(int *INTARRAY, int INTARRAYSIZE);
 *
 * Java usage:
 *   int b[] = new int[20];
 *   modulename.foo(b);
 */
%typemap(in) (int *INTARRAY, int INTARRAYSIZE) {
$1 = (int *) JCALL2(GetIntArrayElements, jenv, $input, 0);
jsize sz = JCALL1(GetArrayLength, jenv, $input);
$2 = (int)sz;
}
%typemap(argout) (int *INTARRAY, int INTARRAYSIZE) {
JCALL3(ReleaseIntArrayElements, jenv, $input, (jint *) $1, 0);
}
/* Prevent default freearg typemap from being used */
%typemap(freearg) (int *INTARRAY, int INTARRAYSIZE) ""
%typemap(jni) (int *INTARRAY, int INTARRAYSIZE) "jintArray"
%typemap(jtype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
%typemap(jstype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
%typemap(javain) (int *INTARRAY, int INTARRAYSIZE) "$javainput"

android/opencv/jni/yuv420sp2rgb.c
@@ -0,0 +1,98 @@
#include <string.h>
#include <jni.h>
#include <yuv420sp2rgb.h>
#ifndef max
#define max(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a > _b ? _a : _b; })
#define min(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a < _b ? _a : _b; })
#endif
/*
YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
U/V plane containing 8 bit 2x2 subsampled chroma samples.
except the interleave order of U and V is reversed.
H V
Y Sample Period 1 1
U (Cb) Sample Period 2 2
V (Cr) Sample Period 2 2
*/
/*
size of a char:
find . -name limits.h -exec grep CHAR_BIT {} \;
*/
const int bytes_per_pixel = 2;
void color_convert_common(
unsigned char *pY, unsigned char *pUV,
int width, int height, unsigned char *buffer,
int grey)
{
int i, j;
int nR, nG, nB;
int nY, nU, nV;
unsigned char *out = buffer;
int offset = 0;
if(grey){
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
unsigned char nB = *(pY + i * width + j);
out[offset++] = (unsigned char)nB;
// out[offset++] = (unsigned char)nB;
// out[offset++] = (unsigned char)nB;
}
}
}else
// YUV 4:2:0
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
nY = *(pY + i * width + j);
nV = *(pUV + (i/2) * width + bytes_per_pixel * (j/2));
nU = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1);
// Yuv Convert
nY -= 16;
nU -= 128;
nV -= 128;
if (nY < 0)
nY = 0;
// nB = (int)(1.164 * nY + 2.018 * nU);
// nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
// nR = (int)(1.164 * nY + 1.596 * nV);
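// the integer coefficients are the float ones above scaled by 1024
// (1.164*1024 ~= 1192, 2.018*1024 ~= 2066, 1.596*1024 ~= 1634, etc.);
// the >>10 below undoes the scaling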
nB = (int)(1192 * nY + 2066 * nU);
nG = (int)(1192 * nY - 833 * nV - 400 * nU);
nR = (int)(1192 * nY + 1634 * nV);
nR = min(262143, max(0, nR));
nG = min(262143, max(0, nG));
nB = min(262143, max(0, nB));
nR >>= 10; nR &= 0xff;
nG >>= 10; nG &= 0xff;
nB >>= 10; nB &= 0xff;
out[offset++] = (unsigned char)nR;
out[offset++] = (unsigned char)nG;
out[offset++] = (unsigned char)nB;
//out[offset++] = 0xff; //set alpha for ARGB 8888 format
}
//offset = i * width * 3; //non power of two
//offset = i * texture_size + j;//power of two
//offset *= 3; //3 byte per pixel
//out = buffer + offset;
}
}

android/opencv/jni/yuv420sp2rgb.h
@@ -0,0 +1,18 @@
//yuv420sp2rgb.h
#ifndef YUV420SP2RGB_H
#define YUV420SP2RGB_H
#ifdef __cplusplus
extern "C" {
#endif
void color_convert_common(
unsigned char *pY, unsigned char *pUV,
int width, int height, unsigned char *buffer,
int grey);
#ifdef __cplusplus
}
#endif
#endif

android/opencv/modules/Android.mk
@@ -0,0 +1,2 @@
include $(call all-subdir-makefiles)

android/opencv/modules/calib3d/Android.mk
@@ -0,0 +1,15 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := calib3d
MODULE_PATH := $(OpenCV_Root)/modules/$(LOCAL_MODULE)
sources := $(wildcard $(MODULE_PATH)/src/*.cpp)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := \
$(OpenCVInclude) \
$(MODULE_PATH)/src/ \
include $(BUILD_STATIC_LIBRARY)

android/opencv/modules/contrib/Android.mk
@@ -0,0 +1,15 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := contrib
MODULE_PATH := $(OpenCV_Root)/modules/$(LOCAL_MODULE)
sources := $(wildcard $(MODULE_PATH)/src/*.cpp)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := \
$(OpenCVInclude) \
$(MODULE_PATH)/src/ \
include $(BUILD_STATIC_LIBRARY)

android/opencv/modules/core/Android.mk
@@ -0,0 +1,15 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := core
MODULE_PATH := $(OpenCV_Root)/modules/$(LOCAL_MODULE)
sources := $(wildcard $(MODULE_PATH)/src/*.cpp)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := \
$(OpenCVInclude) \
$(MODULE_PATH)/src/ \
include $(BUILD_STATIC_LIBRARY)

android/opencv/modules/features2d/Android.mk
@@ -0,0 +1,15 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := features2d
MODULE_PATH := $(OpenCV_Root)/modules/$(LOCAL_MODULE)
sources := $(wildcard $(MODULE_PATH)/src/*.cpp)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := \
$(OpenCVInclude) \
$(MODULE_PATH)/src/ \
include $(BUILD_STATIC_LIBRARY)

android/opencv/modules/highgui/Android.mk
@@ -0,0 +1,30 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := highgui
MODULE_PATH := $(OpenCV_Root)/modules/$(LOCAL_MODULE)
sources := \
bitstrm.cpp \
cap.cpp \
grfmt_base.cpp \
grfmt_bmp.cpp \
grfmt_jpeg2000.cpp \
grfmt_jpeg.cpp \
grfmt_png.cpp \
grfmt_tiff.cpp \
grfmt_sunras.cpp \
grfmt_pxm.cpp \
loadsave.cpp \
precomp.cpp \
utils.cpp \
window.cpp
LOCAL_SRC_FILES := $(sources:%=../../$(MODULE_PATH)/src/%)
LOCAL_C_INCLUDES := \
$(OpenCVInclude) \
$(MODULE_PATH)/src/ \
include $(BUILD_STATIC_LIBRARY)

android/opencv/modules/imgproc/Android.mk
@@ -0,0 +1,15 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := imgproc
MODULE_PATH := $(OpenCV_Root)/modules/$(LOCAL_MODULE)
sources := $(wildcard $(MODULE_PATH)/src/*.cpp)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := \
$(OpenCVInclude) \
$(MODULE_PATH)/src/ \
include $(BUILD_STATIC_LIBRARY)

android/opencv/modules/legacy/Android.mk
@@ -0,0 +1,15 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := legacy
MODULE_PATH := $(OpenCV_Root)/modules/$(LOCAL_MODULE)
sources := $(wildcard $(MODULE_PATH)/src/*.cpp)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := \
$(OpenCVInclude) \
$(MODULE_PATH)/src/ \
include $(BUILD_STATIC_LIBRARY)

android/opencv/modules/ml/Android.mk
@@ -0,0 +1,15 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := ml
MODULE_PATH := $(OpenCV_Root)/modules/$(LOCAL_MODULE)
sources := $(wildcard $(MODULE_PATH)/src/*.cpp)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := \
$(OpenCVInclude) \
$(MODULE_PATH)/src/ \
include $(BUILD_STATIC_LIBRARY)

android/opencv/modules/objdetect/Android.mk
@@ -0,0 +1,15 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := objdetect
MODULE_PATH := $(OpenCV_Root)/modules/$(LOCAL_MODULE)
sources := $(wildcard $(MODULE_PATH)/src/*.cpp)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := \
$(OpenCVInclude) \
$(MODULE_PATH)/src/ \
include $(BUILD_STATIC_LIBRARY)

android/opencv/modules/video/Android.mk
@@ -0,0 +1,15 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := video
MODULE_PATH := $(OpenCV_Root)/modules/$(LOCAL_MODULE)
sources := $(wildcard $(MODULE_PATH)/src/*.cpp)
LOCAL_SRC_FILES := $(sources:%=../../%)
LOCAL_C_INCLUDES := \
$(OpenCVInclude) \
$(MODULE_PATH)/src/ \
include $(BUILD_STATIC_LIBRARY)

android/src/com/opencv/OpenCV.java
@@ -0,0 +1,157 @@
package com.opencv;
import java.util.LinkedList;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import android.view.ViewGroup.LayoutParams;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import com.opencv.camera.NativePreviewer;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.opengl.GL2CameraViewer;
public class OpenCV extends Activity {
private NativePreviewer mPreview;
private GL2CameraViewer glview;
/*
* (non-Javadoc)
*
* @see android.app.Activity#onKeyUp(int, android.view.KeyEvent)
*/
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
return super.onKeyUp(keyCode, event);
}
/*
* (non-Javadoc)
*
* @see android.app.Activity#onKeyLongPress(int, android.view.KeyEvent)
*/
@Override
public boolean onKeyLongPress(int keyCode, KeyEvent event) {
return super.onKeyLongPress(keyCode, event);
}
/**
* Avoid that the screen get's turned off by the system.
*/
public void disableScreenTurnOff() {
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
/**
 * Sets the orientation to landscape, as this is needed by AndAR.
*/
public void setOrientation() {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
/**
* Maximize the application.
*/
public void setFullscreen() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
}
public void setNoTitle() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// menu.add("Sample");
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// if(item.getTitle().equals("Sample")){
// //do stuff...
// }
return true;
}
@Override
public void onOptionsMenuClosed(Menu menu) {
// TODO Auto-generated method stub
super.onOptionsMenuClosed(menu);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setFullscreen();
disableScreenTurnOff();
FrameLayout frame = new FrameLayout(getApplication());
// Create our Preview view and set it as the content of our activity.
mPreview = new NativePreviewer(getApplication(), 400, 300);
LayoutParams params = new LayoutParams(LayoutParams.WRAP_CONTENT,
LayoutParams.WRAP_CONTENT);
params.height = getWindowManager().getDefaultDisplay().getHeight();
params.width = (int) (params.height * 4.0 / 2.88);
LinearLayout vidlay = new LinearLayout(getApplication());
vidlay.setGravity(Gravity.CENTER);
vidlay.addView(mPreview, params);
frame.addView(vidlay);
// make the glview overlay on top of the video preview
mPreview.setZOrderMediaOverlay(false);
glview = new GL2CameraViewer(getApplication(), false, 0, 0);
glview.setZOrderMediaOverlay(true);
glview.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT,
LayoutParams.FILL_PARENT));
frame.addView(glview);
setContentView(frame);
}
@Override
protected void onPause() {
super.onPause();
mPreview.onPause();
glview.onPause();
}
@Override
protected void onResume() {
super.onResume();
glview.onResume();
LinkedList<NativeProcessor.PoolCallback> callbackstack = new LinkedList<PoolCallback>();
callbackstack.add(glview.getDrawCallback());
mPreview.addCallbackStack(callbackstack);
mPreview.onResume();
}
}
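The activity above is the reference wiring of NativePreviewer and GL2CameraViewer. A standalone activity can do the same and put its own processing step in front of the draw callback. A minimal sketch under that assumption; FrameLogActivity, the log tag, and the 400x300 preview request are illustrative names, not part of the library:

package com.opencv.samples; // hypothetical package

import java.util.LinkedList;

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.widget.FrameLayout;

import com.opencv.camera.NativePreviewer;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.image_pool;
import com.opencv.opengl.GL2CameraViewer;

public class FrameLogActivity extends Activity {
    private NativePreviewer preview;
    private GL2CameraViewer glview;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        FrameLayout frame = new FrameLayout(getApplication());
        preview = new NativePreviewer(getApplication(), 400, 300);
        glview = new GL2CameraViewer(getApplication(), false, 0, 0);
        frame.addView(preview);
        frame.addView(glview);
        setContentView(frame);
    }

    @Override
    protected void onResume() {
        super.onResume();
        glview.onResume();
        // Our callback runs first; the GL draw callback then renders the frame.
        LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
        stack.add(new PoolCallback() {
            public void process(int idx, image_pool pool, long timestamp,
                    NativeProcessor nativeProcessor) {
                Log.i("FrameLog", "frame " + idx + " at t=" + timestamp);
            }
        });
        stack.add(glview.getDrawCallback());
        preview.addCallbackStack(stack);
        preview.onResume();
        preview.postautofocus(100); // no-op on devices without autofocus
    }

    @Override
    protected void onPause() {
        super.onPause();
        preview.onPause(); // also clears the callback stack and stops the processor
        glview.onPause();
    }
}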

android/src/com/opencv/calibration/Calibrator.java Normal file
View File

@ -0,0 +1,124 @@
package com.opencv.calibration;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.locks.ReentrantLock;
import android.os.AsyncTask;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.Calibration;
import com.opencv.jni.Size;
import com.opencv.jni.image_pool;
public class Calibrator implements PoolCallback {
private Calibration calibration;
static public interface CalibrationCallback{
public void onFoundChessboard(Calibrator calibrator);
public void onDoneCalibration(Calibrator calibration, File calibfile);
public void onFailedChessboard(Calibrator calibrator);
}
private CalibrationCallback callback;
public Calibrator(CalibrationCallback callback) {
calibration = new Calibration();
this.callback = callback;
}
public void resetCalibration(){
calibration.resetChess();
}
public void setPatternSize(Size size){
Size csize = calibration.getPatternsize();
if(size.getWidth() == csize.getWidth()&&
size.getHeight() == csize.getHeight())
return;
calibration.setPatternsize(size);
resetCalibration();
}
public void setPatternSize(int width, int height){
Size patternsize = new Size(width,height);
setPatternSize(patternsize);
}
private boolean capture_chess;
ReentrantLock lock = new ReentrantLock();
public void calibrate(File calibration_file) throws IOException{
if(getNumberPatternsDetected() < 3){
return;
}
CalibrationTask calibtask = new CalibrationTask(calibration_file);
calibtask.execute((Object[])null);
}
public void queueChessCapture(){
capture_chess = true;
}
private class CalibrationTask extends AsyncTask<Object, Object, Object> {
File calibfile;
public CalibrationTask(File calib) throws IOException{
super();
calibfile = calib;
calibfile.createNewFile();
}
@Override
protected Object doInBackground(Object... params) {
lock.lock();
try{
calibration.calibrate(calibfile.getAbsolutePath());
}
finally{
lock.unlock();
}
return null;
}
@Override
protected void onPostExecute(Object result) {
callback.onDoneCalibration(Calibrator.this, calibfile);
}
}
@Override
public void process(int idx, image_pool pool, long timestamp,
NativeProcessor nativeProcessor) {
if(lock.tryLock()){
try{
if(capture_chess){
if(calibration.detectAndDrawChessboard(idx, pool)){
callback.onFoundChessboard(this);
}else
callback.onFailedChessboard(this);
capture_chess = false;
}
}finally{
lock.unlock();
}
}
}
public int getNumberPatternsDetected(){
return calibration.getNumberDetectedChessboards();
}
public void setCallback(CalibrationCallback callback) {
this.callback = callback;
}
}
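Because Calibrator itself implements PoolCallback, it drops straight into the NativePreviewer callback chain. A minimal sketch of the wiring; CalibrationHelper, the 6x8 pattern, and the /sdcard path are placeholders, not part of the library:

package com.opencv.samples; // hypothetical package and names throughout

import java.io.File;
import java.io.IOException;
import java.util.LinkedList;

import com.opencv.calibration.Calibrator;
import com.opencv.calibration.Calibrator.CalibrationCallback;
import com.opencv.camera.NativeProcessor.PoolCallback;

public class CalibrationHelper implements CalibrationCallback {
    private final Calibrator calibrator = new Calibrator(this);

    public CalibrationHelper() {
        // inner-corner count of the printed chessboard (placeholder values)
        calibrator.setPatternSize(6, 8);
    }

    /** Calibrator is a PoolCallback, so it can sit in the preview's stack. */
    public LinkedList<PoolCallback> buildStack(PoolCallback drawCallback) {
        LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
        stack.add(calibrator);
        stack.add(drawCallback);
        return stack;
    }

    /** Call from a button handler: the next frame is searched for a chessboard. */
    public void captureRequested() {
        calibrator.queueChessCapture();
    }

    /** Call from the UI once enough views have been collected. */
    public void saveCalibration() throws IOException {
        calibrator.calibrate(new File("/sdcard/camera.yml")); // placeholder path
    }

    public void onFoundChessboard(Calibrator calibrator) {
        // e.g. update a counter: calibrator.getNumberPatternsDetected()
    }

    public void onDoneCalibration(Calibrator calibration, File calibfile) {
        // calibfile now holds the camera intrinsics
    }

    public void onFailedChessboard(Calibrator calibrator) {
        // no chessboard in that frame; prompt the user to try again
    }
}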

android/src/com/opencv/camera/NativePreviewer.java Normal file
View File

@ -0,0 +1,410 @@
package com.opencv.camera;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import android.content.Context;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.os.Handler;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.opencv.camera.NativeProcessor.NativeProcessorCallback;
import com.opencv.camera.NativeProcessor.PoolCallback;
public class NativePreviewer extends SurfaceView implements
SurfaceHolder.Callback, Camera.PreviewCallback, NativeProcessorCallback {
SurfaceHolder mHolder;
Camera mCamera;
private NativeProcessor processor;
private int preview_width, preview_height;
private int pixelformat;
private PixelFormat pixelinfo;
public NativePreviewer(Context context,AttributeSet attributes){
super(context,attributes);
listAllCameraMethods();
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
this.preview_width = attributes.getAttributeIntValue("opencv", "preview_width", 600);
this.preview_height= attributes.getAttributeIntValue("opencv", "preview_height", 600);
processor = new NativeProcessor();
setZOrderMediaOverlay(false);
}
public NativePreviewer(Context context, int preview_width,
int preview_height) {
super(context);
listAllCameraMethods();
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
this.preview_width = preview_width;
this.preview_height = preview_height;
processor = new NativeProcessor();
setZOrderMediaOverlay(false);
}
Handler camerainiter = new Handler();
void initCamera(SurfaceHolder holder) throws InterruptedException{
if(mCamera == null){
// The Surface has been created, acquire the camera and tell it where
// to draw.
int i = 0;
while(i++ < 5){
try{
mCamera = Camera.open();
break;
}catch(RuntimeException e){
Thread.sleep(200);
}
}
if (mCamera == null) {
Log.e("NativePreviewer", "could not open the camera, giving up");
return;
}
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}catch(RuntimeException e){
Log.e("camera", "stacktrace", e);
}
}
}
void releaseCamera(){
if(mCamera !=null){
// Surface will be destroyed when we return, so stop the preview.
// Because the CameraDevice object is not a shared resource, it's very
// important to release it when the activity is paused.
mCamera.stopPreview();
mCamera.release();
}
// processor = null;
mCamera = null;
mAcb = null;
mPCWB = null;
}
public void surfaceCreated(SurfaceHolder holder) {
}
public void surfaceDestroyed(SurfaceHolder holder) {
releaseCamera();
}
private boolean hasAutoFocus = false;
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
try {
initCamera(mHolder);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return;
}
if (mCamera == null) {
return;
}
// Now that the size is known, set up the camera parameters and begin
// the preview.
Camera.Parameters parameters = mCamera.getParameters();
List<Camera.Size> pvsizes = mCamera.getParameters().getSupportedPreviewSizes();
int best_width = 1000000;
int best_height = 1000000;
for(Size x: pvsizes){
if(x.width - preview_width >= 0 && x.width <= best_width){
best_width = x.width;
best_height = x.height;
}
}
preview_width = best_width;
preview_height = best_height;
List<String> fmodes = mCamera.getParameters().getSupportedFocusModes();
int idx = fmodes.indexOf(Camera.Parameters.FOCUS_MODE_INFINITY);
if(idx != -1){
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
}else if(fmodes.indexOf(Camera.Parameters.FOCUS_MODE_FIXED) != -1){
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
}
if(fmodes.indexOf(Camera.Parameters.FOCUS_MODE_AUTO) != -1){
hasAutoFocus = true;
}
List<String> scenemodes = mCamera.getParameters().getSupportedSceneModes();
if(scenemodes != null)
if(scenemodes.indexOf(Camera.Parameters.SCENE_MODE_STEADYPHOTO) != -1){
parameters.setSceneMode(Camera.Parameters.SCENE_MODE_STEADYPHOTO);
}
parameters.setPreviewSize(preview_width, preview_height);
mCamera.setParameters(parameters);
pixelinfo = new PixelFormat();
pixelformat = mCamera.getParameters().getPreviewFormat();
PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo);
Size preview_size = mCamera.getParameters().getPreviewSize();
preview_width = preview_size.width;
preview_height = preview_size.height;
int bufSize = preview_width * preview_height * pixelinfo.bitsPerPixel
/ 8;
// Must call this before calling addCallbackBuffer to get all the
// reflection variables setup
initForACB();
initForPCWB();
// Use only one buffer, so that we don't preview too many frames and bog
// down the system
byte[] buffer = new byte[bufSize];
addCallbackBuffer(buffer);
setPreviewCallbackWithBuffer();
mCamera.startPreview();
//postautofocus(0);
}
public void postautofocus(int delay) {
if(hasAutoFocus)
handler.postDelayed(autofocusrunner, delay);
}
private Runnable autofocusrunner = new Runnable() {
@Override
public void run() {
mCamera.autoFocus(autocallback);
}
};
Camera.AutoFocusCallback autocallback = new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
if(!success)
postautofocus(1000);
}
};
Handler handler = new Handler();
/**
* This method will list all methods of the android.hardware.Camera class,
* even the hidden ones. With the information it provides, you can use the
* same approach I took below to expose methods that were written but hidden
* in eclair
*/
private void listAllCameraMethods() {
try {
Class<?> c = Class.forName("android.hardware.Camera");
Method[] m = c.getMethods();
for (int i = 0; i < m.length; i++) {
Log.d("NativePreviewer", " method:" + m[i].toString());
}
} catch (Exception e) {
// TODO Auto-generated catch block
Log.e("NativePreviewer", e.toString());
}
}
/**
* These variables are re-used over and over by addCallbackBuffer
*/
Method mAcb;
private void initForACB() {
try {
mAcb = Class.forName("android.hardware.Camera").getMethod(
"addCallbackBuffer", byte[].class);
} catch (Exception e) {
Log.e("NativePreviewer",
"Problem setting up for addCallbackBuffer: "
+ e.toString());
}
}
/**
* This method allows you to add a byte buffer to the queue of buffers to be
* used by preview. See:
* http://android.git.kernel.org/?p=platform/frameworks/base.git;a=blob;f=core/java/android/hardware/Camera.java;hb=9db3d07b9620b4269ab33f78604a36327e536ce1
*
* @param b
* The buffer to register. Size should be width * height *
* bitsPerPixel / 8.
*/
private void addCallbackBuffer(byte[] b) {
try {
mAcb.invoke(mCamera, b);
} catch (Exception e) {
Log.e("NativePreviewer", "invoking addCallbackBuffer failed: "
+ e.toString());
}
}
Method mPCWB;
private void initForPCWB() {
try {
mPCWB = Class.forName("android.hardware.Camera").getMethod(
"setPreviewCallbackWithBuffer", PreviewCallback.class);
} catch (Exception e) {
Log.e("NativePreviewer",
"Problem setting up for setPreviewCallbackWithBuffer: "
+ e.toString());
}
}
/**
* Use this method instead of setPreviewCallback if you want to use manually
* allocated buffers. Assumes that "this" implements Camera.PreviewCallback
*/
private void setPreviewCallbackWithBuffer() {
// mCamera.setPreviewCallback(this);
// return;
try {
// If we were able to find the setPreviewCallbackWithBuffer method
// of Camera,
// we can now invoke it on our Camera instance, setting 'this' to be
// the
// callback handler
mPCWB.invoke(mCamera, this);
// Log.d("NativePrevier","setPreviewCallbackWithBuffer: Called method");
} catch (Exception e) {
Log.e("NativePreviewer", e.toString());
}
}
protected void clearPreviewCallbackWithBuffer() {
// mCamera.setPreviewCallback(this);
// return;
try {
// If we were able to find the setPreviewCallbackWithBuffer method
// of Camera,
// we can now invoke it on our Camera instance, setting 'this' to be
// the
// callback handler
mPCWB.invoke(mCamera, (PreviewCallback) null);
// Log.d("NativePrevier","setPreviewCallbackWithBuffer: cleared");
} catch (Exception e) {
Log.e("NativePreviewer", e.toString());
}
}
Date start;
int fcount = 0;
boolean processing = false;
/**
* Demonstration of how to use onPreviewFrame. The frame is handed to the
* NativeProcessor, and the buffer is added back to the buffer queue for
* re-use once native processing finishes (see onDoneNativeProcessing).
*/
public void onPreviewFrame(byte[] data, Camera camera) {
if (start == null) {
start = new Date();
}
processor.post(data, preview_width, preview_height, pixelformat, System.nanoTime(),
this);
fcount++;
if (fcount % 100 == 0) {
double ms = (new Date()).getTime() - start.getTime();
Log.i("NativePreviewer", "fps:" + fcount / (ms / 1000.0));
start = new Date();
fcount = 0;
}
}
@Override
public void onDoneNativeProcessing(byte[] buffer) {
addCallbackBuffer(buffer);
}
public void addCallbackStack(LinkedList<PoolCallback> callbackstack) {
processor.addCallbackStack(callbackstack);
}
/** This must be called when the activity pauses, in Activity.onPause.
* This has the side effect of clearing the callback stack.
*
*/
public void onPause() {
releaseCamera();
addCallbackStack(null);
processor.stop();
}
public void onResume() {
processor.start();
}
}

android/src/com/opencv/camera/NativeProcessor.java Normal file
View File

@ -0,0 +1,241 @@
package com.opencv.camera;
import java.util.LinkedList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import android.graphics.PixelFormat;
import android.util.Log;
import com.opencv.jni.image_pool;
import com.opencv.jni.opencv;
public class NativeProcessor {
private class ProcessorThread extends Thread {
private void process(NPPostObject pobj) throws Exception {
if (pobj.format == PixelFormat.YCbCr_420_SP) {
// add as color image, because we know how to decode this
opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
pobj.height, false);
} else if (pobj.format == PixelFormat.YCbCr_422_SP) {
// add as gray image, because color decoding of this format is
// not implemented... TODO figure out how to decode this format
opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
pobj.height, true);
} else
throw new Exception("bad pixel format!");
for (PoolCallback x : stack) {
if (interrupted()) {
throw new InterruptedException(
"Native Processor interupted while processing");
}
x.process(0, pool, pobj.timestamp, NativeProcessor.this);
}
pobj.done(); // tell the postobject that we're done doing
// all the processing.
}
@Override
public void run() {
try {
while (true) {
yield();
while(!stacklock.tryLock(5, TimeUnit.MILLISECONDS)){
}
try {
if (nextStack != null) {
stack = nextStack;
nextStack = null;
}
} finally {
stacklock.unlock();
}
NPPostObject pobj = null;
while(!lock.tryLock(5, TimeUnit.MILLISECONDS)){
}
try {
if(postobjects.isEmpty()) continue;
pobj = postobjects.removeLast();
} finally {
lock.unlock();
}
if(interrupted())throw new InterruptedException();
if(stack != null && pobj != null)
process(pobj);
}
} catch (InterruptedException e) {
Log.i("NativeProcessor",
"native processor interupted, ending now");
} catch (Exception e) {
e.printStackTrace();
} finally {
}
}
}
ProcessorThread mthread;
static public interface PoolCallback {
void process(int idx, image_pool pool,long timestamp, NativeProcessor nativeProcessor);
}
Lock stacklock = new ReentrantLock();
LinkedList<PoolCallback> nextStack;
void addCallbackStack(LinkedList<PoolCallback> stack) {
try {
while (!stacklock.tryLock(10, TimeUnit.MILLISECONDS)) {
}
try {
nextStack = stack;
} finally {
stacklock.unlock();
}
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/**
* A callback that allows the NativeProcessor to pass back the buffer when
* it has completed processing a frame.
*
* @author ethan
*
*/
static public interface NativeProcessorCallback {
/**
* Called after processing, meant to be received by the NativePreviewer
* which reuses the byte buffer for the camera preview...
*
* @param buffer
* the buffer passed to the NativeProcessor with post.
*/
void onDoneNativeProcessing(byte[] buffer);
}
/**
* Create a NativeProcessor. The processor will not start running until
* start is called, at which point it will operate in its own thread and
* sleep until a post is called. The processor should not be started until
* an onSurfaceChange event, and should be shut down when the surface is
* destroyed by calling interrupt.
*
*/
public NativeProcessor() {
}
/**
* post is used to notify the processor that a preview frame is ready; it
* will return almost immediately, after queuing the frame for the
* processor thread to pick up.
*
* @param buffer
* a preview frame from the Android Camera onPreviewFrame
* callback
* @param width
* of preview frame
* @param height
* of preview frame
* @param format
* of preview frame
* @return true; the frame is always queued for processing.
*/
public boolean post(byte[] buffer, int width, int height, int format,long timestamp,
NativeProcessorCallback callback) {
lock.lock();
try {
NPPostObject pobj = new NPPostObject(buffer, width, height,
format,timestamp, callback);
postobjects.addFirst(pobj);
} finally {
lock.unlock();
}
return true;
}
static private class NPPostObject {
public NPPostObject(byte[] buffer, int width, int height, int format, long timestamp,
NativeProcessorCallback callback) {
this.buffer = buffer;
this.width = width;
this.height = height;
this.format = format;
this.timestamp = timestamp;
this.callback = callback;
}
public void done() {
callback.onDoneNativeProcessing(buffer);
}
int width, height;
byte[] buffer;
int format;
long timestamp;
NativeProcessorCallback callback;
}
private LinkedList<NPPostObject> postobjects = new LinkedList<NPPostObject>();
private image_pool pool = new image_pool();
private final Lock lock = new ReentrantLock();
private LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
void stop() {
mthread.interrupt();
try {
mthread.join();
} catch (InterruptedException e) {
Log.w("NativeProcessor","interupted while stoping " + e.getMessage());
}
mthread = null;
}
void start() {
mthread = new ProcessorThread();
mthread.start();
}
}
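The lifecycle described in the constructor comment is exactly what NativePreviewer implements; for a custom surface owner the same pattern applies. A minimal sketch; since start(), stop() and addCallbackStack() are package-private, it must live in com.opencv.camera, and all names other than NativeProcessor are hypothetical:

package com.opencv.camera; // required: start()/stop() are package-private

import java.util.LinkedList;

import com.opencv.camera.NativeProcessor.PoolCallback;

/** Ties a NativeProcessor's worker thread to a surface-style lifecycle. */
public class ProcessorLifecycle {
    private final NativeProcessor processor = new NativeProcessor();

    public void onSurfaceCreated(LinkedList<PoolCallback> callbacks) {
        processor.addCallbackStack(callbacks);
        processor.start(); // spawns the ProcessorThread
    }

    public void onSurfaceDestroyed() {
        processor.stop(); // interrupts and joins the ProcessorThread
    }
}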

android/src/com/opencv/jni/Calibration.java Normal file
View File

@ -0,0 +1,69 @@
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 1.3.40
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package com.opencv.jni;
public class Calibration {
private long swigCPtr;
protected boolean swigCMemOwn;
public Calibration(long cPtr, boolean cMemoryOwn) {
swigCMemOwn = cMemoryOwn;
swigCPtr = cPtr;
}
public static long getCPtr(Calibration obj) {
return (obj == null) ? 0 : obj.swigCPtr;
}
protected void finalize() {
delete();
}
public synchronized void delete() {
if (swigCPtr != 0) {
if (swigCMemOwn) {
swigCMemOwn = false;
opencvJNI.delete_Calibration(swigCPtr);
}
swigCPtr = 0;
}
}
public void setPatternsize(Size value) {
opencvJNI.Calibration_patternsize_set(swigCPtr, this, Size.getCPtr(value), value);
}
public Size getPatternsize() {
long cPtr = opencvJNI.Calibration_patternsize_get(swigCPtr, this);
return (cPtr == 0) ? null : new Size(cPtr, false);
}
public Calibration() {
this(opencvJNI.new_Calibration(), true);
}
public boolean detectAndDrawChessboard(int idx, image_pool pool) {
return opencvJNI.Calibration_detectAndDrawChessboard(swigCPtr, this, idx, image_pool.getCPtr(pool), pool);
}
public void resetChess() {
opencvJNI.Calibration_resetChess(swigCPtr, this);
}
public int getNumberDetectedChessboards() {
return opencvJNI.Calibration_getNumberDetectedChessboards(swigCPtr, this);
}
public void calibrate(String filename) {
opencvJNI.Calibration_calibrate(swigCPtr, this, filename);
}
public void drawText(int idx, image_pool pool, String text) {
opencvJNI.Calibration_drawText(swigCPtr, this, idx, image_pool.getCPtr(pool), pool, text);
}
}

android/src/com/opencv/jni/Mat.java Normal file
View File

@ -0,0 +1,50 @@
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 1.3.40
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package com.opencv.jni;
/** Wrapper for the OpenCV Mat object. Good for passing around as a pointer to a Mat.
*/
public class Mat {
private long swigCPtr;
protected boolean swigCMemOwn;
public Mat(long cPtr, boolean cMemoryOwn) {
swigCMemOwn = cMemoryOwn;
swigCPtr = cPtr;
}
public static long getCPtr(Mat obj) {
return (obj == null) ? 0 : obj.swigCPtr;
}
protected void finalize() {
delete();
}
public synchronized void delete() {
if (swigCPtr != 0) {
if (swigCMemOwn) {
swigCMemOwn = false;
opencvJNI.delete_Mat(swigCPtr);
}
swigCPtr = 0;
}
}
public int getRows() {
return opencvJNI.Mat_rows_get(swigCPtr, this);
}
public int getCols() {
return opencvJNI.Mat_cols_get(swigCPtr, this);
}
public Mat() {
this(opencvJNI.new_Mat(), true);
}
}

android/src/com/opencv/jni/PtrMat.java Normal file
View File

@ -0,0 +1,77 @@
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 1.3.40
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package com.opencv.jni;
public class PtrMat {
private long swigCPtr;
protected boolean swigCMemOwn;
public PtrMat(long cPtr, boolean cMemoryOwn) {
swigCMemOwn = cMemoryOwn;
swigCPtr = cPtr;
}
public static long getCPtr(PtrMat obj) {
return (obj == null) ? 0 : obj.swigCPtr;
}
protected void finalize() {
delete();
}
public synchronized void delete() {
if (swigCPtr != 0) {
if (swigCMemOwn) {
swigCMemOwn = false;
opencvJNI.delete_PtrMat(swigCPtr);
}
swigCPtr = 0;
}
}
public PtrMat() {
this(opencvJNI.new_PtrMat__SWIG_0(), true);
}
public PtrMat(Mat _obj) {
this(opencvJNI.new_PtrMat__SWIG_1(Mat.getCPtr(_obj), _obj), true);
}
public PtrMat(PtrMat ptr) {
this(opencvJNI.new_PtrMat__SWIG_2(PtrMat.getCPtr(ptr), ptr), true);
}
public void addref() {
opencvJNI.PtrMat_addref(swigCPtr, this);
}
public void release() {
opencvJNI.PtrMat_release(swigCPtr, this);
}
public void delete_obj() {
opencvJNI.PtrMat_delete_obj(swigCPtr, this);
}
public boolean empty() {
return opencvJNI.PtrMat_empty(swigCPtr, this);
}
public Mat __deref__() {
long cPtr = opencvJNI.PtrMat___deref__(swigCPtr, this);
return (cPtr == 0) ? null : new Mat(cPtr, false);
}
public int getRows() {
return opencvJNI.PtrMat_rows_get(swigCPtr, this);
}
public int getCols() {
return opencvJNI.PtrMat_cols_get(swigCPtr, this);
}
}

android/src/com/opencv/jni/Size.java Normal file
View File

@ -0,0 +1,58 @@
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 1.3.40
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package com.opencv.jni;
/** Wrapper for the OpenCV Size object. Good for setting dimensions of cv::Mat...
*/
public class Size {
private long swigCPtr;
protected boolean swigCMemOwn;
public Size(long cPtr, boolean cMemoryOwn) {
swigCMemOwn = cMemoryOwn;
swigCPtr = cPtr;
}
public static long getCPtr(Size obj) {
return (obj == null) ? 0 : obj.swigCPtr;
}
protected void finalize() {
delete();
}
public synchronized void delete() {
if (swigCPtr != 0) {
if (swigCMemOwn) {
swigCMemOwn = false;
opencvJNI.delete_Size(swigCPtr);
}
swigCPtr = 0;
}
}
public Size(int width, int height) {
this(opencvJNI.new_Size(width, height), true);
}
public void setWidth(int value) {
opencvJNI.Size_width_set(swigCPtr, this, value);
}
public int getWidth() {
return opencvJNI.Size_width_get(swigCPtr, this);
}
public void setHeight(int value) {
opencvJNI.Size_height_set(swigCPtr, this, value);
}
public int getHeight() {
return opencvJNI.Size_height_get(swigCPtr, this);
}
}

android/src/com/opencv/jni/glcamera.java Normal file
View File

@ -0,0 +1,78 @@
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 1.3.40
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package com.opencv.jni;
/** A class for doing the native rendering of images.
This class renders using OpenGL ES 2.0 via the native NDK.
It is used by the GL2CameraViewer to do the rendering,
and is inspired by the gl2 example in the NDK samples.
*/
public class glcamera {
private long swigCPtr;
protected boolean swigCMemOwn;
public glcamera(long cPtr, boolean cMemoryOwn) {
swigCMemOwn = cMemoryOwn;
swigCPtr = cPtr;
}
public static long getCPtr(glcamera obj) {
return (obj == null) ? 0 : obj.swigCPtr;
}
protected void finalize() {
delete();
}
public synchronized void delete() {
if (swigCPtr != 0) {
if (swigCMemOwn) {
swigCMemOwn = false;
opencvJNI.delete_glcamera(swigCPtr);
}
swigCPtr = 0;
}
}
/** should be called onSurfaceChanged by the GLSurfaceView that is using this
* as the drawing engine
* @param width the width of the surface view that this will be drawing to
* @param height the height of the surface view that this will be drawing to
*
*/
public void init(int width, int height) {
opencvJNI.glcamera_init(swigCPtr, this, width, height);
}
/** should be called by GLSurfaceView.Renderer in the onDrawFrame method, as it
handles the rendering of the opengl scene, and requires that the opengl context be
valid.
*
*/
public void step() {
opencvJNI.glcamera_step(swigCPtr, this);
}
/** Copies an image from a pool and queues it for drawing in OpenGL.
* This handles the transformation into power-of-two texture sizes.
* @param idx the image index to copy
* @param pool the image_pool to look up the image from
*
*/
public void drawMatToGL(int idx, image_pool pool) {
opencvJNI.glcamera_drawMatToGL(swigCPtr, this, idx, image_pool.getCPtr(pool), pool);
}
public glcamera() {
this(opencvJNI.new_glcamera(), true);
}
}

android/src/com/opencv/jni/image_pool.java Normal file
View File

@ -0,0 +1,55 @@
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 1.3.40
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package com.opencv.jni;
/** image_pool is used for keeping track of a pool of native images. It stores images as cv::Mat's and
references them by an index. It allows one to get a pointer to an underlying mat, and handles memory deletion.*/
public class image_pool {
private long swigCPtr;
protected boolean swigCMemOwn;
public image_pool(long cPtr, boolean cMemoryOwn) {
swigCMemOwn = cMemoryOwn;
swigCPtr = cPtr;
}
public static long getCPtr(image_pool obj) {
return (obj == null) ? 0 : obj.swigCPtr;
}
protected void finalize() {
delete();
}
public synchronized void delete() {
if (swigCPtr != 0) {
if (swigCMemOwn) {
swigCMemOwn = false;
opencvJNI.delete_image_pool(swigCPtr);
}
swigCPtr = 0;
}
}
public image_pool() {
this(opencvJNI.new_image_pool(), true);
}
/** Gets a pointer to a stored image, by index. If the index is new, returns a null pointer.
* @param i the index in the pool that is associated with a cv::Mat
* @return the pointer to a cv::Mat, or a null pointer if the given index is new
*/
public PtrMat getImage(int i) {
return new PtrMat(opencvJNI.image_pool_getImage(swigCPtr, this, i), true);
}
public void addImage(int i, PtrMat mat) {
opencvJNI.image_pool_addImage(swigCPtr, this, i, PtrMat.getCPtr(mat), mat);
}
}
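Downstream PoolCallbacks can look a frame up by its index and query the wrapped cv::Mat. A minimal sketch; the class name and log tag are hypothetical:

package com.opencv.samples; // hypothetical package

import android.util.Log;

import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.PtrMat;
import com.opencv.jni.image_pool;

/** Logs the dimensions of the frame the NativeProcessor just decoded. */
public class SizeProbeCallback implements PoolCallback {
    public void process(int idx, image_pool pool, long timestamp,
            NativeProcessor nativeProcessor) {
        PtrMat mat = pool.getImage(idx);
        if (mat != null && !mat.empty()) {
            Log.d("SizeProbe", mat.getCols() + "x" + mat.getRows());
        }
    }
}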

android/src/com/opencv/jni/opencv.java Normal file
View File

@ -0,0 +1,20 @@
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 1.3.40
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package com.opencv.jni;
public class opencv {
/** Adds a YUV preview frame to the pool, decoding it into a cv::Mat.
* @param pool the image_pool to store the decoded image in
* @param data the raw YUV buffer from the camera preview
* @param idx the index in the pool that is associated with a cv::Mat
* @param width the width of the frame
* @param height the height of the frame
* @param grey if true, the frame is added as a gray image
*/
public static void addYUVtoPool(image_pool pool, byte[] data, int idx, int width, int height, boolean grey) {
opencvJNI.addYUVtoPool(image_pool.getCPtr(pool), pool, data, idx, width, height, grey);
}
}
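The same entry point the NativeProcessor thread uses can also be called directly to fill a pool. A minimal sketch under that assumption; the class and method names are hypothetical, and the buffer and dimensions come from the caller:

package com.opencv.samples; // hypothetical package

import com.opencv.jni.image_pool;
import com.opencv.jni.opencv;

public class YuvDecodeSketch {
    /** Decodes a YCbCr_420_SP sized buffer into pool slot 0, as a color image. */
    public static image_pool decodeOne(byte[] yuv420sp, int width, int height) {
        image_pool pool = new image_pool();
        // grey == false: decode as color, matching NativeProcessor's 420_SP branch
        opencv.addYUVtoPool(pool, yuv420sp, 0, width, height, false);
        return pool;
    }
}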

android/src/com/opencv/jni/opencvJNI.java Normal file
View File

@ -0,0 +1,65 @@
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 1.3.40
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package com.opencv.jni;
class opencvJNI {
static {
try {
//load the library, make sure that libandroid-opencv.so is in your <project>/libs/armeabi directory
//so that the Android SDK automatically installs it along with the app.
System.loadLibrary("android-opencv");
} catch (UnsatisfiedLinkError e) {
//badness
throw e;
}
}
public final static native int Mat_rows_get(long jarg1, Mat jarg1_);
public final static native int Mat_cols_get(long jarg1, Mat jarg1_);
public final static native long new_Mat();
public final static native void delete_Mat(long jarg1);
public final static native long new_Size(int jarg1, int jarg2);
public final static native void Size_width_set(long jarg1, Size jarg1_, int jarg2);
public final static native int Size_width_get(long jarg1, Size jarg1_);
public final static native void Size_height_set(long jarg1, Size jarg1_, int jarg2);
public final static native int Size_height_get(long jarg1, Size jarg1_);
public final static native void delete_Size(long jarg1);
public final static native long new_PtrMat__SWIG_0();
public final static native long new_PtrMat__SWIG_1(long jarg1, Mat jarg1_);
public final static native void delete_PtrMat(long jarg1);
public final static native long new_PtrMat__SWIG_2(long jarg1, PtrMat jarg1_);
public final static native void PtrMat_addref(long jarg1, PtrMat jarg1_);
public final static native void PtrMat_release(long jarg1, PtrMat jarg1_);
public final static native void PtrMat_delete_obj(long jarg1, PtrMat jarg1_);
public final static native boolean PtrMat_empty(long jarg1, PtrMat jarg1_);
public final static native long PtrMat___deref__(long jarg1, PtrMat jarg1_);
public final static native int PtrMat_rows_get(long jarg1, PtrMat jarg1_);
public final static native int PtrMat_cols_get(long jarg1, PtrMat jarg1_);
public final static native void glcamera_init(long jarg1, glcamera jarg1_, int jarg2, int jarg3);
public final static native void glcamera_step(long jarg1, glcamera jarg1_);
public final static native void glcamera_drawMatToGL(long jarg1, glcamera jarg1_, int jarg2, long jarg3, image_pool jarg3_);
public final static native long new_glcamera();
public final static native void delete_glcamera(long jarg1);
public final static native void addYUVtoPool(long jarg1, image_pool jarg1_, byte[] jarg2, int jarg3, int jarg4, int jarg5, boolean jarg6);
public final static native long new_image_pool();
public final static native void delete_image_pool(long jarg1);
public final static native long image_pool_getImage(long jarg1, image_pool jarg1_, int jarg2);
public final static native void image_pool_addImage(long jarg1, image_pool jarg1_, int jarg2, long jarg3, PtrMat jarg3_);
public final static native void Calibration_patternsize_set(long jarg1, Calibration jarg1_, long jarg2, Size jarg2_);
public final static native long Calibration_patternsize_get(long jarg1, Calibration jarg1_);
public final static native long new_Calibration();
public final static native void delete_Calibration(long jarg1);
public final static native boolean Calibration_detectAndDrawChessboard(long jarg1, Calibration jarg1_, int jarg2, long jarg3, image_pool jarg3_);
public final static native void Calibration_resetChess(long jarg1, Calibration jarg1_);
public final static native int Calibration_getNumberDetectedChessboards(long jarg1, Calibration jarg1_);
public final static native void Calibration_calibrate(long jarg1, Calibration jarg1_, String jarg2);
public final static native void Calibration_drawText(long jarg1, Calibration jarg1_, int jarg2, long jarg3, image_pool jarg3_, String jarg4);
}

android/src/com/opencv/opengl/GL2CameraViewer.java Normal file
View File

@ -0,0 +1,405 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.opencv.opengl;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.opengles.GL10;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.glcamera;
import com.opencv.jni.image_pool;
import android.content.Context;
import android.graphics.PixelFormat;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
/**
* A simple GLSurfaceView sub-class that demonstrates how to perform
* OpenGL ES 2.0 rendering into a GL Surface. Note the following important
* details:
*
* - The class must use a custom context factory to enable 2.0 rendering.
* See ContextFactory class definition below.
*
* - The class must use a custom EGLConfigChooser to be able to select
* an EGLConfig that supports 2.0. This is done by providing a config
* specification to eglChooseConfig() that has the attribute
* EGL10.EGL_RENDERABLE_TYPE containing the EGL_OPENGL_ES2_BIT flag
* set. See ConfigChooser class definition below.
*
* - The class must select the surface's format, then choose an EGLConfig
* that matches it exactly (with regards to red/green/blue/alpha channels
* bit depths). Failure to do so would result in an EGL_BAD_MATCH error.
*/
public class GL2CameraViewer extends GLSurfaceView{
private static String TAG = "GL2JNIView";
private static final boolean DEBUG = false;
private PoolCallback poolcallback = new PoolCallback() {
@Override
public void process(int idx, image_pool pool, long timestamp,
NativeProcessor nativeProcessor){
drawMatToGL(idx, pool);
requestRender();
}
};
public GL2CameraViewer(Context context,AttributeSet attributeSet) {
super(context,attributeSet);
init(false, 0, 0);
setZOrderMediaOverlay(true);
}
public GL2CameraViewer(Context context) {
super(context);
init(false, 0, 0);
setZOrderMediaOverlay(true);
}
public GL2CameraViewer(Context context, boolean translucent, int depth, int stencil) {
super(context);
init(translucent, depth, stencil);
setZOrderMediaOverlay(true);
}
private void init(boolean translucent, int depth, int stencil) {
/* By default, GLSurfaceView() creates a RGB_565 opaque surface.
* If we want a translucent one, we should change the surface's
* format here, using PixelFormat.TRANSLUCENT for GL Surfaces
* is interpreted as any 32-bit surface with alpha by SurfaceFlinger.
*/
if (translucent) {
this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
}
/* Setup the context factory for 2.0 rendering.
* See ContextFactory class definition below
*/
setEGLContextFactory(new ContextFactory());
/* We need to choose an EGLConfig that matches the format of
* our surface exactly. This is going to be done in our
* custom config chooser. See ConfigChooser class definition
* below.
*/
setEGLConfigChooser( translucent ?
new ConfigChooser(8, 8, 8, 8, depth, stencil) :
new ConfigChooser(5, 6, 5, 0, depth, stencil) );
/* Set the renderer responsible for frame rendering */
setRenderer(new Renderer());
setRenderMode(RENDERMODE_WHEN_DIRTY);
}
private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
Log.w(TAG, "creating OpenGL ES 2.0 context");
checkEglError("Before eglCreateContext", egl);
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
checkEglError("After eglCreateContext", egl);
return context;
}
public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
egl.eglDestroyContext(display, context);
}
}
private static void checkEglError(String prompt, EGL10 egl) {
int error;
while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
}
}
private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
mRedSize = r;
mGreenSize = g;
mBlueSize = b;
mAlphaSize = a;
mDepthSize = depth;
mStencilSize = stencil;
}
/* This EGL config specification is used to specify 2.0 rendering.
* We use a minimum size of 4 bits for red/green/blue, but will
* perform actual matching in chooseConfig() below.
*/
private static int EGL_OPENGL_ES2_BIT = 4;
private static int[] s_configAttribs2 =
{
EGL10.EGL_RED_SIZE, 4,
EGL10.EGL_GREEN_SIZE, 4,
EGL10.EGL_BLUE_SIZE, 4,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
/* Get the number of minimally matching EGL configurations
*/
int[] num_config = new int[1];
egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException("No configs match configSpec");
}
/* Allocate then read the array of minimally matching EGL configs
*/
EGLConfig[] configs = new EGLConfig[numConfigs];
egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
if (DEBUG) {
printConfigs(egl, display, configs);
}
/* Now return the "best" one
*/
return chooseConfig(egl, display, configs);
}
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
for(EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
// We need at least mDepthSize and mStencilSize bits
if (d < mDepthSize || s < mStencilSize)
continue;
// We want an *exact* match for red/green/blue/alpha
int r = findConfigAttrib(egl, display, config,
EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config,
EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config,
EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config,
EGL10.EGL_ALPHA_SIZE, 0);
if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
return config;
}
return null;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute, int defaultValue) {
if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
return mValue[0];
}
return defaultValue;
}
private void printConfigs(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
int numConfigs = configs.length;
Log.w(TAG, String.format("%d configurations", numConfigs));
for (int i = 0; i < numConfigs; i++) {
Log.w(TAG, String.format("Configuration %d:\n", i));
printConfig(egl, display, configs[i]);
}
}
private void printConfig(EGL10 egl, EGLDisplay display,
EGLConfig config) {
int[] attributes = {
EGL10.EGL_BUFFER_SIZE,
EGL10.EGL_ALPHA_SIZE,
EGL10.EGL_BLUE_SIZE,
EGL10.EGL_GREEN_SIZE,
EGL10.EGL_RED_SIZE,
EGL10.EGL_DEPTH_SIZE,
EGL10.EGL_STENCIL_SIZE,
EGL10.EGL_CONFIG_CAVEAT,
EGL10.EGL_CONFIG_ID,
EGL10.EGL_LEVEL,
EGL10.EGL_MAX_PBUFFER_HEIGHT,
EGL10.EGL_MAX_PBUFFER_PIXELS,
EGL10.EGL_MAX_PBUFFER_WIDTH,
EGL10.EGL_NATIVE_RENDERABLE,
EGL10.EGL_NATIVE_VISUAL_ID,
EGL10.EGL_NATIVE_VISUAL_TYPE,
0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
EGL10.EGL_SAMPLES,
EGL10.EGL_SAMPLE_BUFFERS,
EGL10.EGL_SURFACE_TYPE,
EGL10.EGL_TRANSPARENT_TYPE,
EGL10.EGL_TRANSPARENT_RED_VALUE,
EGL10.EGL_TRANSPARENT_GREEN_VALUE,
EGL10.EGL_TRANSPARENT_BLUE_VALUE,
0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
EGL10.EGL_LUMINANCE_SIZE,
EGL10.EGL_ALPHA_MASK_SIZE,
EGL10.EGL_COLOR_BUFFER_TYPE,
EGL10.EGL_RENDERABLE_TYPE,
0x3042 // EGL10.EGL_CONFORMANT
};
String[] names = {
"EGL_BUFFER_SIZE",
"EGL_ALPHA_SIZE",
"EGL_BLUE_SIZE",
"EGL_GREEN_SIZE",
"EGL_RED_SIZE",
"EGL_DEPTH_SIZE",
"EGL_STENCIL_SIZE",
"EGL_CONFIG_CAVEAT",
"EGL_CONFIG_ID",
"EGL_LEVEL",
"EGL_MAX_PBUFFER_HEIGHT",
"EGL_MAX_PBUFFER_PIXELS",
"EGL_MAX_PBUFFER_WIDTH",
"EGL_NATIVE_RENDERABLE",
"EGL_NATIVE_VISUAL_ID",
"EGL_NATIVE_VISUAL_TYPE",
"EGL_PRESERVED_RESOURCES",
"EGL_SAMPLES",
"EGL_SAMPLE_BUFFERS",
"EGL_SURFACE_TYPE",
"EGL_TRANSPARENT_TYPE",
"EGL_TRANSPARENT_RED_VALUE",
"EGL_TRANSPARENT_GREEN_VALUE",
"EGL_TRANSPARENT_BLUE_VALUE",
"EGL_BIND_TO_TEXTURE_RGB",
"EGL_BIND_TO_TEXTURE_RGBA",
"EGL_MIN_SWAP_INTERVAL",
"EGL_MAX_SWAP_INTERVAL",
"EGL_LUMINANCE_SIZE",
"EGL_ALPHA_MASK_SIZE",
"EGL_COLOR_BUFFER_TYPE",
"EGL_RENDERABLE_TYPE",
"EGL_CONFORMANT"
};
int[] value = new int[1];
for (int i = 0; i < attributes.length; i++) {
int attribute = attributes[i];
String name = names[i];
if ( egl.eglGetConfigAttrib(display, config, attribute, value)) {
Log.w(TAG, String.format(" %s: %d\n", name, value[0]));
} else {
// Log.w(TAG, String.format(" %s: failed\n", name));
while (egl.eglGetError() != EGL10.EGL_SUCCESS);
}
}
}
// Subclasses can adjust these values:
protected int mRedSize;
protected int mGreenSize;
protected int mBlueSize;
protected int mAlphaSize;
protected int mDepthSize;
protected int mStencilSize;
private int[] mValue = new int[1];
}
glcamera mglcamera;
public void drawMatToGL(int idx, image_pool pool){
if(mglcamera != null)
mglcamera.drawMatToGL(idx, pool);
else
Log.e("android-opencv", "null glcamera!!!!");
}
private class Renderer implements GLSurfaceView.Renderer {
public void onDrawFrame(GL10 gl) {
mglcamera.step();
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
mglcamera.init(width, height);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
}
@Override
public void onPause() {
mglcamera = null;
// TODO Auto-generated method stub
super.onPause();
}
@Override
public void onResume() {
mglcamera = new glcamera();
// TODO Auto-generated method stub
super.onResume();
}
public PoolCallback getDrawCallback() {
// TODO Auto-generated method stub
return poolcallback;
}
}