Removed outdated android-opencv subproject

Andrey Kamaev 2011-08-09 11:55:43 +00:00
parent d9d74678a9
commit e553a37fe7
120 changed files with 0 additions and 8393 deletions


@@ -1,29 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.opencv" android:versionCode="1" android:versionName="0.1">
<application android:debuggable="true">
<!-- The activity tags here are currently not used. The main project (TicTacToeMain)
must currently redefine the activities it uses from the libraries. However,
later the tools will pick up the activities from here and merge them automatically,
so it's best to define your activities here as for any regular Android
project. -->
<activity android:name="com.opencv.OpenCV">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity android:name="com.opencv.calibration.ChessBoardChooser" />
<activity android:name="com.opencv.calibration.CameraConfig" />
<activity android:name="com.opencv.calibration.CalibrationViewer" />
<service android:name="com.opencv.calibration.services.CalibrationService" />
</application>
<uses-sdk android:minSdkVersion="7" />
<!-- set the opengl version -->
<uses-feature android:glEsVersion="0x00020000" />
<uses-permission android:name="android.permission.CAMERA"></uses-permission>
</manifest>


@@ -1,44 +0,0 @@
# ============================================================================
# The AndroidOpenCV CMake configuration file
#
# ** File generated automatically, do not modify **
#
# Usage from an external project:
# In your CMakeLists.txt, add these lines:
#
# FIND_PACKAGE(AndroidOpenCV REQUIRED )
# TARGET_LINK_LIBRARIES(MY_TARGET_NAME ${AndroidOpenCV_LIBS})
#
# This file will define the following variables:
# - AndroidOpenCV_LIBS : The list of libraries to link against.
# - AndroidOpenCV_LIB_DIR : The directory where lib files are.
# Calling LINK_DIRECTORIES with this path
# is NOT needed.
# - AndroidOpenCV_INCLUDE_DIRS : The AndroidOpenCV include directories.
# - AndroidOpenCV_SWIG_DIR : The swig path
#
# ===========================================================================
# ======================================================
# Include directories to add to the user project:
# ======================================================
# Provide the include directories to the caller
SET(AndroidOpenCV_INCLUDE_DIRS @CMAKE_INCLUDE_DIRS_CONFIGCMAKE@)
INCLUDE_DIRECTORIES(${AndroidOpenCV_INCLUDE_DIRS})
# ======================================================
# Link directories to add to the user project:
# ======================================================
# Provide the libs directory anyway, it may be needed in some cases.
SET(AndroidOpenCV_LIB_DIR @CMAKE_LIB_DIRS_CONFIGCMAKE@)
LINK_DIRECTORIES(${AndroidOpenCV_LIB_DIR})
# ======================================================
# Libraries to add to the user project:
# ======================================================
SET(AndroidOpenCV_LIBS @CMAKE_LIBS_CONFIGCMAKE@)
SET(AndroidOpenCV_SWIG_DIR @CMAKE_SWIG_DIR_CONFIGCMAKE@)


@@ -1,5 +0,0 @@
cmake_minimum_required(VERSION 2.8)
project(android-jni)
add_subdirectory(jni)


@@ -1,15 +0,0 @@
=========================================
CMake Build
=========================================
#path to the android build of opencv
opencv_dir=`pwd`/../build
mkdir build
cd build
cmake -DOpenCV_DIR=$opencv_dir -DCMAKE_TOOLCHAIN_FILE=$ANDTOOLCHAIN ..
=========================================
Android Build
=========================================
sh project_create.sh
ant compile


@@ -1,7 +0,0 @@
@ECHO OFF
SETLOCAL
PUSHD %~dp0
SET PROJECT_NAME=android-opencv
CALL ..\scripts\build.cmd %*
POPD
ENDLOCAL


@@ -1,9 +0,0 @@
#!/bin/sh
cd `dirname $0`
opencv_build_dir=`pwd`/../build
mkdir -p build
cd build
cmake -DOpenCV_DIR=$opencv_build_dir -DCMAKE_TOOLCHAIN_FILE=../../android.toolchain.cmake ..


@@ -1,8 +0,0 @@
#!/bin/sh
cd `dirname $0`
opencv_build_dir=`pwd`/../build_neon
mkdir -p build_neon
cd build_neon
cmake -DOpenCV_DIR=$opencv_build_dir -DARM_TARGET="armeabi-v7a with NEON" -DCMAKE_TOOLCHAIN_FILE=../../android.toolchain.cmake ..


@@ -1,11 +0,0 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system, use
# "build.properties", and override values to adapt the script to your
# project structure.
# Project target.
target=android-7


@@ -1,70 +0,0 @@
#########################################################
# Find opencv and android-opencv
#########################################################
set(OpenCV_DIR ${CMAKE_SOURCE_DIR}/../build
CACHE PATH "The path where you built opencv for android")
find_package(OpenCV REQUIRED)
#########################################################
#c flags, included, and lib dependencies
#########################################################
#notice the "recycling" of CMAKE_C_FLAGS
#this is necessary to pick up android flags
set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -pedantic -fPIC" )
INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}
${CMAKE_CURRENT_SOURCE_DIR}/include)
set( LIBRARY_DEPS ${OpenCV_LIBS} )
if(ANDROID)
set( LIBRARY_DEPS ${LIBRARY_DEPS} log dl GLESv2)
endif(ANDROID)
#########################################################
#SWIG STUFF
#########################################################
#the java package to place swig generated java files in
set(MY_PACKAGE com.opencv.jni)
if(NOT ANDROID)
#non android swig and jni
#jni is available by default on android
find_package(JNI REQUIRED)
include_directories(${JNI_INCLUDE_DIRS})
FIND_PACKAGE(SWIG)
endif()
INCLUDE(${SWIG_USE_FILE}) #on android this is found by the cmake toolchain
if(ANDROID)
#this will set the output path for the java package
#and properly create the package declarations in generated java sources
SET_SWIG_JAVA_PACKAGE( ${MY_PACKAGE} ) #defined in the android toolchain
endif(ANDROID)
SET_SOURCE_FILES_PROPERTIES(android-cv.i PROPERTIES CPLUSPLUS ON)
SWIG_ADD_MODULE(android-opencv java
android-cv.i
Calibration.cpp
gl_code.cpp
image_pool.cpp
yuv420sp2rgb.c
#yuv420rgb888c.c
#yuv420rgb888.s
yuv2rgb16tab.c
)
target_link_libraries(android-opencv ${LIBRARY_DEPS} )
###################################################################
# Setup the configure file for others to link against.
###################################################################
set(CMAKE_INCLUDE_DIRS_CONFIGCMAKE ${CMAKE_CURRENT_SOURCE_DIR}/include)
set(CMAKE_LIB_DIRS_CONFIGCMAKE ${LIBRARY_OUTPUT_PATH})
set(CMAKE_LIBS_CONFIGCMAKE android-opencv)
set(CMAKE_SWIG_DIR_CONFIGCMAKE ${CMAKE_CURRENT_SOURCE_DIR})
configure_file( "${CMAKE_SOURCE_DIR}/AndroidOpenCVConfig.cmake.in"
"${CMAKE_BINARY_DIR}/AndroidOpenCVConfig.cmake"
IMMEDIATE @ONLY)


@@ -1,245 +0,0 @@
/*
* Processor.cpp
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#include "Calibration.h"
#include <sys/stat.h>
using namespace cv;
Calibration::Calibration() :
patternsize(6, 8)
{
}
Calibration::~Calibration()
{
}
namespace
{
double computeReprojectionErrors(const vector<vector<Point3f> >& objectPoints,
                                 const vector<vector<Point2f> >& imagePoints,
                                 const vector<Mat>& rvecs, const vector<Mat>& tvecs,
                                 const Mat& cameraMatrix, const Mat& distCoeffs,
                                 vector<float>& perViewErrors)
{
vector<Point2f> imagePoints2;
int i, totalPoints = 0;
double totalErr = 0, err;
perViewErrors.resize(objectPoints.size());
for (i = 0; i < (int)objectPoints.size(); i++)
{
projectPoints(Mat(objectPoints[i]), rvecs[i], tvecs[i], cameraMatrix, distCoeffs, imagePoints2);
err = norm(Mat(imagePoints[i]), Mat(imagePoints2), CV_L1);
int n = (int)objectPoints[i].size();
perViewErrors[i] = err / n;
totalErr += err;
totalPoints += n;
}
return totalErr / totalPoints;
}
void calcChessboardCorners(Size boardSize, float squareSize, vector<Point3f>& corners)
{
corners.resize(0);
for (int i = 0; i < boardSize.height; i++)
for (int j = 0; j < boardSize.width; j++)
corners.push_back(Point3f(float(j * squareSize), float(i * squareSize), 0));
}
/**from opencv/samples/cpp/calibration.cpp
*
*/
bool runCalibration(vector<vector<Point2f> > imagePoints, Size imageSize, Size boardSize, float squareSize,
float aspectRatio, int flags, Mat& cameraMatrix, Mat& distCoeffs, vector<Mat>& rvecs,
vector<Mat>& tvecs, vector<float>& reprojErrs, double& totalAvgErr)
{
cameraMatrix = Mat::eye(3, 3, CV_64F);
if (flags & CV_CALIB_FIX_ASPECT_RATIO)
cameraMatrix.at<double> (0, 0) = aspectRatio;
distCoeffs = Mat::zeros(4, 1, CV_64F);
vector<vector<Point3f> > objectPoints(1);
calcChessboardCorners(boardSize, squareSize, objectPoints[0]);
for (size_t i = 1; i < imagePoints.size(); i++)
objectPoints.push_back(objectPoints[0]);
calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, flags);
bool ok = checkRange(cameraMatrix, CV_CHECK_QUIET) && checkRange(distCoeffs, CV_CHECK_QUIET);
totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix, distCoeffs, reprojErrs);
return ok;
}
void saveCameraParams(const string& filename, Size imageSize, Size boardSize, float squareSize, float aspectRatio,
int flags, const Mat& cameraMatrix, const Mat& distCoeffs, const vector<Mat>& rvecs,
const vector<Mat>& tvecs, const vector<float>& reprojErrs,
const vector<vector<Point2f> >& imagePoints, double totalAvgErr)
{
FileStorage fs(filename, FileStorage::WRITE);
time_t t;
time(&t);
struct tm *t2 = localtime(&t);
char buf[1024];
strftime(buf, sizeof(buf) - 1, "%c", t2);
fs << "calibration_time" << buf;
if (!rvecs.empty() || !reprojErrs.empty())
fs << "nframes" << (int)std::max(rvecs.size(), reprojErrs.size());
fs << "image_width" << imageSize.width;
fs << "image_height" << imageSize.height;
fs << "board_width" << boardSize.width;
fs << "board_height" << boardSize.height;
fs << "squareSize" << squareSize;
if (flags & CV_CALIB_FIX_ASPECT_RATIO)
fs << "aspectRatio" << aspectRatio;
if (flags != 0)
{
sprintf(buf, "flags: %s%s%s%s",
        flags & CV_CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess" : "",
        flags & CV_CALIB_FIX_ASPECT_RATIO ? "+fix_aspectRatio" : "",
        flags & CV_CALIB_FIX_PRINCIPAL_POINT ? "+fix_principal_point" : "",
        flags & CV_CALIB_ZERO_TANGENT_DIST ? "+zero_tangent_dist" : "");
cvWriteComment(*fs, buf, 0);
}
fs << "flags" << flags;
fs << "camera_matrix" << cameraMatrix;
fs << "distortion_coefficients" << distCoeffs;
fs << "avg_reprojection_error" << totalAvgErr;
if (!reprojErrs.empty())
fs << "per_view_reprojection_errors" << Mat(reprojErrs);
if (!rvecs.empty() && !tvecs.empty())
{
Mat bigmat(rvecs.size(), 6, CV_32F);
for (size_t i = 0; i < rvecs.size(); i++)
{
Mat r = bigmat(Range(i, i + 1), Range(0, 3));
Mat t = bigmat(Range(i, i + 1), Range(3, 6));
rvecs[i].copyTo(r);
tvecs[i].copyTo(t);
}
cvWriteComment(*fs, "a set of 6-tuples (rotation vector + translation vector) for each view", 0);
fs << "extrinsic_parameters" << bigmat;
}
if (!imagePoints.empty())
{
Mat imagePtMat(imagePoints.size(), imagePoints[0].size(), CV_32FC2);
for (size_t i = 0; i < imagePoints.size(); i++)
{
Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols);
Mat(imagePoints[i]).copyTo(r);
}
fs << "image_points" << imagePtMat;
}
}
}//anon namespace
bool Calibration::detectAndDrawChessboard(int idx, image_pool* pool)
{
bool patternfound = false;
Mat grey = pool->getGrey(idx);
if (grey.empty())
return false;
vector<Point2f> corners;
patternfound = findChessboardCorners(grey, patternsize, corners,
                                     CALIB_CB_FILTER_QUADS + CALIB_CB_ADAPTIVE_THRESH
                                     + CALIB_CB_NORMALIZE_IMAGE + CALIB_CB_FAST_CHECK);
Mat img = pool->getImage(idx);
if (corners.size() < 1)
return false;
if (patternfound)
{
cornerSubPix(grey, corners, Size(11, 11), Size(-1, -1), TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
imagepoints.push_back(corners);
}
drawChessboardCorners(img, patternsize, Mat(corners), patternfound);
imgsize = grey.size();
return patternfound;
}
void Calibration::drawText(int i, image_pool* pool, const char* ctext)
{
// measure the text size and baseline so the label can be centered ("y"-style descenders sit below the baseline)
string text = ctext;
int fontFace = FONT_HERSHEY_COMPLEX_SMALL;
double fontScale = .8;
int thickness = 1; // was ".5", which truncates to 0 for an int
Mat img = pool->getImage(i);
int baseline = 0;
Size textSize = getTextSize(text, fontFace, fontScale, thickness, &baseline);
baseline += thickness;
// center the text
Point textOrg((img.cols - textSize.width) / 2, (img.rows - textSize.height * 2));
// draw the box
rectangle(img, textOrg + Point(0, baseline), textOrg + Point(textSize.width, -textSize.height), Scalar(0, 0, 255),
CV_FILLED);
// ... and the baseline first
line(img, textOrg + Point(0, thickness), textOrg + Point(textSize.width, thickness), Scalar(0, 0, 255));
// then put the text itself
putText(img, text, textOrg, fontFace, fontScale, Scalar::all(255), thickness, 8);
}
void Calibration::resetChess()
{
imagepoints.clear();
}
void Calibration::calibrate(const char* filename)
{
vector<Mat> rvecs, tvecs;
vector<float> reprojErrs;
double totalAvgErr = 0;
int flags = 0;
flags |= CV_CALIB_FIX_PRINCIPAL_POINT | CV_CALIB_FIX_ASPECT_RATIO;
bool writeExtrinsics = true;
bool writePoints = true;
bool ok = runCalibration(imagepoints, imgsize, patternsize, 1.f, 1.f, flags, K, distortion, rvecs, tvecs, reprojErrs,
totalAvgErr);
if (ok)
{
saveCameraParams(filename, imgsize, patternsize, 1.f, 1.f, flags, K, distortion,
                 writeExtrinsics ? rvecs : vector<Mat>(),
                 writeExtrinsics ? tvecs : vector<Mat>(),
                 writeExtrinsics ? reprojErrs : vector<float>(),
                 writePoints ? imagepoints : vector<vector<Point2f> >(), totalAvgErr);
}
}
int Calibration::getNumberDetectedChessboards()
{
return imagepoints.size();
}
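
For context, a hedged sketch of how a caller might drive the Calibration API above; the view-count threshold and the output path are illustrative assumptions, not values taken from this project.

// Hypothetical per-frame driver for the Calibration class above.
#include "Calibration.h"
#include "image_pool.h"

void onPreviewFrame(Calibration& calib, image_pool* pool, int idx)
{
  const int CHESS_MIN_VIEWS = 10; // assumed: number of views needed for a stable solve
  if (calib.detectAndDrawChessboard(idx, pool))
    calib.drawText(idx, pool, "chessboard found");
  if (calib.getNumberDetectedChessboards() >= CHESS_MIN_VIEWS)
  {
    calib.calibrate("/sdcard/camera.yml"); // hypothetical output file
    calib.resetChess();
  }
}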


@@ -1,28 +0,0 @@
/*
* include the headers required by the generated cpp code
*/
%{
#include "Calibration.h"
#include "image_pool.h"
using namespace cv;
%}
class Calibration {
public:
Size patternsize;
Calibration();
virtual ~Calibration();
bool detectAndDrawChessboard(int idx, image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
};


@@ -1,6 +0,0 @@
%feature("director") Mat;
%feature("director") glcamera;
%feature("director") image_pool;
%typemap("javapackage") Mat, Mat *, Mat & "com.opencv.jni";
%typemap("javapackage") glcamera, glcamera *, glcamera & "com.opencv.jni";
%typemap("javapackage") image_pool, image_pool *, image_pool & "com.opencv.jni";


@@ -1,58 +0,0 @@
/* File : android-cv.i
import this file, and make sure to call System.loadLibrary("android-opencv")
before loading any lib that depends on this.
*/
%module opencv
%{
#include "image_pool.h"
#include "glcamera.h"
using namespace cv;
%}
#ifndef SWIGIMPORTED
%include "various.i"
%include "typemaps.i"
%include "arrays_java.i"
#endif
/**
* Make all the swig pointers public, so that
* external libraries can refer to these, otherwise they default to
* protected...
*/
%typemap(javabody) SWIGTYPE %{
private long swigCPtr;
protected boolean swigCMemOwn;
public $javaclassname(long cPtr, boolean cMemoryOwn) {
swigCMemOwn = cMemoryOwn;
swigCPtr = cPtr;
}
public static long getCPtr($javaclassname obj) {
return (obj == null) ? 0 : obj.swigCPtr;
}
%}
%pragma(java) jniclasscode=%{
static {
try {
//load the library, make sure that libandroid-opencv.so is in your <project>/libs/armeabi directory
//so that android sdk automatically installs it along with the app.
System.loadLibrary("android-opencv");
} catch (UnsatisfiedLinkError e) {
//badness
throw e;
}
}
%}
%include "cv.i"
%include "glcamera.i"
%include "image_pool.i"
%include "Calibration.i"


@@ -1,165 +0,0 @@
/*
* These typemaps provide support for sharing data between JNI and JVM code
* using NIO direct buffers. It is the responsibility of the JVM code to
* allocate a direct buffer of the appropriate size.
*
* Example use:
* Wrapping:
* %include "buffers.i"
* %apply int* BUFF {int* buffer}
* int read_foo_int(int* buffer);
*
* Java:
* IntBuffer buffer = IntBuffer.allocateDirect(nInts*4).order(ByteOrder.nativeOrder()).asIntBuffer();
* Example.read_foo_int(buffer);
*
* The following typemaps are defined:
* void* BUFF <--> javax.nio.Buffer
* char* BUFF <--> javax.nio.ByteBuffer
* char* CBUFF <--> javax.nio.CharBuffer
* unsigned char* INBUFF/OUTBUFF <--> javax.nio.ShortBuffer
* short* BUFF <--> javax.nio.ShortBuffer
* unsigned short* INBUFF/OUTBUFF <--> javax.nio.IntBuffer
* int* BUFF <--> javax.nio.IntBuffer
* unsigned int* INBUFF/OUTBUFF <--> javax.nio.LongBuffer
* long* BUFF <--> javax.nio.IntBuffer
* unsigned long* INBUFF/OUTBUFF <--> javax.nio.LongBuffer
* long long* BUFF <--> javax.nio.LongBuffer
* float* BUFF <--> javax.nio.FloatBuffer
* double* BUFF <--> javax.nio.DoubleBuffer
*
* Note the potential for data loss in the conversion from
* the C type 'unsigned long' to the signed Java long type.
* Hopefully, I can implement a workaround with BigNumber in the future.
*
* The use of ByteBuffer vs CharBuffer for the char* type should
* depend on the type of data. In general you'll probably
* want to use CharBuffer for actual text data.
*/
/*
* This macro is used to define the nio buffers for primitive types.
*/
%define NIO_BUFFER_TYPEMAP(CTYPE, LABEL, BUFFERTYPE)
%typemap(jni) CTYPE* LABEL "jobject"
%typemap(jtype) CTYPE* LABEL "BUFFERTYPE"
%typemap(jstype) CTYPE* LABEL "BUFFERTYPE"
%typemap(javain,
pre=" assert $javainput.isDirect() : \"Buffer must be allocated direct.\";") CTYPE* LABEL "$javainput"
%typemap(javaout) CTYPE* LABEL {
return $jnicall;
}
%typemap(in) CTYPE* LABEL {
$1 = (CTYPE*)(jenv)->GetDirectBufferAddress( $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* LABEL {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* LABEL ""
%enddef
NIO_BUFFER_TYPEMAP(void, BUFF, java.nio.Buffer);
NIO_BUFFER_TYPEMAP(char, BUFF, java.nio.ByteBuffer);
NIO_BUFFER_TYPEMAP(char, CBUFF, java.nio.CharBuffer);
/*NIO_BUFFER_TYPEMAP(unsigned char, BUFF, java.nio.ShortBuffer);*/
NIO_BUFFER_TYPEMAP(short, BUFF, java.nio.ShortBuffer);
NIO_BUFFER_TYPEMAP(unsigned short, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(int, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(unsigned int, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(long, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(unsigned long, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(long long, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(float, BUFF, java.nio.FloatBuffer);
NIO_BUFFER_TYPEMAP(double, BUFF, java.nio.DoubleBuffer);
#undef NIO_BUFFER_TYPEMAP
%define UNSIGNED_NIO_BUFFER_TYPEMAP(CTYPE, BSIZE, BUFFERTYPE, PACKFCN, UNPACKFCN)
%typemap(jni) CTYPE* INBUFF "jobject"
%typemap(jtype) CTYPE* INBUFF "java.nio.ByteBuffer"
%typemap(jstype) CTYPE* INBUFF "BUFFERTYPE"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = PACKFCN($javainput, true);") CTYPE* INBUFF "tmp$javainput"
%typemap(javaout) CTYPE* INBUFF {
return $jnicall;
}
%typemap(in) CTYPE* INBUFF {
$1 = (CTYPE*)(jenv)->GetDirectBufferAddress($input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* INBUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* INBUFF ""
%typemap(jni) CTYPE* OUTBUFF "jobject"
%typemap(jtype) CTYPE* OUTBUFF "java.nio.ByteBuffer"
%typemap(jstype) CTYPE* OUTBUFF "BUFFERTYPE"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = java.nio.ByteBuffer.allocateDirect($javainput.capacity()*BSIZE).order($javainput.order());",
post=" UNPACKFCN(tmp$javainput, $javainput);") CTYPE* OUTBUFF "tmp$javainput"
%typemap(javaout) CTYPE* OUTBUFF {
return $jnicall;
}
%typemap(in) CTYPE* OUTBUFF {
$1 = (CTYPE*)(jenv)->GetDirectBufferAddress($input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* OUTBUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* OUTBUFF ""
%enddef
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned char, 1, java.nio.ShortBuffer, permafrost.hdf.libhdf.BufferUtils.packUChar, permafrost.hdf.libhdf.BufferUtils.unpackUChar);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned short, 2, java.nio.IntBuffer, permafrost.hdf.libhdf.BufferUtils.packUShort, permafrost.hdf.libhdf.BufferUtils.unpackUShort);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned int, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned long, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
/*
%typemap(jni) unsigned char* BUFF "jobject"
%typemap(jtype) unsigned char* BUFF "java.nio.ByteBuffer"
%typemap(jstype) unsigned char* BUFF "java.nio.ShortBuffer"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = permafrost.hdf.libhdf.BufferUtils.packUChar($javainput, true);",
post=" permafrost.hdf.libhdf.BufferUtils.unpackUChar(tmp$javainput, $javainput);") unsigned char* BUFF "tmp$javainput"
%typemap(javaout) unsigned char* BUFF {
return $jnicall;
}
%typemap(in) unsigned char* BUFF {
$1 = (const char*)(jenv)->GetDirectBufferAddress( $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) unsigned char* BUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) unsigned char* BUFF ""
*/
#undef UNSIGNED_NIO_BUFFER_TYPEMAP


@@ -1,156 +0,0 @@
%typemap(javaimports) Mat "
/** Wrapper for the OpenCV Mat object. Good for passing around as a pointer to a Mat.
*/"
%typemap(javaimports) Size "
/** Wrapper for the OpenCV Size object. Good for setting dimensions of cv::Mat...
*/"
class Size{
public:
Size();
Size(int width,int height);
~Size();
int width;
int height;
};
#define CV_CN_MAX 512
#define CV_CN_SHIFT 3
#define CV_DEPTH_MAX (1 << CV_CN_SHIFT)
#define CV_8U 0
#define CV_8S 1
#define CV_16U 2
#define CV_16S 3
#define CV_32S 4
#define CV_32F 5
#define CV_64F 6
#define CV_USRTYPE1 7
#define CV_MAT_DEPTH_MASK (CV_DEPTH_MAX - 1)
#define CV_MAT_DEPTH(flags) ((flags) & CV_MAT_DEPTH_MASK)
#define CV_MAKETYPE(depth,cn) (CV_MAT_DEPTH(depth) + (((cn)-1) << CV_CN_SHIFT))
#define CV_MAKE_TYPE CV_MAKETYPE
#define CV_8UC1 CV_MAKETYPE(CV_8U,1)
#define CV_8UC2 CV_MAKETYPE(CV_8U,2)
#define CV_8UC3 CV_MAKETYPE(CV_8U,3)
#define CV_8UC4 CV_MAKETYPE(CV_8U,4)
#define CV_8UC(n) CV_MAKETYPE(CV_8U,(n))
#define CV_8SC1 CV_MAKETYPE(CV_8S,1)
#define CV_8SC2 CV_MAKETYPE(CV_8S,2)
#define CV_8SC3 CV_MAKETYPE(CV_8S,3)
#define CV_8SC4 CV_MAKETYPE(CV_8S,4)
#define CV_8SC(n) CV_MAKETYPE(CV_8S,(n))
#define CV_16UC1 CV_MAKETYPE(CV_16U,1)
#define CV_16UC2 CV_MAKETYPE(CV_16U,2)
#define CV_16UC3 CV_MAKETYPE(CV_16U,3)
#define CV_16UC4 CV_MAKETYPE(CV_16U,4)
#define CV_16UC(n) CV_MAKETYPE(CV_16U,(n))
#define CV_16SC1 CV_MAKETYPE(CV_16S,1)
#define CV_16SC2 CV_MAKETYPE(CV_16S,2)
#define CV_16SC3 CV_MAKETYPE(CV_16S,3)
#define CV_16SC4 CV_MAKETYPE(CV_16S,4)
#define CV_16SC(n) CV_MAKETYPE(CV_16S,(n))
#define CV_32SC1 CV_MAKETYPE(CV_32S,1)
#define CV_32SC2 CV_MAKETYPE(CV_32S,2)
#define CV_32SC3 CV_MAKETYPE(CV_32S,3)
#define CV_32SC4 CV_MAKETYPE(CV_32S,4)
#define CV_32SC(n) CV_MAKETYPE(CV_32S,(n))
#define CV_32FC1 CV_MAKETYPE(CV_32F,1)
#define CV_32FC2 CV_MAKETYPE(CV_32F,2)
#define CV_32FC3 CV_MAKETYPE(CV_32F,3)
#define CV_32FC4 CV_MAKETYPE(CV_32F,4)
#define CV_32FC(n) CV_MAKETYPE(CV_32F,(n))
#define CV_64FC1 CV_MAKETYPE(CV_64F,1)
#define CV_64FC2 CV_MAKETYPE(CV_64F,2)
#define CV_64FC3 CV_MAKETYPE(CV_64F,3)
#define CV_64FC4 CV_MAKETYPE(CV_64F,4)
#define CV_64FC(n) CV_MAKETYPE(CV_64F,(n))
#define CV_AUTO_STEP 0x7fffffff
#define CV_WHOLE_ARR cvSlice( 0, 0x3fffffff )
#define CV_MAT_CN_MASK ((CV_CN_MAX - 1) << CV_CN_SHIFT)
#define CV_MAT_CN(flags) ((((flags) & CV_MAT_CN_MASK) >> CV_CN_SHIFT) + 1)
#define CV_MAT_TYPE_MASK (CV_DEPTH_MAX*CV_CN_MAX - 1)
#define CV_MAT_TYPE(flags) ((flags) & CV_MAT_TYPE_MASK)
#define CV_MAT_CONT_FLAG_SHIFT 14
#define CV_MAT_CONT_FLAG (1 << CV_MAT_CONT_FLAG_SHIFT)
#define CV_IS_MAT_CONT(flags) ((flags) & CV_MAT_CONT_FLAG)
#define CV_IS_CONT_MAT CV_IS_MAT_CONT
#define CV_SUBMAT_FLAG_SHIFT 15
#define CV_SUBMAT_FLAG (1 << CV_SUBMAT_FLAG_SHIFT)
#define CV_IS_SUBMAT(flags) ((flags) & CV_SUBMAT_FLAG)
#define CV_MAGIC_MASK 0xFFFF0000
#define CV_MAT_MAGIC_VAL 0x42420000
#define CV_TYPE_NAME_MAT "opencv-matrix"
class Mat {
public:
Mat();
~Mat();
void create(Size size, int type);
int channels() const;
%immutable;
int rows;
int cols;
};
template<class _Tp> class Ptr
{
public:
//! empty constructor
Ptr();
//! take ownership of the pointer. The associated reference counter is allocated and set to 1
Ptr(_Tp* _obj);
//! calls release()
~Ptr();
//! copy constructor. Copies the members and calls addref()
Ptr(const Ptr& ptr);
//! copy operator. Calls ptr.addref() and release() before copying the members
// Ptr& operator = (const Ptr& ptr);
//! increments the reference counter
void addref();
//! decrements the reference counter. If it reaches 0, delete_obj() is called
void release();
//! deletes the object. Override if needed
void delete_obj();
//! returns true iff obj==NULL
bool empty() const;
//! helper operators making "Ptr<T> ptr" use very similar to "T* ptr".
_Tp* operator -> ();
// const _Tp* operator -> () const;
// operator _Tp* ();
// operator const _Tp*() const;
protected:
_Tp* obj; //< the object pointer.
int* refcount; //< the associated reference counter
};
%template(PtrMat) Ptr<Mat>;
void imwrite(const char* image_name, const Mat& image);
Mat imread(const char* image_name);
%include "buffers.i"
%apply char* BUFF {const char* buffer}
%apply char* BUFF {char* buffer}
void copyMatToBuffer(char* buffer, const Mat& mat);
void copyBufferToMat(Mat& mat, const char* buffer);


@@ -1,352 +0,0 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// OpenGL ES 2.0 code
#include <jni.h>
#if __ANDROID__
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#else
#include <GL/gl.h>
#endif
#include "android_logger.h"
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <stdint.h>
#include "glcamera.h"
#include "image_pool.h"
using namespace cv;
static void printGLString(const char *name, GLenum s)
{
const char *v = (const char *)glGetString(s);
LOGI("GL %s = %s\n", name, v);
}
static void checkGlError(const char* op)
{
for (GLint error = glGetError(); error; error = glGetError())
{
LOGI("after %s() glError (0x%x)\n", op, error);
}
}
static const char gVertexShader[] = "attribute vec4 a_position; \n"
"attribute vec2 a_texCoord; \n"
"varying vec2 v_texCoord; \n"
"void main() \n"
"{ \n"
" gl_Position = a_position; \n"
" v_texCoord = a_texCoord; \n"
"} \n";
static const char gFragmentShader[] = "precision mediump float; \n"
"varying vec2 v_texCoord; \n"
"uniform sampler2D s_texture; \n"
"void main() \n"
"{ \n"
" gl_FragColor = texture2D( s_texture, v_texCoord );\n"
"} \n";
GLuint glcamera::createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels)
{
// Bind the texture
glActiveTexture(GL_TEXTURE0);
checkGlError("glActiveTexture");
// Bind the texture object
glBindTexture(GL_TEXTURE_2D, _textureid);
checkGlError("glBindTexture");
GLenum format = GL_RGBA; // default, so format is never read uninitialized for unexpected channel counts
switch (channels)
{
case 3:
#if ANDROID
format = GL_RGB;
#else
format = GL_BGR;
#endif
break;
case 1:
format = GL_LUMINANCE;
break;
case 4:
format = GL_RGBA;
break;
}
// Load the texture
glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format, GL_UNSIGNED_BYTE, pixels);
checkGlError("glTexImage2D");
#if ANDROID
// Set the filtering mode
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
#else
/* Linear Filtering */
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
#endif
return _textureid;
}
GLuint glcamera::loadShader(GLenum shaderType, const char* pSource)
{
GLuint shader = 0;
#if __ANDROID__
shader = glCreateShader(shaderType);
if (shader)
{
glShaderSource(shader, 1, &pSource, NULL);
glCompileShader(shader);
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled)
{
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
if (infoLen)
{
char* buf = (char*)malloc(infoLen);
if (buf)
{
glGetShaderInfoLog(shader, infoLen, NULL, buf);
LOGE("Could not compile shader %d:\n%s\n",
shaderType, buf);
free(buf);
}
glDeleteShader(shader);
shader = 0;
}
}
}
#endif
return shader;
}
GLuint glcamera::createProgram(const char* pVertexSource, const char* pFragmentSource)
{
#if __ANDROID__
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
if (!vertexShader)
{
return 0;
}
GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
if (!pixelShader)
{
return 0;
}
GLuint program = glCreateProgram();
if (program)
{
glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (linkStatus != GL_TRUE)
{
GLint bufLength = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
if (bufLength)
{
char* buf = (char*)malloc(bufLength);
if (buf)
{
glGetProgramInfoLog(program, bufLength, NULL, buf);
LOGE("Could not link program:\n%s\n", buf);
free(buf);
}
}
glDeleteProgram(program);
program = 0;
}
}
return program;
#else
return 0;
#endif
}
void glcamera::clear(){
nimg = Mat();
}
//GLuint textureID;
bool glcamera::setupGraphics(int w, int h)
{
// printGLString("Version", GL_VERSION);
// printGLString("Vendor", GL_VENDOR);
// printGLString("Renderer", GL_RENDERER);
// printGLString("Extensions", GL_EXTENSIONS);
#if __ANDROID__
gProgram = createProgram(gVertexShader, gFragmentShader);
if (!gProgram)
{
LOGE("Could not create program.");
return false;
}
gvPositionHandle = glGetAttribLocation(gProgram, "a_position");
gvTexCoordHandle = glGetAttribLocation(gProgram, "a_texCoord");
gvSamplerHandle = glGetAttribLocation(gProgram, "s_texture");
// Use tightly packed data
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
// Generate a texture object
glGenTextures(1, &textureID);
glViewport(0, 0, w, h);
#endif
return true;
}
void glcamera::renderFrame()
{
#if __ANDROID__
GLfloat vVertices[] = {-1.0f, 1.0f, 0.0f, // Position 0
0.0f, 0.0f, // TexCoord 0
-1.0f, -1.0f, 0.0f, // Position 1
0.0f, img_h, // TexCoord 1
1.0f, -1.0f, 0.0f, // Position 2
img_w, img_h, // TexCoord 2
1.0f, 1.0f, 0.0f, // Position 3
img_w, 0.0f // TexCoord 3
};
GLushort indices[] = {0, 1, 2, 0, 2, 3};
GLsizei stride = 5 * sizeof(GLfloat); // 3 for position, 2 for texture
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
checkGlError("glClearColor");
glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
checkGlError("glClear");
if(nimg.empty())return;
glUseProgram(gProgram);
checkGlError("glUseProgram");
// Load the vertex position
glVertexAttribPointer(gvPositionHandle, 3, GL_FLOAT, GL_FALSE, stride, vVertices);
// Load the texture coordinate
glVertexAttribPointer(gvTexCoordHandle, 2, GL_FLOAT, GL_FALSE, stride, &vVertices[3]);
glEnableVertexAttribArray(gvPositionHandle);
glEnableVertexAttribArray(gvTexCoordHandle);
// Bind the texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureID);
// Set the sampler texture unit to 0
glUniform1i(gvSamplerHandle, 0);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);
#endif
//checkGlError("glVertexAttribPointer");
//glEnableVertexAttribArray(gvPositionHandle);
//checkGlError("glEnableVertexAttribArray");
//glDrawArrays(GL_TRIANGLES, 0, 3);
//checkGlError("glDrawArrays");
}
void glcamera::init(int width, int height)
{
newimage = false;
nimg = Mat();
setupGraphics(width, height);
}
void glcamera::step()
{
if (newimage && !nimg.empty())
{
textureID = createSimpleTexture2D(textureID, nimg.ptr<unsigned char> (0), nimg.cols, nimg.rows, nimg.channels());
newimage = false;
}
renderFrame();
}
#define NEAREST_POW2(x)( std::ceil(std::log(x)/0.69315) )
void glcamera::setTextureImage(const Mat& img)
{
int p = NEAREST_POW2(img.cols/2); //subsample by 2
//int sz = std::pow(2, p);
// Size size(sz, sz);
Size size(256, 256);
img_w = 1;
img_h = 1;
if (nimg.cols != size.width)
LOGI_STREAM( "using texture of size: (" << size.width << " , " << size.height << ") image size is: (" << img.cols << " , " << img.rows << ")");
nimg.create(size, img.type());
#if SUBREGION_NPO2
cv::Rect roi(0, 0, img.cols/2, img.rows/2);
cv::Mat nimg_sub = nimg(roi);
//img.copyTo(nimg_sub);
img_w = (img.cols/2)/float(sz);
img_h = (img.rows/2)/float(sz);
cv::resize(img,nimg_sub,nimg_sub.size(),0,0,CV_INTER_NN);
#else
cv::resize(img, nimg, nimg.size(), 0, 0, CV_INTER_NN);
#endif
newimage = true;
}
void glcamera::drawMatToGL(int idx, image_pool* pool)
{
Mat img = pool->getImage(idx);
if (img.empty())
return; //no image at input_idx!
setTextureImage(img);
}
glcamera::glcamera() :
newimage(false)
{
LOGI("glcamera constructor");
}
glcamera::~glcamera()
{
LOGI("glcamera destructor");
}
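
A note on the NEAREST_POW2 macro used in setTextureImage above: despite its name it returns the exponent of the next power of two, not the power itself, since 0.69315 is approximately ln(2). A minimal check:

// NEAREST_POW2(x) = ceil(ln(x)/ln(2)), i.e. the exponent p with 2^p >= x.
#include <cmath>
#include <cstdio>

int main()
{
  int x = 300;                                   // e.g. img.cols / 2
  int p = (int)std::ceil(std::log(x) / 0.69315); // p == 9
  int sz = 1 << p;                               // 512, the commented-out std::pow(2, p)
  std::printf("2^%d = %d >= %d\n", p, sz, x);
  return 0;
}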


@@ -1,44 +0,0 @@
%typemap(javaimports) glcamera "
/** a class for doing the native rendering of images.
This class renders using OpenGL ES 2.0 via the native NDK bindings,
is used by the GL2CameraViewer to do the rendering,
and is inspired by the gl2 example in the NDK samples
*/"
%javamethodmodifiers glcamera::init"
/** should be called from onSurfaceChanged by the GLSurfaceView that is using this
* as the drawing engine
* @param width the width of the surface view that this will be drawing to
* @param height the height of the surface view that this will be drawing to
*
*/
public";
%javamethodmodifiers glcamera::step"
/** should be called by GLSurfaceView.Renderer in the onDrawFrame method, as it
handles the rendering of the opengl scene, and requires that the opengl context be
valid.
*
*/
public";
%javamethodmodifiers glcamera::drawMatToGL"
/** copies an image from a pool and queues it for drawing in opengl.
* this does transformation into power of two texture sizes
* @param idx the image index to copy
* @param pool the image_pool to look up the image from
*
*/
public";
class glcamera {
public:
void init(int width, int height);
void step();
void drawMatToGL(int idx, image_pool* pool);
void clear();
};
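
The three methods documented above map onto the GLSurfaceView.Renderer lifecycle. A hedged native-side sketch of that wiring (the callback names are illustrative, not part of the project):

#include "glcamera.h"
#include "image_pool.h"

void onSurfaceChanged(glcamera& cam, int w, int h) { cam.init(w, h); }                          // surface (re)created
void onNewCameraFrame(glcamera& cam, image_pool* pool, int idx) { cam.drawMatToGL(idx, pool); } // queue latest frame
void onDrawFrame(glcamera& cam) { cam.step(); }                                                 // upload texture + render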


@@ -1,130 +0,0 @@
#include "image_pool.h"
#include "yuv420sp2rgb.h"
#include "android_logger.h"
#include <opencv2/imgproc/imgproc.hpp>
#include <cstdlib>
#include <jni.h>
#ifdef __cplusplus
extern "C"
{
#endif
JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved);
//
//JNIEXPORT jobject JNICALL Java_com_opencv_jni_opencvJNI_getBitmapBuffer(
// JNIEnv *jenv, jclass jcls, jlong jarg1, jobject jarg1_);
JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv *, jclass, jlong, jobject, jbyteArray, jint,
jint, jint, jboolean);
#ifdef __cplusplus
}
#endif
using namespace cv;
JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved)
{
JNIEnv *env;
LOGI("JNI_OnLoad called for opencv");
return JNI_VERSION_1_4;
}
JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv * env, jclass thiz, jlong ppool,
jobject _jpool, jbyteArray jbuffer, jint jidx,
jint jwidth, jint jheight, jboolean jgrey)
{
int buff_height = jheight + (jheight / 2);
Size buff_size(jwidth, buff_height);
image_pool *pool = (image_pool *)ppool;
Mat mat = pool->getYUV(jidx);
//create is smart and only copies if the buffer size is different
mat.create(buff_size, CV_8UC1);
{
uchar* buff = mat.ptr<uchar> (0);
jsize sz = env->GetArrayLength(jbuffer);
//http://elliotth.blogspot.com/2007/03/optimizing-jni-array-access.html
env->GetByteArrayRegion(jbuffer, 0, sz, (jbyte*)buff);
}
pool->addYUVMat(jidx, mat);
Mat color;
if (jgrey)
{
Mat grey = pool->getGrey(jidx);
color = grey;
}
else
{
color = pool->getImage(jidx);
pool->convertYUVtoColor(jidx, color);
}
pool->addImage(jidx, color);
}
image_pool::image_pool()
{
}
image_pool::~image_pool()
{
}
Mat image_pool::getImage(int i)
{
return imagesmap[i];
}
Mat image_pool::getGrey(int i)
{
Mat tm = yuvImagesMap[i];
if (tm.empty())
return tm;
return tm(Range(0, tm.rows * (2.0f / 3)), Range::all());
}
Mat image_pool::getYUV(int i)
{
return yuvImagesMap[i];
}
void image_pool::addYUVMat(int i, Mat mat)
{
yuvImagesMap[i] = mat;
}
void image_pool::addImage(int i, Mat mat)
{
imagesmap[i] = mat;
}
void image_pool::convertYUVtoColor(int i, cv::Mat& out)
{
Mat yuv = getYUV(i);
if (yuv.empty())
return;
int width = yuv.cols;
int height = yuv.rows * (2.0f / 3);
out.create(height, width, CV_8UC3);
const unsigned char* buff = yuv.ptr<unsigned char> (0);
unsigned char* out_buff = out.ptr<unsigned char> (0);
color_convert_common(buff, buff + width * height, width, height, out_buff, false);
}
void copyMatToBuffer(char* buffer, const cv::Mat& mat)
{
// rows * cols * step1() over-counts: step1() already spans a full row in
// elements, so the byte count of a continuous Mat is rows * step.
memcpy(buffer, mat.data, mat.rows * mat.step);
}
void copyBufferToMat(cv::Mat& mat, const char* buffer)
{
memcpy(mat.data, buffer, mat.rows * mat.step);
}
void RGB2BGR(const Mat& in, Mat& out)
{
cvtColor(in, out, CV_RGB2BGR);
}
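
The buffer arithmetic above (buff_height = jheight + jheight / 2, and getGrey taking rows * 2/3) follows from the YUV420sp/NV21 layout: a full-resolution Y plane followed by a half-height interleaved VU plane. A minimal sketch with an assumed preview size:

#include <opencv2/core/core.hpp>
#include <cstdio>

int main()
{
  const int width = 640, height = 480;         // hypothetical camera preview size
  const int buff_height = height + height / 2; // 720 rows: Y plane + VU plane
  cv::Mat yuv(buff_height, width, CV_8UC1);
  cv::Mat grey = yuv(cv::Range(0, yuv.rows * 2 / 3), cv::Range::all()); // just the Y plane
  std::printf("yuv rows: %d, grey rows: %d\n", yuv.rows, grey.rows);    // 720, 480
  return 0;
}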


@@ -1,51 +0,0 @@
%typemap(javaimports) image_pool "
/** image_pool is used for keeping track of a pool of native images. It stores images as cv::Mat's and
references them by an index. It allows one to get a pointer to an underlying mat, and handles memory deletion.*/"
%javamethodmodifiers image_pool::getImage"
/** gets a pointer to a stored image, by an index. If the index is new, returns a null pointer
* @param idx the index in the pool that is associated with a cv::Mat
* @return the pointer to a cv::Mat, null pointer if the given idx is novel
*/
public";
%javamethodmodifiers image_pool::deleteImage"
/** deletes the image from the pool
* @param idx the index in the pool that is associated with a cv::Mat
*/
public";
%javamethodmodifiers addYUVtoPool"
/** adds a YUV frame to the pool
* @param idx the index in the pool that is associated with a cv::Mat
*/
public";
%include "various.i"
%apply (char* BYTE) { (char *data)}; //byte[] to char*
%native (addYUVtoPool) void addYUVtoPool(image_pool* pool, char* data,int idx, int width, int height, bool grey);
%feature("director") image_pool;
class image_pool {
public:
Mat getGrey(int i);
Mat getImage(int i);
void addImage(int i, Mat mat);
void convertYUVtoColor(int i, Mat& out);
};
void RGB2BGR(const Mat& in, Mat& out);


@@ -1,54 +0,0 @@
/*
* Processor.h
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#ifndef PROCESSOR_H_
#define PROCESSOR_H_
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <vector>
#include "image_pool.h"
#define DETECT_FAST 0
#define DETECT_STAR 1
#define DETECT_SURF 2
class Calibration
{
public:
Calibration();
virtual ~Calibration();
bool detectAndDrawChessboard(int idx, image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
cv::Size patternsize;
private:
std::vector<cv::KeyPoint> keypoints;
std::vector<std::vector<cv::Point2f> > imagepoints;
cv::Mat K;
cv::Mat distortion;
cv::Size imgsize;
};
#endif /* PROCESSOR_H_ */


@@ -1,19 +0,0 @@
#pragma once
#include <iostream>
#include <sstream>
#define LOG_TAG "libopencv"
#if ANDROID
#include <android/log.h>
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
#else
#include <cstdio>
#define LOGI(...) do { printf("info:%s:", LOG_TAG); fprintf(stdout, __VA_ARGS__); printf("\n"); } while (0)      /* do-while keeps the macro a single statement */
#define LOGE(...) do { fprintf(stderr, "error:%s:", LOG_TAG); fprintf(stderr, __VA_ARGS__); fprintf(stderr, "\n"); } while (0) /* everything on stderr */
#endif
#ifndef LOGI_STREAM
#define LOGI_STREAM(x) {std::stringstream ss; ss << x; LOGI("%s",ss.str().c_str());}
#endif
#define LOGE_STREAM(x) {std::stringstream ss; ss << x; LOGE("%s",ss.str().c_str());}
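
For reference, a minimal usage sketch of the macros above; on desktop builds the printf fallbacks fire, on Android the messages go to logcat under the "libopencv" tag:

#include "android_logger.h"

void report(int nframes)
{
  LOGI("processed %d frames", nframes);             // printf-style
  LOGI_STREAM("pool holds " << nframes << " mats"); // iostream-style
  if (nframes < 0)
    LOGE("bad frame count: %d", nframes);
}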


@@ -1,48 +0,0 @@
#ifndef GLCAMERA_H_
#define GLCAMERA_H_
#include <opencv2/core/core.hpp>
#ifdef __ANDROID__
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#else
#include <GL/gl.h>
#include <GL/glu.h>
#endif
#include "image_pool.h"
class glcamera
{
public:
glcamera();
~glcamera();
void init(int width, int height);
void step();
void drawMatToGL(int idx, image_pool* pool);
void drawMatToGL(const cv::Mat& img);
void setTextureImage(const cv::Mat& img);
void clear();
private:
GLuint createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels);
GLuint loadShader(GLenum shaderType, const char* pSource);
GLuint
createProgram(const char* pVertexSource, const char* pFragmentSource);
bool setupGraphics(int w, int h);
void renderFrame();
cv::Mat nimg;
bool newimage;
GLuint textureID;
GLuint gProgram;
GLuint gvPositionHandle;
GLuint gvTexCoordHandle;
GLuint gvSamplerHandle;
float img_w, img_h;
};
#endif


@@ -1,51 +0,0 @@
#ifndef IMAGE_POOL_H_ANDROID_KDJFKJ
#define IMAGE_POOL_H_ANDROID_KDJFKJ
#include <opencv2/core/core.hpp>
#include <map>
class image_pool
{
public:
image_pool();
~image_pool();
cv::Mat getImage(int i);
cv::Mat getGrey(int i);
cv::Mat getYUV(int i);
int getCount()
{
return imagesmap.size();
}
/** Adds a mat at the given index - will not do a deep copy, just images[i] = mat
*
*/
void addImage(int i, cv::Mat mat);
/** this function stores the given matrix in the yuvImagesMap. Also,
* after this call getGrey will work, as the grey image is just the Y
* plane, i.e. the top two-thirds of the YUV mat's rows.
*
* \param i index to store yuv image at
* \param mat the yuv matrix to store
*/
void addYUVMat(int i, cv::Mat mat);
void convertYUVtoColor(int i, cv::Mat& out);
// int addYUV(uchar* buffer, int size, int width, int height, bool grey,int idx);
//
// void getBitmap(int * outintarray, int size, int idx);
private:
std::map<int, cv::Mat> imagesmap;
std::map<int, cv::Mat> yuvImagesMap;
};
void copyMatToBuffer(char* buffer, const cv::Mat& mat);
void copyBufferToMat(cv::Mat& mat, const char* buffer);
void RGB2BGR(const cv::Mat& in, cv::Mat& out);
#endif


@@ -1,147 +0,0 @@
/* YUV-> RGB conversion code.
*
* Copyright (C) 2008-9 Robin Watts (robin@wss.co.uk) for Pinknoise
* Productions Ltd.
*
* Licensed under the GNU GPL. If you need it under another license, contact
* me and ask.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*/
#ifndef YUV2RGB_H
#define YUV2RGB_H
/* Define these to something appropriate in your build */
typedef unsigned int uint32_t;
typedef signed int int32_t;
typedef unsigned short uint16_t;
typedef unsigned char uint8_t;
extern const uint32_t yuv2rgb565_table[];
extern const uint32_t yuv2bgr565_table[];
void yuv420_2_rgb565(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv422_2_rgb565(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv444_2_rgb565(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv420_2_rgb888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv422_2_rgb888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv444_2_rgb888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv420_2_rgb8888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv422_2_rgb8888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv444_2_rgb8888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
#endif /* YUV2RGB_H */
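
A hedged usage sketch for the planar 4:2:0 to RGB565 entry point declared above. That the spans are byte strides, that dither=0 disables dithering, and the buffer sizes below are all assumptions about this API, not documented facts:

#include "yuv2rgb.h"

// Assumes: y is w*h bytes, u and v are (w/2)*(h/2) bytes each, dst is w*h*2 bytes.
void convert420(const uint8_t* y, const uint8_t* u, const uint8_t* v,
                int w, int h, uint8_t* dst)
{
  yuv420_2_rgb565(dst, y, u, v, w, h,
                  /*y_span=*/w, /*uv_span=*/w / 2, /*dst_span=*/w * 2,
                  yuv2rgb565_table, /*dither=*/0);
}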


@@ -1,17 +0,0 @@
#ifndef YUV420SP2RGB_H
#define YUV420SP2RGB_H
#ifdef __cplusplus
extern "C" {
#endif
void color_convert_common(
const unsigned char *pY, const unsigned char *pUV,
int width, int height, unsigned char *buffer,
int grey);
#ifdef __cplusplus
}
#endif
#endif
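
The project ships two implementations of this prototype (a table-driven C version and a NEON version). For orientation, a hedged scalar sketch of the conversion it performs; the fixed-point coefficients below are the common /256 approximation chosen here for illustration, not math copied from the deleted sources:

// Scalar reference for YUV420sp/NV21 (full-res Y plane, then interleaved V,U
// at quarter resolution) to packed RGB.
#include <algorithm>

static unsigned char clamp8(int v) { return (unsigned char)std::min(255, std::max(0, v)); }

void color_convert_common_ref(const unsigned char* pY, const unsigned char* pUV,
                              int width, int height, unsigned char* out, int grey)
{
  for (int y = 0; y < height; ++y)
    for (int x = 0; x < width; ++x)
    {
      int Y = pY[y * width + x];
      unsigned char* p = out + (y * width + x) * 3;
      if (grey) { p[0] = p[1] = p[2] = (unsigned char)Y; continue; }
      int V = pUV[(y / 2) * width + (x & ~1)] - 128;     // V comes first in NV21
      int U = pUV[(y / 2) * width + (x & ~1) + 1] - 128;
      p[0] = clamp8(Y + ((359 * V) >> 8));               // R ~= Y + 1.402 V
      p[1] = clamp8(Y - ((88 * U + 183 * V) >> 8));      // G ~= Y - 0.344 U - 0.714 V
      p[2] = clamp8(Y + ((454 * U) >> 8));               // B ~= Y + 1.772 U
    }
}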


@@ -1,36 +0,0 @@
/*
* int *INTARRAY typemaps.
* These are input typemaps for mapping a Java int[] array to a C int array.
* Note that as a Java array is used and thus passed by reference, the C routine
* can return data to Java via the parameter.
*
* Example usage wrapping:
* void foo(int *INTARRAY, int INTARRAYSIZE);
*
* Java usage:
* int b[] = new int[20];
* modulename.foo(b);
*/
%typemap(in) (int *INTARRAY, int INTARRAYSIZE) {
$1 = (int *) JCALL2(GetIntArrayElements, jenv, $input, 0);
jsize sz = JCALL1(GetArrayLength, jenv, $input);
$2 = (int)sz;
}
%typemap(argout) (int *INTARRAY, int INTARRAYSIZE) {
JCALL3(ReleaseIntArrayElements, jenv, $input, (jint *) $1, 0);
}
/* Prevent default freearg typemap from being used */
%typemap(freearg) (int *INTARRAY, int INTARRAYSIZE) ""
%typemap(jni) (int *INTARRAY, int INTARRAYSIZE) "jintArray"
%typemap(jtype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
%typemap(jstype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
%typemap(javain) (int *INTARRAY, int INTARRAYSIZE) "$javainput"


@@ -1,802 +0,0 @@
/* YUV-> RGB conversion code.
*
* Copyright (C) 2008-9 Robin Watts (robin@wss.co.uk) for Pinknoise
* Productions Ltd.
*
* Licensed under the GNU GPL. If you need it under another license, contact
* me and ask.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/* For BREW or Symbian you might need to make this static const rather than
* just const, and introduce a function to get the address. */
#include "yuv2rgb.h"
const uint32_t yuv2rgb565_table[256*3] =
{
/* y_table */
0x7FFFFFEDU,
0x7FFFFFEFU,
0x7FFFFFF0U,
0x7FFFFFF1U,
0x7FFFFFF2U,
0x7FFFFFF3U,
0x7FFFFFF4U,
0x7FFFFFF6U,
0x7FFFFFF7U,
0x7FFFFFF8U,
0x7FFFFFF9U,
0x7FFFFFFAU,
0x7FFFFFFBU,
0x7FFFFFFDU,
0x7FFFFFFEU,
0x7FFFFFFFU,
0x80000000U,
0x80400801U,
0x80A01002U,
0x80E01803U,
0x81202805U,
0x81803006U,
0x81C03807U,
0x82004008U,
0x82604809U,
0x82A0500AU,
0x82E0600CU,
0x8340680DU,
0x8380700EU,
0x83C0780FU,
0x84208010U,
0x84608811U,
0x84A09813U,
0x8500A014U,
0x8540A815U,
0x8580B016U,
0x85E0B817U,
0x8620C018U,
0x8660D01AU,
0x86C0D81BU,
0x8700E01CU,
0x8740E81DU,
0x87A0F01EU,
0x87E0F81FU,
0x88210821U,
0x88811022U,
0x88C11823U,
0x89012024U,
0x89412825U,
0x89A13026U,
0x89E14028U,
0x8A214829U,
0x8A81502AU,
0x8AC1582BU,
0x8B01602CU,
0x8B61682DU,
0x8BA1782FU,
0x8BE18030U,
0x8C418831U,
0x8C819032U,
0x8CC19833U,
0x8D21A034U,
0x8D61B036U,
0x8DA1B837U,
0x8E01C038U,
0x8E41C839U,
0x8E81D03AU,
0x8EE1D83BU,
0x8F21E83DU,
0x8F61F03EU,
0x8FC1F83FU,
0x90020040U,
0x90420841U,
0x90A21042U,
0x90E22044U,
0x91222845U,
0x91823046U,
0x91C23847U,
0x92024048U,
0x92624849U,
0x92A2504AU,
0x92E2604CU,
0x9342684DU,
0x9382704EU,
0x93C2784FU,
0x94228050U,
0x94628851U,
0x94A29853U,
0x9502A054U,
0x9542A855U,
0x9582B056U,
0x95E2B857U,
0x9622C058U,
0x9662D05AU,
0x96C2D85BU,
0x9702E05CU,
0x9742E85DU,
0x97A2F05EU,
0x97E2F85FU,
0x98230861U,
0x98831062U,
0x98C31863U,
0x99032064U,
0x99632865U,
0x99A33066U,
0x99E34068U,
0x9A434869U,
0x9A83506AU,
0x9AC3586BU,
0x9B23606CU,
0x9B63686DU,
0x9BA3786FU,
0x9BE38070U,
0x9C438871U,
0x9C839072U,
0x9CC39873U,
0x9D23A074U,
0x9D63B076U,
0x9DA3B877U,
0x9E03C078U,
0x9E43C879U,
0x9E83D07AU,
0x9EE3D87BU,
0x9F23E87DU,
0x9F63F07EU,
0x9FC3F87FU,
0xA0040080U,
0xA0440881U,
0xA0A41082U,
0xA0E42084U,
0xA1242885U,
0xA1843086U,
0xA1C43887U,
0xA2044088U,
0xA2644889U,
0xA2A4588BU,
0xA2E4608CU,
0xA344688DU,
0xA384708EU,
0xA3C4788FU,
0xA4248090U,
0xA4649092U,
0xA4A49893U,
0xA504A094U,
0xA544A895U,
0xA584B096U,
0xA5E4B897U,
0xA624C098U,
0xA664D09AU,
0xA6C4D89BU,
0xA704E09CU,
0xA744E89DU,
0xA7A4F09EU,
0xA7E4F89FU,
0xA82508A1U,
0xA88510A2U,
0xA8C518A3U,
0xA90520A4U,
0xA96528A5U,
0xA9A530A6U,
0xA9E540A8U,
0xAA4548A9U,
0xAA8550AAU,
0xAAC558ABU,
0xAB2560ACU,
0xAB6568ADU,
0xABA578AFU,
0xAC0580B0U,
0xAC4588B1U,
0xAC8590B2U,
0xACE598B3U,
0xAD25A0B4U,
0xAD65B0B6U,
0xADA5B8B7U,
0xAE05C0B8U,
0xAE45C8B9U,
0xAE85D0BAU,
0xAEE5D8BBU,
0xAF25E8BDU,
0xAF65F0BEU,
0xAFC5F8BFU,
0xB00600C0U,
0xB04608C1U,
0xB0A610C2U,
0xB0E620C4U,
0xB12628C5U,
0xB18630C6U,
0xB1C638C7U,
0xB20640C8U,
0xB26648C9U,
0xB2A658CBU,
0xB2E660CCU,
0xB34668CDU,
0xB38670CEU,
0xB3C678CFU,
0xB42680D0U,
0xB46690D2U,
0xB4A698D3U,
0xB506A0D4U,
0xB546A8D5U,
0xB586B0D6U,
0xB5E6B8D7U,
0xB626C8D9U,
0xB666D0DAU,
0xB6C6D8DBU,
0xB706E0DCU,
0xB746E8DDU,
0xB7A6F0DEU,
0xB7E6F8DFU,
0xB82708E1U,
0xB88710E2U,
0xB8C718E3U,
0xB90720E4U,
0xB96728E5U,
0xB9A730E6U,
0xB9E740E8U,
0xBA4748E9U,
0xBA8750EAU,
0xBAC758EBU,
0xBB2760ECU,
0xBB6768EDU,
0xBBA778EFU,
0xBC0780F0U,
0xBC4788F1U,
0xBC8790F2U,
0xBCE798F3U,
0xBD27A0F4U,
0xBD67B0F6U,
0xBDC7B8F7U,
0xBE07C0F8U,
0xBE47C8F9U,
0xBEA7D0FAU,
0xBEE7D8FBU,
0xBF27E8FDU,
0xBF87F0FEU,
0xBFC7F8FFU,
0xC0080100U,
0xC0480901U,
0xC0A81102U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
0xC0E82104U,
/* u_table */
0x0C400103U,
0x0C200105U,
0x0C200107U,
0x0C000109U,
0x0BE0010BU,
0x0BC0010DU,
0x0BA0010FU,
0x0BA00111U,
0x0B800113U,
0x0B600115U,
0x0B400117U,
0x0B400119U,
0x0B20011BU,
0x0B00011DU,
0x0AE0011FU,
0x0AE00121U,
0x0AC00123U,
0x0AA00125U,
0x0A800127U,
0x0A600129U,
0x0A60012BU,
0x0A40012DU,
0x0A20012FU,
0x0A000131U,
0x0A000132U,
0x09E00134U,
0x09C00136U,
0x09A00138U,
0x09A0013AU,
0x0980013CU,
0x0960013EU,
0x09400140U,
0x09400142U,
0x09200144U,
0x09000146U,
0x08E00148U,
0x08C0014AU,
0x08C0014CU,
0x08A0014EU,
0x08800150U,
0x08600152U,
0x08600154U,
0x08400156U,
0x08200158U,
0x0800015AU,
0x0800015CU,
0x07E0015EU,
0x07C00160U,
0x07A00162U,
0x07A00164U,
0x07800166U,
0x07600168U,
0x0740016AU,
0x0720016CU,
0x0720016EU,
0x07000170U,
0x06E00172U,
0x06C00174U,
0x06C00176U,
0x06A00178U,
0x0680017AU,
0x0660017CU,
0x0660017EU,
0x06400180U,
0x06200182U,
0x06000184U,
0x05E00185U,
0x05E00187U,
0x05C00189U,
0x05A0018BU,
0x0580018DU,
0x0580018FU,
0x05600191U,
0x05400193U,
0x05200195U,
0x05200197U,
0x05000199U,
0x04E0019BU,
0x04C0019DU,
0x04C0019FU,
0x04A001A1U,
0x048001A3U,
0x046001A5U,
0x044001A7U,
0x044001A9U,
0x042001ABU,
0x040001ADU,
0x03E001AFU,
0x03E001B1U,
0x03C001B3U,
0x03A001B5U,
0x038001B7U,
0x038001B9U,
0x036001BBU,
0x034001BDU,
0x032001BFU,
0x032001C1U,
0x030001C3U,
0x02E001C5U,
0x02C001C7U,
0x02A001C9U,
0x02A001CBU,
0x028001CDU,
0x026001CFU,
0x024001D1U,
0x024001D3U,
0x022001D5U,
0x020001D7U,
0x01E001D8U,
0x01E001DAU,
0x01C001DCU,
0x01A001DEU,
0x018001E0U,
0x016001E2U,
0x016001E4U,
0x014001E6U,
0x012001E8U,
0x010001EAU,
0x010001ECU,
0x00E001EEU,
0x00C001F0U,
0x00A001F2U,
0x00A001F4U,
0x008001F6U,
0x006001F8U,
0x004001FAU,
0x004001FCU,
0x002001FEU,
0x00000200U,
0xFFE00202U,
0xFFC00204U,
0xFFC00206U,
0xFFA00208U,
0xFF80020AU,
0xFF60020CU,
0xFF60020EU,
0xFF400210U,
0xFF200212U,
0xFF000214U,
0xFF000216U,
0xFEE00218U,
0xFEC0021AU,
0xFEA0021CU,
0xFEA0021EU,
0xFE800220U,
0xFE600222U,
0xFE400224U,
0xFE200226U,
0xFE200228U,
0xFE000229U,
0xFDE0022BU,
0xFDC0022DU,
0xFDC0022FU,
0xFDA00231U,
0xFD800233U,
0xFD600235U,
0xFD600237U,
0xFD400239U,
0xFD20023BU,
0xFD00023DU,
0xFCE0023FU,
0xFCE00241U,
0xFCC00243U,
0xFCA00245U,
0xFC800247U,
0xFC800249U,
0xFC60024BU,
0xFC40024DU,
0xFC20024FU,
0xFC200251U,
0xFC000253U,
0xFBE00255U,
0xFBC00257U,
0xFBC00259U,
0xFBA0025BU,
0xFB80025DU,
0xFB60025FU,
0xFB400261U,
0xFB400263U,
0xFB200265U,
0xFB000267U,
0xFAE00269U,
0xFAE0026BU,
0xFAC0026DU,
0xFAA0026FU,
0xFA800271U,
0xFA800273U,
0xFA600275U,
0xFA400277U,
0xFA200279U,
0xFA20027BU,
0xFA00027CU,
0xF9E0027EU,
0xF9C00280U,
0xF9A00282U,
0xF9A00284U,
0xF9800286U,
0xF9600288U,
0xF940028AU,
0xF940028CU,
0xF920028EU,
0xF9000290U,
0xF8E00292U,
0xF8E00294U,
0xF8C00296U,
0xF8A00298U,
0xF880029AU,
0xF860029CU,
0xF860029EU,
0xF84002A0U,
0xF82002A2U,
0xF80002A4U,
0xF80002A6U,
0xF7E002A8U,
0xF7C002AAU,
0xF7A002ACU,
0xF7A002AEU,
0xF78002B0U,
0xF76002B2U,
0xF74002B4U,
0xF74002B6U,
0xF72002B8U,
0xF70002BAU,
0xF6E002BCU,
0xF6C002BEU,
0xF6C002C0U,
0xF6A002C2U,
0xF68002C4U,
0xF66002C6U,
0xF66002C8U,
0xF64002CAU,
0xF62002CCU,
0xF60002CEU,
0xF60002CFU,
0xF5E002D1U,
0xF5C002D3U,
0xF5A002D5U,
0xF5A002D7U,
0xF58002D9U,
0xF56002DBU,
0xF54002DDU,
0xF52002DFU,
0xF52002E1U,
0xF50002E3U,
0xF4E002E5U,
0xF4C002E7U,
0xF4C002E9U,
0xF4A002EBU,
0xF48002EDU,
0xF46002EFU,
0xF46002F1U,
0xF44002F3U,
0xF42002F5U,
0xF40002F7U,
0xF3E002F9U,
0xF3E002FBU,
/* v_table */
0x1A09A000U,
0x19E9A800U,
0x19A9B800U,
0x1969C800U,
0x1949D000U,
0x1909E000U,
0x18C9E800U,
0x18A9F800U,
0x186A0000U,
0x182A1000U,
0x180A2000U,
0x17CA2800U,
0x17AA3800U,
0x176A4000U,
0x172A5000U,
0x170A6000U,
0x16CA6800U,
0x168A7800U,
0x166A8000U,
0x162A9000U,
0x160AA000U,
0x15CAA800U,
0x158AB800U,
0x156AC000U,
0x152AD000U,
0x14EAE000U,
0x14CAE800U,
0x148AF800U,
0x146B0000U,
0x142B1000U,
0x13EB2000U,
0x13CB2800U,
0x138B3800U,
0x134B4000U,
0x132B5000U,
0x12EB6000U,
0x12CB6800U,
0x128B7800U,
0x124B8000U,
0x122B9000U,
0x11EBA000U,
0x11ABA800U,
0x118BB800U,
0x114BC000U,
0x112BD000U,
0x10EBE000U,
0x10ABE800U,
0x108BF800U,
0x104C0000U,
0x100C1000U,
0x0FEC2000U,
0x0FAC2800U,
0x0F8C3800U,
0x0F4C4000U,
0x0F0C5000U,
0x0EEC5800U,
0x0EAC6800U,
0x0E6C7800U,
0x0E4C8000U,
0x0E0C9000U,
0x0DEC9800U,
0x0DACA800U,
0x0D6CB800U,
0x0D4CC000U,
0x0D0CD000U,
0x0CCCD800U,
0x0CACE800U,
0x0C6CF800U,
0x0C4D0000U,
0x0C0D1000U,
0x0BCD1800U,
0x0BAD2800U,
0x0B6D3800U,
0x0B2D4000U,
0x0B0D5000U,
0x0ACD5800U,
0x0AAD6800U,
0x0A6D7800U,
0x0A2D8000U,
0x0A0D9000U,
0x09CD9800U,
0x098DA800U,
0x096DB800U,
0x092DC000U,
0x090DD000U,
0x08CDD800U,
0x088DE800U,
0x086DF800U,
0x082E0000U,
0x07EE1000U,
0x07CE1800U,
0x078E2800U,
0x076E3800U,
0x072E4000U,
0x06EE5000U,
0x06CE5800U,
0x068E6800U,
0x064E7800U,
0x062E8000U,
0x05EE9000U,
0x05CE9800U,
0x058EA800U,
0x054EB800U,
0x052EC000U,
0x04EED000U,
0x04AED800U,
0x048EE800U,
0x044EF000U,
0x042F0000U,
0x03EF1000U,
0x03AF1800U,
0x038F2800U,
0x034F3000U,
0x030F4000U,
0x02EF5000U,
0x02AF5800U,
0x028F6800U,
0x024F7000U,
0x020F8000U,
0x01EF9000U,
0x01AF9800U,
0x016FA800U,
0x014FB000U,
0x010FC000U,
0x00EFD000U,
0x00AFD800U,
0x006FE800U,
0x004FF000U,
0x00100000U,
0xFFD01000U,
0xFFB01800U,
0xFF702800U,
0xFF303000U,
0xFF104000U,
0xFED05000U,
0xFEB05800U,
0xFE706800U,
0xFE307000U,
0xFE108000U,
0xFDD09000U,
0xFD909800U,
0xFD70A800U,
0xFD30B000U,
0xFD10C000U,
0xFCD0D000U,
0xFC90D800U,
0xFC70E800U,
0xFC30F000U,
0xFBF10000U,
0xFBD11000U,
0xFB911800U,
0xFB712800U,
0xFB313000U,
0xFAF14000U,
0xFAD14800U,
0xFA915800U,
0xFA516800U,
0xFA317000U,
0xF9F18000U,
0xF9D18800U,
0xF9919800U,
0xF951A800U,
0xF931B000U,
0xF8F1C000U,
0xF8B1C800U,
0xF891D800U,
0xF851E800U,
0xF831F000U,
0xF7F20000U,
0xF7B20800U,
0xF7921800U,
0xF7522800U,
0xF7123000U,
0xF6F24000U,
0xF6B24800U,
0xF6925800U,
0xF6526800U,
0xF6127000U,
0xF5F28000U,
0xF5B28800U,
0xF5729800U,
0xF552A800U,
0xF512B000U,
0xF4F2C000U,
0xF4B2C800U,
0xF472D800U,
0xF452E800U,
0xF412F000U,
0xF3D30000U,
0xF3B30800U,
0xF3731800U,
0xF3532800U,
0xF3133000U,
0xF2D34000U,
0xF2B34800U,
0xF2735800U,
0xF2336800U,
0xF2137000U,
0xF1D38000U,
0xF1B38800U,
0xF1739800U,
0xF133A800U,
0xF113B000U,
0xF0D3C000U,
0xF093C800U,
0xF073D800U,
0xF033E000U,
0xF013F000U,
0xEFD40000U,
0xEF940800U,
0xEF741800U,
0xEF342000U,
0xEEF43000U,
0xEED44000U,
0xEE944800U,
0xEE745800U,
0xEE346000U,
0xEDF47000U,
0xEDD48000U,
0xED948800U,
0xED549800U,
0xED34A000U,
0xECF4B000U,
0xECD4C000U,
0xEC94C800U,
0xEC54D800U,
0xEC34E000U,
0xEBF4F000U,
0xEBB50000U,
0xEB950800U,
0xEB551800U,
0xEB352000U,
0xEAF53000U,
0xEAB54000U,
0xEA954800U,
0xEA555800U,
0xEA156000U,
0xE9F57000U,
0xE9B58000U,
0xE9958800U,
0xE9559800U,
0xE915A000U,
0xE8F5B000U,
0xE8B5C000U,
0xE875C800U,
0xE855D800U,
0xE815E000U,
0xE7F5F000U,
0xE7B60000U,
0xE7760800U,
0xE7561800U,
0xE7162000U,
0xE6D63000U,
0xE6B64000U,
0xE6764800U,
0xE6365800U
};
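/* End of the combined lookup table. The three 256-entry segments sit back
 * to back: y_table at offset 0, u_table at offset 256, and the v_table
 * above at offset 512; the consumers in this commit index it as
 * tables[Y], tables[256 + U] and tables[512 + V]. */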

View File

@ -1,201 +0,0 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include "yuv420sp2rgb.h"
#include <arm_neon.h>
#include <stdlib.h>
#include <string.h> /* memcpy */
/* this source file should only be compiled by Android.mk when targeting
* the armeabi-v7a ABI, and should be built in NEON mode
*/
void fir_filter_neon_intrinsics(short *output, const short* input, const short* kernel, int width, int kernelSize)
{
#if 1
int nn, offset = -kernelSize / 2;
for (nn = 0; nn < width; nn++)
{
int mm, sum = 0;
int32x4_t sum_vec = vdupq_n_s32(0);
for (mm = 0; mm < kernelSize / 4; mm++)
{
int16x4_t kernel_vec = vld1_s16(kernel + mm * 4);
int16x4_t input_vec = vld1_s16(input + (nn + offset + mm * 4));
sum_vec = vmlal_s16(sum_vec, kernel_vec, input_vec);
}
sum += vgetq_lane_s32(sum_vec, 0);
sum += vgetq_lane_s32(sum_vec, 1);
sum += vgetq_lane_s32(sum_vec, 2);
sum += vgetq_lane_s32(sum_vec, 3);
if (kernelSize & 3)
{
for (mm = kernelSize - (kernelSize & 3); mm < kernelSize; mm++)
sum += kernel[mm] * input[nn + offset + mm];
}
output[nn] = (short)((sum + 0x8000) >> 16);
}
#else /* for comparison purposes only */
int nn, offset = -kernelSize/2;
for (nn = 0; nn < width; nn++)
{
int sum = 0;
int mm;
for (mm = 0; mm < kernelSize; mm++)
{
sum += kernel[mm]*input[nn+offset+mm];
}
output[nn] = (short)((sum + 0x8000) >> 16);
}
#endif
}
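/* Usage sketch (illustrative names and sizes, not from the original
 * project): a 5-tap box filter with Q16 taps over a 12-sample window.
 * Since offset = -kernelSize/2 = -2, the input pointer must allow reads
 * from input[-2] through input[width - 1 + 2].
 *
 *   short in[16] = {0}, out[12];
 *   short taps[5] = {13107, 13107, 13107, 13107, 13107};  // ~0.2 in Q16
 *   fir_filter_neon_intrinsics(out, in + 2, taps, 12, 5);
 */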
/*
YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
U/V plane containing 8 bit 2x2 subsampled chroma samples.
except the interleave order of U and V is reversed.
H V
Y Sample Period 1 1
U (Cb) Sample Period 2 2
V (Cr) Sample Period 2 2
*/
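/* Example: a 4x4 frame occupies 16 Y bytes followed by 8 interleaved
 * chroma bytes, one V,U pair per 2x2 pixel block (V first, per the
 * reversed interleave noted above). */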
/*
size of a char:
find . -name limits.h -exec grep CHAR_BIT {} \;
*/
#ifndef max
#define max(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a > _b ? _a : _b; })
#define min(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a < _b ? _a : _b; })
#endif
#define bytes_per_pixel 2
#define LOAD_Y(i,j) (pY + i * width + j)
#define LOAD_V(i,j) (pUV + (i / 2) * width + bytes_per_pixel * (j / 2))
#define LOAD_U(i,j) (pUV + (i / 2) * width + bytes_per_pixel * (j / 2)+1)
const uint8_t ZEROS[8] = {220, 220, 220, 220, 220, 220, 220, 220}; /* upper clamp threshold for Y, despite the name */
const uint8_t Y_SUBS[8] = {16, 16, 16, 16, 16, 16, 16, 16};
const uint8_t UV_SUBS[8] = {128, 128, 128, 128, 128, 128, 128, 128};
const uint32_t UV_MULS[] = {833, 400, 833, 400};
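/* The constants used below are the usual YUV->RGB coefficients in Q10
 * fixed point: 1192, 1634, 2066, 833 and 400 are 1.164, 1.596, 2.018,
 * 0.813 and 0.391 scaled by 1024 and rounded. The >>10 shifts at the end
 * of the pipeline undo this scaling. */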
void color_convert_common(unsigned char *pY, unsigned char *pUV, int width, int height, unsigned char *buffer, int grey)
{
int i, j;
int nR, nG, nB;
int nY, nU, nV;
unsigned char *out = buffer;
int offset = 0;
uint8x8_t Y_SUBvec = vld1_u8(Y_SUBS);
uint8x8_t UV_SUBvec = vld1_u8(UV_SUBS); // v,u,v,u v,u,v,u
uint32x4_t UV_MULSvec = vld1q_u32(UV_MULS);
uint8x8_t ZEROSvec =vld1_u8(ZEROS);
uint32_t UVvec_int[8];
if (grey)
{
memcpy(out, pY, width * height * sizeof(unsigned char));
}
else
// YUV 4:2:0
for (i = 0; i < height; i++)
{
for (j = 0; j < width; j += 8)
{
// nY = *(pY + i * width + j);
// nV = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2));
// nU = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2) + 1);
uint8x8_t nYvec = vld1_u8(LOAD_Y(i,j));
uint8x8_t nUVvec = vld1_u8(LOAD_V(i,j)); // v,u,v,u v,u,v,u
nYvec = vand_u8(nYvec, vcle_u8(nYvec, ZEROSvec)); /* vcle yields all-ones lanes, so vand (not vmul) masks off Y values above 220 */
// Yuv Convert
// nY -= 16;
// nU -= 128;
// nV -= 128;
// nYvec = vsub_u8(nYvec, Y_SUBvec);
// nUVvec = vsub_u8(nUVvec, UV_SUBvec);
uint16x8_t nYvec16 = vmovl_u8(vsub_u8(nYvec, Y_SUBvec));
uint16x8_t nUVvec16 = vmovl_u8(vsub_u8(nUVvec, UV_SUBvec)); /* was nYvec: the chroma vector, not the luma vector, must be re-centred */
uint16x4_t Y_low4 = vget_low_u16(nYvec16);
uint16x4_t Y_high4 = vget_high_u16(nYvec16);
uint16x4_t UV_low4 = vget_low_u16(nUVvec16);
uint16x4_t UV_high4 = vget_high_u16(nUVvec16);
uint32x4_t UV_low4_int = vmovl_u16(UV_low4);
uint32x4_t UV_high4_int = vmovl_u16(UV_high4);
uint32x4_t Y_low4_int = vmull_n_u16(Y_low4, 1192);
uint32x4_t Y_high4_int = vmull_n_u16(Y_high4, 1192);
uint32x4x2_t UV_uzp = vuzpq_u32(UV_low4_int, UV_high4_int);
uint32x2_t Vl = vget_low_u32(UV_uzp.val[0]);// vld1_u32(UVvec_int);
uint32x2_t Vh = vget_high_u32(UV_uzp.val[0]);//vld1_u32(UVvec_int + 2);
uint32x2x2_t Vll_ = vzip_u32(Vl, Vl);
uint32x4_t* Vll = (uint32x4_t*)(&Vll_);
uint32x2x2_t Vhh_ = vzip_u32(Vh, Vh);
uint32x4_t* Vhh = (uint32x4_t*)(&Vhh_);
uint32x2_t Ul = vget_low_u32(UV_uzp.val[1]);
uint32x2_t Uh = vget_high_u32(UV_uzp.val[1]);
uint32x2x2_t Ull_ = vzip_u32(Ul, Ul);
uint32x4_t* Ull = (uint32x4_t*)(&Ull_);
uint32x2x2_t Uhh_ = vzip_u32(Uh, Uh);
uint32x4_t* Uhh = (uint32x4_t*)(&Uhh_);
uint32x4_t B_int_low = vmlaq_n_u32(Y_low4_int, *Ull, 2066); //multiply by scalar accum
uint32x4_t B_int_high = vmlaq_n_u32(Y_high4_int, *Uhh, 2066); //multiply by scalar accum
uint32x4_t G_int_low = vsubq_u32(Y_low4_int, vmlaq_n_u32(vmulq_n_u32(*Vll, 833), *Ull, 400));
uint32x4_t G_int_high = vsubq_u32(Y_high4_int, vmlaq_n_u32(vmulq_n_u32(*Vhh, 833), *Uhh, 400));
uint32x4_t R_int_low = vmlaq_n_u32(Y_low4_int, *Vll, 1634); //multiply by scalar accum
uint32x4_t R_int_high = vmlaq_n_u32(Y_high4_int, *Vhh, 1634); //multiply by scalar accum
B_int_low = vshrq_n_u32 (B_int_low, 10);
B_int_high = vshrq_n_u32 (B_int_high, 10);
G_int_low = vshrq_n_u32 (G_int_low, 10);
G_int_high = vshrq_n_u32 (G_int_high, 10);
R_int_low = vshrq_n_u32 (R_int_low, 10);
R_int_high = vshrq_n_u32 (R_int_high, 10);
uint8x8x3_t RGB;
RGB.val[0] = vmovn_u16(vcombine_u16(vqmovn_u32 (R_int_low),vqmovn_u32 (R_int_high)));
RGB.val[1] = vmovn_u16(vcombine_u16(vqmovn_u32 (G_int_low),vqmovn_u32 (G_int_high)));
RGB.val[2] = vmovn_u16(vcombine_u16(vqmovn_u32 (B_int_low),vqmovn_u32 (B_int_high)));
vst3_u8 (out+i*width*3 + j*3, RGB);
}
}
}

View File

@ -1,379 +0,0 @@
; YUV-> RGB conversion code Copyright (C) 2008 Robin Watts (robin@wss.co.uk).
;
; Licensed under the GPL. If you need it under another license, contact me
; and ask.
;
; This program is free software ; you can redistribute it and/or modify
; it under the terms of the GNU General Public License as published by
; the Free Software Foundation ; either version 2 of the License, or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY ; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
; You should have received a copy of the GNU General Public License
; along with this program ; if not, write to the Free Software
; Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
;
;
; The algorithm used here is based heavily on one created by Sophie Wilson
; of Acorn/e-14/Broadcomm. Many thanks.
;
; Additional tweaks (in the fast fixup code) are from Paul Gardiner.
;
; The old implementation of YUV -> RGB did:
;
; R = CLAMP((Y-16)*1.164 + 1.596*V)
; G = CLAMP((Y-16)*1.164 - 0.391*U - 0.813*V)
; B = CLAMP((Y-16)*1.164 + 2.018*U )
;
; We're going to bend that here as follows:
;
; R = CLAMP(y + 1.596*V)
; G = CLAMP(y - 0.383*U - 0.813*V)
; B = CLAMP(y + 1.976*U )
;
; where y = 0 for Y <= 16,
; y = ( Y-16)*1.164, for 16 < Y <= 239,
; y = (239-16)*1.164, for 239 < Y
;
; i.e. We clamp Y to the 16 to 239 range (which it is supposed to be in
; anyway). We then pick the B_U factor so that B never exceeds 511. We then
; shrink the G_U factor in line with that to avoid a colour shift as much as
; possible.
;
; We're going to use tables to do it faster, but rather than doing it using
; 5 tables as the above suggests, we're going to do it using just 3.
;
; We do this by working in parallel within a 32 bit word, and using one
; table each for Y U and V.
;
; Source Y values are 0 to 255, so 0.. 260 after scaling
; Source U values are -128 to 127, so -49.. 49(G), -253..251(B) after
; Source V values are -128 to 127, so -204..203(R), -104..103(G) after
;
; So total summed values:
; -223 <= R <= 481, -173 <= G <= 431, -253 <= B < 511
;
; We need to pack R G and B into a 32 bit word, and because of Bs range we
; need 2 bits above the valid range of B to detect overflow, and another one
; to detect the sense of the overflow. We therefore adopt the following
; representation:
;
; osGGGGGgggggosBBBBBbbbosRRRRRrrr
;
; Each such word breaks down into 3 ranges.
;
; osGGGGGggggg osBBBBBbbb osRRRRRrrr
;
; Thus we have 8 bits for each B and R table entry, and 10 bits for G (good
; as G is the most noticeable one). The s bit for each represents the sign,
; and o represents the overflow.
;
; For R and B we pack the table by taking the 11 bit representation of their
; values, and toggling bit 10 in the U and V tables.
;
; For the green case we calculate 4*G (thus effectively using 10 bits for the
; valid range) truncate to 12 bits. We toggle bit 11 in the Y table.
; Theorarm library
; Copyright (C) 2009 Robin Watts for Pinknoise Productions Ltd
AREA |.text|, CODE, READONLY
EXPORT yuv420_2_rgb888
EXPORT yuv420_2_rgb888_PROFILE
; void yuv420_2_rgb888
; uint8_t *dst_ptr
; uint8_t *y_ptr
; uint8_t *u_ptr
; uint8_t *v_ptr
; int width
; int height
; int y_span
; int uv_span
; int dst_span
; int *tables
; int dither
CONST_flags
DCD 0x40080100
yuv420_2_rgb888
; r0 = dst_ptr
; r1 = y_ptr
; r2 = u_ptr
; r3 = v_ptr
; <> = width
; <> = height
; <> = y_span
; <> = uv_span
; <> = dst_span
; <> = y_table
; <> = dither
STMFD r13!,{r4-r11,r14}
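; After STMFD pushes nine registers, the stacked arguments (the fifth
; onwards under the AAPCS) start at [r13, #9*4]: width, height, y_span,
; uv_span, dst_span, tables, dither.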
LDR r8, [r13,#10*4] ; r8 = height
LDR r10,[r13,#11*4] ; r10= y_span
LDR r9, [r13,#13*4] ; r9 = dst_span
LDR r14,[r13,#14*4] ; r14= y_table
LDR r5, CONST_flags
LDR r11,[r13,#9*4] ; r11= width
ADD r4, r14, #256*4
SUBS r8, r8, #1
BLT end
BEQ trail_row1
yloop1
SUB r8, r8, r11,LSL #16 ; r8 = height-(width<<16)
ADDS r8, r8, #1<<16 ; if (width == 1)
BGE trail_pair1 ; just do 1 column
xloop1
LDRB r11,[r2], #1 ; r11 = u = *u_ptr++
LDRB r12,[r3], #1 ; r12 = v = *v_ptr++
LDRB r7, [r1, r10] ; r7 = y2 = y_ptr[stride]
LDRB r6, [r1], #1 ; r6 = y0 = *y_ptr++
ADD r12,r12,#512
LDR r11,[r4, r11,LSL #2] ; r11 = u = u_table[u]
LDR r12,[r14,r12,LSL #2] ; r12 = v = v_table[v]
LDR r7, [r14,r7, LSL #2] ; r7 = y2 = y_table[y2]
LDR r6, [r14,r6, LSL #2] ; r6 = y0 = y_table[y0]
ADD r11,r11,r12 ; r11 = uv = u+v
ADD r7, r7, r11 ; r7 = y2 + uv
ADD r6, r6, r11 ; r6 = y0 + uv
ANDS r12,r7, r5
TSTEQ r6, r5
BNE fix101
return101
; Store the bottom one first
ADD r12,r0, r9
STRB r7,[r12],#1 ; Store R
MOV r7, r7, ROR #22
STRB r7,[r12],#1 ; Store G
MOV r7, r7, ROR #21
STRB r7,[r12],#1 ; Store B
; Then store the top one
STRB r6,[r0], #1 ; Store R
MOV r6, r6, ROR #22
STRB r6,[r0], #1 ; Store G
LDRB r7, [r1, r10] ; r7 = y3 = y_ptr[stride]
LDRB r12,[r1], #1 ; r12= y1 = *y_ptr++
MOV r6, r6, ROR #21
LDR r7, [r14, r7, LSL #2] ; r7 = y3 = y_table[y2]
LDR r12,[r14, r12,LSL #2] ; r12= y1 = y_table[y0]
STRB r6,[r0], #1 ; Store B
ADD r7, r7, r11 ; r7 = y3 + uv
ADD r6, r12,r11 ; r6 = y1 + uv
ANDS r12,r7, r5
TSTEQ r6, r5
BNE fix102
return102
; Store the bottom one first
ADD r12,r0, r9
STRB r7,[r12],#1 ; Store R
MOV r7, r7, ROR #22
STRB r7,[r12],#1 ; Store G
MOV r7, r7, ROR #21
STRB r7,[r12],#1 ; Store B
; Then store the top one
STRB r6,[r0], #1 ; Store R
MOV r6, r6, ROR #22
STRB r6,[r0], #1 ; Store G
MOV r6, r6, ROR #21
STRB r6,[r0], #1 ; Store B
ADDS r8, r8, #2<<16
BLT xloop1
MOVS r8, r8, LSL #16 ; Clear the top 16 bits of r8
MOV r8, r8, LSR #16 ; If the C bit is clear we still have
BCC trail_pair1 ; 1 more pixel pair to do
end_xloop1
LDR r11,[r13,#9*4] ; r11= width
LDR r12,[r13,#12*4] ; r12= uv_stride
ADD r0, r0, r9, LSL #1
SUB r0, r0, r11,LSL #1
SUB r0, r0, r11
ADD r1, r1, r10,LSL #1
SUB r1, r1, r11
SUB r2, r2, r11,LSR #1
SUB r3, r3, r11,LSR #1
ADD r2, r2, r12
ADD r3, r3, r12
SUBS r8, r8, #2
BGT yloop1
LDMLTFD r13!,{r4-r11,pc}
trail_row1
; We have a row of pixels left to do
SUB r8, r8, r11,LSL #16 ; r8 = height-(width<<16)
ADDS r8, r8, #1<<16 ; if (width == 1)
BGE trail_pix1 ; just do 1 pixel
xloop12
LDRB r11,[r2], #1 ; r11 = u = *u_ptr++
LDRB r12,[r3], #1 ; r12 = v = *v_ptr++
LDRB r6, [r1], #1 ; r6 = y0 = *y_ptr++
LDRB r7, [r1], #1 ; r7 = y1 = *y_ptr++
ADD r12,r12,#512
LDR r11,[r4, r11,LSL #2] ; r11 = u = u_table[u]
LDR r12,[r14,r12,LSL #2] ; r12 = v = v_table[v]
LDR r7, [r14,r7, LSL #2] ; r7 = y1 = y_table[y1]
LDR r6, [r14,r6, LSL #2] ; r6 = y0 = y_table[y0]
ADD r11,r11,r12 ; r11 = uv = u+v
ADD r6, r6, r11 ; r6 = y0 + uv
ADD r7, r7, r11 ; r7 = y1 + uv
ANDS r12,r7, r5
TSTEQ r6, r5
BNE fix104
return104
; Store the bottom one first
STRB r6,[r0], #1 ; Store R
MOV r6, r6, ROR #22
STRB r6,[r0], #1 ; Store G
MOV r6, r6, ROR #21
STRB r6,[r0], #1 ; Store B
; Then store the top one
STRB r7,[r0], #1 ; Store R
MOV r7, r7, ROR #22
STRB r7,[r0], #1 ; Store G
MOV r7, r7, ROR #21
STRB r7,[r0], #1 ; Store B
ADDS r8, r8, #2<<16
BLT xloop12
MOVS r8, r8, LSL #16 ; Clear the top 16 bits of r8
MOV r8, r8, LSR #16 ; If the C bit is clear we still have
BCC trail_pix1 ; 1 more pixel pair to do
end
LDMFD r13!,{r4-r11,pc}
trail_pix1
; We have a single extra pixel to do
LDRB r11,[r2], #1 ; r11 = u = *u_ptr++
LDRB r12,[r3], #1 ; r12 = v = *v_ptr++
LDRB r6, [r1], #1 ; r6 = y0 = *y_ptr++
ADD r12,r12,#512
LDR r11,[r4, r11,LSL #2] ; r11 = u = u_table[u]
LDR r12,[r14,r12,LSL #2] ; r12 = v = v_table[v]
LDR r6, [r14,r6, LSL #2] ; r6 = y0 = y_table[y0]
ADD r11,r11,r12 ; r11 = uv = u+v
ADD r6, r6, r11 ; r6 = y0 + uv
ANDS r12,r6, r5
BNE fix105
return105
STRB r6,[r0], #1 ; Store R
MOV r6, r6, ROR #22
STRB r6,[r0], #1 ; Store G
MOV r6, r6, ROR #21
STRB r6,[r0], #1 ; Store B
LDMFD r13!,{r4-r11,pc}
trail_pair1
; We have a pair of pixels left to do
LDRB r11,[r2] ; r11 = u = *u_ptr++
LDRB r12,[r3] ; r12 = v = *v_ptr++
LDRB r7, [r1, r10] ; r7 = y2 = y_ptr[stride]
LDRB r6, [r1], #1 ; r6 = y0 = *y_ptr++
ADD r12,r12,#512
LDR r11,[r4, r11,LSL #2] ; r11 = u = u_table[u]
LDR r12,[r14,r12,LSL #2] ; r12 = v = v_table[v]
LDR r7, [r14,r7, LSL #2] ; r7 = y2 = y_table[y2]
LDR r6, [r14,r6, LSL #2] ; r6 = y0 = y_table[y0]
ADD r11,r11,r12 ; r11 = uv = u+v
ADD r7, r7, r11 ; r7 = y2 + uv
ADD r6, r6, r11 ; r6 = y0 + uv
ANDS r12,r7, r5
TSTEQ r6, r5
BNE fix103
return103
; Store the bottom one first
ADD r12,r0, r9
STRB r7,[r12],#1 ; Store R
MOV r7, r7, ROR #22
STRB r7,[r12],#1 ; Store G
MOV r7, r7, ROR #21
STRB r7,[r12],#1 ; Store B
; Then store the top one
STRB r6,[r0], #1 ; Store R
MOV r6, r6, ROR #22
STRB r6,[r0], #1 ; Store G
MOV r6, r6, ROR #21
STRB r6,[r0], #1 ; Store B
B end_xloop1
fix101
; r7 and r6 are the values, at least one of which has overflowed
; r12 = r7 & mask = .s......s......s......
SUB r12,r12,r12,LSR #8 ; r12 = ..SSSSSS.SSSSSS.SSSSSS
ORR r7, r7, r12 ; r7 |= ..SSSSSS.SSSSSS.SSSSSS
BIC r12,r5, r7, LSR #1 ; r12 = .o......o......o......
ADD r7, r7, r12,LSR #8 ; r7 = fixed value
AND r12, r6, r5 ; r12 = .S......S......S......
SUB r12,r12,r12,LSR #8 ; r12 = ..SSSSSS.SSSSSS.SSSSSS
ORR r6, r6, r12 ; r6 |= ..SSSSSS.SSSSSS.SSSSSS
BIC r12,r5, r6, LSR #1 ; r12 = .o......o......o......
ADD r6, r6, r12,LSR #8 ; r6 = fixed value
B return101
fix102
; r7 and r6 are the values, at least one of which has overflowed
; r12 = r7 & mask = .s......s......s......
SUB r12,r12,r12,LSR #8 ; r12 = ..SSSSSS.SSSSSS.SSSSSS
ORR r7, r7, r12 ; r7 |= ..SSSSSS.SSSSSS.SSSSSS
BIC r12,r5, r7, LSR #1 ; r12 = .o......o......o......
ADD r7, r7, r12,LSR #8 ; r7 = fixed value
AND r12, r6, r5 ; r12 = .S......S......S......
SUB r12,r12,r12,LSR #8 ; r12 = ..SSSSSS.SSSSSS.SSSSSS
ORR r6, r6, r12 ; r6 |= ..SSSSSS.SSSSSS.SSSSSS
BIC r12,r5, r6, LSR #1 ; r12 = .o......o......o......
ADD r6, r6, r12,LSR #8 ; r6 = fixed value
B return102
fix103
; r7 and r6 are the values, at least one of which has overflowed
; r12 = r7 & mask = .s......s......s......
SUB r12,r12,r12,LSR #8 ; r12 = ..SSSSSS.SSSSSS.SSSSSS
ORR r7, r7, r12 ; r7 |= ..SSSSSS.SSSSSS.SSSSSS
BIC r12,r5, r7, LSR #1 ; r12 = .o......o......o......
ADD r7, r7, r12,LSR #8 ; r7 = fixed value
AND r12, r6, r5 ; r12 = .S......S......S......
SUB r12,r12,r12,LSR #8 ; r12 = ..SSSSSS.SSSSSS.SSSSSS
ORR r6, r6, r12 ; r6 |= ..SSSSSS.SSSSSS.SSSSSS
BIC r12,r5, r6, LSR #1 ; r12 = .o......o......o......
ADD r6, r6, r12,LSR #8 ; r6 = fixed value
B return103
fix104
; r7 and r6 are the values, at least one of which has overflowed
; r12 = r7 & mask = .s......s......s......
SUB r12,r12,r12,LSR #8 ; r12 = ..SSSSSS.SSSSSS.SSSSSS
ORR r7, r7, r12 ; r7 |= ..SSSSSS.SSSSSS.SSSSSS
BIC r12,r5, r7, LSR #1 ; r12 = .o......o......o......
ADD r7, r7, r12,LSR #8 ; r7 = fixed value
AND r12, r6, r5 ; r12 = .S......S......S......
SUB r12,r12,r12,LSR #8 ; r12 = ..SSSSSS.SSSSSS.SSSSSS
ORR r6, r6, r12 ; r6 |= ..SSSSSS.SSSSSS.SSSSSS
BIC r12,r5, r6, LSR #1 ; r12 = .o......o......o......
ADD r6, r6, r12,LSR #8 ; r6 = fixed value
B return104
fix105
; r6 is the value, which has overflowed
; r12 = r6 & mask = .s......s......s......
SUB r12,r12,r12,LSR #8 ; r12 = ..SSSSSS.SSSSSS.SSSSSS
ORR r6, r6, r12 ; r6 |= ..SSSSSS.SSSSSS.SSSSSS
BIC r12,r5, r6, LSR #1 ; r12 = .o......o......o......
ADD r6, r6, r12,LSR #8 ; r6 = fixed value
B return105
END

View File

@ -1,208 +0,0 @@
/* YUV-> RGB conversion code. (YUV420 to RGB888)
*
* Copyright (C) 2008-9 Robin Watts (robin@wss.co.uk) for Pinknoise
* Productions Ltd.
*
* Licensed under the GNU GPL. If you need it under another license, contact
* me and ask.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*
* The algorithm used here is based heavily on one created by Sophie Wilson
* of Acorn/e-14/Broadcomm. Many thanks.
*
* Additional tweaks (in the fast fixup code) are from Paul Gardiner.
*
* The old implementation of YUV -> RGB did:
*
* R = CLAMP((Y-16)*1.164 + 1.596*V)
* G = CLAMP((Y-16)*1.164 - 0.391*U - 0.813*V)
* B = CLAMP((Y-16)*1.164 + 2.018*U )
*
* We're going to bend that here as follows:
*
* R = CLAMP(y + 1.596*V)
* G = CLAMP(y - 0.383*U - 0.813*V)
* B = CLAMP(y + 1.976*U )
*
* where y = 0 for Y <= 16,
* y = ( Y-16)*1.164, for 16 < Y <= 239,
* y = (239-16)*1.164, for 239 < Y
*
* i.e. We clamp Y to the 16 to 239 range (which it is supposed to be in
* anyway). We then pick the B_U factor so that B never exceeds 511. We then
* shrink the G_U factor in line with that to avoid a colour shift as much as
* possible.
*
* We're going to use tables to do it faster, but rather than doing it using
* 5 tables as the above suggests, we're going to do it using just 3.
*
* We do this by working in parallel within a 32 bit word, and using one
* table each for Y U and V.
*
* Source Y values are 0 to 255, so 0.. 260 after scaling
* Source U values are -128 to 127, so -49.. 49(G), -253..251(B) after
* Source V values are -128 to 127, so -204..203(R), -104..103(G) after
*
* So total summed values:
* -223 <= R <= 481, -173 <= G <= 431, -253 <= B < 511
*
* We need to pack R G and B into a 32 bit word, and because of Bs range we
* need 2 bits above the valid range of B to detect overflow, and another one
* to detect the sense of the overflow. We therefore adopt the following
* representation:
*
* osGGGGGgggggosBBBBBbbbosRRRRRrrr
*
* Each such word breaks down into 3 ranges.
*
* osGGGGGggggg osBBBBBbbb osRRRRRrrr
*
* Thus we have 8 bits for each B and R table entry, and 10 bits for G (good
* as G is the most noticeable one). The s bit for each represents the sign,
* and o represents the overflow.
*
* For R and B we pack the table by taking the 11 bit representation of their
* values, and toggling bit 10 in the U and V tables.
*
* For the green case we calculate 4*G (thus effectively using 10 bits for the
* valid range) truncate to 12 bits. We toggle bit 11 in the Y table.
*/
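/* Worked example (illustrative, not part of the original comment): for a
 * mid-grey pixel with Y = 126 and U = V = 128, both chroma terms vanish
 * (U - 128 = V - 128 = 0), so R = G = B = y = (126 - 16)*1.164 = 128.04,
 * i.e. the output pixel is RGB (128, 128, 128). The tables pre-compute
 * these scaled terms and pack them into the layout described above. */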
#include "yuv2rgb.h"
enum
{
FLAGS = 0x40080100
};
#define READUV(U,V) (tables[256 + (U)] + tables[512 + (V)])
#define READY(Y) tables[Y]
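/* tables[] holds the y, u and v tables back to back, hence the +256 and
 * +512 offsets above. */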
#define FIXUP(Y) \
do { \
int tmp = (Y) & FLAGS; \
if (tmp != 0) \
{ \
tmp -= tmp>>8; \
(Y) |= tmp; \
tmp = FLAGS & ~(Y>>1); \
(Y) += tmp>>8; \
} \
} while (0 == 1)
#define STORE(Y,DSTPTR) \
do { \
uint32_t Y2 = (Y); \
uint8_t *DSTPTR2 = (DSTPTR); \
(DSTPTR2)[0] = (Y2); \
(DSTPTR2)[1] = (Y2)>>22; \
(DSTPTR2)[2] = (Y2)>>11; \
} while (0 == 1)
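/* STORE unpacks the word described above: R sits in the low bits, B from
 * bit 11 up and G from bit 22 up, so three byte stores recover RGB888
 * without any further masking. */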
void yuv420_2_rgb888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither)
{
height -= 1;
while (height > 0)
{
height -= width<<16;
height += 1<<16;
while (height < 0)
{
/* Do 2 column pairs */
uint32_t uv, y0, y1;
uv = READUV(*u_ptr++,*v_ptr++);
y1 = uv + READY(y_ptr[y_span]);
y0 = uv + READY(*y_ptr++);
FIXUP(y1);
FIXUP(y0);
STORE(y1, &dst_ptr[dst_span]);
STORE(y0, dst_ptr);
dst_ptr += 3;
y1 = uv + READY(y_ptr[y_span]);
y0 = uv + READY(*y_ptr++);
FIXUP(y1);
FIXUP(y0);
STORE(y1, &dst_ptr[dst_span]);
STORE(y0, dst_ptr);
dst_ptr += 3;
height += (2<<16);
}
if ((height>>16) == 0)
{
/* Trailing column pair */
uint32_t uv, y0, y1;
uv = READUV(*u_ptr,*v_ptr);
y1 = uv + READY(y_ptr[y_span]);
y0 = uv + READY(*y_ptr++);
FIXUP(y1);
FIXUP(y0);
STORE(y1, &dst_ptr[dst_span]); /* y1 came from y_ptr[y_span], the lower row, as in the main loop */
STORE(y0, dst_ptr);
dst_ptr += 3;
}
dst_ptr += dst_span*2-width*3;
y_ptr += y_span*2-width;
u_ptr += uv_span-(width>>1);
v_ptr += uv_span-(width>>1);
height = (height<<16)>>16;
height -= 2;
}
if (height == 0)
{
/* Trail row */
height -= width<<16;
height += 1<<16;
while (height < 0)
{
/* Do a row pair */
uint32_t uv, y0, y1;
uv = READUV(*u_ptr++,*v_ptr++);
y1 = uv + READY(*y_ptr++);
y0 = uv + READY(*y_ptr++);
FIXUP(y1);
FIXUP(y0);
STORE(y1, dst_ptr);
dst_ptr += 3;
STORE(y0, dst_ptr);
dst_ptr += 3;
height += (2<<16);
}
if ((height>>16) == 0)
{
/* Trailing pix */
uint32_t uv, y0;
uv = READUV(*u_ptr++,*v_ptr++);
y0 = uv + READY(*y_ptr++);
FIXUP(y0);
STORE(y0, dst_ptr);
dst_ptr += 3;
}
}
}
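/* A minimal calling sketch for the function above, assuming a fully planar
 * YUV420 buffer (Y plane, then U, then V). The wrapper name, buffer layout
 * and sizes are illustrative only; yuv2rgb565_table is the combined
 * y/u/v table referenced by the JNI wrapper later in this commit. */
extern const uint32_t yuv2rgb565_table[];   /* y, u and v tables back to back */

static void convert_frame(const uint8_t *yuv, uint8_t *rgb, int w, int h)
{
    const uint8_t *y = yuv;                    /* w*h luma samples      */
    const uint8_t *u = yuv + w * h;            /* (w/2)*(h/2) U samples */
    const uint8_t *v = u + (w / 2) * (h / 2);  /* (w/2)*(h/2) V samples */
    yuv420_2_rgb888(rgb, y, u, v, w, h,
                    w,      /* y_span   */
                    w / 2,  /* uv_span  */
                    w * 3,  /* dst_span */
                    yuv2rgb565_table, 0 /* no dither */);
}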

View File

@ -1,156 +0,0 @@
#include <string.h>
#include <jni.h>
#include <yuv420sp2rgb.h>
#include <yuv2rgb.h>
/*
YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
U/V plane containing 8 bit 2x2 subsampled chroma samples.
except the interleave order of U and V is reversed.
H V
Y Sample Period 1 1
U (Cb) Sample Period 2 2
V (Cr) Sample Period 2 2
*/
/*
size of a char:
find . -name limits.h -exec grep CHAR_BIT {} \;
*/
#ifndef max
#define max(a,b) ((a) > (b) ? (a) : (b))
#define min(a,b) ((a) < (b) ? (a) : (b))
#endif
enum
{
FLAGS = 0x40080100
};
#define READUV(U,V) (tables[256 + (U)] + tables[512 + (V)])
#define READY(Y) tables[Y]
#define FIXUP(Y) \
do { \
int tmp = (Y) & FLAGS; \
if (tmp != 0) \
{ \
tmp -= tmp>>8; \
(Y) |= tmp; \
tmp = FLAGS & ~(Y>>1); \
(Y) += tmp>>8; \
} \
} while (0 == 1)
#define STORE(Y,DSTPTR) \
do { \
uint32_t Y2 = (Y); \
uint8_t *DSTPTR2 = (DSTPTR); \
(DSTPTR2)[2] = (Y2); \
(DSTPTR2)[1] = (Y2)>>22; \
(DSTPTR2)[0] = (Y2)>>11; \
} while (0 == 1)
typedef unsigned char byte;
const int bytes_per_pixel = 2;
void color_convert_common(const unsigned char *pY, const unsigned char *pUV, int width, int height,
unsigned char *buffer, int grey)
{
#define LOOKUP 1
#if ! LOOKUP
int nR, nG, nB;
#endif
int dest_span = 3 * width;
unsigned char *out = buffer;
if (grey)
{
memcpy(out, pY, width * height * sizeof(unsigned char));
}
else
{
#if LOOKUP
const uint32_t* tables = yuv2rgb565_table;
const byte* nY = pY;
const byte* nUV = pUV;
int idx = 0;
while (nY+width < pUV)
{
int y = (idx / width);
int x = (idx % width);
byte Y = *nY;
byte Y2 = nY[width];
byte V = *nUV;
byte U = *(nUV + 1);
/* Do 2 row pairs */
uint32_t uv, y0, y1;
uv = READUV(U,V);
y1 = uv + READY(Y);
y0 = uv + READY(Y2);
FIXUP(y1);
FIXUP(y0);
STORE(y1, &out[dest_span]);
STORE(y0, out);
out += 3;
Y = *(++nY);
Y2 = nY[width];
y1 = uv + READY(Y);
y0 = uv + READY(Y2);
FIXUP(y1);
FIXUP(y0);
STORE(y1, &out[dest_span]);
STORE(y0, out);
out += 3;
++nY;
nUV = pUV + (y / 2) * width + 2 * (x / 2);
idx+=2;
}
#else
const byte* nY = pY;
const byte* nUV = pUV;
int idx = 0;
while (nY < pUV)
{
int y = (idx / width);
int x = (idx % width);
int Y = *nY;
int V = *nUV;
int U = *(nUV + 1);
Y -= 16;
V -= 128;
U -= 128;
if (Y < 0)
Y = 0;
nB = (int)(1192 * Y + 2066 * U);
nG = (int)(1192 * Y - 833 * V - 400 * U);
nR = (int)(1192 * Y + 1634 * V);
nR = min(262143, max(0, nR));
nG = min(262143, max(0, nG));
nB = min(262143, max(0, nB));
nR >>= 10;
nR &= 0xff;
nG >>= 10;
nG &= 0xff;
nB >>= 10;
nB &= 0xff;
*(out++) = (unsigned char)nR;
*(out++) = (unsigned char)nG;
*(out++) = (unsigned char)nB;
nY += 1;
nUV = pUV + (y / 2) * width + 2 * (x / 2);
++idx;
}
#endif
}
}
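/* Chroma addressing example: with width = 640, the pixel at (x, y) = (5, 3)
 * finds its interleaved pair at pUV + (3/2)*640 + 2*(5/2) = pUV + 644,
 * i.e. V at byte 644 and U at byte 645 of the chroma plane. */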

View File

@ -1,4 +0,0 @@
#!/bin/sh
#this generates an ant based cli build of the android-jni project
android update project --name android-opencv \
--path .

Binary file not shown.


View File

@ -1,11 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical"
android:gravity="center_vertical|center_horizontal">
<TextView android:scrollbars="vertical" android:id="@+id/calibtext" android:text="" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:padding="20dip"/>
</LinearLayout>

View File

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent" android:layout_height="fill_parent"
android:background="@drawable/cameraback">
<!--<SurfaceView-->
<com.opencv.camera.NativePreviewer
android:id="@+id/nativepreviewer" android:layout_width="400dip"
android:layout_height="300dip" android:layout_alignParentLeft="true"
android:layout_margin="20dip" android:gravity="center_horizontal|center_vertical"
android:layout_marginRight="20dip" />
<LinearLayout android:id="@+id/glview_layout"
android:layout_width="400dip" android:layout_height="300dip"
android:layout_alignParentLeft="true" android:layout_margin="20dip"
android:gravity="center_horizontal|center_vertical"
android:layout_marginRight="20dip">
</LinearLayout>
<LinearLayout android:layout_width="wrap_content"
android:layout_height="fill_parent" android:orientation="vertical"
android:layout_margin="20dip" android:gravity="center_horizontal|center_vertical"
android:layout_alignParentRight="true">
<ImageButton android:src="@android:drawable/ic_menu_camera"
android:id="@+id/button_capture" android:layout_width="60dip"
android:layout_height="60dip" android:layout_marginBottom="10dip"></ImageButton>
<ImageButton android:src="@android:drawable/ic_menu_preferences"
android:id="@+id/button_camera_settings" android:layout_width="60dip"
android:layout_height="60dip" android:layout_marginBottom="10dip"></ImageButton>
</LinearLayout>
</RelativeLayout>

View File

@ -1,43 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent" android:layout_height="fill_parent"
android:orientation="vertical" android:gravity="center_vertical|center_horizontal">
<TextView android:text="@string/settings_text"
android:autoLink="web" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:padding="20dip" />
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="@string/image_size_prompt" />
<Spinner android:id="@+id/image_size" android:layout_width="fill_parent"
android:layout_height="wrap_content" android:saveEnabled="true"
android:prompt="@string/image_size_prompt" android:entries="@array/image_sizes">
</Spinner>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="@string/camera_mode_prompt" />
<Spinner android:id="@+id/camera_mode" android:layout_width="fill_parent"
android:layout_height="wrap_content" android:saveEnabled="true"
android:prompt="@string/camera_mode_prompt" android:entries="@array/camera_mode">
</Spinner>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="@string/whitebalance_prompt" />
<Spinner android:id="@+id/whitebalance" android:layout_width="fill_parent"
android:layout_height="wrap_content" android:saveEnabled="true"
android:prompt="@string/whitebalance_prompt" android:entries="@array/whitebalance">
</Spinner>
</LinearLayout>
</LinearLayout>

View File

@ -1,40 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical"
android:gravity="center_vertical|center_horizontal">
<TextView android:text="@string/patterntext" android:autoLink="web" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:padding="20dip"/>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Corners in width direction:"/>
<Spinner android:id="@+id/rows"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:saveEnabled="true"
android:prompt="@string/chesspromptx"
android:entries="@array/chesssizes">
</Spinner>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="Corners in height direction:"/>
<Spinner android:id="@+id/cols"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:saveEnabled="true"
android:prompt="@string/chessprompty"
android:entries="@array/chesssizes">
</Spinner>
</LinearLayout>
</LinearLayout>

View File

@ -1,11 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<declare-styleable name="CameraParams">
<attr name="preview_width" format="integer"/>
<attr name="preview_height" format="integer"/>
</declare-styleable>
</resources>

View File

@ -1,21 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string-array name="chesssizes">
<item>2</item>
<item>3</item>
<item>4</item>
<item>5</item>
<item>6</item>
<item>7</item>
<item>8</item>
<item>9</item>
<item>10</item>
<item>11</item>
<item>12</item>
<item>13</item>
</string-array>
<string name="chesspromptx">
Choose the width:</string>
<string name="chessprompty">
Choose the height:</string>
</resources>

View File

@ -1,31 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string-array name="image_sizes">
<item>320x240</item>
<item>400x300</item>
<item>640x480</item>
<item>800x600</item>
<item>1000x800</item>
</string-array>
<string-array name="camera_mode">
<item>color</item>
<item>BW</item>
</string-array>
<string name="image_size_prompt">
Image Size:\n(may not be exact)
</string>
<string name="camera_mode_prompt">
Camera Mode:
</string>
<string-array name="whitebalance">
<item>auto</item>
<item>incandescent</item>
<item>fluorescent</item>
<item>daylight</item>
<item>cloudy-daylight</item>
</string-array>
<string name="whitebalance_prompt">
Whitebalance:
</string>
</resources>

View File

@ -1,20 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">Calibration</string>
<string name="patternsize">Pattern Size</string>
<string name="patterntext">Please choose the width and height (number of inside corners) of the checker
board pattern you will be using for calibration. Default is 6 by 8 corners. You may find a checkerboard pattern at
http://opencv.willowgarage.com/pattern</string>
<string name="patternlink">http://opencv.willowgarage.com/pattern</string>
<string name="camera_settings_label">Camera Settings</string>
<string name="settings_text">Change the camera settings. Be aware that BW is much faster for previewing, than color. Also, if you change the image size, you should
rerun calibration. Default values: BW and 640x480 are a good start.</string>
<string name="calibration_service_started">Calibration calculations have started...</string>
<string name="calibration_service_stopped">Calibration calculations has stopped.</string>
<string name="calibration_service_finished">Calibration finished, you camera is calibrated.</string>
<string name="calibration_service_label">Calibration</string>
<string name="calibration_not_enough">Please capture atleast 10 images of the pattern!</string>
</resources>

View File

@ -1,157 +0,0 @@
package com.opencv;
import java.util.LinkedList;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import android.view.ViewGroup.LayoutParams;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import com.opencv.camera.NativePreviewer;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.opengl.GL2CameraViewer;
public class OpenCV extends Activity {
private NativePreviewer mPreview;
private GL2CameraViewer glview;
/*
* (non-Javadoc)
*
* @see android.app.Activity#onKeyUp(int, android.view.KeyEvent)
*/
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
return super.onKeyUp(keyCode, event);
}
/*
* (non-Javadoc)
*
* @see android.app.Activity#onKeyLongPress(int, android.view.KeyEvent)
*/
@Override
public boolean onKeyLongPress(int keyCode, KeyEvent event) {
return super.onKeyLongPress(keyCode, event);
}
/**
* Prevent the screen from being turned off by the system.
*/
public void disableScreenTurnOff() {
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
/**
* Sets the orientation to landscape, as this is needed by AndAR.
*/
public void setOrientation() {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
/**
* Maximize the application.
*/
public void setFullscreen() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
}
public void setNoTitle() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// menu.add("Sample");
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// if(item.getTitle().equals("Sample")){
// //do stuff...
// }
return true;
}
@Override
public void onOptionsMenuClosed(Menu menu) {
// TODO Auto-generated method stub
super.onOptionsMenuClosed(menu);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setFullscreen();
disableScreenTurnOff();
FrameLayout frame = new FrameLayout(getApplication());
// Create our Preview view and set it as the content of our activity.
mPreview = new NativePreviewer(getApplication(), 640, 480);
LayoutParams params = new LayoutParams(LayoutParams.WRAP_CONTENT,
LayoutParams.WRAP_CONTENT);
params.height = getWindowManager().getDefaultDisplay().getHeight();
params.width = (int) (params.height * 4.0 / 2.88);
LinearLayout vidlay = new LinearLayout(getApplication());
vidlay.setGravity(Gravity.CENTER);
vidlay.addView(mPreview, params);
frame.addView(vidlay);
// make the glview overlay ontop of video preview
mPreview.setZOrderMediaOverlay(false);
glview = new GL2CameraViewer(getApplication(), false, 0, 0);
glview.setZOrderMediaOverlay(true);
glview.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT,
LayoutParams.FILL_PARENT));
frame.addView(glview);
setContentView(frame);
}
@Override
protected void onPause() {
super.onPause();
mPreview.onPause();
glview.onPause();
}
@Override
protected void onResume() {
super.onResume();
glview.onResume();
LinkedList<NativeProcessor.PoolCallback> callbackstack = new LinkedList<PoolCallback>();
callbackstack.add(glview.getDrawCallback());
mPreview.addCallbackStack(callbackstack);
mPreview.onResume();
}
}

View File

@ -1,47 +0,0 @@
package com.opencv.calibration;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import android.app.Activity;
import android.os.Bundle;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.widget.TextView;
import com.opencv.R;
public class CalibrationViewer extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.calibrationviewer);
Bundle extras = getIntent().getExtras();
String filename = extras.getString("calibfile");
if (filename != null) {
TextView text = (TextView) findViewById(R.id.calibtext);
text.setMovementMethod(new ScrollingMovementMethod());
try {
BufferedReader reader = new BufferedReader(new FileReader(
filename));
while (reader.ready()) {
text.append(reader.readLine() +"\n");
}
} catch (FileNotFoundException e) {
Log.e("opencv", "could not open calibration file at:"
+ filename);
} catch (IOException e) {
Log.e("opencv", "error reading file: "
+ filename);
}
}
}
}

View File

@ -1,124 +0,0 @@
package com.opencv.calibration;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.locks.ReentrantLock;
import android.os.AsyncTask;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.Calibration;
import com.opencv.jni.Size;
import com.opencv.jni.image_pool;
public class Calibrator implements PoolCallback {
private Calibration calibration;
static public interface CalibrationCallback{
public void onFoundChessboard(Calibrator calibrator);
public void onDoneCalibration(Calibrator calibration, File calibfile);
public void onFailedChessboard(Calibrator calibrator);
}
private CalibrationCallback callback;
public Calibrator(CalibrationCallback callback) {
calibration = new Calibration();
this.callback = callback;
}
public void resetCalibration(){
calibration.resetChess();
}
public void setPatternSize(Size size){
Size csize = calibration.getPatternsize();
if(size.getWidth() == csize.getWidth()&&
size.getHeight() == csize.getHeight())
return;
calibration.setPatternsize(size);
resetCalibration();
}
public void setPatternSize(int width, int height){
Size patternsize = new Size(width,height);
setPatternSize(patternsize);
}
private boolean capture_chess;
ReentrantLock lock = new ReentrantLock();
public void calibrate(File calibration_file) throws IOException{
if(getNumberPatternsDetected() < 3){
return;
}
CalibrationTask calibtask = new CalibrationTask(calibration_file);
calibtask.execute((Object[])null);
}
public void queueChessCapture(){
capture_chess = true;
}
private class CalibrationTask extends AsyncTask<Object, Object, Object> {
File calibfile;
public CalibrationTask(File calib) throws IOException{
super();
calibfile = calib;
calibfile.createNewFile();
}
@Override
protected Object doInBackground(Object... params) {
lock.lock();
try{
calibration.calibrate(calibfile.getAbsolutePath());
}
finally{
lock.unlock();
}
return null;
}
@Override
protected void onPostExecute(Object result) {
callback.onDoneCalibration(Calibrator.this, calibfile);
}
}
//@Override
public void process(int idx, image_pool pool, long timestamp,
NativeProcessor nativeProcessor) {
if(lock.tryLock()){
try{
if(capture_chess){
if(calibration.detectAndDrawChessboard(idx, pool)){
callback.onFoundChessboard(this);
}else
callback.onFailedChessboard(this);
capture_chess = false;
}
}finally{
lock.unlock();
}
}
}
public int getNumberPatternsDetected(){
return calibration.getNumberDetectedChessboards();
}
public void setCallback(CalibrationCallback callback) {
this.callback = callback;
}
}

View File

@ -1,75 +0,0 @@
package com.opencv.calibration;
import com.opencv.R;
import com.opencv.jni.Size;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Spinner;
public class ChessBoardChooser extends Activity {
public static final String CHESS_SIZE = "chess_size";
public static final int DEFAULT_WIDTH = 6;
public static final int DEFAULT_HEIGHT = 8;
public static final int LOWEST = 2;
class DimChooser implements OnItemSelectedListener {
private String dim;
public DimChooser(String dim) {
this.dim = dim;
}
@Override
public void onItemSelected(AdapterView<?> arg0, View arg1, int pos,
long arg3) {
SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0);
Editor editor = settings.edit();
editor.putInt(dim, pos + LOWEST);
editor.commit();
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.chesssizer);
// Restore preferences
SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0);
int width = settings.getInt("width", 6);
int height = settings.getInt("height", 8);
Spinner wspin, hspin;
wspin = (Spinner) findViewById(R.id.rows);
hspin = (Spinner) findViewById(R.id.cols);
wspin.setSelection(width - LOWEST);
hspin.setSelection(height - LOWEST);
wspin.setOnItemSelectedListener(new DimChooser("width"));
hspin.setOnItemSelectedListener(new DimChooser("height"));
}
public static Size getPatternSize(Context ctx) {
SharedPreferences settings = ctx.getSharedPreferences(CHESS_SIZE, 0);
int width = settings.getInt("width", 6);
int height = settings.getInt("height", 8);
return new Size(width, height);
}
}

View File

@ -1,166 +0,0 @@
package com.opencv.calibration.services;
import java.io.File;
import java.io.IOException;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;
import com.opencv.R;
import com.opencv.calibration.CalibrationViewer;
import com.opencv.calibration.Calibrator;
import com.opencv.calibration.Calibrator.CalibrationCallback;
public class CalibrationService extends Service implements CalibrationCallback {
Class<?> activity;
int icon;
File calibration_file;
public void startCalibrating(Class<?> activitycaller,int icon_id, Calibrator calibrator, File calibration_file)
throws IOException {
activity = activitycaller;
icon = icon_id;
// Display a notification about us starting. We put an icon in the
// status bar.
showNotification();
this.calibration_file = calibration_file;
calibrator.setCallback(this);
calibrator.calibrate(calibration_file);
}
private NotificationManager mNM;
/**
* Class for clients to access. Because we know this service always runs in
* the same process as its clients, we don't need to deal with IPC.
*/
public class CalibrationServiceBinder extends Binder {
public CalibrationService getService() {
return CalibrationService.this;
}
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.i("LocalService", "Received start id " + startId + ": " + intent);
// We don't need this service to be restarted automatically if it is
// killed, so return not sticky.
return START_NOT_STICKY;
}
@Override
public void onCreate() {
mNM = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
}
@Override
public void onDestroy() {
// Cancel the persistent notification.
// mNM.cancel(R.string.calibration_service_started);
// Tell the user we stopped.
Toast.makeText(this, R.string.calibration_service_finished,
Toast.LENGTH_SHORT).show();
}
private final IBinder mBinder = new CalibrationServiceBinder();
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
/**
* Show a notification while this service is running.
*/
private void showNotification() {
// In this sample, we'll use the same text for the ticker and the
// expanded notification
CharSequence text = getText(R.string.calibration_service_started);
// Set the icon, scrolling text and timestamp
Notification notification = new Notification(icon, text,
System.currentTimeMillis());
// The PendingIntent to launch our activity if the user selects this
// notification
PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
new Intent(this, activity), 0);
// Set the info for the views that show in the notification panel.
notification.setLatestEventInfo(this,
getText(R.string.calibration_service_label), text,
contentIntent);
notification.defaults |= Notification.DEFAULT_SOUND;
// Send the notification.
// We use a layout id because it is a unique number. We use it later to
// cancel.
mNM.notify(R.string.calibration_service_started, notification);
}
/**
* Show a notification while this service is running.
*/
private void doneNotification() {
// In this sample, we'll use the same text for the ticker and the
// expanded notification
CharSequence text = getText(R.string.calibration_service_finished);
// Set the icon, scrolling text and timestamp
Notification notification = new Notification(icon, text,
System.currentTimeMillis());
Intent intent = new Intent(this,CalibrationViewer.class);
intent.putExtra("calibfile", calibration_file.getAbsolutePath());
// The PendingIntent to launch our activity if the user selects this
// notification
PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
intent, 0);
// Set the info for the views that show in the notification panel.
notification.setLatestEventInfo(this,
getText(R.string.calibration_service_label), text,
contentIntent);
notification.defaults |= Notification.DEFAULT_SOUND;
// Send the notification.
// We use a layout id because it is a unique number. We use it later to
// cancel.
mNM.notify(R.string.calibration_service_started, notification);
}
@Override
public void onFoundChessboard(Calibrator calibrator) {
// TODO Auto-generated method stub
}
@Override
public void onDoneCalibration(Calibrator calibration, File calibfile) {
doneNotification();
stopSelf();
}
@Override
public void onFailedChessboard(Calibrator calibrator) {
// TODO Auto-generated method stub
}
}

View File

@ -1,128 +0,0 @@
package com.opencv.camera;
import java.util.LinkedList;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.Window;
import android.view.WindowManager;
import android.widget.LinearLayout;
import com.opencv.camera.CameraButtonsHandler.CaptureListener;
import com.opencv.opengl.GL2CameraViewer;
public abstract class CameraActivity extends Activity implements CaptureListener {
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setFullscreen();
setOrientation();
disableScreenTurnOff();
setContentView(com.opencv.R.layout.camera);
cameraButtonHandler = new CameraButtonsHandler(this,this);
mPreview = (NativePreviewer) findViewById(com.opencv.R.id.nativepreviewer);
LinearLayout glview_layout = (LinearLayout) findViewById(com.opencv.R.id.glview_layout);
glview = new GL2CameraViewer(getApplication(), true, 0, 0);
glview_layout.addView(glview);
}
/**
* Handle the capture button as follows...
*/
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyEvent.KEYCODE_CAMERA:
case KeyEvent.KEYCODE_SPACE:
case KeyEvent.KEYCODE_DPAD_CENTER:
cameraButtonHandler.setIsCapture(true);
return true;
default:
return super.onKeyUp(keyCode, event);
}
}
/**
* Handle the capture button as follows... On some phones there is no
* capture button, only trackball
*/
@Override
public boolean onTrackballEvent(MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_UP) {
cameraButtonHandler.setIsCapture(true);
return true;
}
return super.onTrackballEvent(event);
}
/**
* Prevent the screen from being turned off by the system.
*/
public void disableScreenTurnOff() {
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
/**
* Sets the orientation to landscape, as this is needed by AndAR.
*/
public void setOrientation() {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
/**
* Maximize the application.
*/
public void setFullscreen() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
}
@Override
protected void onPause() {
super.onPause();
mPreview.onPause();
glview.onPause();
}
@Override
protected void onResume() {
super.onResume();
mPreview.setParamsFromPrefs(getApplicationContext());
glview.onResume();
mPreview.onResume();
setCallbackStack();
}
protected void setCallbackStack() {
LinkedList<NativeProcessor.PoolCallback> callbackstack = getCallBackStack();
if (callbackstack == null){
callbackstack = new LinkedList<NativeProcessor.PoolCallback>();
callbackstack.add(glview.getDrawCallback());
}
mPreview.addCallbackStack(callbackstack);
}
/**
* Override this and provide your processors to the camera
*
* @return null for default drawing
*/
protected abstract LinkedList<NativeProcessor.PoolCallback> getCallBackStack();
public void onCapture(){
}
protected NativePreviewer mPreview;
protected GL2CameraViewer glview;
protected CameraButtonsHandler cameraButtonHandler;
}

View File

@ -1,83 +0,0 @@
package com.opencv.camera;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageButton;
public class CameraButtonsHandler {
/** Constructs a buttons handler, will register with the capture button
* and the camera settings button.
* @param a The activity that has inflated the com.opencv.R.layout.camera
* as its layout.
*/
public CameraButtonsHandler(Activity a, CaptureListener l) {
ImageButton capture = (ImageButton) a
.findViewById(com.opencv.R.id.button_capture);
ImageButton settings = (ImageButton) a
.findViewById(com.opencv.R.id.button_camera_settings);
capture.setOnClickListener(capture_listener);
settings.setOnClickListener(settings_listener);
captureListener = l;
ctx = a;
}
public CameraButtonsHandler(Activity a) {
ImageButton capture = (ImageButton) a
.findViewById(com.opencv.R.id.button_capture);
ImageButton settings = (ImageButton) a
.findViewById(com.opencv.R.id.button_camera_settings);
capture.setOnClickListener(capture_listener);
settings.setOnClickListener(settings_listener);
ctx = a;
}
/** Check if the capture button has been pressed
* @return true if the capture button has been pressed
*/
synchronized public boolean isCapture(){
return capture_flag;
}
/** Reset the capture flag
*/
synchronized public void resetIsCapture(){
capture_flag = false;
}
/** Manually set the flag - call this on any event that should trigger
* a capture
* @param isCapture true if a capture should take place
*/
synchronized public void setIsCapture(boolean isCapture){
capture_flag = isCapture;
if(capture_flag && captureListener != null){
captureListener.onCapture();
}
}
private OnClickListener capture_listener = new View.OnClickListener() {
@Override
public void onClick(View v) {
setIsCapture(true);
}
};
private OnClickListener settings_listener = new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent configurer = new Intent(ctx,
CameraConfig.class);
ctx.startActivity(configurer);
}
};
interface CaptureListener{
public void onCapture();
}
private CaptureListener captureListener;
private Context ctx;
private boolean capture_flag = false;
}

View File

@ -1,214 +0,0 @@
package com.opencv.camera;
import com.opencv.R;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Spinner;
public class CameraConfig extends Activity {
public static final String CAMERA_SETTINGS = "CAMERA_SETTINGS";
public static final String CAMERA_MODE = "camera_mode";
public static final String IMAGE_WIDTH = "IMAGE_WIDTH";
public static final String IMAGE_HEIGHT = "IMAGE_HEIGHT";
public static final int CAMERA_MODE_BW = 0;
public static final int CAMERA_MODE_COLOR = 1;
private static final String WHITEBALANCE = "WHITEBALANCE";
public static int readCameraMode(Context ctx) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
int mode = settings.getInt(CAMERA_MODE, CAMERA_MODE_BW);
return mode;
}
public static String readWhitebalance(Context ctx) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
return settings.getString(WHITEBALANCE, "auto");
}
static public void setCameraMode(Context context, String mode) {
int m = 0;
if (mode.equals("BW")) {
m = CAMERA_MODE_BW;
} else if (mode.equals("color"))
m = CAMERA_MODE_COLOR;
setCameraMode(context, m);
}
private static String sizeToString(int[] size) {
return size[0] + "x" + size[1];
}
private static void parseStrToSize(String ssize, int[] size) {
String sz[] = ssize.split("x");
size[0] = Integer.valueOf(sz[0]);
size[1] = Integer.valueOf(sz[1]);
}
public static void readImageSize(Context ctx, int[] size) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
size[0] = settings.getInt(IMAGE_WIDTH, 640);
size[1] = settings.getInt(IMAGE_HEIGHT, 480);
}
public static void setCameraMode(Context ctx, int mode) {
// Update preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
Editor editor = settings.edit();
editor.putInt(CAMERA_MODE, mode);
editor.commit();
}
public static void setImageSize(Context ctx, String strsize) {
int size[] = { 0, 0 };
parseStrToSize(strsize, size);
setImageSize(ctx, size[0], size[1]);
}
public static void setImageSize(Context ctx, int width, int height) {
// Update preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
Editor editor = settings.edit();
editor.putInt(IMAGE_WIDTH, width);
editor.putInt(IMAGE_HEIGHT, height);
editor.commit();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camerasettings);
int mode = readCameraMode(this);
int size[] = { 0, 0 };
readImageSize(this, size);
final Spinner size_spinner;
final Spinner mode_spinner;
final Spinner whitebalance_spinner;
size_spinner = (Spinner) findViewById(R.id.image_size);
mode_spinner = (Spinner) findViewById(R.id.camera_mode);
whitebalance_spinner = (Spinner) findViewById(R.id.whitebalance);
String strsize = sizeToString(size);
String strmode = modeToString(mode);
String wbmode = readWhitebalance(getApplicationContext());
// select the spinner entries matching the stored preferences
String sizes[] = getResources().getStringArray(R.array.image_sizes);
int i = 0;
while (i < sizes.length && !sizes[i].equals(strsize))
i++;
if (i < sizes.length)
size_spinner.setSelection(i);
String modes[] = getResources().getStringArray(R.array.camera_mode);
i = 0;
while (i < modes.length && !modes[i].equals(strmode))
i++;
if (i < modes.length)
mode_spinner.setSelection(i);
String wbmodes[] = getResources().getStringArray(R.array.whitebalance);
i = 0;
while (i < wbmodes.length && !wbmodes[i].equals(wbmode))
i++;
if (i < wbmodes.length)
whitebalance_spinner.setSelection(i);
size_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View spinner,
int position, long arg3) {
Object o = size_spinner.getItemAtPosition(position);
if (o != null)
setImageSize(spinner.getContext(), (String) o);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
mode_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View spinner,
int position, long arg3) {
Object o = mode_spinner.getItemAtPosition(position);
if (o != null)
setCameraMode(spinner.getContext(), (String) o);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
whitebalance_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View spinner,
int position, long arg3) {
Object o = whitebalance_spinner.getItemAtPosition(position);
if (o != null)
setWhitebalance(spinner.getContext(), (String) o);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
}
public static void setWhitebalance(Context ctx, String o) {
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
Editor editor = settings.edit();
editor.putString(WHITEBALANCE, o);
editor.commit();
}
private String modeToString(int mode) {
switch (mode) {
case CAMERA_MODE_BW:
return "BW";
case CAMERA_MODE_COLOR:
return "color";
default:
return "";
}
}
}
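// Illustrative usage sketch, not part of the original file: reading and
// writing the shared camera preferences from any Context (ctx is assumed
// to be a valid Context).
//
// CameraConfig.setImageSize(ctx, "640x480"); // or setImageSize(ctx, 640, 480)
// CameraConfig.setCameraMode(ctx, CameraConfig.CAMERA_MODE_COLOR);
// CameraConfig.setWhitebalance(ctx, "auto");
//
// int[] size = new int[2];
// CameraConfig.readImageSize(ctx, size); // size[0] = width, size[1] = height
// int mode = CameraConfig.readCameraMode(ctx); // CAMERA_MODE_BW or CAMERA_MODE_COLOR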

View File

@ -1,476 +0,0 @@
package com.opencv.camera;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import android.content.Context;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.os.Handler;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.opencv.camera.NativeProcessor.NativeProcessorCallback;
import com.opencv.camera.NativeProcessor.PoolCallback;
public class NativePreviewer extends SurfaceView implements
SurfaceHolder.Callback, Camera.PreviewCallback, NativeProcessorCallback {
private String whitebalance_mode = "auto";
/**
* Constructor useful for defining a NativePreviewer in android layout xml
*
* @param context
* @param attributes
*/
public NativePreviewer(Context context, AttributeSet attributes) {
super(context, attributes);
listAllCameraMethods();
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
/*
* TODO get this working! Can't figure out how to define these in xml
*/
preview_width = attributes.getAttributeIntValue("opencv",
"preview_width", 600);
preview_height = attributes.getAttributeIntValue("opencv",
"preview_height", 600);
Log.d("NativePreviewer", "Trying to use preview size of "
+ preview_width + " " + preview_height);
processor = new NativeProcessor();
setZOrderMediaOverlay(false);
}
/**
*
* @param context
* @param preview_width
* the desired camera preview width - will attempt to get as
* close to this as possible
* @param preview_height
* the desired camera preview height
*/
public NativePreviewer(Context context, int preview_width,
int preview_height) {
super(context);
listAllCameraMethods();
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
this.preview_width = preview_width;
this.preview_height = preview_height;
processor = new NativeProcessor();
setZOrderMediaOverlay(false);
}
/**
* Only call this in the onCreate function of the instantiating activity
*
* @param width
* desired width
* @param height
* desired height
*/
public void setPreviewSize(int width, int height){
preview_width = width;
preview_height = height;
Log.d("NativePreviewer", "Trying to use preview size of "
+ preview_width + " " + preview_height);
}
public void setParamsFromPrefs(Context ctx){
int size[] ={0,0};
CameraConfig.readImageSize(ctx, size);
int mode = CameraConfig.readCameraMode(ctx);
setPreviewSize(size[0], size[1]);
setGrayscale(mode == CameraConfig.CAMERA_MODE_BW);
whitebalance_mode = CameraConfig.readWhitebalance(ctx);
}
public void surfaceCreated(SurfaceHolder holder) {
}
public void surfaceDestroyed(SurfaceHolder holder) {
releaseCamera();
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
try {
initCamera(mHolder);
} catch (InterruptedException e) {
e.printStackTrace();
return;
}
if (mCamera == null)
return; // camera failed to open; nothing to configure
// Now that the size is known, set up the camera parameters and begin
// the preview.
Camera.Parameters parameters = mCamera.getParameters();
List<Camera.Size> pvsizes = mCamera.getParameters()
.getSupportedPreviewSizes();
// pick the supported preview size whose width is closest to the request
int best_width = 1000000;
int best_height = 1000000;
int bdist = 100000;
for (Size x : pvsizes) {
if (Math.abs(x.width - preview_width) < bdist) {
bdist = Math.abs(x.width - preview_width);
best_width = x.width;
best_height = x.height;
}
}
preview_width = best_width;
preview_height = best_height;
Log.d("NativePreviewer", "Determined compatible preview size is: ("
+ preview_width + "," + preview_height + ")");
Log.d("NativePreviewer", "Supported params: "
+ mCamera.getParameters().flatten());
List<String> whiteBalanceModes = parameters.getSupportedWhiteBalance();
if (whiteBalanceModes != null
&& whiteBalanceModes.contains(whitebalance_mode) ) {
parameters.setWhiteBalance(whitebalance_mode);
}
if (parameters.get("meter-mode") != null)
parameters.set("meter-mode", "meter-average");
List<String> fmodes = mCamera.getParameters().getSupportedFocusModes();
if(fmodes != null)
{
int idx = fmodes.indexOf(Camera.Parameters.FOCUS_MODE_INFINITY);
if (idx != -1) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
} else if (fmodes.indexOf(Camera.Parameters.FOCUS_MODE_FIXED) != -1) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
}
if (fmodes.indexOf(Camera.Parameters.FOCUS_MODE_AUTO) != -1) {
hasAutoFocus = true;
}
}
List<String> scenemodes = mCamera.getParameters()
.getSupportedSceneModes();
if (scenemodes != null)
if (scenemodes.indexOf(Camera.Parameters.SCENE_MODE_ACTION) != -1) {
parameters.setSceneMode(Camera.Parameters.SCENE_MODE_ACTION);
Log.d("NativePreviewer", "set scenemode to action");
}
parameters.setPreviewSize(preview_width, preview_height);
mCamera.setParameters(parameters);
pixelinfo = new PixelFormat();
pixelformat = mCamera.getParameters().getPreviewFormat();
PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo);
Size preview_size = mCamera.getParameters().getPreviewSize();
preview_width = preview_size.width;
preview_height = preview_size.height;
int bufSize = preview_width * preview_height * pixelinfo.bitsPerPixel
/ 8;
// Must call this before calling addCallbackBuffer to get all the
// reflection variables setup
initForACB();
initForPCWB();
// Use only one buffer, so that we don't preview too many frames and bog
// down system
byte[] buffer = new byte[bufSize];
addCallbackBuffer(buffer);
setPreviewCallbackWithBuffer();
mCamera.startPreview();
}
public void postautofocus(int delay) {
if (hasAutoFocus)
handler.postDelayed(autofocusrunner, delay);
}
/**
* Hands each preview frame off to the NativeProcessor and logs the
* effective frame rate every 100 frames. The buffer is returned to the
* camera's buffer queue via onDoneNativeProcessing once native
* processing completes.
*/
public void onPreviewFrame(byte[] data, Camera camera) {
if (start == null) {
start = new Date();
}
processor.post(data, preview_width, preview_height, pixelformat,
System.nanoTime(), this);
fcount++;
if (fcount % 100 == 0) {
double ms = (new Date()).getTime() - start.getTime();
Log.i("NativePreviewer", "fps:" + fcount / (ms / 1000.0));
start = new Date();
fcount = 0;
}
}
@Override
public void onDoneNativeProcessing(byte[] buffer) {
addCallbackBuffer(buffer);
}
public void addCallbackStack(LinkedList<PoolCallback> callbackstack) {
processor.addCallbackStack(callbackstack);
}
/**
* This must be called when the activity pauses, in Activity.onPause.
* It has the side effect of clearing the callback stack.
*
*/
public void onPause() {
releaseCamera();
addCallbackStack(null);
processor.stop();
}
public void onResume() {
processor.start();
}
private Method mPCWB;
private void initForPCWB() {
try {
mPCWB = Class.forName("android.hardware.Camera").getMethod(
"setPreviewCallbackWithBuffer", PreviewCallback.class);
} catch (Exception e) {
Log.e("NativePreviewer",
"Problem setting up for setPreviewCallbackWithBuffer: "
+ e.toString());
}
}
/**
* This method allows you to add a byte buffer to the queue of buffers to be
* used by preview. See:
* http://android.git.kernel.org/?p=platform/frameworks/base.git;a=blob;f=core/java/android/hardware/Camera.java;hb=9db3d07b9620b4269ab33f78604a36327e536ce1
*
* @param b
* The buffer to register. Size should be width * height *
* bitsPerPixel / 8.
*/
private void addCallbackBuffer(byte[] b) {
try {
mAcb.invoke(mCamera, b);
} catch (Exception e) {
Log.e("NativePreviewer",
"invoking addCallbackBuffer failed: " + e.toString());
}
}
/**
* Use this method instead of setPreviewCallback if you want to use manually
* allocated buffers. Assumes that "this" implements Camera.PreviewCallback
*/
private void setPreviewCallbackWithBuffer() {
// mCamera.setPreviewCallback(this);
// return;
try {
// If we were able to find the setPreviewCallbackWithBuffer method of
// Camera, we can now invoke it on our Camera instance, setting 'this'
// to be the callback handler
mPCWB.invoke(mCamera, this);
// Log.d("NativePrevier","setPreviewCallbackWithBuffer: Called method");
} catch (Exception e) {
Log.e("NativePreviewer", e.toString());
}
}
@SuppressWarnings("unused")
private void clearPreviewCallbackWithBuffer() {
// mCamera.setPreviewCallback(this);
// return;
try {
// Invoke the hidden setPreviewCallbackWithBuffer method with a null
// argument to clear the callback handler
mPCWB.invoke(mCamera, (PreviewCallback) null);
// Log.d("NativePrevier","setPreviewCallbackWithBuffer: cleared");
} catch (Exception e) {
Log.e("NativePreviewer", e.toString());
}
}
/**
* These variables are re-used over and over by addCallbackBuffer
*/
private Method mAcb;
private void initForACB() {
try {
mAcb = Class.forName("android.hardware.Camera").getMethod(
"addCallbackBuffer", byte[].class);
} catch (Exception e) {
Log.e("NativePreviewer",
"Problem setting up for addCallbackBuffer: " + e.toString());
}
}
private Runnable autofocusrunner = new Runnable() {
@Override
public void run() {
mCamera.autoFocus(autocallback);
}
};
private Camera.AutoFocusCallback autocallback = new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
if (!success)
postautofocus(1000);
}
};
/**
* This method will list all methods of the android.hardware.Camera class,
* even the hidden ones. With the information it provides, you can use the
* same approach I took below to expose methods that were written but hidden
* in Eclair.
*/
private void listAllCameraMethods() {
try {
Class<?> c = Class.forName("android.hardware.Camera");
Method[] m = c.getMethods();
for (int i = 0; i < m.length; i++) {
Log.d("NativePreviewer", " method:" + m[i].toString());
}
} catch (Exception e) {
Log.e("NativePreviewer", e.toString());
}
}
private void initCamera(SurfaceHolder holder) throws InterruptedException {
if (mCamera == null) {
// The Surface has been created, acquire the camera and tell it
// where
// to draw.
int i = 0;
while (i++ < 5) {
try {
mCamera = Camera.open();
break;
} catch (RuntimeException e) {
Thread.sleep(200);
}
}
if (mCamera == null) {
Log.e("NativePreviewer", "could not open camera after 5 attempts");
return;
}
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
} catch (RuntimeException e) {
Log.e("camera", "stacktrace", e);
}
}
}
private void releaseCamera() {
if (mCamera != null) {
// Surface will be destroyed when we return, so stop the preview.
// Because the CameraDevice object is not a shared resource, it's
// very
// important to release it when the activity is paused.
mCamera.stopPreview();
mCamera.release();
}
// processor = null;
mCamera = null;
mAcb = null;
mPCWB = null;
}
private Handler handler = new Handler();
private Date start;
private int fcount = 0;
private boolean hasAutoFocus = false;
private SurfaceHolder mHolder;
private Camera mCamera;
private NativeProcessor processor;
private int preview_width, preview_height;
private int pixelformat;
private PixelFormat pixelinfo;
public void setGrayscale(boolean b) {
processor.setGrayscale(b);
}
}
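// Illustrative lifecycle sketch, not part of the original file: a host
// Activity (hypothetical, with a NativePreviewer field named preview) would
// typically forward its lifecycle to the previewer and install a callback
// stack on resume, as the CVCamera sample below does.
//
// @Override
// protected void onResume() {
//     super.onResume();
//     preview.setParamsFromPrefs(getApplicationContext());
//     LinkedList<NativeProcessor.PoolCallback> stack =
//             new LinkedList<NativeProcessor.PoolCallback>();
//     stack.add(myPoolCallback); // e.g. a drawing or processing callback
//     preview.addCallbackStack(stack);
//     preview.onResume();
// }
//
// @Override
// protected void onPause() {
//     super.onPause();
//     preview.onPause(); // releases the camera and clears the callback stack
// }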

View File

@ -1,285 +0,0 @@
package com.opencv.camera;
import java.util.LinkedList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import android.graphics.PixelFormat;
import android.util.Log;
import com.opencv.jni.image_pool;
import com.opencv.jni.opencv;
/** The NativeProcessor is a native processing stack engine.
*
* What this means is that the NativeProcessor handles loading
* live camera frames into native memory space, i.e. the image_pool
* and then calling a stack of PoolCallback's and passing them the
* image_pool.
*
* The image_pool index 0 is populated with the live video image
*
* Any modifications to the pool are made in place, so you may
* pass changes on to the next PoolCallback in the stack.
*
*/
public class NativeProcessor {
/** Users that would like access to the live video frames
* should implement a PoolCallback.
* The idx and pool arguments contain the images; specifically, idx == 0
* is the live video frame.
*/
static public interface PoolCallback {
void process(int idx, image_pool pool, long timestamp,
NativeProcessor nativeProcessor);
}
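// Illustrative sketch, not part of the original file: a minimal PoolCallback
// that inspects the live frame at index 0. FrameLogger is hypothetical.
//
// class FrameLogger implements NativeProcessor.PoolCallback {
//     public void process(int idx, image_pool pool, long timestamp,
//             NativeProcessor nativeProcessor) {
//         // idx == 0 holds the live video frame; in-place changes to the
//         // pool are visible to the next callback in the stack.
//         android.util.Log.d("FrameLogger", "got frame at " + timestamp);
//     }
// }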
/**At every frame, each PoolCallback is called in order and is passed
* the same pool and index
*
* @param stack A list of PoolCallback objects, that will be called in order
*/
public void addCallbackStack(LinkedList<PoolCallback> stack) {
try {
while (!stacklock.tryLock(10, TimeUnit.MILLISECONDS)) {
}
try {
nextStack = stack;
} finally {
stacklock.unlock();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
* Create a NativeProcessor. The processor will not start running until
* start is called, at which point it will operate in its own thread and
* sleep until a post is called. The processor should not be started until
* an onSurfaceChanged event, and should be shut down when the surface is
* destroyed, by calling stop().
*
*/
public NativeProcessor() {
gray_scale_only = false;
}
/** Grayscale-only is much faster because the yuv does not get decoded, and grayscale is only one
* byte per pixel - giving fast opengl texture loading.
*
* You still have access to the whole yuv image, but only grayscale is immediately available to
* use without further effort.
*
* Suggestion - use grayscale only, and save your yuv images to disk if you would like color images
*
* Also, in grayscale mode, the images in the pool are only single channel, so please keep this in mind
* when accessing the color images - check cv::Mat::channels() or cv::Mat::type() if you're messing
* with color channels
*
* @param grayscale true if you want to only process grayscale images
*/
public void setGrayscale(boolean grayscale){
gray_scale_only = grayscale;
}
/**
* A callback that allows the NativeProcessor to pass back the buffer when
* it has completed processing a frame.
*/
static protected interface NativeProcessorCallback {
/**
* Called after processing, meant to be received by the NativePreviewer
* which reuses the byte buffer for the camera preview...
*
* @param buffer
* the buffer passed to the NativeProcessor with post.
*/
void onDoneNativeProcessing(byte[] buffer);
}
protected void stop() {
mthread.interrupt();
try {
mthread.join();
} catch (InterruptedException e) {
Log.w("NativeProcessor",
"interupted while stoping " + e.getMessage());
}
mthread = null;
}
protected void start() {
mthread = new ProcessorThread();
mthread.start();
}
/**
* post is used to notify the processor that a preview frame is ready; it
* returns almost immediately. The frame is queued and handled on the
* processor's own thread.
*
* @param buffer
* a preview frame from the Android Camera onPreviewFrame
* callback
* @param width
* of preview frame
* @param height
* of preview frame
* @param format
* of preview frame
* @return true once the frame has been queued for processing
*/
protected boolean post(byte[] buffer, int width, int height, int format,
long timestamp, NativeProcessorCallback callback) {
lock.lock();
try {
NPPostObject pobj = new NPPostObject(buffer, width, height, format,
timestamp, callback);
postobjects.addFirst(pobj);
} finally {
lock.unlock();
}
return true;
}
private class ProcessorThread extends Thread {
private void process(NPPostObject pobj) throws Exception {
if (pobj.format == PixelFormat.YCbCr_420_SP) {
// add as color image, because we know how to decode this
opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
pobj.height, gray_scale_only);
} else if (pobj.format == PixelFormat.YCbCr_422_SP) {
// add as gray image, because decoding for this format is not
// implemented yet (TODO: figure out how to decode this format)
opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
pobj.height, true);
} else
throw new Exception("bad pixel format!");
for (PoolCallback x : stack) {
if (interrupted()) {
throw new InterruptedException(
"Native Processor interrupted while processing");
}
x.process(0, pool, pobj.timestamp, NativeProcessor.this);
}
pobj.done(); // tell the postobject that we're done doing
// all the processing.
}
@Override
public void run() {
try {
while (true) {
yield();
while (!stacklock.tryLock(5, TimeUnit.MILLISECONDS)) {
}
try {
if (nextStack != null) {
stack = nextStack;
nextStack = null;
}
} finally {
stacklock.unlock();
}
NPPostObject pobj = null;
while (!lock.tryLock(5, TimeUnit.MILLISECONDS)) {
}
try {
if (postobjects.isEmpty())
continue;
pobj = postobjects.removeLast();
} finally {
lock.unlock();
}
if (interrupted())
throw new InterruptedException();
if (stack != null && pobj != null)
process(pobj);
}
} catch (InterruptedException e) {
Log.i("NativeProcessor",
"native processor interupted, ending now");
} catch (Exception e) {
e.printStackTrace();
}
}
}
static private class NPPostObject {
public NPPostObject(byte[] buffer, int width, int height, int format,
long timestamp, NativeProcessorCallback callback) {
this.buffer = buffer;
this.width = width;
this.height = height;
this.format = format;
this.timestamp = timestamp;
this.callback = callback;
}
public void done() {
callback.onDoneNativeProcessing(buffer);
}
int width, height;
byte[] buffer;
int format;
long timestamp;
NativeProcessorCallback callback;
}
private LinkedList<NPPostObject> postobjects = new LinkedList<NPPostObject>();
private image_pool pool = new image_pool();
private final Lock lock = new ReentrantLock();
private LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
private boolean gray_scale_only;
private Lock stacklock = new ReentrantLock();
private LinkedList<PoolCallback> nextStack;
private ProcessorThread mthread;
}
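// Illustrative sketch, not part of the original file: driving a
// NativeProcessor by hand, mirroring what NativePreviewer does internally.
// yuvBuffer, the 640x480 size, and doneCallback are hypothetical; post(),
// start() and stop() are only reachable from within com.opencv.camera.
//
// NativeProcessor processor = new NativeProcessor();
// LinkedList<NativeProcessor.PoolCallback> stack =
//         new LinkedList<NativeProcessor.PoolCallback>();
// stack.add(new FrameLogger());
// processor.addCallbackStack(stack);
// processor.start(); // spins up the processing thread
// processor.post(yuvBuffer, 640, 480, PixelFormat.YCbCr_420_SP,
//         System.nanoTime(), doneCallback); // queue one preview frame
// processor.stop(); // interrupts and joins the thread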

View File

@ -1,411 +0,0 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.opencv.opengl;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.opengles.GL10;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.glcamera;
import com.opencv.jni.image_pool;
import android.content.Context;
import android.graphics.PixelFormat;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
/**
* A simple GLSurfaceView sub-class that demonstrates how to perform
* OpenGL ES 2.0 rendering into a GL Surface. Note the following important
* details:
*
* - The class must use a custom context factory to enable 2.0 rendering.
* See ContextFactory class definition below.
*
* - The class must use a custom EGLConfigChooser to be able to select
* an EGLConfig that supports 2.0. This is done by providing a config
* specification to eglChooseConfig() that has the attribute
* EGL10.EGL_RENDERABLE_TYPE containing the EGL_OPENGL_ES2_BIT flag
* set. See ConfigChooser class definition below.
*
* - The class must select the surface's format, then choose an EGLConfig
* that matches it exactly (with regards to red/green/blue/alpha channels
* bit depths). Failure to do so would result in an EGL_BAD_MATCH error.
*/
public class GL2CameraViewer extends GLSurfaceView{
private static String TAG = "GL2JNIView";
private static final boolean DEBUG = false;
private PoolCallback poolcallback = new PoolCallback() {
@Override
public void process(int idx, image_pool pool, long timestamp,
NativeProcessor nativeProcessor){
drawMatToGL(idx, pool);
requestRender();
}
};
public GL2CameraViewer(Context context,AttributeSet attributeSet) {
super(context,attributeSet);
init(false, 0, 0);
setZOrderMediaOverlay(true);
}
public GL2CameraViewer(Context context) {
super(context);
init(false, 0, 0);
setZOrderMediaOverlay(true);
}
public GL2CameraViewer(Context context, boolean translucent, int depth, int stencil) {
super(context);
init(translucent, depth, stencil);
setZOrderMediaOverlay(true);
}
private void init(boolean translucent, int depth, int stencil) {
/* By default, GLSurfaceView() creates a RGB_565 opaque surface.
* If we want a translucent one, we should change the surface's
* format here, using PixelFormat.TRANSLUCENT for GL Surfaces
* is interpreted as any 32-bit surface with alpha by SurfaceFlinger.
*/
if (translucent) {
this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
}
/* Setup the context factory for 2.0 rendering.
* See ContextFactory class definition below
*/
setEGLContextFactory(new ContextFactory());
/* We need to choose an EGLConfig that matches the format of
* our surface exactly. This is going to be done in our
* custom config chooser. See ConfigChooser class definition
* below.
*/
setEGLConfigChooser( translucent ?
new ConfigChooser(8, 8, 8, 8, depth, stencil) :
new ConfigChooser(5, 6, 5, 0, depth, stencil) );
/* Set the renderer responsible for frame rendering */
setRenderer(new Renderer());
setRenderMode(RENDERMODE_WHEN_DIRTY);
}
private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
Log.w(TAG, "creating OpenGL ES 2.0 context");
checkEglError("Before eglCreateContext", egl);
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
checkEglError("After eglCreateContext", egl);
return context;
}
public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
egl.eglDestroyContext(display, context);
}
}
private static void checkEglError(String prompt, EGL10 egl) {
int error;
while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
}
}
private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
mRedSize = r;
mGreenSize = g;
mBlueSize = b;
mAlphaSize = a;
mDepthSize = depth;
mStencilSize = stencil;
}
/* This EGL config specification is used to specify 2.0 rendering.
* We use a minimum size of 4 bits for red/green/blue, but will
* perform actual matching in chooseConfig() below.
*/
private static int EGL_OPENGL_ES2_BIT = 4;
private static int[] s_configAttribs2 =
{
EGL10.EGL_RED_SIZE, 4,
EGL10.EGL_GREEN_SIZE, 4,
EGL10.EGL_BLUE_SIZE, 4,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
/* Get the number of minimally matching EGL configurations
*/
int[] num_config = new int[1];
egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException("No configs match configSpec");
}
/* Allocate then read the array of minimally matching EGL configs
*/
EGLConfig[] configs = new EGLConfig[numConfigs];
egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
if (DEBUG) {
printConfigs(egl, display, configs);
}
/* Now return the "best" one
*/
return chooseConfig(egl, display, configs);
}
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
for(EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
// We need at least mDepthSize and mStencilSize bits
if (d < mDepthSize || s < mStencilSize)
continue;
// We want an *exact* match for red/green/blue/alpha
int r = findConfigAttrib(egl, display, config,
EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config,
EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config,
EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config,
EGL10.EGL_ALPHA_SIZE, 0);
if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
return config;
}
return null;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute, int defaultValue) {
if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
return mValue[0];
}
return defaultValue;
}
private void printConfigs(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
int numConfigs = configs.length;
Log.w(TAG, String.format("%d configurations", numConfigs));
for (int i = 0; i < numConfigs; i++) {
Log.w(TAG, String.format("Configuration %d:\n", i));
printConfig(egl, display, configs[i]);
}
}
private void printConfig(EGL10 egl, EGLDisplay display,
EGLConfig config) {
int[] attributes = {
EGL10.EGL_BUFFER_SIZE,
EGL10.EGL_ALPHA_SIZE,
EGL10.EGL_BLUE_SIZE,
EGL10.EGL_GREEN_SIZE,
EGL10.EGL_RED_SIZE,
EGL10.EGL_DEPTH_SIZE,
EGL10.EGL_STENCIL_SIZE,
EGL10.EGL_CONFIG_CAVEAT,
EGL10.EGL_CONFIG_ID,
EGL10.EGL_LEVEL,
EGL10.EGL_MAX_PBUFFER_HEIGHT,
EGL10.EGL_MAX_PBUFFER_PIXELS,
EGL10.EGL_MAX_PBUFFER_WIDTH,
EGL10.EGL_NATIVE_RENDERABLE,
EGL10.EGL_NATIVE_VISUAL_ID,
EGL10.EGL_NATIVE_VISUAL_TYPE,
0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
EGL10.EGL_SAMPLES,
EGL10.EGL_SAMPLE_BUFFERS,
EGL10.EGL_SURFACE_TYPE,
EGL10.EGL_TRANSPARENT_TYPE,
EGL10.EGL_TRANSPARENT_RED_VALUE,
EGL10.EGL_TRANSPARENT_GREEN_VALUE,
EGL10.EGL_TRANSPARENT_BLUE_VALUE,
0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
EGL10.EGL_LUMINANCE_SIZE,
EGL10.EGL_ALPHA_MASK_SIZE,
EGL10.EGL_COLOR_BUFFER_TYPE,
EGL10.EGL_RENDERABLE_TYPE,
0x3042 // EGL10.EGL_CONFORMANT
};
String[] names = {
"EGL_BUFFER_SIZE",
"EGL_ALPHA_SIZE",
"EGL_BLUE_SIZE",
"EGL_GREEN_SIZE",
"EGL_RED_SIZE",
"EGL_DEPTH_SIZE",
"EGL_STENCIL_SIZE",
"EGL_CONFIG_CAVEAT",
"EGL_CONFIG_ID",
"EGL_LEVEL",
"EGL_MAX_PBUFFER_HEIGHT",
"EGL_MAX_PBUFFER_PIXELS",
"EGL_MAX_PBUFFER_WIDTH",
"EGL_NATIVE_RENDERABLE",
"EGL_NATIVE_VISUAL_ID",
"EGL_NATIVE_VISUAL_TYPE",
"EGL_PRESERVED_RESOURCES",
"EGL_SAMPLES",
"EGL_SAMPLE_BUFFERS",
"EGL_SURFACE_TYPE",
"EGL_TRANSPARENT_TYPE",
"EGL_TRANSPARENT_RED_VALUE",
"EGL_TRANSPARENT_GREEN_VALUE",
"EGL_TRANSPARENT_BLUE_VALUE",
"EGL_BIND_TO_TEXTURE_RGB",
"EGL_BIND_TO_TEXTURE_RGBA",
"EGL_MIN_SWAP_INTERVAL",
"EGL_MAX_SWAP_INTERVAL",
"EGL_LUMINANCE_SIZE",
"EGL_ALPHA_MASK_SIZE",
"EGL_COLOR_BUFFER_TYPE",
"EGL_RENDERABLE_TYPE",
"EGL_CONFORMANT"
};
int[] value = new int[1];
for (int i = 0; i < attributes.length; i++) {
int attribute = attributes[i];
String name = names[i];
if ( egl.eglGetConfigAttrib(display, config, attribute, value)) {
Log.w(TAG, String.format(" %s: %d\n", name, value[0]));
} else {
// Log.w(TAG, String.format(" %s: failed\n", name));
while (egl.eglGetError() != EGL10.EGL_SUCCESS);
}
}
}
// Subclasses can adjust these values:
protected int mRedSize;
protected int mGreenSize;
protected int mBlueSize;
protected int mAlphaSize;
protected int mDepthSize;
protected int mStencilSize;
private int[] mValue = new int[1];
}
glcamera mglcamera;
public void drawMatToGL(int idx, image_pool pool){
if(mglcamera != null)
mglcamera.drawMatToGL(idx, pool);
else
Log.e("android-opencv", "null glcamera!!!!");
}
public void clear(){
if(mglcamera != null)
mglcamera.clear();
else
Log.e("android-opencv", "null glcamera!!!!");
}
private class Renderer implements GLSurfaceView.Renderer {
public void onDrawFrame(GL10 gl) {
// guard against frames delivered after onPause() nulls mglcamera
if (mglcamera != null)
mglcamera.step();
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
if (mglcamera != null)
mglcamera.init(width, height);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
}
@Override
public void onPause() {
mglcamera = null;
super.onPause();
}
@Override
public void onResume() {
mglcamera = new glcamera();
super.onResume();
}
public PoolCallback getDrawCallback() {
return poolcallback;
}
}
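// Illustrative sketch, not part of the original file: the viewer's draw
// callback is normally placed last on a NativePreviewer callback stack, so
// frames are drawn after any processing callbacks have run (preview and
// glview are hypothetical fields; see the CVCamera sample below).
//
// LinkedList<NativeProcessor.PoolCallback> stack =
//         new LinkedList<NativeProcessor.PoolCallback>();
// stack.add(glview.getDrawCallback());
// preview.addCallbackStack(stack);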

View File

@ -1,34 +0,0 @@
package com.opencv.utils;
import java.nio.ByteBuffer;
import com.opencv.jni.Mat;
import com.opencv.jni.Size;
import com.opencv.jni.opencv;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
public class BitmapBridge {
static void copyBitmap(Bitmap bmap, Mat mat) throws Exception {
if (bmap.getConfig() == null || bmap.getConfig() != Config.ARGB_8888)
throw new Exception("bad config");
Size sz = new Size(bmap.getWidth(), bmap.getHeight());
mat.create(sz, opencv.CV_8UC4);
ByteBuffer buffer = ByteBuffer.allocate(4 * bmap.getWidth()
* bmap.getHeight());
bmap.copyPixelsToBuffer(buffer);
opencv.copyBufferToMat(mat, buffer);
}
static Bitmap matToBitmap(Mat mat) {
Bitmap bmap = Bitmap.createBitmap(mat.getCols(), mat.getRows(),
Config.ARGB_8888);
ByteBuffer buffer = ByteBuffer.allocate(4 * bmap.getWidth()
* bmap.getHeight());
opencv.copyMatToBuffer(buffer, mat);
bmap.copyPixelsFromBuffer(buffer);
return bmap;
}
}
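// Illustrative usage sketch, not part of the original file: round-tripping
// between an Android Bitmap and a native Mat. srcBitmap is hypothetical and
// must use Config.ARGB_8888; a no-arg Mat constructor is assumed to exist in
// the SWIG wrapper.
//
// Mat mat = new Mat();
// BitmapBridge.copyBitmap(srcBitmap, mat); // Bitmap -> Mat, throws on bad config
// Bitmap out = BitmapBridge.matToBitmap(mat); // Mat -> Bitmap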

View File

@ -1,36 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.theveganrobot.cvcamera" android:versionCode="7" android:versionName="7.0"
>
<application android:debuggable="false" android:icon="@drawable/icon">
<activity android:name=".CVCamera" android:screenOrientation="landscape"
android:configChanges="orientation|keyboardHidden|keyboard"
android:label="@string/app_name"
>
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity android:name="com.opencv.camera.CameraConfig" android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="orientation|keyboardHidden|keyboard">
</activity>
</application>
<uses-feature android:glEsVersion="0x00020000" android:required="true"/>
<uses-feature android:name="android.hardware.camera" android:required="true"/>
<uses-permission android:name="android.permission.CAMERA"></uses-permission>
<uses-sdk android:minSdkVersion="7" android:targetSdkVersion="7" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"></uses-permission>
</manifest>

View File

@ -1,5 +0,0 @@
cmake_minimum_required(VERSION 2.8)
project(CVCamera)
add_subdirectory(jni)

View File

@ -1,2 +0,0 @@
see http://code.google.com/p/android-opencv/wiki/CVCamera

View File

@ -1,12 +0,0 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
android.library.reference.1=../../android-opencv
# Project target.
target=android-7

View File

@ -1,62 +0,0 @@
#########################################################
# Find opencv and android-opencv
#########################################################
set(OpenCV_DIR ${CMAKE_SOURCE_DIR}/../../build
CACHE PATH "The path where you built opencv for android")
set(AndroidOpenCV_DIR ${CMAKE_SOURCE_DIR}/../../android-opencv/build
CACHE PATH "The path where you built android-opencv")
find_package(OpenCV REQUIRED)
FIND_PACKAGE(AndroidOpenCV REQUIRED )
#########################################################
#c flags, included, and lib dependencies
#########################################################
#notice the "recycling" of CMAKE_C_FLAGS
#this is necessary to pick up android flags
set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -pedantic -fPIC" )
INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR})
set( LIBRARY_DEPS ${AndroidOpenCV_LIBS} ${OpenCV_LIBS} )
if(ANDROID)
set( LIBRARY_DEPS ${LIBRARY_DEPS} log dl)
endif(ANDROID)
#########################################################
#SWIG STUFF
#########################################################
#the java package to place swig generated java files in
set(MY_PACKAGE com.theveganrobot.cvcamera.jni)
if(NOT ANDROID)
#non android swig and jni
#jni is available by default on android
find_package(JNI REQUIRED)
include_directories(${JNI_INCLUDE_DIRS})
FIND_PACKAGE(SWIG)
endif()
INCLUDE(${SWIG_USE_FILE}) #on android this is found by the cmake toolchain
if(ANDROID)
#this will set the output path for the java package
#and properly create the package declarations in generated java sources
SET_SWIG_JAVA_PACKAGE( ${MY_PACKAGE} ) #defined in the android toolchain
endif(ANDROID)
#this add's the swig path for the opencv wrappers
SET(CMAKE_SWIG_FLAGS ${CMAKE_SWIG_FLAGS} "-I${AndroidOpenCV_SWIG_DIR}" )
SET_SOURCE_FILES_PROPERTIES(cvcamera.i PROPERTIES CPLUSPLUS ON)
#add the swig module, giving it the name, java, and then all of the source files
SWIG_ADD_MODULE(cvcamera java
cvcamera.i #swig file
Processor.cpp #cpp files can be compiled to
)
#link the module like any other
target_link_libraries(cvcamera ${LIBRARY_DEPS} )

View File

@ -1,286 +0,0 @@
/*
* Processor.cpp
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#include "Processor.h"
#include <sys/stat.h>
using namespace cv;
Processor::Processor() :
stard(20/*max_size*/, 8/*response_threshold*/, 15/*line_threshold_projected*/, 8/*line_threshold_binarized*/, 5/*suppress_nonmax_size*/),
fastd(20/*threshold*/, true/*nonmax_suppression*/),
surfd(100./*hessian_threshold*/, 1/*octaves*/, 2/*octave_layers*/)
{
}
Processor::~Processor()
{
}
void Processor::detectAndDrawFeatures(int input_idx, image_pool* pool, int feature_type)
{
FeatureDetector* fd = 0;
switch (feature_type)
{
case DETECT_SURF:
fd = &surfd;
break;
case DETECT_FAST:
fd = &fastd;
break;
case DETECT_STAR:
fd = &stard;
break;
}
Mat greyimage = pool->getGrey(input_idx);
Mat img = pool->getImage(input_idx);
if (img.empty() || greyimage.empty() || fd == 0)
return; //no image at input_idx!
keypoints.clear();
//if(grayimage->step1() > sizeof(uchar)) return;
//cvtColor(*img,*grayimage,CV_RGB2GRAY);
fd->detect(greyimage, keypoints);
for (vector<KeyPoint>::const_iterator it = keypoints.begin(); it != keypoints.end(); ++it)
{
circle(img, it->pt, 3, cvScalar(255, 0, 255, 0));
}
//pool->addImage(output_idx,outimage);
}
static double computeReprojectionErrors(const vector<vector<Point3f> >& objectPoints,
const vector<vector<Point2f> >& imagePoints, const vector<Mat>& rvecs,
const vector<Mat>& tvecs, const Mat& cameraMatrix, const Mat& distCoeffs,
vector<float>& perViewErrors)
{
vector<Point2f> imagePoints2;
int i, totalPoints = 0;
double totalErr = 0, err;
perViewErrors.resize(objectPoints.size());
for (i = 0; i < (int)objectPoints.size(); i++)
{
projectPoints(Mat(objectPoints[i]), rvecs[i], tvecs[i], cameraMatrix, distCoeffs, imagePoints2);
err = norm(Mat(imagePoints[i]), Mat(imagePoints2), CV_L1);
int n = (int)objectPoints[i].size();
perViewErrors[i] = err / n;
totalErr += err;
totalPoints += n;
}
return totalErr / totalPoints;
}
static void calcChessboardCorners(Size boardSize, float squareSize, vector<Point3f>& corners)
{
corners.resize(0);
for (int i = 0; i < boardSize.height; i++)
for (int j = 0; j < boardSize.width; j++)
corners.push_back(Point3f(float(j * squareSize), float(i * squareSize), 0));
}
/**from opencv/samples/cpp/calibration.cpp
*
*/
static bool runCalibration(vector<vector<Point2f> > imagePoints, Size imageSize, Size boardSize, float squareSize,
float aspectRatio, int flags, Mat& cameraMatrix, Mat& distCoeffs, vector<Mat>& rvecs,
vector<Mat>& tvecs, vector<float>& reprojErrs, double& totalAvgErr)
{
cameraMatrix = Mat::eye(3, 3, CV_64F);
if (flags & CV_CALIB_FIX_ASPECT_RATIO)
cameraMatrix.at<double> (0, 0) = aspectRatio;
distCoeffs = Mat::zeros(5, 1, CV_64F);
vector<vector<Point3f> > objectPoints(1);
calcChessboardCorners(boardSize, squareSize, objectPoints[0]);
for (size_t i = 1; i < imagePoints.size(); i++)
objectPoints.push_back(objectPoints[0]);
calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, flags);
bool ok = checkRange(cameraMatrix, CV_CHECK_QUIET) && checkRange(distCoeffs, CV_CHECK_QUIET);
totalAvgErr
= computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix, distCoeffs, reprojErrs);
return ok;
}
bool Processor::detectAndDrawChessboard(int idx, image_pool* pool)
{
Mat grey = pool->getGrey(idx);
if (grey.empty())
return false;
vector<Point2f> corners;
IplImage iplgrey = grey;
if (!cvCheckChessboard(&iplgrey, Size(6, 8)))
return false;
bool patternfound = findChessboardCorners(grey, Size(6, 8), corners);
Mat img = pool->getImage(idx);
if (corners.size() < 1)
return false;
cornerSubPix(grey, corners, Size(11, 11), Size(-1, -1), TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
if (patternfound)
imagepoints.push_back(corners);
drawChessboardCorners(img, Size(6, 8), Mat(corners), patternfound);
imgsize = grey.size();
return patternfound;
}
void Processor::drawText(int i, image_pool* pool, const char* ctext)
{
// Use "y" to show that the baseLine is about
string text = ctext;
int fontFace = FONT_HERSHEY_COMPLEX_SMALL;
double fontScale = .8;
int thickness = .5;
Mat img = pool->getImage(i);
int baseline = 0;
Size textSize = getTextSize(text, fontFace, fontScale, thickness, &baseline);
baseline += thickness;
// center the text
Point textOrg((img.cols - textSize.width) / 2, (img.rows - textSize.height * 2));
// draw the box
rectangle(img, textOrg + Point(0, baseline), textOrg + Point(textSize.width, -textSize.height), Scalar(0, 0, 255),
CV_FILLED);
// ... and the baseline first
line(img, textOrg + Point(0, thickness), textOrg + Point(textSize.width, thickness), Scalar(0, 0, 255));
// then put the text itself
putText(img, text, textOrg, fontFace, fontScale, Scalar::all(255), thickness, 8);
}
void saveCameraParams(const string& filename, Size imageSize, Size boardSize, float squareSize, float aspectRatio,
int flags, const Mat& cameraMatrix, const Mat& distCoeffs, const vector<Mat>& rvecs,
const vector<Mat>& tvecs, const vector<float>& reprojErrs,
const vector<vector<Point2f> >& imagePoints, double totalAvgErr)
{
FileStorage fs(filename, FileStorage::WRITE);
time_t t;
time(&t);
struct tm *t2 = localtime(&t);
char buf[1024];
strftime(buf, sizeof(buf) - 1, "%c", t2);
fs << "calibration_time" << buf;
if (!rvecs.empty() || !reprojErrs.empty())
fs << "nframes" << (int)std::max(rvecs.size(), reprojErrs.size());
fs << "image_width" << imageSize.width;
fs << "image_height" << imageSize.height;
fs << "board_width" << boardSize.width;
fs << "board_height" << boardSize.height;
fs << "squareSize" << squareSize;
if (flags & CV_CALIB_FIX_ASPECT_RATIO)
fs << "aspectRatio" << aspectRatio;
if (flags != 0)
{
sprintf(buf, "flags: %s%s%s%s", flags & CV_CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess" : "", flags
& CV_CALIB_FIX_ASPECT_RATIO ? "+fix_aspectRatio" : "", flags & CV_CALIB_FIX_PRINCIPAL_POINT
? "+fix_principal_point" : "", flags & CV_CALIB_ZERO_TANGENT_DIST ? "+zero_tangent_dist" : "");
cvWriteComment(*fs, buf, 0);
}
fs << "flags" << flags;
fs << "camera_matrix" << cameraMatrix;
fs << "distortion_coefficients" << distCoeffs;
fs << "avg_reprojection_error" << totalAvgErr;
if (!reprojErrs.empty())
fs << "per_view_reprojection_errors" << Mat(reprojErrs);
if (!rvecs.empty() && !tvecs.empty())
{
Mat bigmat(rvecs.size(), 6, CV_32F);
for (size_t i = 0; i < rvecs.size(); i++)
{
Mat r = bigmat(Range(i, i + 1), Range(0, 3));
Mat t = bigmat(Range(i, i + 1), Range(3, 6));
rvecs[i].copyTo(r);
tvecs[i].copyTo(t);
}
cvWriteComment(*fs, "a set of 6-tuples (rotation vector + translation vector) for each view", 0);
fs << "extrinsic_parameters" << bigmat;
}
if (!imagePoints.empty())
{
Mat imagePtMat(imagePoints.size(), imagePoints[0].size(), CV_32FC2);
for (size_t i = 0; i < imagePoints.size(); i++)
{
Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols);
Mat(imagePoints[i]).copyTo(r);
}
fs << "image_points" << imagePtMat;
}
}
void Processor::resetChess()
{
imagepoints.clear();
}
void Processor::calibrate(const char* filename)
{
vector<Mat> rvecs, tvecs;
vector<float> reprojErrs;
double totalAvgErr = 0;
int flags = 0;
bool writeExtrinsics = true;
bool writePoints = true;
bool ok = runCalibration(imagepoints, imgsize, Size(6, 8), 1.f, 1.f, flags, K, distortion, rvecs, tvecs, reprojErrs,
totalAvgErr);
if (ok)
{
saveCameraParams(filename, imgsize, Size(6, 8), 1.f, 1.f, flags, K, distortion, writeExtrinsics ? rvecs : vector<
Mat> (), writeExtrinsics ? tvecs : vector<Mat> (), writeExtrinsics ? reprojErrs : vector<float> (), writePoints
? imagepoints : vector<vector<Point2f> > (), totalAvgErr);
}
}
int Processor::getNumberDetectedChessboards()
{
return imagepoints.size();
}

View File

@ -1,56 +0,0 @@
/*
* Processor.h
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#ifndef PROCESSOR_H_
#define PROCESSOR_H_
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <vector>
#include "image_pool.h"
#define DETECT_FAST 0
#define DETECT_STAR 1
#define DETECT_SURF 2
class Processor
{
public:
Processor();
virtual ~Processor();
void detectAndDrawFeatures(int idx, image_pool* pool, int feature_type);
bool detectAndDrawChessboard(int idx, image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
private:
cv::StarFeatureDetector stard;
cv::FastFeatureDetector fastd;
cv::SurfFeatureDetector surfd;
std::vector<cv::KeyPoint> keypoints;
std::vector<std::vector<cv::Point2f> > imagepoints;
cv::Mat K;
cv::Mat distortion;
cv::Size imgsize;
};
#endif /* PROCESSOR_H_ */

View File

@ -1,51 +0,0 @@
/*
* include the headers required by the generated cpp code
*/
%{
#include "Processor.h"
#include "image_pool.h"
using namespace cv;
%}
/**
* some constants, see Processor.h
*/
#define DETECT_FAST 0
#define DETECT_STAR 1
#define DETECT_SURF 2
//import the android-cv.i file so that swig is aware of all that has been previously defined
//notice that it is not an include....
%import "android-cv.i"
//make sure to import the image_pool as it is
//referenced by the Processor java generated
//class
%typemap(javaimports) Processor "
import com.opencv.jni.image_pool;// import the image_pool interface for playing nice with
// android-opencv
/** Processor - for processing images that are stored in an image pool
*/"
class Processor {
public:
Processor();
virtual ~Processor();
void detectAndDrawFeatures(int idx, image_pool* pool, int feature_type);
bool detectAndDrawChessboard(int idx,image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
};

View File

@ -1,36 +0,0 @@
/* File : android-cv.i */
%module cvcamera
/*
* the java import code must be included for the opencv jni wrappers
* this means that the android project must reference opencv/android as a project
* see the default.properties for how this is done
*/
%pragma(java) jniclassimports=%{
import com.opencv.jni.*; //import the android-opencv jni wrappers
%}
%pragma(java) jniclasscode=%{
static {
try {
//load the cvcamera library, make sure that libcvcamera.so is in your <project>/libs/armeabi directory
//so that android sdk automatically installs it along with the app.
//the android-opencv lib must be loaded first in order for the cvcamera
//lib to be found
//check the apk generated, by opening it in an archive manager, to verify that
//both these libraries are present
System.loadLibrary("android-opencv");
System.loadLibrary("cvcamera");
} catch (UnsatisfiedLinkError e) {
//badness
throw e;
}
}
%}
//include the Processor class swig interface file
%include "Processor.i"

View File

@ -1,2 +0,0 @@
android update project --name CVCamera \
--path .

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -1,8 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
>
</LinearLayout>

View File

@ -1,7 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">CVCamera</string>
<string name="app_description">app to demo using android camera and passing data to opencv layer.</string>
<string name="Changes">Release 0.0.1 - first demo of using the OpenCV library with camera data</string>
</resources>

View File

@ -1,505 +0,0 @@
package com.theveganrobot.cvcamera;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.LinkedList;
import java.util.Scanner;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.Toast;
import com.opencv.camera.CameraConfig;
import com.opencv.camera.NativePreviewer;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.image_pool;
import com.opencv.opengl.GL2CameraViewer;
import com.theveganrobot.cvcamera.jni.Processor;
import com.theveganrobot.cvcamera.jni.cvcamera;
public class CVCamera extends Activity {
static final int DIALOG_CALIBRATING = 0;
static final int DIALOG_CALIBRATION_FILE = 1;
private static final int DIALOG_OPENING_TUTORIAL = 2;
private static final int DIALOG_TUTORIAL_FAST = 3;
private static final int DIALOG_TUTORIAL_SURF = 4;
private static final int DIALOG_TUTORIAL_STAR = 5;
private static final int DIALOG_TUTORIAL_CHESS = 6;
private boolean captureChess;
ProgressDialog makeCalibDialog() {
ProgressDialog progressDialog;
progressDialog = new ProgressDialog(this);
progressDialog.setMessage("Calibrating. Please wait...");
progressDialog.setCancelable(false);
return progressDialog;
}
void toasts(int id) {
switch (id) {
case DIALOG_OPENING_TUTORIAL:
Toast.makeText(this, "Try clicking the menu for CV options.",
Toast.LENGTH_LONG).show();
break;
case DIALOG_TUTORIAL_FAST:
Toast.makeText(this, "Detecting and Displaying FAST features",
Toast.LENGTH_LONG).show();
break;
case DIALOG_TUTORIAL_SURF:
Toast.makeText(this, "Detecting and Displaying SURF features",
Toast.LENGTH_LONG).show();
break;
case DIALOG_TUTORIAL_STAR:
Toast.makeText(this, "Detecting and Displaying STAR features",
Toast.LENGTH_LONG).show();
break;
case DIALOG_TUTORIAL_CHESS:
Toast.makeText(
this,
"Calibration Mode, Point at a chessboard pattern and press the camera button, space,"
+ "or the DPAD to capture.", Toast.LENGTH_LONG)
.show();
break;
default:
break;
}
}
@Override
protected Dialog onCreateDialog(int id) {
Dialog dialog;
switch (id) {
case DIALOG_CALIBRATING:
dialog = makeCalibDialog();
break;
case DIALOG_CALIBRATION_FILE:
dialog = makeCalibFileAlert();
break;
default:
dialog = null;
}
return dialog;
}
private Dialog makeCalibFileAlert() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setMessage(calib_text)
.setTitle("camera.yml at " + calib_file_loc)
.setCancelable(false)
.setPositiveButton("Ok", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
}
});
AlertDialog alert = builder.create();
return alert;
}
/*
* (non-Javadoc)
*
* @see android.app.Activity#onKeyUp(int, android.view.KeyEvent)
*/
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyEvent.KEYCODE_CAMERA:
case KeyEvent.KEYCODE_SPACE:
case KeyEvent.KEYCODE_DPAD_CENTER:
captureChess = true;
return true;
default:
return super.onKeyUp(keyCode, event);
}
}
/*
* (non-Javadoc)
*
* @see android.app.Activity#onKeyLongPress(int, android.view.KeyEvent)
*/
@Override
public boolean onKeyLongPress(int keyCode, KeyEvent event) {
return super.onKeyLongPress(keyCode, event);
}
/**
* Prevents the screen from being turned off by the system.
*/
public void disableScreenTurnOff() {
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
/**
* Sets the orientation to landscape.
*/
public void setOrientation() {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
/**
* Maximize the application.
*/
public void setFullscreen() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
}
public void setNoTitle() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
menu.add("FAST");
menu.add("STAR");
menu.add("SURF");
menu.add("Chess");
menu.add("Settings");
return true;
}
private NativePreviewer mPreview;
private GL2CameraViewer glview;
@Override
public boolean onOptionsItemSelected(MenuItem item) {
LinkedList<PoolCallback> defaultcallbackstack = new LinkedList<PoolCallback>();
defaultcallbackstack.addFirst(glview.getDrawCallback());
if (item.getTitle().equals("FAST")) {
defaultcallbackstack.addFirst(new FastProcessor());
toasts(DIALOG_TUTORIAL_FAST);
}
else if (item.getTitle().equals("Chess")) {
defaultcallbackstack.addFirst(new CalibrationProcessor());
toasts(DIALOG_TUTORIAL_CHESS);
}
else if (item.getTitle().equals("STAR")) {
defaultcallbackstack.addFirst(new STARProcessor());
toasts(DIALOG_TUTORIAL_STAR);
}
else if (item.getTitle().equals("SURF")) {
defaultcallbackstack.addFirst(new SURFProcessor());
toasts(DIALOG_TUTORIAL_SURF);
}
else if (item.getTitle().equals("Settings")) {
Intent intent = new Intent(this,CameraConfig.class);
startActivity(intent);
}
mPreview.addCallbackStack(defaultcallbackstack);
return true;
}
@Override
public void onOptionsMenuClosed(Menu menu) {
super.onOptionsMenuClosed(menu);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setFullscreen();
disableScreenTurnOff();
FrameLayout frame = new FrameLayout(this);
// Create our Preview view and set it as the content of our activity.
mPreview = new NativePreviewer(getApplication(), 640, 480);
LayoutParams params = new LayoutParams(LayoutParams.WRAP_CONTENT,
LayoutParams.WRAP_CONTENT);
params.height = getWindowManager().getDefaultDisplay().getHeight();
params.width = (int) (params.height * 4.0 / 2.88);
LinearLayout vidlay = new LinearLayout(getApplication());
vidlay.setGravity(Gravity.CENTER);
vidlay.addView(mPreview, params);
frame.addView(vidlay);
// make the glview overlay on top of the video preview
mPreview.setZOrderMediaOverlay(false);
glview = new GL2CameraViewer(getApplication(), false, 0, 0);
glview.setZOrderMediaOverlay(true);
LinearLayout gllay = new LinearLayout(getApplication());
gllay.setGravity(Gravity.CENTER);
gllay.addView(glview, params);
frame.addView(gllay);
ImageButton capture_button = new ImageButton(getApplicationContext());
capture_button.setImageDrawable(getResources().getDrawable(
android.R.drawable.ic_menu_camera));
capture_button.setLayoutParams(new LayoutParams(
LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
capture_button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
captureChess = true;
}
});
LinearLayout buttons = new LinearLayout(getApplicationContext());
buttons.setLayoutParams(new LayoutParams(LayoutParams.WRAP_CONTENT,
LayoutParams.WRAP_CONTENT));
buttons.addView(capture_button);
Button focus_button = new Button(getApplicationContext());
focus_button.setLayoutParams(new LayoutParams(
LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
focus_button.setText("Focus");
focus_button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
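// request an autofocus pass (the argument is presumably a delay in ms)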
mPreview.postautofocus(100);
}
});
buttons.addView(focus_button);
frame.addView(buttons);
setContentView(frame);
toasts(DIALOG_OPENING_TUTORIAL);
}
@Override
public boolean onTrackballEvent(MotionEvent event) {
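// releasing the trackball doubles as a chessboard-capture trigger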
if (event.getAction() == MotionEvent.ACTION_UP) {
captureChess = true;
return true;
}
return super.onTrackballEvent(event);
}
@Override
protected void onPause() {
super.onPause();
// clears the callback stack
mPreview.onPause();
glview.onPause();
}
@Override
protected void onResume() {
super.onResume();
glview.onResume();
mPreview.setParamsFromPrefs(getApplicationContext());
// add an initial callback stack to the preview on resume...
// this one will just draw the frames to opengl
LinkedList<NativeProcessor.PoolCallback> cbstack = new LinkedList<PoolCallback>();
cbstack.add(glview.getDrawCallback());
mPreview.addCallbackStack(cbstack);
mPreview.onResume();
}
// final processor so that these processor callbacks can access it
final Processor processor = new Processor();
class FastProcessor implements NativeProcessor.PoolCallback {
@Override
public void process(int idx, image_pool pool, long timestamp,
NativeProcessor nativeProcessor) {
processor.detectAndDrawFeatures(idx, pool, cvcamera.DETECT_FAST);
}
}
class STARProcessor implements NativeProcessor.PoolCallback {
@Override
public void process(int idx, image_pool pool, long timestamp,
NativeProcessor nativeProcessor) {
processor.detectAndDrawFeatures(idx, pool, cvcamera.DETECT_STAR);
}
}
class SURFProcessor implements NativeProcessor.PoolCallback {
@Override
public void process(int idx, image_pool pool, long timestamp,
NativeProcessor nativeProcessor) {
processor.detectAndDrawFeatures(idx, pool, cvcamera.DETECT_SURF);
}
}
String calib_text = null;
String calib_file_loc = null;
class CalibrationProcessor implements NativeProcessor.PoolCallback {
boolean calibrated = false;
@Override
public void process(int idx, image_pool pool, long timestamp,
NativeProcessor nativeProcessor) {
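// flow: capture 10 chessboard views, then calibrate and write the result
// to <sdcard>/opencv/camera.yml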
if (calibrated) {
processor.drawText(idx, pool, "Calibrated successfully");
return;
}
if (processor.getNumberDetectedChessboards() == 10) {
File opencvdir = new File(
Environment.getExternalStorageDirectory(), "opencv");
if (!opencvdir.exists()) {
opencvdir.mkdir();
}
File calibfile = new File(opencvdir, "camera.yml");
calib_file_loc = calibfile.getAbsolutePath();
processor.calibrate(calibfile.getAbsolutePath());
Log.i("chessboard", "calibrated");
calibrated = true;
processor.resetChess();
runOnUiThread(new Runnable() {
@Override
public void run() {
removeDialog(DIALOG_CALIBRATING);
}
});
try {
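// read the freshly written YAML back in so it can be shown
// in the calibration-file dialog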
StringBuilder text = new StringBuilder();
String NL = System.getProperty("line.separator");
Scanner scanner = new Scanner(calibfile);
try {
while (scanner.hasNextLine()) {
text.append(scanner.nextLine() + NL);
}
} finally {
scanner.close();
}
calib_text = text.toString();
runOnUiThread(new Runnable() {
@Override
public void run() {
showDialog(DIALOG_CALIBRATION_FILE);
}
});
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else if (captureChess
&& processor.detectAndDrawChessboard(idx, pool)) {
runOnUiThread(new Runnable() {
String numchess = String.valueOf(processor
.getNumberDetectedChessboards());
@Override
public void run() {
Toast.makeText(CVCamera.this,
"Detected " + numchess + " of 10 chessboards",
Toast.LENGTH_SHORT).show();
}
});
Log.i("cvcamera",
"detected a chessboard, n chess boards found: "
+ String.valueOf(processor
.getNumberDetectedChessboards()));
}
captureChess = false;
if (processor.getNumberDetectedChessboards() == 10) {
runOnUiThread(new Runnable() {
@Override
public void run() {
showDialog(DIALOG_CALIBRATING);
}
});
processor.drawText(idx, pool, "Calibrating, please wait.");
}
if (processor.getNumberDetectedChessboards() < 10) {
processor.drawText(idx, pool,
"found " + processor.getNumberDetectedChessboards()
+ "/10 chessboards");
}
}
}
}

View File

@ -1,3 +0,0 @@
#!/bin/bash
echo uninstalling CVCamera from phone
adb uninstall com.theveganrobot.cvcamera

View File

@ -1,46 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.opencv.calibration" android:versionCode="1"
android:versionName="1.0">
<application android:debuggable="true" android:icon="@drawable/icon"
android:label="@string/app_name">
<activity android:name=".Calibration" android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="orientation|keyboardHidden|keyboard">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<!--
These activities are defined in the android-jni library, and just reused here
-->
<activity android:name="com.opencv.calibration.ChessBoardChooser" android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="orientation|keyboardHidden|keyboard">
</activity>
<activity android:name="com.opencv.camera.CameraConfig" android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="orientation|keyboardHidden|keyboard">
</activity>
<activity android:name="com.opencv.calibration.CalibrationViewer" android:label="@string/app_name"/>
<service android:name="com.opencv.calibration.services.CalibrationService"></service>
</application>
<uses-sdk android:minSdkVersion="7" />
<uses-feature android:glEsVersion="0x00020000" />
<uses-permission android:name="android.permission.CAMERA"></uses-permission>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"></uses-permission>
</manifest>

View File

@ -1,10 +0,0 @@
To build completely from the command line:
sh project_create.sh
ant debug
ant install
This assumes that you have already built the opencv/android/android-jni project.
If you're in Eclipse, try creating a new Android project from existing sources.
In that case, make sure that you also have the android-jni project open in Eclipse,
or the Android library dependency will give you errors.
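For reference, a complete command-line session might look like the sketch below.
Paths and the ANDTOOLCHAIN variable are assumptions; adjust them to your checkout.
# build the android-jni library first (assumed to sit two directories up)
cd ../../android-jni
mkdir build && cd build
cmake -DCMAKE_TOOLCHAIN_FILE=$ANDTOOLCHAIN .. && make
cd ../../Calibration
# then generate and build this project
sh project_create.sh
ant debug
ant install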

View File

@ -1,12 +0,0 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
android.library.reference.1=../../android-opencv/
# Project target.
target=android-7

View File

@ -1,2 +0,0 @@
android update project --name Calibration \
--path .

Binary file not shown. (Before: 5.0 KiB)

Binary file not shown. (Before: 5.0 KiB)

Binary file not shown. (Before: 161 KiB)

Binary file not shown. (Before: 5.0 KiB)

Binary file not shown. (Before: 409 B)

View File

@ -1,45 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:opencv="http://schemas.android.com/apk/res/com.opencv"
android:layout_width="fill_parent" android:layout_height="fill_parent"
android:background="@drawable/cameraback">
<com.opencv.camera.NativePreviewer
android:id="@+id/nativepreviewer" android:layout_width="400dip"
android:layout_height="300dip" android:layout_alignParentLeft="true"
android:layout_margin="20dip" android:gravity="center_horizontal|center_vertical"
android:layout_marginRight="20dip"
/>
<LinearLayout android:id="@+id/glview_layout"
android:layout_width="400dip" android:layout_height="300dip"
android:layout_alignParentLeft="true" android:layout_margin="20dip"
android:gravity="center_horizontal|center_vertical"
android:layout_marginRight="20dip">
</LinearLayout>
<LinearLayout android:layout_width="wrap_content"
android:layout_height="fill_parent" android:orientation="vertical"
android:layout_margin="20dip" android:gravity="center_horizontal|center_vertical"
android:layout_alignParentRight="true">
<ImageButton android:src="@android:drawable/ic_menu_camera"
android:id="@+id/capture" android:layout_width="60dip"
android:layout_height="60dip"></ImageButton>
<ImageButton android:src="@android:drawable/ic_menu_save"
android:id="@+id/calibrate" android:layout_width="60dip"
android:layout_height="60dip"></ImageButton>
<TextView android:id="@+id/numberpatterns"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:padding="10dip" android:background="@android:color/white"
android:text="0" />
</LinearLayout>
</RelativeLayout>

View File

@ -1,11 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<menu xmlns:android="http://schemas.android.com/apk/res/android">
<item android:id="@+id/calibrate" android:titleCondensed="Calib"
android:title="Calibrate"></item>
<item android:id="@+id/patternsize" android:titleCondensed="Size"
android:title="@string/patternsize" android:icon="@drawable/patternicon"></item>
<item android:id="@+id/settings" android:title="Settings" android:icon="@android:drawable/ic_menu_preferences"></item>
<item android:id="@+id/help" android:title="Help" android:icon="@android:drawable/ic_menu_help"></item>
</menu>

View File

@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="good_color">#00ff00</color>
<color name="bad_color">#FF0000</color>
</resources>

View File

@ -1,13 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="calibfile">/opencv/camera.yml</string>
<string name="sdcarddir">/opencv</string>
<string name="calibration_service_started">Calibration calculations have started...</string>
<string name="calibration_service_stopped">Calibration calculations has stopped.</string>
<string name="calibration_service_finished">Calibration finished, you camera is calibrated.</string>
<string name="calibration_service_label">Calibration</string>
<string name="calibration_not_enough">Please capture atleast 10 images of the pattern!</string>
<string name="sdcard_error_msg"> Please make sure that you\'re sdcard is not mounted to you\'re computer, and that you have an sdcard that is writable on your device.</string>
</resources>

View File

@ -1,325 +0,0 @@
package com.opencv.calibration;
import java.io.File;
import java.io.IOException;
import java.util.LinkedList;
import android.app.Activity;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.os.IBinder;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.opencv.calibration.Calibrator.CalibrationCallback;
import com.opencv.calibration.services.CalibrationService;
import com.opencv.camera.CameraConfig;
import com.opencv.camera.NativePreviewer;
import com.opencv.camera.NativeProcessor;
import com.opencv.misc.SDCardChecker;
import com.opencv.opengl.GL2CameraViewer;
public class Calibration extends Activity implements CalibrationCallback {
private NativePreviewer mPreview;
private GL2CameraViewer glview;
private Calibrator calibrator;
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
switch (keyCode) {
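// the camera key, space bar, and DPAD center all queue a chessboard capture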
case KeyEvent.KEYCODE_CAMERA:
case KeyEvent.KEYCODE_SPACE:
case KeyEvent.KEYCODE_DPAD_CENTER:
calibrator.queueChessCapture();
return true;
default:
return super.onKeyUp(keyCode, event);
}
}
@Override
public boolean onTrackballEvent(MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_UP) {
calibrator.queueChessCapture();
return true;
}
return super.onTrackballEvent(event);
}
/**
* Prevent the screen from being turned off by the system.
*/
public void disableScreenTurnOff() {
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
/**
* Sets the orientation to landscape, as required by AndAR.
*/
public void setOrientation() {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
/**
* Maximize the application.
*/
public void setFullscreen() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
}
public void setNoTitle() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.calibrationmenu, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.patternsize: {
Intent sizer = new Intent(getApplicationContext(),
ChessBoardChooser.class);
startActivity(sizer);
}
break;
case R.id.help:
help();
break;
case R.id.calibrate:
calibrate();
break;
case R.id.settings:
Intent configurer = new Intent(getApplicationContext(),
CameraConfig.class);
startActivity(configurer);
}
return true;
}
private void help() {
// TODO Auto-generated method stub
}
@Override
public void onOptionsMenuClosed(Menu menu) {
super.onOptionsMenuClosed(menu);
}
private ServiceConnection mConnection = new ServiceConnection() {
@Override
public void onServiceDisconnected(ComponentName name) {
}
@Override
public void onServiceConnected(ComponentName name, IBinder service) {
CalibrationService calibservice = ((CalibrationService.CalibrationServiceBinder) service)
.getService();
if (!SDCardChecker.CheckStorage(Calibration.this))
return;
SDCardChecker.MakeDataDir(Calibration.this);
File calibfile = SDCardChecker.getFile(calibservice,
R.string.calibfile);
try {
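// hand the filled calibrator to the background service and swap in a
// fresh one so the user can keep capturing meanwhile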
Calibrator tcalib = calibrator;
calibrator = new Calibrator(Calibration.this);
setCallbackStack();
calibservice.startCalibrating(Calibration.class, R.drawable.icon,tcalib, calibfile);
} catch (IOException e) {
e.printStackTrace();
}
// Tell the user about this for our demo.
Toast.makeText(Calibration.this,
"Starting calibration in the background.",
Toast.LENGTH_SHORT).show();
unbindService(this);
}
};
public static File getCalibrationFile(Context ctx) {
return SDCardChecker.getFile(ctx, R.string.calibfile);
}
void doBindCalibService() {
// Establish a connection with the service. We use an explicit
// class name because we want a specific service implementation that
// we know will be running in our own process (and thus won't be
// supporting component replacement by other applications).
bindService(new Intent(Calibration.this, CalibrationService.class),
mConnection, Context.BIND_AUTO_CREATE);
}
void calibrate() {
if (calibrator.getNumberPatternsDetected() < 3) {
Toast.makeText(this, getText(R.string.calibration_not_enough),
Toast.LENGTH_LONG).show();
return;
}
Intent calibservice = new Intent(Calibration.this,
CalibrationService.class);
startService(calibservice);
doBindCalibService();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setFullscreen();
disableScreenTurnOff();
setContentView(R.layout.calib_camera);
mPreview = (NativePreviewer) findViewById(R.id.nativepreviewer);
mPreview.setPreviewSize(800, 400);
mPreview.setGrayscale(true);
LinearLayout glview_layout = (LinearLayout) findViewById(R.id.glview_layout);
glview = new GL2CameraViewer(getApplication(), false, 0, 0);
glview_layout.addView(glview);
calibrator = new Calibrator(this);
ImageButton capturebutton = (ImageButton) findViewById(R.id.capture);
capturebutton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
calibrator.queueChessCapture();
}
});
ImageButton calibbutton = (ImageButton) findViewById(R.id.calibrate);
calibbutton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
calibrate();
}
});
}
@Override
protected void onDestroy() {
super.onDestroy();
}
@Override
protected void onPause() {
super.onPause();
mPreview.onPause();
glview.onPause();
}
protected void setCallbackStack() {
calibrator.setPatternSize(ChessBoardChooser.getPatternSize(this));
LinkedList<NativeProcessor.PoolCallback> callbackstack = new LinkedList<NativeProcessor.PoolCallback>();
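// order matters: the calibrator sees each frame before the GL viewer draws it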
callbackstack.add(calibrator);
callbackstack.add(glview.getDrawCallback());
mPreview.addCallbackStack(callbackstack);
updateNumber(calibrator);
}
@Override
protected void onResume() {
super.onResume();
int[] size = {0, 0};
CameraConfig.readImageSize(getApplicationContext(), size);
int mode = CameraConfig.readCameraMode(getApplicationContext());
mPreview.setPreviewSize(size[0], size[1]);
mPreview.setGrayscale(mode == CameraConfig.CAMERA_MODE_BW);
glview.onResume();
mPreview.onResume();
setCallbackStack();
}
void updateNumber(Calibrator calibrator) {
TextView numbertext = (TextView) findViewById(R.id.numberpatterns);
int numDetected = calibrator.getNumberPatternsDetected();
if (numDetected > 2) {
numbertext.setTextColor(getResources().getColor(R.color.good_color));
} else {
numbertext.setTextColor(getResources().getColor(R.color.bad_color));
}
numbertext.setText(String.valueOf(numDetected));
}
@Override
public void onFoundChessboard(final Calibrator calibrator) {
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(Calibration.this,
"Captured a calibration pattern!", Toast.LENGTH_SHORT)
.show();
updateNumber(calibrator);
}
});
}
@Override
public void onDoneCalibration(Calibrator calibration, File calibfile) {
}
@Override
public void onFailedChessboard(final Calibrator calibrator) {
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(
Calibration.this,
"No pattern found. Make sure its the right dimensions, and close enough...",
Toast.LENGTH_LONG).show();
updateNumber(calibrator);
}
});
}
}

View File

@ -1,64 +0,0 @@
package com.opencv.misc;
import java.io.File;
import android.content.Context;
import android.os.Environment;
import android.widget.Toast;
import com.opencv.calibration.R;
public class SDCardChecker {
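// Helpers for files under the external-storage root. Note that
// getDir(Context, int) and getFile(Context, int) are identical; both just
// resolve a string resource against the sdcard path.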
public static File createThumb(Context ctx, File workingDir) {
return new File(workingDir, "thumb.jpg");
}
public static File getDir(Context ctx, String relativename) {
return new File(Environment.getExternalStorageDirectory()
+ relativename);
}
public static File getDir(Context ctx, int id) {
return new File(Environment.getExternalStorageDirectory()
+ ctx.getResources().getString(id));
}
public static File getFile(Context ctx, int id) {
return new File(Environment.getExternalStorageDirectory()
+ ctx.getResources().getString(id));
}
public static void MakeDataDir(Context ctx) {
File dir = getDir(ctx, R.string.sdcarddir);
dir.mkdirs();
}
public static boolean CheckStorage(Context ctx) {
boolean mExternalStorageAvailable = false;
boolean mExternalStorageWriteable = false;
String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state)) {
// We can read and write the media
mExternalStorageAvailable = mExternalStorageWriteable = true;
} else if (Environment.MEDIA_MOUNTED_READ_ONLY.equals(state)) {
// We can only read the media
mExternalStorageAvailable = true;
mExternalStorageWriteable = false;
} else {
// Something else is wrong. It may be one of many other states, but
// all we need
// to know is we can neither read nor write
mExternalStorageAvailable = mExternalStorageWriteable = false;
}
boolean goodmount = mExternalStorageAvailable
&& mExternalStorageWriteable;
if (!goodmount) {
Toast.makeText(ctx, ctx.getString(R.string.sdcard_error_msg),
Toast.LENGTH_LONG).show();
}
return goodmount;
}
}

View File

@ -1,29 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.OpenCV_SAMPLE" android:versionCode="1"
android:versionName="1.0">
<application android:label="@string/app_name"
android:debuggable="true" android:icon="@drawable/icon">
<activity android:name=".OpenCV_SAMPLE" android:label="@string/app_name"
android:screenOrientation="landscape" android:configChanges="orientation|keyboardHidden|keyboard">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<!-- These activities are defined in the android-opencv library, and just
reused here -->
<activity android:name="com.opencv.camera.CameraConfig"
android:label="@string/app_name" android:screenOrientation="landscape"
android:configChanges="orientation|keyboardHidden|keyboard">
</activity>
</application>
<uses-sdk android:minSdkVersion="7" />
<!-- set the opengl version -->
<uses-feature android:glEsVersion="0x00020000" />
<!-- use the camera -->
<uses-permission android:name="android.permission.CAMERA"></uses-permission>
</manifest>

View File

@ -1,5 +0,0 @@
cmake_minimum_required(VERSION 2.8)
project(OpenCV_SAMPLE)
add_subdirectory(jni)

View File

@ -1,7 +0,0 @@
@ECHO OFF
SETLOCAL
PUSHD %~dp0
SET PROJECT_NAME=OpenCV_SAMPLE
CALL ..\..\scripts\build.cmd %*
POPD
ENDLOCAL

View File

@ -1,4 +0,0 @@
mkdir build_neon
cd build_neon
cmake -DOpenCV_DIR=../../../build_neon -DAndroidOpenCV_DIR=../../../android-opencv/build_neon -DARM_TARGETS="armeabi-v7a with NEON" -DCMAKE_TOOLCHAIN_FILE=$ANDTOOLCHAIN ..
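# ANDTOOLCHAIN is assumed to be an environment variable pointing at the
# android cmake toolchain file; this script does not set it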

View File

@ -1,12 +0,0 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
# Project target.
target=android-7
android.library.reference.1=../../android-opencv/

View File

@ -1,68 +0,0 @@
#########################################################
# Find opencv and android-opencv
#########################################################
set(OpenCV_DIR ${CMAKE_SOURCE_DIR}/../../build
CACHE PATH "The path where you built opencv for android")
set(AndroidOpenCV_DIR ${CMAKE_SOURCE_DIR}/../../android-opencv/build
CACHE PATH "The path where you built android-opencv")
find_package(OpenCV REQUIRED)
FIND_PACKAGE(AndroidOpenCV REQUIRED )
#########################################################
#c flags, included, and lib dependencies
#########################################################
#notice the "recycling" of CMAKE_C_FLAGS
#this is necessary to pick up android flags
set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -pedantic -fPIC" )
INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR})
set( LIBRARY_DEPS ${AndroidOpenCV_LIBS} ${OpenCV_LIBS} )
if(ANDROID)
set( LIBRARY_DEPS ${LIBRARY_DEPS} log dl)
endif(ANDROID)
#########################################################
#SWIG STUFF
#########################################################
#the java package to place swig generated java files in
set(MY_PACKAGE com.OpenCV_SAMPLE.jni )
set(MY_MODULE OpenCV_SAMPLE )
set(MY_SWIG
OpenCV_SAMPLE.i #swig file
)
set(MY_SRCS
cvsample.cpp #cpp files to be compiled into the module
)
if(NOT ANDROID)
#non android swig and jni
#jni is available by default on android
find_package(JNI REQUIRED)
include_directories(${JNI_INCLUDE_DIRS})
FIND_PACKAGE(SWIG)
endif()
INCLUDE(${SWIG_USE_FILE}) #on android this is found by the cmake toolchain
if(ANDROID)
#this will set the output path for the java package
#and properly create the package declarations in generated java sources
SET_SWIG_JAVA_PACKAGE( ${MY_PACKAGE} ) #defined in the android toolchain
endif(ANDROID)
#this adds the swig path for the opencv wrappers
SET(CMAKE_SWIG_FLAGS ${CMAKE_SWIG_FLAGS} "-I${AndroidOpenCV_SWIG_DIR}" )
SET_SOURCE_FILES_PROPERTIES(${MY_SWIG} PROPERTIES CPLUSPLUS ON)
#add the swig module, giving it the name, java, and then all of the source files
SWIG_ADD_MODULE(${MY_MODULE} java
${MY_SWIG}
${MY_SRCS}
)
#link the module like any other
target_link_libraries(${MY_MODULE} ${LIBRARY_DEPS} )

View File

@ -1,48 +0,0 @@
/* File : OpenCV_SAMPLE.i */
%module OpenCV_SAMPLE
/*
* the java import code must be included for the opencv jni wrappers
* this means that the android project must reference opencv/android as a project
* see the default.properties for how this is done
*/
%pragma(java) jniclassimports=%{
import com.opencv.jni.*; //import the android-opencv jni wrappers
%}
%pragma(java) jniclasscode=%{
static {
try {
//load up our shared libraries
System.loadLibrary("android-opencv");
System.loadLibrary("OpenCV_SAMPLE");
} catch (UnsatisfiedLinkError e) {
//badness
throw e;
}
}
%}
//import the android-cv.i file so that swig is aware of all that has previously been defined
//notice that it is not an include....
%import "android-cv.i"
%{
#include "cvsample.h"
using cv::Mat;
%}
//make sure to import the image_pool as it is
//referenced by the generated Processor java class
%typemap(javaimports) CVSample "
import com.opencv.jni.*;// import the opencv java bindings
"
class CVSample
{
public:
void canny(const Mat& input, Mat& output, int edgeThresh);
void invert(Mat& inout);
void blur(Mat& inout, int half_kernel_size);
};

View File

@ -1,27 +0,0 @@
#include "cvsample.h"
#include <opencv2/imgproc/imgproc.hpp>
void CVSample::canny(const cv::Mat& input, cv::Mat& output, int edgeThresh)
{
if (input.empty())
return;
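// Canny works on a single-channel image, so convert RGB input to grayscale first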
cv::Mat gray;
if (input.channels() == 3)
{
cv::cvtColor(input, gray, CV_RGB2GRAY);
}
else
gray = input;
cv::Canny(gray, output, edgeThresh, edgeThresh * 3, 3);
}
void CVSample::invert(cv::Mat& inout)
{
cv::bitwise_not(inout, inout);
}
void CVSample::blur(cv::Mat& inout, int half_kernel_size)
{
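// derive an odd kernel size (2k+1) so the blur kernel has a proper center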
int ksz = half_kernel_size*2 + 1;
cv::Size kernel(ksz,ksz);
cv::blur(inout,inout,kernel);
}

View File

@ -1,11 +0,0 @@
#pragma once
#include <opencv2/core/core.hpp>
class CVSample
{
public:
void canny(const cv::Mat& input, cv::Mat& output, int edgeThresh);
void invert(cv::Mat& inout);
void blur(cv::Mat& inout, int half_kernel_size);
};

View File

@ -1,3 +0,0 @@
#this generates an ant based cli build of the android-jni project
android update project --name OpenCV_SAMPLE \
--path .

Binary file not shown. (Before: 5.6 KiB)

Binary file not shown. (Before: 2.0 KiB)

Binary file not shown. (Before: 2.8 KiB)

View File

@ -1,12 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
>
<TextView
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="@string/hello"
/>
</LinearLayout>

Some files were not shown because too many files have changed in this diff.