refactored; added Camera2, notify callbacks, front/back maxCamera sizes; disable new stuff if target API < 21

This commit is contained in:
Andrey Pavlenko 2015-10-07 11:28:57 +03:00
parent 8e088d38a5
commit 15db8243ef
5 changed files with 882 additions and 311 deletions

View File

@ -181,6 +181,17 @@ else()
list(REMOVE_ITEM handwrittren_lib_project_files_rel "${ANDROID_MANIFEST_FILE}") list(REMOVE_ITEM handwrittren_lib_project_files_rel "${ANDROID_MANIFEST_FILE}")
endif() endif()
# Calc default SDK Target
android_get_compatible_target(android_sdk_target ${ANDROID_NATIVE_API_LEVEL} ${ANDROID_SDK_TARGET} 11)
# Strip the "android-" prefix to get a plain numeric API level for comparison
string(REGEX REPLACE "android-" "" android_sdk_target_num ${android_sdk_target})
# The OpenGL camera bridge classes use the camera2 API, which requires API level 21;
# exclude them from the Java sources when building against an older SDK target.
if( (ANDROID_SDK_TARGET AND ANDROID_SDK_TARGET LESS 21) OR (android_sdk_target_num LESS 21) )
message(STATUS "[OpenCV for Android SDK]: A new OpenGL Camera Bridge (CameraGLSurfaceView, CameraGLRendererBase, CameraRenderer, Camera2Renderer) is disabled, because ANDROID_SDK_TARGET (${android_sdk_target_num}) < 21")
# "\\\\+" matches a literal path separator in the escaped list items
ocv_list_filterout(handwritten_java_sources "android\\\\+CameraGL")
ocv_list_filterout(handwritten_java_sources "android\\\\+Camera.?Renderer")
endif()
# IMPORTANT: add dependencies to cmake (we should rerun cmake if any of these files is modified) # IMPORTANT: add dependencies to cmake (we should rerun cmake if any of these files is modified)
add_cmake_dependencies(${scripts_gen_java} ${scripts_hdr_parser} ${opencv_public_headers}) add_cmake_dependencies(${scripts_gen_java} ${scripts_hdr_parser} ${opencv_public_headers})

View File

@ -0,0 +1,302 @@
package org.opencv.android;
import java.util.Arrays;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
/**
 * Camera preview renderer backed by the {@code android.hardware.camera2} API
 * (available from API level 21). Opens a camera on a background thread and
 * streams preview frames into the SurfaceTexture owned by
 * {@link CameraGLRendererBase} ({@code mSTexture}).
 */
@TargetApi(21)
public class Camera2Renderer extends CameraGLRendererBase {
    protected final String LOGTAG = "Camera2Renderer";

    // camera2 handles; non-null only while the camera / session is open
    private CameraDevice mCameraDevice;
    private CameraCaptureSession mCaptureSession;
    private CaptureRequest.Builder mPreviewRequestBuilder;
    private String mCameraID;
    // (-1, -1) marks "no preview size chosen yet" (see cacPreviewSize)
    private Size mPreviewSize = new Size(-1, -1);

    // background thread + handler that receive all camera2 callbacks
    private HandlerThread mBackgroundThread;
    private Handler mBackgroundHandler;
    // Serializes open/close and capture-session (re)configuration.
    // NOTE: the permit is sometimes acquired in one method and released inside
    // an async callback (see createCameraPreviewSession / mStateCallback).
    private Semaphore mCameraOpenCloseLock = new Semaphore(1);

    Camera2Renderer(CameraGLSurfaceView view) {
        super(view);
    }

    /** Starts the callback thread before the base class opens the camera. */
    @Override
    protected void doStart() {
        Log.d(LOGTAG, "doStart");
        startBackgroundThread();
        super.doStart();
    }

    /** Stops the callback thread after the base class has closed the camera. */
    @Override
    protected void doStop() {
        Log.d(LOGTAG, "doStop");
        super.doStop();
        stopBackgroundThread();
    }

    /**
     * Picks the largest supported preview size that fits inside width x height
     * and whose aspect ratio is within 0.2 of the requested one, storing it in
     * {@code mPreviewSize}.
     *
     * @return {@code true} if a new size was selected (preview must be
     *         reconfigured); {@code false} if nothing changed or no camera is open.
     */
    boolean cacPreviewSize(final int width, final int height) {
        Log.i(LOGTAG, "cacPreviewSize: "+width+"x"+height);
        if(mCameraID == null) {
            Log.e(LOGTAG, "Camera isn't initialized!");
            return false;
        }
        CameraManager manager = (CameraManager) mView.getContext()
                .getSystemService(Context.CAMERA_SERVICE);
        try {
            CameraCharacteristics characteristics = manager
                    .getCameraCharacteristics(mCameraID);
            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            int bestWidth = 0, bestHeight = 0;
            float aspect = (float)width / height;
            for (Size psize : map.getOutputSizes(SurfaceTexture.class)) {
                int w = psize.getWidth(), h = psize.getHeight();
                Log.d(LOGTAG, "trying size: "+w+"x"+h);
                // keep the largest candidate that fits and roughly matches the aspect ratio
                if ( width >= w && height >= h &&
                     bestWidth <= w && bestHeight <= h &&
                     Math.abs(aspect - (float)w/h) < 0.2 ) {
                    bestWidth = w;
                    bestHeight = h;
                }
            }
            Log.i(LOGTAG, "best size: "+bestWidth+"x"+bestHeight);
            // no candidate found, or the previously selected size is unchanged
            if( bestWidth == 0 || bestHeight == 0 ||
                mPreviewSize.getWidth() == bestWidth &&
                mPreviewSize.getHeight() == bestHeight )
                return false;
            else {
                mPreviewSize = new Size(bestWidth, bestHeight);
                return true;
            }
        } catch (CameraAccessException e) {
            Log.e(LOGTAG, "cacPreviewSize - Camera Access Exception");
        } catch (IllegalArgumentException e) {
            Log.e(LOGTAG, "cacPreviewSize - Illegal Argument Exception");
        } catch (SecurityException e) {
            Log.e(LOGTAG, "cacPreviewSize - Security Exception");
        }
        return false;
    }

    /**
     * Resolves the requested logical camera id (ANY/BACK/FRONT) to a camera2
     * device id and opens it asynchronously; {@code mStateCallback} completes
     * the open on the background thread.
     */
    @Override
    protected void openCamera(int id) {
        Log.i(LOGTAG, "openCamera");
        CameraManager manager = (CameraManager) mView.getContext().getSystemService(Context.CAMERA_SERVICE);
        try {
            String camList[] = manager.getCameraIdList();
            if(camList.length == 0) {
                Log.e(LOGTAG, "Error: camera isn't detected.");
                return;
            }
            if(id == CameraBridgeViewBase.CAMERA_ID_ANY) {
                mCameraID = camList[0];
            } else {
                // scan for a camera whose lens faces the requested direction
                for (String cameraID : camList) {
                    CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
                    if( id == CameraBridgeViewBase.CAMERA_ID_BACK &&
                        characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK ||
                        id == CameraBridgeViewBase.CAMERA_ID_FRONT &&
                        characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
                        mCameraID = cameraID;
                        break;
                    }
                }
            }
            if(mCameraID != null) {
                // bounded wait so a wedged close cannot block the UI forever
                if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
                    throw new RuntimeException(
                            "Time out waiting to lock camera opening.");
                }
                Log.i(LOGTAG, "Opening camera: " + mCameraID);
                // the permit acquired above is released in mStateCallback
                manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler);
            }
        } catch (CameraAccessException e) {
            Log.e(LOGTAG, "OpenCamera - Camera Access Exception");
        } catch (IllegalArgumentException e) {
            Log.e(LOGTAG, "OpenCamera - Illegal Argument Exception");
        } catch (SecurityException e) {
            Log.e(LOGTAG, "OpenCamera - Security Exception");
        } catch (InterruptedException e) {
            Log.e(LOGTAG, "OpenCamera - Interrupted Exception");
        }
    }

    /** Closes the capture session and camera device under the open/close lock. */
    @Override
    protected void closeCamera() {
        Log.i(LOGTAG, "closeCamera");
        try {
            mCameraOpenCloseLock.acquire();
            if (null != mCaptureSession) {
                mCaptureSession.close();
                mCaptureSession = null;
            }
            if (null != mCameraDevice) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
        } finally {
            mCameraOpenCloseLock.release();
        }
    }

    // Completes the async open started in openCamera(); every path releases the
    // semaphore permit acquired there.
    private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice cameraDevice) {
            mCameraDevice = cameraDevice;
            mCameraOpenCloseLock.release();
            createCameraPreviewSession();
        }
        @Override
        public void onDisconnected(CameraDevice cameraDevice) {
            cameraDevice.close();
            mCameraDevice = null;
            mCameraOpenCloseLock.release();
        }
        @Override
        public void onError(CameraDevice cameraDevice, int error) {
            cameraDevice.close();
            mCameraDevice = null;
            mCameraOpenCloseLock.release();
        }
    };

    /**
     * Builds a repeating preview request targeting {@code mSTexture} at the
     * currently selected {@code mPreviewSize}. No-op until a valid size,
     * an open camera and a SurfaceTexture are all available.
     */
    private void createCameraPreviewSession() {
        int w=mPreviewSize.getWidth(), h=mPreviewSize.getHeight();
        Log.i(LOGTAG, "createCameraPreviewSession("+w+"x"+h+")");
        if(w<0 || h<0)
            return;
        try {
            mCameraOpenCloseLock.acquire();
            if (null == mCameraDevice) {
                mCameraOpenCloseLock.release();
                Log.e(LOGTAG, "createCameraPreviewSession: camera isn't opened");
                return;
            }
            if (null != mCaptureSession) {
                mCameraOpenCloseLock.release();
                Log.e(LOGTAG, "createCameraPreviewSession: mCaptureSession is already started");
                return;
            }
            if(null == mSTexture) {
                mCameraOpenCloseLock.release();
                Log.e(LOGTAG, "createCameraPreviewSession: preview SurfaceTexture is null");
                return;
            }
            mSTexture.setDefaultBufferSize(w, h);
            Surface surface = new Surface(mSTexture);
            mPreviewRequestBuilder = mCameraDevice
                    .createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewRequestBuilder.addTarget(surface);
            mCameraDevice.createCaptureSession(Arrays.asList(surface),
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured( CameraCaptureSession cameraCaptureSession) {
                            mCaptureSession = cameraCaptureSession;
                            try {
                                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                                mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
                                Log.i(LOGTAG, "CameraPreviewSession has been started");
                            } catch (CameraAccessException e) {
                                Log.e(LOGTAG, "createCaptureSession failed");
                            }
                            mCameraOpenCloseLock.release();
                        }
                        @Override
                        public void onConfigureFailed(
                                CameraCaptureSession cameraCaptureSession) {
                            Log.e(LOGTAG, "createCameraPreviewSession failed");
                            mCameraOpenCloseLock.release();
                        }
                    }, mBackgroundHandler);
        } catch (CameraAccessException e) {
            Log.e(LOGTAG, "createCameraPreviewSession");
        } catch (InterruptedException e) {
            throw new RuntimeException(
                    "Interrupted while createCameraPreviewSession", e);
        }
        finally {
            // Intentionally NOT released here on the success path: the permit is
            // handed off to the StateCallback above, which releases it once the
            // session is configured (or fails to configure).
            // NOTE(review): if createCaptureSession itself throws
            // CameraAccessException, the permit appears to stay held — confirm.
            //mCameraOpenCloseLock.release();
        }
    }

    /** (Re)creates the background handler thread used for camera callbacks. */
    private void startBackgroundThread() {
        Log.i(LOGTAG, "startBackgroundThread");
        stopBackgroundThread();
        mBackgroundThread = new HandlerThread("CameraBackground");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    /** Stops the background thread (if any) and waits for it to finish. */
    private void stopBackgroundThread() {
        Log.i(LOGTAG, "stopBackgroundThread");
        if(mBackgroundThread == null)
            return;
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            Log.e(LOGTAG, "stopBackgroundThread");
        }
    }

    /**
     * Clamps the request to the configured max camera size, recomputes the best
     * preview size and, if it changed, tears down the current capture session
     * and starts a new one. Updates mCameraWidth/mCameraHeight for the base class.
     */
    @Override
    protected void setCameraPreviewSize(int width, int height) {
        Log.i(LOGTAG, "setCameraPreviewSize("+width+"x"+height+")");
        if(mMaxCameraWidth > 0 && mMaxCameraWidth < width) width = mMaxCameraWidth;
        if(mMaxCameraHeight > 0 && mMaxCameraHeight < height) height = mMaxCameraHeight;
        try {
            mCameraOpenCloseLock.acquire();
            boolean needReconfig = cacPreviewSize(width, height);
            mCameraWidth = mPreviewSize.getWidth();
            mCameraHeight = mPreviewSize.getHeight();
            if( !needReconfig ) {
                mCameraOpenCloseLock.release();
                return;
            }
            if (null != mCaptureSession) {
                Log.d(LOGTAG, "closing existing previewSession");
                mCaptureSession.close();
                mCaptureSession = null;
            }
            mCameraOpenCloseLock.release();
            createCameraPreviewSession();
        } catch (InterruptedException e) {
            mCameraOpenCloseLock.release();
            throw new RuntimeException("Interrupted while setCameraPreviewSize.", e);
        }
    }
}

View File

@ -0,0 +1,424 @@
package org.opencv.android;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import org.opencv.android.CameraGLSurfaceView.CameraTextureListener;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.View;
/**
 * Base GLSurfaceView.Renderer that draws camera preview frames arriving on a
 * SurfaceTexture (OES texture) to the screen, optionally routing each frame
 * through a user-supplied {@link CameraTextureListener} via an intermediate FBO.
 * Camera control is delegated to subclasses through openCamera/closeCamera/
 * setCameraPreviewSize.
 */
@TargetApi(15)
public abstract class CameraGLRendererBase implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {

    protected final String LOGTAG = "CameraGLRendererBase";

    // shaders
    // Shared pass-through vertex shader for both fragment programs.
    private final String vss = ""
            + "attribute vec2 vPosition;\n"
            + "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n"
            + "void main() {\n" + "  texCoord = vTexCoord;\n"
            + "  gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );\n"
            + "}";

    // Fragment shader sampling the external OES camera texture.
    private final String fssOES = ""
            + "#extension GL_OES_EGL_image_external : require\n"
            + "precision mediump float;\n"
            + "uniform samplerExternalOES sTexture;\n"
            + "varying vec2 texCoord;\n"
            + "void main() {\n"
            + "  gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";

    // Fragment shader sampling an ordinary 2D texture (FBO/user output).
    private final String fss2D = ""
            + "precision mediump float;\n"
            + "uniform sampler2D sTexture;\n"
            + "varying vec2 texCoord;\n"
            + "void main() {\n"
            + "  gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";

    // coord-s
    // Full-screen quad, drawn as a triangle strip.
    private final float vertices[] = {
           -1, -1,
           -1,  1,
            1, -1,
            1,  1 };
    // OES texture coordinates are vertically flipped relative to 2D ones.
    private final float texCoordOES[] = {
            0,  1,
            0,  0,
            1,  1,
            1,  0 };
    private final float texCoord2D[] = {
            0,  0,
            0,  1,
            1,  0,
            1,  1 };

    // texCamera: OES texture fed by the camera; texFBO: FBO color attachment
    // passed to the user; texDraw: user-writable output texture
    private int[] texCamera = {0}, texFBO = {0}, texDraw = {0};
    private int[] FBO = {0};
    private int progOES = -1, prog2D = -1;
    private int vPosOES, vTCOES, vPos2D, vTC2D;
    private FloatBuffer vert, texOES, tex2D;

    protected int mCameraWidth = -1, mCameraHeight = -1;
    protected int mFBOWidth = -1, mFBOHeight = -1;
    protected int mMaxCameraWidth = -1, mMaxCameraHeight = -1;
    protected int mCameraIndex = CameraBridgeViewBase.CAMERA_ID_ANY;

    protected SurfaceTexture mSTexture;

    // lifecycle flags driving updateState()
    protected boolean mHaveSurface = false;
    protected boolean mHaveFBO = false;
    protected boolean mUpdateST = false;
    protected boolean mEnabled = true;
    protected boolean mIsStarted = false;

    protected CameraGLSurfaceView mView;

    protected abstract void openCamera(int id);
    protected abstract void closeCamera();
    protected abstract void setCameraPreviewSize(int width, int height); // updates mCameraWidth & mCameraHeight

    public CameraGLRendererBase(CameraGLSurfaceView view) {
        mView = view;
        // allocate direct native-order buffers for the quad geometry
        int bytes = vertices.length * Float.SIZE / Byte.SIZE;
        vert   = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
        texOES = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
        tex2D  = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
        vert.put(vertices).position(0);
        texOES.put(texCoordOES).position(0);
        tex2D.put(texCoord2D).position(0);
    }

    /** Called by the camera; flags that a new frame is pending and requests a redraw. */
    @Override
    public synchronized void onFrameAvailable(SurfaceTexture surfaceTexture) {
        //Log.i(LOGTAG, "onFrameAvailable");
        mUpdateST = true;
        mView.requestRender();
    }

    /**
     * Renders the latest camera frame. If a CameraTextureListener is set, the
     * frame goes camera(OES) -> texFBO -> user callback -> screen; otherwise
     * it is drawn to the screen directly.
     */
    @Override
    public void onDrawFrame(GL10 gl) {
        //Log.i(LOGTAG, "onDrawFrame start");

        if (!mHaveFBO)
            return;

        synchronized(this) {
            if (mUpdateST) {
                mSTexture.updateTexImage();
                mUpdateST = false;
            }

            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

            CameraTextureListener texListener = mView.getCameraTextureListener();
            if(texListener != null) {
                //Log.d(LOGTAG, "haveUserCallback");
                // texCamera(OES) -> texFBO
                drawTex(texCamera[0], true, FBO[0]);

                // call user code (texFBO -> texDraw)
                boolean modified = texListener.onCameraTexture(texFBO[0], texDraw[0], mCameraWidth, mCameraHeight);

                if(modified) {
                    // texDraw -> screen
                    drawTex(texDraw[0], false, 0);
                } else {
                    // texFBO -> screen
                    drawTex(texFBO[0], false, 0);
                }
            } else {
                Log.d(LOGTAG, "texCamera(OES) -> screen");
                // texCamera(OES) -> screen
                drawTex(texCamera[0], true, 0);
            }
            //Log.i(LOGTAG, "onDrawFrame end");
        }
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int surfaceWidth, int surfaceHeight) {
        Log.i(LOGTAG, "onSurfaceChanged("+surfaceWidth+"x"+surfaceHeight+")");
        mHaveSurface = true;
        updateState();
        setPreviewSize(surfaceWidth, surfaceHeight);
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        Log.i(LOGTAG, "onSurfaceCreated");
        initShaders();
    }

    /** Compiles/links both shader programs and caches attribute locations. */
    private void initShaders() {
        String strGLVersion = GLES20.glGetString(GLES20.GL_VERSION);
        if (strGLVersion != null)
            Log.i(LOGTAG, "OpenGL ES version: " + strGLVersion);

        GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);

        progOES = loadShader(vss, fssOES);
        vPosOES = GLES20.glGetAttribLocation(progOES, "vPosition");
        vTCOES  = GLES20.glGetAttribLocation(progOES, "vTexCoord");
        GLES20.glEnableVertexAttribArray(vPosOES);
        GLES20.glEnableVertexAttribArray(vTCOES);

        prog2D  = loadShader(vss, fss2D);
        vPos2D  = GLES20.glGetAttribLocation(prog2D, "vPosition");
        vTC2D   = GLES20.glGetAttribLocation(prog2D, "vTexCoord");
        GLES20.glEnableVertexAttribArray(vPos2D);
        GLES20.glEnableVertexAttribArray(vTC2D);
    }

    /** (Re)creates the OES camera texture and its SurfaceTexture wrapper. */
    private void initSurfaceTexture() {
        Log.d(LOGTAG, "initSurfaceTexture");
        deleteSurfaceTexture();
        initTexOES(texCamera);
        mSTexture = new SurfaceTexture(texCamera[0]);
        mSTexture.setOnFrameAvailableListener(this);
    }

    /** Releases the SurfaceTexture and deletes the backing OES texture. */
    private void deleteSurfaceTexture() {
        Log.d(LOGTAG, "deleteSurfaceTexture");
        if(mSTexture != null) {
            mSTexture.release();
            mSTexture = null;
            deleteTex(texCamera);
        }
    }

    /** Generates an OES texture with clamp-to-edge wrap and nearest filtering. */
    private void initTexOES(int[] tex) {
        if(tex.length == 1) {
            GLES20.glGenTextures(1, tex, 0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
        }
    }

    private static void deleteTex(int[] tex) {
        if(tex.length == 1) {
            GLES20.glDeleteTextures(1, tex, 0);
        }
    }

    /**
     * Compiles the given vertex/fragment shader sources and links them into a
     * program. Returns 0 on compile failure.
     * NOTE(review): the link result is never checked via GL_LINK_STATUS, and
     * "shaders were compiled OK" is logged regardless — consider verifying with
     * glGetProgramiv before use.
     */
    private static int loadShader(String vss, String fss) {
        Log.d("CameraGLRendererBase", "loadShader");
        int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
        GLES20.glShaderSource(vshader, vss);
        GLES20.glCompileShader(vshader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Log.e("CameraGLRendererBase", "Could not compile vertex shader: "+GLES20.glGetShaderInfoLog(vshader));
            GLES20.glDeleteShader(vshader);
            vshader = 0;
            return 0;
        }

        int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
        GLES20.glShaderSource(fshader, fss);
        GLES20.glCompileShader(fshader);
        GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Log.e("CameraGLRendererBase", "Could not compile fragment shader:"+GLES20.glGetShaderInfoLog(fshader));
            GLES20.glDeleteShader(vshader);
            GLES20.glDeleteShader(fshader);
            fshader = 0;
            return 0;
        }

        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vshader);
        GLES20.glAttachShader(program, fshader);
        GLES20.glLinkProgram(program);
        Log.d("CameraGLRendererBase", "shaders were compiled OK");
        // shaders can be deleted once attached to a linked program
        GLES20.glDeleteShader(vshader);
        GLES20.glDeleteShader(fshader);
        return program;
    }

    /** Deletes the FBO and its two textures; resets the cached FBO size. */
    private void deleteFBO()
    {
        Log.d(LOGTAG, "deleteFBO("+mFBOWidth+"x"+mFBOHeight+")");
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
        GLES20.glDeleteFramebuffers(1, FBO, 0);

        deleteTex(texFBO);
        deleteTex(texDraw);
        mFBOWidth = mFBOHeight = 0;
    }

    /**
     * Allocates texDraw and texFBO at the given size and attaches texFBO as the
     * color attachment of a freshly generated framebuffer.
     */
    private void initFBO(int width, int height)
    {
        Log.d(LOGTAG, "initFBO("+width+"x"+height+")");

        deleteFBO();

        GLES20.glGenTextures(1, texDraw, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texDraw[0]);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);

        GLES20.glGenTextures(1, texFBO, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texFBO[0]);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);

        //int hFBO;
        GLES20.glGenFramebuffers(1, FBO, 0);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, FBO[0]);
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texFBO[0], 0);
        Log.d(LOGTAG, "initFBO error status: " + GLES20.glGetError());

        int FBOstatus = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
        if (FBOstatus != GLES20.GL_FRAMEBUFFER_COMPLETE)
            Log.e(LOGTAG, "initFBO failed, status: " + FBOstatus);

        mFBOWidth  = width;
        mFBOHeight = height;
    }

    // draw texture to FBO or to screen if fbo == 0
    private void drawTex(int tex, boolean isOES, int fbo)
    {
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo);

        if(fbo == 0)
            GLES20.glViewport(0, 0, mView.getWidth(), mView.getHeight());
        else
            GLES20.glViewport(0, 0, mFBOWidth, mFBOHeight);

        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        // pick the program / geometry matching the texture type
        if(isOES) {
            GLES20.glUseProgram(progOES);
            GLES20.glVertexAttribPointer(vPosOES, 2, GLES20.GL_FLOAT, false, 4*2, vert);
            GLES20.glVertexAttribPointer(vTCOES, 2, GLES20.GL_FLOAT, false, 4*2, texOES);
        } else {
            GLES20.glUseProgram(prog2D);
            GLES20.glVertexAttribPointer(vPos2D, 2, GLES20.GL_FLOAT, false, 4*2, vert);
            GLES20.glVertexAttribPointer(vTC2D, 2, GLES20.GL_FLOAT, false, 4*2, tex2D);
        }

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);

        if(isOES) {
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex);
            GLES20.glUniform1i(GLES20.glGetUniformLocation(progOES, "sTexture"), 0);
        } else {
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
            GLES20.glUniform1i(GLES20.glGetUniformLocation(prog2D, "sTexture"), 0);
        }

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glFlush();
    }

    public synchronized void enableView() {
        Log.d(LOGTAG, "enableView");
        mEnabled = true;
        updateState();
    }

    public synchronized void disableView() {
        Log.d(LOGTAG, "disableView");
        mEnabled = false;
        updateState();
    }

    /** Starts or stops the pipeline based on mEnabled, mHaveSurface and view visibility. */
    protected void updateState() {
        Log.d(LOGTAG, "updateState");
        Log.d(LOGTAG, "mEnabled="+mEnabled+", mHaveSurface="+mHaveSurface);
        boolean willStart = mEnabled && mHaveSurface && mView.getVisibility() == View.VISIBLE;
        if (willStart != mIsStarted) {
            if(willStart) doStart();
            else doStop();
        } else {
            Log.d(LOGTAG, "keeping State unchanged");
        }
        Log.d(LOGTAG, "updateState end");
    }

    /** Creates the SurfaceTexture, opens the camera and (re)applies preview size. */
    protected synchronized void doStart() {
        Log.d(LOGTAG, "doStart");
        initSurfaceTexture();
        openCamera(mCameraIndex);
        mIsStarted = true;
        if(mCameraWidth>0 && mCameraHeight>0)
            setPreviewSize(mCameraWidth, mCameraHeight); // start preview and call listener.onCameraViewStarted()
    }

    /** Tears down camera + SurfaceTexture and notifies the listener (outside the lock). */
    protected void doStop() {
        Log.d(LOGTAG, "doStop");
        synchronized(this) {
            mUpdateST = false;
            mIsStarted = false;
            mHaveFBO = false;
            closeCamera();
            deleteSurfaceTexture();
        }
        CameraTextureListener listener = mView.getCameraTextureListener();
        if(listener != null) listener.onCameraViewStopped();
    }

    /** Applies a new preview size, rebuilds the FBO and notifies the listener. */
    protected void setPreviewSize(int width, int height) {
        synchronized(this) {
            mHaveFBO = false;
            mCameraWidth  = width;
            mCameraHeight = height;
            setCameraPreviewSize(width, height); // can change mCameraWidth & mCameraHeight
            initFBO(mCameraWidth, mCameraHeight);
            mHaveFBO = true;
        }

        CameraTextureListener listener = mView.getCameraTextureListener();
        if(listener != null) listener.onCameraViewStarted(mCameraWidth, mCameraHeight);
    }

    /** Restarts the pipeline with the given camera index (ANY/BACK/FRONT or explicit id). */
    public void setCameraIndex(int cameraIndex) {
        disableView();
        mCameraIndex = cameraIndex;
        enableView();
    }

    /** Restarts the pipeline with an upper bound on the camera preview size. */
    public void setMaxCameraPreviewSize(int maxWidth, int maxHeight) {
        disableView();
        mMaxCameraWidth  = maxWidth;
        mMaxCameraHeight = maxHeight;
        enableView();
    }

    public void onResume() {
        Log.i(LOGTAG, "onResume");
    }

    public void onPause() {
        Log.i(LOGTAG, "onPause");
        mHaveSurface = false;
        updateState();
        mCameraWidth = mCameraHeight = -1;
    }
}

View File

@ -1,18 +1,18 @@
package org.opencv.android; package org.opencv.android;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; import org.opencv.R;
import org.opencv.core.Mat;
import android.app.Activity;
import android.content.Context; import android.content.Context;
import android.content.res.TypedArray;
import android.opengl.GLSurfaceView; import android.opengl.GLSurfaceView;
import android.util.AttributeSet; import android.util.AttributeSet;
import android.view.MotionEvent; import android.util.Log;
import android.view.SurfaceHolder; import android.view.SurfaceHolder;
import android.widget.TextView;
public class CameraGLSurfaceView extends GLSurfaceView { public class CameraGLSurfaceView extends GLSurfaceView {
private static final String LOGTAG = "CameraGLSurfaceView";
public interface CameraTextureListener { public interface CameraTextureListener {
/** /**
* This method is invoked when camera preview has started. After this method is invoked * This method is invoked when camera preview has started. After this method is invoked
@ -29,24 +29,33 @@ public class CameraGLSurfaceView extends GLSurfaceView {
public void onCameraViewStopped(); public void onCameraViewStopped();
/** /**
* This method is invoked when delivery of the frame needs to be done. * This method is invoked when a new preview frame from Camera is ready.
* The returned values - is a modified frame which needs to be displayed on the screen. * @param texIn - the OpenGL texture ID that contains frame in RGBA format
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc) * @param texOut - the OpenGL texture ID that can be used to store modified frame image to display
* @param width - the width of the frame
* @param height - the height of the frame
* @return `true` if `texOut` should be displayed, `false` - to show `texIn`
*/ */
public boolean onCameraFrame(int texIn, int texOut, int width, int height); public boolean onCameraTexture(int texIn, int texOut, int width, int height);
}; };
private CameraTextureListener mTexListener; private CameraTextureListener mTexListener;
private CameraRenderer mRenderer; private CameraGLRendererBase mRenderer;
public CameraGLSurfaceView(Context context, AttributeSet attrs) { public CameraGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs); super(context, attrs);
/*if(android.os.Build.VERSION.SDK_INT >= 21) TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
int cameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
styledAttrs.recycle();
if(android.os.Build.VERSION.SDK_INT >= 21)
mRenderer = new Camera2Renderer(this); mRenderer = new Camera2Renderer(this);
else*/ else
mRenderer = new CameraRenderer(this); mRenderer = new CameraRenderer(this);
setCameraIndex(cameraIndex);
setEGLContextClientVersion(2); setEGLContextClientVersion(2);
setRenderer(mRenderer); setRenderer(mRenderer);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
@ -62,6 +71,14 @@ public class CameraGLSurfaceView extends GLSurfaceView {
return mTexListener; return mTexListener;
} }
public void setCameraIndex(int cameraIndex) {
mRenderer.setCameraIndex(cameraIndex);
}
public void setMaxCameraPreviewSize(int maxWidth, int maxHeight) {
mRenderer.setMaxCameraPreviewSize(maxWidth, maxHeight);
}
@Override @Override
public void surfaceCreated(SurfaceHolder holder) { public void surfaceCreated(SurfaceHolder holder) {
super.surfaceCreated(holder); super.surfaceCreated(holder);
@ -69,6 +86,7 @@ public class CameraGLSurfaceView extends GLSurfaceView {
@Override @Override
public void surfaceDestroyed(SurfaceHolder holder) { public void surfaceDestroyed(SurfaceHolder holder) {
mRenderer.mHaveSurface = false;
super.surfaceDestroyed(holder); super.surfaceDestroyed(holder);
} }
@ -79,20 +97,23 @@ public class CameraGLSurfaceView extends GLSurfaceView {
@Override @Override
public void onResume() { public void onResume() {
Log.i(LOGTAG, "onResume");
super.onResume(); super.onResume();
mRenderer.onResume(); mRenderer.onResume();
} }
@Override @Override
public void onPause() { public void onPause() {
Log.i(LOGTAG, "onPause");
mRenderer.onPause(); mRenderer.onPause();
super.onPause(); super.onPause();
} }
@Override public void enableView() {
public boolean onTouchEvent(MotionEvent e) { mRenderer.enableView();
if(e.getAction() == MotionEvent.ACTION_DOWN) }
((Activity)getContext()).openOptionsMenu();
return true; public void disableView() {
mRenderer.disableView();
} }
} }

View File

@ -1,197 +1,132 @@
package org.opencv.android; package org.opencv.android;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.List; import java.util.List;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import org.opencv.android.CameraGLSurfaceView.CameraTextureListener;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Build;
import android.util.Log;
import android.annotation.TargetApi; import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.hardware.Camera; import android.hardware.Camera;
import android.hardware.Camera.Size; import android.hardware.Camera.Size;
import android.os.Build;
import android.util.Log;
@TargetApi(15) @TargetApi(15)
public class CameraRenderer implements GLSurfaceView.Renderer, @SuppressWarnings("deprecation")
SurfaceTexture.OnFrameAvailableListener { public class CameraRenderer extends CameraGLRendererBase {
public static final String LOGTAG = "CameraRenderer"; public static final String LOGTAG = "CameraRenderer";
// shaders
private final String vss = ""
+ "attribute vec2 vPosition;\n"
+ "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n"
+ "void main() {\n" + " texCoord = vTexCoord;\n"
+ " gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );\n"
+ "}";
private final String fssOES = ""
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "varying vec2 texCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";
private final String fss2D = ""
+ "precision mediump float;\n"
+ "uniform sampler2D sTexture;\n"
+ "varying vec2 texCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";
// coord-s
private final float vertices[] = {
-1, -1,
-1, 1,
1, -1,
1, 1 };
private final float texCoordOES[] = {
0, 1,
0, 0,
1, 1,
1, 0 };
private final float texCoord2D[] = {
0, 0,
0, 1,
1, 0,
1, 1 };
private int[] texCamera = {0}, texFBO = {0}, texDraw = {0};
private int[] FBO = {0};
private int progOES, prog2D;
private int vPosOES, vTCOES, vPos2D, vTC2D;
private FloatBuffer vert, texOES, tex2D;
private Camera mCamera; private Camera mCamera;
private boolean mPreviewStarted = false; private boolean mPreviewStarted = false;
private int mPreviewWidth, mPreviewHeight;
private SurfaceTexture mSTexture;
private boolean mGLInit = false;
private boolean mUpdateST = false;
private CameraGLSurfaceView mView;
CameraRenderer(CameraGLSurfaceView view) { CameraRenderer(CameraGLSurfaceView view) {
mView = view; super(view);
int bytes = vertices.length * Float.SIZE / Byte.SIZE;
vert = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
texOES = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
tex2D = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
vert.put(vertices).position(0);
texOES.put(texCoordOES).position(0);
tex2D.put(texCoord2D).position(0);
} }
public void onResume() { @Override
//nothing protected synchronized void closeCamera() {
Log.i(LOGTAG, "onResume"); Log.i(LOGTAG, "closeCamera");
}
public void onPause() {
Log.i(LOGTAG, "onPause");
mGLInit = false;
mUpdateST = false;
if(mCamera != null) { if(mCamera != null) {
mCamera.stopPreview(); mCamera.stopPreview();
mPreviewStarted = false;
mCamera.release(); mCamera.release();
mCamera = null; mCamera = null;
} }
if(mSTexture != null) {
mSTexture.release();
mSTexture = null;
deleteTex(texCamera);
}
} }
public void onSurfaceCreated(GL10 unused, EGLConfig config) { @Override
GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f); protected synchronized void openCamera(int id) {
Log.i(LOGTAG, "openCamera");
closeCamera();
if (id == CameraBridgeViewBase.CAMERA_ID_ANY) {
Log.d(LOGTAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(LOGTAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
progOES = loadShader(vss, fssOES); if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
vPosOES = GLES20.glGetAttribLocation(progOES, "vPosition"); boolean connected = false;
vTCOES = GLES20.glGetAttribLocation(progOES, "vTexCoord"); for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
GLES20.glEnableVertexAttribArray(vPosOES); Log.d(LOGTAG, "Trying to open camera with new open(" + camIdx + ")");
GLES20.glEnableVertexAttribArray(vTCOES); try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(LOGTAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
} else {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
int localCameraIndex = mCameraIndex;
if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK) {
Log.i(LOGTAG, "Trying to open BACK camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
localCameraIndex = camIdx;
break;
}
}
} else if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT) {
Log.i(LOGTAG, "Trying to open FRONT camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
localCameraIndex = camIdx;
break;
}
}
}
if (localCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK) {
Log.e(LOGTAG, "Back camera not found!");
} else if (localCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT) {
Log.e(LOGTAG, "Front camera not found!");
} else {
Log.d(LOGTAG, "Trying to open camera with new open(" + localCameraIndex + ")");
try {
mCamera = Camera.open(localCameraIndex);
} catch (RuntimeException e) {
Log.e(LOGTAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
}
}
}
}
if(mCamera == null) {
Log.e(LOGTAG, "Error: can't open camera");
return;
}
Camera.Parameters params = mCamera.getParameters();
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
prog2D = loadShader(vss, fss2D);
vPos2D = GLES20.glGetAttribLocation(prog2D, "vPosition");
vTC2D = GLES20.glGetAttribLocation(prog2D, "vTexCoord");
GLES20.glEnableVertexAttribArray(vPos2D);
GLES20.glEnableVertexAttribArray(vTC2D);
initTexOES(texCamera);
mSTexture = new SurfaceTexture(texCamera[0]);
mSTexture.setOnFrameAvailableListener(this);
mCamera = Camera.open();
try { try {
mCamera.setPreviewTexture(mSTexture); mCamera.setPreviewTexture(mSTexture);
} catch (IOException ioe) { } catch (IOException ioe) {
} Log.e(LOGTAG, "setPreviewTexture() failed: " + ioe.getMessage());
mGLInit = true;
}
public void onDrawFrame(GL10 unused) {
if (!mGLInit)
return;
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
synchronized (this) {
if (mUpdateST) {
mSTexture.updateTexImage();
mUpdateST = false;
}
}
CameraTextureListener texListener = mView.getCameraTextureListener();
if(texListener != null) {
Log.d(LOGTAG, "haveUserCallback");
// texCamera(OES) -> texFBO
drawTex(texCamera[0], true, FBO[0]);
// call user code (texFBO -> texDraw)
boolean modified = texListener.onCameraFrame(texFBO[0], texDraw[0], mPreviewWidth, mPreviewHeight);
if(modified) {
// texDraw -> screen
drawTex(texDraw[0], false, 0);
} else {
// texFBO -> screen
drawTex(texFBO[0], false, 0);
}
} else {
// texCamera(OES) -> screen
drawTex(texCamera[0], true, 0);
} }
} }
public void onSurfaceChanged(GL10 unused, int width, int height) { @Override
Log.i(LOGTAG, "onSurfaceChanged("+width+"x"+height+")"); public synchronized void setCameraPreviewSize(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize: "+width+"x"+height);
if(mCamera == null) if(mCamera == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return; return;
if(mPreviewStarted) {
mCamera.stopPreview();
mPreviewStarted = false;
} }
if(mMaxCameraWidth > 0 && mMaxCameraWidth < width) width = mMaxCameraWidth;
if(mMaxCameraHeight > 0 && mMaxCameraHeight < height) height = mMaxCameraHeight;
Camera.Parameters param = mCamera.getParameters(); Camera.Parameters param = mCamera.getParameters();
List<Size> psize = param.getSupportedPreviewSizes(); List<Size> psize = param.getSupportedPreviewSizes();
int bestWidth = 0, bestHeight = 0; int bestWidth = 0, bestHeight = 0;
@ -207,147 +142,25 @@ public class CameraRenderer implements GLSurfaceView.Renderer,
bestHeight = h; bestHeight = h;
} }
} }
if(bestWidth > 0 && bestHeight > 0) { if(bestWidth <= 0 || bestHeight <= 0) {
param.setPreviewSize(bestWidth, bestHeight); bestWidth = psize.get(0).width;
Log.i(LOGTAG, "selected size: "+bestWidth+" x "+bestHeight); bestHeight = psize.get(0).height;
Log.e(LOGTAG, "Error: best size was not selected, using "+bestWidth+" x "+bestHeight);
GLES20.glViewport(0, 0, bestWidth, bestWidth); } else {
initFBO(bestWidth, bestHeight); Log.i(LOGTAG, "Selected best size: "+bestWidth+" x "+bestHeight);
mPreviewWidth = bestWidth;
mPreviewHeight = bestHeight;
} }
if(mPreviewStarted) {
mCamera.stopPreview();
mPreviewStarted = false;
}
mCameraWidth = bestWidth;
mCameraHeight = bestHeight;
param.setPreviewSize(bestWidth, bestHeight);
} }
//param.set("orientation", "landscape"); param.set("orientation", "landscape");
mCamera.setParameters(param); mCamera.setParameters(param);
mCamera.startPreview(); mCamera.startPreview();
mPreviewStarted = true; mPreviewStarted = true;
} }
public synchronized void onFrameAvailable(SurfaceTexture st) {
mUpdateST = true;
mView.requestRender();
}
private void initTexOES(int[] tex) {
if(tex.length == 1) {
GLES20.glGenTextures(1, tex, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
}
}
private void deleteTex(int[] tex) {
if(tex.length == 1) {
GLES20.glDeleteTextures(1, tex, 0);
}
}
private static int loadShader(String vss, String fss) {
int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vshader, vss);
GLES20.glCompileShader(vshader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(LOGTAG, "Could not compile vertex shader");
Log.v(LOGTAG, "Could not compile vertex shader:"+GLES20.glGetShaderInfoLog(vshader));
GLES20.glDeleteShader(vshader);
vshader = 0;
}
int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fshader, fss);
GLES20.glCompileShader(fshader);
GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e("Renderer", "Could not compile fragment shader");
Log.v("Renderer", "Could not compile fragment shader:"+GLES20.glGetShaderInfoLog(fshader));
GLES20.glDeleteShader(fshader);
fshader = 0;
}
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vshader);
GLES20.glAttachShader(program, fshader);
GLES20.glLinkProgram(program);
return program;
}
    // Releases the offscreen render targets created by initFBO():
    // unbinds any currently bound framebuffer, deletes the FBO handle, then
    // frees the two RGBA textures (result texture and user-draw texture).
    // Safe to call when nothing was allocated yet; GL ignores zero handles.
    private void releaseFBO()
    {
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
        GLES20.glDeleteFramebuffers(1, FBO, 0);
        deleteTex(texFBO);
        deleteTex(texDraw);
    }
private void initFBO(int width, int height)
{
Log.d(LOGTAG, "initFBO("+width+"x"+height+")");
releaseFBO();
GLES20.glGenTextures(1, texDraw, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texDraw[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLES20.glGenTextures(1, texFBO, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texFBO[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
//int hFBO;
GLES20.glGenFramebuffers(1, FBO, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, FBO[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texFBO[0], 0);
Log.d(LOGTAG, "initFBO status: " + GLES20.glGetError());
if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE)
Log.e(LOGTAG, "initFBO failed: " + GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER));
//GLES20.glViewport(0, 0, width, height);
}
// draw texture to FBO or to screen if fbo == 0
private void drawTex(int tex, boolean isOES, int fbo)
{
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo);
GLES20.glViewport(0, 0, mPreviewWidth, mPreviewHeight);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if(isOES) {
GLES20.glUseProgram(progOES);
GLES20.glVertexAttribPointer(vPosOES, 2, GLES20.GL_FLOAT, false, 4*2, vert);
GLES20.glVertexAttribPointer(vTCOES, 2, GLES20.GL_FLOAT, false, 4*2, texOES);
} else {
GLES20.glUseProgram(prog2D);
GLES20.glVertexAttribPointer(vPos2D, 2, GLES20.GL_FLOAT, false, 4*2, vert);
GLES20.glVertexAttribPointer(vTC2D, 2, GLES20.GL_FLOAT, false, 4*2, tex2D);
}
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
if(isOES) {
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex);
GLES20.glUniform1i(GLES20.glGetUniformLocation(progOES, "sTexture"), 0);
} else {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
GLES20.glUniform1i(GLES20.glGetUniformLocation(prog2D, "sTexture"), 0);
}
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glFlush();
}
} }