Merge pull request #26646 from warped-rudi:refactoring

Android camera refactoring #26646

This patch set does not contain any functional changes. It just cleans up the code structure to improve readability and to prepare for future changes.

* videoio(Android): Use 'unique_ptr' instead of 'shared_ptr'
Using shared pointers for unshared data is considered an antipattern.
* videoio(Android): Make callback functions private static members
Don't leak internal functions into global namespace. Some member
variables are now private as well.
* videoio(Android): Move resolution matching into separate function
Also make internally used member functions private.
* videoio(Android): Move ranges query into separate function
Also remove some unnecessary initialisations from initCapture().
* videoio(Android): Wrap extremely long source code lines
* videoio(Android): Rename members of 'RangeValue'

### Pull Request Readiness Checklist

See details at https://github.com/opencv/opencv/wiki/How_to_contribute#making-a-good-pull-request

- [x] I agree to contribute to the project under Apache 2 License.
- [x] To the best of my knowledge, the proposed patch is not based on a code under GPL or another license that is incompatible with OpenCV
- [ ] The PR is proposed to the proper branch
- [ ] There is a reference to the original bug report and related work
- [ ] There is accuracy test, performance test and test data in opencv_extra repository, if applicable
      Patch to opencv_extra has the same branch name.
- [ ] The feature is well documented and sample code can be built with the project CMake
This commit is contained in:
Rüdiger Ihle 2025-01-22 14:58:14 +01:00 committed by GitHub
parent 7728dd3387
commit c623a5afc1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -42,100 +42,19 @@ using namespace cv;
template <typename T> struct RangeValue { template <typename T> struct RangeValue {
T min, max; T min, max;
/**
* return absolute value from relative value RangeValue(T minv = 0, T maxv = 0) : min(minv), max(maxv) {}
* * value: in percent (50 for 50%) bool isValid() const { return (min != max); }
* */ T percentage(int percent) const {
T value(int percent) {
return static_cast<T>(min + ((max - min) * percent) / 100); return static_cast<T>(min + ((max - min) * percent) / 100);
} }
RangeValue(T minv = 0, T maxv = 0) : min(minv), max(maxv) {}
bool Supported() const { return (min != max); }
T clamp( T value ) const { T clamp( T value ) const {
return (value > max) ? max : ((value < min) ? min : value); return (value > max) ? max : ((value < min) ? min : value);
} }
}; };
static inline void deleter_ACameraManager(ACameraManager *cameraManager) { template <typename T>
ACameraManager_delete(cameraManager); using AObjPtr = std::unique_ptr<T, std::function<void(T *)>>;
}
static inline void deleter_ACameraIdList(ACameraIdList *cameraIdList) {
ACameraManager_deleteCameraIdList(cameraIdList);
}
static inline void deleter_ACameraDevice(ACameraDevice *cameraDevice) {
ACameraDevice_close(cameraDevice);
}
static inline void deleter_ACameraMetadata(ACameraMetadata *cameraMetadata) {
ACameraMetadata_free(cameraMetadata);
}
static inline void deleter_AImageReader(AImageReader *imageReader) {
AImageReader_delete(imageReader);
}
static inline void deleter_ACaptureSessionOutputContainer(ACaptureSessionOutputContainer *outputContainer) {
ACaptureSessionOutputContainer_free(outputContainer);
}
static inline void deleter_ACameraCaptureSession(ACameraCaptureSession *captureSession) {
ACameraCaptureSession_close(captureSession);
}
static inline void deleter_AImage(AImage *image) {
AImage_delete(image);
}
static inline void deleter_ANativeWindow(ANativeWindow *nativeWindow) {
ANativeWindow_release(nativeWindow);
}
static inline void deleter_ACaptureSessionOutput(ACaptureSessionOutput *sessionOutput) {
ACaptureSessionOutput_free(sessionOutput);
}
static inline void deleter_ACameraOutputTarget(ACameraOutputTarget *outputTarget) {
ACameraOutputTarget_free(outputTarget);
}
static inline void deleter_ACaptureRequest(ACaptureRequest *captureRequest) {
ACaptureRequest_free(captureRequest);
}
/*
* CameraDevice callbacks
*/
static void OnDeviceDisconnect(void* /* ctx */, ACameraDevice* dev) {
std::string id(ACameraDevice_getId(dev));
LOGW("Device %s disconnected", id.c_str());
}
static void OnDeviceError(void* /* ctx */, ACameraDevice* dev, int err) {
std::string id(ACameraDevice_getId(dev));
LOGI("Camera Device Error: %#x, Device %s", err, id.c_str());
switch (err) {
case ERROR_CAMERA_IN_USE:
LOGI("Camera in use");
break;
case ERROR_CAMERA_SERVICE:
LOGI("Fatal Error occurred in Camera Service");
break;
case ERROR_CAMERA_DEVICE:
LOGI("Fatal Error occurred in Camera Device");
break;
case ERROR_CAMERA_DISABLED:
LOGI("Camera disabled");
break;
case ERROR_MAX_CAMERAS_IN_USE:
LOGI("System limit for maximum concurrent cameras used was exceeded");
break;
default:
LOGI("Unknown Camera Device Error: %#x", err);
}
}
enum class CaptureSessionState { enum class CaptureSessionState {
INITIALIZING, // session is ready INITIALIZING, // session is ready
@ -144,22 +63,6 @@ enum class CaptureSessionState {
CLOSED // session was closed CLOSED // session was closed
}; };
void OnSessionClosed(void* context, ACameraCaptureSession* session);
void OnSessionReady(void* context, ACameraCaptureSession* session);
void OnSessionActive(void* context, ACameraCaptureSession* session);
void OnCaptureCompleted(void* context,
ACameraCaptureSession* session,
ACaptureRequest* request,
const ACameraMetadata* result);
void OnCaptureFailed(void* context,
ACameraCaptureSession* session,
ACaptureRequest* request,
ACameraCaptureFailure* failure);
#define CAPTURE_TIMEOUT_SECONDS 2 #define CAPTURE_TIMEOUT_SECONDS 2
#define CAPTURE_POLL_INTERVAL_MS 5 #define CAPTURE_POLL_INTERVAL_MS 5
@ -178,15 +81,15 @@ static double elapsedTimeFrom(std::chrono::time_point<std::chrono::system_clock>
class AndroidCameraCapture : public IVideoCapture class AndroidCameraCapture : public IVideoCapture
{ {
int cachedIndex; int cachedIndex;
std::shared_ptr<ACameraManager> cameraManager; AObjPtr<ACameraManager> cameraManager { nullptr, ACameraManager_delete };
std::shared_ptr<ACameraDevice> cameraDevice; AObjPtr<ACameraDevice> cameraDevice { nullptr, ACameraDevice_close };
std::shared_ptr<AImageReader> imageReader; AObjPtr<AImageReader> imageReader { nullptr, AImageReader_delete };
std::shared_ptr<ACaptureSessionOutputContainer> outputContainer; AObjPtr<ACaptureSessionOutputContainer> outputContainer { nullptr, ACaptureSessionOutputContainer_free };
std::shared_ptr<ACaptureSessionOutput> sessionOutput; AObjPtr<ACaptureSessionOutput> sessionOutput { nullptr, ACaptureSessionOutput_free };
std::shared_ptr<ANativeWindow> nativeWindow; AObjPtr<ANativeWindow> nativeWindow { nullptr, ANativeWindow_release };
std::shared_ptr<ACameraOutputTarget> outputTarget; AObjPtr<ACameraOutputTarget> outputTarget { nullptr, ACameraOutputTarget_free };
std::shared_ptr<ACaptureRequest> captureRequest; AObjPtr<ACaptureRequest> captureRequest { nullptr, ACaptureRequest_free };
std::shared_ptr<ACameraCaptureSession> captureSession; AObjPtr<ACameraCaptureSession> captureSession { nullptr, ACameraCaptureSession_close };
CaptureSessionState sessionState = CaptureSessionState::INITIALIZING; CaptureSessionState sessionState = CaptureSessionState::INITIALIZING;
int32_t frameWidth = 0; int32_t frameWidth = 0;
int32_t frameStride = 0; int32_t frameStride = 0;
@ -208,7 +111,24 @@ class AndroidCameraCapture : public IVideoCapture
int32_t sensitivity = 0; int32_t sensitivity = 0;
RangeValue<int32_t> sensitivityRange; RangeValue<int32_t> sensitivityRange;
public: ACameraDevice_stateCallbacks deviceCallbacks = {};
ACameraCaptureSession_stateCallbacks sessionCallbacks = {};
ACameraCaptureSession_captureCallbacks captureCallbacks = {};
static void OnDeviceDisconnect(void* ctx, ACameraDevice* dev);
static void OnDeviceError(void* ctx, ACameraDevice* dev, int err);
static void OnSessionClosed(void* context, ACameraCaptureSession* session);
static void OnSessionReady(void* context, ACameraCaptureSession* session);
static void OnSessionActive(void* context, ACameraCaptureSession* session);
static void OnCaptureCompleted(void* context,
ACameraCaptureSession* session,
ACaptureRequest* request,
const ACameraMetadata* result);
static void OnCaptureFailed(void* context,
ACameraCaptureSession* session,
ACaptureRequest* request,
ACameraCaptureFailure* failure);
// for synchronization with NDK capture callback // for synchronization with NDK capture callback
bool waitingCapture = false; bool waitingCapture = false;
bool captureSuccess = false; bool captureSuccess = false;
@ -218,6 +138,19 @@ public:
public: public:
AndroidCameraCapture(const VideoCaptureParameters& params) AndroidCameraCapture(const VideoCaptureParameters& params)
{ {
deviceCallbacks.context = this;
deviceCallbacks.onError = OnDeviceError;
deviceCallbacks.onDisconnected = OnDeviceDisconnect,
sessionCallbacks.context = this;
sessionCallbacks.onReady = OnSessionReady;
sessionCallbacks.onActive = OnSessionActive;
sessionCallbacks.onClosed = OnSessionClosed;
captureCallbacks.context = this;
captureCallbacks.onCaptureCompleted = OnCaptureCompleted;
captureCallbacks.onCaptureFailed = OnCaptureFailed;
desiredWidth = params.get<int>(CAP_PROP_FRAME_WIDTH, desiredWidth); desiredWidth = params.get<int>(CAP_PROP_FRAME_WIDTH, desiredWidth);
desiredHeight = params.get<int>(CAP_PROP_FRAME_HEIGHT, desiredHeight); desiredHeight = params.get<int>(CAP_PROP_FRAME_HEIGHT, desiredHeight);
@ -237,47 +170,7 @@ public:
~AndroidCameraCapture() { cleanUp(); } ~AndroidCameraCapture() { cleanUp(); }
ACameraDevice_stateCallbacks* GetDeviceListener() { bool isOpened() const CV_OVERRIDE { return imageReader && captureSession; }
static ACameraDevice_stateCallbacks cameraDeviceListener = {
.onDisconnected = ::OnDeviceDisconnect,
.onError = ::OnDeviceError,
};
return &cameraDeviceListener;
}
ACameraCaptureSession_stateCallbacks sessionListener;
ACameraCaptureSession_stateCallbacks* GetSessionListener() {
sessionListener = {
.context = this,
.onClosed = ::OnSessionClosed,
.onReady = ::OnSessionReady,
.onActive = ::OnSessionActive,
};
return &sessionListener;
}
ACameraCaptureSession_captureCallbacks captureListener;
ACameraCaptureSession_captureCallbacks* GetCaptureCallback() {
captureListener = {
.context = this,
.onCaptureStarted = nullptr,
.onCaptureProgressed = nullptr,
.onCaptureCompleted = ::OnCaptureCompleted,
.onCaptureFailed = ::OnCaptureFailed,
.onCaptureSequenceCompleted = nullptr,
.onCaptureSequenceAborted = nullptr,
.onCaptureBufferLost = nullptr,
};
return &captureListener;
}
void setSessionState(CaptureSessionState newSessionState) {
this->sessionState = newSessionState;
}
bool isOpened() const CV_OVERRIDE { return imageReader.get() != nullptr && captureSession.get() != nullptr; }
int getCaptureDomain() CV_OVERRIDE { return CAP_ANDROID; } int getCaptureDomain() CV_OVERRIDE { return CAP_ANDROID; }
@ -294,12 +187,15 @@ public:
waitingCapture = true; waitingCapture = true;
captureSuccess = false; captureSuccess = false;
auto start = std::chrono::system_clock::now(); auto start = std::chrono::system_clock::now();
bool captured = condition.wait_for(lock, std::chrono::seconds(CAPTURE_TIMEOUT_SECONDS), [this]{ return captureSuccess; }); bool captured = condition.wait_for(lock, std::chrono::seconds(
CAPTURE_TIMEOUT_SECONDS), [this]{ return captureSuccess; });
waitingCapture = false; waitingCapture = false;
if (captured) { if (captured) {
mStatus = AImageReader_acquireLatestImage(imageReader.get(), &img); mStatus = AImageReader_acquireLatestImage(imageReader.get(), &img);
// even though an image has been captured we may not be able to acquire it straight away so we poll every 10ms // even though an image has been captured we may not be able to acquire it
while (mStatus == AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE && elapsedTimeFrom(start) < CAPTURE_TIMEOUT_SECONDS) { // straight away so we poll every 10ms
while (mStatus == AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE &&
elapsedTimeFrom(start) < CAPTURE_TIMEOUT_SECONDS) {
std::this_thread::sleep_for(std::chrono::milliseconds(CAPTURE_POLL_INTERVAL_MS)); std::this_thread::sleep_for(std::chrono::milliseconds(CAPTURE_POLL_INTERVAL_MS));
mStatus = AImageReader_acquireLatestImage(imageReader.get(), &img); mStatus = AImageReader_acquireLatestImage(imageReader.get(), &img);
} }
@ -320,7 +216,7 @@ public:
} }
} }
} }
std::shared_ptr<AImage> image = std::shared_ptr<AImage>(img, deleter_AImage); AObjPtr<AImage> image(img, AImage_delete);
int32_t srcFormat = -1; int32_t srcFormat = -1;
AImage_getFormat(image.get(), &srcFormat); AImage_getFormat(image.get(), &srcFormat);
if (srcFormat != AIMAGE_FORMAT_YUV_420_888) { if (srcFormat != AIMAGE_FORMAT_YUV_420_888) {
@ -345,14 +241,19 @@ public:
AImage_getPlanePixelStride(image.get(), 1, &uvPixelStride); AImage_getPlanePixelStride(image.get(), 1, &uvPixelStride);
int32_t yBufferLen = yLen; int32_t yBufferLen = yLen;
if ( (uvPixelStride == 2) && (uPixel == vPixel + 1) && (yLen == (yStride * (frameHeight - 1)) + frameWidth) && (uLen == (uvStride * ((frameHeight / 2) - 1)) + frameWidth - 1) && (uvStride == yStride) && (vLen == uLen) ) { if ( (uvPixelStride == 2) && (uPixel == vPixel + 1) &&
(yLen == (yStride * (frameHeight - 1)) + frameWidth) &&
(uLen == (uvStride * ((frameHeight / 2) - 1)) + frameWidth - 1) &&
(uvStride == yStride) && (vLen == uLen) ) {
frameStride = yStride; frameStride = yStride;
yBufferLen = frameStride * frameHeight; yBufferLen = frameStride * frameHeight;
colorFormat = COLOR_FormatYUV420SemiPlanar; colorFormat = COLOR_FormatYUV420SemiPlanar;
if (fourCC == FOURCC_UNKNOWN) { if (fourCC == FOURCC_UNKNOWN) {
fourCC = FOURCC_NV21; fourCC = FOURCC_NV21;
} }
} else if ( (uvPixelStride == 1) && (uPixel == vPixel + vLen) && (yLen == frameWidth * frameHeight) && (uLen == yLen / 4) && (vLen == uLen) ) { } else if ( (uvPixelStride == 1) && (uPixel == vPixel + vLen) &&
(yLen == frameWidth * frameHeight) &&
(uLen == yLen / 4) && (vLen == uLen) ) {
colorFormat = COLOR_FormatYUV420Planar; colorFormat = COLOR_FormatYUV420Planar;
if (fourCC == FOURCC_UNKNOWN) { if (fourCC == FOURCC_UNKNOWN) {
fourCC = FOURCC_YV12; fourCC = FOURCC_YV12;
@ -497,7 +398,8 @@ public:
fourCC = newFourCC; fourCC = newFourCC;
return true; return true;
} else { } else {
LOGE("Unsupported FOURCC conversion COLOR_FormatYUV420SemiPlanar -> COLOR_FormatYUV420Planar"); LOGE("Unsupported FOURCC conversion COLOR_FormatYUV420SemiPlanar"
" -> COLOR_FormatYUV420Planar");
return false; return false;
} }
case FOURCC_NV21: case FOURCC_NV21:
@ -505,7 +407,8 @@ public:
fourCC = newFourCC; fourCC = newFourCC;
return true; return true;
} else { } else {
LOGE("Unsupported FOURCC conversion COLOR_FormatYUV420Planar -> COLOR_FormatYUV420SemiPlanar"); LOGE("Unsupported FOURCC conversion COLOR_FormatYUV420Planar"
" -> COLOR_FormatYUV420SemiPlanar");
return false; return false;
} }
default: default:
@ -521,14 +424,14 @@ public:
} }
return true; return true;
case CAP_PROP_EXPOSURE: case CAP_PROP_EXPOSURE:
if (isOpened() && exposureRange.Supported()) { if (isOpened() && exposureRange.isValid()) {
exposureTime = exposureRange.clamp(static_cast<int64_t>(value)); exposureTime = exposureRange.clamp(static_cast<int64_t>(value));
LOGI("Setting CAP_PROP_EXPOSURE will have no effect unless CAP_PROP_AUTO_EXPOSURE is off"); LOGI("Setting CAP_PROP_EXPOSURE will have no effect unless CAP_PROP_AUTO_EXPOSURE is off");
return submitRequest(ACaptureRequest_setEntry_i64, ACAMERA_SENSOR_EXPOSURE_TIME, exposureTime); return submitRequest(ACaptureRequest_setEntry_i64, ACAMERA_SENSOR_EXPOSURE_TIME, exposureTime);
} }
return false; return false;
case CAP_PROP_ISO_SPEED: case CAP_PROP_ISO_SPEED:
if (isOpened() && sensitivityRange.Supported()) { if (isOpened() && sensitivityRange.isValid()) {
sensitivity = sensitivityRange.clamp(static_cast<int32_t>(value)); sensitivity = sensitivityRange.clamp(static_cast<int32_t>(value));
LOGI("Setting CAP_PROP_ISO_SPEED will have no effect unless CAP_PROP_AUTO_EXPOSURE is off"); LOGI("Setting CAP_PROP_ISO_SPEED will have no effect unless CAP_PROP_AUTO_EXPOSURE is off");
return submitRequest(ACaptureRequest_setEntry_i32, ACAMERA_SENSOR_SENSITIVITY, sensitivity); return submitRequest(ACaptureRequest_setEntry_i32, ACAMERA_SENSOR_SENSITIVITY, sensitivity);
@ -546,132 +449,68 @@ public:
return false; return false;
} }
void setWidthHeight() {
cleanUp();
initCapture(cachedIndex);
}
// calculate a score based on how well the width and height match the desired width and height
// basically draw the 2 rectangle on top of each other and take the ratio of the non-overlapping
// area to the overlapping area
double getScore(int32_t width, int32_t height) {
double area1 = width * height;
double area2 = desiredWidth * desiredHeight;
if ((width < desiredWidth) == (height < desiredHeight)) {
return (width < desiredWidth) ? (area2 - area1)/area1 : (area1 - area2)/area2;
} else {
int32_t overlappedWidth = std::min(width, desiredWidth);
int32_t overlappedHeight = std::min(height, desiredHeight);
double overlappedArea = overlappedWidth * overlappedHeight;
return (area1 + area2 - overlappedArea)/overlappedArea;
}
}
bool initCapture(int index) bool initCapture(int index)
{ {
cachedIndex = index; cachedIndex = index;
cameraManager = std::shared_ptr<ACameraManager>(ACameraManager_create(), deleter_ACameraManager); cameraManager.reset(ACameraManager_create());
if (!cameraManager) { if (!cameraManager) {
LOGE("Cannot create camera manager!"); LOGE("Cannot create camera manager!");
return false; return false;
} }
ACameraIdList* cameraIds = nullptr; ACameraIdList* cameraIds;
camera_status_t cStatus = ACameraManager_getCameraIdList(cameraManager.get(), &cameraIds); camera_status_t cStatus = ACameraManager_getCameraIdList(cameraManager.get(), &cameraIds);
if (cStatus != ACAMERA_OK) { if (cStatus != ACAMERA_OK) {
LOGE("Get camera list failed with error code: %d", cStatus); LOGE("Get camera list failed with error code: %d", cStatus);
return false; return false;
} }
std::shared_ptr<ACameraIdList> cameraIdList = std::shared_ptr<ACameraIdList>(cameraIds, deleter_ACameraIdList); AObjPtr<ACameraIdList> cameraIdList(cameraIds, ACameraManager_deleteCameraIdList);
if (index < 0 || index >= cameraIds->numCameras) { if (index < 0 || index >= cameraIds->numCameras) {
LOGE("Camera index out of range %d (Number of cameras: %d)", index, cameraIds->numCameras); LOGE("Camera index out of range %d (Number of cameras: %d)", index, cameraIds->numCameras);
return false; return false;
} }
ACameraDevice* camera = nullptr; const char *cameraId = cameraIdList.get()->cameraIds[index];
cStatus = ACameraManager_openCamera(cameraManager.get(), cameraIdList.get()->cameraIds[index], GetDeviceListener(), &camera);
ACameraDevice* camera;
cStatus = ACameraManager_openCamera(cameraManager.get(), cameraId, &deviceCallbacks, &camera);
if (cStatus != ACAMERA_OK) { if (cStatus != ACAMERA_OK) {
LOGE("Open camera failed with error code: %d", cStatus); LOGE("Open camera failed with error code: %d", cStatus);
return false; return false;
} }
cameraDevice = std::shared_ptr<ACameraDevice>(camera, deleter_ACameraDevice); cameraDevice.reset(camera);
ACameraMetadata* metadata; ACameraMetadata* metadata;
cStatus = ACameraManager_getCameraCharacteristics(cameraManager.get(), cameraIdList.get()->cameraIds[index], &metadata); cStatus = ACameraManager_getCameraCharacteristics(cameraManager.get(), cameraId, &metadata);
if (cStatus != ACAMERA_OK) { if (cStatus != ACAMERA_OK) {
LOGE("Get camera characteristics failed with error code: %d", cStatus); LOGE("Get camera characteristics failed with error code: %d", cStatus);
return false; return false;
} }
std::shared_ptr<ACameraMetadata> cameraMetadata = std::shared_ptr<ACameraMetadata>(metadata, deleter_ACameraMetadata); AObjPtr<ACameraMetadata> cameraMetadata(metadata, ACameraMetadata_free);
ACameraMetadata_const_entry entry = {};
ACameraMetadata_getConstEntry(cameraMetadata.get(), ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry);
double bestScore = std::numeric_limits<double>::max(); getPropertyRanges(cameraMetadata.get());
int32_t bestMatchWidth = 0;
int32_t bestMatchHeight = 0;
for (uint32_t i = 0; i < entry.count; i += 4) { int32_t bestMatchWidth = 0, bestMatchHeight = 0;
int32_t input = entry.data.i32[i + 3]; findResolutionMatch(cameraMetadata.get(), bestMatchWidth, bestMatchHeight);
int32_t format = entry.data.i32[i + 0];
if (input) {
continue;
}
if (format == AIMAGE_FORMAT_YUV_420_888) {
int32_t width = entry.data.i32[i + 1];
int32_t height = entry.data.i32[i + 2];
if (width == desiredWidth && height == desiredHeight) {
bestMatchWidth = width;
bestMatchHeight = height;
bestScore = 0;
break;
} else {
double score = getScore(width, height);
if (score < bestScore) {
bestMatchWidth = width;
bestMatchHeight = height;
bestScore = score;
}
}
}
}
LOGI("Best resolution match: %dx%d", bestMatchWidth, bestMatchHeight); LOGI("Best resolution match: %dx%d", bestMatchWidth, bestMatchHeight);
ACameraMetadata_const_entry val;
cStatus = ACameraMetadata_getConstEntry(cameraMetadata.get(), ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, &val);
if (cStatus == ACAMERA_OK) {
exposureRange.min = exposureTimeLimits.clamp(val.data.i64[0]);
exposureRange.max = exposureTimeLimits.clamp(val.data.i64[1]);
exposureTime = exposureRange.value(2);
} else {
LOGW("Unsupported ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE");
exposureRange.min = exposureRange.max = 0;
exposureTime = 0;
}
cStatus = ACameraMetadata_getConstEntry(cameraMetadata.get(), ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, &val);
if (cStatus == ACAMERA_OK){
sensitivityRange.min = val.data.i32[0];
sensitivityRange.max = val.data.i32[1];
sensitivity = sensitivityRange.value(2);
} else {
LOGW("Unsupported ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE");
sensitivityRange.min = sensitivityRange.max = 0;
sensitivity = 0;
}
AImageReader* reader; AImageReader* reader;
media_status_t mStatus = AImageReader_new(bestMatchWidth, bestMatchHeight, AIMAGE_FORMAT_YUV_420_888, MAX_BUF_COUNT, &reader); media_status_t mStatus = AImageReader_new(bestMatchWidth, bestMatchHeight,
AIMAGE_FORMAT_YUV_420_888, MAX_BUF_COUNT, &reader);
if (mStatus != AMEDIA_OK) { if (mStatus != AMEDIA_OK) {
LOGE("ImageReader creation failed with error code: %d", mStatus); LOGE("ImageReader creation failed with error code: %d", mStatus);
return false; return false;
} }
frameWidth = bestMatchWidth; frameWidth = bestMatchWidth;
frameHeight = bestMatchHeight; frameHeight = bestMatchHeight;
imageReader = std::shared_ptr<AImageReader>(reader, deleter_AImageReader); imageReader.reset(reader);
ANativeWindow *window; ANativeWindow* window;
mStatus = AImageReader_getWindow(imageReader.get(), &window); mStatus = AImageReader_getWindow(imageReader.get(), &window);
if (mStatus != AMEDIA_OK) { if (mStatus != AMEDIA_OK) {
LOGE("Could not get ANativeWindow: %d", mStatus); LOGE("Could not get ANativeWindow: %d", mStatus);
return false; return false;
} }
nativeWindow = std::shared_ptr<ANativeWindow>(window, deleter_ANativeWindow); nativeWindow.reset(window);
ANativeWindow_acquire(nativeWindow.get());
ACaptureSessionOutputContainer* container; ACaptureSessionOutputContainer* container;
cStatus = ACaptureSessionOutputContainer_create(&container); cStatus = ACaptureSessionOutputContainer_create(&container);
@ -679,16 +518,16 @@ public:
LOGE("CaptureSessionOutputContainer creation failed with error code: %d", cStatus); LOGE("CaptureSessionOutputContainer creation failed with error code: %d", cStatus);
return false; return false;
} }
outputContainer = std::shared_ptr<ACaptureSessionOutputContainer>(container, deleter_ACaptureSessionOutputContainer); outputContainer.reset(container);
ANativeWindow_acquire(nativeWindow.get());
ACaptureSessionOutput* output; ACaptureSessionOutput* output;
cStatus = ACaptureSessionOutput_create(nativeWindow.get(), &output); cStatus = ACaptureSessionOutput_create(nativeWindow.get(), &output);
if (cStatus != ACAMERA_OK) { if (cStatus != ACAMERA_OK) {
LOGE("CaptureSessionOutput creation failed with error code: %d", cStatus); LOGE("CaptureSessionOutput creation failed with error code: %d", cStatus);
return false; return false;
} }
sessionOutput = std::shared_ptr<ACaptureSessionOutput>(output, deleter_ACaptureSessionOutput); sessionOutput.reset(output);
cStatus = ACaptureSessionOutputContainer_add(outputContainer.get(), sessionOutput.get()); cStatus = ACaptureSessionOutputContainer_add(outputContainer.get(), sessionOutput.get());
if (cStatus != ACAMERA_OK) { if (cStatus != ACAMERA_OK) {
LOGE("CaptureSessionOutput Container add failed with error code: %d", cStatus); LOGE("CaptureSessionOutput Container add failed with error code: %d", cStatus);
@ -702,15 +541,15 @@ public:
LOGE("CameraOutputTarget creation failed with error code: %d", cStatus); LOGE("CameraOutputTarget creation failed with error code: %d", cStatus);
return false; return false;
} }
outputTarget = std::shared_ptr<ACameraOutputTarget>(target, deleter_ACameraOutputTarget); outputTarget.reset(target);
ACaptureRequest * request; ACaptureRequest* request;
cStatus = ACameraDevice_createCaptureRequest(cameraDevice.get(), TEMPLATE_PREVIEW, &request); cStatus = ACameraDevice_createCaptureRequest(cameraDevice.get(), TEMPLATE_PREVIEW, &request);
if (cStatus != ACAMERA_OK) { if (cStatus != ACAMERA_OK) {
LOGE("CaptureRequest creation failed with error code: %d", cStatus); LOGE("CaptureRequest creation failed with error code: %d", cStatus);
return false; return false;
} }
captureRequest = std::shared_ptr<ACaptureRequest>(request, deleter_ACaptureRequest); captureRequest.reset(request);
cStatus = ACaptureRequest_addTarget(captureRequest.get(), outputTarget.get()); cStatus = ACaptureRequest_addTarget(captureRequest.get(), outputTarget.get());
if (cStatus != ACAMERA_OK) { if (cStatus != ACAMERA_OK) {
@ -719,13 +558,14 @@ public:
} }
targetAdded = true; targetAdded = true;
ACameraCaptureSession *session; ACameraCaptureSession* session;
cStatus = ACameraDevice_createCaptureSession(cameraDevice.get(), outputContainer.get(), GetSessionListener(), &session); cStatus = ACameraDevice_createCaptureSession(cameraDevice.get(),
outputContainer.get(), &sessionCallbacks, &session);
if (cStatus != ACAMERA_OK) { if (cStatus != ACAMERA_OK) {
LOGE("CaptureSession creation failed with error code: %d", cStatus); LOGE("CaptureSession creation failed with error code: %d", cStatus);
return false; return false;
} }
captureSession = std::shared_ptr<ACameraCaptureSession>(session, deleter_ACameraCaptureSession); captureSession.reset(session);
ACaptureRequest_setEntry_u8(captureRequest.get(), ACAMERA_CONTROL_AE_MODE, 1, &aeMode); ACaptureRequest_setEntry_u8(captureRequest.get(), ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
ACaptureRequest_setEntry_i32(captureRequest.get(), ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); ACaptureRequest_setEntry_i32(captureRequest.get(), ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
@ -734,7 +574,8 @@ public:
} }
ACaptureRequest_setEntry_u8(captureRequest.get(), ACAMERA_FLASH_MODE, 1, &flashMode); ACaptureRequest_setEntry_u8(captureRequest.get(), ACAMERA_FLASH_MODE, 1, &flashMode);
cStatus = ACameraCaptureSession_setRepeatingRequest(captureSession.get(), GetCaptureCallback(), 1, &request, nullptr); cStatus = ACameraCaptureSession_setRepeatingRequest(captureSession.get(),
&captureCallbacks, 1, &request, nullptr);
if (cStatus != ACAMERA_OK) { if (cStatus != ACAMERA_OK) {
LOGE("CameraCaptureSession set repeating request failed with error code: %d", cStatus); LOGE("CameraCaptureSession set repeating request failed with error code: %d", cStatus);
return false; return false;
@ -742,29 +583,107 @@ public:
return true; return true;
} }
private:
void getPropertyRanges(const ACameraMetadata* metadata)
{
camera_status_t cStatus;
ACameraMetadata_const_entry val;
cStatus = ACameraMetadata_getConstEntry(metadata, ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, &val);
if (cStatus == ACAMERA_OK) {
exposureRange.min = exposureTimeLimits.clamp(val.data.i64[0]);
exposureRange.max = exposureTimeLimits.clamp(val.data.i64[1]);
exposureTime = exposureRange.percentage(2);
} else {
LOGW("Unsupported ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE");
exposureRange.min = exposureRange.max = 0;
exposureTime = 0;
}
cStatus = ACameraMetadata_getConstEntry(metadata, ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, &val);
if (cStatus == ACAMERA_OK){
sensitivityRange.min = val.data.i32[0];
sensitivityRange.max = val.data.i32[1];
sensitivity = sensitivityRange.percentage(2);
} else {
LOGW("Unsupported ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE");
sensitivityRange.min = sensitivityRange.max = 0;
sensitivity = 0;
}
}
// calculate a score based on how well the width and height match the desired width and height
// basically draw the 2 rectangle on top of each other and take the ratio of the non-overlapping
// area to the overlapping area
double getScore(int32_t width, int32_t height) const {
double area1 = width * height;
double area2 = desiredWidth * desiredHeight;
if ((width < desiredWidth) == (height < desiredHeight)) {
return (width < desiredWidth) ? (area2 - area1)/area1 : (area1 - area2)/area2;
} else {
int32_t overlappedWidth = std::min(width, desiredWidth);
int32_t overlappedHeight = std::min(height, desiredHeight);
double overlappedArea = overlappedWidth * overlappedHeight;
return (area1 + area2 - overlappedArea)/overlappedArea;
}
}
void findResolutionMatch(const ACameraMetadata* metadata,
int32_t &bestMatchWidth, int32_t &bestMatchHeight) const {
ACameraMetadata_const_entry entry = {};
ACameraMetadata_getConstEntry(metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry);
double bestScore = std::numeric_limits<double>::max();
for (uint32_t i = 0; i < entry.count; i += 4) {
int32_t input = entry.data.i32[i + 3];
int32_t format = entry.data.i32[i + 0];
if (!input && format == AIMAGE_FORMAT_YUV_420_888) {
int32_t width = entry.data.i32[i + 1];
int32_t height = entry.data.i32[i + 2];
if (width == desiredWidth && height == desiredHeight) {
bestMatchWidth = width;
bestMatchHeight = height;
return;
}
double score = getScore(width, height);
if (score < bestScore) {
bestMatchWidth = width;
bestMatchHeight = height;
bestScore = score;
}
}
}
}
void setWidthHeight() {
cleanUp();
initCapture(cachedIndex);
}
void cleanUp() { void cleanUp() {
captureListener.context = nullptr;
sessionListener.context = nullptr;
if (sessionState == CaptureSessionState::ACTIVE) { if (sessionState == CaptureSessionState::ACTIVE) {
ACameraCaptureSession_stopRepeating(captureSession.get()); ACameraCaptureSession_stopRepeating(captureSession.get());
} }
captureSession = nullptr; captureSession.reset();
if (targetAdded) { if (targetAdded) {
ACaptureRequest_removeTarget(captureRequest.get(), outputTarget.get()); ACaptureRequest_removeTarget(captureRequest.get(), outputTarget.get());
targetAdded = false; targetAdded = false;
} }
captureRequest = nullptr; captureRequest.reset();
outputTarget = nullptr; outputTarget.reset();
if (sessionOutputAdded) { if (sessionOutputAdded) {
ACaptureSessionOutputContainer_remove(outputContainer.get(), sessionOutput.get()); ACaptureSessionOutputContainer_remove(outputContainer.get(), sessionOutput.get());
sessionOutputAdded = false; sessionOutputAdded = false;
} }
sessionOutput = nullptr; sessionOutput.reset();
nativeWindow = nullptr; nativeWindow.reset();
outputContainer = nullptr; outputContainer.reset();
cameraDevice = nullptr; cameraDevice.reset();
cameraManager = nullptr; cameraManager.reset();
imageReader = nullptr; imageReader.reset();
} }
template<typename FuncT, typename T> template<typename FuncT, typename T>
@ -775,38 +694,70 @@ public:
return request && return request &&
setFn(request, tag, 1, &data) == ACAMERA_OK && setFn(request, tag, 1, &data) == ACAMERA_OK &&
ACameraCaptureSession_setRepeatingRequest(captureSession.get(), ACameraCaptureSession_setRepeatingRequest(captureSession.get(),
GetCaptureCallback(), &captureCallbacks,
1, &request, nullptr) == ACAMERA_OK; 1, &request, nullptr) == ACAMERA_OK;
} }
}; };
/******************************** Device management *******************************/
/**
 * Camera device state callback: the device was disconnected.
 * Purely informational — logs the id of the device that went away.
 */
void AndroidCameraCapture::OnDeviceDisconnect(void* /* ctx */, ACameraDevice* dev) {
    const char* deviceId = ACameraDevice_getId(dev);
    if (deviceId == nullptr) {
        deviceId = "<null>";
    }
    LOGW("Device %s disconnected", deviceId);
}
void AndroidCameraCapture::OnDeviceError(void* /* ctx */, ACameraDevice* dev, int err) {
const char *id = ACameraDevice_getId(dev);
LOGI("Camera Device Error: %#x, Device %s", err, id ? id : "<null>");
switch (err) {
case ERROR_CAMERA_IN_USE:
LOGI("Camera in use");
break;
case ERROR_CAMERA_SERVICE:
LOGI("Fatal Error occurred in Camera Service");
break;
case ERROR_CAMERA_DEVICE:
LOGI("Fatal Error occurred in Camera Device");
break;
case ERROR_CAMERA_DISABLED:
LOGI("Camera disabled");
break;
case ERROR_MAX_CAMERAS_IN_USE:
LOGI("System limit for maximum concurrent cameras used was exceeded");
break;
default:
LOGI("Unknown Camera Device Error: %#x", err);
}
}
/******************************** Session management *******************************/
void OnSessionClosed(void* context, ACameraCaptureSession* session) { void AndroidCameraCapture::OnSessionClosed(void* context, ACameraCaptureSession* session) {
if (context == nullptr) return; if (context == nullptr) return;
LOGW("session %p closed", session); LOGW("session %p closed", session);
reinterpret_cast<AndroidCameraCapture*>(context)->setSessionState(CaptureSessionState::CLOSED); static_cast<AndroidCameraCapture*>(context)->sessionState = CaptureSessionState::CLOSED;
} }
void OnSessionReady(void* context, ACameraCaptureSession* session) { void AndroidCameraCapture::OnSessionReady(void* context, ACameraCaptureSession* session) {
if (context == nullptr) return; if (context == nullptr) return;
LOGW("session %p ready", session); LOGW("session %p ready", session);
reinterpret_cast<AndroidCameraCapture*>(context)->setSessionState(CaptureSessionState::READY); static_cast<AndroidCameraCapture*>(context)->sessionState = CaptureSessionState::READY;
} }
void OnSessionActive(void* context, ACameraCaptureSession* session) { void AndroidCameraCapture::OnSessionActive(void* context, ACameraCaptureSession* session) {
if (context == nullptr) return; if (context == nullptr) return;
LOGW("session %p active", session); LOGW("session %p active", session);
reinterpret_cast<AndroidCameraCapture*>(context)->setSessionState(CaptureSessionState::ACTIVE); static_cast<AndroidCameraCapture*>(context)->sessionState = CaptureSessionState::ACTIVE;
} }
void OnCaptureCompleted(void* context, void AndroidCameraCapture::OnCaptureCompleted(void* context,
ACameraCaptureSession* session, ACameraCaptureSession* session,
ACaptureRequest* /* request */, ACaptureRequest* /* request */,
const ACameraMetadata* /* result */) { const ACameraMetadata* /* result */) {
if (context == nullptr) return; if (context == nullptr) return;
LOGV("session %p capture completed", session); LOGV("session %p capture completed", session);
AndroidCameraCapture* cameraCapture = reinterpret_cast<AndroidCameraCapture*>(context); AndroidCameraCapture* cameraCapture = static_cast<AndroidCameraCapture*>(context);
std::unique_lock<std::mutex> lock(cameraCapture->mtx); std::unique_lock<std::mutex> lock(cameraCapture->mtx);
if (cameraCapture->waitingCapture) { if (cameraCapture->waitingCapture) {
@ -816,13 +767,13 @@ void OnCaptureCompleted(void* context,
} }
} }
void OnCaptureFailed(void* context, void AndroidCameraCapture::OnCaptureFailed(void* context,
ACameraCaptureSession* session, ACameraCaptureSession* session,
ACaptureRequest* /* request */, ACaptureRequest* /* request */,
ACameraCaptureFailure* /* failure */) { ACameraCaptureFailure* /* failure */) {
if (context == nullptr) return; if (context == nullptr) return;
LOGV("session %p capture failed", session); LOGV("session %p capture failed", session);
AndroidCameraCapture* cameraCapture = reinterpret_cast<AndroidCameraCapture*>(context); AndroidCameraCapture* cameraCapture = static_cast<AndroidCameraCapture*>(context);
std::unique_lock<std::mutex> lock(cameraCapture->mtx); std::unique_lock<std::mutex> lock(cameraCapture->mtx);
if (cameraCapture->waitingCapture) { if (cameraCapture->waitingCapture) {