From 69230df9905534cda15becd44c0109874c4be5f0 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Wed, 29 Aug 2012 17:37:16 -0700 Subject: Camera2: Basic ZSL and precapture trigger support. - Add capture sequencer to control still image capture process - Use precapture trigger for standard capture in sequencer - Add infrastructure for reprocessing streams - Add ZSL processor to maintain ZSL queue - Add ZSL capture sequence to sequencer This patch sets up ZSL mode and precapture triggers. For now, to enable zsl mode, set the system property camera.zsl_mode to 1. Bug: 6243944 Change-Id: Icf8cb1a83a7c11a152a11007c8f3c54f8ea1c70c --- services/camera/libcameraservice/Android.mk | 6 +- services/camera/libcameraservice/Camera2Client.cpp | 538 +++++---------------- services/camera/libcameraservice/Camera2Client.h | 38 +- services/camera/libcameraservice/Camera2Device.cpp | 284 ++++++++++- services/camera/libcameraservice/Camera2Device.h | 107 ++++ .../libcameraservice/camera2/CallbackProcessor.cpp | 2 +- .../libcameraservice/camera2/CallbackProcessor.h | 2 +- .../libcameraservice/camera2/CameraMetadata.cpp | 4 + .../libcameraservice/camera2/CameraMetadata.h | 5 + .../libcameraservice/camera2/CaptureProcessor.cpp | 248 ---------- .../libcameraservice/camera2/CaptureProcessor.h | 79 --- .../libcameraservice/camera2/CaptureSequencer.cpp | 506 +++++++++++++++++++ .../libcameraservice/camera2/CaptureSequencer.h | 154 ++++++ .../libcameraservice/camera2/FrameProcessor.cpp | 67 ++- .../libcameraservice/camera2/FrameProcessor.h | 19 + .../libcameraservice/camera2/JpegProcessor.cpp | 253 ++++++++++ .../libcameraservice/camera2/JpegProcessor.h | 82 ++++ .../camera/libcameraservice/camera2/Parameters.cpp | 203 +++++++- .../camera/libcameraservice/camera2/Parameters.h | 26 +- .../libcameraservice/camera2/ZslProcessor.cpp | 378 +++++++++++++++ .../camera/libcameraservice/camera2/ZslProcessor.h | 119 +++++ 21 files changed, 2344 insertions(+), 776 deletions(-) delete mode 100644 
services/camera/libcameraservice/camera2/CaptureProcessor.cpp delete mode 100644 services/camera/libcameraservice/camera2/CaptureProcessor.h create mode 100644 services/camera/libcameraservice/camera2/CaptureSequencer.cpp create mode 100644 services/camera/libcameraservice/camera2/CaptureSequencer.h create mode 100644 services/camera/libcameraservice/camera2/JpegProcessor.cpp create mode 100644 services/camera/libcameraservice/camera2/JpegProcessor.h create mode 100644 services/camera/libcameraservice/camera2/ZslProcessor.cpp create mode 100644 services/camera/libcameraservice/camera2/ZslProcessor.h (limited to 'services/camera/libcameraservice') diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index 1370c62..e27a065 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -14,8 +14,10 @@ LOCAL_SRC_FILES:= \ camera2/CameraMetadata.cpp \ camera2/Parameters.cpp \ camera2/FrameProcessor.cpp \ - camera2/CaptureProcessor.cpp \ - camera2/CallbackProcessor.cpp + camera2/JpegProcessor.cpp \ + camera2/CallbackProcessor.cpp \ + camera2/ZslProcessor.cpp \ + camera2/CaptureSequencer.cpp \ LOCAL_SHARED_LIBRARIES:= \ libui \ diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index acd290d..5400604 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -59,12 +59,21 @@ Camera2Client::Camera2Client(const sp& cameraService, mRecordingHeapCount(kDefaultRecordingHeapCount) { ATRACE_CALL(); - ALOGV("%s: Created client for camera %d", __FUNCTION__, cameraId); + ALOGI("Camera %d: Opened", cameraId); mDevice = new Camera2Device(cameraId); SharedParameters::Lock l(mParameters); l.mParameters.state = Parameters::DISCONNECTED; + + char value[PROPERTY_VALUE_MAX]; + property_get("camera.zsl_mode", value, "0"); + if (!strcmp(value,"1")) { + ALOGI("Camera 
%d: Enabling ZSL mode", cameraId); + l.mParameters.zslMode = true; + } else { + l.mParameters.zslMode = false; + } } status_t Camera2Client::checkPid(const char* checkLocation) const { @@ -100,20 +109,32 @@ status_t Camera2Client::initialize(camera_module_t *module) return NO_INIT; } + String8 threadName; + mFrameProcessor = new FrameProcessor(this); - String8 frameThreadName = String8::format("Camera2Client[%d]::FrameProcessor", + threadName = String8::format("Camera2Client[%d]::FrameProcessor", + mCameraId); + mFrameProcessor->run(threadName.string()); + + mCaptureSequencer = new CaptureSequencer(this); + threadName = String8::format("Camera2Client[%d]::CaptureSequencer", + mCameraId); + mCaptureSequencer->run(threadName.string()); + + mJpegProcessor = new JpegProcessor(this, mCaptureSequencer); + threadName = String8::format("Camera2Client[%d]::JpegProcessor", mCameraId); - mFrameProcessor->run(frameThreadName.string()); + mJpegProcessor->run(threadName.string()); - mCaptureProcessor = new CaptureProcessor(this); - String8 captureThreadName = - String8::format("Camera2Client[%d]::CaptureProcessor", mCameraId); - mCaptureProcessor->run(captureThreadName.string()); + mZslProcessor = new ZslProcessor(this, mCaptureSequencer); + threadName = String8::format("Camera2Client[%d]::ZslProcessor", + mCameraId); + mZslProcessor->run(threadName.string()); mCallbackProcessor = new CallbackProcessor(this); - String8 callbackThreadName = - String8::format("Camera2Client[%d]::CallbackProcessor", mCameraId); - mCallbackProcessor->run(callbackThreadName.string()); + threadName = String8::format("Camera2Client[%d]::CallbackProcessor", + mCameraId); + mCallbackProcessor->run(threadName.string()); if (gLogLevel >= 1) { ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__, @@ -126,7 +147,7 @@ status_t Camera2Client::initialize(camera_module_t *module) Camera2Client::~Camera2Client() { ATRACE_CALL(); - ALOGV("%s: Camera %d: Shutting down client.", __FUNCTION__, 
mCameraId); + ALOGV("Camera %d: Shutting down", mCameraId); mDestructionStarted = true; @@ -135,7 +156,7 @@ Camera2Client::~Camera2Client() { disconnect(); mFrameProcessor->requestExit(); - ALOGV("%s: Camera %d: Shutdown complete", __FUNCTION__, mCameraId); + ALOGI("Camera %d: Closed", mCameraId); } status_t Camera2Client::dump(int fd, const Vector& args) { @@ -299,10 +320,12 @@ status_t Camera2Client::dump(int fd, const Vector& args) { p.videoStabilization ? "enabled" : "disabled"); result.append(" Current streams:\n"); - result.appendFormat(" Preview stream ID: %d\n", mPreviewStreamId); + result.appendFormat(" Preview stream ID: %d\n", + getPreviewStreamId()); result.appendFormat(" Capture stream ID: %d\n", - mCaptureProcessor->getStreamId()); - result.appendFormat(" Recording stream ID: %d\n", mRecordingStreamId); + getCaptureStreamId()); + result.appendFormat(" Recording stream ID: %d\n", + getRecordingStreamId()); result.append(" Current requests:\n"); if (mPreviewRequest.entryCount() != 0) { @@ -314,15 +337,6 @@ status_t Camera2Client::dump(int fd, const Vector& args) { write(fd, result.string(), result.size()); } - if (mCaptureRequest.entryCount() != 0) { - result = " Capture request:\n"; - write(fd, result.string(), result.size()); - mCaptureRequest.dump(fd, 2, 6); - } else { - result = " Capture request: undefined\n"; - write(fd, result.string(), result.size()); - } - if (mRecordingRequest.entryCount() != 0) { result = " Recording request:\n"; write(fd, result.string(), result.size()); @@ -332,6 +346,8 @@ status_t Camera2Client::dump(int fd, const Vector& args) { write(fd, result.string(), result.size()); } + mCaptureSequencer->dump(fd, args); + mFrameProcessor->dump(fd, args); result = " Device dump:\n"; @@ -366,7 +382,7 @@ void Camera2Client::disconnect() { mPreviewStreamId = NO_STREAM; } - mCaptureProcessor->deleteStream(); + mJpegProcessor->deleteStream(); if (mRecordingStreamId != NO_STREAM) { mDevice->deleteStream(mRecordingStreamId); @@ -623,6 
+639,14 @@ status_t Camera2Client::startPreviewL(Parameters ¶ms, bool restart) { return res; } } + if (params.zslMode) { + res = mZslProcessor->updateStream(params); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + } if (mPreviewRequest.entryCount() == 0) { res = updatePreviewRequest(params); @@ -633,18 +657,20 @@ status_t Camera2Client::startPreviewL(Parameters ¶ms, bool restart) { } } + Vector outputStreams; + outputStreams.push(getPreviewStreamId()); + if (callbacksEnabled) { - uint8_t outputStreams[2] = - { mPreviewStreamId, mCallbackProcessor->getStreamId() }; - res = mPreviewRequest.update( - ANDROID_REQUEST_OUTPUT_STREAMS, - outputStreams, 2); - } else { - uint8_t outputStreams[1] = { mPreviewStreamId }; - res = mPreviewRequest.update( - ANDROID_REQUEST_OUTPUT_STREAMS, - outputStreams, 1); + outputStreams.push(getCallbackStreamId()); } + if (params.zslMode) { + outputStreams.push(getZslStreamId()); + } + + res = mPreviewRequest.update( + ANDROID_REQUEST_OUTPUT_STREAMS, + outputStreams); + if (res != OK) { ALOGE("%s: Camera %d: Unable to set up preview request: %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); @@ -817,14 +843,19 @@ status_t Camera2Client::startRecordingL(Parameters ¶ms, bool restart) { } if (callbacksEnabled) { - uint8_t outputStreams[3] = - { mPreviewStreamId, mRecordingStreamId, - mCallbackProcessor->getStreamId() }; + uint8_t outputStreams[3] ={ + getPreviewStreamId(), + getRecordingStreamId(), + getCallbackStreamId() + }; res = mRecordingRequest.update( ANDROID_REQUEST_OUTPUT_STREAMS, outputStreams, 3); } else { - uint8_t outputStreams[2] = { mPreviewStreamId, mRecordingStreamId }; + uint8_t outputStreams[2] = { + getPreviewStreamId(), + getRecordingStreamId() + }; res = mRecordingRequest.update( ANDROID_REQUEST_OUTPUT_STREAMS, outputStreams, 2); @@ -1020,8 +1051,18 @@ status_t Camera2Client::takePicture(int msgType) { 
__FUNCTION__, mCameraId); return INVALID_OPERATION; case Parameters::PREVIEW: - case Parameters::RECORD: // Good to go for takePicture + res = commandStopFaceDetectionL(l.mParameters); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to stop face detection for still capture", + __FUNCTION__, mCameraId); + return res; + } + l.mParameters.state = Parameters::STILL_CAPTURE; + break; + case Parameters::RECORD: + // Good to go for video snapshot + l.mParameters.state = Parameters::VIDEO_SNAPSHOT; break; case Parameters::STILL_CAPTURE: case Parameters::VIDEO_SNAPSHOT: @@ -1032,130 +1073,20 @@ status_t Camera2Client::takePicture(int msgType) { ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId); - res = mCaptureProcessor->updateStream(l.mParameters); + res = mJpegProcessor->updateStream(l.mParameters); if (res != OK) { ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); return res; } - if (mCaptureRequest.entryCount() == 0) { - res = updateCaptureRequest(l.mParameters); - if (res != OK) { - ALOGE("%s: Camera %d: Can't create still image capture request: " - "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - } - - bool callbacksEnabled = l.mParameters.previewCallbackFlags & - CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK; - bool recordingEnabled = (l.mParameters.state == Parameters::RECORD); - - int captureStreamId = mCaptureProcessor->getStreamId(); - - int streamSwitch = (callbacksEnabled ? 0x2 : 0x0) + - (recordingEnabled ? 
0x1 : 0x0); - switch ( streamSwitch ) { - case 0: { // No recording, callbacks - uint8_t streamIds[2] = { - mPreviewStreamId, - captureStreamId - }; - res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS, - streamIds, 2); - break; - } - case 1: { // Recording - uint8_t streamIds[3] = { - mPreviewStreamId, - mRecordingStreamId, - captureStreamId - }; - res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS, - streamIds, 3); - break; - } - case 2: { // Callbacks - uint8_t streamIds[3] = { - mPreviewStreamId, - mCallbackProcessor->getStreamId(), - captureStreamId - }; - res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS, - streamIds, 3); - break; - } - case 3: { // Both - uint8_t streamIds[4] = { - mPreviewStreamId, - mCallbackProcessor->getStreamId(), - mRecordingStreamId, - captureStreamId - }; - res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS, - streamIds, 4); - break; - } - }; - if (res != OK) { - ALOGE("%s: Camera %d: Unable to set up still image capture request: " - "%s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - res = mCaptureRequest.sort(); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to sort capture request: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - - CameraMetadata captureCopy = mCaptureRequest; - if (captureCopy.entryCount() == 0) { - ALOGE("%s: Camera %d: Unable to copy capture request for HAL device", - __FUNCTION__, mCameraId); - return NO_MEMORY; - } - - if (l.mParameters.state == Parameters::PREVIEW) { - res = mDevice->clearStreamingRequest(); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to stop preview for still capture: " - "%s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - } - // TODO: Capture should be atomic with setStreamingRequest here - res = mDevice->capture(captureCopy); + res = mCaptureSequencer->startCapture(); if (res != OK) { - ALOGE("%s: Camera %d: Unable to submit still image capture request: " - 
"%s (%d)", + ALOGE("%s: Camera %d: Unable to start capture: %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - - switch (l.mParameters.state) { - case Parameters::PREVIEW: - l.mParameters.state = Parameters::STILL_CAPTURE; - res = commandStopFaceDetectionL(l.mParameters); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to stop face detection for still capture", - __FUNCTION__, mCameraId); - return res; - } - break; - case Parameters::RECORD: - l.mParameters.state = Parameters::VIDEO_SNAPSHOT; - break; - default: - ALOGE("%s: Camera %d: Unknown state for still capture!", - __FUNCTION__, mCameraId); - return INVALID_OPERATION; } - return OK; + return res; } status_t Camera2Client::setParameters(const String8& params) { @@ -1501,6 +1432,7 @@ void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) { void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) { ALOGV("%s: Autoexposure state now %d, last trigger %d", __FUNCTION__, newState, triggerId); + mCaptureSequencer->notifyAutoExposure(newState, triggerId); } void Camera2Client::notifyAutoWhitebalance(uint8_t newState, int triggerId) { @@ -1508,7 +1440,7 @@ void Camera2Client::notifyAutoWhitebalance(uint8_t newState, int triggerId) { __FUNCTION__, newState, triggerId); } -int Camera2Client::getCameraId() { +int Camera2Client::getCameraId() const { return mCameraId; } @@ -1520,6 +1452,35 @@ camera2::SharedParameters& Camera2Client::getParameters() { return mParameters; } +int Camera2Client::getPreviewStreamId() const { + return mPreviewStreamId; +} + +int Camera2Client::getCaptureStreamId() const { + return mJpegProcessor->getStreamId(); +} + +int Camera2Client::getCallbackStreamId() const { + return mCallbackProcessor->getStreamId(); +} + +int Camera2Client::getRecordingStreamId() const { + return mRecordingStreamId; +} + +int Camera2Client::getZslStreamId() const { + return mZslProcessor->getStreamId(); +} + +status_t Camera2Client::registerFrameListener(int32_t 
id, + wp listener) { + return mFrameProcessor->registerListener(id, listener); +} + +status_t Camera2Client::removeFrameListener(int32_t id) { + return mFrameProcessor->removeListener(id); +} + Camera2Client::SharedCameraClient::Lock::Lock(SharedCameraClient &client): mCameraClient(client.mCameraClient), mSharedClient(client) { @@ -1546,6 +1507,10 @@ void Camera2Client::SharedCameraClient::clear() { mCameraClient.clear(); } +const int32_t Camera2Client::kPreviewRequestId; +const int32_t Camera2Client::kRecordRequestId; +const int32_t Camera2Client::kFirstCaptureRequestId; + void Camera2Client::onRecordingFrameAvailable() { ATRACE_CALL(); status_t res; @@ -1656,13 +1621,6 @@ status_t Camera2Client::updateRequests(const Parameters ¶ms) { __FUNCTION__, mCameraId, strerror(-res), res); return res; } - res = updateCaptureRequest(params); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update capture request: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - res = updateRecordingRequest(params); if (res != OK) { ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)", @@ -1761,7 +1719,7 @@ status_t Camera2Client::updatePreviewRequest(const Parameters ¶ms) { } } - res = updateRequestCommon(&mPreviewRequest, params); + res = params.updateRequest(&mPreviewRequest); if (res != OK) { ALOGE("%s: Camera %d: Unable to update common entries of preview " "request: %s (%d)", __FUNCTION__, mCameraId, @@ -1769,65 +1727,8 @@ status_t Camera2Client::updatePreviewRequest(const Parameters ¶ms) { return res; } - return OK; -} - -status_t Camera2Client::updateCaptureRequest(const Parameters ¶ms) { - ATRACE_CALL(); - status_t res; - if (mCaptureRequest.entryCount() == 0) { - res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_STILL_CAPTURE, - &mCaptureRequest); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to create default still image request:" - " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - } - - res = 
updateRequestCommon(&mCaptureRequest, params); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update common entries of capture " - "request: %s (%d)", __FUNCTION__, mCameraId, - strerror(-res), res); - return res; - } - - res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_SIZE, - params.jpegThumbSize, 2); - if (res != OK) return res; - res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_QUALITY, - ¶ms.jpegThumbQuality, 1); - if (res != OK) return res; - res = mCaptureRequest.update(ANDROID_JPEG_QUALITY, - ¶ms.jpegQuality, 1); - if (res != OK) return res; - res = mCaptureRequest.update( - ANDROID_JPEG_ORIENTATION, - ¶ms.jpegRotation, 1); - if (res != OK) return res; - - if (params.gpsEnabled) { - res = mCaptureRequest.update( - ANDROID_JPEG_GPS_COORDINATES, - params.gpsCoordinates, 3); - if (res != OK) return res; - res = mCaptureRequest.update( - ANDROID_JPEG_GPS_TIMESTAMP, - ¶ms.gpsTimestamp, 1); - if (res != OK) return res; - res = mCaptureRequest.update( - ANDROID_JPEG_GPS_PROCESSING_METHOD, - params.gpsProcessingMethod); - if (res != OK) return res; - } else { - res = mCaptureRequest.erase(ANDROID_JPEG_GPS_COORDINATES); - if (res != OK) return res; - res = mCaptureRequest.erase(ANDROID_JPEG_GPS_TIMESTAMP); - if (res != OK) return res; - res = mCaptureRequest.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD); - if (res != OK) return res; - } + res = mPreviewRequest.update(ANDROID_REQUEST_ID, + &kPreviewRequestId, 1); return OK; } @@ -1845,7 +1746,7 @@ status_t Camera2Client::updateRecordingRequest(const Parameters ¶ms) { } } - res = updateRequestCommon(&mRecordingRequest, params); + res = params.updateRequest(&mRecordingRequest); if (res != OK) { ALOGE("%s: Camera %d: Unable to update common entries of recording " "request: %s (%d)", __FUNCTION__, mCameraId, @@ -1913,197 +1814,6 @@ status_t Camera2Client::updateRecordingStream(const Parameters ¶ms) { return OK; } -status_t Camera2Client::updateRequestCommon(CameraMetadata *request, - const Parameters ¶ms) const { 
- ATRACE_CALL(); - status_t res; - res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, - params.previewFpsRange, 2); - if (res != OK) return res; - - uint8_t wbMode = params.autoWhiteBalanceLock ? - (uint8_t)ANDROID_CONTROL_AWB_LOCKED : params.wbMode; - res = request->update(ANDROID_CONTROL_AWB_MODE, - &wbMode, 1); - if (res != OK) return res; - res = request->update(ANDROID_CONTROL_EFFECT_MODE, - ¶ms.effectMode, 1); - if (res != OK) return res; - res = request->update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, - ¶ms.antibandingMode, 1); - if (res != OK) return res; - - uint8_t controlMode = - (params.sceneMode == ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) ? - ANDROID_CONTROL_AUTO : ANDROID_CONTROL_USE_SCENE_MODE; - res = request->update(ANDROID_CONTROL_MODE, - &controlMode, 1); - if (res != OK) return res; - if (controlMode == ANDROID_CONTROL_USE_SCENE_MODE) { - res = request->update(ANDROID_CONTROL_SCENE_MODE, - ¶ms.sceneMode, 1); - if (res != OK) return res; - } - - uint8_t flashMode = ANDROID_FLASH_OFF; - uint8_t aeMode; - switch (params.flashMode) { - case Parameters::FLASH_MODE_OFF: - aeMode = ANDROID_CONTROL_AE_ON; break; - case Parameters::FLASH_MODE_AUTO: - aeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH; break; - case Parameters::FLASH_MODE_ON: - aeMode = ANDROID_CONTROL_AE_ON_ALWAYS_FLASH; break; - case Parameters::FLASH_MODE_TORCH: - aeMode = ANDROID_CONTROL_AE_ON; - flashMode = ANDROID_FLASH_TORCH; - break; - case Parameters::FLASH_MODE_RED_EYE: - aeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE; break; - default: - ALOGE("%s: Camera %d: Unknown flash mode %d", __FUNCTION__, - mCameraId, params.flashMode); - return BAD_VALUE; - } - if (params.autoExposureLock) aeMode = ANDROID_CONTROL_AE_LOCKED; - - res = request->update(ANDROID_FLASH_MODE, - &flashMode, 1); - if (res != OK) return res; - res = request->update(ANDROID_CONTROL_AE_MODE, - &aeMode, 1); - if (res != OK) return res; - - float focusDistance = 0; // infinity focus in diopters - uint8_t focusMode; - 
switch (params.focusMode) { - case Parameters::FOCUS_MODE_AUTO: - case Parameters::FOCUS_MODE_MACRO: - case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO: - case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE: - case Parameters::FOCUS_MODE_EDOF: - focusMode = params.focusMode; - break; - case Parameters::FOCUS_MODE_INFINITY: - case Parameters::FOCUS_MODE_FIXED: - focusMode = ANDROID_CONTROL_AF_OFF; - break; - default: - ALOGE("%s: Camera %d: Unknown focus mode %d", __FUNCTION__, - mCameraId, params.focusMode); - return BAD_VALUE; - } - res = request->update(ANDROID_LENS_FOCUS_DISTANCE, - &focusDistance, 1); - if (res != OK) return res; - res = request->update(ANDROID_CONTROL_AF_MODE, - &focusMode, 1); - if (res != OK) return res; - - size_t focusingAreasSize = params.focusingAreas.size() * 5; - int32_t *focusingAreas = new int32_t[focusingAreasSize]; - for (size_t i = 0; i < focusingAreasSize; i += 5) { - if (params.focusingAreas[i].weight != 0) { - focusingAreas[i + 0] = - params.normalizedXToArray(params.focusingAreas[i].left); - focusingAreas[i + 1] = - params.normalizedYToArray(params.focusingAreas[i].top); - focusingAreas[i + 2] = - params.normalizedXToArray(params.focusingAreas[i].right); - focusingAreas[i + 3] = - params.normalizedYToArray(params.focusingAreas[i].bottom); - } else { - focusingAreas[i + 0] = 0; - focusingAreas[i + 1] = 0; - focusingAreas[i + 2] = 0; - focusingAreas[i + 3] = 0; - } - focusingAreas[i + 4] = params.focusingAreas[i].weight; - } - res = request->update(ANDROID_CONTROL_AF_REGIONS, - focusingAreas,focusingAreasSize); - if (res != OK) return res; - delete[] focusingAreas; - - res = request->update(ANDROID_CONTROL_AE_EXP_COMPENSATION, - ¶ms.exposureCompensation, 1); - if (res != OK) return res; - - size_t meteringAreasSize = params.meteringAreas.size() * 5; - int32_t *meteringAreas = new int32_t[meteringAreasSize]; - for (size_t i = 0; i < meteringAreasSize; i += 5) { - if (params.meteringAreas[i].weight != 0) { - meteringAreas[i + 0] = - 
params.normalizedXToArray(params.meteringAreas[i].left); - meteringAreas[i + 1] = - params.normalizedYToArray(params.meteringAreas[i].top); - meteringAreas[i + 2] = - params.normalizedXToArray(params.meteringAreas[i].right); - meteringAreas[i + 3] = - params.normalizedYToArray(params.meteringAreas[i].bottom); - } else { - meteringAreas[i + 0] = 0; - meteringAreas[i + 1] = 0; - meteringAreas[i + 2] = 0; - meteringAreas[i + 3] = 0; - } - meteringAreas[i + 4] = params.meteringAreas[i].weight; - } - res = request->update(ANDROID_CONTROL_AE_REGIONS, - meteringAreas, meteringAreasSize); - if (res != OK) return res; - - res = request->update(ANDROID_CONTROL_AWB_REGIONS, - meteringAreas, meteringAreasSize); - if (res != OK) return res; - delete[] meteringAreas; - - // Need to convert zoom index into a crop rectangle. The rectangle is - // chosen to maximize its area on the sensor - - camera_metadata_ro_entry_t maxDigitalZoom = - mParameters.staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM); - float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) / - (params.NUM_ZOOM_STEPS-1); - float zoomRatio = 1 + zoomIncrement * params.zoom; - - float zoomLeft, zoomTop, zoomWidth, zoomHeight; - if (params.previewWidth >= params.previewHeight) { - zoomWidth = params.fastInfo.arrayWidth / zoomRatio; - zoomHeight = zoomWidth * - params.previewHeight / params.previewWidth; - } else { - zoomHeight = params.fastInfo.arrayHeight / zoomRatio; - zoomWidth = zoomHeight * - params.previewWidth / params.previewHeight; - } - zoomLeft = (params.fastInfo.arrayWidth - zoomWidth) / 2; - zoomTop = (params.fastInfo.arrayHeight - zoomHeight) / 2; - - int32_t cropRegion[3] = { zoomLeft, zoomTop, zoomWidth }; - res = request->update(ANDROID_SCALER_CROP_REGION, - cropRegion, 3); - if (res != OK) return res; - - // TODO: Decide how to map recordingHint, or whether just to ignore it - - uint8_t vstabMode = params.videoStabilization ? 
- ANDROID_CONTROL_VIDEO_STABILIZATION_ON : - ANDROID_CONTROL_VIDEO_STABILIZATION_OFF; - res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, - &vstabMode, 1); - if (res != OK) return res; - - uint8_t faceDetectMode = params.enableFaceDetect ? - params.fastInfo.bestFaceDetectMode : - (uint8_t)ANDROID_STATS_FACE_DETECTION_OFF; - res = request->update(ANDROID_STATS_FACE_DETECT_MODE, - &faceDetectMode, 1); - if (res != OK) return res; - - return OK; -} - size_t Camera2Client::calculateBufferSize(int width, int height, int format, int stride) { switch (format) { diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h index b2fd636..df5dbf4 100644 --- a/services/camera/libcameraservice/Camera2Client.h +++ b/services/camera/libcameraservice/Camera2Client.h @@ -21,7 +21,9 @@ #include "CameraService.h" #include "camera2/Parameters.h" #include "camera2/FrameProcessor.h" -#include "camera2/CaptureProcessor.h" +#include "camera2/JpegProcessor.h" +#include "camera2/ZslProcessor.h" +#include "camera2/CaptureSequencer.h" #include "camera2/CallbackProcessor.h" #include #include @@ -95,10 +97,20 @@ public: * Interface used by independent components of Camera2Client. */ - int getCameraId(); + int getCameraId() const; const sp& getCameraDevice(); camera2::SharedParameters& getParameters(); + int getPreviewStreamId() const; + int getCaptureStreamId() const; + int getCallbackStreamId() const; + int getRecordingStreamId() const; + int getZslStreamId() const; + + status_t registerFrameListener(int32_t id, + wp listener); + status_t removeFrameListener(int32_t id); + // Simple class to ensure that access to ICameraClient is serialized by // requiring mCameraClientLock to be locked before access to mCameraClient // is possible. 
@@ -123,6 +135,10 @@ public: static size_t calculateBufferSize(int width, int height, int format, int stride); + static const int32_t kPreviewRequestId = 1000; + static const int32_t kRecordRequestId = 2000; + static const int32_t kFirstCaptureRequestId = 3000; + private: /** ICamera interface-related private members */ @@ -183,9 +199,9 @@ private: /* Still image capture related members */ - sp mCaptureProcessor; - CameraMetadata mCaptureRequest; - status_t updateCaptureRequest(const Parameters ¶ms); + sp mCaptureSequencer; + sp mJpegProcessor; + sp mZslProcessor; /* Recording related members */ @@ -228,18 +244,6 @@ private: // Verify that caller is the owner of the camera status_t checkPid(const char *checkLocation) const; - - // Update parameters all requests use, based on mParameters - status_t updateRequestCommon(CameraMetadata *request, const Parameters ¶ms) const; - - // Map from sensor active array pixel coordinates to normalized camera - // parameter coordinates. The former are (0,0)-(array width - 1, array height - // - 1), the latter from (-1000,-1000)-(1000,1000) - int normalizedXToArray(int x) const; - int normalizedYToArray(int y) const; - int arrayXToNormalized(int width) const; - int arrayYToNormalized(int height) const; - }; }; // namespace android diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/Camera2Device.cpp index daeeebb..a171c46 100644 --- a/services/camera/libcameraservice/Camera2Device.cpp +++ b/services/camera/libcameraservice/Camera2Device.cpp @@ -206,6 +206,42 @@ status_t Camera2Device::createStream(sp consumer, return OK; } +status_t Camera2Device::createReprocessStreamFromStream(int outputId, int *id) { + status_t res; + ALOGV("%s: E", __FUNCTION__); + + bool found = false; + StreamList::iterator streamI; + for (streamI = mStreams.begin(); + streamI != mStreams.end(); streamI++) { + if ((*streamI)->getId() == outputId) { + found = true; + break; + } + } + if (!found) { + ALOGE("%s: 
Camera %d: Output stream %d doesn't exist; can't create " + "reprocess stream from it!", __FUNCTION__, mId, outputId); + return BAD_VALUE; + } + + sp stream = new ReprocessStreamAdapter(mDevice); + + res = stream->connectToDevice((*streamI)); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to create reprocessing stream from "\ + "stream %d: %s (%d)", __FUNCTION__, mId, outputId, + strerror(-res), res); + return res; + } + + *id = stream->getId(); + + mReprocessStreams.push_back(stream); + return OK; +} + + status_t Camera2Device::getStreamInfo(int id, uint32_t *width, uint32_t *height, uint32_t *format) { ALOGV("%s: E", __FUNCTION__); @@ -277,6 +313,33 @@ status_t Camera2Device::deleteStream(int id) { return OK; } +status_t Camera2Device::deleteReprocessStream(int id) { + ALOGV("%s: E", __FUNCTION__); + bool found = false; + for (ReprocessStreamList::iterator streamI = mReprocessStreams.begin(); + streamI != mReprocessStreams.end(); streamI++) { + if ((*streamI)->getId() == id) { + status_t res = (*streamI)->release(); + if (res != OK) { + ALOGE("%s: Unable to release reprocess stream %d from " + "HAL device: %s (%d)", __FUNCTION__, id, + strerror(-res), res); + return res; + } + mReprocessStreams.erase(streamI); + found = true; + break; + } + } + if (!found) { + ALOGE("%s: Camera %d: Unable to find stream %d to delete", + __FUNCTION__, mId, id); + return BAD_VALUE; + } + return OK; +} + + status_t Camera2Device::createDefaultRequest(int templateId, CameraMetadata *request) { status_t err; @@ -405,6 +468,32 @@ status_t Camera2Device::triggerPrecaptureMetering(uint32_t id) { return res; } +status_t Camera2Device::pushReprocessBuffer(int reprocessStreamId, + buffer_handle_t *buffer, wp listener) { + ALOGV("%s: E", __FUNCTION__); + bool found = false; + status_t res = OK; + for (ReprocessStreamList::iterator streamI = mReprocessStreams.begin(); + streamI != mReprocessStreams.end(); streamI++) { + if ((*streamI)->getId() == reprocessStreamId) { + res = 
(*streamI)->pushIntoStream(buffer, listener); + if (res != OK) { + ALOGE("%s: Unable to push buffer to reprocess stream %d: %s (%d)", + __FUNCTION__, reprocessStreamId, strerror(-res), res); + return res; + } + found = true; + break; + } + } + if (!found) { + ALOGE("%s: Camera %d: Unable to find reprocess stream %d", + __FUNCTION__, mId, reprocessStreamId); + res = BAD_VALUE; + } + return res; +} + /** * Camera2Device::NotificationListener */ @@ -903,7 +992,7 @@ status_t Camera2Device::StreamAdapter::connectToDevice( } buffers[bufferIdx] = anwBuffers[bufferIdx]->handle; - ALOGV("%s: Buffer %p allocated", __FUNCTION__, (void*)(buffers[bufferIdx])); + ALOGV("%s: Buffer %p allocated", __FUNCTION__, (void*)buffers[bufferIdx]); } ALOGV("%s: Registering %d buffers with camera HAL", __FUNCTION__, mTotalBuffers); @@ -1094,5 +1183,198 @@ int Camera2Device::StreamAdapter::set_crop(const camera2_stream_ops_t* w, return native_window_set_crop(a, &crop); } +/** + * Camera2Device::ReprocessStreamAdapter + */ + +#ifndef container_of +#define container_of(ptr, type, member) \ + (type *)((char*)(ptr) - offsetof(type, member)) +#endif + +Camera2Device::ReprocessStreamAdapter::ReprocessStreamAdapter(camera2_device_t *d): + mState(RELEASED), + mDevice(d), + mId(-1), + mWidth(0), mHeight(0), mFormat(0), + mActiveBuffers(0), + mFrameCount(0) +{ + camera2_stream_in_ops::acquire_buffer = acquire_buffer; + camera2_stream_in_ops::release_buffer = release_buffer; +} + +Camera2Device::ReprocessStreamAdapter::~ReprocessStreamAdapter() { + if (mState != RELEASED) { + release(); + } +} + +status_t Camera2Device::ReprocessStreamAdapter::connectToDevice( + const sp &outputStream) { + status_t res; + ALOGV("%s: E", __FUNCTION__); + + if (mState != RELEASED) return INVALID_OPERATION; + if (outputStream == NULL) { + ALOGE("%s: Null base stream passed to reprocess stream adapter", + __FUNCTION__); + return BAD_VALUE; + } + + mBaseStream = outputStream; + mWidth = outputStream->getWidth(); + mHeight = 
outputStream->getHeight(); + mFormat = outputStream->getFormat(); + + ALOGV("%s: New reprocess stream parameters %d x %d, format 0x%x", + __FUNCTION__, mWidth, mHeight, mFormat); + + // Allocate device-side stream interface + + uint32_t id; + res = mDevice->ops->allocate_reprocess_stream_from_stream(mDevice, + outputStream->getId(), getStreamOps(), + &id); + if (res != OK) { + ALOGE("%s: Device reprocess stream allocation failed: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + ALOGV("%s: Allocated reprocess stream id %d based on stream %d", + __FUNCTION__, id, outputStream->getId()); + + mId = id; + + mState = ACTIVE; + + return OK; +} + +status_t Camera2Device::ReprocessStreamAdapter::release() { + status_t res; + ALOGV("%s: Releasing stream %d", __FUNCTION__, mId); + if (mState >= ACTIVE) { + res = mDevice->ops->release_reprocess_stream(mDevice, mId); + if (res != OK) { + ALOGE("%s: Unable to release stream %d", + __FUNCTION__, mId); + return res; + } + } + + List::iterator s; + for (s = mQueue.begin(); s != mQueue.end(); s++) { + sp listener = s->releaseListener.promote(); + if (listener != 0) listener->onBufferReleased(s->handle); + } + for (s = mInFlightQueue.begin(); s != mInFlightQueue.end(); s++) { + sp listener = s->releaseListener.promote(); + if (listener != 0) listener->onBufferReleased(s->handle); + } + mQueue.clear(); + mInFlightQueue.clear(); + + mState = RELEASED; + return OK; +} + +status_t Camera2Device::ReprocessStreamAdapter::pushIntoStream( + buffer_handle_t *handle, const wp &releaseListener) { + // TODO: Some error checking here would be nice + ALOGV("%s: Pushing buffer %p to stream", __FUNCTION__, (void*)(*handle)); + + QueueEntry entry; + entry.handle = handle; + entry.releaseListener = releaseListener; + mQueue.push_back(entry); + return OK; +} + +status_t Camera2Device::ReprocessStreamAdapter::dump(int fd, + const Vector& args) { + String8 result = + String8::format(" Reprocess stream %d: %d x %d, fmt 0x%x\n", + mId, 
mWidth, mHeight, mFormat); + result.appendFormat(" acquired buffers: %d\n", + mActiveBuffers); + result.appendFormat(" frame count: %d\n", + mFrameCount); + write(fd, result.string(), result.size()); + return OK; +} + +const camera2_stream_in_ops *Camera2Device::ReprocessStreamAdapter::getStreamOps() { + return static_cast(this); +} + +int Camera2Device::ReprocessStreamAdapter::acquire_buffer( + const camera2_stream_in_ops_t *w, + buffer_handle_t** buffer) { + int res; + ReprocessStreamAdapter* stream = + const_cast( + static_cast(w)); + if (stream->mState != ACTIVE) { + ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState); + return INVALID_OPERATION; + } + + if (stream->mQueue.empty()) { + *buffer = NULL; + return OK; + } + + QueueEntry &entry = *(stream->mQueue.begin()); + + *buffer = entry.handle; + + stream->mInFlightQueue.push_back(entry); + stream->mQueue.erase(stream->mQueue.begin()); + + stream->mActiveBuffers++; + + ALOGV("Stream %d acquire: Buffer %p acquired", stream->mId, + (void*)(**buffer)); + return OK; +} + +int Camera2Device::ReprocessStreamAdapter::release_buffer( + const camera2_stream_in_ops_t* w, + buffer_handle_t* buffer) { + ReprocessStreamAdapter *stream = + const_cast( + static_cast(w) ); + stream->mFrameCount++; + ALOGV("Reprocess stream %d release: Frame %d (%p)", + stream->mId, stream->mFrameCount, (void*)*buffer); + int state = stream->mState; + if (state != ACTIVE) { + ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state); + return INVALID_OPERATION; + } + stream->mActiveBuffers--; + + List::iterator s; + for (s = stream->mInFlightQueue.begin(); s != stream->mInFlightQueue.end(); s++) { + if ( s->handle == buffer ) break; + } + if (s == stream->mInFlightQueue.end()) { + ALOGE("%s: Can't find buffer %p in in-flight list!", __FUNCTION__, + buffer); + return INVALID_OPERATION; + } + + sp listener = s->releaseListener.promote(); + if (listener != 0) { + listener->onBufferReleased(s->handle); + } else { + 
ALOGE("%s: Can't free buffer - missing listener", __FUNCTION__); + } + stream->mInFlightQueue.erase(s); + + return OK; +} }; // namespace android diff --git a/services/camera/libcameraservice/Camera2Device.h b/services/camera/libcameraservice/Camera2Device.h index 64f4608..a327d8d 100644 --- a/services/camera/libcameraservice/Camera2Device.h +++ b/services/camera/libcameraservice/Camera2Device.h @@ -80,6 +80,12 @@ class Camera2Device : public virtual RefBase { int *id); /** + * Create an input reprocess stream that uses buffers from an existing + * output stream. + */ + status_t createReprocessStreamFromStream(int outputId, int *id); + + /** * Get information about a given stream. */ status_t getStreamInfo(int id, @@ -97,6 +103,12 @@ class Camera2Device : public virtual RefBase { status_t deleteStream(int id); /** + * Delete reprocess stream. Must not be called if there are requests in + * flight which reference that stream. + */ + status_t deleteReprocessStream(int id); + + /** * Create a metadata buffer with fields that the HAL device believes are * best for the given use case */ @@ -163,6 +175,21 @@ class Camera2Device : public virtual RefBase { */ status_t triggerPrecaptureMetering(uint32_t id); + /** + * Abstract interface for clients that want to listen to reprocess buffer + * release events + */ + struct BufferReleasedListener: public virtual RefBase { + virtual void onBufferReleased(buffer_handle_t *handle) = 0; + }; + + /** + * Push a buffer to be reprocessed into a reprocessing stream, and + * provide a listener to call once the buffer is returned by the HAL + */ + status_t pushReprocessBuffer(int reprocessStreamId, + buffer_handle_t *buffer, wp listener); + private: const int mId; @@ -343,6 +370,86 @@ class Camera2Device : public virtual RefBase { typedef List > StreamList; StreamList mStreams; + /** + * Adapter from an ANativeWindow interface to camera2 device stream ops. 
+ * Also takes care of allocating/deallocating stream in device interface + */ + class ReprocessStreamAdapter: public camera2_stream_in_ops, public virtual RefBase { + public: + ReprocessStreamAdapter(camera2_device_t *d); + + ~ReprocessStreamAdapter(); + + /** + * Create a HAL device reprocess stream based on an existing output stream. + */ + status_t connectToDevice(const sp &outputStream); + + status_t release(); + + /** + * Push buffer into stream for reprocessing. Takes ownership until it notifies + * that the buffer has been released + */ + status_t pushIntoStream(buffer_handle_t *handle, + const wp &releaseListener); + + /** + * Get stream parameters. + * Only valid after a successful connectToDevice call. + */ + int getId() const { return mId; } + uint32_t getWidth() const { return mWidth; } + uint32_t getHeight() const { return mHeight; } + uint32_t getFormat() const { return mFormat; } + + // Dump stream information + status_t dump(int fd, const Vector& args); + + private: + enum { + ERROR = -1, + RELEASED = 0, + ACTIVE + } mState; + + sp mConsumerInterface; + wp mBaseStream; + + struct QueueEntry { + buffer_handle_t *handle; + wp releaseListener; + }; + + List mQueue; + + List mInFlightQueue; + + camera2_device_t *mDevice; + + uint32_t mId; + uint32_t mWidth; + uint32_t mHeight; + uint32_t mFormat; + + /** Debugging information */ + uint32_t mActiveBuffers; + uint32_t mFrameCount; + int64_t mLastTimestamp; + + const camera2_stream_in_ops *getStreamOps(); + + static int acquire_buffer(const camera2_stream_in_ops_t *w, + buffer_handle_t** buffer); + + static int release_buffer(const camera2_stream_in_ops_t* w, + buffer_handle_t* buffer); + + }; // class ReprocessStreamAdapter + + typedef List > ReprocessStreamList; + ReprocessStreamList mReprocessStreams; + // Receives HAL notifications and routes them to the NotificationListener static void notificationCallback(int32_t msg_type, int32_t ext1, diff --git 
a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp index 854b890..bccb18e 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp @@ -136,7 +136,7 @@ int CallbackProcessor::getStreamId() const { return mCallbackStreamId; } -void CallbackProcessor::dump(int fd, const Vector& args) { +void CallbackProcessor::dump(int fd, const Vector& args) const { } bool CallbackProcessor::threadLoop() { diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.h b/services/camera/libcameraservice/camera2/CallbackProcessor.h index 36c51a3..c2a1372 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.h +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.h @@ -48,7 +48,7 @@ class CallbackProcessor: status_t deleteStream(); int getStreamId() const; - void dump(int fd, const Vector& args); + void dump(int fd, const Vector& args) const; private: static const nsecs_t kWaitDuration = 10000000; // 10 ms wp mClient; diff --git a/services/camera/libcameraservice/camera2/CameraMetadata.cpp b/services/camera/libcameraservice/camera2/CameraMetadata.cpp index 95377b2..8399e20 100644 --- a/services/camera/libcameraservice/camera2/CameraMetadata.cpp +++ b/services/camera/libcameraservice/camera2/CameraMetadata.cpp @@ -84,6 +84,10 @@ size_t CameraMetadata::entryCount() const { get_camera_metadata_entry_count(mBuffer); } +bool CameraMetadata::isEmpty() const { + return entryCount() == 0; +} + status_t CameraMetadata::sort() { return sort_camera_metadata(mBuffer); } diff --git a/services/camera/libcameraservice/camera2/CameraMetadata.h b/services/camera/libcameraservice/camera2/CameraMetadata.h index 340414e..aee6cd7 100644 --- a/services/camera/libcameraservice/camera2/CameraMetadata.h +++ b/services/camera/libcameraservice/camera2/CameraMetadata.h @@ -87,6 +87,11 @@ class CameraMetadata { 
size_t entryCount() const; /** + * Is the buffer empty (no entries) + */ + bool isEmpty() const; + + /** * Sort metadata buffer for faster find */ status_t sort(); diff --git a/services/camera/libcameraservice/camera2/CaptureProcessor.cpp b/services/camera/libcameraservice/camera2/CaptureProcessor.cpp deleted file mode 100644 index b17f9d2..0000000 --- a/services/camera/libcameraservice/camera2/CaptureProcessor.cpp +++ /dev/null @@ -1,248 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#define LOG_TAG "Camera2Client::CaptureProcessor" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include - -#include "CaptureProcessor.h" -#include -#include "../Camera2Device.h" -#include "../Camera2Client.h" - - -namespace android { -namespace camera2 { - -CaptureProcessor::CaptureProcessor(wp client): - Thread(false), - mClient(client), - mCaptureAvailable(false), - mCaptureStreamId(NO_STREAM) { -} - -CaptureProcessor::~CaptureProcessor() { - ALOGV("%s: Exit", __FUNCTION__); -} - -void CaptureProcessor::onFrameAvailable() { - Mutex::Autolock l(mInputMutex); - if (!mCaptureAvailable) { - mCaptureAvailable = true; - mCaptureAvailableSignal.signal(); - } -} - -status_t CaptureProcessor::updateStream(const Parameters ¶ms) { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - status_t res; - - Mutex::Autolock l(mInputMutex); - - sp client = mClient.promote(); - if (client == 0) return OK; - sp device = client->getCameraDevice(); - - // Find out buffer size for JPEG - camera_metadata_ro_entry_t maxJpegSize = - params.staticInfo(ANDROID_JPEG_MAX_SIZE); - if (maxJpegSize.count == 0) { - ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!", - __FUNCTION__, client->getCameraId()); - return INVALID_OPERATION; - } - - if (mCaptureConsumer == 0) { - // Create CPU buffer queue endpoint - mCaptureConsumer = new CpuConsumer(1); - mCaptureConsumer->setFrameAvailableListener(this); - mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer")); - mCaptureWindow = new SurfaceTextureClient( - mCaptureConsumer->getProducerInterface()); - // Create memory for API consumption - mCaptureHeap = new Camera2Heap(maxJpegSize.data.i32[0], 1, - "Camera2Client::CaptureHeap"); - if (mCaptureHeap->mHeap->getSize() == 0) { - ALOGE("%s: Camera %d: Unable to allocate memory for capture", - __FUNCTION__, client->getCameraId()); - return NO_MEMORY; - } - } - - if (mCaptureStreamId != NO_STREAM) { - // Check if stream parameters have to change - uint32_t 
currentWidth, currentHeight; - res = device->getStreamInfo(mCaptureStreamId, - ¤tWidth, ¤tHeight, 0); - if (res != OK) { - ALOGE("%s: Camera %d: Error querying capture output stream info: " - "%s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); - return res; - } - if (currentWidth != (uint32_t)params.pictureWidth || - currentHeight != (uint32_t)params.pictureHeight) { - res = device->deleteStream(mCaptureStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to delete old output stream " - "for capture: %s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); - return res; - } - mCaptureStreamId = NO_STREAM; - } - } - - if (mCaptureStreamId == NO_STREAM) { - // Create stream for HAL production - res = device->createStream(mCaptureWindow, - params.pictureWidth, params.pictureHeight, - HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0], - &mCaptureStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Can't create output stream for capture: " - "%s (%d)", __FUNCTION__, client->getCameraId(), - strerror(-res), res); - return res; - } - - } - return OK; -} - -status_t CaptureProcessor::deleteStream() { - ATRACE_CALL(); - status_t res; - - Mutex::Autolock l(mInputMutex); - - if (mCaptureStreamId != NO_STREAM) { - sp client = mClient.promote(); - if (client == 0) return OK; - sp device = client->getCameraDevice(); - - device->deleteStream(mCaptureStreamId); - mCaptureStreamId = NO_STREAM; - } - return OK; -} - -int CaptureProcessor::getStreamId() const { - Mutex::Autolock l(mInputMutex); - return mCaptureStreamId; -} - -void CaptureProcessor::dump(int fd, const Vector& args) { -} - -bool CaptureProcessor::threadLoop() { - status_t res; - - { - Mutex::Autolock l(mInputMutex); - while (!mCaptureAvailable) { - res = mCaptureAvailableSignal.waitRelative(mInputMutex, - kWaitDuration); - if (res == TIMED_OUT) return true; - } - mCaptureAvailable = false; - } - - do { - sp client = mClient.promote(); - if (client == 0) return false; - res = 
processNewCapture(client); - } while (res == OK); - - return true; -} - -status_t CaptureProcessor::processNewCapture(sp &client) { - ATRACE_CALL(); - status_t res; - sp captureHeap; - - CpuConsumer::LockedBuffer imgBuffer; - - res = mCaptureConsumer->lockNextBuffer(&imgBuffer); - if (res != OK) { - if (res != BAD_VALUE) { - ALOGE("%s: Camera %d: Error receiving still image buffer: " - "%s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); - } - return res; - } - - ALOGV("%s: Camera %d: Still capture available", __FUNCTION__, - client->getCameraId()); - - // TODO: Signal errors here upstream - { - SharedParameters::Lock l(client->getParameters()); - - switch (l.mParameters.state) { - case Parameters::STILL_CAPTURE: - l.mParameters.state = Parameters::STOPPED; - break; - case Parameters::VIDEO_SNAPSHOT: - l.mParameters.state = Parameters::RECORD; - break; - default: - ALOGE("%s: Camera %d: Still image produced unexpectedly " - "in state %s!", - __FUNCTION__, client->getCameraId(), - Parameters::getStateName(l.mParameters.state)); - mCaptureConsumer->unlockBuffer(imgBuffer); - return BAD_VALUE; - } - } - - if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) { - ALOGE("%s: Camera %d: Unexpected format for still image: " - "%x, expected %x", __FUNCTION__, client->getCameraId(), - imgBuffer.format, - HAL_PIXEL_FORMAT_BLOB); - mCaptureConsumer->unlockBuffer(imgBuffer); - return OK; - } - - // TODO: Optimize this to avoid memcopy - void* captureMemory = mCaptureHeap->mHeap->getBase(); - size_t size = mCaptureHeap->mHeap->getSize(); - memcpy(captureMemory, imgBuffer.data, size); - - mCaptureConsumer->unlockBuffer(imgBuffer); - - captureHeap = mCaptureHeap; - - Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient); - ALOGV("%s: Sending still image to client", __FUNCTION__); - if (l.mCameraClient != 0) { - l.mCameraClient->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, - captureHeap->mBuffers[0], NULL); - } else { - ALOGV("%s: No client!", 
__FUNCTION__); - } - return OK; -} - -}; // namespace camera2 -}; // namespace android diff --git a/services/camera/libcameraservice/camera2/CaptureProcessor.h b/services/camera/libcameraservice/camera2/CaptureProcessor.h deleted file mode 100644 index 8e35739..0000000 --- a/services/camera/libcameraservice/camera2/CaptureProcessor.h +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CAPTUREPROCESSOR_H -#define ANDROID_SERVERS_CAMERA_CAMERA2_CAPTUREPROCESSOR_H - -#include -#include -#include -#include -#include -#include -#include "Parameters.h" -#include "CameraMetadata.h" -#include "Camera2Heap.h" - -namespace android { - -class Camera2Client; - -namespace camera2 { - -/*** - * Still image capture output image processing - */ -class CaptureProcessor: - public Thread, public CpuConsumer::FrameAvailableListener { - public: - CaptureProcessor(wp client); - ~CaptureProcessor(); - - void onFrameAvailable(); - - status_t updateStream(const Parameters ¶ms); - status_t deleteStream(); - int getStreamId() const; - - void dump(int fd, const Vector& args); - private: - static const nsecs_t kWaitDuration = 10000000; // 10 ms - wp mClient; - - mutable Mutex mInputMutex; - bool mCaptureAvailable; - Condition mCaptureAvailableSignal; - - enum { - NO_STREAM = -1 - }; - - int mCaptureStreamId; - sp mCaptureConsumer; - sp mCaptureWindow; - 
sp mCaptureHeap; - - virtual bool threadLoop(); - - status_t processNewCapture(sp &client); - -}; - - -}; //namespace camera2 -}; //namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp new file mode 100644 index 0000000..532d2aa --- /dev/null +++ b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp @@ -0,0 +1,506 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera2Client::CaptureSequencer" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include +#include + +#include "CaptureSequencer.h" +#include "../Camera2Device.h" +#include "../Camera2Client.h" +#include "Parameters.h" + +namespace android { +namespace camera2 { + +/** Public members */ + +CaptureSequencer::CaptureSequencer(wp client): + Thread(false), + mStartCapture(false), + mBusy(false), + mNewAEState(false), + mNewFrameReceived(false), + mNewCaptureReceived(false), + mClient(client), + mCaptureState(IDLE), + mTriggerId(0), + mTimeoutCount(0), + mCaptureId(Camera2Client::kFirstCaptureRequestId) { +} + +CaptureSequencer::~CaptureSequencer() { + ALOGV("%s: Exit", __FUNCTION__); +} + +void CaptureSequencer::setZslProcessor(wp processor) { + Mutex::Autolock l(mInputMutex); + mZslProcessor = processor; +} + +status_t CaptureSequencer::startCapture() { + ATRACE_CALL(); + Mutex::Autolock l(mInputMutex); + if (mBusy) { + ALOGE("%s: Already busy capturing!", __FUNCTION__); + return INVALID_OPERATION; + } + if (!mStartCapture) { + mStartCapture = true; + mStartCaptureSignal.signal(); + } + return OK; +} + +void CaptureSequencer::notifyAutoExposure(uint8_t newState, int triggerId) { + ATRACE_CALL(); + Mutex::Autolock l(mInputMutex); + mAEState = newState; + mAETriggerId = triggerId; + if (!mNewAEState) { + mNewAEState = true; + mNewNotifySignal.signal(); + } +} + +void CaptureSequencer::onFrameAvailable(int32_t frameId, + CameraMetadata &frame) { + ATRACE_CALL(); + Mutex::Autolock l(mInputMutex); + mNewFrameId = frameId; + mNewFrame.acquire(frame); + if (!mNewFrameReceived) { + mNewFrameReceived = true; + mNewFrameSignal.signal(); + } +} + +void CaptureSequencer::onCaptureAvailable(nsecs_t timestamp) { + ATRACE_CALL(); + Mutex::Autolock l(mInputMutex); + mCaptureTimestamp = timestamp; + if (!mNewCaptureReceived) { + mNewCaptureReceived = true; + mNewCaptureSignal.signal(); + } +} + + +void CaptureSequencer::dump(int fd, 
const Vector& args) { + String8 result; + if (mCaptureRequest.entryCount() != 0) { + result = " Capture request:\n"; + write(fd, result.string(), result.size()); + mCaptureRequest.dump(fd, 2, 6); + } else { + result = " Capture request: undefined\n"; + write(fd, result.string(), result.size()); + } + result = String8::format(" Current capture state: %s\n", + kStateNames[mCaptureState]); + result.append(" Latest captured frame:\n"); + write(fd, result.string(), result.size()); + mNewFrame.dump(fd, 2, 6); +} + +/** Private members */ + +const char* CaptureSequencer::kStateNames[CaptureSequencer::NUM_CAPTURE_STATES+1] = +{ + "IDLE", + "START", + "ZSL_START", + "ZSL_WAITING", + "ZSL_REPROCESSING", + "STANDARD_START", + "STANDARD_PRECAPTURE", + "STANDARD_CAPTURING", + "DONE", + "ERROR", + "UNKNOWN" +}; + +const CaptureSequencer::StateManager + CaptureSequencer::kStateManagers[CaptureSequencer::NUM_CAPTURE_STATES-1] = { + &CaptureSequencer::manageIdle, + &CaptureSequencer::manageStart, + &CaptureSequencer::manageZslStart, + &CaptureSequencer::manageZslWaiting, + &CaptureSequencer::manageZslReprocessing, + &CaptureSequencer::manageStandardStart, + &CaptureSequencer::manageStandardPrecaptureWait, + &CaptureSequencer::manageStandardCapture, + &CaptureSequencer::manageStandardCaptureWait, + &CaptureSequencer::manageDone, +}; + +bool CaptureSequencer::threadLoop() { + status_t res; + + sp client = mClient.promote(); + if (client == 0) return false; + + if (mCaptureState < ERROR) { + mCaptureState = (this->*kStateManagers[mCaptureState])(client); + } else { + ALOGE("%s: Bad capture state: %s", + __FUNCTION__, kStateNames[mCaptureState]); + return false; + } + + return true; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageIdle(sp &client) { + status_t res; + ATRACE_CALL(); + Mutex::Autolock l(mInputMutex); + while (!mStartCapture) { + res = mStartCaptureSignal.waitRelative(mInputMutex, + kWaitDuration); + if (res == TIMED_OUT) break; + } + if (mStartCapture) { + 
mStartCapture = false; + mBusy = true; + return START; + } + return IDLE; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageDone(sp &client) { + status_t res; + ATRACE_CALL(); + mCaptureId++; + + { + Mutex::Autolock l(mInputMutex); + mBusy = false; + } + + SharedParameters::Lock l(client->getParameters()); + switch (l.mParameters.state) { + case Parameters::STILL_CAPTURE: + l.mParameters.state = Parameters::STOPPED; + break; + case Parameters::VIDEO_SNAPSHOT: + l.mParameters.state = Parameters::RECORD; + break; + default: + ALOGE("%s: Camera %d: Still image produced unexpectedly " + "in state %s!", + __FUNCTION__, client->getCameraId(), + Parameters::getStateName(l.mParameters.state)); + } + + return IDLE; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageStart( + sp &client) { + status_t res; + ATRACE_CALL(); + SharedParameters::Lock l(client->getParameters()); + CaptureState nextState = DONE; + + res = updateCaptureRequest(l.mParameters, client); + if (res != OK ) { + ALOGE("%s: Camera %d: Can't update still image capture request: %s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return DONE; + } + + if (l.mParameters.zslMode && + l.mParameters.state == Parameters::STILL_CAPTURE) { + nextState = ZSL_START; + } else { + nextState = STANDARD_START; + } + + return nextState; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageZslStart( + sp &client) { + status_t res; + sp processor = mZslProcessor.promote(); + if (processor == 0) { + ALOGE("%s: No ZSL queue to use!", __FUNCTION__); + return DONE; + } + + client->registerFrameListener(mCaptureId, + this); + + res = client->getCameraDevice()->clearStreamingRequest(); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: " + "%s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return DONE; + } + // TODO: Actually select the right thing here. 
+ processor->pushToReprocess(mCaptureId); + + mTimeoutCount = kMaxTimeoutsForCaptureEnd; + return STANDARD_CAPTURE_WAIT; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageZslWaiting( + sp &client) { + return DONE; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageZslReprocessing( + sp &client) { + return START; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageStandardStart( + sp &client) { + ATRACE_CALL(); + client->registerFrameListener(mCaptureId, + this); + { + SharedParameters::Lock l(client->getParameters()); + mTriggerId = l.mParameters.precaptureTriggerCounter++; + } + client->getCameraDevice()->triggerPrecaptureMetering(mTriggerId); + + mAeInPrecapture = false; + mTimeoutCount = kMaxTimeoutsForPrecaptureStart; + return STANDARD_PRECAPTURE_WAIT; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageStandardPrecaptureWait( + sp &client) { + status_t res; + ATRACE_CALL(); + Mutex::Autolock l(mInputMutex); + while (!mNewAEState) { + res = mNewNotifySignal.waitRelative(mInputMutex, kWaitDuration); + if (res == TIMED_OUT) { + mTimeoutCount--; + break; + } + } + if (mTimeoutCount <= 0) { + ALOGW("Timed out waiting for precapture %s", + mAeInPrecapture ? 
"end" : "start"); + return STANDARD_CAPTURE; + } + if (mNewAEState) { + if (!mAeInPrecapture) { + // Waiting to see PRECAPTURE state + if (mAETriggerId == mTriggerId && + mAEState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) { + ALOGV("%s: Got precapture start", __FUNCTION__); + mAeInPrecapture = true; + mTimeoutCount = kMaxTimeoutsForPrecaptureEnd; + } + } else { + // Waiting to see PRECAPTURE state end + if (mAETriggerId == mTriggerId && + mAEState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) { + ALOGV("%s: Got precapture end", __FUNCTION__); + return STANDARD_CAPTURE; + } + } + mNewAEState = false; + } + return STANDARD_PRECAPTURE_WAIT; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture( + sp &client) { + status_t res; + ATRACE_CALL(); + SharedParameters::Lock l(client->getParameters()); + Vector outputStreams; + + outputStreams.push(client->getPreviewStreamId()); + outputStreams.push(client->getCaptureStreamId()); + + if (l.mParameters.previewCallbackFlags & + CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) { + outputStreams.push(client->getCallbackStreamId()); + } + + if (l.mParameters.state == Parameters::VIDEO_SNAPSHOT) { + outputStreams.push(client->getRecordingStreamId()); + } + + res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS, + outputStreams); + if (res == OK) { + res = mCaptureRequest.update(ANDROID_REQUEST_ID, + &mCaptureId, 1); + } + if (res == OK) { + res = mCaptureRequest.sort(); + } + + if (res != OK) { + ALOGE("%s: Camera %d: Unable to set up still capture request: %s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return DONE; + } + + CameraMetadata captureCopy = mCaptureRequest; + if (captureCopy.entryCount() == 0) { + ALOGE("%s: Camera %d: Unable to copy capture request for HAL device", + __FUNCTION__, client->getCameraId()); + return DONE; + } + + if (l.mParameters.state == Parameters::STILL_CAPTURE) { + res = client->getCameraDevice()->clearStreamingRequest(); + if (res != OK) { + ALOGE("%s: Camera 
%d: Unable to stop preview for still capture: " + "%s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return DONE; + } + } + // TODO: Capture should be atomic with setStreamingRequest here + res = client->getCameraDevice()->capture(captureCopy); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to submit still image capture request: " + "%s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return DONE; + } + + mTimeoutCount = kMaxTimeoutsForCaptureEnd; + return STANDARD_CAPTURE_WAIT; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageStandardCaptureWait( + sp &client) { + status_t res; + ATRACE_CALL(); + Mutex::Autolock l(mInputMutex); + while (!mNewFrameReceived) { + res = mNewFrameSignal.waitRelative(mInputMutex, kWaitDuration); + if (res == TIMED_OUT) { + mTimeoutCount--; + break; + } + } + while (!mNewCaptureReceived) { + res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration); + if (res == TIMED_OUT) { + mTimeoutCount--; + break; + } + } + if (mTimeoutCount <= 0) { + ALOGW("Timed out waiting for capture to complete"); + return DONE; + } + if (mNewFrameReceived && mNewCaptureReceived) { + if (mNewFrameId != mCaptureId) { + ALOGW("Mismatched capture frame IDs: Expected %d, got %d", + mCaptureId, mNewFrameId); + } + camera_metadata_entry_t entry; + entry = mNewFrame.find(ANDROID_SENSOR_TIMESTAMP); + if (entry.count == 0) { + ALOGE("No timestamp field in capture frame!"); + } + if (entry.data.i64[0] != mCaptureTimestamp) { + ALOGW("Mismatched capture timestamps: Metadata frame %lld," + " captured buffer %lld", entry.data.i64[0], mCaptureTimestamp); + } + client->removeFrameListener(mCaptureId); + + mNewFrameReceived = false; + mNewCaptureReceived = false; + return DONE; + } + return STANDARD_CAPTURE_WAIT; +} + +status_t CaptureSequencer::updateCaptureRequest(const Parameters ¶ms, + sp &client) { + ATRACE_CALL(); + status_t res; + if (mCaptureRequest.entryCount() == 0) { + res = 
client->getCameraDevice()->createDefaultRequest( + CAMERA2_TEMPLATE_STILL_CAPTURE, + &mCaptureRequest); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to create default still image request:" + " %s (%d)", __FUNCTION__, client->getCameraId(), + strerror(-res), res); + return res; + } + } + + res = params.updateRequest(&mCaptureRequest); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update common entries of capture " + "request: %s (%d)", __FUNCTION__, client->getCameraId(), + strerror(-res), res); + return res; + } + + res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_SIZE, + params.jpegThumbSize, 2); + if (res != OK) return res; + res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_QUALITY, + ¶ms.jpegThumbQuality, 1); + if (res != OK) return res; + res = mCaptureRequest.update(ANDROID_JPEG_QUALITY, + ¶ms.jpegQuality, 1); + if (res != OK) return res; + res = mCaptureRequest.update( + ANDROID_JPEG_ORIENTATION, + ¶ms.jpegRotation, 1); + if (res != OK) return res; + + if (params.gpsEnabled) { + res = mCaptureRequest.update( + ANDROID_JPEG_GPS_COORDINATES, + params.gpsCoordinates, 3); + if (res != OK) return res; + res = mCaptureRequest.update( + ANDROID_JPEG_GPS_TIMESTAMP, + ¶ms.gpsTimestamp, 1); + if (res != OK) return res; + res = mCaptureRequest.update( + ANDROID_JPEG_GPS_PROCESSING_METHOD, + params.gpsProcessingMethod); + if (res != OK) return res; + } else { + res = mCaptureRequest.erase(ANDROID_JPEG_GPS_COORDINATES); + if (res != OK) return res; + res = mCaptureRequest.erase(ANDROID_JPEG_GPS_TIMESTAMP); + if (res != OK) return res; + res = mCaptureRequest.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD); + if (res != OK) return res; + } + + return OK; +} + + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.h b/services/camera/libcameraservice/camera2/CaptureSequencer.h new file mode 100644 index 0000000..0492a43 --- /dev/null +++ 
b/services/camera/libcameraservice/camera2/CaptureSequencer.h @@ -0,0 +1,154 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CAPTURESEQUENCER_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_CAPTURESEQUENCER_H + +#include +#include +#include +#include +#include +#include "CameraMetadata.h" +#include "Parameters.h" +#include "FrameProcessor.h" + +namespace android { + +class Camera2Client; + +namespace camera2 { + +class ZslProcessor; + +/** + * Manages the still image capture process for + * zero-shutter-lag, regular, and video snapshots. 
+ */ +class CaptureSequencer: + virtual public Thread, + virtual public FrameProcessor::FilteredListener { + public: + CaptureSequencer(wp client); + ~CaptureSequencer(); + + // Get reference to the ZslProcessor, which holds the ZSL buffers and frames + void setZslProcessor(wp processor); + + // Begin still image capture + status_t startCapture(); + + // Notifications about AE state changes + void notifyAutoExposure(uint8_t newState, int triggerId); + + // Notifications from the frame processor + virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame); + + // Notifications from the capture processor + void onCaptureAvailable(nsecs_t timestamp); + + void dump(int fd, const Vector& args); + + private: + /** + * Accessed by other threads + */ + Mutex mInputMutex; + + bool mStartCapture; + bool mBusy; + Condition mStartCaptureSignal; + + bool mNewAEState; + uint8_t mAEState; + int mAETriggerId; + Condition mNewNotifySignal; + + bool mNewFrameReceived; + int32_t mNewFrameId; + CameraMetadata mNewFrame; + Condition mNewFrameSignal; + + bool mNewCaptureReceived; + nsecs_t mCaptureTimestamp; + Condition mNewCaptureSignal; + + /** + * Internal to CaptureSequencer + */ + static const nsecs_t kWaitDuration = 100000000; // 100 ms + static const int kMaxTimeoutsForPrecaptureStart = 2; // 200 ms + static const int kMaxTimeoutsForPrecaptureEnd = 10; // 1 sec + static const int kMaxTimeoutsForCaptureEnd = 20; // 2 sec + + wp mClient; + wp mZslProcessor; + + enum CaptureState { + IDLE, + START, + ZSL_START, + ZSL_WAITING, + ZSL_REPROCESSING, + STANDARD_START, + STANDARD_PRECAPTURE_WAIT, + STANDARD_CAPTURE, + STANDARD_CAPTURE_WAIT, + DONE, + ERROR, + NUM_CAPTURE_STATES + } mCaptureState; + static const char* kStateNames[]; + + typedef CaptureState (CaptureSequencer::*StateManager)(sp &client); + static const StateManager kStateManagers[]; + + CameraMetadata mCaptureRequest; + + int mTriggerId; + int mTimeoutCount; + bool mAeInPrecapture; + + int32_t mCaptureId; + + // 
Main internal methods + + virtual bool threadLoop(); + + CaptureState manageIdle(sp &client); + CaptureState manageStart(sp &client); + + CaptureState manageZslStart(sp &client); + CaptureState manageZslWaiting(sp &client); + CaptureState manageZslReprocessing(sp &client); + + CaptureState manageStandardStart(sp &client); + CaptureState manageStandardPrecaptureWait(sp &client); + CaptureState manageStandardCapture(sp &client); + CaptureState manageStandardCaptureWait(sp &client); + + CaptureState manageDone(sp &client); + + // Utility methods + + status_t updateCaptureRequest(const Parameters ¶ms, + sp &client); +}; + +}; // namespace camera2 +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp index 5059754..e24db0b 100644 --- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp +++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp @@ -36,6 +36,19 @@ FrameProcessor::~FrameProcessor() { ALOGV("%s: Exit", __FUNCTION__); } +status_t FrameProcessor::registerListener(int32_t id, + wp listener) { + Mutex::Autolock l(mInputMutex); + ALOGV("%s: Registering listener for frame id %d", + __FUNCTION__, id); + return mListeners.replaceValueFor(id, listener); +} + +status_t FrameProcessor::removeListener(int32_t id) { + Mutex::Autolock l(mInputMutex); + return mListeners.removeItem(id); +} + void FrameProcessor::dump(int fd, const Vector& args) { String8 result(" Latest received frame:\n"); write(fd, result.string(), result.size()); @@ -50,6 +63,7 @@ bool FrameProcessor::threadLoop() { sp client = mClient.promote(); if (client == 0) return false; device = client->getCameraDevice(); + if (device == 0) return false; } res = device->waitForNextFrame(kWaitDuration); @@ -67,20 +81,28 @@ bool FrameProcessor::threadLoop() { void FrameProcessor::processNewFrames(sp &client) { status_t res; + ATRACE_CALL(); CameraMetadata frame; while ( (res = 
client->getCameraDevice()->getNextFrame(&frame)) == OK) { camera_metadata_entry_t entry; + entry = frame.find(ANDROID_REQUEST_FRAME_COUNT); if (entry.count == 0) { - ALOGE("%s: Camera %d: Error reading frame number: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); + ALOGE("%s: Camera %d: Error reading frame number", + __FUNCTION__, client->getCameraId()); break; } res = processFaceDetect(frame, client); if (res != OK) break; - mLastFrame.acquire(frame); + // Must be last - listener can take ownership of frame + res = processListener(frame, client); + if (res != OK) break; + + if (!frame.isEmpty()) { + mLastFrame.acquire(frame); + } } if (res != NOT_ENOUGH_DATA) { ALOGE("%s: Camera %d: Error getting next frame: %s (%d)", @@ -91,9 +113,43 @@ void FrameProcessor::processNewFrames(sp &client) { return; } -status_t FrameProcessor::processFaceDetect( - const CameraMetadata &frame, sp &client) { +status_t FrameProcessor::processListener(CameraMetadata &frame, + sp &client) { + status_t res; + ATRACE_CALL(); + camera_metadata_entry_t entry; + + entry = frame.find(ANDROID_REQUEST_ID); + if (entry.count == 0) { + ALOGE("%s: Camera %d: Error reading frame id", + __FUNCTION__, client->getCameraId()); + return BAD_VALUE; + } + int32_t frameId = entry.data.i32[0]; + ALOGV("%s: Got frame with ID %d", __FUNCTION__, frameId); + + sp listener; + { + Mutex::Autolock l(mInputMutex); + ssize_t listenerIndex = mListeners.indexOfKey(frameId); + if (listenerIndex != NAME_NOT_FOUND) { + listener = mListeners[listenerIndex].promote(); + if (listener == 0) { + mListeners.removeItemsAt(listenerIndex, 1); + } + } + } + + if (listener != 0) { + listener->onFrameAvailable(frameId, frame); + } + return OK; +} + +status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, + sp &client) { status_t res; + ATRACE_CALL(); camera_metadata_ro_entry_t entry; bool enableFaceDetect; int maxFaces; @@ -209,6 +265,5 @@ status_t FrameProcessor::processFaceDetect( return OK; } 
- }; // namespace camera2 }; // namespace android diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.h b/services/camera/libcameraservice/camera2/FrameProcessor.h index 2cdf7f0..25d489a 100644 --- a/services/camera/libcameraservice/camera2/FrameProcessor.h +++ b/services/camera/libcameraservice/camera2/FrameProcessor.h @@ -20,6 +20,7 @@ #include #include #include +#include #include "CameraMetadata.h" namespace android { @@ -36,6 +37,17 @@ class FrameProcessor: public Thread { FrameProcessor(wp client); ~FrameProcessor(); + struct FilteredListener: virtual public RefBase { + // Listener may take ownership of frame + virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame) = 0; + }; + + // Register a listener for a specific frame ID (android.request.id). + // De-registers any existing listeners for that ID + status_t registerListener(int32_t id, wp listener); + + status_t removeListener(int32_t id); + void dump(int fd, const Vector& args); private: static const nsecs_t kWaitDuration = 10000000; // 10 ms @@ -43,10 +55,17 @@ class FrameProcessor: public Thread { virtual bool threadLoop(); + Mutex mInputMutex; + KeyedVector > mListeners; + void processNewFrames(sp &client); + status_t processFaceDetect(const CameraMetadata &frame, sp &client); + status_t processListener(CameraMetadata &frame, + sp &client); + CameraMetadata mLastFrame; }; diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.cpp b/services/camera/libcameraservice/camera2/JpegProcessor.cpp new file mode 100644 index 0000000..92148ca --- /dev/null +++ b/services/camera/libcameraservice/camera2/JpegProcessor.cpp @@ -0,0 +1,253 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera2Client::JpegProcessor" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include + +#include "JpegProcessor.h" +#include +#include "../Camera2Device.h" +#include "../Camera2Client.h" + + +namespace android { +namespace camera2 { + +JpegProcessor::JpegProcessor( + wp client, + wp sequencer): + Thread(false), + mClient(client), + mSequencer(sequencer), + mCaptureAvailable(false), + mCaptureStreamId(NO_STREAM) { +} + +JpegProcessor::~JpegProcessor() { + ALOGV("%s: Exit", __FUNCTION__); +} + +void JpegProcessor::onFrameAvailable() { + Mutex::Autolock l(mInputMutex); + if (!mCaptureAvailable) { + mCaptureAvailable = true; + mCaptureAvailableSignal.signal(); + } +} + +status_t JpegProcessor::updateStream(const Parameters ¶ms) { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + status_t res; + + Mutex::Autolock l(mInputMutex); + + sp client = mClient.promote(); + if (client == 0) return OK; + sp device = client->getCameraDevice(); + + // Find out buffer size for JPEG + camera_metadata_ro_entry_t maxJpegSize = + params.staticInfo(ANDROID_JPEG_MAX_SIZE); + if (maxJpegSize.count == 0) { + ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!", + __FUNCTION__, client->getCameraId()); + return INVALID_OPERATION; + } + + if (mCaptureConsumer == 0) { + // Create CPU buffer queue endpoint + mCaptureConsumer = new CpuConsumer(1); + mCaptureConsumer->setFrameAvailableListener(this); + mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer")); + mCaptureWindow = new SurfaceTextureClient( + 
mCaptureConsumer->getProducerInterface()); + // Create memory for API consumption + mCaptureHeap = new Camera2Heap(maxJpegSize.data.i32[0], 1, + "Camera2Client::CaptureHeap"); + if (mCaptureHeap->mHeap->getSize() == 0) { + ALOGE("%s: Camera %d: Unable to allocate memory for capture", + __FUNCTION__, client->getCameraId()); + return NO_MEMORY; + } + } + + if (mCaptureStreamId != NO_STREAM) { + // Check if stream parameters have to change + uint32_t currentWidth, currentHeight; + res = device->getStreamInfo(mCaptureStreamId, + ¤tWidth, ¤tHeight, 0); + if (res != OK) { + ALOGE("%s: Camera %d: Error querying capture output stream info: " + "%s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + return res; + } + if (currentWidth != (uint32_t)params.pictureWidth || + currentHeight != (uint32_t)params.pictureHeight) { + res = device->deleteStream(mCaptureStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete old output stream " + "for capture: %s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + return res; + } + mCaptureStreamId = NO_STREAM; + } + } + + if (mCaptureStreamId == NO_STREAM) { + // Create stream for HAL production + res = device->createStream(mCaptureWindow, + params.pictureWidth, params.pictureHeight, + HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0], + &mCaptureStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Can't create output stream for capture: " + "%s (%d)", __FUNCTION__, client->getCameraId(), + strerror(-res), res); + return res; + } + + } + return OK; +} + +status_t JpegProcessor::deleteStream() { + ATRACE_CALL(); + status_t res; + + Mutex::Autolock l(mInputMutex); + + if (mCaptureStreamId != NO_STREAM) { + sp client = mClient.promote(); + if (client == 0) return OK; + sp device = client->getCameraDevice(); + + device->deleteStream(mCaptureStreamId); + mCaptureStreamId = NO_STREAM; + } + return OK; +} + +int JpegProcessor::getStreamId() const { + Mutex::Autolock l(mInputMutex); + return 
mCaptureStreamId; +} + +void JpegProcessor::dump(int fd, const Vector& args) const { +} + +bool JpegProcessor::threadLoop() { + status_t res; + + { + Mutex::Autolock l(mInputMutex); + while (!mCaptureAvailable) { + res = mCaptureAvailableSignal.waitRelative(mInputMutex, + kWaitDuration); + if (res == TIMED_OUT) return true; + } + mCaptureAvailable = false; + } + + do { + sp client = mClient.promote(); + if (client == 0) return false; + res = processNewCapture(client); + } while (res == OK); + + return true; +} + +status_t JpegProcessor::processNewCapture(sp &client) { + ATRACE_CALL(); + status_t res; + sp captureHeap; + + CpuConsumer::LockedBuffer imgBuffer; + + res = mCaptureConsumer->lockNextBuffer(&imgBuffer); + if (res != OK) { + if (res != BAD_VALUE) { + ALOGE("%s: Camera %d: Error receiving still image buffer: " + "%s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + } + return res; + } + + ALOGV("%s: Camera %d: Still capture available", __FUNCTION__, + client->getCameraId()); + + // TODO: Signal errors here upstream + { + SharedParameters::Lock l(client->getParameters()); + + switch (l.mParameters.state) { + case Parameters::STILL_CAPTURE: + case Parameters::VIDEO_SNAPSHOT: + break; + default: + ALOGE("%s: Camera %d: Still image produced unexpectedly " + "in state %s!", + __FUNCTION__, client->getCameraId(), + Parameters::getStateName(l.mParameters.state)); + mCaptureConsumer->unlockBuffer(imgBuffer); + return BAD_VALUE; + } + } + + if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) { + ALOGE("%s: Camera %d: Unexpected format for still image: " + "%x, expected %x", __FUNCTION__, client->getCameraId(), + imgBuffer.format, + HAL_PIXEL_FORMAT_BLOB); + mCaptureConsumer->unlockBuffer(imgBuffer); + return OK; + } + + sp sequencer = mSequencer.promote(); + if (sequencer != 0) { + sequencer->onCaptureAvailable(imgBuffer.timestamp); + } + + // TODO: Optimize this to avoid memcopy + void* captureMemory = mCaptureHeap->mHeap->getBase(); + size_t size = 
mCaptureHeap->mHeap->getSize(); + memcpy(captureMemory, imgBuffer.data, size); + + mCaptureConsumer->unlockBuffer(imgBuffer); + + captureHeap = mCaptureHeap; + + Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient); + ALOGV("%s: Sending still image to client", __FUNCTION__); + if (l.mCameraClient != 0) { + l.mCameraClient->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, + captureHeap->mBuffers[0], NULL); + } else { + ALOGV("%s: No client!", __FUNCTION__); + } + return OK; +} + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.h b/services/camera/libcameraservice/camera2/JpegProcessor.h new file mode 100644 index 0000000..6e7a860 --- /dev/null +++ b/services/camera/libcameraservice/camera2/JpegProcessor.h @@ -0,0 +1,82 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_JPEGPROCESSOR_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_JPEGPROCESSOR_H + +#include +#include +#include +#include +#include +#include +#include "Parameters.h" +#include "CameraMetadata.h" +#include "Camera2Heap.h" + +namespace android { + +class Camera2Client; + +namespace camera2 { + +class CaptureSequencer; + +/*** + * Still image capture output image processing + */ +class JpegProcessor: + public Thread, public CpuConsumer::FrameAvailableListener { + public: + JpegProcessor(wp client, wp sequencer); + ~JpegProcessor(); + + void onFrameAvailable(); + + status_t updateStream(const Parameters ¶ms); + status_t deleteStream(); + int getStreamId() const; + + void dump(int fd, const Vector& args) const; + private: + static const nsecs_t kWaitDuration = 10000000; // 10 ms + wp mClient; + wp mSequencer; + + mutable Mutex mInputMutex; + bool mCaptureAvailable; + Condition mCaptureAvailableSignal; + + enum { + NO_STREAM = -1 + }; + + int mCaptureStreamId; + sp mCaptureConsumer; + sp mCaptureWindow; + sp mCaptureHeap; + + virtual bool threadLoop(); + + status_t processNewCapture(sp &client); + +}; + + +}; //namespace camera2 +}; //namespace android + +#endif diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index 2f7d023..1cad2ae 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -18,6 +18,9 @@ #define ATRACE_TAG ATRACE_TAG_CAMERA //#define LOG_NDEBUG 0 +#include +#include + #include #include @@ -738,9 +741,11 @@ status_t Parameters::initialize(const CameraMetadata *info) { enableFaceDetect = false; enableFocusMoveMessages = false; - afTriggerCounter = 0; + afTriggerCounter = 1; currentAfTriggerId = -1; + precaptureTriggerCounter = 1; + previewCallbackFlags = 0; state = STOPPED; @@ -1318,6 +1323,202 @@ status_t Parameters::set(const String8& params) { return OK; } 
+status_t Parameters::updateRequest(CameraMetadata *request) const { + ATRACE_CALL(); + status_t res; + + uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL; + res = request->update(ANDROID_REQUEST_METADATA_MODE, + &metadataMode, 1); + if (res != OK) return res; + + res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + previewFpsRange, 2); + if (res != OK) return res; + + uint8_t reqWbMode = autoWhiteBalanceLock ? + (uint8_t)ANDROID_CONTROL_AWB_LOCKED : wbMode; + res = request->update(ANDROID_CONTROL_AWB_MODE, + &reqWbMode, 1); + if (res != OK) return res; + res = request->update(ANDROID_CONTROL_EFFECT_MODE, + &effectMode, 1); + if (res != OK) return res; + res = request->update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, + &antibandingMode, 1); + if (res != OK) return res; + + uint8_t reqControlMode = + (sceneMode == ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) ? + ANDROID_CONTROL_AUTO : ANDROID_CONTROL_USE_SCENE_MODE; + res = request->update(ANDROID_CONTROL_MODE, + &reqControlMode, 1); + if (res != OK) return res; + if (reqControlMode == ANDROID_CONTROL_USE_SCENE_MODE) { + res = request->update(ANDROID_CONTROL_SCENE_MODE, + &sceneMode, 1); + if (res != OK) return res; + } + + uint8_t reqFlashMode = ANDROID_FLASH_OFF; + uint8_t reqAeMode; + switch (flashMode) { + case Parameters::FLASH_MODE_OFF: + reqAeMode = ANDROID_CONTROL_AE_ON; break; + case Parameters::FLASH_MODE_AUTO: + reqAeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH; break; + case Parameters::FLASH_MODE_ON: + reqAeMode = ANDROID_CONTROL_AE_ON_ALWAYS_FLASH; break; + case Parameters::FLASH_MODE_TORCH: + reqAeMode = ANDROID_CONTROL_AE_ON; + reqFlashMode = ANDROID_FLASH_TORCH; + break; + case Parameters::FLASH_MODE_RED_EYE: + reqAeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE; break; + default: + ALOGE("%s: Camera %d: Unknown flash mode %d", __FUNCTION__, + cameraId, flashMode); + return BAD_VALUE; + } + if (autoExposureLock) reqAeMode = ANDROID_CONTROL_AE_LOCKED; + + res = request->update(ANDROID_FLASH_MODE, + 
&reqFlashMode, 1); + if (res != OK) return res; + res = request->update(ANDROID_CONTROL_AE_MODE, + &reqAeMode, 1); + if (res != OK) return res; + + float reqFocusDistance = 0; // infinity focus in diopters + uint8_t reqFocusMode; + switch (focusMode) { + case Parameters::FOCUS_MODE_AUTO: + case Parameters::FOCUS_MODE_MACRO: + case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO: + case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE: + case Parameters::FOCUS_MODE_EDOF: + reqFocusMode = focusMode; + break; + case Parameters::FOCUS_MODE_INFINITY: + case Parameters::FOCUS_MODE_FIXED: + reqFocusMode = ANDROID_CONTROL_AF_OFF; + break; + default: + ALOGE("%s: Camera %d: Unknown focus mode %d", __FUNCTION__, + cameraId, focusMode); + return BAD_VALUE; + } + res = request->update(ANDROID_LENS_FOCUS_DISTANCE, + &reqFocusDistance, 1); + if (res != OK) return res; + res = request->update(ANDROID_CONTROL_AF_MODE, + &reqFocusMode, 1); + if (res != OK) return res; + + size_t reqFocusingAreasSize = focusingAreas.size() * 5; + int32_t *reqFocusingAreas = new int32_t[reqFocusingAreasSize]; + for (size_t i = 0; i < reqFocusingAreasSize; i += 5) { + if (focusingAreas[i].weight != 0) { + reqFocusingAreas[i + 0] = + normalizedXToArray(focusingAreas[i].left); + reqFocusingAreas[i + 1] = + normalizedYToArray(focusingAreas[i].top); + reqFocusingAreas[i + 2] = + normalizedXToArray(focusingAreas[i].right); + reqFocusingAreas[i + 3] = + normalizedYToArray(focusingAreas[i].bottom); + } else { + reqFocusingAreas[i + 0] = 0; + reqFocusingAreas[i + 1] = 0; + reqFocusingAreas[i + 2] = 0; + reqFocusingAreas[i + 3] = 0; + } + reqFocusingAreas[i + 4] = focusingAreas[i].weight; + } + res = request->update(ANDROID_CONTROL_AF_REGIONS, + reqFocusingAreas, reqFocusingAreasSize); + if (res != OK) return res; + delete[] reqFocusingAreas; + + res = request->update(ANDROID_CONTROL_AE_EXP_COMPENSATION, + &exposureCompensation, 1); + if (res != OK) return res; + + size_t reqMeteringAreasSize = meteringAreas.size() * 5; + 
int32_t *reqMeteringAreas = new int32_t[reqMeteringAreasSize]; + for (size_t i = 0; i < reqMeteringAreasSize; i += 5) { + if (meteringAreas[i].weight != 0) { + reqMeteringAreas[i + 0] = + normalizedXToArray(meteringAreas[i].left); + reqMeteringAreas[i + 1] = + normalizedYToArray(meteringAreas[i].top); + reqMeteringAreas[i + 2] = + normalizedXToArray(meteringAreas[i].right); + reqMeteringAreas[i + 3] = + normalizedYToArray(meteringAreas[i].bottom); + } else { + reqMeteringAreas[i + 0] = 0; + reqMeteringAreas[i + 1] = 0; + reqMeteringAreas[i + 2] = 0; + reqMeteringAreas[i + 3] = 0; + } + reqMeteringAreas[i + 4] = meteringAreas[i].weight; + } + res = request->update(ANDROID_CONTROL_AE_REGIONS, + reqMeteringAreas, reqMeteringAreasSize); + if (res != OK) return res; + + res = request->update(ANDROID_CONTROL_AWB_REGIONS, + reqMeteringAreas, reqMeteringAreasSize); + if (res != OK) return res; + delete[] reqMeteringAreas; + + // Need to convert zoom index into a crop rectangle. The rectangle is + // chosen to maximize its area on the sensor + + camera_metadata_ro_entry_t maxDigitalZoom = + staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM); + float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) / + (NUM_ZOOM_STEPS-1); + float zoomRatio = 1 + zoomIncrement * zoom; + + float zoomLeft, zoomTop, zoomWidth, zoomHeight; + if (previewWidth >= previewHeight) { + zoomWidth = fastInfo.arrayWidth / zoomRatio; + zoomHeight = zoomWidth * + previewHeight / previewWidth; + } else { + zoomHeight = fastInfo.arrayHeight / zoomRatio; + zoomWidth = zoomHeight * + previewWidth / previewHeight; + } + zoomLeft = (fastInfo.arrayWidth - zoomWidth) / 2; + zoomTop = (fastInfo.arrayHeight - zoomHeight) / 2; + + int32_t reqCropRegion[3] = { zoomLeft, zoomTop, zoomWidth }; + res = request->update(ANDROID_SCALER_CROP_REGION, + reqCropRegion, 3); + if (res != OK) return res; + + // TODO: Decide how to map recordingHint, or whether just to ignore it + + uint8_t reqVstabMode = videoStabilization ? 
+ ANDROID_CONTROL_VIDEO_STABILIZATION_ON : + ANDROID_CONTROL_VIDEO_STABILIZATION_OFF; + res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, + &reqVstabMode, 1); + if (res != OK) return res; + + uint8_t reqFaceDetectMode = enableFaceDetect ? + fastInfo.bestFaceDetectMode : + (uint8_t)ANDROID_STATS_FACE_DETECTION_OFF; + res = request->update(ANDROID_STATS_FACE_DETECT_MODE, + &reqFaceDetectMode, 1); + if (res != OK) return res; + + return OK; +} + const char* Parameters::getStateName(State state) { #define CASE_ENUM_TO_CHAR(x) case x: return(#x); break; switch(state) { diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h index 817d001..e71d086 100644 --- a/services/camera/libcameraservice/camera2/Parameters.h +++ b/services/camera/libcameraservice/camera2/Parameters.h @@ -29,12 +29,17 @@ namespace android { namespace camera2 { -// Current camera state; this is the full state of the Camera under the old -// camera API (contents of the CameraParameters object in a more-efficient -// format, plus other state). The enum values are mostly based off the -// corresponding camera2 enums, not the camera1 strings. A few are defined here -// if they don't cleanly map to camera2 values. +/** + * Current camera state; this is the full state of the Camera under the old + * camera API (contents of the CameraParameters object in a more-efficient + * format, plus other state). The enum values are mostly based off the + * corresponding camera2 enums, not the camera1 strings. A few are defined here + * if they don't cleanly map to camera2 values. 
+ */ struct Parameters { + /** + * Parameters and other state + */ int cameraId; int cameraFacing; @@ -117,9 +122,13 @@ struct Parameters { int currentAfTriggerId; bool afInMotion; + int precaptureTriggerCounter; + uint32_t previewCallbackFlags; bool previewCallbackOneShot; + bool zslMode; + // Overall camera state enum State { DISCONNECTED, @@ -149,7 +158,9 @@ struct Parameters { int32_t maxFaces; } fastInfo; - // Parameter manipulation and setup methods + /** + * Parameter manipulation and setup methods + */ Parameters(int cameraId, int cameraFacing); ~Parameters(); @@ -170,6 +181,9 @@ struct Parameters { // Validate and update camera parameters based on new settings status_t set(const String8 ¶ms); + // Update passed-in request for common parameters + status_t updateRequest(CameraMetadata *request) const; + // Static methods for debugging and converting between camera1 and camera2 // parameters diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp new file mode 100644 index 0000000..a39585e --- /dev/null +++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp @@ -0,0 +1,378 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera2Client::ZslProcessor" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 +//#define LOG_NNDEBUG 0 + +#ifdef LOG_NNDEBUG +#define ALOGVV(...) 
ALOGV(__VA_ARGS__) +#else +#define ALOGVV(...) ((void)0) +#endif + +#include +#include + +#include "ZslProcessor.h" +#include +#include "../Camera2Device.h" +#include "../Camera2Client.h" + + +namespace android { +namespace camera2 { + +ZslProcessor::ZslProcessor( + wp client, + wp sequencer): + Thread(false), + mState(RUNNING), + mClient(client), + mSequencer(sequencer), + mZslBufferAvailable(false), + mZslStreamId(NO_STREAM), + mZslReprocessStreamId(NO_STREAM), + mFrameListHead(0), + mZslQueueHead(0), + mZslQueueTail(0) { + mZslQueue.insertAt(0, kZslBufferDepth); + mFrameList.insertAt(0, kFrameListDepth); + sp captureSequencer = mSequencer.promote(); + if (captureSequencer != 0) captureSequencer->setZslProcessor(this); +} + +ZslProcessor::~ZslProcessor() { + ALOGV("%s: Exit", __FUNCTION__); +} + +void ZslProcessor::onFrameAvailable() { + Mutex::Autolock l(mInputMutex); + if (!mZslBufferAvailable) { + mZslBufferAvailable = true; + mZslBufferAvailableSignal.signal(); + } +} + +void ZslProcessor::onFrameAvailable(int32_t frameId, CameraMetadata &frame) { + Mutex::Autolock l(mInputMutex); + camera_metadata_entry_t entry; + entry = frame.find(ANDROID_SENSOR_TIMESTAMP); + nsecs_t timestamp = entry.data.i64[0]; + ALOGVV("Got preview frame for timestamp %lld", timestamp); + + if (mState != RUNNING) return; + + mFrameList.editItemAt(mFrameListHead).acquire(frame); + mFrameListHead = (mFrameListHead + 1) % kFrameListDepth; + + findMatchesLocked(); +} + +void ZslProcessor::onBufferReleased(buffer_handle_t *handle) { + Mutex::Autolock l(mInputMutex); + + buffer_handle_t *expectedHandle = + &(mZslQueue[mZslQueueTail].buffer.mGraphicBuffer->handle); + + if (handle != expectedHandle) { + ALOGE("%s: Expected buffer %p, got buffer %p", + __FUNCTION__, expectedHandle, handle); + } + + mState = RUNNING; +} + +status_t ZslProcessor::updateStream(const Parameters ¶ms) { + ATRACE_CALL(); + ALOGV("%s: Configuring ZSL streams", __FUNCTION__); + status_t res; + + Mutex::Autolock 
l(mInputMutex); + + sp client = mClient.promote(); + if (client == 0) return OK; + sp device = client->getCameraDevice(); + + if (mZslConsumer == 0) { + // Create CPU buffer queue endpoint + mZslConsumer = new BufferItemConsumer( + GRALLOC_USAGE_HW_CAMERA_ZSL, + kZslBufferDepth, + true); + mZslConsumer->setFrameAvailableListener(this); + mZslConsumer->setName(String8("Camera2Client::ZslConsumer")); + mZslWindow = new SurfaceTextureClient( + mZslConsumer->getProducerInterface()); + } + + if (mZslStreamId != NO_STREAM) { + // Check if stream parameters have to change + uint32_t currentWidth, currentHeight; + res = device->getStreamInfo(mZslStreamId, + ¤tWidth, ¤tHeight, 0); + if (res != OK) { + ALOGE("%s: Camera %d: Error querying capture output stream info: " + "%s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + return res; + } + if (currentWidth != (uint32_t)params.pictureWidth || + currentHeight != (uint32_t)params.pictureHeight) { + res = device->deleteStream(mZslReprocessStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete old reprocess stream " + "for ZSL: %s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + return res; + } + res = device->deleteStream(mZslStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete old output stream " + "for ZSL: %s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + return res; + } + mZslStreamId = NO_STREAM; + } + } + + if (mZslStreamId == NO_STREAM) { + // Create stream for HAL production + res = device->createStream(mZslWindow, + params.pictureWidth, params.pictureHeight, + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 0, + &mZslStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Can't create output stream for ZSL: " + "%s (%d)", __FUNCTION__, client->getCameraId(), + strerror(-res), res); + return res; + } + res = device->createReprocessStreamFromStream(mZslStreamId, + &mZslReprocessStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: 
Can't create reprocess stream for ZSL: " + "%s (%d)", __FUNCTION__, client->getCameraId(), + strerror(-res), res); + return res; + } + } + client->registerFrameListener(Camera2Client::kPreviewRequestId, this); + + return OK; +} + +status_t ZslProcessor::deleteStream() { + ATRACE_CALL(); + status_t res; + + Mutex::Autolock l(mInputMutex); + + if (mZslStreamId != NO_STREAM) { + sp client = mClient.promote(); + if (client == 0) return OK; + sp device = client->getCameraDevice(); + + device->deleteStream(mZslReprocessStreamId); + mZslReprocessStreamId = NO_STREAM; + device->deleteStream(mZslStreamId); + mZslStreamId = NO_STREAM; + } + return OK; +} + +int ZslProcessor::getStreamId() const { + Mutex::Autolock l(mInputMutex); + return mZslStreamId; +} + +int ZslProcessor::getReprocessStreamId() const { + Mutex::Autolock l(mInputMutex); + return mZslReprocessStreamId; +} + +status_t ZslProcessor::pushToReprocess(int32_t requestId) { + ALOGV("%s: Send in reprocess request with id %d", + __FUNCTION__, requestId); + Mutex::Autolock l(mInputMutex); + status_t res; + sp client = mClient.promote(); + + if (client == 0) return false; + + if (mZslQueueTail != mZslQueueHead) { + buffer_handle_t *handle = + &(mZslQueue[mZslQueueTail].buffer.mGraphicBuffer->handle); + CameraMetadata request = mZslQueue[mZslQueueTail].frame; + uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS; + res = request.update(ANDROID_REQUEST_TYPE, + &requestType, 1); + uint8_t inputStreams[1] = { mZslReprocessStreamId }; + if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS, + inputStreams, 1); + uint8_t outputStreams[1] = { client->getCaptureStreamId() }; + if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS, + outputStreams, 1); + res = request.update(ANDROID_REQUEST_ID, + &requestId, 1); + + if (res != OK ) { + ALOGE("%s: Unable to update frame to a reprocess request", __FUNCTION__); + return INVALID_OPERATION; + } + + res = 
client->getCameraDevice()->pushReprocessBuffer(mZslReprocessStreamId, + handle, this); + if (res != OK) { + ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + res = client->getCameraDevice()->capture(request); + if (res != OK ) { + ALOGE("%s: Unable to send ZSL reprocess request to capture: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + mState = LOCKED; + } else { + ALOGE("%s: Nothing to push", __FUNCTION__); + return BAD_VALUE; + } + return OK; +} + +void ZslProcessor::dump(int fd, const Vector& args) const { +} + +bool ZslProcessor::threadLoop() { + status_t res; + + { + Mutex::Autolock l(mInputMutex); + while (!mZslBufferAvailable) { + res = mZslBufferAvailableSignal.waitRelative(mInputMutex, + kWaitDuration); + if (res == TIMED_OUT) return true; + } + mZslBufferAvailable = false; + } + + do { + sp client = mClient.promote(); + if (client == 0) return false; + res = processNewZslBuffer(client); + } while (res == OK); + + return true; +} + +status_t ZslProcessor::processNewZslBuffer(sp &client) { + ATRACE_CALL(); + status_t res; + Mutex::Autolock l(mInputMutex); + + if (mState == LOCKED) { + BufferItemConsumer::BufferItem item; + res = mZslConsumer->acquireBuffer(&item); + if (res != OK) { + if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { + ALOGE("%s: Camera %d: Error receiving ZSL image buffer: " + "%s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + } + return res; + } + mZslConsumer->releaseBuffer(item); + return OK; + } + + ALOGVV("Got ZSL buffer: head: %d, tail: %d", mZslQueueHead, mZslQueueTail); + + if ( (mZslQueueHead + 1) % kZslBufferDepth == mZslQueueTail) { + mZslConsumer->releaseBuffer(mZslQueue[mZslQueueTail].buffer); + mZslQueue.replaceAt(mZslQueueTail); + mZslQueueTail = (mZslQueueTail + 1) % kZslBufferDepth; + } + + ZslPair &queueHead = mZslQueue.editItemAt(mZslQueueHead); + + res = 
mZslConsumer->acquireBuffer(&(queueHead.buffer)); + if (res != OK) { + if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { + ALOGE("%s: Camera %d: Error receiving ZSL image buffer: " + "%s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + } + return res; + } + queueHead.frame.release(); + + mZslQueueHead = (mZslQueueHead + 1) % kZslBufferDepth; + + ALOGVV(" Added buffer, timestamp %lld", queueHead.buffer.mTimestamp); + + findMatchesLocked(); + + return OK; +} + +void ZslProcessor::findMatchesLocked() { + for (size_t i = 0; i < mZslQueue.size(); i++) { + ZslPair &queueEntry = mZslQueue.editItemAt(i); + nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp; + if (queueEntry.frame.isEmpty() && bufferTimestamp != 0) { + // Have buffer, no matching frame. Look for one + for (size_t j = 0; j < mFrameList.size(); j++) { + bool match = false; + CameraMetadata &frame = mFrameList.editItemAt(j); + if (!frame.isEmpty()) { + camera_metadata_entry_t entry; + entry = frame.find(ANDROID_SENSOR_TIMESTAMP); + if (entry.count == 0) { + ALOGE("%s: Can't find timestamp in frame!", + __FUNCTION__); + continue; + } + nsecs_t frameTimestamp = entry.data.i64[0]; + if (bufferTimestamp == frameTimestamp) { + ALOGVV("%s: Found match %lld", __FUNCTION__, + frameTimestamp); + match = true; + } else { + int64_t delta = abs(bufferTimestamp - frameTimestamp); + if ( delta < 1000000) { + ALOGVV("%s: Found close match %lld (delta %lld)", + __FUNCTION__, bufferTimestamp, delta); + match = true; + } + } + } + if (match) { + queueEntry.frame.acquire(frame); + break; + } + } + } + } +} + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/camera2/ZslProcessor.h new file mode 100644 index 0000000..74921a3 --- /dev/null +++ b/services/camera/libcameraservice/camera2/ZslProcessor.h @@ -0,0 +1,119 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under 
the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR_H + +#include +#include +#include +#include +#include +#include +#include "Parameters.h" +#include "FrameProcessor.h" +#include "CameraMetadata.h" +#include "Camera2Heap.h" +#include "../Camera2Device.h" + +namespace android { + +class Camera2Client; + +namespace camera2 { + +class CaptureSequencer; + +/*** + * ZSL queue processing + */ +class ZslProcessor: + virtual public Thread, + virtual public BufferItemConsumer::FrameAvailableListener, + virtual public FrameProcessor::FilteredListener, + virtual public Camera2Device::BufferReleasedListener { + public: + ZslProcessor(wp client, wp sequencer); + ~ZslProcessor(); + + // From mZslConsumer + virtual void onFrameAvailable(); + // From FrameProcessor + virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame); + + virtual void onBufferReleased(buffer_handle_t *handle); + + status_t updateStream(const Parameters ¶ms); + status_t deleteStream(); + int getStreamId() const; + int getReprocessStreamId() const; + + status_t pushToReprocess(int32_t requestId); + + void dump(int fd, const Vector& args) const; + private: + static const nsecs_t kWaitDuration = 10000000; // 10 ms + + enum { + RUNNING, + LOCKED + } mState; + + wp mClient; + wp mSequencer; + + mutable Mutex mInputMutex; + bool mZslBufferAvailable; + Condition mZslBufferAvailableSignal; + + enum { + 
NO_STREAM = -1 + }; + + int mZslStreamId; + int mZslReprocessStreamId; + sp mZslConsumer; + sp mZslWindow; + + struct ZslPair { + BufferItemConsumer::BufferItem buffer; + CameraMetadata frame; + }; + + static const size_t kZslBufferDepth = 3; + static const size_t kFrameListDepth = kZslBufferDepth * 2; + Vector mFrameList; + size_t mFrameListHead; + + ZslPair mNextPair; + + Vector mZslQueue; + size_t mZslQueueHead; + size_t mZslQueueTail; + + virtual bool threadLoop(); + + status_t processNewZslBuffer(sp &client); + + // Match up entries from frame list to buffers in ZSL queue + void findMatchesLocked(); +}; + + +}; //namespace camera2 +}; //namespace android + +#endif -- cgit v1.1