From a16733eeb9c40db4793bec408f29b4204e5f23b1 Mon Sep 17 00:00:00 2001
From: Eino-Ville Talvala
Date: Mon, 27 Aug 2012 23:41:56 -0700
Subject: Camera2: Factor out FrameProcessor.

Move FrameProcessor to its own file, add necessary interfaces to
Camera2Client.

Bug: 6243944
Change-Id: I8d1b8280a65d2822461ebe6b6c9de17992b34276
---
 services/camera/libcameraservice/Android.mk        |   3 +-
 services/camera/libcameraservice/Camera2Client.cpp | 231 ++++-----------------
 services/camera/libcameraservice/Camera2Client.h   |  56 ++---
 .../libcameraservice/camera2/FrameProcessor.cpp    | 214 +++++++++++++++++++
 .../libcameraservice/camera2/FrameProcessor.h      |  57 +++++
 5 files changed, 344 insertions(+), 217 deletions(-)
 create mode 100644 services/camera/libcameraservice/camera2/FrameProcessor.cpp
 create mode 100644 services/camera/libcameraservice/camera2/FrameProcessor.h

diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 391c319..eac6163 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -12,7 +12,8 @@ LOCAL_SRC_FILES:= \
     Camera2Client.cpp \
     Camera2Device.cpp \
     camera2/CameraMetadata.cpp \
-    camera2/Parameters.cpp
+    camera2/Parameters.cpp \
+    camera2/FrameProcessor.cpp
 
 LOCAL_SHARED_LIBRARIES:= \
     libui \
diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp
index 9b6401b..4edb49f 100644
--- a/services/camera/libcameraservice/Camera2Client.cpp
+++ b/services/camera/libcameraservice/Camera2Client.cpp
@@ -393,8 +393,8 @@ status_t Camera2Client::connect(const sp<ICameraClient>& client) {
 
     mClientPid = getCallingPid();
 
-    Mutex::Autolock iccl(mICameraClientLock);
     mCameraClient = client;
+    mSharedCameraClient = client;
 
     SharedParameters::Lock l(mParameters);
     l.mParameters.state = Parameters::STOPPED;
@@ -433,10 +433,9 @@ status_t Camera2Client::unlock() {
     // TODO: Check for uninterruptable conditions
 
     if (mClientPid == getCallingPid()) {
-        Mutex::Autolock iccl(mICameraClientLock);
-
         mClientPid = 0;
         mCameraClient.clear();
+        mSharedCameraClient.clear();
         return OK;
     }
 
@@ -1457,22 +1456,25 @@ void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) {
         case Parameters::FOCUS_MODE_FIXED:
         default:
             if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) {
-                ALOGE("%s: Unexpected AF state change %d (ID %d) in focus mode %d",
-                        __FUNCTION__, newState, triggerId, l.mParameters.focusMode);
+                ALOGE("%s: Unexpected AF state change %d "
+                        "(ID %d) in focus mode %d",
+                        __FUNCTION__, newState, triggerId,
+                        l.mParameters.focusMode);
             }
         }
     }
     if (sendMovingMessage) {
-        Mutex::Autolock iccl(mICameraClientLock);
-        if (mCameraClient != 0) {
-            mCameraClient->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
+        SharedCameraClient::Lock l(mSharedCameraClient);
+        if (l.mCameraClient != 0) {
+            l.mCameraClient->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
                     afInMotion ? 1 : 0, 0);
         }
     }
     if (sendCompletedMessage) {
-        Mutex::Autolock iccl(mICameraClientLock);
-        if (mCameraClient != 0) {
-            mCameraClient->notifyCallback(CAMERA_MSG_FOCUS, success ? 1 : 0, 0);
+        SharedCameraClient::Lock l(mSharedCameraClient);
+        if (l.mCameraClient != 0) {
+            l.mCameraClient->notifyCallback(CAMERA_MSG_FOCUS,
+                    success ? 1 : 0, 0);
         }
     }
 }
@@ -1487,185 +1489,38 @@ void Camera2Client::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
             __FUNCTION__, newState, triggerId);
 }
 
-Camera2Client::FrameProcessor::FrameProcessor(wp<Camera2Client> client):
-        Thread(false), mClient(client) {
+int Camera2Client::getCameraId() {
+    return mCameraId;
 }
 
-Camera2Client::FrameProcessor::~FrameProcessor() {
-    ALOGV("%s: Exit", __FUNCTION__);
+const sp<Camera2Device>& Camera2Client::getCameraDevice() {
+    return mDevice;
 }
 
-void Camera2Client::FrameProcessor::dump(int fd, const Vector<String16>& args) {
-    String8 result("    Latest received frame:\n");
-    write(fd, result.string(), result.size());
-    mLastFrame.dump(fd, 2, 6);
+camera2::SharedParameters& Camera2Client::getParameters() {
+    return mParameters;
 }
 
-bool Camera2Client::FrameProcessor::threadLoop() {
-    status_t res;
-
-    sp<Camera2Device> device;
-    {
-        sp<Camera2Client> client = mClient.promote();
-        if (client == 0) return false;
-        device = client->mDevice;
-    }
-
-    res = device->waitForNextFrame(kWaitDuration);
-    if (res == OK) {
-        sp<Camera2Client> client = mClient.promote();
-        if (client == 0) return false;
-        processNewFrames(client);
-    } else if (res != TIMED_OUT) {
-        ALOGE("Camera2Client::FrameProcessor: Error waiting for new "
-                "frames: %s (%d)", strerror(-res), res);
-    }
-
-    return true;
+Camera2Client::SharedCameraClient::Lock::Lock(SharedCameraClient &client):
+        mCameraClient(client.mCameraClient),
+        mSharedClient(client) {
+    mSharedClient.mCameraClientLock.lock();
 }
 
-void Camera2Client::FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
-    status_t res;
-    CameraMetadata frame;
-    while ( (res = client->mDevice->getNextFrame(&frame)) == OK) {
-        camera_metadata_entry_t entry;
-        entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
-        if (entry.count == 0) {
-            ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
-                    __FUNCTION__, client->mCameraId, strerror(-res), res);
-            break;
-        }
-
-        res = processFaceDetect(frame, client);
-        if (res != OK) break;
-
-        mLastFrame.acquire(frame);
-    }
-    if (res != NOT_ENOUGH_DATA) {
-        ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
-                __FUNCTION__, client->mCameraId, strerror(-res), res);
-        return;
-    }
-
-    return;
+Camera2Client::SharedCameraClient::Lock::~Lock() {
+    mSharedClient.mCameraClientLock.unlock();
 }
 
-status_t Camera2Client::FrameProcessor::processFaceDetect(
-        const CameraMetadata &frame, sp<Camera2Client> &client) {
-    status_t res;
-    camera_metadata_ro_entry_t entry;
-    bool enableFaceDetect;
-    int maxFaces;
-    {
-        SharedParameters::Lock l(client->mParameters);
-        enableFaceDetect = l.mParameters.enableFaceDetect;
-    }
-    entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE);
-
-    // TODO: This should be an error once implementations are compliant
-    if (entry.count == 0) {
-        return OK;
-    }
-
-    uint8_t faceDetectMode = entry.data.u8[0];
-
-    camera_frame_metadata metadata;
-    Vector<camera_face_t> faces;
-    metadata.number_of_faces = 0;
-
-    if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) {
-        SharedParameters::Lock l(client->mParameters);
-        entry = frame.find(ANDROID_STATS_FACE_RECTANGLES);
-        if (entry.count == 0) {
-            ALOGE("%s: Camera %d: Unable to read face rectangles",
-                    __FUNCTION__, client->mCameraId);
-            return res;
-        }
-        metadata.number_of_faces = entry.count / 4;
-        if (metadata.number_of_faces >
-                l.mParameters.fastInfo.maxFaces) {
-            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
-                    __FUNCTION__, client->mCameraId,
-                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
-            return res;
-        }
-        const int32_t *faceRects = entry.data.i32;
-
-        entry = frame.find(ANDROID_STATS_FACE_SCORES);
-        if (entry.count == 0) {
-            ALOGE("%s: Camera %d: Unable to read face scores",
-                    __FUNCTION__, client->mCameraId);
-            return res;
-        }
-        const uint8_t *faceScores = entry.data.u8;
-
-        const int32_t *faceLandmarks = NULL;
-        const int32_t *faceIds = NULL;
-
-        if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
-            entry = frame.find(ANDROID_STATS_FACE_LANDMARKS);
-            if (entry.count == 0) {
-                ALOGE("%s: Camera %d: Unable to read face landmarks",
-                        __FUNCTION__, client->mCameraId);
-                return res;
-            }
-            faceLandmarks = entry.data.i32;
-
-            entry = frame.find(ANDROID_STATS_FACE_IDS);
-
-            if (entry.count == 0) {
-                ALOGE("%s: Camera %d: Unable to read face IDs",
-                        __FUNCTION__, client->mCameraId);
-                return res;
-            }
-            faceIds = entry.data.i32;
-        }
-
-        faces.setCapacity(metadata.number_of_faces);
-
-        for (int i = 0; i < metadata.number_of_faces; i++) {
-            camera_face_t face;
-
-            face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
-            face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
-            face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
-            face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);
-
-            face.score = faceScores[i];
-            if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
-                face.id = faceIds[i];
-                face.left_eye[0] =
-                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
-                face.left_eye[1] =
-                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
-                face.right_eye[0] =
-                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
-                face.right_eye[1] =
-                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
-                face.mouth[0] =
-                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
-                face.mouth[1] =
-                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
-            } else {
-                face.id = 0;
-                face.left_eye[0] = face.left_eye[1] = -2000;
-                face.right_eye[0] = face.right_eye[1] = -2000;
-                face.mouth[0] = face.mouth[1] = -2000;
-            }
-            faces.push_back(face);
-        }
-
-        metadata.faces = faces.editArray();
-    }
+Camera2Client::SharedCameraClient& Camera2Client::SharedCameraClient::operator=(
+        const sp<ICameraClient>&client) {
+    Mutex::Autolock l(mCameraClientLock);
+    mCameraClient = client;
+    return *this;
+}
-
-    if (metadata.number_of_faces != 0) {
-        Mutex::Autolock iccl(client->mICameraClientLock);
-        if (client->mCameraClient != NULL) {
-            client->mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
-                    NULL, &metadata);
-        }
-    }
-    return OK;
+void Camera2Client::SharedCameraClient::clear() {
+    Mutex::Autolock l(mCameraClientLock);
+    mCameraClient.clear();
 }
 
 void Camera2Client::onCallbackAvailable() {
@@ -1777,11 +1632,11 @@ void Camera2Client::onCallbackAvailable() {
 
     // Call outside parameter lock to allow re-entrancy from notification
     {
-        Mutex::Autolock iccl(mICameraClientLock);
-        if (mCameraClient != 0) {
+        SharedCameraClient::Lock l(mSharedCameraClient);
+        if (l.mCameraClient != 0) {
             ALOGV("%s: Camera %d: Invoking client data callback",
                     __FUNCTION__, mCameraId);
-            mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_FRAME,
+            l.mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_FRAME,
                     callbackHeap->mBuffers[heapIdx], NULL);
         }
     }
@@ -1853,9 +1708,9 @@ void Camera2Client::onCaptureAvailable() {
         captureHeap = mCaptureHeap;
     }
     // Call outside parameter locks to allow re-entrancy from notification
-    Mutex::Autolock iccl(mICameraClientLock);
-    if (mCameraClient != 0) {
-        mCameraClient->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE,
+    SharedCameraClient::Lock l(mSharedCameraClient);
+    if (l.mCameraClient != 0) {
+        l.mCameraClient->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE,
             captureHeap->mBuffers[0], NULL);
     }
 }
@@ -1951,9 +1806,9 @@ void Camera2Client::onRecordingFrameAvailable() {
     }
 
     // Call outside locked parameters to allow re-entrancy from notification
-    Mutex::Autolock iccl(mICameraClientLock);
-    if (mCameraClient != 0) {
-        mCameraClient->dataCallbackTimestamp(timestamp,
+    SharedCameraClient::Lock l(mSharedCameraClient);
+    if (l.mCameraClient != 0) {
+        l.mCameraClient->dataCallbackTimestamp(timestamp,
                 CAMERA_MSG_VIDEO_FRAME,
                 recordingHeap->mBuffers[heapIdx]);
     }
diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h
index 4023397..3195f94 100644
--- a/services/camera/libcameraservice/Camera2Client.h
+++ b/services/camera/libcameraservice/Camera2Client.h
@@ -20,6 +20,7 @@
 #include "Camera2Device.h"
 #include "CameraService.h"
 #include "camera2/Parameters.h"
+#include "camera2/FrameProcessor.h"
 #include
 #include
 #include
@@ -82,6 +83,32 @@ public:
     virtual void notifyAutoExposure(uint8_t newState, int triggerId);
     virtual void notifyAutoWhitebalance(uint8_t newState, int triggerId);
 
+    // Interface used by independent components of Camera2Client.
+
+    int getCameraId();
+    const sp<Camera2Device>& getCameraDevice();
+    camera2::SharedParameters& getParameters();
+
+    // Simple class to ensure that access to ICameraClient is serialized by
+    // requiring mCameraClientLock to be locked before access to mCameraClient
+    // is possible.
+    class SharedCameraClient {
+      public:
+        class Lock {
+          public:
+            Lock(SharedCameraClient &client);
+            ~Lock();
+            sp<ICameraClient> &mCameraClient;
+          private:
+            SharedCameraClient &mSharedClient;
+        };
+        SharedCameraClient& operator=(const sp<ICameraClient>& client);
+        void clear();
+      private:
+        sp<ICameraClient> mCameraClient;
+        mutable Mutex mCameraClientLock;
+    } mSharedCameraClient;
+
 private:
     /** ICamera interface-related private members */
 
@@ -91,11 +118,6 @@ private:
     // they're called
     mutable Mutex mICameraLock;
 
-    // Mutex that must be locked by methods accessing the base Client's
-    // mCameraClient ICameraClient interface member, for sending notifications
-    // up to the camera user
-    mutable Mutex mICameraClientLock;
-
     typedef camera2::Parameters Parameters;
     typedef camera2::CameraMetadata CameraMetadata;
 
@@ -131,29 +153,7 @@ private:
     // Used with stream IDs
     static const int NO_STREAM = -1;
 
-    /* Output frame metadata processing thread. This thread waits for new
-     * frames from the device, and analyzes them as necessary.
-     */
-    class FrameProcessor: public Thread {
-      public:
-        FrameProcessor(wp<Camera2Client> client);
-        ~FrameProcessor();
-
-        void dump(int fd, const Vector<String16>& args);
-      private:
-        static const nsecs_t kWaitDuration = 10000000; // 10 ms
-        wp<Camera2Client> mClient;
-
-        virtual bool threadLoop();
-
-        void processNewFrames(sp<Camera2Client> &client);
-        status_t processFaceDetect(const CameraMetadata &frame,
-                sp<Camera2Client> &client);
-
-        CameraMetadata mLastFrame;
-    };
-
-    sp<FrameProcessor> mFrameProcessor;
+    sp<camera2::FrameProcessor> mFrameProcessor;
 
     /* Preview related members */
 
diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp
new file mode 100644
index 0000000..5059754
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp
@@ -0,0 +1,214 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera2Client::FrameProcessor"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include "FrameProcessor.h"
+#include "../Camera2Device.h"
+#include "../Camera2Client.h"
+
+namespace android {
+namespace camera2 {
+
+FrameProcessor::FrameProcessor(wp<Camera2Client> client):
+        Thread(false), mClient(client) {
+}
+
+FrameProcessor::~FrameProcessor() {
+    ALOGV("%s: Exit", __FUNCTION__);
+}
+
+void FrameProcessor::dump(int fd, const Vector<String16>& args) {
+    String8 result("    Latest received frame:\n");
+    write(fd, result.string(), result.size());
+    mLastFrame.dump(fd, 2, 6);
+}
+
+bool FrameProcessor::threadLoop() {
+    status_t res;
+
+    sp<Camera2Device> device;
+    {
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) return false;
+        device = client->getCameraDevice();
+    }
+
+    res = device->waitForNextFrame(kWaitDuration);
+    if (res == OK) {
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) return false;
+        processNewFrames(client);
+    } else if (res != TIMED_OUT) {
+        ALOGE("Camera2Client::FrameProcessor: Error waiting for new "
+                "frames: %s (%d)", strerror(-res), res);
+    }
+
+    return true;
+}
+
+void FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
+    status_t res;
+    CameraMetadata frame;
+    while ( (res = client->getCameraDevice()->getNextFrame(&frame)) == OK) {
+        camera_metadata_entry_t entry;
+        entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
+        if (entry.count == 0) {
+            ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
+                    __FUNCTION__, client->getCameraId(), strerror(-res), res);
+            break;
+        }
+
+        res = processFaceDetect(frame, client);
+        if (res != OK) break;
+
+        mLastFrame.acquire(frame);
+    }
+    if (res != NOT_ENOUGH_DATA) {
+        ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+        return;
+    }
+
+    return;
+}
+
+status_t FrameProcessor::processFaceDetect(
+        const CameraMetadata &frame, sp<Camera2Client> &client) {
+    status_t res;
+    camera_metadata_ro_entry_t entry;
+    bool enableFaceDetect;
+    int maxFaces;
+    {
+        SharedParameters::Lock l(client->getParameters());
+        enableFaceDetect = l.mParameters.enableFaceDetect;
+    }
+    entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE);
+
+    // TODO: This should be an error once implementations are compliant
+    if (entry.count == 0) {
+        return OK;
+    }
+
+    uint8_t faceDetectMode = entry.data.u8[0];
+
+    camera_frame_metadata metadata;
+    Vector<camera_face_t> faces;
+    metadata.number_of_faces = 0;
+
+    if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) {
+        SharedParameters::Lock l(client->getParameters());
+        entry = frame.find(ANDROID_STATS_FACE_RECTANGLES);
+        if (entry.count == 0) {
+            ALOGE("%s: Camera %d: Unable to read face rectangles",
+                    __FUNCTION__, client->getCameraId());
+            return res;
+        }
+        metadata.number_of_faces = entry.count / 4;
+        if (metadata.number_of_faces >
+                l.mParameters.fastInfo.maxFaces) {
+            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
+                    __FUNCTION__, client->getCameraId(),
+                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
+            return res;
+        }
+        const int32_t *faceRects = entry.data.i32;
+
+        entry = frame.find(ANDROID_STATS_FACE_SCORES);
+        if (entry.count == 0) {
+            ALOGE("%s: Camera %d: Unable to read face scores",
+                    __FUNCTION__, client->getCameraId());
+            return res;
+        }
+        const uint8_t *faceScores = entry.data.u8;
+
+        const int32_t *faceLandmarks = NULL;
+        const int32_t *faceIds = NULL;
+
+        if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
+            entry = frame.find(ANDROID_STATS_FACE_LANDMARKS);
+            if (entry.count == 0) {
+                ALOGE("%s: Camera %d: Unable to read face landmarks",
+                        __FUNCTION__, client->getCameraId());
+                return res;
+            }
+            faceLandmarks = entry.data.i32;
+
+            entry = frame.find(ANDROID_STATS_FACE_IDS);
+
+            if (entry.count == 0) {
+                ALOGE("%s: Camera %d: Unable to read face IDs",
+                        __FUNCTION__, client->getCameraId());
+                return res;
+            }
+            faceIds = entry.data.i32;
+        }
+
+        faces.setCapacity(metadata.number_of_faces);
+
+        for (int i = 0; i < metadata.number_of_faces; i++) {
+            camera_face_t face;
+
+            face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
+            face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
+            face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
+            face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);
+
+            face.score = faceScores[i];
+            if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
+                face.id = faceIds[i];
+                face.left_eye[0] =
+                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
+                face.left_eye[1] =
+                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
+                face.right_eye[0] =
+                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
+                face.right_eye[1] =
+                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
+                face.mouth[0] =
+                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
+                face.mouth[1] =
+                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
+            } else {
+                face.id = 0;
+                face.left_eye[0] = face.left_eye[1] = -2000;
+                face.right_eye[0] = face.right_eye[1] = -2000;
+                face.mouth[0] = face.mouth[1] = -2000;
+            }
+            faces.push_back(face);
+        }
+
+        metadata.faces = faces.editArray();
+    }
+
+    if (metadata.number_of_faces != 0) {
+        Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient);
+        if (l.mCameraClient != NULL) {
+            l.mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
+                    NULL, &metadata);
+        }
+    }
+    return OK;
+}
+
+
+}; // namespace camera2
+}; // namespace android
diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.h b/services/camera/libcameraservice/camera2/FrameProcessor.h
new file mode 100644
index 0000000..2cdf7f0
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/FrameProcessor.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_FRAMEPROCESSOR_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_FRAMEPROCESSOR_H
+
+#include <utils/Thread.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+#include "CameraMetadata.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+/* Output frame metadata processing thread. This thread waits for new
+ * frames from the device, and analyzes them as necessary.
+ */
+class FrameProcessor: public Thread {
+  public:
+    FrameProcessor(wp<Camera2Client> client);
+    ~FrameProcessor();
+
+    void dump(int fd, const Vector<String16>& args);
+  private:
+    static const nsecs_t kWaitDuration = 10000000; // 10 ms
+    wp<Camera2Client> mClient;
+
+    virtual bool threadLoop();
+
+    void processNewFrames(sp<Camera2Client> &client);
+    status_t processFaceDetect(const CameraMetadata &frame,
+            sp<Camera2Client> &client);
+
+    CameraMetadata mLastFrame;
+};
+
+
+}; //namespace camera2
+}; //namespace android
+
+#endif
-- 
cgit v1.1