Diffstat (limited to 'services/camera/libcameraservice/camera2')
-rw-r--r--  services/camera/libcameraservice/camera2/CallbackProcessor.cpp     2
-rw-r--r--  services/camera/libcameraservice/camera2/CallbackProcessor.h       2
-rw-r--r--  services/camera/libcameraservice/camera2/CameraMetadata.cpp        4
-rw-r--r--  services/camera/libcameraservice/camera2/CameraMetadata.h          5
-rw-r--r--  services/camera/libcameraservice/camera2/CaptureSequencer.cpp    506
-rw-r--r--  services/camera/libcameraservice/camera2/CaptureSequencer.h      154
-rw-r--r--  services/camera/libcameraservice/camera2/FrameProcessor.cpp       67
-rw-r--r--  services/camera/libcameraservice/camera2/FrameProcessor.h         19
-rw-r--r--  services/camera/libcameraservice/camera2/JpegProcessor.cpp (renamed from services/camera/libcameraservice/camera2/CaptureProcessor.cpp)    33
-rw-r--r--  services/camera/libcameraservice/camera2/JpegProcessor.h (renamed from services/camera/libcameraservice/camera2/CaptureProcessor.h)        15
-rw-r--r--  services/camera/libcameraservice/camera2/Parameters.cpp          203
-rw-r--r--  services/camera/libcameraservice/camera2/Parameters.h             26
-rw-r--r--  services/camera/libcameraservice/camera2/ZslProcessor.cpp        378
-rw-r--r--  services/camera/libcameraservice/camera2/ZslProcessor.h          119
14 files changed, 1498 insertions(+), 35 deletions(-)
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
index 854b890..bccb18e 100644
--- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
@@ -136,7 +136,7 @@ int CallbackProcessor::getStreamId() const {
return mCallbackStreamId;
}
-void CallbackProcessor::dump(int fd, const Vector<String16>& args) {
+void CallbackProcessor::dump(int fd, const Vector<String16>& args) const {
}
bool CallbackProcessor::threadLoop() {
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.h b/services/camera/libcameraservice/camera2/CallbackProcessor.h
index 36c51a3..c2a1372 100644
--- a/services/camera/libcameraservice/camera2/CallbackProcessor.h
+++ b/services/camera/libcameraservice/camera2/CallbackProcessor.h
@@ -48,7 +48,7 @@ class CallbackProcessor:
status_t deleteStream();
int getStreamId() const;
- void dump(int fd, const Vector<String16>& args);
+ void dump(int fd, const Vector<String16>& args) const;
private:
static const nsecs_t kWaitDuration = 10000000; // 10 ms
wp<Camera2Client> mClient;
diff --git a/services/camera/libcameraservice/camera2/CameraMetadata.cpp b/services/camera/libcameraservice/camera2/CameraMetadata.cpp
index 95377b2..8399e20 100644
--- a/services/camera/libcameraservice/camera2/CameraMetadata.cpp
+++ b/services/camera/libcameraservice/camera2/CameraMetadata.cpp
@@ -84,6 +84,10 @@ size_t CameraMetadata::entryCount() const {
get_camera_metadata_entry_count(mBuffer);
}
+bool CameraMetadata::isEmpty() const {
+ return entryCount() == 0;
+}
+
status_t CameraMetadata::sort() {
return sort_camera_metadata(mBuffer);
}
diff --git a/services/camera/libcameraservice/camera2/CameraMetadata.h b/services/camera/libcameraservice/camera2/CameraMetadata.h
index 340414e..aee6cd7 100644
--- a/services/camera/libcameraservice/camera2/CameraMetadata.h
+++ b/services/camera/libcameraservice/camera2/CameraMetadata.h
@@ -87,6 +87,11 @@ class CameraMetadata {
size_t entryCount() const;
/**
+ * Is the buffer empty (no entries)
+ */
+ bool isEmpty() const;
+
+ /**
* Sort metadata buffer for faster find
*/
status_t sort();
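The new isEmpty() helper lets a caller hand a frame to a listener that may take ownership and then decide whether anything is left to keep. A minimal sketch of that use, mirroring the FrameProcessor change later in this patch (the helper name is illustrative, not part of the patch):

    #include "CameraMetadata.h"

    using namespace android::camera2;

    // Keep a copy of the frame only if a listener has not already taken
    // ownership of the underlying buffer.
    static void keepLastFrame(CameraMetadata &frame, CameraMetadata &lastFrame) {
        if (!frame.isEmpty()) {            // same as entryCount() != 0
            lastFrame.acquire(frame);      // takes the buffer; 'frame' is now empty
        }
    }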
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
new file mode 100644
index 0000000..532d2aa
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
@@ -0,0 +1,506 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera2Client::CaptureSequencer"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include <utils/Vector.h>
+
+#include "CaptureSequencer.h"
+#include "../Camera2Device.h"
+#include "../Camera2Client.h"
+#include "Parameters.h"
+
+namespace android {
+namespace camera2 {
+
+/** Public members */
+
+CaptureSequencer::CaptureSequencer(wp<Camera2Client> client):
+ Thread(false),
+ mStartCapture(false),
+ mBusy(false),
+ mNewAEState(false),
+ mNewFrameReceived(false),
+ mNewCaptureReceived(false),
+ mClient(client),
+ mCaptureState(IDLE),
+ mTriggerId(0),
+ mTimeoutCount(0),
+ mCaptureId(Camera2Client::kFirstCaptureRequestId) {
+}
+
+CaptureSequencer::~CaptureSequencer() {
+ ALOGV("%s: Exit", __FUNCTION__);
+}
+
+void CaptureSequencer::setZslProcessor(wp<ZslProcessor> processor) {
+ Mutex::Autolock l(mInputMutex);
+ mZslProcessor = processor;
+}
+
+status_t CaptureSequencer::startCapture() {
+ ATRACE_CALL();
+ Mutex::Autolock l(mInputMutex);
+ if (mBusy) {
+ ALOGE("%s: Already busy capturing!", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+ if (!mStartCapture) {
+ mStartCapture = true;
+ mStartCaptureSignal.signal();
+ }
+ return OK;
+}
+
+void CaptureSequencer::notifyAutoExposure(uint8_t newState, int triggerId) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mInputMutex);
+ mAEState = newState;
+ mAETriggerId = triggerId;
+ if (!mNewAEState) {
+ mNewAEState = true;
+ mNewNotifySignal.signal();
+ }
+}
+
+void CaptureSequencer::onFrameAvailable(int32_t frameId,
+ CameraMetadata &frame) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mInputMutex);
+ mNewFrameId = frameId;
+ mNewFrame.acquire(frame);
+ if (!mNewFrameReceived) {
+ mNewFrameReceived = true;
+ mNewFrameSignal.signal();
+ }
+}
+
+void CaptureSequencer::onCaptureAvailable(nsecs_t timestamp) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mInputMutex);
+ mCaptureTimestamp = timestamp;
+ if (!mNewCaptureReceived) {
+ mNewCaptureReceived = true;
+ mNewCaptureSignal.signal();
+ }
+}
+
+
+void CaptureSequencer::dump(int fd, const Vector<String16>& args) {
+ String8 result;
+ if (mCaptureRequest.entryCount() != 0) {
+ result = " Capture request:\n";
+ write(fd, result.string(), result.size());
+ mCaptureRequest.dump(fd, 2, 6);
+ } else {
+ result = " Capture request: undefined\n";
+ write(fd, result.string(), result.size());
+ }
+ result = String8::format(" Current capture state: %s\n",
+ kStateNames[mCaptureState]);
+ result.append(" Latest captured frame:\n");
+ write(fd, result.string(), result.size());
+ mNewFrame.dump(fd, 2, 6);
+}
+
+/** Private members */
+
+const char* CaptureSequencer::kStateNames[CaptureSequencer::NUM_CAPTURE_STATES+1] =
+{
+ "IDLE",
+ "START",
+ "ZSL_START",
+ "ZSL_WAITING",
+ "ZSL_REPROCESSING",
+ "STANDARD_START",
+ "STANDARD_PRECAPTURE",
+ "STANDARD_CAPTURING",
+ "DONE",
+ "ERROR",
+ "UNKNOWN"
+};
+
+const CaptureSequencer::StateManager
+ CaptureSequencer::kStateManagers[CaptureSequencer::NUM_CAPTURE_STATES-1] = {
+ &CaptureSequencer::manageIdle,
+ &CaptureSequencer::manageStart,
+ &CaptureSequencer::manageZslStart,
+ &CaptureSequencer::manageZslWaiting,
+ &CaptureSequencer::manageZslReprocessing,
+ &CaptureSequencer::manageStandardStart,
+ &CaptureSequencer::manageStandardPrecaptureWait,
+ &CaptureSequencer::manageStandardCapture,
+ &CaptureSequencer::manageStandardCaptureWait,
+ &CaptureSequencer::manageDone,
+};
+
+bool CaptureSequencer::threadLoop() {
+ status_t res;
+
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) return false;
+
+ if (mCaptureState < ERROR) {
+ mCaptureState = (this->*kStateManagers[mCaptureState])(client);
+ } else {
+ ALOGE("%s: Bad capture state: %s",
+ __FUNCTION__, kStateNames[mCaptureState]);
+ return false;
+ }
+
+ return true;
+}
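
threadLoop() above dispatches through a table of pointer-to-member functions indexed by the current state. A self-contained sketch of that dispatch pattern, with illustrative states only:

    class Machine {
      public:
        enum State { IDLE, RUNNING, DONE, NUM_STATES };

        Machine() : mState(IDLE) {}

        // One handler per state; each returns the next state,
        // as CaptureSequencer::kStateManagers does.
        typedef State (Machine::*Handler)();
        static const Handler kHandlers[NUM_STATES];

        // One loop iteration: call the handler for the current state.
        void step() { mState = (this->*kHandlers[mState])(); }

      private:
        State mState;
        State handleIdle()    { return RUNNING; }
        State handleRunning() { return DONE; }
        State handleDone()    { return DONE; }
    };

    const Machine::Handler Machine::kHandlers[Machine::NUM_STATES] = {
        &Machine::handleIdle,
        &Machine::handleRunning,
        &Machine::handleDone,
    };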
+
+CaptureSequencer::CaptureState CaptureSequencer::manageIdle(sp<Camera2Client> &client) {
+ status_t res;
+ ATRACE_CALL();
+ Mutex::Autolock l(mInputMutex);
+ while (!mStartCapture) {
+ res = mStartCaptureSignal.waitRelative(mInputMutex,
+ kWaitDuration);
+ if (res == TIMED_OUT) break;
+ }
+ if (mStartCapture) {
+ mStartCapture = false;
+ mBusy = true;
+ return START;
+ }
+ return IDLE;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageDone(sp<Camera2Client> &client) {
+ status_t res;
+ ATRACE_CALL();
+ mCaptureId++;
+
+ {
+ Mutex::Autolock l(mInputMutex);
+ mBusy = false;
+ }
+
+ SharedParameters::Lock l(client->getParameters());
+ switch (l.mParameters.state) {
+ case Parameters::STILL_CAPTURE:
+ l.mParameters.state = Parameters::STOPPED;
+ break;
+ case Parameters::VIDEO_SNAPSHOT:
+ l.mParameters.state = Parameters::RECORD;
+ break;
+ default:
+ ALOGE("%s: Camera %d: Still image produced unexpectedly "
+ "in state %s!",
+ __FUNCTION__, client->getCameraId(),
+ Parameters::getStateName(l.mParameters.state));
+ }
+
+ return IDLE;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageStart(
+ sp<Camera2Client> &client) {
+ status_t res;
+ ATRACE_CALL();
+ SharedParameters::Lock l(client->getParameters());
+ CaptureState nextState = DONE;
+
+ res = updateCaptureRequest(l.mParameters, client);
+ if (res != OK ) {
+ ALOGE("%s: Camera %d: Can't update still image capture request: %s (%d)",
+ __FUNCTION__, client->getCameraId(), strerror(-res), res);
+ return DONE;
+ }
+
+ if (l.mParameters.zslMode &&
+ l.mParameters.state == Parameters::STILL_CAPTURE) {
+ nextState = ZSL_START;
+ } else {
+ nextState = STANDARD_START;
+ }
+
+ return nextState;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageZslStart(
+ sp<Camera2Client> &client) {
+ status_t res;
+ sp<ZslProcessor> processor = mZslProcessor.promote();
+ if (processor == 0) {
+ ALOGE("%s: No ZSL queue to use!", __FUNCTION__);
+ return DONE;
+ }
+
+ client->registerFrameListener(mCaptureId,
+ this);
+
+ res = client->getCameraDevice()->clearStreamingRequest();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
+ "%s (%d)",
+ __FUNCTION__, client->getCameraId(), strerror(-res), res);
+ return DONE;
+ }
+ // TODO: Actually select the right thing here.
+ processor->pushToReprocess(mCaptureId);
+
+ mTimeoutCount = kMaxTimeoutsForCaptureEnd;
+ return STANDARD_CAPTURE_WAIT;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageZslWaiting(
+ sp<Camera2Client> &client) {
+ return DONE;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageZslReprocessing(
+ sp<Camera2Client> &client) {
+ return START;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageStandardStart(
+ sp<Camera2Client> &client) {
+ ATRACE_CALL();
+ client->registerFrameListener(mCaptureId,
+ this);
+ {
+ SharedParameters::Lock l(client->getParameters());
+ mTriggerId = l.mParameters.precaptureTriggerCounter++;
+ }
+ client->getCameraDevice()->triggerPrecaptureMetering(mTriggerId);
+
+ mAeInPrecapture = false;
+ mTimeoutCount = kMaxTimeoutsForPrecaptureStart;
+ return STANDARD_PRECAPTURE_WAIT;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageStandardPrecaptureWait(
+ sp<Camera2Client> &client) {
+ status_t res;
+ ATRACE_CALL();
+ Mutex::Autolock l(mInputMutex);
+ while (!mNewAEState) {
+ res = mNewNotifySignal.waitRelative(mInputMutex, kWaitDuration);
+ if (res == TIMED_OUT) {
+ mTimeoutCount--;
+ break;
+ }
+ }
+ if (mTimeoutCount <= 0) {
+ ALOGW("Timed out waiting for precapture %s",
+ mAeInPrecapture ? "end" : "start");
+ return STANDARD_CAPTURE;
+ }
+ if (mNewAEState) {
+ if (!mAeInPrecapture) {
+ // Waiting to see PRECAPTURE state
+ if (mAETriggerId == mTriggerId &&
+ mAEState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
+ ALOGV("%s: Got precapture start", __FUNCTION__);
+ mAeInPrecapture = true;
+ mTimeoutCount = kMaxTimeoutsForPrecaptureEnd;
+ }
+ } else {
+ // Waiting to see PRECAPTURE state end
+ if (mAETriggerId == mTriggerId &&
+ mAEState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
+ ALOGV("%s: Got precapture end", __FUNCTION__);
+ return STANDARD_CAPTURE;
+ }
+ }
+ mNewAEState = false;
+ }
+ return STANDARD_PRECAPTURE_WAIT;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture(
+ sp<Camera2Client> &client) {
+ status_t res;
+ ATRACE_CALL();
+ SharedParameters::Lock l(client->getParameters());
+ Vector<uint8_t> outputStreams;
+
+ outputStreams.push(client->getPreviewStreamId());
+ outputStreams.push(client->getCaptureStreamId());
+
+ if (l.mParameters.previewCallbackFlags &
+ CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
+ outputStreams.push(client->getCallbackStreamId());
+ }
+
+ if (l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
+ outputStreams.push(client->getRecordingStreamId());
+ }
+
+ res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+ outputStreams);
+ if (res == OK) {
+ res = mCaptureRequest.update(ANDROID_REQUEST_ID,
+ &mCaptureId, 1);
+ }
+ if (res == OK) {
+ res = mCaptureRequest.sort();
+ }
+
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to set up still capture request: %s (%d)",
+ __FUNCTION__, client->getCameraId(), strerror(-res), res);
+ return DONE;
+ }
+
+ CameraMetadata captureCopy = mCaptureRequest;
+ if (captureCopy.entryCount() == 0) {
+ ALOGE("%s: Camera %d: Unable to copy capture request for HAL device",
+ __FUNCTION__, client->getCameraId());
+ return DONE;
+ }
+
+ if (l.mParameters.state == Parameters::STILL_CAPTURE) {
+ res = client->getCameraDevice()->clearStreamingRequest();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to stop preview for still capture: "
+ "%s (%d)",
+ __FUNCTION__, client->getCameraId(), strerror(-res), res);
+ return DONE;
+ }
+ }
+ // TODO: Capture should be atomic with setStreamingRequest here
+ res = client->getCameraDevice()->capture(captureCopy);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to submit still image capture request: "
+ "%s (%d)",
+ __FUNCTION__, client->getCameraId(), strerror(-res), res);
+ return DONE;
+ }
+
+ mTimeoutCount = kMaxTimeoutsForCaptureEnd;
+ return STANDARD_CAPTURE_WAIT;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageStandardCaptureWait(
+ sp<Camera2Client> &client) {
+ status_t res;
+ ATRACE_CALL();
+ Mutex::Autolock l(mInputMutex);
+ while (!mNewFrameReceived) {
+ res = mNewFrameSignal.waitRelative(mInputMutex, kWaitDuration);
+ if (res == TIMED_OUT) {
+ mTimeoutCount--;
+ break;
+ }
+ }
+ while (!mNewCaptureReceived) {
+ res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration);
+ if (res == TIMED_OUT) {
+ mTimeoutCount--;
+ break;
+ }
+ }
+ if (mTimeoutCount <= 0) {
+ ALOGW("Timed out waiting for capture to complete");
+ return DONE;
+ }
+ if (mNewFrameReceived && mNewCaptureReceived) {
+ if (mNewFrameId != mCaptureId) {
+ ALOGW("Mismatched capture frame IDs: Expected %d, got %d",
+ mCaptureId, mNewFrameId);
+ }
+ camera_metadata_entry_t entry;
+ entry = mNewFrame.find(ANDROID_SENSOR_TIMESTAMP);
+ if (entry.count == 0) {
+ ALOGE("No timestamp field in capture frame!");
+ } else if (entry.data.i64[0] != mCaptureTimestamp) {
+ ALOGW("Mismatched capture timestamps: Metadata frame %lld,"
+ " captured buffer %lld", entry.data.i64[0], mCaptureTimestamp);
+ }
+ client->removeFrameListener(mCaptureId);
+
+ mNewFrameReceived = false;
+ mNewCaptureReceived = false;
+ return DONE;
+ }
+ return STANDARD_CAPTURE_WAIT;
+}
+
+status_t CaptureSequencer::updateCaptureRequest(const Parameters &params,
+ sp<Camera2Client> &client) {
+ ATRACE_CALL();
+ status_t res;
+ if (mCaptureRequest.entryCount() == 0) {
+ res = client->getCameraDevice()->createDefaultRequest(
+ CAMERA2_TEMPLATE_STILL_CAPTURE,
+ &mCaptureRequest);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to create default still image request:"
+ " %s (%d)", __FUNCTION__, client->getCameraId(),
+ strerror(-res), res);
+ return res;
+ }
+ }
+
+ res = params.updateRequest(&mCaptureRequest);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to update common entries of capture "
+ "request: %s (%d)", __FUNCTION__, client->getCameraId(),
+ strerror(-res), res);
+ return res;
+ }
+
+ res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_SIZE,
+ params.jpegThumbSize, 2);
+ if (res != OK) return res;
+ res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
+ &params.jpegThumbQuality, 1);
+ if (res != OK) return res;
+ res = mCaptureRequest.update(ANDROID_JPEG_QUALITY,
+ &params.jpegQuality, 1);
+ if (res != OK) return res;
+ res = mCaptureRequest.update(
+ ANDROID_JPEG_ORIENTATION,
+ &params.jpegRotation, 1);
+ if (res != OK) return res;
+
+ if (params.gpsEnabled) {
+ res = mCaptureRequest.update(
+ ANDROID_JPEG_GPS_COORDINATES,
+ params.gpsCoordinates, 3);
+ if (res != OK) return res;
+ res = mCaptureRequest.update(
+ ANDROID_JPEG_GPS_TIMESTAMP,
+ &params.gpsTimestamp, 1);
+ if (res != OK) return res;
+ res = mCaptureRequest.update(
+ ANDROID_JPEG_GPS_PROCESSING_METHOD,
+ params.gpsProcessingMethod);
+ if (res != OK) return res;
+ } else {
+ res = mCaptureRequest.erase(ANDROID_JPEG_GPS_COORDINATES);
+ if (res != OK) return res;
+ res = mCaptureRequest.erase(ANDROID_JPEG_GPS_TIMESTAMP);
+ if (res != OK) return res;
+ res = mCaptureRequest.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
+ if (res != OK) return res;
+ }
+
+ return OK;
+}
+
+
+}; // namespace camera2
+}; // namespace android
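For orientation, a hedged sketch of how a client might construct and drive the sequencer; everything beyond CaptureSequencer's own methods (the client, processor, and request-ID variables) is assumed rather than shown in this patch:

    sp<CaptureSequencer> sequencer = new CaptureSequencer(client);
    sequencer->setZslProcessor(zslProcessor);   // optional ZSL path
    sequencer->run("CaptureSequencer");         // Thread::run starts threadLoop()

    // Kick off a still capture; returns INVALID_OPERATION if one is in flight.
    status_t res = sequencer->startCapture();

    // Completion is driven by callbacks: the JPEG path reports the buffer
    // timestamp via onCaptureAvailable(), the frame processor delivers the
    // matching metadata via onFrameAvailable(), and AE state changes arrive
    // through notifyAutoExposure().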
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.h b/services/camera/libcameraservice/camera2/CaptureSequencer.h
new file mode 100644
index 0000000..0492a43
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/CaptureSequencer.h
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CAPTURESEQUENCER_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_CAPTURESEQUENCER_H
+
+#include <utils/Thread.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include "CameraMetadata.h"
+#include "Parameters.h"
+#include "FrameProcessor.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+class ZslProcessor;
+
+/**
+ * Manages the still image capture process for
+ * zero-shutter-lag, regular, and video snapshots.
+ */
+class CaptureSequencer:
+ virtual public Thread,
+ virtual public FrameProcessor::FilteredListener {
+ public:
+ CaptureSequencer(wp<Camera2Client> client);
+ ~CaptureSequencer();
+
+ // Get reference to the ZslProcessor, which holds the ZSL buffers and frames
+ void setZslProcessor(wp<ZslProcessor> processor);
+
+ // Begin still image capture
+ status_t startCapture();
+
+ // Notifications about AE state changes
+ void notifyAutoExposure(uint8_t newState, int triggerId);
+
+ // Notifications from the frame processor
+ virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame);
+
+ // Notifications from the capture processor
+ void onCaptureAvailable(nsecs_t timestamp);
+
+ void dump(int fd, const Vector<String16>& args);
+
+ private:
+ /**
+ * Accessed by other threads
+ */
+ Mutex mInputMutex;
+
+ bool mStartCapture;
+ bool mBusy;
+ Condition mStartCaptureSignal;
+
+ bool mNewAEState;
+ uint8_t mAEState;
+ int mAETriggerId;
+ Condition mNewNotifySignal;
+
+ bool mNewFrameReceived;
+ int32_t mNewFrameId;
+ CameraMetadata mNewFrame;
+ Condition mNewFrameSignal;
+
+ bool mNewCaptureReceived;
+ nsecs_t mCaptureTimestamp;
+ Condition mNewCaptureSignal;
+
+ /**
+ * Internal to CaptureSequencer
+ */
+ static const nsecs_t kWaitDuration = 100000000; // 100 ms
+ static const int kMaxTimeoutsForPrecaptureStart = 2; // 200 ms
+ static const int kMaxTimeoutsForPrecaptureEnd = 10; // 1 sec
+ static const int kMaxTimeoutsForCaptureEnd = 20; // 2 sec
+
+ wp<Camera2Client> mClient;
+ wp<ZslProcessor> mZslProcessor;
+
+ enum CaptureState {
+ IDLE,
+ START,
+ ZSL_START,
+ ZSL_WAITING,
+ ZSL_REPROCESSING,
+ STANDARD_START,
+ STANDARD_PRECAPTURE_WAIT,
+ STANDARD_CAPTURE,
+ STANDARD_CAPTURE_WAIT,
+ DONE,
+ ERROR,
+ NUM_CAPTURE_STATES
+ } mCaptureState;
+ static const char* kStateNames[];
+
+ typedef CaptureState (CaptureSequencer::*StateManager)(sp<Camera2Client> &client);
+ static const StateManager kStateManagers[];
+
+ CameraMetadata mCaptureRequest;
+
+ int mTriggerId;
+ int mTimeoutCount;
+ bool mAeInPrecapture;
+
+ int32_t mCaptureId;
+
+ // Main internal methods
+
+ virtual bool threadLoop();
+
+ CaptureState manageIdle(sp<Camera2Client> &client);
+ CaptureState manageStart(sp<Camera2Client> &client);
+
+ CaptureState manageZslStart(sp<Camera2Client> &client);
+ CaptureState manageZslWaiting(sp<Camera2Client> &client);
+ CaptureState manageZslReprocessing(sp<Camera2Client> &client);
+
+ CaptureState manageStandardStart(sp<Camera2Client> &client);
+ CaptureState manageStandardPrecaptureWait(sp<Camera2Client> &client);
+ CaptureState manageStandardCapture(sp<Camera2Client> &client);
+ CaptureState manageStandardCaptureWait(sp<Camera2Client> &client);
+
+ CaptureState manageDone(sp<Camera2Client> &client);
+
+ // Utility methods
+
+ status_t updateCaptureRequest(const Parameters &params,
+ sp<Camera2Client> &client);
+};
+
+}; // namespace camera2
+}; // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp
index 5059754..e24db0b 100644
--- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp
@@ -36,6 +36,19 @@ FrameProcessor::~FrameProcessor() {
ALOGV("%s: Exit", __FUNCTION__);
}
+status_t FrameProcessor::registerListener(int32_t id,
+ wp<FilteredListener> listener) {
+ Mutex::Autolock l(mInputMutex);
+ ALOGV("%s: Registering listener for frame id %d",
+ __FUNCTION__, id);
+ return mListeners.replaceValueFor(id, listener);
+}
+
+status_t FrameProcessor::removeListener(int32_t id) {
+ Mutex::Autolock l(mInputMutex);
+ return mListeners.removeItem(id);
+}
+
void FrameProcessor::dump(int fd, const Vector<String16>& args) {
String8 result(" Latest received frame:\n");
write(fd, result.string(), result.size());
@@ -50,6 +63,7 @@ bool FrameProcessor::threadLoop() {
sp<Camera2Client> client = mClient.promote();
if (client == 0) return false;
device = client->getCameraDevice();
+ if (device == 0) return false;
}
res = device->waitForNextFrame(kWaitDuration);
@@ -67,20 +81,28 @@ bool FrameProcessor::threadLoop() {
void FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
status_t res;
+ ATRACE_CALL();
CameraMetadata frame;
while ( (res = client->getCameraDevice()->getNextFrame(&frame)) == OK) {
camera_metadata_entry_t entry;
+
entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
if (entry.count == 0) {
- ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
- __FUNCTION__, client->getCameraId(), strerror(-res), res);
+ ALOGE("%s: Camera %d: Error reading frame number",
+ __FUNCTION__, client->getCameraId());
break;
}
res = processFaceDetect(frame, client);
if (res != OK) break;
- mLastFrame.acquire(frame);
+ // Must be last - listener can take ownership of frame
+ res = processListener(frame, client);
+ if (res != OK) break;
+
+ if (!frame.isEmpty()) {
+ mLastFrame.acquire(frame);
+ }
}
if (res != NOT_ENOUGH_DATA) {
ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
@@ -91,9 +113,43 @@ void FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
return;
}
-status_t FrameProcessor::processFaceDetect(
- const CameraMetadata &frame, sp<Camera2Client> &client) {
+status_t FrameProcessor::processListener(CameraMetadata &frame,
+ sp<Camera2Client> &client) {
+ status_t res;
+ ATRACE_CALL();
+ camera_metadata_entry_t entry;
+
+ entry = frame.find(ANDROID_REQUEST_ID);
+ if (entry.count == 0) {
+ ALOGE("%s: Camera %d: Error reading frame id",
+ __FUNCTION__, client->getCameraId());
+ return BAD_VALUE;
+ }
+ int32_t frameId = entry.data.i32[0];
+ ALOGV("%s: Got frame with ID %d", __FUNCTION__, frameId);
+
+ sp<FilteredListener> listener;
+ {
+ Mutex::Autolock l(mInputMutex);
+ ssize_t listenerIndex = mListeners.indexOfKey(frameId);
+ if (listenerIndex != NAME_NOT_FOUND) {
+ listener = mListeners[listenerIndex].promote();
+ if (listener == 0) {
+ mListeners.removeItemsAt(listenerIndex, 1);
+ }
+ }
+ }
+
+ if (listener != 0) {
+ listener->onFrameAvailable(frameId, frame);
+ }
+ return OK;
+}
+
+status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
+ sp<Camera2Client> &client) {
status_t res;
+ ATRACE_CALL();
camera_metadata_ro_entry_t entry;
bool enableFaceDetect;
int maxFaces;
@@ -209,6 +265,5 @@ status_t FrameProcessor::processFaceDetect(
return OK;
}
-
}; // namespace camera2
}; // namespace android
diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.h b/services/camera/libcameraservice/camera2/FrameProcessor.h
index 2cdf7f0..25d489a 100644
--- a/services/camera/libcameraservice/camera2/FrameProcessor.h
+++ b/services/camera/libcameraservice/camera2/FrameProcessor.h
@@ -20,6 +20,7 @@
#include <utils/Thread.h>
#include <utils/String16.h>
#include <utils/Vector.h>
+#include <utils/KeyedVector.h>
#include "CameraMetadata.h"
namespace android {
@@ -36,6 +37,17 @@ class FrameProcessor: public Thread {
FrameProcessor(wp<Camera2Client> client);
~FrameProcessor();
+ struct FilteredListener: virtual public RefBase {
+ // Listener may take ownership of frame
+ virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame) = 0;
+ };
+
+ // Register a listener for a specific frame ID (android.request.id).
+ // De-registers any existing listeners for that ID
+ status_t registerListener(int32_t id, wp<FilteredListener> listener);
+
+ status_t removeListener(int32_t id);
+
void dump(int fd, const Vector<String16>& args);
private:
static const nsecs_t kWaitDuration = 10000000; // 10 ms
@@ -43,10 +55,17 @@ class FrameProcessor: public Thread {
virtual bool threadLoop();
+ Mutex mInputMutex;
+ KeyedVector<int32_t, wp<FilteredListener> > mListeners;
+
void processNewFrames(sp<Camera2Client> &client);
+
status_t processFaceDetect(const CameraMetadata &frame,
sp<Camera2Client> &client);
+ status_t processListener(CameraMetadata &frame,
+ sp<Camera2Client> &client);
+
CameraMetadata mLastFrame;
};
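A minimal sketch of implementing and registering a FilteredListener; only the interface itself comes from this patch, while the frameProcessor and requestId variables are assumptions for illustration:

    class StillCaptureListener : public FrameProcessor::FilteredListener {
      public:
        // Invoked with the metadata for the registered android.request.id.
        // The listener may acquire() the frame to take ownership of it.
        virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame) {
            mFrame.acquire(frame);
        }
      private:
        CameraMetadata mFrame;
    };

    sp<StillCaptureListener> listener = new StillCaptureListener();
    frameProcessor->registerListener(requestId, listener);  // keyed by request ID
    // ... after the result arrives ...
    frameProcessor->removeListener(requestId);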
diff --git a/services/camera/libcameraservice/camera2/CaptureProcessor.cpp b/services/camera/libcameraservice/camera2/JpegProcessor.cpp
index b17f9d2..92148ca 100644
--- a/services/camera/libcameraservice/camera2/CaptureProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/JpegProcessor.cpp
@@ -14,14 +14,14 @@
* limitations under the License.
*/
-#define LOG_TAG "Camera2Client::CaptureProcessor"
+#define LOG_TAG "Camera2Client::JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <utils/Trace.h>
-#include "CaptureProcessor.h"
+#include "JpegProcessor.h"
#include <gui/SurfaceTextureClient.h>
#include "../Camera2Device.h"
#include "../Camera2Client.h"
@@ -30,18 +30,21 @@
namespace android {
namespace camera2 {
-CaptureProcessor::CaptureProcessor(wp<Camera2Client> client):
+JpegProcessor::JpegProcessor(
+ wp<Camera2Client> client,
+ wp<CaptureSequencer> sequencer):
Thread(false),
mClient(client),
+ mSequencer(sequencer),
mCaptureAvailable(false),
mCaptureStreamId(NO_STREAM) {
}
-CaptureProcessor::~CaptureProcessor() {
+JpegProcessor::~JpegProcessor() {
ALOGV("%s: Exit", __FUNCTION__);
}
-void CaptureProcessor::onFrameAvailable() {
+void JpegProcessor::onFrameAvailable() {
Mutex::Autolock l(mInputMutex);
if (!mCaptureAvailable) {
mCaptureAvailable = true;
@@ -49,7 +52,7 @@ void CaptureProcessor::onFrameAvailable() {
}
}
-status_t CaptureProcessor::updateStream(const Parameters &params) {
+status_t JpegProcessor::updateStream(const Parameters &params) {
ATRACE_CALL();
ALOGV("%s", __FUNCTION__);
status_t res;
@@ -127,7 +130,7 @@ status_t CaptureProcessor::updateStream(const Parameters &params) {
return OK;
}
-status_t CaptureProcessor::deleteStream() {
+status_t JpegProcessor::deleteStream() {
ATRACE_CALL();
status_t res;
@@ -144,15 +147,15 @@ status_t CaptureProcessor::deleteStream() {
return OK;
}
-int CaptureProcessor::getStreamId() const {
+int JpegProcessor::getStreamId() const {
Mutex::Autolock l(mInputMutex);
return mCaptureStreamId;
}
-void CaptureProcessor::dump(int fd, const Vector<String16>& args) {
+void JpegProcessor::dump(int fd, const Vector<String16>& args) const {
}
-bool CaptureProcessor::threadLoop() {
+bool JpegProcessor::threadLoop() {
status_t res;
{
@@ -174,7 +177,7 @@ bool CaptureProcessor::threadLoop() {
return true;
}
-status_t CaptureProcessor::processNewCapture(sp<Camera2Client> &client) {
+status_t JpegProcessor::processNewCapture(sp<Camera2Client> &client) {
ATRACE_CALL();
status_t res;
sp<Camera2Heap> captureHeap;
@@ -200,10 +203,7 @@ status_t CaptureProcessor::processNewCapture(sp<Camera2Client> &client) {
switch (l.mParameters.state) {
case Parameters::STILL_CAPTURE:
- l.mParameters.state = Parameters::STOPPED;
- break;
case Parameters::VIDEO_SNAPSHOT:
- l.mParameters.state = Parameters::RECORD;
break;
default:
ALOGE("%s: Camera %d: Still image produced unexpectedly "
@@ -224,6 +224,11 @@ status_t CaptureProcessor::processNewCapture(sp<Camera2Client> &client) {
return OK;
}
+ sp<CaptureSequencer> sequencer = mSequencer.promote();
+ if (sequencer != 0) {
+ sequencer->onCaptureAvailable(imgBuffer.timestamp);
+ }
+
// TODO: Optimize this to avoid memcopy
void* captureMemory = mCaptureHeap->mHeap->getBase();
size_t size = mCaptureHeap->mHeap->getSize();
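The onCaptureAvailable() call added above goes through a weak pointer, so the JPEG path does not keep the sequencer alive. A small self-contained sketch of that promote-then-notify pattern, with illustrative class names:

    #include <utils/RefBase.h>
    #include <utils/Timers.h>

    using namespace android;

    class Consumer : public virtual RefBase {
      public:
        void onCaptureAvailable(nsecs_t timestamp) { /* record timestamp */ }
    };

    class Producer {
      public:
        Producer(const wp<Consumer> &consumer) : mConsumer(consumer) {}

        void notifyResult(nsecs_t timestamp) {
            sp<Consumer> consumer = mConsumer.promote();
            if (consumer != 0) {               // consumer may already be gone
                consumer->onCaptureAvailable(timestamp);
            }
        }
      private:
        wp<Consumer> mConsumer;                // weak: no ownership cycle
    };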
diff --git a/services/camera/libcameraservice/camera2/CaptureProcessor.h b/services/camera/libcameraservice/camera2/JpegProcessor.h
index 8e35739..6e7a860 100644
--- a/services/camera/libcameraservice/camera2/CaptureProcessor.h
+++ b/services/camera/libcameraservice/camera2/JpegProcessor.h
@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CAPTUREPROCESSOR_H
-#define ANDROID_SERVERS_CAMERA_CAMERA2_CAPTUREPROCESSOR_H
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_JPEGPROCESSOR_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_JPEGPROCESSOR_H
#include <utils/Thread.h>
#include <utils/String16.h>
@@ -33,14 +33,16 @@ class Camera2Client;
namespace camera2 {
+class CaptureSequencer;
+
/***
* Still image capture output image processing
*/
-class CaptureProcessor:
+class JpegProcessor:
public Thread, public CpuConsumer::FrameAvailableListener {
public:
- CaptureProcessor(wp<Camera2Client> client);
- ~CaptureProcessor();
+ JpegProcessor(wp<Camera2Client> client, wp<CaptureSequencer> sequencer);
+ ~JpegProcessor();
void onFrameAvailable();
@@ -48,10 +50,11 @@ class CaptureProcessor:
status_t deleteStream();
int getStreamId() const;
- void dump(int fd, const Vector<String16>& args);
+ void dump(int fd, const Vector<String16>& args) const;
private:
static const nsecs_t kWaitDuration = 10000000; // 10 ms
wp<Camera2Client> mClient;
+ wp<CaptureSequencer> mSequencer;
mutable Mutex mInputMutex;
bool mCaptureAvailable;
diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp
index 2f7d023..1cad2ae 100644
--- a/services/camera/libcameraservice/camera2/Parameters.cpp
+++ b/services/camera/libcameraservice/camera2/Parameters.cpp
@@ -18,6 +18,9 @@
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
#include <math.h>
#include <stdlib.h>
@@ -738,9 +741,11 @@ status_t Parameters::initialize(const CameraMetadata *info) {
enableFaceDetect = false;
enableFocusMoveMessages = false;
- afTriggerCounter = 0;
+ afTriggerCounter = 1;
currentAfTriggerId = -1;
+ precaptureTriggerCounter = 1;
+
previewCallbackFlags = 0;
state = STOPPED;
@@ -1318,6 +1323,202 @@ status_t Parameters::set(const String8& params) {
return OK;
}
+status_t Parameters::updateRequest(CameraMetadata *request) const {
+ ATRACE_CALL();
+ status_t res;
+
+ uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
+ res = request->update(ANDROID_REQUEST_METADATA_MODE,
+ &metadataMode, 1);
+ if (res != OK) return res;
+
+ res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ previewFpsRange, 2);
+ if (res != OK) return res;
+
+ uint8_t reqWbMode = autoWhiteBalanceLock ?
+ (uint8_t)ANDROID_CONTROL_AWB_LOCKED : wbMode;
+ res = request->update(ANDROID_CONTROL_AWB_MODE,
+ &reqWbMode, 1);
+ if (res != OK) return res;
+ res = request->update(ANDROID_CONTROL_EFFECT_MODE,
+ &effectMode, 1);
+ if (res != OK) return res;
+ res = request->update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+ &antibandingMode, 1);
+ if (res != OK) return res;
+
+ uint8_t reqControlMode =
+ (sceneMode == ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) ?
+ ANDROID_CONTROL_AUTO : ANDROID_CONTROL_USE_SCENE_MODE;
+ res = request->update(ANDROID_CONTROL_MODE,
+ &reqControlMode, 1);
+ if (res != OK) return res;
+ if (reqControlMode == ANDROID_CONTROL_USE_SCENE_MODE) {
+ res = request->update(ANDROID_CONTROL_SCENE_MODE,
+ &sceneMode, 1);
+ if (res != OK) return res;
+ }
+
+ uint8_t reqFlashMode = ANDROID_FLASH_OFF;
+ uint8_t reqAeMode;
+ switch (flashMode) {
+ case Parameters::FLASH_MODE_OFF:
+ reqAeMode = ANDROID_CONTROL_AE_ON; break;
+ case Parameters::FLASH_MODE_AUTO:
+ reqAeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH; break;
+ case Parameters::FLASH_MODE_ON:
+ reqAeMode = ANDROID_CONTROL_AE_ON_ALWAYS_FLASH; break;
+ case Parameters::FLASH_MODE_TORCH:
+ reqAeMode = ANDROID_CONTROL_AE_ON;
+ reqFlashMode = ANDROID_FLASH_TORCH;
+ break;
+ case Parameters::FLASH_MODE_RED_EYE:
+ reqAeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE; break;
+ default:
+ ALOGE("%s: Camera %d: Unknown flash mode %d", __FUNCTION__,
+ cameraId, flashMode);
+ return BAD_VALUE;
+ }
+ if (autoExposureLock) reqAeMode = ANDROID_CONTROL_AE_LOCKED;
+
+ res = request->update(ANDROID_FLASH_MODE,
+ &reqFlashMode, 1);
+ if (res != OK) return res;
+ res = request->update(ANDROID_CONTROL_AE_MODE,
+ &reqAeMode, 1);
+ if (res != OK) return res;
+
+ float reqFocusDistance = 0; // infinity focus in diopters
+ uint8_t reqFocusMode;
+ switch (focusMode) {
+ case Parameters::FOCUS_MODE_AUTO:
+ case Parameters::FOCUS_MODE_MACRO:
+ case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
+ case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
+ case Parameters::FOCUS_MODE_EDOF:
+ reqFocusMode = focusMode;
+ break;
+ case Parameters::FOCUS_MODE_INFINITY:
+ case Parameters::FOCUS_MODE_FIXED:
+ reqFocusMode = ANDROID_CONTROL_AF_OFF;
+ break;
+ default:
+ ALOGE("%s: Camera %d: Unknown focus mode %d", __FUNCTION__,
+ cameraId, focusMode);
+ return BAD_VALUE;
+ }
+ res = request->update(ANDROID_LENS_FOCUS_DISTANCE,
+ &reqFocusDistance, 1);
+ if (res != OK) return res;
+ res = request->update(ANDROID_CONTROL_AF_MODE,
+ &reqFocusMode, 1);
+ if (res != OK) return res;
+
+ size_t reqFocusingAreasSize = focusingAreas.size() * 5;
+ int32_t *reqFocusingAreas = new int32_t[reqFocusingAreasSize];
+ for (size_t i = 0; i < reqFocusingAreasSize; i += 5) {
+ if (focusingAreas[i].weight != 0) {
+ reqFocusingAreas[i + 0] =
+ normalizedXToArray(focusingAreas[i].left);
+ reqFocusingAreas[i + 1] =
+ normalizedYToArray(focusingAreas[i].top);
+ reqFocusingAreas[i + 2] =
+ normalizedXToArray(focusingAreas[i].right);
+ reqFocusingAreas[i + 3] =
+ normalizedYToArray(focusingAreas[i].bottom);
+ } else {
+ reqFocusingAreas[i + 0] = 0;
+ reqFocusingAreas[i + 1] = 0;
+ reqFocusingAreas[i + 2] = 0;
+ reqFocusingAreas[i + 3] = 0;
+ }
+ reqFocusingAreas[i + 4] = focusingAreas[i].weight;
+ }
+ res = request->update(ANDROID_CONTROL_AF_REGIONS,
+ reqFocusingAreas, reqFocusingAreasSize);
+ if (res != OK) return res;
+ delete[] reqFocusingAreas;
+
+ res = request->update(ANDROID_CONTROL_AE_EXP_COMPENSATION,
+ &exposureCompensation, 1);
+ if (res != OK) return res;
+
+ size_t reqMeteringAreasSize = meteringAreas.size() * 5;
+ int32_t *reqMeteringAreas = new int32_t[reqMeteringAreasSize];
+ for (size_t i = 0; i < reqMeteringAreasSize; i += 5) {
+ if (meteringAreas[i].weight != 0) {
+ reqMeteringAreas[i + 0] =
+ normalizedXToArray(meteringAreas[i].left);
+ reqMeteringAreas[i + 1] =
+ normalizedYToArray(meteringAreas[i].top);
+ reqMeteringAreas[i + 2] =
+ normalizedXToArray(meteringAreas[i].right);
+ reqMeteringAreas[i + 3] =
+ normalizedYToArray(meteringAreas[i].bottom);
+ } else {
+ reqMeteringAreas[i + 0] = 0;
+ reqMeteringAreas[i + 1] = 0;
+ reqMeteringAreas[i + 2] = 0;
+ reqMeteringAreas[i + 3] = 0;
+ }
+ reqMeteringAreas[i + 4] = meteringAreas[i].weight;
+ }
+ res = request->update(ANDROID_CONTROL_AE_REGIONS,
+ reqMeteringAreas, reqMeteringAreasSize);
+ if (res != OK) return res;
+
+ res = request->update(ANDROID_CONTROL_AWB_REGIONS,
+ reqMeteringAreas, reqMeteringAreasSize);
+ if (res != OK) return res;
+ delete[] reqMeteringAreas;
+
+ // Need to convert zoom index into a crop rectangle. The rectangle is
+ // chosen to maximize its area on the sensor
+
+ camera_metadata_ro_entry_t maxDigitalZoom =
+ staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM);
+ float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) /
+ (NUM_ZOOM_STEPS-1);
+ float zoomRatio = 1 + zoomIncrement * zoom;
+
+ float zoomLeft, zoomTop, zoomWidth, zoomHeight;
+ if (previewWidth >= previewHeight) {
+ zoomWidth = fastInfo.arrayWidth / zoomRatio;
+ zoomHeight = zoomWidth *
+ previewHeight / previewWidth;
+ } else {
+ zoomHeight = fastInfo.arrayHeight / zoomRatio;
+ zoomWidth = zoomHeight *
+ previewWidth / previewHeight;
+ }
+ zoomLeft = (fastInfo.arrayWidth - zoomWidth) / 2;
+ zoomTop = (fastInfo.arrayHeight - zoomHeight) / 2;
+
+ int32_t reqCropRegion[3] = { zoomLeft, zoomTop, zoomWidth };
+ res = request->update(ANDROID_SCALER_CROP_REGION,
+ reqCropRegion, 3);
+ if (res != OK) return res;
+
+ // TODO: Decide how to map recordingHint, or whether just to ignore it
+
+ uint8_t reqVstabMode = videoStabilization ?
+ ANDROID_CONTROL_VIDEO_STABILIZATION_ON :
+ ANDROID_CONTROL_VIDEO_STABILIZATION_OFF;
+ res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ &reqVstabMode, 1);
+ if (res != OK) return res;
+
+ uint8_t reqFaceDetectMode = enableFaceDetect ?
+ fastInfo.bestFaceDetectMode :
+ (uint8_t)ANDROID_STATS_FACE_DETECTION_OFF;
+ res = request->update(ANDROID_STATS_FACE_DETECT_MODE,
+ &reqFaceDetectMode, 1);
+ if (res != OK) return res;
+
+ return OK;
+}
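
As a worked instance of the crop computation above, assuming illustrative values not taken from this patch: a 3200x2400 active array, 4x maximum digital zoom, NUM_ZOOM_STEPS of 30, and a 640x480 preview:

    // zoomIncrement = (4.0 - 1) / (30 - 1)              ~= 0.103
    // at zoom index 10: zoomRatio = 1 + 0.103 * 10      ~= 2.03
    // landscape preview: zoomWidth  = 3200 / 2.03       ~= 1573
    //                    zoomHeight = 1573 * 480 / 640  ~= 1180
    //                    zoomLeft   = (3200 - 1573) / 2 ~=  813
    //                    zoomTop    = (2400 - 1180) / 2 ~=  610
    // The request then carries ANDROID_SCALER_CROP_REGION = { 813, 610, 1573 }
    // as (left, top, width), truncated to int32_t.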
+
const char* Parameters::getStateName(State state) {
#define CASE_ENUM_TO_CHAR(x) case x: return(#x); break;
switch(state) {
diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h
index 817d001..e71d086 100644
--- a/services/camera/libcameraservice/camera2/Parameters.h
+++ b/services/camera/libcameraservice/camera2/Parameters.h
@@ -29,12 +29,17 @@
namespace android {
namespace camera2 {
-// Current camera state; this is the full state of the Camera under the old
-// camera API (contents of the CameraParameters object in a more-efficient
-// format, plus other state). The enum values are mostly based off the
-// corresponding camera2 enums, not the camera1 strings. A few are defined here
-// if they don't cleanly map to camera2 values.
+/**
+ * Current camera state; this is the full state of the Camera under the old
+ * camera API (contents of the CameraParameters object in a more-efficient
+ * format, plus other state). The enum values are mostly based off the
+ * corresponding camera2 enums, not the camera1 strings. A few are defined here
+ * if they don't cleanly map to camera2 values.
+ */
struct Parameters {
+ /**
+ * Parameters and other state
+ */
int cameraId;
int cameraFacing;
@@ -117,9 +122,13 @@ struct Parameters {
int currentAfTriggerId;
bool afInMotion;
+ int precaptureTriggerCounter;
+
uint32_t previewCallbackFlags;
bool previewCallbackOneShot;
+ bool zslMode;
+
// Overall camera state
enum State {
DISCONNECTED,
@@ -149,7 +158,9 @@ struct Parameters {
int32_t maxFaces;
} fastInfo;
- // Parameter manipulation and setup methods
+ /**
+ * Parameter manipulation and setup methods
+ */
Parameters(int cameraId, int cameraFacing);
~Parameters();
@@ -170,6 +181,9 @@ struct Parameters {
// Validate and update camera parameters based on new settings
status_t set(const String8 &params);
+ // Update passed-in request for common parameters
+ status_t updateRequest(CameraMetadata *request) const;
+
// Static methods for debugging and converting between camera1 and camera2
// parameters
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
new file mode 100644
index 0000000..a39585e
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
@@ -0,0 +1,378 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera2Client::ZslProcessor"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include "ZslProcessor.h"
+#include <gui/SurfaceTextureClient.h>
+#include "../Camera2Device.h"
+#include "../Camera2Client.h"
+
+
+namespace android {
+namespace camera2 {
+
+ZslProcessor::ZslProcessor(
+ wp<Camera2Client> client,
+ wp<CaptureSequencer> sequencer):
+ Thread(false),
+ mState(RUNNING),
+ mClient(client),
+ mSequencer(sequencer),
+ mZslBufferAvailable(false),
+ mZslStreamId(NO_STREAM),
+ mZslReprocessStreamId(NO_STREAM),
+ mFrameListHead(0),
+ mZslQueueHead(0),
+ mZslQueueTail(0) {
+ mZslQueue.insertAt(0, kZslBufferDepth);
+ mFrameList.insertAt(0, kFrameListDepth);
+ sp<CaptureSequencer> captureSequencer = mSequencer.promote();
+ if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
+}
+
+ZslProcessor::~ZslProcessor() {
+ ALOGV("%s: Exit", __FUNCTION__);
+}
+
+void ZslProcessor::onFrameAvailable() {
+ Mutex::Autolock l(mInputMutex);
+ if (!mZslBufferAvailable) {
+ mZslBufferAvailable = true;
+ mZslBufferAvailableSignal.signal();
+ }
+}
+
+void ZslProcessor::onFrameAvailable(int32_t frameId, CameraMetadata &frame) {
+ Mutex::Autolock l(mInputMutex);
+ camera_metadata_entry_t entry;
+ entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+ nsecs_t timestamp = entry.data.i64[0];
+ ALOGVV("Got preview frame for timestamp %lld", timestamp);
+
+ if (mState != RUNNING) return;
+
+ mFrameList.editItemAt(mFrameListHead).acquire(frame);
+ mFrameListHead = (mFrameListHead + 1) % kFrameListDepth;
+
+ findMatchesLocked();
+}
+
+void ZslProcessor::onBufferReleased(buffer_handle_t *handle) {
+ Mutex::Autolock l(mInputMutex);
+
+ buffer_handle_t *expectedHandle =
+ &(mZslQueue[mZslQueueTail].buffer.mGraphicBuffer->handle);
+
+ if (handle != expectedHandle) {
+ ALOGE("%s: Expected buffer %p, got buffer %p",
+ __FUNCTION__, expectedHandle, handle);
+ }
+
+ mState = RUNNING;
+}
+
+status_t ZslProcessor::updateStream(const Parameters &params) {
+ ATRACE_CALL();
+ ALOGV("%s: Configuring ZSL streams", __FUNCTION__);
+ status_t res;
+
+ Mutex::Autolock l(mInputMutex);
+
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) return OK;
+ sp<Camera2Device> device = client->getCameraDevice();
+
+ if (mZslConsumer == 0) {
+ // Create CPU buffer queue endpoint
+ mZslConsumer = new BufferItemConsumer(
+ GRALLOC_USAGE_HW_CAMERA_ZSL,
+ kZslBufferDepth,
+ true);
+ mZslConsumer->setFrameAvailableListener(this);
+ mZslConsumer->setName(String8("Camera2Client::ZslConsumer"));
+ mZslWindow = new SurfaceTextureClient(
+ mZslConsumer->getProducerInterface());
+ }
+
+ if (mZslStreamId != NO_STREAM) {
+ // Check if stream parameters have to change
+ uint32_t currentWidth, currentHeight;
+ res = device->getStreamInfo(mZslStreamId,
+ &currentWidth, &currentHeight, 0);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Error querying capture output stream info: "
+ "%s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ return res;
+ }
+ if (currentWidth != (uint32_t)params.pictureWidth ||
+ currentHeight != (uint32_t)params.pictureHeight) {
+ res = device->deleteStream(mZslReprocessStreamId);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to delete old reprocess stream "
+ "for ZSL: %s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ return res;
+ }
+ res = device->deleteStream(mZslStreamId);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to delete old output stream "
+ "for ZSL: %s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ return res;
+ }
+ mZslStreamId = NO_STREAM;
+ }
+ }
+
+ if (mZslStreamId == NO_STREAM) {
+ // Create stream for HAL production
+ res = device->createStream(mZslWindow,
+ params.pictureWidth, params.pictureHeight,
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 0,
+ &mZslStreamId);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Can't create output stream for ZSL: "
+ "%s (%d)", __FUNCTION__, client->getCameraId(),
+ strerror(-res), res);
+ return res;
+ }
+ res = device->createReprocessStreamFromStream(mZslStreamId,
+ &mZslReprocessStreamId);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Can't create reprocess stream for ZSL: "
+ "%s (%d)", __FUNCTION__, client->getCameraId(),
+ strerror(-res), res);
+ return res;
+ }
+ }
+ client->registerFrameListener(Camera2Client::kPreviewRequestId, this);
+
+ return OK;
+}
+
+status_t ZslProcessor::deleteStream() {
+ ATRACE_CALL();
+ status_t res;
+
+ Mutex::Autolock l(mInputMutex);
+
+ if (mZslStreamId != NO_STREAM) {
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) return OK;
+ sp<Camera2Device> device = client->getCameraDevice();
+
+ device->deleteStream(mZslReprocessStreamId);
+ mZslReprocessStreamId = NO_STREAM;
+ device->deleteStream(mZslStreamId);
+ mZslStreamId = NO_STREAM;
+ }
+ return OK;
+}
+
+int ZslProcessor::getStreamId() const {
+ Mutex::Autolock l(mInputMutex);
+ return mZslStreamId;
+}
+
+int ZslProcessor::getReprocessStreamId() const {
+ Mutex::Autolock l(mInputMutex);
+ return mZslReprocessStreamId;
+}
+
+status_t ZslProcessor::pushToReprocess(int32_t requestId) {
+ ALOGV("%s: Send in reprocess request with id %d",
+ __FUNCTION__, requestId);
+ Mutex::Autolock l(mInputMutex);
+ status_t res;
+ sp<Camera2Client> client = mClient.promote();
+
+ if (client == 0) return INVALID_OPERATION;
+
+ if (mZslQueueTail != mZslQueueHead) {
+ buffer_handle_t *handle =
+ &(mZslQueue[mZslQueueTail].buffer.mGraphicBuffer->handle);
+ CameraMetadata request = mZslQueue[mZslQueueTail].frame;
+ uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
+ res = request.update(ANDROID_REQUEST_TYPE,
+ &requestType, 1);
+ uint8_t inputStreams[1] = { mZslReprocessStreamId };
+ if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS,
+ inputStreams, 1);
+ uint8_t outputStreams[1] = { client->getCaptureStreamId() };
+ if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+ outputStreams, 1);
+ res = request.update(ANDROID_REQUEST_ID,
+ &requestId, 1);
+
+ if (res != OK ) {
+ ALOGE("%s: Unable to update frame to a reprocess request", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ res = client->getCameraDevice()->pushReprocessBuffer(mZslReprocessStreamId,
+ handle, this);
+ if (res != OK) {
+ ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ res = client->getCameraDevice()->capture(request);
+ if (res != OK ) {
+ ALOGE("%s: Unable to send ZSL reprocess request to capture: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ mState = LOCKED;
+ } else {
+ ALOGE("%s: Nothing to push", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+void ZslProcessor::dump(int fd, const Vector<String16>& args) const {
+}
+
+bool ZslProcessor::threadLoop() {
+ status_t res;
+
+ {
+ Mutex::Autolock l(mInputMutex);
+ while (!mZslBufferAvailable) {
+ res = mZslBufferAvailableSignal.waitRelative(mInputMutex,
+ kWaitDuration);
+ if (res == TIMED_OUT) return true;
+ }
+ mZslBufferAvailable = false;
+ }
+
+ do {
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) return false;
+ res = processNewZslBuffer(client);
+ } while (res == OK);
+
+ return true;
+}
+
+status_t ZslProcessor::processNewZslBuffer(sp<Camera2Client> &client) {
+ ATRACE_CALL();
+ status_t res;
+ Mutex::Autolock l(mInputMutex);
+
+ if (mState == LOCKED) {
+ BufferItemConsumer::BufferItem item;
+ res = mZslConsumer->acquireBuffer(&item);
+ if (res != OK) {
+ if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
+ ALOGE("%s: Camera %d: Error receiving ZSL image buffer: "
+ "%s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ }
+ return res;
+ }
+ mZslConsumer->releaseBuffer(item);
+ return OK;
+ }
+
+ ALOGVV("Got ZSL buffer: head: %d, tail: %d", mZslQueueHead, mZslQueueTail);
+
+ if ( (mZslQueueHead + 1) % kZslBufferDepth == mZslQueueTail) {
+ mZslConsumer->releaseBuffer(mZslQueue[mZslQueueTail].buffer);
+ mZslQueue.replaceAt(mZslQueueTail);
+ mZslQueueTail = (mZslQueueTail + 1) % kZslBufferDepth;
+ }
+
+ ZslPair &queueHead = mZslQueue.editItemAt(mZslQueueHead);
+
+ res = mZslConsumer->acquireBuffer(&(queueHead.buffer));
+ if (res != OK) {
+ if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
+ ALOGE("%s: Camera %d: Error receiving ZSL image buffer: "
+ "%s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ }
+ return res;
+ }
+ queueHead.frame.release();
+
+ mZslQueueHead = (mZslQueueHead + 1) % kZslBufferDepth;
+
+ ALOGVV(" Added buffer, timestamp %lld", queueHead.buffer.mTimestamp);
+
+ findMatchesLocked();
+
+ return OK;
+}
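
processNewZslBuffer() above treats mZslQueue as a fixed-size ring buffer, evicting the oldest entry when full. A small sketch of just the index arithmetic it relies on:

    static const size_t kZslBufferDepth = 3;   // as in ZslProcessor.h

    // Advance the head index; if the ring is full, the entry at the tail is
    // dropped first, matching the releaseBuffer()/replaceAt() step above.
    static void advanceRing(size_t &head, size_t &tail) {
        if ((head + 1) % kZslBufferDepth == tail) {
            tail = (tail + 1) % kZslBufferDepth;   // evict oldest
        }
        head = (head + 1) % kZslBufferDepth;
    }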
+
+void ZslProcessor::findMatchesLocked() {
+ for (size_t i = 0; i < mZslQueue.size(); i++) {
+ ZslPair &queueEntry = mZslQueue.editItemAt(i);
+ nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
+ if (queueEntry.frame.isEmpty() && bufferTimestamp != 0) {
+ // Have buffer, no matching frame. Look for one
+ for (size_t j = 0; j < mFrameList.size(); j++) {
+ bool match = false;
+ CameraMetadata &frame = mFrameList.editItemAt(j);
+ if (!frame.isEmpty()) {
+ camera_metadata_entry_t entry;
+ entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+ if (entry.count == 0) {
+ ALOGE("%s: Can't find timestamp in frame!",
+ __FUNCTION__);
+ continue;
+ }
+ nsecs_t frameTimestamp = entry.data.i64[0];
+ if (bufferTimestamp == frameTimestamp) {
+ ALOGVV("%s: Found match %lld", __FUNCTION__,
+ frameTimestamp);
+ match = true;
+ } else {
+ int64_t delta = bufferTimestamp > frameTimestamp ?
+ bufferTimestamp - frameTimestamp :
+ frameTimestamp - bufferTimestamp;
+ if ( delta < 1000000) {
+ ALOGVV("%s: Found close match %lld (delta %lld)",
+ __FUNCTION__, bufferTimestamp, delta);
+ match = true;
+ }
+ }
+ }
+ if (match) {
+ queueEntry.frame.acquire(frame);
+ break;
+ }
+ }
+ }
+ }
+}
+
+}; // namespace camera2
+}; // namespace android
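findMatchesLocked() pairs a ZSL buffer with frame metadata when the sensor timestamps agree exactly or differ by less than 1 ms; the predicate, extracted as a self-contained sketch:

    #include <stdint.h>

    typedef int64_t nsecs_t;   // stand-in for the Android typedef

    // True if a buffer timestamp and a metadata timestamp describe the
    // same capture: exact match, or within 1 ms (1,000,000 ns).
    static bool timestampsMatch(nsecs_t bufferTs, nsecs_t frameTs) {
        if (bufferTs == frameTs) return true;
        int64_t delta = bufferTs > frameTs ? bufferTs - frameTs
                                           : frameTs - bufferTs;
        return delta < 1000000;
    }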
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/camera2/ZslProcessor.h
new file mode 100644
index 0000000..74921a3
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/ZslProcessor.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR_H
+
+#include <utils/Thread.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include <gui/BufferItemConsumer.h>
+#include "Parameters.h"
+#include "FrameProcessor.h"
+#include "CameraMetadata.h"
+#include "Camera2Heap.h"
+#include "../Camera2Device.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+class CaptureSequencer;
+
+/***
+ * ZSL queue processing
+ */
+class ZslProcessor:
+ virtual public Thread,
+ virtual public BufferItemConsumer::FrameAvailableListener,
+ virtual public FrameProcessor::FilteredListener,
+ virtual public Camera2Device::BufferReleasedListener {
+ public:
+ ZslProcessor(wp<Camera2Client> client, wp<CaptureSequencer> sequencer);
+ ~ZslProcessor();
+
+ // From mZslConsumer
+ virtual void onFrameAvailable();
+ // From FrameProcessor
+ virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame);
+
+ virtual void onBufferReleased(buffer_handle_t *handle);
+
+ status_t updateStream(const Parameters &params);
+ status_t deleteStream();
+ int getStreamId() const;
+ int getReprocessStreamId() const;
+
+ status_t pushToReprocess(int32_t requestId);
+
+ void dump(int fd, const Vector<String16>& args) const;
+ private:
+ static const nsecs_t kWaitDuration = 10000000; // 10 ms
+
+ enum {
+ RUNNING,
+ LOCKED
+ } mState;
+
+ wp<Camera2Client> mClient;
+ wp<CaptureSequencer> mSequencer;
+
+ mutable Mutex mInputMutex;
+ bool mZslBufferAvailable;
+ Condition mZslBufferAvailableSignal;
+
+ enum {
+ NO_STREAM = -1
+ };
+
+ int mZslStreamId;
+ int mZslReprocessStreamId;
+ sp<BufferItemConsumer> mZslConsumer;
+ sp<ANativeWindow> mZslWindow;
+
+ struct ZslPair {
+ BufferItemConsumer::BufferItem buffer;
+ CameraMetadata frame;
+ };
+
+ static const size_t kZslBufferDepth = 3;
+ static const size_t kFrameListDepth = kZslBufferDepth * 2;
+ Vector<CameraMetadata> mFrameList;
+ size_t mFrameListHead;
+
+ ZslPair mNextPair;
+
+ Vector<ZslPair> mZslQueue;
+ size_t mZslQueueHead;
+ size_t mZslQueueTail;
+
+ virtual bool threadLoop();
+
+ status_t processNewZslBuffer(sp<Camera2Client> &client);
+
+ // Match up entries from frame list to buffers in ZSL queue
+ void findMatchesLocked();
+};
+
+
+}; //namespace camera2
+}; //namespace android
+
+#endif