summaryrefslogtreecommitdiffstats
path: root/services/camera/libcameraservice/camera2
diff options
context:
space:
mode:
Diffstat (limited to 'services/camera/libcameraservice/camera2')
-rw-r--r--services/camera/libcameraservice/camera2/BurstCapture.cpp112
-rw-r--r--services/camera/libcameraservice/camera2/BurstCapture.h71
-rw-r--r--services/camera/libcameraservice/camera2/CallbackProcessor.cpp8
-rw-r--r--services/camera/libcameraservice/camera2/CallbackProcessor.h2
-rw-r--r--services/camera/libcameraservice/camera2/CameraMetadata.cpp4
-rw-r--r--services/camera/libcameraservice/camera2/CameraMetadata.h5
-rw-r--r--services/camera/libcameraservice/camera2/CaptureSequencer.cpp595
-rw-r--r--services/camera/libcameraservice/camera2/CaptureSequencer.h161
-rw-r--r--services/camera/libcameraservice/camera2/FrameProcessor.cpp67
-rw-r--r--services/camera/libcameraservice/camera2/FrameProcessor.h19
-rw-r--r--services/camera/libcameraservice/camera2/JpegCompressor.cpp220
-rw-r--r--services/camera/libcameraservice/camera2/JpegCompressor.h107
-rw-r--r--services/camera/libcameraservice/camera2/JpegProcessor.cpp (renamed from services/camera/libcameraservice/camera2/CaptureProcessor.cpp)39
-rw-r--r--services/camera/libcameraservice/camera2/JpegProcessor.h (renamed from services/camera/libcameraservice/camera2/CaptureProcessor.h)15
-rw-r--r--services/camera/libcameraservice/camera2/Parameters.cpp228
-rw-r--r--services/camera/libcameraservice/camera2/Parameters.h35
-rw-r--r--services/camera/libcameraservice/camera2/ZslProcessor.cpp383
-rw-r--r--services/camera/libcameraservice/camera2/ZslProcessor.h119
18 files changed, 2154 insertions, 36 deletions
diff --git a/services/camera/libcameraservice/camera2/BurstCapture.cpp b/services/camera/libcameraservice/camera2/BurstCapture.cpp
new file mode 100644
index 0000000..5020819
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/BurstCapture.cpp
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "BurstCapture"
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include "BurstCapture.h"
+
+#include "JpegCompressor.h"
+#include "../Camera2Client.h"
+
+namespace android {
+namespace camera2 {
+
+BurstCapture::BurstCapture(wp<Camera2Client> client, wp<CaptureSequencer> sequencer):
+        // BUGFIX: mInputChanged was never initialized; it is read under
+        // mInputMutex in onFrameAvailable() and threadLoop() before any
+        // writer runs, which was undefined behavior.
+        mInputChanged(false),
+        mCaptureStreamId(NO_STREAM),
+        mClient(client),
+        mSequencer(sequencer)
+{
+}
+
+BurstCapture::~BurstCapture() {
+}
+
+// Base implementation is a stub; subclasses are expected to implement the
+// actual burst request submission.
+status_t BurstCapture::start(Vector<CameraMetadata> &metadatas, int32_t firstCaptureId) {
+    ALOGE("Not completely implemented");
+    return INVALID_OPERATION;
+}
+
+// CpuConsumer callback: mark that new input is pending and wake threadLoop().
+// The flag coalesces multiple notifications into one wakeup.
+void BurstCapture::onFrameAvailable() {
+    ALOGV("%s", __FUNCTION__);
+    Mutex::Autolock l(mInputMutex);
+    if(!mInputChanged) {
+        mInputChanged = true;
+        mInputSignal.signal();
+    }
+}
+
+// Worker loop: block (up to kWaitDuration) for onFrameAvailable() to signal,
+// then drain all available frames via processFrameAvailable().
+bool BurstCapture::threadLoop() {
+    status_t res;
+    {
+        Mutex::Autolock l(mInputMutex);
+        while(!mInputChanged) {
+            res = mInputSignal.waitRelative(mInputMutex, kWaitDuration);
+            // Timed out with no input: return true so Thread re-invokes us.
+            if(res == TIMED_OUT) return true;
+        }
+        mInputChanged = false;
+    }
+
+    do {
+        sp<Camera2Client> client = mClient.promote();
+        // Client is gone; returning false terminates the thread.
+        if(client == 0) return false;
+        ALOGV("%s: Calling processFrameAvailable()", __FUNCTION__);
+        res = processFrameAvailable(client);
+    } while(res == OK);
+
+    return true;
+}
+
+// Synchronously JPEG-encode imgBuffer. On success, returns a heap-allocated
+// LockedBuffer (caller takes ownership of it and its data array); on timeout
+// returns NULL.
+CpuConsumer::LockedBuffer* BurstCapture::jpegEncode(
+    CpuConsumer::LockedBuffer *imgBuffer,
+    int quality)
+{
+    ALOGV("%s", __FUNCTION__);
+
+    // Output descriptor + scratch buffer for the encoded JPEG.
+    CpuConsumer::LockedBuffer *imgEncoded = new CpuConsumer::LockedBuffer;
+    uint8_t *data = new uint8_t[ANDROID_JPEG_MAX_SIZE];
+    imgEncoded->data = data;
+    imgEncoded->width = imgBuffer->width;
+    imgEncoded->height = imgBuffer->height;
+    imgEncoded->stride = imgBuffer->stride;
+
+    // buffers[0] = input, buffers[1] = output.
+    Vector<CpuConsumer::LockedBuffer*> buffers;
+    buffers.push_back(imgBuffer);
+    buffers.push_back(imgEncoded);
+
+    // TODO: 'quality' is currently ignored — plumb it through to the
+    // compressor instead of the hard-coded argument below.
+    sp<JpegCompressor> jpeg = new JpegCompressor();
+    status_t res = jpeg->start(buffers, 1);
+    if (res != OK) {
+        ALOGE("%s: Unable to start JPEG compressor: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+    }
+
+    bool success = jpeg->waitForDone(10 * 1e9);
+    if(success) {
+        return buffers[1];
+    }
+    else {
+        ALOGE("%s: JPEG encode timed out", __FUNCTION__);
+        // BUGFIX: the original leaked both the descriptor and its data
+        // array on this path.
+        delete[] data;
+        delete imgEncoded;
+        return NULL; // TODO: maybe change function return value to status_t
+    }
+}
+
+// Base implementation is a stub; subclasses override this to consume the
+// newly-available frame. Called repeatedly from threadLoop() while it
+// returns OK.
+status_t BurstCapture::processFrameAvailable(sp<Camera2Client> &client) {
+    ALOGE("Not implemented");
+    return INVALID_OPERATION;
+}
+
+} // namespace camera2
+} // namespace android
diff --git a/services/camera/libcameraservice/camera2/BurstCapture.h b/services/camera/libcameraservice/camera2/BurstCapture.h
new file mode 100644
index 0000000..dfc45eb
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/BurstCapture.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_BURST_CAPTURE_H
+#define ANDROID_SERVERS_CAMERA_BURST_CAPTURE_H
+
+#include "camera2/CameraMetadata.h"
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <gui/CpuConsumer.h>
+#include "Camera2Device.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+class CaptureSequencer;
+
+// Base class for burst still-capture modes. Runs its own thread that wakes
+// on CpuConsumer frame notifications and hands frames to
+// processFrameAvailable(); subclasses implement the actual capture logic.
+class BurstCapture : public virtual Thread,
+                     public virtual CpuConsumer::FrameAvailableListener
+{
+public:
+    BurstCapture(wp<Camera2Client> client, wp<CaptureSequencer> sequencer);
+    virtual ~BurstCapture();
+
+    // CpuConsumer::FrameAvailableListener; wakes the processing thread.
+    virtual void onFrameAvailable();
+    // Kick off the burst; base implementation is a stub.
+    virtual status_t start(Vector<CameraMetadata> &metadatas, int32_t firstCaptureId);
+
+protected:
+    // Guards mInputChanged / mInputSignal (shared with the consumer thread).
+    Mutex mInputMutex;
+    bool mInputChanged;
+    Condition mInputSignal;
+    int mCaptureStreamId;
+    wp<Camera2Client> mClient;
+    wp<CaptureSequencer> mSequencer;
+
+    // Should only be accessed by processing thread
+    enum {
+        NO_STREAM = -1
+    };
+
+    // Synchronous JPEG encode; returns a caller-owned buffer, NULL on timeout.
+    CpuConsumer::LockedBuffer* jpegEncode(
+        CpuConsumer::LockedBuffer *imgBuffer,
+        int quality);
+
+    virtual status_t processFrameAvailable(sp<Camera2Client> &client);
+
+private:
+    virtual bool threadLoop();
+    static const nsecs_t kWaitDuration = 10000000; // 10 ms
+};
+
+} // namespace camera2
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
index 854b890..ca917f2 100644
--- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
@@ -39,6 +39,7 @@ CallbackProcessor::CallbackProcessor(wp<Camera2Client> client):
CallbackProcessor::~CallbackProcessor() {
ALOGV("%s: Exit", __FUNCTION__);
+ deleteStream();
}
void CallbackProcessor::onFrameAvailable() {
@@ -126,6 +127,11 @@ status_t CallbackProcessor::deleteStream() {
sp<Camera2Device> device = client->getCameraDevice();
device->deleteStream(mCallbackStreamId);
+
+ mCallbackHeap.clear();
+ mCallbackWindow.clear();
+ mCallbackConsumer.clear();
+
mCallbackStreamId = NO_STREAM;
}
return OK;
@@ -136,7 +142,7 @@ int CallbackProcessor::getStreamId() const {
return mCallbackStreamId;
}
-void CallbackProcessor::dump(int fd, const Vector<String16>& args) {
+void CallbackProcessor::dump(int fd, const Vector<String16>& args) const {
}
bool CallbackProcessor::threadLoop() {
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.h b/services/camera/libcameraservice/camera2/CallbackProcessor.h
index 36c51a3..c2a1372 100644
--- a/services/camera/libcameraservice/camera2/CallbackProcessor.h
+++ b/services/camera/libcameraservice/camera2/CallbackProcessor.h
@@ -48,7 +48,7 @@ class CallbackProcessor:
status_t deleteStream();
int getStreamId() const;
- void dump(int fd, const Vector<String16>& args);
+ void dump(int fd, const Vector<String16>& args) const;
private:
static const nsecs_t kWaitDuration = 10000000; // 10 ms
wp<Camera2Client> mClient;
diff --git a/services/camera/libcameraservice/camera2/CameraMetadata.cpp b/services/camera/libcameraservice/camera2/CameraMetadata.cpp
index 95377b2..8399e20 100644
--- a/services/camera/libcameraservice/camera2/CameraMetadata.cpp
+++ b/services/camera/libcameraservice/camera2/CameraMetadata.cpp
@@ -84,6 +84,10 @@ size_t CameraMetadata::entryCount() const {
get_camera_metadata_entry_count(mBuffer);
}
+// Returns true when the metadata buffer holds no entries.
+bool CameraMetadata::isEmpty() const {
+    return entryCount() == 0;
+}
+
status_t CameraMetadata::sort() {
return sort_camera_metadata(mBuffer);
}
diff --git a/services/camera/libcameraservice/camera2/CameraMetadata.h b/services/camera/libcameraservice/camera2/CameraMetadata.h
index 340414e..aee6cd7 100644
--- a/services/camera/libcameraservice/camera2/CameraMetadata.h
+++ b/services/camera/libcameraservice/camera2/CameraMetadata.h
@@ -87,6 +87,11 @@ class CameraMetadata {
size_t entryCount() const;
/**
+ * Is the buffer empty (no entries)
+ */
+ bool isEmpty() const;
+
+ /**
* Sort metadata buffer for faster find
*/
status_t sort();
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
new file mode 100644
index 0000000..2f8b7db
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
@@ -0,0 +1,595 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera2Client::CaptureSequencer"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include <utils/Vector.h>
+
+#include "CaptureSequencer.h"
+#include "BurstCapture.h"
+#include "../Camera2Device.h"
+#include "../Camera2Client.h"
+#include "Parameters.h"
+
+namespace android {
+namespace camera2 {
+
+/** Public members */
+
+// NOTE(review): mAEState, mAETriggerId, mNewFrameId, mCaptureTimestamp and
+// mAeInPrecapture are not in this init list; they appear to be written by
+// the notification paths / manageStandardStart before being read — confirm,
+// or initialize them here for safety.
+CaptureSequencer::CaptureSequencer(wp<Camera2Client> client):
+        Thread(false),
+        mStartCapture(false),
+        mBusy(false),
+        mNewAEState(false),
+        mNewFrameReceived(false),
+        mNewCaptureReceived(false),
+        mClient(client),
+        mCaptureState(IDLE),
+        mTriggerId(0),
+        mTimeoutCount(0),
+        mCaptureId(Camera2Client::kFirstCaptureRequestId) {
+    ALOGV("%s", __FUNCTION__);
+}
+
+CaptureSequencer::~CaptureSequencer() {
+    ALOGV("%s: Exit", __FUNCTION__);
+}
+
+// Supply the ZSL processor used by the ZSL capture path.
+void CaptureSequencer::setZslProcessor(wp<ZslProcessor> processor) {
+    Mutex::Autolock l(mInputMutex);
+    mZslProcessor = processor;
+}
+
+// Request a still capture; wakes manageIdle(). Fails if a capture is
+// already in flight.
+status_t CaptureSequencer::startCapture() {
+    ALOGV("%s", __FUNCTION__);
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    if (mBusy) {
+        ALOGE("%s: Already busy capturing!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+    if (!mStartCapture) {
+        mStartCapture = true;
+        mStartCaptureSignal.signal();
+    }
+    return OK;
+}
+
+// AE state change notification; consumed by manageStandardPrecaptureWait().
+void CaptureSequencer::notifyAutoExposure(uint8_t newState, int triggerId) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    mAEState = newState;
+    mAETriggerId = triggerId;
+    if (!mNewAEState) {
+        mNewAEState = true;
+        mNewNotifySignal.signal();
+    }
+}
+
+// Frame-metadata notification from the frame processor; takes ownership of
+// the frame via acquire(). Consumed by manageStandardCaptureWait().
+void CaptureSequencer::onFrameAvailable(int32_t frameId,
+        CameraMetadata &frame) {
+    ALOGV("%s: Listener found new frame", __FUNCTION__);
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    mNewFrameId = frameId;
+    mNewFrame.acquire(frame);
+    if (!mNewFrameReceived) {
+        mNewFrameReceived = true;
+        mNewFrameSignal.signal();
+    }
+}
+
+// Capture-buffer notification (JPEG processor); records the buffer timestamp
+// for cross-checking against the frame metadata.
+void CaptureSequencer::onCaptureAvailable(nsecs_t timestamp) {
+    ATRACE_CALL();
+    ALOGV("%s", __FUNCTION__);
+    Mutex::Autolock l(mInputMutex);
+    mCaptureTimestamp = timestamp;
+    if (!mNewCaptureReceived) {
+        mNewCaptureReceived = true;
+        mNewCaptureSignal.signal();
+    }
+}
+
+
+// Debug dump of the latest capture request, state, and frame.
+// NOTE(review): reads mCaptureRequest/mCaptureState/mNewFrame without taking
+// mInputMutex — racy if called while a capture is in progress; confirm this
+// is acceptable for a dump path.
+void CaptureSequencer::dump(int fd, const Vector<String16>& args) {
+    String8 result;
+    if (mCaptureRequest.entryCount() != 0) {
+        result = "    Capture request:\n";
+        write(fd, result.string(), result.size());
+        mCaptureRequest.dump(fd, 2, 6);
+    } else {
+        result = "    Capture request: undefined\n";
+        write(fd, result.string(), result.size());
+    }
+    result = String8::format("    Current capture state: %s\n",
+            kStateNames[mCaptureState]);
+    result.append("    Latest captured frame:\n");
+    write(fd, result.string(), result.size());
+    mNewFrame.dump(fd, 2, 6);
+}
+
+/** Private members */
+
+// Human-readable names, indexed by CaptureState, plus a trailing "UNKNOWN".
+// BUGFIX: the original list omitted an entry for STANDARD_CAPTURE_WAIT, so
+// every name from index 8 onward was shifted by one (e.g. ERROR printed as
+// "UNKNOWN") and the last slot was left uninitialized.
+const char* CaptureSequencer::kStateNames[CaptureSequencer::NUM_CAPTURE_STATES+1] =
+{
+    "IDLE",
+    "START",
+    "ZSL_START",
+    "ZSL_WAITING",
+    "ZSL_REPROCESSING",
+    "STANDARD_START",
+    "STANDARD_PRECAPTURE_WAIT",
+    "STANDARD_CAPTURE",
+    "STANDARD_CAPTURE_WAIT",
+    "BURST_CAPTURE_START",
+    "BURST_CAPTURE_WAIT",
+    "DONE",
+    "ERROR",
+    "UNKNOWN"
+};
+
+// State handler table, indexed by CaptureState. Covers every state up to
+// and including DONE (NUM_CAPTURE_STATES-1 entries); ERROR has no handler
+// and is rejected in threadLoop(). Order must match the CaptureState enum.
+const CaptureSequencer::StateManager
+        CaptureSequencer::kStateManagers[CaptureSequencer::NUM_CAPTURE_STATES-1] = {
+    &CaptureSequencer::manageIdle,
+    &CaptureSequencer::manageStart,
+    &CaptureSequencer::manageZslStart,
+    &CaptureSequencer::manageZslWaiting,
+    &CaptureSequencer::manageZslReprocessing,
+    &CaptureSequencer::manageStandardStart,
+    &CaptureSequencer::manageStandardPrecaptureWait,
+    &CaptureSequencer::manageStandardCapture,
+    &CaptureSequencer::manageStandardCaptureWait,
+    &CaptureSequencer::manageBurstCaptureStart,
+    &CaptureSequencer::manageBurstCaptureWait,
+    &CaptureSequencer::manageDone,
+};
+
+// State machine driver: run one state handler per iteration. Exits the
+// thread if the client is gone or the state is ERROR (or worse).
+bool CaptureSequencer::threadLoop() {
+    status_t res;
+
+    sp<Camera2Client> client = mClient.promote();
+    if (client == 0) return false;
+
+    if (mCaptureState < ERROR) {
+        mCaptureState = (this->*kStateManagers[mCaptureState])(client);
+    } else {
+        ALOGE("%s: Bad capture state: %s",
+                __FUNCTION__, kStateNames[mCaptureState]);
+        return false;
+    }
+
+    return true;
+}
+
+// Wait (bounded by kWaitDuration) for startCapture(); transition to START
+// when requested, otherwise stay IDLE.
+CaptureSequencer::CaptureState CaptureSequencer::manageIdle(sp<Camera2Client> &client) {
+    status_t res;
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    while (!mStartCapture) {
+        res = mStartCaptureSignal.waitRelative(mInputMutex,
+                kWaitDuration);
+        if (res == TIMED_OUT) break;
+    }
+    if (mStartCapture) {
+        mStartCapture = false;
+        mBusy = true;
+        return START;
+    }
+    return IDLE;
+}
+
+// Finish a capture sequence: advance the capture ID, clear the busy flag,
+// and restore the client parameter state that the capture displaced.
+CaptureSequencer::CaptureState CaptureSequencer::manageDone(sp<Camera2Client> &client) {
+    // Removed unused local 'status_t res' from the original.
+    ATRACE_CALL();
+    mCaptureId++;
+
+    {
+        Mutex::Autolock l(mInputMutex);
+        mBusy = false;
+    }
+
+    SharedParameters::Lock l(client->getParameters());
+    switch (l.mParameters.state) {
+        case Parameters::STILL_CAPTURE:
+            l.mParameters.state = Parameters::STOPPED;
+            break;
+        case Parameters::VIDEO_SNAPSHOT:
+            l.mParameters.state = Parameters::RECORD;
+            break;
+        default:
+            ALOGE("%s: Camera %d: Still image produced unexpectedly "
+                    "in state %s!",
+                    __FUNCTION__, client->getCameraId(),
+                    Parameters::getStateName(l.mParameters.state));
+    }
+
+    return IDLE;
+}
+
+// Build/refresh the still-capture request, then dispatch to the burst, ZSL,
+// or standard capture path based on current parameters.
+CaptureSequencer::CaptureState CaptureSequencer::manageStart(
+        sp<Camera2Client> &client) {
+    ALOGV("%s", __FUNCTION__);
+    status_t res;
+    ATRACE_CALL();
+    SharedParameters::Lock l(client->getParameters());
+    CaptureState nextState = DONE;
+
+    res = updateCaptureRequest(l.mParameters, client);
+    if (res != OK ) {
+        ALOGE("%s: Camera %d: Can't update still image capture request: %s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+        return DONE;
+    }
+
+    // Light-effect bursts take priority over ZSL; both only apply to
+    // plain still captures (not video snapshots).
+    if(l.mParameters.lightFx != Parameters::LIGHTFX_NONE &&
+            l.mParameters.state == Parameters::STILL_CAPTURE) {
+        nextState = BURST_CAPTURE_START;
+    }
+    else if (l.mParameters.zslMode &&
+            l.mParameters.state == Parameters::STILL_CAPTURE) {
+        nextState = ZSL_START;
+    } else {
+        nextState = STANDARD_START;
+    }
+
+    return nextState;
+}
+
+// Kick off a zero-shutter-lag capture: stop preview and push a buffered ZSL
+// frame back through the device for reprocessing, then reuse the standard
+// capture-wait state to collect the result.
+CaptureSequencer::CaptureState CaptureSequencer::manageZslStart(
+        sp<Camera2Client> &client) {
+    status_t res;
+    sp<ZslProcessor> processor = mZslProcessor.promote();
+    if (processor == 0) {
+        ALOGE("%s: No ZSL queue to use!", __FUNCTION__);
+        return DONE;
+    }
+
+    client->registerFrameListener(mCaptureId,
+            this);
+
+    res = client->getCameraDevice()->clearStreamingRequest();
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
+                "%s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+        return DONE;
+    }
+    // TODO: Actually select the right thing here.
+    processor->pushToReprocess(mCaptureId);
+
+    mTimeoutCount = kMaxTimeoutsForCaptureEnd;
+    return STANDARD_CAPTURE_WAIT;
+}
+
+// Placeholder: ZSL waiting state is not used yet.
+CaptureSequencer::CaptureState CaptureSequencer::manageZslWaiting(
+        sp<Camera2Client> &client) {
+    return DONE;
+}
+
+// Placeholder: ZSL reprocessing restarts the sequence.
+CaptureSequencer::CaptureState CaptureSequencer::manageZslReprocessing(
+        sp<Camera2Client> &client) {
+    return START;
+}
+
+// Standard capture entry: register for frame metadata, fire the AE
+// precapture metering trigger, and move to the precapture-wait state.
+CaptureSequencer::CaptureState CaptureSequencer::manageStandardStart(
+        sp<Camera2Client> &client) {
+    ATRACE_CALL();
+    client->registerFrameListener(mCaptureId,
+            this);
+    {
+        SharedParameters::Lock l(client->getParameters());
+        mTriggerId = l.mParameters.precaptureTriggerCounter++;
+    }
+    client->getCameraDevice()->triggerPrecaptureMetering(mTriggerId);
+
+    mAeInPrecapture = false;
+    mTimeoutCount = kMaxTimeoutsForPrecaptureStart;
+    return STANDARD_PRECAPTURE_WAIT;
+}
+
+// Wait for AE to enter and then leave the PRECAPTURE state for our trigger
+// ID. On timeout budget exhaustion, proceed to capture anyway.
+CaptureSequencer::CaptureState CaptureSequencer::manageStandardPrecaptureWait(
+        sp<Camera2Client> &client) {
+    status_t res;
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    while (!mNewAEState) {
+        res = mNewNotifySignal.waitRelative(mInputMutex, kWaitDuration);
+        if (res == TIMED_OUT) {
+            mTimeoutCount--;
+            break;
+        }
+    }
+    if (mTimeoutCount <= 0) {
+        ALOGW("Timed out waiting for precapture %s",
+                mAeInPrecapture ? "end" : "start");
+        return STANDARD_CAPTURE;
+    }
+    if (mNewAEState) {
+        if (!mAeInPrecapture) {
+            // Waiting to see PRECAPTURE state
+            if (mAETriggerId == mTriggerId &&
+                    mAEState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
+                ALOGV("%s: Got precapture start", __FUNCTION__);
+                mAeInPrecapture = true;
+                mTimeoutCount = kMaxTimeoutsForPrecaptureEnd;
+            }
+        } else {
+            // Waiting to see PRECAPTURE state end
+            if (mAETriggerId == mTriggerId &&
+                    mAEState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
+                ALOGV("%s: Got precapture end", __FUNCTION__);
+                return STANDARD_CAPTURE;
+            }
+        }
+        mNewAEState = false;
+    }
+    return STANDARD_PRECAPTURE_WAIT;
+}
+
+// Finalize the output-stream list and request ID on the capture request,
+// stop preview for plain still captures, and submit the capture to the HAL.
+CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture(
+        sp<Camera2Client> &client) {
+    status_t res;
+    ATRACE_CALL();
+    SharedParameters::Lock l(client->getParameters());
+    Vector<uint8_t> outputStreams;
+
+    // Preview stays attached so the viewfinder does not black out.
+    outputStreams.push(client->getPreviewStreamId());
+    outputStreams.push(client->getCaptureStreamId());
+
+    if (l.mParameters.previewCallbackFlags &
+            CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
+        outputStreams.push(client->getCallbackStreamId());
+    }
+
+    if (l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
+        outputStreams.push(client->getRecordingStreamId());
+    }
+
+    res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+            outputStreams);
+    if (res == OK) {
+        res = mCaptureRequest.update(ANDROID_REQUEST_ID,
+                &mCaptureId, 1);
+    }
+    if (res == OK) {
+        res = mCaptureRequest.sort();
+    }
+
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Unable to set up still capture request: %s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+        return DONE;
+    }
+
+    // Hand the HAL its own copy; mCaptureRequest is reused across captures.
+    CameraMetadata captureCopy = mCaptureRequest;
+    if (captureCopy.entryCount() == 0) {
+        ALOGE("%s: Camera %d: Unable to copy capture request for HAL device",
+                __FUNCTION__, client->getCameraId());
+        return DONE;
+    }
+
+    if (l.mParameters.state == Parameters::STILL_CAPTURE) {
+        res = client->getCameraDevice()->clearStreamingRequest();
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Unable to stop preview for still capture: "
+                    "%s (%d)",
+                    __FUNCTION__, client->getCameraId(), strerror(-res), res);
+            return DONE;
+        }
+    }
+    // TODO: Capture should be atomic with setStreamingRequest here
+    res = client->getCameraDevice()->capture(captureCopy);
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Unable to submit still image capture request: "
+                "%s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+        return DONE;
+    }
+
+    if (l.mParameters.playShutterSound) {
+        client->getCameraService()->playSound(CameraService::SOUND_SHUTTER);
+    }
+
+    mTimeoutCount = kMaxTimeoutsForCaptureEnd;
+    return STANDARD_CAPTURE_WAIT;
+}
+
+// Wait for both the capture frame metadata (onFrameAvailable) and the
+// captured buffer timestamp (onCaptureAvailable), cross-check them, and
+// finish the sequence. Shares the timeout budget set by the capture states.
+CaptureSequencer::CaptureState CaptureSequencer::manageStandardCaptureWait(
+        sp<Camera2Client> &client) {
+    status_t res;
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    while (!mNewFrameReceived) {
+        res = mNewFrameSignal.waitRelative(mInputMutex, kWaitDuration);
+        if (res == TIMED_OUT) {
+            mTimeoutCount--;
+            break;
+        }
+    }
+    while (!mNewCaptureReceived) {
+        res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration);
+        if (res == TIMED_OUT) {
+            mTimeoutCount--;
+            break;
+        }
+    }
+    if (mTimeoutCount <= 0) {
+        ALOGW("Timed out waiting for capture to complete");
+        return DONE;
+    }
+    if (mNewFrameReceived && mNewCaptureReceived) {
+        if (mNewFrameId != mCaptureId) {
+            ALOGW("Mismatched capture frame IDs: Expected %d, got %d",
+                    mCaptureId, mNewFrameId);
+        }
+        camera_metadata_entry_t entry;
+        entry = mNewFrame.find(ANDROID_SENSOR_TIMESTAMP);
+        if (entry.count == 0) {
+            // BUGFIX: the original fell through and dereferenced
+            // entry.data.i64[0] even when the timestamp entry was missing.
+            ALOGE("No timestamp field in capture frame!");
+        } else if (entry.data.i64[0] != mCaptureTimestamp) {
+            ALOGW("Mismatched capture timestamps: Metadata frame %lld,"
+                    " captured buffer %lld", entry.data.i64[0], mCaptureTimestamp);
+        }
+        client->removeFrameListener(mCaptureId);
+
+        mNewFrameReceived = false;
+        mNewCaptureReceived = false;
+        return DONE;
+    }
+    return STANDARD_CAPTURE_WAIT;
+}
+
+// Build the burst object and submit the burst capture request list.
+// NOTE(review): the result of mBurstCapture->start() is stored in res but
+// never checked — the base-class stub always fails, so this will always
+// fall into the wait state and time out; confirm intended.
+CaptureSequencer::CaptureState CaptureSequencer::manageBurstCaptureStart(
+        sp<Camera2Client> &client) {
+    ALOGV("%s", __FUNCTION__);
+    status_t res;
+    ATRACE_CALL();
+
+    // check which burst mode is set, create respective burst object
+    {
+        SharedParameters::Lock l(client->getParameters());
+
+        res = updateCaptureRequest(l.mParameters, client);
+        if(res != OK) {
+            return DONE;
+        }
+
+        //
+        // check for burst mode type in mParameters here
+        //
+        mBurstCapture = new BurstCapture(client, this);
+    }
+
+    res = mCaptureRequest.update(ANDROID_REQUEST_ID, &mCaptureId, 1);
+    if (res == OK) {
+        res = mCaptureRequest.sort();
+    }
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Unable to set up still capture request: %s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+        return DONE;
+    }
+
+    CameraMetadata captureCopy = mCaptureRequest;
+    if (captureCopy.entryCount() == 0) {
+        ALOGE("%s: Camera %d: Unable to copy capture request for HAL device",
+                __FUNCTION__, client->getCameraId());
+        return DONE;
+    }
+
+    Vector<CameraMetadata> requests;
+    requests.push(mCaptureRequest);
+    res = mBurstCapture->start(requests, mCaptureId);
+    // Bursts may take much longer than a single capture.
+    mTimeoutCount = kMaxTimeoutsForCaptureEnd * 10;
+    return BURST_CAPTURE_WAIT;
+}
+
+// Wait for the burst to report completion via onCaptureAvailable().
+CaptureSequencer::CaptureState CaptureSequencer::manageBurstCaptureWait(
+        sp<Camera2Client> &client) {
+    status_t res;
+    ATRACE_CALL();
+
+    // BUGFIX: Condition::waitRelative requires the mutex to be held, and
+    // mNewCaptureReceived is shared with onCaptureAvailable(); the original
+    // waited on mNewCaptureSignal without taking mInputMutex.
+    Mutex::Autolock l(mInputMutex);
+
+    while (!mNewCaptureReceived) {
+        res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration);
+        if (res == TIMED_OUT) {
+            mTimeoutCount--;
+            break;
+        }
+    }
+
+    if (mTimeoutCount <= 0) {
+        ALOGW("Timed out waiting for burst capture to complete");
+        return DONE;
+    }
+    if (mNewCaptureReceived) {
+        mNewCaptureReceived = false;
+        // TODO: update mCaptureId to last burst's capture ID + 1?
+        return DONE;
+    }
+
+    return BURST_CAPTURE_WAIT;
+}
+
+// Create the still-capture request on first use, then refresh it with the
+// current common parameters plus the JPEG-specific (thumbnail, quality,
+// rotation, GPS) entries. Returns the first failing status.
+status_t CaptureSequencer::updateCaptureRequest(const Parameters &params,
+        sp<Camera2Client> &client) {
+    ATRACE_CALL();
+    status_t res;
+    if (mCaptureRequest.entryCount() == 0) {
+        res = client->getCameraDevice()->createDefaultRequest(
+                CAMERA2_TEMPLATE_STILL_CAPTURE,
+                &mCaptureRequest);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Unable to create default still image request:"
+                    " %s (%d)", __FUNCTION__, client->getCameraId(),
+                    strerror(-res), res);
+            return res;
+        }
+    }
+
+    res = params.updateRequest(&mCaptureRequest);
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Unable to update common entries of capture "
+                "request: %s (%d)", __FUNCTION__, client->getCameraId(),
+                strerror(-res), res);
+        return res;
+    }
+
+    res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_SIZE,
+            params.jpegThumbSize, 2);
+    if (res != OK) return res;
+    res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
+            &params.jpegThumbQuality, 1);
+    if (res != OK) return res;
+    res = mCaptureRequest.update(ANDROID_JPEG_QUALITY,
+            &params.jpegQuality, 1);
+    if (res != OK) return res;
+    res = mCaptureRequest.update(
+            ANDROID_JPEG_ORIENTATION,
+            &params.jpegRotation, 1);
+    if (res != OK) return res;
+
+    // GPS tags are added when enabled, and scrubbed from the reused request
+    // when disabled so stale location data never leaks into a capture.
+    if (params.gpsEnabled) {
+        res = mCaptureRequest.update(
+                ANDROID_JPEG_GPS_COORDINATES,
+                params.gpsCoordinates, 3);
+        if (res != OK) return res;
+        res = mCaptureRequest.update(
+                ANDROID_JPEG_GPS_TIMESTAMP,
+                &params.gpsTimestamp, 1);
+        if (res != OK) return res;
+        res = mCaptureRequest.update(
+                ANDROID_JPEG_GPS_PROCESSING_METHOD,
+                params.gpsProcessingMethod);
+        if (res != OK) return res;
+    } else {
+        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_COORDINATES);
+        if (res != OK) return res;
+        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_TIMESTAMP);
+        if (res != OK) return res;
+        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
+        if (res != OK) return res;
+    }
+
+    return OK;
+}
+
+
+}; // namespace camera2
+}; // namespace android
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.h b/services/camera/libcameraservice/camera2/CaptureSequencer.h
new file mode 100644
index 0000000..39ae079
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/CaptureSequencer.h
@@ -0,0 +1,161 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CAPTURESEQUENCER_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_CAPTURESEQUENCER_H
+
+#include <utils/Thread.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include "CameraMetadata.h"
+#include "Parameters.h"
+#include "FrameProcessor.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+class ZslProcessor;
+class BurstCapture;
+
+/**
+ * Manages the still image capture process for
+ * zero-shutter-lag, regular, and video snapshots.
+ */
+/**
+ * Manages the still image capture process for
+ * zero-shutter-lag, regular, and video snapshots.
+ */
+class CaptureSequencer:
+            virtual public Thread,
+            virtual public FrameProcessor::FilteredListener {
+  public:
+    CaptureSequencer(wp<Camera2Client> client);
+    ~CaptureSequencer();
+
+    // Get reference to the ZslProcessor, which holds the ZSL buffers and frames
+    void setZslProcessor(wp<ZslProcessor> processor);
+
+    // Begin still image capture
+    status_t startCapture();
+
+    // Notifications about AE state changes
+    void notifyAutoExposure(uint8_t newState, int triggerId);
+
+    // Notifications from the frame processor
+    virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame);
+
+    // Notifications from the capture processor
+    void onCaptureAvailable(nsecs_t timestamp);
+
+    void dump(int fd, const Vector<String16>& args);
+
+  private:
+    /**
+     * Accessed by other threads
+     */
+    // Guards all of the cross-thread flags/values and conditions below.
+    Mutex mInputMutex;
+
+    bool mStartCapture;
+    bool mBusy;
+    Condition mStartCaptureSignal;
+
+    // Latest AE notification (valid while mNewAEState is true).
+    bool mNewAEState;
+    uint8_t mAEState;
+    int mAETriggerId;
+    Condition mNewNotifySignal;
+
+    // Latest capture-frame metadata from the frame processor.
+    bool mNewFrameReceived;
+    int32_t mNewFrameId;
+    CameraMetadata mNewFrame;
+    Condition mNewFrameSignal;
+
+    // Latest captured-buffer timestamp from the capture processor.
+    bool mNewCaptureReceived;
+    nsecs_t mCaptureTimestamp;
+    Condition mNewCaptureSignal;
+
+    /**
+     * Internal to CaptureSequencer
+     */
+    static const nsecs_t kWaitDuration = 100000000; // 100 ms
+    static const int kMaxTimeoutsForPrecaptureStart = 2; // 200 ms
+    static const int kMaxTimeoutsForPrecaptureEnd = 10;  // 1 sec
+    static const int kMaxTimeoutsForCaptureEnd    = 20;  // 2 sec
+
+    wp<Camera2Client> mClient;
+    wp<ZslProcessor> mZslProcessor;
+    sp<BurstCapture> mBurstCapture;
+
+    // State machine; handlers below return the next state. Order must stay
+    // in sync with kStateNames and kStateManagers in the .cpp file.
+    enum CaptureState {
+        IDLE,
+        START,
+        ZSL_START,
+        ZSL_WAITING,
+        ZSL_REPROCESSING,
+        STANDARD_START,
+        STANDARD_PRECAPTURE_WAIT,
+        STANDARD_CAPTURE,
+        STANDARD_CAPTURE_WAIT,
+        BURST_CAPTURE_START,
+        BURST_CAPTURE_WAIT,
+        DONE,
+        ERROR,
+        NUM_CAPTURE_STATES
+    } mCaptureState;
+    static const char* kStateNames[];
+
+    typedef CaptureState (CaptureSequencer::*StateManager)(sp<Camera2Client> &client);
+    static const StateManager kStateManagers[];
+
+    // Reused still-capture request; rebuilt lazily by updateCaptureRequest().
+    CameraMetadata mCaptureRequest;
+
+    int mTriggerId;
+    int mTimeoutCount;
+    bool mAeInPrecapture;
+
+    int32_t mCaptureId;
+
+    // Main internal methods
+
+    virtual bool threadLoop();
+
+    CaptureState manageIdle(sp<Camera2Client> &client);
+    CaptureState manageStart(sp<Camera2Client> &client);
+
+    CaptureState manageZslStart(sp<Camera2Client> &client);
+    CaptureState manageZslWaiting(sp<Camera2Client> &client);
+    CaptureState manageZslReprocessing(sp<Camera2Client> &client);
+
+    CaptureState manageStandardStart(sp<Camera2Client> &client);
+    CaptureState manageStandardPrecaptureWait(sp<Camera2Client> &client);
+    CaptureState manageStandardCapture(sp<Camera2Client> &client);
+    CaptureState manageStandardCaptureWait(sp<Camera2Client> &client);
+
+    CaptureState manageBurstCaptureStart(sp<Camera2Client> &client);
+    CaptureState manageBurstCaptureWait(sp<Camera2Client> &client);
+
+    CaptureState manageDone(sp<Camera2Client> &client);
+
+    // Utility methods
+
+    status_t updateCaptureRequest(const Parameters &params,
+            sp<Camera2Client> &client);
+};
+
+}; // namespace camera2
+}; // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp
index 5059754..e24db0b 100644
--- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp
@@ -36,6 +36,19 @@ FrameProcessor::~FrameProcessor() {
ALOGV("%s: Exit", __FUNCTION__);
}
+// Register (or replace) the listener for a given request/frame id.
+// NOTE(review): KeyedVector::replaceValueFor returns an index/error code,
+// not strictly a status_t — confirm callers only check for negative values.
+status_t FrameProcessor::registerListener(int32_t id,
+        wp<FilteredListener> listener) {
+    Mutex::Autolock l(mInputMutex);
+    ALOGV("%s: Registering listener for frame id %d",
+            __FUNCTION__, id);
+    return mListeners.replaceValueFor(id, listener);
+}
+
+// Remove the listener for a given request/frame id, if present.
+status_t FrameProcessor::removeListener(int32_t id) {
+    Mutex::Autolock l(mInputMutex);
+    return mListeners.removeItem(id);
+}
+
void FrameProcessor::dump(int fd, const Vector<String16>& args) {
String8 result(" Latest received frame:\n");
write(fd, result.string(), result.size());
@@ -50,6 +63,7 @@ bool FrameProcessor::threadLoop() {
sp<Camera2Client> client = mClient.promote();
if (client == 0) return false;
device = client->getCameraDevice();
+ if (device == 0) return false;
}
res = device->waitForNextFrame(kWaitDuration);
@@ -67,20 +81,28 @@ bool FrameProcessor::threadLoop() {
void FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
status_t res;
+ ATRACE_CALL();
CameraMetadata frame;
while ( (res = client->getCameraDevice()->getNextFrame(&frame)) == OK) {
camera_metadata_entry_t entry;
+
entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
if (entry.count == 0) {
- ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
- __FUNCTION__, client->getCameraId(), strerror(-res), res);
+ ALOGE("%s: Camera %d: Error reading frame number",
+ __FUNCTION__, client->getCameraId());
break;
}
res = processFaceDetect(frame, client);
if (res != OK) break;
- mLastFrame.acquire(frame);
+ // Must be last - listener can take ownership of frame
+ res = processListener(frame, client);
+ if (res != OK) break;
+
+ if (!frame.isEmpty()) {
+ mLastFrame.acquire(frame);
+ }
}
if (res != NOT_ENOUGH_DATA) {
ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
@@ -91,9 +113,43 @@ void FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
return;
}
-status_t FrameProcessor::processFaceDetect(
- const CameraMetadata &frame, sp<Camera2Client> &client) {
+status_t FrameProcessor::processListener(CameraMetadata &frame,
+ sp<Camera2Client> &client) {
+ status_t res;
+ ATRACE_CALL();
+ camera_metadata_entry_t entry;
+
+ entry = frame.find(ANDROID_REQUEST_ID);
+ if (entry.count == 0) {
+ ALOGE("%s: Camera %d: Error reading frame id",
+ __FUNCTION__, client->getCameraId());
+ return BAD_VALUE;
+ }
+ int32_t frameId = entry.data.i32[0];
+ ALOGV("%s: Got frame with ID %d", __FUNCTION__, frameId);
+
+ sp<FilteredListener> listener;
+ {
+ Mutex::Autolock l(mInputMutex);
+ ssize_t listenerIndex = mListeners.indexOfKey(frameId);
+ if (listenerIndex != NAME_NOT_FOUND) {
+ listener = mListeners[listenerIndex].promote();
+ if (listener == 0) {
+ mListeners.removeItemsAt(listenerIndex, 1);
+ }
+ }
+ }
+
+ if (listener != 0) {
+ listener->onFrameAvailable(frameId, frame);
+ }
+ return OK;
+}
+
+status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
+ sp<Camera2Client> &client) {
status_t res;
+ ATRACE_CALL();
camera_metadata_ro_entry_t entry;
bool enableFaceDetect;
int maxFaces;
@@ -209,6 +265,5 @@ status_t FrameProcessor::processFaceDetect(
return OK;
}
-
}; // namespace camera2
}; // namespace android
diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.h b/services/camera/libcameraservice/camera2/FrameProcessor.h
index 2cdf7f0..25d489a 100644
--- a/services/camera/libcameraservice/camera2/FrameProcessor.h
+++ b/services/camera/libcameraservice/camera2/FrameProcessor.h
@@ -20,6 +20,7 @@
#include <utils/Thread.h>
#include <utils/String16.h>
#include <utils/Vector.h>
+#include <utils/KeyedVector.h>
#include "CameraMetadata.h"
namespace android {
@@ -36,6 +37,17 @@ class FrameProcessor: public Thread {
FrameProcessor(wp<Camera2Client> client);
~FrameProcessor();
+ struct FilteredListener: virtual public RefBase {
+ // Listener may take ownership of frame
+ virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame) = 0;
+ };
+
+ // Register a listener for a specific frame ID (android.request.id).
+ // De-registers any existing listeners for that ID
+ status_t registerListener(int32_t id, wp<FilteredListener> listener);
+
+ status_t removeListener(int32_t id);
+
void dump(int fd, const Vector<String16>& args);
private:
static const nsecs_t kWaitDuration = 10000000; // 10 ms
@@ -43,10 +55,17 @@ class FrameProcessor: public Thread {
virtual bool threadLoop();
+ Mutex mInputMutex;
+ KeyedVector<int32_t, wp<FilteredListener> > mListeners;
+
void processNewFrames(sp<Camera2Client> &client);
+
status_t processFaceDetect(const CameraMetadata &frame,
sp<Camera2Client> &client);
+ status_t processListener(CameraMetadata &frame,
+ sp<Camera2Client> &client);
+
CameraMetadata mLastFrame;
};
diff --git a/services/camera/libcameraservice/camera2/JpegCompressor.cpp b/services/camera/libcameraservice/camera2/JpegCompressor.cpp
new file mode 100644
index 0000000..55964b6
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/JpegCompressor.cpp
@@ -0,0 +1,220 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "JpegCompressor"
+
+#include <utils/Log.h>
+#include <ui/GraphicBufferMapper.h>
+
+#include "JpegCompressor.h"
+
+namespace android {
+namespace camera2 {
+
+JpegCompressor::JpegCompressor():
+ Thread(false),
+ mIsBusy(false),
+ mCaptureTime(0) {
+}
+
+JpegCompressor::~JpegCompressor() {
+ ALOGV("%s", __FUNCTION__);
+ Mutex::Autolock lock(mMutex);
+}
+
+status_t JpegCompressor::start(Vector<CpuConsumer::LockedBuffer*> buffers,
+ nsecs_t captureTime) {
+ ALOGV("%s", __FUNCTION__);
+ Mutex::Autolock busyLock(mBusyMutex);
+
+ if (mIsBusy) {
+ ALOGE("%s: Already processing a buffer!", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ mIsBusy = true;
+
+ mBuffers = buffers;
+ mCaptureTime = captureTime;
+
+ status_t res;
+ res = run("JpegCompressor");
+ if (res != OK) {
+ ALOGE("%s: Unable to start up compression thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ //delete mBuffers; // necessary?
+ }
+ return res;
+}
+
+status_t JpegCompressor::cancel() {
+ ALOGV("%s", __FUNCTION__);
+ requestExitAndWait();
+ return OK;
+}
+
+status_t JpegCompressor::readyToRun() {
+ ALOGV("%s", __FUNCTION__);
+ return OK;
+}
+
+bool JpegCompressor::threadLoop() {
+ ALOGV("%s", __FUNCTION__);
+
+ mAuxBuffer = mBuffers[0]; // input
+ mJpegBuffer = mBuffers[1]; // output
+
+ // Set up error management
+ mJpegErrorInfo = NULL;
+ JpegError error;
+ error.parent = this;
+
+ mCInfo.err = jpeg_std_error(&error);
+ mCInfo.err->error_exit = jpegErrorHandler;
+
+ jpeg_create_compress(&mCInfo);
+ if (checkError("Error initializing compression")) return false;
+
+ // Route compressed data straight to output stream buffer
+ JpegDestination jpegDestMgr;
+ jpegDestMgr.parent = this;
+ jpegDestMgr.init_destination = jpegInitDestination;
+ jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer;
+ jpegDestMgr.term_destination = jpegTermDestination;
+
+ mCInfo.dest = &jpegDestMgr;
+
+ // Set up compression parameters
+ mCInfo.image_width = mAuxBuffer->width;
+ mCInfo.image_height = mAuxBuffer->height;
+ mCInfo.input_components = 1; // 3;
+ mCInfo.in_color_space = JCS_GRAYSCALE; // JCS_RGB
+
+ ALOGV("%s: image_width = %d, image_height = %d", __FUNCTION__, mCInfo.image_width, mCInfo.image_height);
+
+ jpeg_set_defaults(&mCInfo);
+ if (checkError("Error configuring defaults")) return false;
+
+ // Do compression
+ jpeg_start_compress(&mCInfo, TRUE);
+ if (checkError("Error starting compression")) return false;
+
+ size_t rowStride = mAuxBuffer->stride;// * 3;
+ const size_t kChunkSize = 32;
+ while (mCInfo.next_scanline < mCInfo.image_height) {
+ JSAMPROW chunk[kChunkSize];
+ for (size_t i = 0 ; i < kChunkSize; i++) {
+ chunk[i] = (JSAMPROW)
+ (mAuxBuffer->data + (i + mCInfo.next_scanline) * rowStride);
+ }
+ jpeg_write_scanlines(&mCInfo, chunk, kChunkSize);
+ if (checkError("Error while compressing")) return false;
+ if (exitPending()) {
+ ALOGV("%s: Cancel called, exiting early", __FUNCTION__);
+ cleanUp();
+ return false;
+ }
+ }
+
+ jpeg_finish_compress(&mCInfo);
+ if (checkError("Error while finishing compression")) return false;
+
+ cleanUp();
+ return false;
+}
+
+bool JpegCompressor::isBusy() {
+ ALOGV("%s", __FUNCTION__);
+ Mutex::Autolock busyLock(mBusyMutex);
+ return mIsBusy;
+}
+
+// old function -- TODO: update for new buffer type
+bool JpegCompressor::isStreamInUse(uint32_t id) {
+ ALOGV("%s", __FUNCTION__);
+ Mutex::Autolock lock(mBusyMutex);
+
+ if (mBuffers.size() && mIsBusy) {
+ for (size_t i = 0; i < mBuffers.size(); i++) {
+// if ( mBuffers[i].streamId == (int)id ) return true;
+ }
+ }
+ return false;
+}
+
+bool JpegCompressor::waitForDone(nsecs_t timeout) {
+ ALOGV("%s", __FUNCTION__);
+ Mutex::Autolock lock(mBusyMutex);
+ status_t res = OK;
+ if (mIsBusy) {
+ res = mDone.waitRelative(mBusyMutex, timeout);
+ }
+ return (res == OK);
+}
+
+bool JpegCompressor::checkError(const char *msg) {
+ ALOGV("%s", __FUNCTION__);
+ if (mJpegErrorInfo) {
+ char errBuffer[JMSG_LENGTH_MAX];
+ mJpegErrorInfo->err->format_message(mJpegErrorInfo, errBuffer);
+ ALOGE("%s: %s: %s",
+ __FUNCTION__, msg, errBuffer);
+ cleanUp();
+ mJpegErrorInfo = NULL;
+ return true;
+ }
+ return false;
+}
+
+void JpegCompressor::cleanUp() {
+ ALOGV("%s", __FUNCTION__);
+ jpeg_destroy_compress(&mCInfo);
+ Mutex::Autolock lock(mBusyMutex);
+ mIsBusy = false;
+ mDone.signal();
+}
+
+void JpegCompressor::jpegErrorHandler(j_common_ptr cinfo) {
+ ALOGV("%s", __FUNCTION__);
+ JpegError *error = static_cast<JpegError*>(cinfo->err);
+ error->parent->mJpegErrorInfo = cinfo;
+}
+
+void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) {
+ ALOGV("%s", __FUNCTION__);
+ JpegDestination *dest= static_cast<JpegDestination*>(cinfo->dest);
+ ALOGV("%s: Setting destination to %p, size %d",
+ __FUNCTION__, dest->parent->mJpegBuffer->data, kMaxJpegSize);
+ dest->next_output_byte = (JOCTET*)(dest->parent->mJpegBuffer->data);
+ dest->free_in_buffer = kMaxJpegSize;
+}
+
+boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr cinfo) {
+ ALOGV("%s", __FUNCTION__);
+ ALOGE("%s: JPEG destination buffer overflow!",
+ __FUNCTION__);
+ return true;
+}
+
+void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) {
+ ALOGV("%s", __FUNCTION__);
+ ALOGV("%s: Done writing JPEG data. %d bytes left in buffer",
+ __FUNCTION__, cinfo->dest->free_in_buffer);
+}
+
+}; // namespace camera2
+}; // namespace android
diff --git a/services/camera/libcameraservice/camera2/JpegCompressor.h b/services/camera/libcameraservice/camera2/JpegCompressor.h
new file mode 100644
index 0000000..945b1de
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/JpegCompressor.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/**
+ * This class simulates a hardware JPEG compressor. It receives image buffers
+ * in RGBA_8888 format, processes them in a worker thread, and then pushes them
+ * out to their destination stream.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H
+#define ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H
+
+#include "utils/Thread.h"
+#include "utils/Mutex.h"
+#include "utils/Timers.h"
+#include "utils/Vector.h"
+//#include "Base.h"
+#include <stdio.h>
+#include <gui/CpuConsumer.h>
+
+extern "C" {
+#include <jpeglib.h>
+}
+
+
+namespace android {
+namespace camera2 {
+
+class JpegCompressor: private Thread, public virtual RefBase {
+ public:
+
+ JpegCompressor();
+ ~JpegCompressor();
+
+ // Start compressing COMPRESSED format buffers; JpegCompressor takes
+ // ownership of the Buffers vector.
+ status_t start(Vector<CpuConsumer::LockedBuffer*> buffers,
+ nsecs_t captureTime);
+
+ status_t cancel();
+
+ bool isBusy();
+ bool isStreamInUse(uint32_t id);
+
+ bool waitForDone(nsecs_t timeout);
+
+ // TODO: Measure this
+ static const size_t kMaxJpegSize = 300000;
+
+ private:
+ Mutex mBusyMutex;
+ Mutex mMutex;
+ bool mIsBusy;
+ Condition mDone;
+ nsecs_t mCaptureTime;
+
+ Vector<CpuConsumer::LockedBuffer*> mBuffers;
+ CpuConsumer::LockedBuffer *mJpegBuffer;
+ CpuConsumer::LockedBuffer *mAuxBuffer;
+ bool mFoundJpeg, mFoundAux;
+
+ jpeg_compress_struct mCInfo;
+
+ struct JpegError : public jpeg_error_mgr {
+ JpegCompressor *parent;
+ };
+ j_common_ptr mJpegErrorInfo;
+
+ struct JpegDestination : public jpeg_destination_mgr {
+ JpegCompressor *parent;
+ };
+
+ static void jpegErrorHandler(j_common_ptr cinfo);
+
+ static void jpegInitDestination(j_compress_ptr cinfo);
+ static boolean jpegEmptyOutputBuffer(j_compress_ptr cinfo);
+ static void jpegTermDestination(j_compress_ptr cinfo);
+
+ bool checkError(const char *msg);
+ void cleanUp();
+
+ /**
+ * Inherited Thread virtual overrides
+ */
+ private:
+ virtual status_t readyToRun();
+ virtual bool threadLoop();
+};
+
+}; // namespace camera2
+}; // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/camera2/CaptureProcessor.cpp b/services/camera/libcameraservice/camera2/JpegProcessor.cpp
index b17f9d2..b230d2d 100644
--- a/services/camera/libcameraservice/camera2/CaptureProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/JpegProcessor.cpp
@@ -14,14 +14,14 @@
* limitations under the License.
*/
-#define LOG_TAG "Camera2Client::CaptureProcessor"
+#define LOG_TAG "Camera2Client::JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <utils/Trace.h>
-#include "CaptureProcessor.h"
+#include "JpegProcessor.h"
#include <gui/SurfaceTextureClient.h>
#include "../Camera2Device.h"
#include "../Camera2Client.h"
@@ -30,18 +30,22 @@
namespace android {
namespace camera2 {
-CaptureProcessor::CaptureProcessor(wp<Camera2Client> client):
+JpegProcessor::JpegProcessor(
+ wp<Camera2Client> client,
+ wp<CaptureSequencer> sequencer):
Thread(false),
mClient(client),
+ mSequencer(sequencer),
mCaptureAvailable(false),
mCaptureStreamId(NO_STREAM) {
}
-CaptureProcessor::~CaptureProcessor() {
+JpegProcessor::~JpegProcessor() {
ALOGV("%s: Exit", __FUNCTION__);
+ deleteStream();
}
-void CaptureProcessor::onFrameAvailable() {
+void JpegProcessor::onFrameAvailable() {
Mutex::Autolock l(mInputMutex);
if (!mCaptureAvailable) {
mCaptureAvailable = true;
@@ -49,7 +53,7 @@ void CaptureProcessor::onFrameAvailable() {
}
}
-status_t CaptureProcessor::updateStream(const Parameters &params) {
+status_t JpegProcessor::updateStream(const Parameters &params) {
ATRACE_CALL();
ALOGV("%s", __FUNCTION__);
status_t res;
@@ -127,7 +131,7 @@ status_t CaptureProcessor::updateStream(const Parameters &params) {
return OK;
}
-status_t CaptureProcessor::deleteStream() {
+status_t JpegProcessor::deleteStream() {
ATRACE_CALL();
status_t res;
@@ -139,20 +143,25 @@ status_t CaptureProcessor::deleteStream() {
sp<Camera2Device> device = client->getCameraDevice();
device->deleteStream(mCaptureStreamId);
+
+ mCaptureHeap.clear();
+ mCaptureWindow.clear();
+ mCaptureConsumer.clear();
+
mCaptureStreamId = NO_STREAM;
}
return OK;
}
-int CaptureProcessor::getStreamId() const {
+int JpegProcessor::getStreamId() const {
Mutex::Autolock l(mInputMutex);
return mCaptureStreamId;
}
-void CaptureProcessor::dump(int fd, const Vector<String16>& args) {
+void JpegProcessor::dump(int fd, const Vector<String16>& args) const {
}
-bool CaptureProcessor::threadLoop() {
+bool JpegProcessor::threadLoop() {
status_t res;
{
@@ -174,7 +183,7 @@ bool CaptureProcessor::threadLoop() {
return true;
}
-status_t CaptureProcessor::processNewCapture(sp<Camera2Client> &client) {
+status_t JpegProcessor::processNewCapture(sp<Camera2Client> &client) {
ATRACE_CALL();
status_t res;
sp<Camera2Heap> captureHeap;
@@ -200,10 +209,7 @@ status_t CaptureProcessor::processNewCapture(sp<Camera2Client> &client) {
switch (l.mParameters.state) {
case Parameters::STILL_CAPTURE:
- l.mParameters.state = Parameters::STOPPED;
- break;
case Parameters::VIDEO_SNAPSHOT:
- l.mParameters.state = Parameters::RECORD;
break;
default:
ALOGE("%s: Camera %d: Still image produced unexpectedly "
@@ -224,6 +230,11 @@ status_t CaptureProcessor::processNewCapture(sp<Camera2Client> &client) {
return OK;
}
+ sp<CaptureSequencer> sequencer = mSequencer.promote();
+ if (sequencer != 0) {
+ sequencer->onCaptureAvailable(imgBuffer.timestamp);
+ }
+
// TODO: Optimize this to avoid memcopy
void* captureMemory = mCaptureHeap->mHeap->getBase();
size_t size = mCaptureHeap->mHeap->getSize();
diff --git a/services/camera/libcameraservice/camera2/CaptureProcessor.h b/services/camera/libcameraservice/camera2/JpegProcessor.h
index 8e35739..6e7a860 100644
--- a/services/camera/libcameraservice/camera2/CaptureProcessor.h
+++ b/services/camera/libcameraservice/camera2/JpegProcessor.h
@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CAPTUREPROCESSOR_H
-#define ANDROID_SERVERS_CAMERA_CAMERA2_CAPTUREPROCESSOR_H
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_JPEGPROCESSOR_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_JPEGPROCESSOR_H
#include <utils/Thread.h>
#include <utils/String16.h>
@@ -33,14 +33,16 @@ class Camera2Client;
namespace camera2 {
+class CaptureSequencer;
+
/***
* Still image capture output image processing
*/
-class CaptureProcessor:
+class JpegProcessor:
public Thread, public CpuConsumer::FrameAvailableListener {
public:
- CaptureProcessor(wp<Camera2Client> client);
- ~CaptureProcessor();
+ JpegProcessor(wp<Camera2Client> client, wp<CaptureSequencer> sequencer);
+ ~JpegProcessor();
void onFrameAvailable();
@@ -48,10 +50,11 @@ class CaptureProcessor:
status_t deleteStream();
int getStreamId() const;
- void dump(int fd, const Vector<String16>& args);
+ void dump(int fd, const Vector<String16>& args) const;
private:
static const nsecs_t kWaitDuration = 10000000; // 10 ms
wp<Camera2Client> mClient;
+ wp<CaptureSequencer> mSequencer;
mutable Mutex mInputMutex;
bool mCaptureAvailable;
diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp
index 2f7d023..e5942dc 100644
--- a/services/camera/libcameraservice/camera2/Parameters.cpp
+++ b/services/camera/libcameraservice/camera2/Parameters.cpp
@@ -18,6 +18,9 @@
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
#include <math.h>
#include <stdlib.h>
@@ -738,11 +741,17 @@ status_t Parameters::initialize(const CameraMetadata *info) {
enableFaceDetect = false;
enableFocusMoveMessages = false;
- afTriggerCounter = 0;
+ afTriggerCounter = 1;
currentAfTriggerId = -1;
+ precaptureTriggerCounter = 1;
+
previewCallbackFlags = 0;
+ zslMode = false;
+
+ lightFx = LIGHTFX_NONE;
+
state = STOPPED;
paramsFlattened = params.flatten();
@@ -1310,6 +1319,10 @@ status_t Parameters::set(const String8& params) {
ALOGE("%s: Video stabilization not supported", __FUNCTION__);
}
+ // LIGHTFX
+ validatedParams.lightFx = lightFxStringToEnum(
+ newParams.get(CameraParameters::KEY_LIGHTFX));
+
/** Update internal parameters */
validatedParams.paramsFlattened = params;
@@ -1318,6 +1331,209 @@ status_t Parameters::set(const String8& params) {
return OK;
}
+status_t Parameters::updateRequest(CameraMetadata *request) const {
+ ATRACE_CALL();
+ status_t res;
+
+ uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
+ res = request->update(ANDROID_REQUEST_METADATA_MODE,
+ &metadataMode, 1);
+ if (res != OK) return res;
+
+ res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ previewFpsRange, 2);
+ if (res != OK) return res;
+
+ res = request->update(ANDROID_CONTROL_AWB_MODE,
+ &wbMode, 1);
+ if (res != OK) return res;
+
+ uint8_t reqWbLock = autoWhiteBalanceLock ?
+ ANDROID_CONTROL_AWB_LOCK_ON : ANDROID_CONTROL_AWB_LOCK_OFF;
+ res = request->update(ANDROID_CONTROL_AWB_LOCK,
+ &reqWbLock, 1);
+
+ res = request->update(ANDROID_CONTROL_EFFECT_MODE,
+ &effectMode, 1);
+ if (res != OK) return res;
+ res = request->update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+ &antibandingMode, 1);
+ if (res != OK) return res;
+
+ uint8_t reqControlMode =
+ (sceneMode == ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) ?
+ ANDROID_CONTROL_AUTO : ANDROID_CONTROL_USE_SCENE_MODE;
+ res = request->update(ANDROID_CONTROL_MODE,
+ &reqControlMode, 1);
+ if (res != OK) return res;
+ if (reqControlMode == ANDROID_CONTROL_USE_SCENE_MODE) {
+ res = request->update(ANDROID_CONTROL_SCENE_MODE,
+ &sceneMode, 1);
+ if (res != OK) return res;
+ }
+
+ uint8_t reqFlashMode = ANDROID_FLASH_OFF;
+ uint8_t reqAeMode;
+ switch (flashMode) {
+ case Parameters::FLASH_MODE_OFF:
+ reqAeMode = ANDROID_CONTROL_AE_ON; break;
+ case Parameters::FLASH_MODE_AUTO:
+ reqAeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH; break;
+ case Parameters::FLASH_MODE_ON:
+ reqAeMode = ANDROID_CONTROL_AE_ON_ALWAYS_FLASH; break;
+ case Parameters::FLASH_MODE_TORCH:
+ reqAeMode = ANDROID_CONTROL_AE_ON;
+ reqFlashMode = ANDROID_FLASH_TORCH;
+ break;
+ case Parameters::FLASH_MODE_RED_EYE:
+ reqAeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE; break;
+ default:
+ ALOGE("%s: Camera %d: Unknown flash mode %d", __FUNCTION__,
+ cameraId, flashMode);
+ return BAD_VALUE;
+ }
+ res = request->update(ANDROID_FLASH_MODE,
+ &reqFlashMode, 1);
+ if (res != OK) return res;
+ res = request->update(ANDROID_CONTROL_AE_MODE,
+ &reqAeMode, 1);
+ if (res != OK) return res;
+
+ uint8_t reqAeLock = autoExposureLock ?
+ ANDROID_CONTROL_AE_LOCK_ON : ANDROID_CONTROL_AE_LOCK_OFF;
+ res = request->update(ANDROID_CONTROL_AE_LOCK,
+ &reqAeLock, 1);
+
+ float reqFocusDistance = 0; // infinity focus in diopters
+ uint8_t reqFocusMode;
+ switch (focusMode) {
+ case Parameters::FOCUS_MODE_AUTO:
+ case Parameters::FOCUS_MODE_MACRO:
+ case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
+ case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
+ case Parameters::FOCUS_MODE_EDOF:
+ reqFocusMode = focusMode;
+ break;
+ case Parameters::FOCUS_MODE_INFINITY:
+ case Parameters::FOCUS_MODE_FIXED:
+ reqFocusMode = ANDROID_CONTROL_AF_OFF;
+ break;
+ default:
+ ALOGE("%s: Camera %d: Unknown focus mode %d", __FUNCTION__,
+ cameraId, focusMode);
+ return BAD_VALUE;
+ }
+ res = request->update(ANDROID_LENS_FOCUS_DISTANCE,
+ &reqFocusDistance, 1);
+ if (res != OK) return res;
+ res = request->update(ANDROID_CONTROL_AF_MODE,
+ &reqFocusMode, 1);
+ if (res != OK) return res;
+
+ size_t reqFocusingAreasSize = focusingAreas.size() * 5;
+ int32_t *reqFocusingAreas = new int32_t[reqFocusingAreasSize];
+ for (size_t i = 0; i < reqFocusingAreasSize; i += 5) {
+ if (focusingAreas[i].weight != 0) {
+ reqFocusingAreas[i + 0] =
+ normalizedXToArray(focusingAreas[i].left);
+ reqFocusingAreas[i + 1] =
+ normalizedYToArray(focusingAreas[i].top);
+ reqFocusingAreas[i + 2] =
+ normalizedXToArray(focusingAreas[i].right);
+ reqFocusingAreas[i + 3] =
+ normalizedYToArray(focusingAreas[i].bottom);
+ } else {
+ reqFocusingAreas[i + 0] = 0;
+ reqFocusingAreas[i + 1] = 0;
+ reqFocusingAreas[i + 2] = 0;
+ reqFocusingAreas[i + 3] = 0;
+ }
+ reqFocusingAreas[i + 4] = focusingAreas[i].weight;
+ }
+ res = request->update(ANDROID_CONTROL_AF_REGIONS,
+ reqFocusingAreas, reqFocusingAreasSize);
+ if (res != OK) return res;
+ delete[] reqFocusingAreas;
+
+ res = request->update(ANDROID_CONTROL_AE_EXP_COMPENSATION,
+ &exposureCompensation, 1);
+ if (res != OK) return res;
+
+ size_t reqMeteringAreasSize = meteringAreas.size() * 5;
+ int32_t *reqMeteringAreas = new int32_t[reqMeteringAreasSize];
+ for (size_t i = 0; i < reqMeteringAreasSize; i += 5) {
+ if (meteringAreas[i].weight != 0) {
+ reqMeteringAreas[i + 0] =
+ normalizedXToArray(meteringAreas[i].left);
+ reqMeteringAreas[i + 1] =
+ normalizedYToArray(meteringAreas[i].top);
+ reqMeteringAreas[i + 2] =
+ normalizedXToArray(meteringAreas[i].right);
+ reqMeteringAreas[i + 3] =
+ normalizedYToArray(meteringAreas[i].bottom);
+ } else {
+ reqMeteringAreas[i + 0] = 0;
+ reqMeteringAreas[i + 1] = 0;
+ reqMeteringAreas[i + 2] = 0;
+ reqMeteringAreas[i + 3] = 0;
+ }
+ reqMeteringAreas[i + 4] = meteringAreas[i].weight;
+ }
+ res = request->update(ANDROID_CONTROL_AE_REGIONS,
+ reqMeteringAreas, reqMeteringAreasSize);
+ if (res != OK) return res;
+
+ res = request->update(ANDROID_CONTROL_AWB_REGIONS,
+ reqMeteringAreas, reqMeteringAreasSize);
+ if (res != OK) return res;
+ delete[] reqMeteringAreas;
+
+ // Need to convert zoom index into a crop rectangle. The rectangle is
+ // chosen to maximize its area on the sensor
+
+ camera_metadata_ro_entry_t maxDigitalZoom =
+ staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM);
+ float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) /
+ (NUM_ZOOM_STEPS-1);
+ float zoomRatio = 1 + zoomIncrement * zoom;
+
+ float zoomLeft, zoomTop, zoomWidth, zoomHeight;
+ if (previewWidth >= previewHeight) {
+ zoomWidth = fastInfo.arrayWidth / zoomRatio;
+ zoomHeight = zoomWidth *
+ previewHeight / previewWidth;
+ } else {
+ zoomHeight = fastInfo.arrayHeight / zoomRatio;
+ zoomWidth = zoomHeight *
+ previewWidth / previewHeight;
+ }
+ zoomLeft = (fastInfo.arrayWidth - zoomWidth) / 2;
+ zoomTop = (fastInfo.arrayHeight - zoomHeight) / 2;
+
+ int32_t reqCropRegion[3] = { zoomLeft, zoomTop, zoomWidth };
+ res = request->update(ANDROID_SCALER_CROP_REGION,
+ reqCropRegion, 3);
+ if (res != OK) return res;
+
+ // TODO: Decide how to map recordingHint, or whether just to ignore it
+
+ uint8_t reqVstabMode = videoStabilization ?
+ ANDROID_CONTROL_VIDEO_STABILIZATION_ON :
+ ANDROID_CONTROL_VIDEO_STABILIZATION_OFF;
+ res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ &reqVstabMode, 1);
+ if (res != OK) return res;
+
+ uint8_t reqFaceDetectMode = enableFaceDetect ?
+ fastInfo.bestFaceDetectMode :
+ (uint8_t)ANDROID_STATS_FACE_DETECTION_OFF;
+ res = request->update(ANDROID_STATS_FACE_DETECT_MODE,
+ &reqFaceDetectMode, 1);
+ if (res != OK) return res;
+
+ return OK;
+}
+
const char* Parameters::getStateName(State state) {
#define CASE_ENUM_TO_CHAR(x) case x: return(#x); break;
switch(state) {
@@ -1518,6 +1734,16 @@ Parameters::Parameters::focusMode_t Parameters::focusModeStringToEnum(
Parameters::FOCUS_MODE_INVALID;
}
+Parameters::Parameters::lightFxMode_t Parameters::lightFxStringToEnum(
+ const char *lightFxMode) {
+ return
+ !strcmp(lightFxMode, CameraParameters::LIGHTFX_LOWLIGHT) ?
+ Parameters::LIGHTFX_LOWLIGHT :
+ !strcmp(lightFxMode, CameraParameters::LIGHTFX_HDR) ?
+ Parameters::LIGHTFX_HDR :
+ Parameters::LIGHTFX_NONE;
+}
+
status_t Parameters::parseAreas(const char *areasCStr,
Vector<Parameters::Area> *areas) {
static const size_t NUM_FIELDS = 5;
diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h
index 817d001..f768605 100644
--- a/services/camera/libcameraservice/camera2/Parameters.h
+++ b/services/camera/libcameraservice/camera2/Parameters.h
@@ -29,12 +29,17 @@
namespace android {
namespace camera2 {
-// Current camera state; this is the full state of the Camera under the old
-// camera API (contents of the CameraParameters object in a more-efficient
-// format, plus other state). The enum values are mostly based off the
-// corresponding camera2 enums, not the camera1 strings. A few are defined here
-// if they don't cleanly map to camera2 values.
+/**
+ * Current camera state; this is the full state of the Camera under the old
+ * camera API (contents of the CameraParameters object in a more-efficient
+ * format, plus other state). The enum values are mostly based off the
+ * corresponding camera2 enums, not the camera1 strings. A few are defined here
+ * if they don't cleanly map to camera2 values.
+ */
struct Parameters {
+ /**
+ * Parameters and other state
+ */
int cameraId;
int cameraFacing;
@@ -104,6 +109,12 @@ struct Parameters {
bool recordingHint;
bool videoStabilization;
+ enum lightFxMode_t {
+ LIGHTFX_NONE = 0,
+ LIGHTFX_LOWLIGHT,
+ LIGHTFX_HDR
+ } lightFx;
+
String8 paramsFlattened;
// These parameters are also part of the camera API-visible state, but not
@@ -117,9 +128,13 @@ struct Parameters {
int currentAfTriggerId;
bool afInMotion;
+ int precaptureTriggerCounter;
+
uint32_t previewCallbackFlags;
bool previewCallbackOneShot;
+ bool zslMode;
+
// Overall camera state
enum State {
DISCONNECTED,
@@ -132,7 +147,7 @@ struct Parameters {
} state;
// Number of zoom steps to simulate
- static const unsigned int NUM_ZOOM_STEPS = 10;
+ static const unsigned int NUM_ZOOM_STEPS = 30;
// Full static camera info, object owned by someone else, such as
// Camera2Device.
@@ -149,7 +164,9 @@ struct Parameters {
int32_t maxFaces;
} fastInfo;
- // Parameter manipulation and setup methods
+ /**
+ * Parameter manipulation and setup methods
+ */
Parameters(int cameraId, int cameraFacing);
~Parameters();
@@ -170,6 +187,9 @@ struct Parameters {
// Validate and update camera parameters based on new settings
status_t set(const String8 &params);
+ // Update passed-in request for common parameters
+ status_t updateRequest(CameraMetadata *request) const;
+
// Static methods for debugging and converting between camera1 and camera2
// parameters
@@ -184,6 +204,7 @@ struct Parameters {
static int sceneModeStringToEnum(const char *sceneMode);
static flashMode_t flashModeStringToEnum(const char *flashMode);
static focusMode_t focusModeStringToEnum(const char *focusMode);
+ static lightFxMode_t lightFxStringToEnum(const char *lightFxMode);
static status_t parseAreas(const char *areasCStr,
Vector<Area> *areas);
static status_t validateAreas(const Vector<Area> &areas,
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
new file mode 100644
index 0000000..ac02afc
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
@@ -0,0 +1,383 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera2Client::ZslProcessor"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include "ZslProcessor.h"
+#include <gui/SurfaceTextureClient.h>
+#include "../Camera2Device.h"
+#include "../Camera2Client.h"
+
+
+namespace android {
+namespace camera2 {
+
+ZslProcessor::ZslProcessor(
+ wp<Camera2Client> client,
+ wp<CaptureSequencer> sequencer):
+ Thread(false),
+ mState(RUNNING),
+ mClient(client),
+ mSequencer(sequencer),
+ mZslBufferAvailable(false),
+ mZslStreamId(NO_STREAM),
+ mZslReprocessStreamId(NO_STREAM),
+ mFrameListHead(0),
+ mZslQueueHead(0),
+ mZslQueueTail(0) {
+ mZslQueue.insertAt(0, kZslBufferDepth);
+ mFrameList.insertAt(0, kFrameListDepth);
+ sp<CaptureSequencer> captureSequencer = mSequencer.promote();
+ if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
+}
+
+ZslProcessor::~ZslProcessor() {
+ ALOGV("%s: Exit", __FUNCTION__);
+ deleteStream();
+}
+
+void ZslProcessor::onFrameAvailable() {
+ Mutex::Autolock l(mInputMutex);
+ if (!mZslBufferAvailable) {
+ mZslBufferAvailable = true;
+ mZslBufferAvailableSignal.signal();
+ }
+}
+
+void ZslProcessor::onFrameAvailable(int32_t frameId, CameraMetadata &frame) {
+ Mutex::Autolock l(mInputMutex);
+ camera_metadata_entry_t entry;
+ entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+ nsecs_t timestamp = entry.data.i64[0];
+ ALOGVV("Got preview frame for timestamp %lld", timestamp);
+
+ if (mState != RUNNING) return;
+
+ mFrameList.editItemAt(mFrameListHead).acquire(frame);
+ mFrameListHead = (mFrameListHead + 1) % kFrameListDepth;
+
+ findMatchesLocked();
+}
+
+void ZslProcessor::onBufferReleased(buffer_handle_t *handle) {
+ Mutex::Autolock l(mInputMutex);
+
+ buffer_handle_t *expectedHandle =
+ &(mZslQueue[mZslQueueTail].buffer.mGraphicBuffer->handle);
+
+ if (handle != expectedHandle) {
+ ALOGE("%s: Expected buffer %p, got buffer %p",
+ __FUNCTION__, expectedHandle, handle);
+ }
+
+ mState = RUNNING;
+}
+
+status_t ZslProcessor::updateStream(const Parameters &params) {
+ ATRACE_CALL();
+ ALOGV("%s: Configuring ZSL streams", __FUNCTION__);
+ status_t res;
+
+ Mutex::Autolock l(mInputMutex);
+
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) return OK;
+ sp<Camera2Device> device = client->getCameraDevice();
+
+ if (mZslConsumer == 0) {
+ // Create CPU buffer queue endpoint
+ mZslConsumer = new BufferItemConsumer(
+ GRALLOC_USAGE_HW_CAMERA_ZSL,
+ kZslBufferDepth,
+ true);
+ mZslConsumer->setFrameAvailableListener(this);
+ mZslConsumer->setName(String8("Camera2Client::ZslConsumer"));
+ mZslWindow = new SurfaceTextureClient(
+ mZslConsumer->getProducerInterface());
+ }
+
+ if (mZslStreamId != NO_STREAM) {
+ // Check if stream parameters have to change
+ uint32_t currentWidth, currentHeight;
+ res = device->getStreamInfo(mZslStreamId,
+ &currentWidth, &currentHeight, 0);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Error querying capture output stream info: "
+ "%s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ return res;
+ }
+ if (currentWidth != (uint32_t)params.pictureWidth ||
+ currentHeight != (uint32_t)params.pictureHeight) {
+ res = device->deleteStream(mZslReprocessStreamId);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to delete old reprocess stream "
+ "for ZSL: %s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ return res;
+ }
+ res = device->deleteStream(mZslStreamId);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to delete old output stream "
+ "for ZSL: %s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ return res;
+ }
+ mZslStreamId = NO_STREAM;
+ }
+ }
+
+ if (mZslStreamId == NO_STREAM) {
+ // Create stream for HAL production
+ res = device->createStream(mZslWindow,
+ params.pictureWidth, params.pictureHeight,
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 0,
+ &mZslStreamId);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Can't create output stream for ZSL: "
+ "%s (%d)", __FUNCTION__, client->getCameraId(),
+ strerror(-res), res);
+ return res;
+ }
+ res = device->createReprocessStreamFromStream(mZslStreamId,
+ &mZslReprocessStreamId);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Can't create reprocess stream for ZSL: "
+ "%s (%d)", __FUNCTION__, client->getCameraId(),
+ strerror(-res), res);
+ return res;
+ }
+ }
+ client->registerFrameListener(Camera2Client::kPreviewRequestId, this);
+
+ return OK;
+}
+
+status_t ZslProcessor::deleteStream() {
+ ATRACE_CALL();
+ status_t res;
+
+ Mutex::Autolock l(mInputMutex);
+
+ if (mZslStreamId != NO_STREAM) {
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) return OK;
+ sp<Camera2Device> device = client->getCameraDevice();
+
+ device->deleteStream(mZslReprocessStreamId);
+ mZslReprocessStreamId = NO_STREAM;
+ device->deleteStream(mZslStreamId);
+
+ mZslWindow.clear();
+ mZslConsumer.clear();
+
+ mZslStreamId = NO_STREAM;
+ }
+ return OK;
+}
+
+int ZslProcessor::getStreamId() const {
+ Mutex::Autolock l(mInputMutex);
+ return mZslStreamId;
+}
+
+int ZslProcessor::getReprocessStreamId() const {
+ Mutex::Autolock l(mInputMutex);
+ return mZslReprocessStreamId;
+}
+
+status_t ZslProcessor::pushToReprocess(int32_t requestId) {
+ ALOGV("%s: Send in reprocess request with id %d",
+ __FUNCTION__, requestId);
+ Mutex::Autolock l(mInputMutex);
+ status_t res;
+ sp<Camera2Client> client = mClient.promote();
+
+ if (client == 0) return false;
+
+ if (mZslQueueTail != mZslQueueHead) {
+ buffer_handle_t *handle =
+ &(mZslQueue[mZslQueueTail].buffer.mGraphicBuffer->handle);
+ CameraMetadata request = mZslQueue[mZslQueueTail].frame;
+ uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
+ res = request.update(ANDROID_REQUEST_TYPE,
+ &requestType, 1);
+ uint8_t inputStreams[1] = { mZslReprocessStreamId };
+ if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS,
+ inputStreams, 1);
+ uint8_t outputStreams[1] = { client->getCaptureStreamId() };
+ if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+ outputStreams, 1);
+ res = request.update(ANDROID_REQUEST_ID,
+ &requestId, 1);
+
+ if (res != OK ) {
+ ALOGE("%s: Unable to update frame to a reprocess request", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ res = client->getCameraDevice()->pushReprocessBuffer(mZslReprocessStreamId,
+ handle, this);
+ if (res != OK) {
+ ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ res = client->getCameraDevice()->capture(request);
+ if (res != OK ) {
+ ALOGE("%s: Unable to send ZSL reprocess request to capture: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ mState = LOCKED;
+ } else {
+ ALOGE("%s: Nothing to push", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+void ZslProcessor::dump(int fd, const Vector<String16>& args) const {
+}
+
+bool ZslProcessor::threadLoop() {
+ status_t res;
+
+ {
+ Mutex::Autolock l(mInputMutex);
+ while (!mZslBufferAvailable) {
+ res = mZslBufferAvailableSignal.waitRelative(mInputMutex,
+ kWaitDuration);
+ if (res == TIMED_OUT) return true;
+ }
+ mZslBufferAvailable = false;
+ }
+
+ do {
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) return false;
+ res = processNewZslBuffer(client);
+ } while (res == OK);
+
+ return true;
+}
+
+status_t ZslProcessor::processNewZslBuffer(sp<Camera2Client> &client) {
+ ATRACE_CALL();
+ status_t res;
+ Mutex::Autolock l(mInputMutex);
+
+ if (mState == LOCKED) {
+ BufferItemConsumer::BufferItem item;
+ res = mZslConsumer->acquireBuffer(&item);
+ if (res != OK) {
+ if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
+ ALOGE("%s: Camera %d: Error receiving ZSL image buffer: "
+ "%s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ }
+ return res;
+ }
+ mZslConsumer->releaseBuffer(item);
+ return OK;
+ }
+
+ ALOGVV("Got ZSL buffer: head: %d, tail: %d", mZslQueueHead, mZslQueueTail);
+
+ if ( (mZslQueueHead + 1) % kZslBufferDepth == mZslQueueTail) {
+ mZslConsumer->releaseBuffer(mZslQueue[mZslQueueTail].buffer);
+ mZslQueue.replaceAt(mZslQueueTail);
+ mZslQueueTail = (mZslQueueTail + 1) % kZslBufferDepth;
+ }
+
+ ZslPair &queueHead = mZslQueue.editItemAt(mZslQueueHead);
+
+ res = mZslConsumer->acquireBuffer(&(queueHead.buffer));
+ if (res != OK) {
+ if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
+ ALOGE("%s: Camera %d: Error receiving ZSL image buffer: "
+ "%s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ }
+ return res;
+ }
+ queueHead.frame.release();
+
+ mZslQueueHead = (mZslQueueHead + 1) % kZslBufferDepth;
+
+ ALOGVV(" Added buffer, timestamp %lld", queueHead.buffer.mTimestamp);
+
+ findMatchesLocked();
+
+ return OK;
+}
+
+void ZslProcessor::findMatchesLocked() {
+ for (size_t i = 0; i < mZslQueue.size(); i++) {
+ ZslPair &queueEntry = mZslQueue.editItemAt(i);
+ nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
+ if (queueEntry.frame.isEmpty() && bufferTimestamp != 0) {
+ // Have buffer, no matching frame. Look for one
+ for (size_t j = 0; j < mFrameList.size(); j++) {
+ bool match = false;
+ CameraMetadata &frame = mFrameList.editItemAt(j);
+ if (!frame.isEmpty()) {
+ camera_metadata_entry_t entry;
+ entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+ if (entry.count == 0) {
+ ALOGE("%s: Can't find timestamp in frame!",
+ __FUNCTION__);
+ continue;
+ }
+ nsecs_t frameTimestamp = entry.data.i64[0];
+ if (bufferTimestamp == frameTimestamp) {
+ ALOGVV("%s: Found match %lld", __FUNCTION__,
+ frameTimestamp);
+ match = true;
+ } else {
+ int64_t delta = abs(bufferTimestamp - frameTimestamp);
+ if ( delta < 1000000) {
+ ALOGVV("%s: Found close match %lld (delta %lld)",
+ __FUNCTION__, bufferTimestamp, delta);
+ match = true;
+ }
+ }
+ }
+ if (match) {
+ queueEntry.frame.acquire(frame);
+ break;
+ }
+ }
+ }
+ }
+}
+
+}; // namespace camera2
+}; // namespace android
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/camera2/ZslProcessor.h
new file mode 100644
index 0000000..74921a3
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/ZslProcessor.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR_H
+
+#include <utils/Thread.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include <gui/BufferItemConsumer.h>
+#include "Parameters.h"
+#include "FrameProcessor.h"
+#include "CameraMetadata.h"
+#include "Camera2Heap.h"
+#include "../Camera2Device.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+class CaptureSequencer;
+
/***
 * ZSL queue processing
 *
 * Maintains a circular queue of ZSL image buffers received from the HAL and
 * a parallel list of preview-frame metadata, pairs them up by sensor
 * timestamp, and pushes a matched pair back to the device as a reprocess
 * capture when requested by the CaptureSequencer.
 */
class ZslProcessor:
            virtual public Thread,
            virtual public BufferItemConsumer::FrameAvailableListener,
            virtual public FrameProcessor::FilteredListener,
            virtual public Camera2Device::BufferReleasedListener {
  public:
    ZslProcessor(wp<Camera2Client> client, wp<CaptureSequencer> sequencer);
    ~ZslProcessor();

    // From mZslConsumer
    virtual void onFrameAvailable();
    // From FrameProcessor
    virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame);

    // From Camera2Device: the HAL has finished reprocessing a pushed buffer.
    virtual void onBufferReleased(buffer_handle_t *handle);

    // Create/resize the ZSL output and reprocess streams to match params.
    status_t updateStream(const Parameters &params);
    // Tear down both streams and release the consumer endpoints.
    status_t deleteStream();
    int getStreamId() const;
    int getReprocessStreamId() const;

    // Submit the oldest matched buffer/frame pair as a reprocess capture.
    status_t pushToReprocess(int32_t requestId);

    void dump(int fd, const Vector<String16>& args) const;
  private:
    // Poll interval for the buffer-available wait in threadLoop().
    static const nsecs_t kWaitDuration = 10000000; // 10 ms

    // RUNNING: buffers flow through the queue normally.
    // LOCKED: queue contents are pinned while a reprocess capture is
    //         in flight; new buffers are dropped on arrival.
    enum {
        RUNNING,
        LOCKED
    } mState;

    wp<Camera2Client> mClient;
    wp<CaptureSequencer> mSequencer;

    // Guards all state below, including queue indices and stream ids.
    mutable Mutex mInputMutex;
    bool mZslBufferAvailable;
    Condition mZslBufferAvailableSignal;

    enum {
        NO_STREAM = -1
    };

    int mZslStreamId;
    int mZslReprocessStreamId;
    sp<BufferItemConsumer> mZslConsumer;
    sp<ANativeWindow> mZslWindow;

    // A ZSL image buffer together with its timestamp-matched frame metadata.
    struct ZslPair {
        BufferItemConsumer::BufferItem buffer;
        CameraMetadata frame;
    };

    static const size_t kZslBufferDepth = 3;
    static const size_t kFrameListDepth = kZslBufferDepth * 2;
    // Circular list of recent preview-frame metadata (head = next write).
    Vector<CameraMetadata> mFrameList;
    size_t mFrameListHead;

    ZslPair mNextPair;

    // Circular buffer queue; entries between tail and head are valid.
    Vector<ZslPair> mZslQueue;
    size_t mZslQueueHead;
    size_t mZslQueueTail;

    virtual bool threadLoop();

    // Pull one buffer from mZslConsumer into the queue (or drop it if
    // LOCKED).
    status_t processNewZslBuffer(sp<Camera2Client> &client);

    // Match up entries from frame list to buffers in ZSL queue
    void findMatchesLocked();
};
+
+
+}; //namespace camera2
+}; //namespace android
+
+#endif