Diffstat (limited to 'services/camera')
-rw-r--r--  services/camera/libcameraservice/Android.mk | 15
-rw-r--r--  services/camera/libcameraservice/CameraDeviceFactory.cpp | 2
-rw-r--r--  services/camera/libcameraservice/CameraService.cpp | 133
-rw-r--r--  services/camera/libcameraservice/CameraService.h | 44
-rw-r--r--  services/camera/libcameraservice/api1/Camera2Client.cpp | 10
-rw-r--r--  services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp | 8
-rw-r--r--  services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp | 24
-rw-r--r--  services/camera/libcameraservice/api1/client2/CaptureSequencer.h | 5
-rw-r--r--  services/camera/libcameraservice/api1/client2/FrameProcessor.cpp | 8
-rw-r--r--  services/camera/libcameraservice/api1/client2/FrameProcessor.h | 2
-rw-r--r--  services/camera/libcameraservice/api1/client2/JpegProcessor.cpp | 8
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.cpp | 215
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.h | 13
-rw-r--r--  services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp | 8
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor.cpp | 17
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor.h | 3
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp | 18
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor3.h | 4
-rw-r--r--  services/camera/libcameraservice/api2/CameraDeviceClient.cpp | 216
-rw-r--r--  services/camera/libcameraservice/api2/CameraDeviceClient.h | 26
-rw-r--r--  services/camera/libcameraservice/api_pro/ProCamera2Client.cpp | 9
-rw-r--r--  services/camera/libcameraservice/api_pro/ProCamera2Client.h | 5
-rw-r--r--  services/camera/libcameraservice/common/Camera2ClientBase.cpp | 17
-rw-r--r--  services/camera/libcameraservice/common/Camera2ClientBase.h | 7
-rw-r--r--  services/camera/libcameraservice/common/CameraDeviceBase.h | 46
-rw-r--r--  services/camera/libcameraservice/common/FrameProcessorBase.cpp | 47
-rw-r--r--  services/camera/libcameraservice/common/FrameProcessorBase.h | 8
-rw-r--r--  services/camera/libcameraservice/device2/Camera2Device.cpp | 56
-rw-r--r--  services/camera/libcameraservice/device2/Camera2Device.h | 16
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Device.cpp | 478
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Device.h | 94
-rw-r--r--  services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp | 58
-rw-r--r--  services/camera/libcameraservice/device3/Camera3IOStreamBase.h | 12
-rw-r--r--  services/camera/libcameraservice/device3/Camera3InputStream.cpp | 34
-rw-r--r--  services/camera/libcameraservice/device3/Camera3InputStream.h | 2
-rw-r--r--  services/camera/libcameraservice/device3/Camera3OutputStream.cpp | 4
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Stream.cpp | 99
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Stream.h | 26
-rw-r--r--  services/camera/libcameraservice/device3/Camera3ZslStream.cpp | 14
-rw-r--r--  services/camera/libcameraservice/device3/Camera3ZslStream.h | 4
-rw-r--r--  services/camera/libcameraservice/gui/RingBufferConsumer.h | 2
41 files changed, 1215 insertions, 602 deletions
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 51ba698..2f485b9 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -1,3 +1,17 @@
+# Copyright 2010 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
LOCAL_PATH:= $(call my-dir)
#
@@ -53,6 +67,7 @@ LOCAL_SHARED_LIBRARIES:= \
LOCAL_C_INCLUDES += \
system/media/camera/include \
+ system/media/private/camera/include \
external/jpeg
diff --git a/services/camera/libcameraservice/CameraDeviceFactory.cpp b/services/camera/libcameraservice/CameraDeviceFactory.cpp
index 7fdf304..bfef50e 100644
--- a/services/camera/libcameraservice/CameraDeviceFactory.cpp
+++ b/services/camera/libcameraservice/CameraDeviceFactory.cpp
@@ -46,6 +46,8 @@ sp<CameraDeviceBase> CameraDeviceFactory::createDevice(int cameraId) {
device = new Camera2Device(cameraId);
break;
case CAMERA_DEVICE_API_VERSION_3_0:
+ case CAMERA_DEVICE_API_VERSION_3_1:
+ case CAMERA_DEVICE_API_VERSION_3_2:
device = new Camera3Device(cameraId);
break;
default:
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 9ce7daf..fe1e707 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -1,24 +1,24 @@
/*
-**
-** Copyright (C) 2008, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
#define LOG_TAG "CameraService"
//#define LOG_NDEBUG 0
#include <stdio.h>
+#include <string.h>
#include <sys/types.h>
#include <pthread.h>
@@ -32,10 +32,13 @@
#include <gui/Surface.h>
#include <hardware/hardware.h>
#include <media/AudioSystem.h>
+#include <media/IMediaHTTPService.h>
#include <media/mediaplayer.h>
#include <utils/Errors.h>
#include <utils/Log.h>
#include <utils/String16.h>
+#include <utils/Trace.h>
+#include <system/camera_vendor_tags.h>
#include "CameraService.h"
#include "api1/CameraClient.h"
@@ -130,6 +133,12 @@ void CameraService::onFirstRef()
mModule->set_callbacks(this);
}
+ VendorTagDescriptor::clearGlobalVendorTagDescriptor();
+
+ if (mModule->common.module_api_version >= CAMERA_MODULE_API_VERSION_2_2) {
+ setUpVendorTags();
+ }
+
CameraDeviceFactory::registerService(this);
}
}
@@ -141,6 +150,7 @@ CameraService::~CameraService() {
}
}
+ VendorTagDescriptor::clearGlobalVendorTagDescriptor();
gCameraService = NULL;
}
@@ -269,6 +279,22 @@ status_t CameraService::getCameraCharacteristics(int cameraId,
return ret;
}
+status_t CameraService::getCameraVendorTagDescriptor(/*out*/sp<VendorTagDescriptor>& desc) {
+ if (!mModule) {
+ ALOGE("%s: camera hardware module doesn't exist", __FUNCTION__);
+ return -ENODEV;
+ }
+
+ if (mModule->common.module_api_version < CAMERA_MODULE_API_VERSION_2_2) {
+ // TODO: Remove this check once HAL1 shim is in place.
+ ALOGW("%s: Only HAL module version V2.2 or higher supports vendor tags", __FUNCTION__);
+ return -EOPNOTSUPP;
+ }
+
+ desc = VendorTagDescriptor::getGlobalVendorTagDescriptor();
+ return OK;
+}
+
int CameraService::getDeviceVersion(int cameraId, int* facing) {
struct camera_info info;
if (mModule->get_camera_info(cameraId, &info) != OK) {
@@ -298,6 +324,8 @@ bool CameraService::isValidCameraId(int cameraId) {
case CAMERA_DEVICE_API_VERSION_2_0:
case CAMERA_DEVICE_API_VERSION_2_1:
case CAMERA_DEVICE_API_VERSION_3_0:
+ case CAMERA_DEVICE_API_VERSION_3_1:
+ case CAMERA_DEVICE_API_VERSION_3_2:
return true;
default:
return false;
@@ -306,6 +334,44 @@ bool CameraService::isValidCameraId(int cameraId) {
return false;
}
+bool CameraService::setUpVendorTags() {
+ vendor_tag_ops_t vOps = vendor_tag_ops_t();
+
+ // Check if vendor operations have been implemented
+ if (mModule->get_vendor_tag_ops == NULL) {
+ ALOGI("%s: No vendor tags defined for this device.", __FUNCTION__);
+ return false;
+ }
+
+ ATRACE_BEGIN("camera3->get_metadata_vendor_tag_ops");
+ mModule->get_vendor_tag_ops(&vOps);
+ ATRACE_END();
+
+ // Ensure all vendor operations are present
+ if (vOps.get_tag_count == NULL || vOps.get_all_tags == NULL ||
+ vOps.get_section_name == NULL || vOps.get_tag_name == NULL ||
+ vOps.get_tag_type == NULL) {
+ ALOGE("%s: Vendor tag operations not fully defined. Ignoring definitions."
+ , __FUNCTION__);
+ return false;
+ }
+
+ // Read all vendor tag definitions into a descriptor
+ sp<VendorTagDescriptor> desc;
+ status_t res;
+ if ((res = VendorTagDescriptor::createDescriptorFromOps(&vOps, /*out*/desc))
+ != OK) {
+ ALOGE("%s: Could not generate descriptor from vendor tag operations,"
+ "received error %s (%d). Camera clients will not be able to use"
+ "vendor tags", __FUNCTION__, strerror(res), res);
+ return false;
+ }
+
+ // Set the global descriptor to use with camera metadata
+ VendorTagDescriptor::setAsGlobalVendorTagDescriptor(desc);
+ return true;
+}
+
status_t CameraService::validateConnect(int cameraId,
/*inout*/
int& clientUid) const {
@@ -455,6 +521,8 @@ status_t CameraService::connect(
case CAMERA_DEVICE_API_VERSION_2_0:
case CAMERA_DEVICE_API_VERSION_2_1:
case CAMERA_DEVICE_API_VERSION_3_0:
+ case CAMERA_DEVICE_API_VERSION_3_1:
+ case CAMERA_DEVICE_API_VERSION_3_2:
client = new Camera2Client(this, cameraClient,
clientPackageName, cameraId,
facing, callingPid, clientUid, getpid(),
@@ -507,6 +575,11 @@ status_t CameraService::connectPro(
/*out*/
sp<IProCameraUser>& device)
{
+ if (cameraCb == 0) {
+ ALOGE("%s: Callback must not be null", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
String8 clientName8(clientPackageName);
int callingPid = getCallingPid();
@@ -541,6 +614,8 @@ status_t CameraService::connectPro(
case CAMERA_DEVICE_API_VERSION_2_0:
case CAMERA_DEVICE_API_VERSION_2_1:
case CAMERA_DEVICE_API_VERSION_3_0:
+ case CAMERA_DEVICE_API_VERSION_3_1:
+ case CAMERA_DEVICE_API_VERSION_3_2:
client = new ProCamera2Client(this, cameraCb, String16(),
cameraId, facing, callingPid, USE_CALLING_UID, getpid());
break;
@@ -619,6 +694,8 @@ status_t CameraService::connectDevice(
case CAMERA_DEVICE_API_VERSION_2_0:
case CAMERA_DEVICE_API_VERSION_2_1:
case CAMERA_DEVICE_API_VERSION_3_0:
+ case CAMERA_DEVICE_API_VERSION_3_1:
+ case CAMERA_DEVICE_API_VERSION_3_2:
client = new CameraDeviceClient(this, cameraCb, String16(),
cameraId, facing, callingPid, USE_CALLING_UID, getpid());
break;
@@ -876,7 +953,7 @@ void CameraService::setCameraFree(int cameraId) {
MediaPlayer* CameraService::newMediaPlayer(const char *file) {
MediaPlayer* mp = new MediaPlayer();
- if (mp->setDataSource(file, NULL) == NO_ERROR) {
+ if (mp->setDataSource(NULL /* httpService */, file, NULL) == NO_ERROR) {
mp->setAudioStreamType(AUDIO_STREAM_ENFORCED_AUDIBLE);
mp->prepare();
} else {
@@ -1044,7 +1121,8 @@ void CameraService::BasicClient::opChanged(int32_t op, const String16& packageNa
// Reset the client PID to allow server-initiated disconnect,
// and to prevent further calls by client.
mClientPid = getCallingPid();
- notifyError();
+ CaptureResultExtras resultExtras; // a dummy result (invalid)
+ notifyError(ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE, resultExtras);
disconnect();
}
}
@@ -1073,7 +1151,8 @@ CameraService::Client* CameraService::Client::getClientFromCookie(void* user) {
return client;
}
-void CameraService::Client::notifyError() {
+void CameraService::Client::notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
+ const CaptureResultExtras& resultExtras) {
mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0);
}
@@ -1127,7 +1206,8 @@ CameraService::ProClient::ProClient(const sp<CameraService>& cameraService,
CameraService::ProClient::~ProClient() {
}
-void CameraService::ProClient::notifyError() {
+void CameraService::ProClient::notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
+ const CaptureResultExtras& resultExtras) {
mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0);
}
@@ -1182,7 +1262,20 @@ status_t CameraService::dump(int fd, const Vector<String16>& args) {
result.appendFormat("Camera module author: %s\n",
mModule->common.author);
result.appendFormat("Number of camera devices: %d\n\n", mNumberOfCameras);
+
+ sp<VendorTagDescriptor> desc = VendorTagDescriptor::getGlobalVendorTagDescriptor();
+ if (desc == NULL) {
+ result.appendFormat("Vendor tags left unimplemented.\n");
+ } else {
+ result.appendFormat("Vendor tag definitions:\n");
+ }
+
write(fd, result.string(), result.size());
+
+ if (desc != NULL) {
+ desc->dump(fd, /*verbosity*/2, /*indentation*/4);
+ }
+
for (int i = 0; i < mNumberOfCameras; i++) {
result = String8::format("Camera %d static information:\n", i);
camera_info info;
@@ -1207,7 +1300,7 @@ status_t CameraService::dump(int fd, const Vector<String16>& args) {
result.appendFormat(" Device static metadata:\n");
write(fd, result.string(), result.size());
dump_indented_camera_metadata(info.static_camera_characteristics,
- fd, 2, 4);
+ fd, /*verbosity*/2, /*indentation*/4);
} else {
write(fd, result.string(), result.size());
}
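
For reference, the vendor tag plumbing added above ends with a global VendorTagDescriptor that clients can retrieve through the new getCameraVendorTagDescriptor() entry point. Below is a minimal caller-side sketch; the `service` handle and loop variables are illustrative, and the per-tag accessors are assumed to mirror the vendor_tag_ops fields the descriptor is built from (get_tag_count, get_all_tags, get_section_name, get_tag_name, get_tag_type):

    // Fetch the descriptor published by setUpVendorTags().
    sp<VendorTagDescriptor> desc;
    status_t res = service->getCameraVendorTagDescriptor(/*out*/desc);  // 'service': hypothetical ICameraService handle
    if (res == OK && desc != NULL) {
        // Assumed accessors mirroring the vendor_tag_ops callbacks.
        int tagCount = desc->getTagCount();
        Vector<uint32_t> tags;
        tags.resize(tagCount);
        desc->getTagArray(tags.editArray());
        for (int i = 0; i < tagCount; i++) {
            ALOGV("Vendor tag %s.%s (type %d)",
                    desc->getSectionName(tags[i]),
                    desc->getTagName(tags[i]),
                    desc->getTagType(tags[i]));
        }
    }
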
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index ad6a582..76ea7be 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -1,19 +1,18 @@
/*
-**
-** Copyright (C) 2008, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
#ifndef ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
#define ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
@@ -31,6 +30,8 @@
#include <camera/IProCameraCallbacks.h>
#include <camera/camera2/ICameraDeviceUser.h>
#include <camera/camera2/ICameraDeviceCallbacks.h>
+#include <camera/VendorTagDescriptor.h>
+#include <camera/CaptureResult.h>
#include <camera/ICameraServiceListener.h>
@@ -73,6 +74,7 @@ public:
struct CameraInfo* cameraInfo);
virtual status_t getCameraCharacteristics(int cameraId,
CameraMetadata* cameraInfo);
+ virtual status_t getCameraVendorTagDescriptor(/*out*/ sp<VendorTagDescriptor>& desc);
virtual status_t connect(const sp<ICameraClient>& cameraClient, int cameraId,
const String16& clientPackageName, int clientUid,
@@ -181,7 +183,9 @@ public:
status_t finishCameraOps();
// Notify client about a fatal error
- virtual void notifyError() = 0;
+ virtual void notifyError(
+ ICameraDeviceCallbacks::CameraErrorCode errorCode,
+ const CaptureResultExtras& resultExtras) = 0;
private:
AppOpsManager mAppOpsManager;
@@ -258,7 +262,8 @@ public:
// convert client from cookie. Client lock should be acquired before getting Client.
static Client* getClientFromCookie(void* user);
- virtual void notifyError();
+ virtual void notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
+ const CaptureResultExtras& resultExtras);
// Initialized in constructor
@@ -306,7 +311,8 @@ public:
virtual void onExclusiveLockStolen() = 0;
protected:
- virtual void notifyError();
+ virtual void notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
+ const CaptureResultExtras& resultExtras);
sp<IProCameraCallbacks> mRemoteCallback;
}; // class ProClient
@@ -387,6 +393,8 @@ private:
// Helpers
bool isValidCameraId(int cameraId);
+
+ bool setUpVendorTags();
};
} // namespace android
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index af23557..0447979 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -118,7 +118,9 @@ status_t Camera2Client::initialize(camera_module_t *module)
mZslProcessorThread = zslProc;
break;
}
- case CAMERA_DEVICE_API_VERSION_3_0:{
+ case CAMERA_DEVICE_API_VERSION_3_0:
+ case CAMERA_DEVICE_API_VERSION_3_1:
+ case CAMERA_DEVICE_API_VERSION_3_2: {
sp<ZslProcessor3> zslProc =
new ZslProcessor3(this, mCaptureSequencer);
mZslProcessor = zslProc;
@@ -238,7 +240,7 @@ status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
result.append(" Scene mode: ");
switch (p.sceneMode) {
- case ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED:
+ case ANDROID_CONTROL_SCENE_MODE_DISABLED:
result.append("AUTO\n"); break;
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
@@ -816,6 +818,8 @@ status_t Camera2Client::startPreviewL(Parameters &params, bool restart) {
return res;
}
outputStreams.push(getZslStreamId());
+ } else {
+ mZslProcessor->deleteStream();
}
outputStreams.push(getPreviewStreamId());
@@ -1162,7 +1166,7 @@ status_t Camera2Client::autoFocus() {
* Handle quirk mode for AF in scene modes
*/
if (l.mParameters.quirks.triggerAfWithAuto &&
- l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED &&
+ l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED &&
l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO &&
!l.mParameters.focusingAreas[0].isEmpty()) {
ALOGV("%s: Quirk: Switching from focusMode %d to AUTO",
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index d2ac79c..c266213 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -110,11 +110,13 @@ status_t CallbackProcessor::updateStream(const Parameters &params) {
if (!mCallbackToApp && mCallbackConsumer == 0) {
// Create CPU buffer queue endpoint, since app hasn't given us one
// Make it async to avoid disconnect deadlocks
- sp<BufferQueue> bq = new BufferQueue();
- mCallbackConsumer = new CpuConsumer(bq, kCallbackHeapCount);
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mCallbackConsumer = new CpuConsumer(consumer, kCallbackHeapCount);
mCallbackConsumer->setFrameAvailableListener(this);
mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer"));
- mCallbackWindow = new Surface(bq);
+ mCallbackWindow = new Surface(producer);
}
if (mCallbackStreamId != NO_STREAM) {
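
The same BufferQueue change recurs in JpegProcessor, StreamingProcessor, and ZslProcessor below: rather than constructing a BufferQueue directly and wrapping it on both ends, the producer and consumer endpoints are now created in a single call. A condensed sketch of the new pattern, using only calls that appear in these hunks (kHeapCount and the listener are illustrative placeholders):

    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);  // replaces 'new BufferQueue()'

    // The consumer endpoint drives the CPU-side consumer...
    sp<CpuConsumer> cpuConsumer = new CpuConsumer(consumer, kHeapCount);
    cpuConsumer->setFrameAvailableListener(listener);
    // ...while the producer endpoint backs the Surface handed to the camera device.
    sp<Surface> window = new Surface(producer);
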
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index f5c28ed..8268f65 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -106,13 +106,12 @@ void CaptureSequencer::notifyAutoExposure(uint8_t newState, int triggerId) {
}
}
-void CaptureSequencer::onFrameAvailable(int32_t requestId,
- const CameraMetadata &frame) {
- ALOGV("%s: Listener found new frame", __FUNCTION__);
+void CaptureSequencer::onResultAvailable(const CaptureResult &result) {
ATRACE_CALL();
+ ALOGV("%s: New result available.", __FUNCTION__);
Mutex::Autolock l(mInputMutex);
- mNewFrameId = requestId;
- mNewFrame = frame;
+ mNewFrameId = result.mResultExtras.requestId;
+ mNewFrame = result.mMetadata;
if (!mNewFrameReceived) {
mNewFrameReceived = true;
mNewFrameSignal.signal();
@@ -585,12 +584,15 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCaptureWait(
entry = mNewFrame.find(ANDROID_SENSOR_TIMESTAMP);
if (entry.count == 0) {
ALOGE("No timestamp field in capture frame!");
- }
- if (entry.data.i64[0] != mCaptureTimestamp) {
- ALOGW("Mismatched capture timestamps: Metadata frame %" PRId64 ","
- " captured buffer %" PRId64,
- entry.data.i64[0],
- mCaptureTimestamp);
+ } else if (entry.count == 1) {
+ if (entry.data.i64[0] != mCaptureTimestamp) {
+ ALOGW("Mismatched capture timestamps: Metadata frame %" PRId64 ","
+ " captured buffer %" PRId64,
+ entry.data.i64[0],
+ mCaptureTimestamp);
+ }
+ } else {
+ ALOGE("Timestamp metadata is malformed!");
}
client->removeFrameListener(mCaptureId, mCaptureId + 1, this);
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
index 9fb4ee7..d42ab13 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
@@ -24,6 +24,7 @@
#include <utils/Mutex.h>
#include <utils/Condition.h>
#include "camera/CameraMetadata.h"
+#include "camera/CaptureResult.h"
#include "Parameters.h"
#include "FrameProcessor.h"
@@ -61,8 +62,8 @@ class CaptureSequencer:
// Notifications about AE state changes
void notifyAutoExposure(uint8_t newState, int triggerId);
- // Notifications from the frame processor
- virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame);
+ // Notification from the frame processor
+ virtual void onResultAvailable(const CaptureResult &result);
// Notifications from the JPEG processor
void onCaptureAvailable(nsecs_t timestamp, sp<MemoryBase> captureBuffer);
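
As the listener changes above and below show, FilteredListener::onFrameAvailable(requestId, metadata) is replaced by onResultAvailable(CaptureResult), which bundles the metadata with the new CaptureResultExtras. A hypothetical listener implementation under the new interface (MyListener is illustrative; the field accesses match those used in this change):

    void MyListener::onResultAvailable(const CaptureResult &result) {
        // Request identity now travels in the result extras rather than a bare int.
        int32_t requestId = result.mResultExtras.requestId;

        // The result metadata is carried alongside in mMetadata.
        camera_metadata_ro_entry_t entry =
                result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
        if (entry.count == 1) {
            ALOGV("Request %d completed, sensor timestamp %" PRId64,
                    requestId, entry.data.i64[0]);
        }
    }
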
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index dd5b27c..69bea24 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -55,7 +55,7 @@ FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
FrameProcessor::~FrameProcessor() {
}
-bool FrameProcessor::processSingleFrame(CameraMetadata &frame,
+bool FrameProcessor::processSingleFrame(CaptureResult &frame,
const sp<CameraDeviceBase> &device) {
sp<Camera2Client> client = mClient.promote();
@@ -66,19 +66,19 @@ bool FrameProcessor::processSingleFrame(CameraMetadata &frame,
bool partialResult = false;
if (mUsePartialQuirk) {
camera_metadata_entry_t entry;
- entry = frame.find(ANDROID_QUIRKS_PARTIAL_RESULT);
+ entry = frame.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
if (entry.count > 0 &&
entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
partialResult = true;
}
}
- if (!partialResult && processFaceDetect(frame, client) != OK) {
+ if (!partialResult && processFaceDetect(frame.mMetadata, client) != OK) {
return false;
}
if (mSynthesize3ANotify) {
- process3aState(frame, client);
+ process3aState(frame.mMetadata, client);
}
return FrameProcessorBase::processSingleFrame(frame, device);
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index 856ad32..514bd1a 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -51,7 +51,7 @@ class FrameProcessor : public FrameProcessorBase {
void processNewFrames(const sp<Camera2Client> &client);
- virtual bool processSingleFrame(CameraMetadata &frame,
+ virtual bool processSingleFrame(CaptureResult &frame,
const sp<CameraDeviceBase> &device);
status_t processFaceDetect(const CameraMetadata &frame,
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index 2de7a2b..964d278 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -83,11 +83,13 @@ status_t JpegProcessor::updateStream(const Parameters &params) {
if (mCaptureConsumer == 0) {
// Create CPU buffer queue endpoint
- sp<BufferQueue> bq = new BufferQueue();
- mCaptureConsumer = new CpuConsumer(bq, 1);
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mCaptureConsumer = new CpuConsumer(consumer, 1);
mCaptureConsumer->setFrameAvailableListener(this);
mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
- mCaptureWindow = new Surface(bq);
+ mCaptureWindow = new Surface(producer);
// Create memory for API consumption
mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0,
"Camera2Client::CaptureHeap");
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 07654c0..5bfb969 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -16,7 +16,7 @@
#define LOG_TAG "Camera2-Parameters"
#define ATRACE_TAG ATRACE_TAG_CAMERA
-// #define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <utils/Trace.h>
@@ -92,6 +92,26 @@ status_t Parameters::initialize(const CameraMetadata *info) {
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
if (!availableFpsRanges.count) return NO_INIT;
+ previewFpsRange[0] = availableFpsRanges.data.i32[0];
+ previewFpsRange[1] = availableFpsRanges.data.i32[1];
+
+ params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,
+ String8::format("%d,%d",
+ previewFpsRange[0] * kFpsToApiScale,
+ previewFpsRange[1] * kFpsToApiScale));
+
+ {
+ String8 supportedPreviewFpsRange;
+ for (size_t i=0; i < availableFpsRanges.count; i += 2) {
+ if (i != 0) supportedPreviewFpsRange += ",";
+ supportedPreviewFpsRange += String8::format("(%d,%d)",
+ availableFpsRanges.data.i32[i] * kFpsToApiScale,
+ availableFpsRanges.data.i32[i+1] * kFpsToApiScale);
+ }
+ params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+ supportedPreviewFpsRange);
+ }
+
previewFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
params.set(CameraParameters::KEY_PREVIEW_FORMAT,
formatEnumToString(previewFormat)); // NV21
@@ -159,9 +179,6 @@ status_t Parameters::initialize(const CameraMetadata *info) {
supportedPreviewFormats);
}
- previewFpsRange[0] = availableFpsRanges.data.i32[0];
- previewFpsRange[1] = availableFpsRanges.data.i32[1];
-
// PREVIEW_FRAME_RATE / SUPPORTED_PREVIEW_FRAME_RATES are deprecated, but
// still have to do something sane for them
@@ -170,27 +187,6 @@ status_t Parameters::initialize(const CameraMetadata *info) {
params.set(CameraParameters::KEY_PREVIEW_FRAME_RATE,
previewFps);
- // PREVIEW_FPS_RANGE
- // -- Order matters. Set range after single value to so that a roundtrip
- // of setParameters(getParameters()) would keep the FPS range in higher
- // order.
- params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,
- String8::format("%d,%d",
- previewFpsRange[0] * kFpsToApiScale,
- previewFpsRange[1] * kFpsToApiScale));
-
- {
- String8 supportedPreviewFpsRange;
- for (size_t i=0; i < availableFpsRanges.count; i += 2) {
- if (i != 0) supportedPreviewFpsRange += ",";
- supportedPreviewFpsRange += String8::format("(%d,%d)",
- availableFpsRanges.data.i32[i] * kFpsToApiScale,
- availableFpsRanges.data.i32[i+1] * kFpsToApiScale);
- }
- params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
- supportedPreviewFpsRange);
- }
-
{
SortedVector<int32_t> sortedPreviewFrameRates;
@@ -470,7 +466,7 @@ status_t Parameters::initialize(const CameraMetadata *info) {
supportedAntibanding);
}
- sceneMode = ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
+ sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
params.set(CameraParameters::KEY_SCENE_MODE,
CameraParameters::SCENE_MODE_AUTO);
@@ -486,7 +482,7 @@ status_t Parameters::initialize(const CameraMetadata *info) {
if (addComma) supportedSceneModes += ",";
addComma = true;
switch (availableSceneModes.data.u8[i]) {
- case ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED:
+ case ANDROID_CONTROL_SCENE_MODE_DISABLED:
noSceneModes = true;
break;
case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
@@ -668,13 +664,13 @@ status_t Parameters::initialize(const CameraMetadata *info) {
focusState = ANDROID_CONTROL_AF_STATE_INACTIVE;
shadowFocusMode = FOCUS_MODE_INVALID;
- camera_metadata_ro_entry_t max3aRegions =
- staticInfo(ANDROID_CONTROL_MAX_REGIONS, 1, 1);
- if (!max3aRegions.count) return NO_INIT;
+ camera_metadata_ro_entry_t max3aRegions = staticInfo(ANDROID_CONTROL_MAX_REGIONS,
+ Parameters::NUM_REGION, Parameters::NUM_REGION);
+ if (max3aRegions.count != Parameters::NUM_REGION) return NO_INIT;
int32_t maxNumFocusAreas = 0;
if (focusMode != Parameters::FOCUS_MODE_FIXED) {
- maxNumFocusAreas = max3aRegions.data.i32[0];
+ maxNumFocusAreas = max3aRegions.data.i32[Parameters::REGION_AF];
}
params.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, maxNumFocusAreas);
params.set(CameraParameters::KEY_FOCUS_AREAS,
@@ -734,7 +730,7 @@ status_t Parameters::initialize(const CameraMetadata *info) {
meteringAreas.add(Parameters::Area(0, 0, 0, 0, 0));
params.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS,
- max3aRegions.data.i32[0]);
+ max3aRegions.data.i32[Parameters::REGION_AE]);
params.set(CameraParameters::KEY_METERING_AREAS,
"(0,0,0,0,0)");
@@ -1088,7 +1084,7 @@ camera_metadata_ro_entry_t Parameters::staticInfo(uint32_t tag,
status_t Parameters::set(const String8& paramString) {
status_t res;
- CameraParameters2 newParams(paramString);
+ CameraParameters newParams(paramString);
// TODO: Currently ignoring any changes to supposedly read-only parameters
// such as supported preview sizes, etc. Should probably produce an error if
@@ -1131,73 +1127,29 @@ status_t Parameters::set(const String8& paramString) {
// RECORDING_HINT (always supported)
validatedParams.recordingHint = boolFromString(
newParams.get(CameraParameters::KEY_RECORDING_HINT) );
- IF_ALOGV() { // Avoid unused variable warning
- bool recordingHintChanged =
- validatedParams.recordingHint != recordingHint;
- if (recordingHintChanged) {
- ALOGV("%s: Recording hint changed to %d",
- __FUNCTION__, validatedParams.recordingHint);
- }
- }
+ bool recordingHintChanged = validatedParams.recordingHint != recordingHint;
+ ALOGV_IF(recordingHintChanged, "%s: Recording hint changed to %d",
+ __FUNCTION__, validatedParams.recordingHint);
// PREVIEW_FPS_RANGE
+ bool fpsRangeChanged = false;
+ int32_t lastSetFpsRange[2];
- /**
- * Use the single FPS value if it was set later than the range.
- * Otherwise, use the range value.
- */
- bool fpsUseSingleValue;
- {
- const char *fpsRange, *fpsSingle;
-
- fpsRange = newParams.get(CameraParameters::KEY_PREVIEW_FRAME_RATE);
- fpsSingle = newParams.get(CameraParameters::KEY_PREVIEW_FPS_RANGE);
-
- /**
- * Pick either the range or the single key if only one was set.
- *
- * If both are set, pick the one that has greater set order.
- */
- if (fpsRange == NULL && fpsSingle == NULL) {
- ALOGE("%s: FPS was not set. One of %s or %s must be set.",
- __FUNCTION__, CameraParameters::KEY_PREVIEW_FRAME_RATE,
- CameraParameters::KEY_PREVIEW_FPS_RANGE);
- return BAD_VALUE;
- } else if (fpsRange == NULL) {
- fpsUseSingleValue = true;
- ALOGV("%s: FPS range not set, using FPS single value",
- __FUNCTION__);
- } else if (fpsSingle == NULL) {
- fpsUseSingleValue = false;
- ALOGV("%s: FPS single not set, using FPS range value",
- __FUNCTION__);
- } else {
- int fpsKeyOrder;
- res = newParams.compareSetOrder(
- CameraParameters::KEY_PREVIEW_FRAME_RATE,
- CameraParameters::KEY_PREVIEW_FPS_RANGE,
- &fpsKeyOrder);
- LOG_ALWAYS_FATAL_IF(res != OK, "Impossibly bad FPS keys");
-
- fpsUseSingleValue = (fpsKeyOrder > 0);
+ params.getPreviewFpsRange(&lastSetFpsRange[0], &lastSetFpsRange[1]);
+ lastSetFpsRange[0] /= kFpsToApiScale;
+ lastSetFpsRange[1] /= kFpsToApiScale;
- }
-
- ALOGV("%s: Preview FPS value is used from '%s'",
- __FUNCTION__, fpsUseSingleValue ? "single" : "range");
- }
newParams.getPreviewFpsRange(&validatedParams.previewFpsRange[0],
&validatedParams.previewFpsRange[1]);
-
validatedParams.previewFpsRange[0] /= kFpsToApiScale;
validatedParams.previewFpsRange[1] /= kFpsToApiScale;
- // Ignore the FPS range if the FPS single has higher precedence
- if (!fpsUseSingleValue) {
- ALOGV("%s: Preview FPS range (%d, %d)", __FUNCTION__,
- validatedParams.previewFpsRange[0],
- validatedParams.previewFpsRange[1]);
+ // Compare the FPS range value from the last set() to the current set()
+ // to determine if the client has changed it
+ if (validatedParams.previewFpsRange[0] != lastSetFpsRange[0] ||
+ validatedParams.previewFpsRange[1] != lastSetFpsRange[1]) {
+ fpsRangeChanged = true;
camera_metadata_ro_entry_t availablePreviewFpsRanges =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
for (i = 0; i < availablePreviewFpsRanges.count; i += 2) {
@@ -1248,13 +1200,14 @@ status_t Parameters::set(const String8& paramString) {
}
}
- // PREVIEW_FRAME_RATE Deprecated
- // - Use only if the single FPS value was set later than the FPS range
- if (fpsUseSingleValue) {
+ // PREVIEW_FRAME_RATE Deprecated, only use if the preview fps range is
+ // unchanged this time. The single-value FPS is the same as the minimum of
+ // the range. To detect whether the application has changed the value of
+ // previewFps, compare against their last-set preview FPS.
+ if (!fpsRangeChanged) {
int previewFps = newParams.getPreviewFrameRate();
- ALOGV("%s: Preview FPS single value requested: %d",
- __FUNCTION__, previewFps);
- {
+ int lastSetPreviewFps = params.getPreviewFrameRate();
+ if (previewFps != lastSetPreviewFps || recordingHintChanged) {
camera_metadata_ro_entry_t availableFrameRates =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
/**
@@ -1323,35 +1276,6 @@ status_t Parameters::set(const String8& paramString) {
}
}
- /**
- * Update Preview FPS and Preview FPS ranges based on
- * what we actually set.
- *
- * This updates the API-visible (Camera.Parameters#getParameters) values of
- * the FPS fields, not only the internal versions.
- *
- * Order matters: The value that was set last takes precedence.
- * - If the client does a setParameters(getParameters()) we retain
- * the same order for preview FPS.
- */
- if (!fpsUseSingleValue) {
- // Set fps single, then fps range (range wins)
- newParams.setPreviewFrameRate(
- fpsFromRange(/*min*/validatedParams.previewFpsRange[0],
- /*max*/validatedParams.previewFpsRange[1]));
- newParams.setPreviewFpsRange(
- validatedParams.previewFpsRange[0] * kFpsToApiScale,
- validatedParams.previewFpsRange[1] * kFpsToApiScale);
- } else {
- // Set fps range, then fps single (single wins)
- newParams.setPreviewFpsRange(
- validatedParams.previewFpsRange[0] * kFpsToApiScale,
- validatedParams.previewFpsRange[1] * kFpsToApiScale);
- // Set this to the same value, but with higher priority
- newParams.setPreviewFrameRate(
- newParams.getPreviewFrameRate());
- }
-
// PICTURE_SIZE
newParams.getPictureSize(&validatedParams.pictureWidth,
&validatedParams.pictureHeight);
@@ -1522,7 +1446,7 @@ status_t Parameters::set(const String8& paramString) {
newParams.get(CameraParameters::KEY_SCENE_MODE) );
if (validatedParams.sceneMode != sceneMode &&
validatedParams.sceneMode !=
- ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) {
+ ANDROID_CONTROL_SCENE_MODE_DISABLED) {
camera_metadata_ro_entry_t availableSceneModes =
staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES);
for (i = 0; i < availableSceneModes.count; i++) {
@@ -1537,7 +1461,7 @@ status_t Parameters::set(const String8& paramString) {
}
}
bool sceneModeSet =
- validatedParams.sceneMode != ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
+ validatedParams.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED;
// FLASH_MODE
if (sceneModeSet) {
@@ -1667,10 +1591,11 @@ status_t Parameters::set(const String8& paramString) {
// FOCUS_AREAS
res = parseAreas(newParams.get(CameraParameters::KEY_FOCUS_AREAS),
&validatedParams.focusingAreas);
- size_t max3aRegions =
- (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS, 1, 1).data.i32[0];
+ size_t maxAfRegions = (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS,
+ Parameters::NUM_REGION, Parameters::NUM_REGION).
+ data.i32[Parameters::REGION_AF];
if (res == OK) res = validateAreas(validatedParams.focusingAreas,
- max3aRegions, AREA_KIND_FOCUS);
+ maxAfRegions, AREA_KIND_FOCUS);
if (res != OK) {
ALOGE("%s: Requested focus areas are malformed: %s",
__FUNCTION__, newParams.get(CameraParameters::KEY_FOCUS_AREAS));
@@ -1700,10 +1625,13 @@ status_t Parameters::set(const String8& paramString) {
newParams.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK));
// METERING_AREAS
+ size_t maxAeRegions = (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS,
+ Parameters::NUM_REGION, Parameters::NUM_REGION).
+ data.i32[Parameters::REGION_AE];
res = parseAreas(newParams.get(CameraParameters::KEY_METERING_AREAS),
&validatedParams.meteringAreas);
if (res == OK) {
- res = validateAreas(validatedParams.meteringAreas, max3aRegions,
+ res = validateAreas(validatedParams.meteringAreas, maxAeRegions,
AREA_KIND_METERING);
}
if (res != OK) {
@@ -1852,7 +1780,7 @@ status_t Parameters::updateRequest(CameraMetadata *request) const {
// (face detection statistics and face priority scene mode). Map from other
// to the other.
bool sceneModeActive =
- sceneMode != (uint8_t)ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
+ sceneMode != (uint8_t)ANDROID_CONTROL_SCENE_MODE_DISABLED;
uint8_t reqControlMode = ANDROID_CONTROL_MODE_AUTO;
if (enableFaceDetect || sceneModeActive) {
reqControlMode = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
@@ -1864,7 +1792,7 @@ status_t Parameters::updateRequest(CameraMetadata *request) const {
uint8_t reqSceneMode =
sceneModeActive ? sceneMode :
enableFaceDetect ? (uint8_t)ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
- (uint8_t)ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
+ (uint8_t)ANDROID_CONTROL_SCENE_MODE_DISABLED;
res = request->update(ANDROID_CONTROL_SCENE_MODE,
&reqSceneMode, 1);
if (res != OK) return res;
@@ -1985,6 +1913,23 @@ status_t Parameters::updateRequest(CameraMetadata *request) const {
reqMeteringAreas, reqMeteringAreasSize);
if (res != OK) return res;
+ // Set awb regions to be the same as the metering regions if allowed
+ size_t maxAwbRegions = (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS,
+ Parameters::NUM_REGION, Parameters::NUM_REGION).
+ data.i32[Parameters::REGION_AWB];
+ if (maxAwbRegions > 0) {
+ if (maxAwbRegions >= meteringAreas.size()) {
+ res = request->update(ANDROID_CONTROL_AWB_REGIONS,
+ reqMeteringAreas, reqMeteringAreasSize);
+ } else {
+ // Ensure the awb regions are zeroed if the region count is too high.
+ int32_t zeroedAwbAreas[5] = {0, 0, 0, 0, 0};
+ res = request->update(ANDROID_CONTROL_AWB_REGIONS,
+ zeroedAwbAreas, sizeof(zeroedAwbAreas)/sizeof(int32_t));
+ }
+ if (res != OK) return res;
+ }
+
delete[] reqMeteringAreas;
/* don't include jpeg thumbnail size - it's valid for
@@ -2225,9 +2170,9 @@ int Parameters::abModeStringToEnum(const char *abMode) {
int Parameters::sceneModeStringToEnum(const char *sceneMode) {
return
!sceneMode ?
- ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED :
+ ANDROID_CONTROL_SCENE_MODE_DISABLED :
!strcmp(sceneMode, CameraParameters::SCENE_MODE_AUTO) ?
- ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED :
+ ANDROID_CONTROL_SCENE_MODE_DISABLED :
!strcmp(sceneMode, CameraParameters::SCENE_MODE_ACTION) ?
ANDROID_CONTROL_SCENE_MODE_ACTION :
!strcmp(sceneMode, CameraParameters::SCENE_MODE_PORTRAIT) ?
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index da07ccf..60c4687 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -25,7 +25,6 @@
#include <utils/Vector.h>
#include <utils/KeyedVector.h>
#include <camera/CameraParameters.h>
-#include <camera/CameraParameters2.h>
#include <camera/CameraMetadata.h>
namespace android {
@@ -33,7 +32,7 @@ namespace camera2 {
/**
* Current camera state; this is the full state of the Camera under the old
- * camera API (contents of the CameraParameters2 object in a more-efficient
+ * camera API (contents of the CameraParameters object in a more-efficient
* format, plus other state). The enum values are mostly based off the
* corresponding camera2 enums, not the camera1 strings. A few are defined here
* if they don't cleanly map to camera2 values.
@@ -114,6 +113,14 @@ struct Parameters {
bool autoExposureLock;
bool autoWhiteBalanceLock;
+ // 3A region types, for use with ANDROID_CONTROL_MAX_REGIONS
+ enum region_t {
+ REGION_AE = 0,
+ REGION_AWB,
+ REGION_AF,
+ NUM_REGION // Number of region types
+ } region;
+
Vector<Area> meteringAreas;
int zoom;
@@ -129,7 +136,7 @@ struct Parameters {
LIGHTFX_HDR
} lightFx;
- CameraParameters2 params;
+ CameraParameters params;
String8 paramsFlattened;
// These parameters are also part of the camera API-visible state, but not
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 77ae7ec..2064e2c 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -319,13 +319,15 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters &params) {
// Create CPU buffer queue endpoint. We need one more buffer here so that we can
// always acquire and free a buffer when the heap is full; otherwise the consumer
// will have buffers in flight we'll never clear out.
- sp<BufferQueue> bq = new BufferQueue();
- mRecordingConsumer = new BufferItemConsumer(bq,
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mRecordingConsumer = new BufferItemConsumer(consumer,
GRALLOC_USAGE_HW_VIDEO_ENCODER,
mRecordingHeapCount + 1);
mRecordingConsumer->setFrameAvailableListener(this);
mRecordingConsumer->setName(String8("Camera2-RecordingConsumer"));
- mRecordingWindow = new Surface(bq);
+ mRecordingWindow = new Surface(producer);
newConsumer = true;
// Allocate memory later, since we don't know buffer size until receipt
}
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 130f81a..2a2a5af 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -73,18 +73,19 @@ void ZslProcessor::onFrameAvailable() {
}
}
-void ZslProcessor::onFrameAvailable(int32_t /*requestId*/,
- const CameraMetadata &frame) {
+void ZslProcessor::onResultAvailable(const CaptureResult &result) {
+ ATRACE_CALL();
+ ALOGV("%s:", __FUNCTION__);
Mutex::Autolock l(mInputMutex);
camera_metadata_ro_entry_t entry;
- entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+ entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
nsecs_t timestamp = entry.data.i64[0];
(void)timestamp;
ALOGVV("Got preview frame for timestamp %" PRId64, timestamp);
if (mState != RUNNING) return;
- mFrameList.editItemAt(mFrameListHead) = frame;
+ mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
mFrameListHead = (mFrameListHead + 1) % kFrameListDepth;
findMatchesLocked();
@@ -130,13 +131,15 @@ status_t ZslProcessor::updateStream(const Parameters &params) {
if (mZslConsumer == 0) {
// Create CPU buffer queue endpoint
- sp<BufferQueue> bq = new BufferQueue();
- mZslConsumer = new BufferItemConsumer(bq,
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mZslConsumer = new BufferItemConsumer(consumer,
GRALLOC_USAGE_HW_CAMERA_ZSL,
kZslBufferDepth);
mZslConsumer->setFrameAvailableListener(this);
mZslConsumer->setName(String8("Camera2Client::ZslConsumer"));
- mZslWindow = new Surface(bq);
+ mZslWindow = new Surface(producer);
}
if (mZslStreamId != NO_STREAM) {
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index 6d3cb85..f4cf0c8 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -24,6 +24,7 @@
#include <utils/Condition.h>
#include <gui/BufferItemConsumer.h>
#include <camera/CameraMetadata.h>
+#include <camera/CaptureResult.h>
#include "common/CameraDeviceBase.h"
#include "api1/client2/ZslProcessorInterface.h"
@@ -54,7 +55,7 @@ class ZslProcessor:
// From mZslConsumer
virtual void onFrameAvailable();
// From FrameProcessor
- virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame);
+ virtual void onResultAvailable(const CaptureResult &result);
virtual void onBufferReleased(buffer_handle_t *handle);
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
index 2fce2b6..1dcb718 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
@@ -63,18 +63,19 @@ ZslProcessor3::~ZslProcessor3() {
deleteStream();
}
-void ZslProcessor3::onFrameAvailable(int32_t /*requestId*/,
- const CameraMetadata &frame) {
+void ZslProcessor3::onResultAvailable(const CaptureResult &result) {
+ ATRACE_CALL();
+ ALOGV("%s:", __FUNCTION__);
Mutex::Autolock l(mInputMutex);
camera_metadata_ro_entry_t entry;
- entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+ entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
nsecs_t timestamp = entry.data.i64[0];
(void)timestamp;
ALOGVV("Got preview metadata for timestamp %" PRId64, timestamp);
if (mState != RUNNING) return;
- mFrameList.editItemAt(mFrameListHead) = frame;
+ mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
mFrameListHead = (mFrameListHead + 1) % kFrameListDepth;
}
@@ -275,6 +276,15 @@ status_t ZslProcessor3::pushToReprocess(int32_t requestId) {
return INVALID_OPERATION;
}
+ // Flush device to clear out all in-flight requests pending in HAL.
+ res = client->getCameraDevice()->flush();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Failed to flush device: "
+ "%s (%d)",
+ __FUNCTION__, client->getCameraId(), strerror(-res), res);
+ return res;
+ }
+
// Update JPEG settings
{
SharedParameters::Lock l(client->getParameters());
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
index d2f8322..4c52a64 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
@@ -50,8 +50,8 @@ class ZslProcessor3 :
ZslProcessor3(sp<Camera2Client> client, wp<CaptureSequencer> sequencer);
~ZslProcessor3();
- // From FrameProcessor
- virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame);
+ // From FrameProcessor::FilteredListener
+ virtual void onResultAvailable(const CaptureResult &result);
/**
****************************************
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 142da9e..5a48a62 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -16,7 +16,7 @@
#define LOG_TAG "CameraDeviceClient"
#define ATRACE_TAG ATRACE_TAG_CAMERA
-// #define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
#include <cutils/properties.h>
#include <utils/Log.h>
@@ -91,79 +91,101 @@ CameraDeviceClient::~CameraDeviceClient() {
}
status_t CameraDeviceClient::submitRequest(sp<CaptureRequest> request,
- bool streaming) {
+ bool streaming,
+ /*out*/
+ int64_t* lastFrameNumber) {
+ List<sp<CaptureRequest> > requestList;
+ requestList.push_back(request);
+ return submitRequestList(requestList, streaming, lastFrameNumber);
+}
+
+status_t CameraDeviceClient::submitRequestList(List<sp<CaptureRequest> > requests,
+ bool streaming, int64_t* lastFrameNumber) {
ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
+ ALOGV("%s-start of function. Request list size %d", __FUNCTION__, requests.size());
status_t res;
-
if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
Mutex::Autolock icl(mBinderSerializationLock);
if (!mDevice.get()) return DEAD_OBJECT;
- if (request == 0) {
+ if (requests.empty()) {
ALOGE("%s: Camera %d: Sent null request. Rejecting request.",
__FUNCTION__, mCameraId);
return BAD_VALUE;
}
- CameraMetadata metadata(request->mMetadata);
-
- if (metadata.isEmpty()) {
- ALOGE("%s: Camera %d: Sent empty metadata packet. Rejecting request.",
- __FUNCTION__, mCameraId);
- return BAD_VALUE;
- } else if (request->mSurfaceList.size() == 0) {
- ALOGE("%s: Camera %d: Requests must have at least one surface target. "
- "Rejecting request.", __FUNCTION__, mCameraId);
- return BAD_VALUE;
- }
+ List<const CameraMetadata> metadataRequestList;
+ int32_t requestId = mRequestIdCounter;
+ uint32_t loopCounter = 0;
- if (!enforceRequestPermissions(metadata)) {
- // Callee logs
- return PERMISSION_DENIED;
- }
+ for (List<sp<CaptureRequest> >::iterator it = requests.begin(); it != requests.end(); ++it) {
+ sp<CaptureRequest> request = *it;
+ if (request == 0) {
+ ALOGE("%s: Camera %d: Sent null request.",
+ __FUNCTION__, mCameraId);
+ return BAD_VALUE;
+ }
- /**
- * Write in the output stream IDs which we calculate from
- * the capture request's list of surface targets
- */
- Vector<int32_t> outputStreamIds;
- outputStreamIds.setCapacity(request->mSurfaceList.size());
- for (size_t i = 0; i < request->mSurfaceList.size(); ++i) {
- sp<Surface> surface = request->mSurfaceList[i];
+ CameraMetadata metadata(request->mMetadata);
+ if (metadata.isEmpty()) {
+ ALOGE("%s: Camera %d: Sent empty metadata packet. Rejecting request.",
+ __FUNCTION__, mCameraId);
+ return BAD_VALUE;
+ } else if (request->mSurfaceList.isEmpty()) {
+ ALOGE("%s: Camera %d: Requests must have at least one surface target. "
+ "Rejecting request.", __FUNCTION__, mCameraId);
+ return BAD_VALUE;
+ }
- if (surface == 0) continue;
+ if (!enforceRequestPermissions(metadata)) {
+ // Callee logs
+ return PERMISSION_DENIED;
+ }
- sp<IGraphicBufferProducer> gbp = surface->getIGraphicBufferProducer();
- int idx = mStreamMap.indexOfKey(gbp->asBinder());
+ /**
+ * Write in the output stream IDs which we calculate from
+ * the capture request's list of surface targets
+ */
+ Vector<int32_t> outputStreamIds;
+ outputStreamIds.setCapacity(request->mSurfaceList.size());
+ for (size_t i = 0; i < request->mSurfaceList.size(); ++i) {
+ sp<Surface> surface = request->mSurfaceList[i];
+ if (surface == 0) continue;
+
+ sp<IGraphicBufferProducer> gbp = surface->getIGraphicBufferProducer();
+ int idx = mStreamMap.indexOfKey(gbp->asBinder());
+
+ // Trying to submit request with surface that wasn't created
+ if (idx == NAME_NOT_FOUND) {
+ ALOGE("%s: Camera %d: Tried to submit a request with a surface that"
+ " we have not called createStream on",
+ __FUNCTION__, mCameraId);
+ return BAD_VALUE;
+ }
- // Trying to submit request with surface that wasn't created
- if (idx == NAME_NOT_FOUND) {
- ALOGE("%s: Camera %d: Tried to submit a request with a surface that"
- " we have not called createStream on",
- __FUNCTION__, mCameraId);
- return BAD_VALUE;
+ int streamId = mStreamMap.valueAt(idx);
+ outputStreamIds.push_back(streamId);
+ ALOGV("%s: Camera %d: Appending output stream %d to request",
+ __FUNCTION__, mCameraId, streamId);
}
- int streamId = mStreamMap.valueAt(idx);
- outputStreamIds.push_back(streamId);
- ALOGV("%s: Camera %d: Appending output stream %d to request",
- __FUNCTION__, mCameraId, streamId);
- }
+ metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0],
+ outputStreamIds.size());
- metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0],
- outputStreamIds.size());
+ metadata.update(ANDROID_REQUEST_ID, &requestId, /*size*/1);
+ loopCounter++; // loopCounter starts from 1
+ ALOGV("%s: Camera %d: Creating request with ID %d (%d of %d)",
+ __FUNCTION__, mCameraId, requestId, loopCounter, requests.size());
- int32_t requestId = mRequestIdCounter++;
- metadata.update(ANDROID_REQUEST_ID, &requestId, /*size*/1);
- ALOGV("%s: Camera %d: Submitting request with ID %d",
- __FUNCTION__, mCameraId, requestId);
+ metadataRequestList.push_back(metadata);
+ }
+ mRequestIdCounter++;
if (streaming) {
- res = mDevice->setStreamingRequest(metadata);
+ res = mDevice->setStreamingRequestList(metadataRequestList, lastFrameNumber);
if (res != OK) {
ALOGE("%s: Camera %d: Got error %d after trying to set streaming "
"request", __FUNCTION__, mCameraId, res);
@@ -171,11 +193,12 @@ status_t CameraDeviceClient::submitRequest(sp<CaptureRequest> request,
mStreamingRequestList.push_back(requestId);
}
} else {
- res = mDevice->capture(metadata);
+ res = mDevice->captureList(metadataRequestList, lastFrameNumber);
if (res != OK) {
ALOGE("%s: Camera %d: Got error %d after trying to set capture",
- __FUNCTION__, mCameraId, res);
+ __FUNCTION__, mCameraId, res);
}
+ ALOGV("%s: requestId = %d ", __FUNCTION__, requestId);
}
ALOGV("%s: Camera %d: End of function", __FUNCTION__, mCameraId);
@@ -186,7 +209,7 @@ status_t CameraDeviceClient::submitRequest(sp<CaptureRequest> request,
return res;
}
-status_t CameraDeviceClient::cancelRequest(int requestId) {
+status_t CameraDeviceClient::cancelRequest(int requestId, int64_t* lastFrameNumber) {
ATRACE_CALL();
ALOGV("%s, requestId = %d", __FUNCTION__, requestId);
@@ -212,7 +235,7 @@ status_t CameraDeviceClient::cancelRequest(int requestId) {
return BAD_VALUE;
}
- res = mDevice->clearStreamingRequest();
+ res = mDevice->clearStreamingRequest(lastFrameNumber);
if (res == OK) {
ALOGV("%s: Camera %d: Successfully cleared streaming request",
@@ -259,8 +282,6 @@ status_t CameraDeviceClient::deleteStream(int streamId) {
} else if (res == OK) {
mStreamMap.removeItemsAt(index);
- ALOGV("%s: Camera %d: Successfully deleted stream ID (%d)",
- __FUNCTION__, mCameraId, streamId);
}
return res;
@@ -465,7 +486,7 @@ status_t CameraDeviceClient::waitUntilIdle()
return res;
}
-status_t CameraDeviceClient::flush() {
+status_t CameraDeviceClient::flush(int64_t* lastFrameNumber) {
ATRACE_CALL();
ALOGV("%s", __FUNCTION__);
@@ -476,7 +497,8 @@ status_t CameraDeviceClient::flush() {
if (!mDevice.get()) return DEAD_OBJECT;
- return mDevice->flush();
+ mStreamingRequestList.clear();
+ return mDevice->flush(lastFrameNumber);
}
status_t CameraDeviceClient::dump(int fd, const Vector<String16>& args) {
@@ -493,13 +515,13 @@ status_t CameraDeviceClient::dump(int fd, const Vector<String16>& args) {
return dumpDevice(fd, args);
}
-
-void CameraDeviceClient::notifyError() {
+void CameraDeviceClient::notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
+ const CaptureResultExtras& resultExtras) {
// Thread safe. Don't bother locking.
sp<ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
if (remoteCb != 0) {
- remoteCb->onDeviceError(ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE);
+ remoteCb->onDeviceError(errorCode, resultExtras);
}
}
@@ -512,12 +534,12 @@ void CameraDeviceClient::notifyIdle() {
}
}
-void CameraDeviceClient::notifyShutter(int requestId,
+void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
nsecs_t timestamp) {
// Thread safe. Don't bother locking.
sp<ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
if (remoteCb != 0) {
- remoteCb->onCaptureStarted(requestId, timestamp);
+ remoteCb->onCaptureStarted(resultExtras, timestamp);
}
}
@@ -552,16 +574,14 @@ void CameraDeviceClient::detachDevice() {
}
/** Device-related methods */
-void CameraDeviceClient::onFrameAvailable(int32_t requestId,
- const CameraMetadata& frame) {
+void CameraDeviceClient::onResultAvailable(const CaptureResult& result) {
ATRACE_CALL();
ALOGV("%s", __FUNCTION__);
// Thread-safe. No lock necessary.
sp<ICameraDeviceCallbacks> remoteCb = mRemoteCallback;
if (remoteCb != NULL) {
- ALOGV("%s: frame = %p ", __FUNCTION__, &frame);
- remoteCb->onResultReceived(requestId, frame);
+ remoteCb->onResultReceived(result.mMetadata, result.mResultExtras);
}
}
@@ -635,26 +655,56 @@ status_t CameraDeviceClient::getRotationTransformLocked(int32_t* transform) {
return INVALID_OPERATION;
}
+ camera_metadata_ro_entry_t entryFacing = staticInfo.find(ANDROID_LENS_FACING);
+ if (entryFacing.count == 0) {
+ ALOGE("%s: Camera %d: Can't find android.lens.facing in "
+ "static metadata!", __FUNCTION__, mCameraId);
+ return INVALID_OPERATION;
+ }
+
int32_t& flags = *transform;
+ bool mirror = (entryFacing.data.u8[0] == ANDROID_LENS_FACING_FRONT);
int orientation = entry.data.i32[0];
- switch (orientation) {
- case 0:
- flags = 0;
- break;
- case 90:
- flags = NATIVE_WINDOW_TRANSFORM_ROT_90;
- break;
- case 180:
- flags = NATIVE_WINDOW_TRANSFORM_ROT_180;
- break;
- case 270:
- flags = NATIVE_WINDOW_TRANSFORM_ROT_270;
- break;
- default:
- ALOGE("%s: Invalid HAL android.sensor.orientation value: %d",
- __FUNCTION__, orientation);
- return INVALID_OPERATION;
+ if (!mirror) {
+ switch (orientation) {
+ case 0:
+ flags = 0;
+ break;
+ case 90:
+ flags = NATIVE_WINDOW_TRANSFORM_ROT_90;
+ break;
+ case 180:
+ flags = NATIVE_WINDOW_TRANSFORM_ROT_180;
+ break;
+ case 270:
+ flags = NATIVE_WINDOW_TRANSFORM_ROT_270;
+ break;
+ default:
+ ALOGE("%s: Invalid HAL android.sensor.orientation value: %d",
+ __FUNCTION__, orientation);
+ return INVALID_OPERATION;
+ }
+ } else {
+ switch (orientation) {
+ case 0:
+ flags = HAL_TRANSFORM_FLIP_H;
+ break;
+ case 90:
+ flags = HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90;
+ break;
+ case 180:
+ flags = HAL_TRANSFORM_FLIP_V;
+ break;
+ case 270:
+ flags = HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90;
+ break;
+ default:
+ ALOGE("%s: Invalid HAL android.sensor.orientation value: %d",
+ __FUNCTION__, orientation);
+ return INVALID_OPERATION;
+ }
+
}
/**
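A minimal sketch of the orientation-to-transform mapping that the new mirrored branch above spells out case by case; the helper name and table layout are illustrative only, and the flag constants are the usual NATIVE_WINDOW_TRANSFORM_* / HAL_TRANSFORM_* values from the platform headers:

    // Illustrative only: same mapping as getRotationTransformLocked(), with the
    // front-facing (mirrored) variants expressed as flip-plus-rotate composites.
    static int32_t orientationToTransform(int orientation, bool mirror) {
        static const int32_t kNormal[4] = {
            0,
            NATIVE_WINDOW_TRANSFORM_ROT_90,
            NATIVE_WINDOW_TRANSFORM_ROT_180,
            NATIVE_WINDOW_TRANSFORM_ROT_270,
        };
        static const int32_t kMirrored[4] = {
            HAL_TRANSFORM_FLIP_H,
            HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90,
            HAL_TRANSFORM_FLIP_V,
            HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90,
        };
        if (orientation < 0 || orientation > 270 || orientation % 90 != 0) {
            return -1;  // caller maps this to INVALID_OPERATION
        }
        return (mirror ? kMirrored : kNormal)[orientation / 90];
    }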
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index b9c16aa..0b37784 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -63,9 +63,18 @@ public:
*/
// Note that the callee gets a copy of the metadata.
- virtual int submitRequest(sp<CaptureRequest> request,
- bool streaming = false);
- virtual status_t cancelRequest(int requestId);
+ virtual status_t submitRequest(sp<CaptureRequest> request,
+ bool streaming = false,
+ /*out*/
+ int64_t* lastFrameNumber = NULL);
+ // The list of requests is copied.
+ virtual status_t submitRequestList(List<sp<CaptureRequest> > requests,
+ bool streaming = false,
+ /*out*/
+ int64_t* lastFrameNumber = NULL);
+ virtual status_t cancelRequest(int requestId,
+ /*out*/
+ int64_t* lastFrameNumber = NULL);
// Returns -EBUSY if device is not idle
virtual status_t deleteStream(int streamId);
@@ -89,7 +98,8 @@ public:
virtual status_t waitUntilIdle();
// Flush all active and pending requests as fast as possible
- virtual status_t flush();
+ virtual status_t flush(/*out*/
+ int64_t* lastFrameNumber = NULL);
/**
* Interface used by CameraService
@@ -114,16 +124,16 @@ public:
*/
virtual void notifyIdle();
- virtual void notifyError();
- virtual void notifyShutter(int requestId, nsecs_t timestamp);
+ virtual void notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
+ const CaptureResultExtras& resultExtras);
+ virtual void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp);
/**
* Interface used by independent components of CameraDeviceClient.
*/
protected:
/** FilteredListener implementation **/
- virtual void onFrameAvailable(int32_t requestId,
- const CameraMetadata& frame);
+ virtual void onResultAvailable(const CaptureResult& result);
virtual void detachDevice();
// Calculate the ANativeWindow transform from android.sensor.orientation
diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
index 1a7a7a7..0f6d278 100644
--- a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
+++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
@@ -373,9 +373,7 @@ void ProCamera2Client::detachDevice() {
Camera2ClientBase::detachDevice();
}
-/** Device-related methods */
-void ProCamera2Client::onFrameAvailable(int32_t requestId,
- const CameraMetadata& frame) {
+void ProCamera2Client::onResultAvailable(const CaptureResult& result) {
ATRACE_CALL();
ALOGV("%s", __FUNCTION__);
@@ -383,13 +381,12 @@ void ProCamera2Client::onFrameAvailable(int32_t requestId,
SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
if (mRemoteCallback != NULL) {
- CameraMetadata tmp(frame);
+ CameraMetadata tmp(result.mMetadata);
camera_metadata_t* meta = tmp.release();
ALOGV("%s: meta = %p ", __FUNCTION__, meta);
- mRemoteCallback->onResultReceived(requestId, meta);
+ mRemoteCallback->onResultReceived(result.mResultExtras.requestId, meta);
tmp.acquire(meta);
}
-
}
bool ProCamera2Client::enforceRequestPermissions(CameraMetadata& metadata) {
diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.h b/services/camera/libcameraservice/api_pro/ProCamera2Client.h
index 8a0f547..9d83122 100644
--- a/services/camera/libcameraservice/api_pro/ProCamera2Client.h
+++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.h
@@ -21,6 +21,7 @@
#include "common/FrameProcessorBase.h"
#include "common/Camera2ClientBase.h"
#include "device2/Camera2Device.h"
+#include "camera/CaptureResult.h"
namespace android {
@@ -97,8 +98,8 @@ public:
protected:
/** FilteredListener implementation **/
- virtual void onFrameAvailable(int32_t requestId,
- const CameraMetadata& frame);
+ virtual void onResultAvailable(const CaptureResult& result);
+
virtual void detachDevice();
private:
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 6a88c87..19efd30 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -221,10 +221,11 @@ status_t Camera2ClientBase<TClientBase>::connect(
/** Device-related methods */
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyError(int errorCode, int arg1,
- int arg2) {
- ALOGE("Error condition %d reported by HAL, arguments %d, %d", errorCode,
- arg1, arg2);
+void Camera2ClientBase<TClientBase>::notifyError(
+ ICameraDeviceCallbacks::CameraErrorCode errorCode,
+ const CaptureResultExtras& resultExtras) {
+ ALOGE("Error condition %d reported by HAL, requestId %" PRId32, errorCode,
+ resultExtras.requestId);
}
template <typename TClientBase>
@@ -233,13 +234,13 @@ void Camera2ClientBase<TClientBase>::notifyIdle() {
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyShutter(int requestId,
+void Camera2ClientBase<TClientBase>::notifyShutter(const CaptureResultExtras& resultExtras,
nsecs_t timestamp) {
- (void)requestId;
+ (void)resultExtras;
(void)timestamp;
- ALOGV("%s: Shutter notification for request id %d at time %" PRId64,
- __FUNCTION__, requestId, timestamp);
+ ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
+ __FUNCTION__, resultExtras.requestId, timestamp);
}
template <typename TClientBase>
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 61e44f0..9feca93 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -18,6 +18,7 @@
#define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_BASE_H
#include "common/CameraDeviceBase.h"
+#include "camera/CaptureResult.h"
namespace android {
@@ -61,9 +62,11 @@ public:
* CameraDeviceBase::NotificationListener implementation
*/
- virtual void notifyError(int errorCode, int arg1, int arg2);
+ virtual void notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
+ const CaptureResultExtras& resultExtras);
virtual void notifyIdle();
- virtual void notifyShutter(int requestId, nsecs_t timestamp);
+ virtual void notifyShutter(const CaptureResultExtras& resultExtras,
+ nsecs_t timestamp);
virtual void notifyAutoFocus(uint8_t newState, int triggerId);
virtual void notifyAutoExposure(uint8_t newState, int triggerId);
virtual void notifyAutoWhitebalance(uint8_t newState,
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index e80abf1..7597b10 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -22,9 +22,12 @@
#include <utils/String16.h>
#include <utils/Vector.h>
#include <utils/Timers.h>
+#include <utils/List.h>
+#include <camera/camera2/ICameraDeviceCallbacks.h>
#include "hardware/camera2.h"
#include "camera/CameraMetadata.h"
+#include "camera/CaptureResult.h"
namespace android {
@@ -44,7 +47,7 @@ class CameraDeviceBase : public virtual RefBase {
virtual status_t initialize(camera_module_t *module) = 0;
virtual status_t disconnect() = 0;
- virtual status_t dump(int fd, const Vector<String16>& args) = 0;
+ virtual status_t dump(int fd, const Vector<String16> &args) = 0;
/**
* The device's static characteristics metadata buffer
@@ -54,19 +57,37 @@ class CameraDeviceBase : public virtual RefBase {
/**
* Submit request for capture. The CameraDevice takes ownership of the
* passed-in buffer.
+ * Output lastFrameNumber is the expected frame number of this request.
*/
- virtual status_t capture(CameraMetadata &request) = 0;
+ virtual status_t capture(CameraMetadata &request, int64_t *lastFrameNumber = NULL) = 0;
+
+ /**
+ * Submit a list of requests.
+ * Output lastFrameNumber is the expected last frame number of the list of requests.
+ */
+ virtual status_t captureList(const List<const CameraMetadata> &requests,
+ int64_t *lastFrameNumber = NULL) = 0;
/**
* Submit request for streaming. The CameraDevice makes a copy of the
* passed-in buffer and the caller retains ownership.
+ * Output lastFrameNumber is the last frame number of the previous streaming request.
+ */
+ virtual status_t setStreamingRequest(const CameraMetadata &request,
+ int64_t *lastFrameNumber = NULL) = 0;
+
+ /**
+ * Submit a list of requests for streaming.
+ * Output lastFrameNumber is the last frame number of the previous streaming request.
*/
- virtual status_t setStreamingRequest(const CameraMetadata &request) = 0;
+ virtual status_t setStreamingRequestList(const List<const CameraMetadata> &requests,
+ int64_t *lastFrameNumber = NULL) = 0;
/**
* Clear the streaming request slot.
+ * Output lastFrameNumber is the last frame number of the previous streaming request.
*/
- virtual status_t clearStreamingRequest() = 0;
+ virtual status_t clearStreamingRequest(int64_t *lastFrameNumber = NULL) = 0;
/**
* Wait until a request with the given ID has been dequeued by the
@@ -142,11 +163,12 @@ class CameraDeviceBase : public virtual RefBase {
// API1 and API2.
// Required for API 1 and 2
- virtual void notifyError(int errorCode, int arg1, int arg2) = 0;
+ virtual void notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
+ const CaptureResultExtras &resultExtras) = 0;
// Required only for API2
virtual void notifyIdle() = 0;
- virtual void notifyShutter(int requestId,
+ virtual void notifyShutter(const CaptureResultExtras &resultExtras,
nsecs_t timestamp) = 0;
// Required only for API1
@@ -179,11 +201,12 @@ class CameraDeviceBase : public virtual RefBase {
virtual status_t waitForNextFrame(nsecs_t timeout) = 0;
/**
- * Get next metadata frame from the frame queue. Returns NULL if the queue
- * is empty; caller takes ownership of the metadata buffer.
- * May be called concurrently to most methods, except for waitForNextFrame
+ * Get next capture result frame from the result queue. Returns NOT_ENOUGH_DATA
+ * if the queue is empty; caller takes ownership of the metadata buffer inside
+ * the capture result object's metadata field.
+ * May be called concurrently to most methods, except for waitForNextFrame.
*/
- virtual status_t getNextFrame(CameraMetadata *frame) = 0;
+ virtual status_t getNextResult(CaptureResult *frame) = 0;
/**
* Trigger auto-focus. The latest ID used in a trigger autofocus or cancel
@@ -224,8 +247,9 @@ class CameraDeviceBase : public virtual RefBase {
/**
* Flush all pending and in-flight requests. Blocks until flush is
* complete.
+ * Output lastFrameNumber is the last frame number of the previous streaming request.
*/
- virtual status_t flush() = 0;
+ virtual status_t flush(int64_t *lastFrameNumber = NULL) = 0;
};
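For illustration, a hypothetical caller of the widened interface; device, settingsA and settingsB are assumptions here, not symbols introduced by this patch:

    // Submit a two-request burst and learn the frame number that will carry
    // the last result of the burst.
    List<const CameraMetadata> burst;
    burst.push_back(settingsA);   // assumed valid request metadata with
    burst.push_back(settingsB);   // ANDROID_REQUEST_ID already populated
    int64_t lastFrameNumber = -1;
    status_t res = device->captureList(burst, &lastFrameNumber);
    if (res == OK) {
        ALOGV("Burst queued; its final result arrives as frame %" PRId64,
                lastFrameNumber);
    }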
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index 4d31667..f6a971a 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -99,15 +99,17 @@ bool FrameProcessorBase::threadLoop() {
void FrameProcessorBase::processNewFrames(const sp<CameraDeviceBase> &device) {
status_t res;
ATRACE_CALL();
- CameraMetadata frame;
+ CaptureResult result;
ALOGV("%s: Camera %d: Process new frames", __FUNCTION__, device->getId());
- while ( (res = device->getNextFrame(&frame)) == OK) {
+ while ( (res = device->getNextResult(&result)) == OK) {
+ // TODO: instead of getting frame number from metadata, we should read
+ // this from result.mResultExtras when CameraDeviceBase interface is fixed.
camera_metadata_entry_t entry;
- entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
+ entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
if (entry.count == 0) {
ALOGE("%s: Camera %d: Error reading frame number",
__FUNCTION__, device->getId());
@@ -115,13 +117,13 @@ void FrameProcessorBase::processNewFrames(const sp<CameraDeviceBase> &device) {
}
ATRACE_INT("cam2_frame", entry.data.i32[0]);
- if (!processSingleFrame(frame, device)) {
+ if (!processSingleFrame(result, device)) {
break;
}
- if (!frame.isEmpty()) {
+ if (!result.mMetadata.isEmpty()) {
Mutex::Autolock al(mLastFrameMutex);
- mLastFrame.acquire(frame);
+ mLastFrame.acquire(result.mMetadata);
}
}
if (res != NOT_ENOUGH_DATA) {
@@ -133,21 +135,22 @@ void FrameProcessorBase::processNewFrames(const sp<CameraDeviceBase> &device) {
return;
}
-bool FrameProcessorBase::processSingleFrame(CameraMetadata &frame,
- const sp<CameraDeviceBase> &device) {
+bool FrameProcessorBase::processSingleFrame(CaptureResult &result,
+ const sp<CameraDeviceBase> &device) {
ALOGV("%s: Camera %d: Process single frame (is empty? %d)",
- __FUNCTION__, device->getId(), frame.isEmpty());
- return processListeners(frame, device) == OK;
+ __FUNCTION__, device->getId(), result.mMetadata.isEmpty());
+ return processListeners(result, device) == OK;
}
-status_t FrameProcessorBase::processListeners(const CameraMetadata &frame,
+status_t FrameProcessorBase::processListeners(const CaptureResult &result,
const sp<CameraDeviceBase> &device) {
ATRACE_CALL();
+
camera_metadata_ro_entry_t entry;
// Quirks: Don't deliver partial results to listeners that don't want them
bool quirkIsPartial = false;
- entry = frame.find(ANDROID_QUIRKS_PARTIAL_RESULT);
+ entry = result.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
if (entry.count != 0 &&
entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
ALOGV("%s: Camera %d: Not forwarding partial result to listeners",
@@ -155,10 +158,13 @@ status_t FrameProcessorBase::processListeners(const CameraMetadata &frame,
quirkIsPartial = true;
}
- entry = frame.find(ANDROID_REQUEST_ID);
+ // TODO: instead of getting requestID from CameraMetadata, we should get it
+ // from CaptureResultExtras. This will require changing Camera2Device.
+ // Currently Camera2Device uses MetadataQueue to store results, which does not
+ // include CaptureResultExtras.
+ entry = result.mMetadata.find(ANDROID_REQUEST_ID);
if (entry.count == 0) {
- ALOGE("%s: Camera %d: Error reading frame id",
- __FUNCTION__, device->getId());
+ ALOGE("%s: Camera %d: Error reading frame id", __FUNCTION__, device->getId());
return BAD_VALUE;
}
int32_t requestId = entry.data.i32[0];
@@ -169,9 +175,8 @@ status_t FrameProcessorBase::processListeners(const CameraMetadata &frame,
List<RangeListener>::iterator item = mRangeListeners.begin();
while (item != mRangeListeners.end()) {
- if (requestId >= item->minId &&
- requestId < item->maxId &&
- (!quirkIsPartial || item->quirkSendPartials) ) {
+ if (requestId >= item->minId && requestId < item->maxId &&
+ (!quirkIsPartial || item->quirkSendPartials)) {
sp<FilteredListener> listener = item->listener.promote();
if (listener == 0) {
item = mRangeListeners.erase(item);
@@ -183,10 +188,12 @@ status_t FrameProcessorBase::processListeners(const CameraMetadata &frame,
item++;
}
}
- ALOGV("Got %zu range listeners out of %zu", listeners.size(), mRangeListeners.size());
+ ALOGV("%s: Camera %d: Got %zu range listeners out of %zu", __FUNCTION__,
+ device->getId(), listeners.size(), mRangeListeners.size());
+
List<sp<FilteredListener> >::iterator item = listeners.begin();
for (; item != listeners.end(); item++) {
- (*item)->onFrameAvailable(requestId, frame);
+ (*item)->onResultAvailable(result);
}
return OK;
}
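A sketch of what a listener looks like against the new onResultAvailable() callback; the class name is hypothetical, and registration still goes through the request-id range mechanism declared in FrameProcessorBase.h:

    // Illustrative listener: the request id is still read out of the metadata
    // here, matching the TODO above, until CaptureResultExtras is plumbed
    // through Camera2Device as well.
    struct SketchListener : public FrameProcessorBase::FilteredListener {
        virtual void onResultAvailable(const CaptureResult &result) {
            camera_metadata_ro_entry_t e =
                    result.mMetadata.find(ANDROID_REQUEST_ID);
            if (e.count > 0) {
                ALOGV("Got result for request %" PRId32, e.data.i32[0]);
            }
        }
    };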
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.h b/services/camera/libcameraservice/common/FrameProcessorBase.h
index 89b608a..15a014e 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.h
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.h
@@ -23,6 +23,7 @@
#include <utils/KeyedVector.h>
#include <utils/List.h>
#include <camera/CameraMetadata.h>
+#include <camera/CaptureResult.h>
namespace android {
@@ -39,8 +40,7 @@ class FrameProcessorBase: public Thread {
virtual ~FrameProcessorBase();
struct FilteredListener: virtual public RefBase {
- virtual void onFrameAvailable(int32_t requestId,
- const CameraMetadata &frame) = 0;
+ virtual void onResultAvailable(const CaptureResult &result) = 0;
};
// Register a listener for a range of IDs [minId, maxId). Multiple listeners
@@ -72,10 +72,10 @@ class FrameProcessorBase: public Thread {
void processNewFrames(const sp<CameraDeviceBase> &device);
- virtual bool processSingleFrame(CameraMetadata &frame,
+ virtual bool processSingleFrame(CaptureResult &result,
const sp<CameraDeviceBase> &device);
- status_t processListeners(const CameraMetadata &frame,
+ status_t processListeners(const CaptureResult &result,
const sp<CameraDeviceBase> &device);
CameraMetadata mLastFrame;
diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
index 2966d82..c33c166 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.cpp
+++ b/services/camera/libcameraservice/device2/Camera2Device.cpp
@@ -112,20 +112,6 @@ status_t Camera2Device::initialize(camera_module_t *module)
return res;
}
- res = device->ops->get_metadata_vendor_tag_ops(device, &mVendorTagOps);
- if (res != OK ) {
- ALOGE("%s: Camera %d: Unable to retrieve tag ops from device: %s (%d)",
- __FUNCTION__, mId, strerror(-res), res);
- device->common.close(&device->common);
- return res;
- }
- res = set_camera_metadata_vendor_tag_ops(mVendorTagOps);
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to set tag ops: %s (%d)",
- __FUNCTION__, mId, strerror(-res), res);
- device->common.close(&device->common);
- return res;
- }
res = device->ops->set_notify_callback(device, notificationCallback,
NULL);
if (res != OK) {
@@ -213,7 +199,7 @@ const CameraMetadata& Camera2Device::info() const {
return mDeviceInfo;
}
-status_t Camera2Device::capture(CameraMetadata &request) {
+status_t Camera2Device::capture(CameraMetadata &request, int64_t* /*lastFrameNumber*/) {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
@@ -221,15 +207,29 @@ status_t Camera2Device::capture(CameraMetadata &request) {
return OK;
}
+status_t Camera2Device::captureList(const List<const CameraMetadata> &requests,
+ int64_t* /*lastFrameNumber*/) {
+ ATRACE_CALL();
+ ALOGE("%s: Camera2Device burst capture not implemented", __FUNCTION__);
+ return INVALID_OPERATION;
+}
-status_t Camera2Device::setStreamingRequest(const CameraMetadata &request) {
+status_t Camera2Device::setStreamingRequest(const CameraMetadata &request,
+ int64_t* /*lastFrameNumber*/) {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
CameraMetadata streamRequest(request);
return mRequestQueue.setStreamSlot(streamRequest.release());
}
-status_t Camera2Device::clearStreamingRequest() {
+status_t Camera2Device::setStreamingRequestList(const List<const CameraMetadata> &requests,
+ int64_t* /*lastFrameNumber*/) {
+ ATRACE_CALL();
+ ALOGE("%s, Camera2Device streaming burst not implemented", __FUNCTION__);
+ return INVALID_OPERATION;
+}
+
+status_t Camera2Device::clearStreamingRequest(int64_t* /*lastFrameNumber*/) {
ATRACE_CALL();
return mRequestQueue.setStreamSlot(NULL);
}
@@ -462,7 +462,13 @@ void Camera2Device::notificationCallback(int32_t msg_type,
if (listener != NULL) {
switch (msg_type) {
case CAMERA2_MSG_ERROR:
- listener->notifyError(ext1, ext2, ext3);
+ // TODO: This needs to be fixed. ext2 and ext3 need to be considered.
+ listener->notifyError(
+ ((ext1 == CAMERA2_MSG_ERROR_DEVICE)
+ || (ext1 == CAMERA2_MSG_ERROR_HARDWARE)) ?
+ ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE :
+ ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE,
+ CaptureResultExtras());
break;
case CAMERA2_MSG_SHUTTER: {
// TODO: Only needed for camera2 API, which is unsupported
@@ -491,16 +497,22 @@ status_t Camera2Device::waitForNextFrame(nsecs_t timeout) {
return mFrameQueue.waitForBuffer(timeout);
}
-status_t Camera2Device::getNextFrame(CameraMetadata *frame) {
+status_t Camera2Device::getNextResult(CaptureResult *result) {
ATRACE_CALL();
+ ALOGV("%s: get CaptureResult", __FUNCTION__);
+ if (result == NULL) {
+ ALOGE("%s: result pointer is NULL", __FUNCTION__);
+ return BAD_VALUE;
+ }
status_t res;
camera_metadata_t *rawFrame;
res = mFrameQueue.dequeue(&rawFrame);
- if (rawFrame == NULL) {
+ if (rawFrame == NULL) {
return NOT_ENOUGH_DATA;
} else if (res == OK) {
- frame->acquire(rawFrame);
+ result->mMetadata.acquire(rawFrame);
}
+
return res;
}
@@ -570,7 +582,7 @@ status_t Camera2Device::pushReprocessBuffer(int reprocessStreamId,
return res;
}
-status_t Camera2Device::flush() {
+status_t Camera2Device::flush(int64_t* /*lastFrameNumber*/) {
ATRACE_CALL();
mRequestQueue.clear();
diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h
index 1f53c56..22a13ac 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.h
+++ b/services/camera/libcameraservice/device2/Camera2Device.h
@@ -47,9 +47,14 @@ class Camera2Device: public CameraDeviceBase {
virtual status_t disconnect();
virtual status_t dump(int fd, const Vector<String16>& args);
virtual const CameraMetadata& info() const;
- virtual status_t capture(CameraMetadata &request);
- virtual status_t setStreamingRequest(const CameraMetadata &request);
- virtual status_t clearStreamingRequest();
+ virtual status_t capture(CameraMetadata &request, int64_t *lastFrameNumber = NULL);
+ virtual status_t captureList(const List<const CameraMetadata> &requests,
+ int64_t *lastFrameNumber = NULL);
+ virtual status_t setStreamingRequest(const CameraMetadata &request,
+ int64_t *lastFrameNumber = NULL);
+ virtual status_t setStreamingRequestList(const List<const CameraMetadata> &requests,
+ int64_t *lastFrameNumber = NULL);
+ virtual status_t clearStreamingRequest(int64_t *lastFrameNumber = NULL);
virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout);
virtual status_t createStream(sp<ANativeWindow> consumer,
uint32_t width, uint32_t height, int format, size_t size,
@@ -65,20 +70,19 @@ class Camera2Device: public CameraDeviceBase {
virtual status_t setNotifyCallback(NotificationListener *listener);
virtual bool willNotify3A();
virtual status_t waitForNextFrame(nsecs_t timeout);
- virtual status_t getNextFrame(CameraMetadata *frame);
+ virtual status_t getNextResult(CaptureResult *frame);
virtual status_t triggerAutofocus(uint32_t id);
virtual status_t triggerCancelAutofocus(uint32_t id);
virtual status_t triggerPrecaptureMetering(uint32_t id);
virtual status_t pushReprocessBuffer(int reprocessStreamId,
buffer_handle_t *buffer, wp<BufferReleasedListener> listener);
// Flush implemented as just a wait
- virtual status_t flush();
+ virtual status_t flush(int64_t *lastFrameNumber = NULL);
private:
const int mId;
camera2_device_t *mHal2Device;
CameraMetadata mDeviceInfo;
- vendor_tag_query_ops_t *mVendorTagOps;
/**
* Queue class for both sending requests to a camera2 device, and for
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 1d4768c..16d6f42 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -102,8 +102,10 @@ status_t Camera3Device::initialize(camera_module_t *module)
camera3_device_t *device;
+ ATRACE_BEGIN("camera3->open");
res = module->common.methods->open(&module->common, deviceName.string(),
reinterpret_cast<hw_device_t**>(&device));
+ ATRACE_END();
if (res != OK) {
SET_ERR_L("Could not open camera: %s (%d)", strerror(-res), res);
@@ -112,9 +114,9 @@ status_t Camera3Device::initialize(camera_module_t *module)
/** Cross-check device version */
- if (device->common.version != CAMERA_DEVICE_API_VERSION_3_0) {
+ if (device->common.version < CAMERA_DEVICE_API_VERSION_3_0) {
SET_ERR_L("Could not open camera: "
- "Camera device is not version %x, reports %x instead",
+ "Camera device should be at least %x, reports %x instead",
CAMERA_DEVICE_API_VERSION_3_0,
device->common.version);
device->common.close(&device->common);
@@ -128,7 +130,7 @@ status_t Camera3Device::initialize(camera_module_t *module)
if (info.device_version != device->common.version) {
SET_ERR_L("HAL reporting mismatched camera_info version (%x)"
" and device version (%x).",
- device->common.version, info.device_version);
+ info.device_version, device->common.version);
device->common.close(&device->common);
return BAD_VALUE;
}
@@ -146,24 +148,6 @@ status_t Camera3Device::initialize(camera_module_t *module)
return BAD_VALUE;
}
- /** Get vendor metadata tags */
-
- mVendorTagOps.get_camera_vendor_section_name = NULL;
-
- ATRACE_BEGIN("camera3->get_metadata_vendor_tag_ops");
- device->ops->get_metadata_vendor_tag_ops(device, &mVendorTagOps);
- ATRACE_END();
-
- if (mVendorTagOps.get_camera_vendor_section_name != NULL) {
- res = set_camera_metadata_vendor_tag_ops(&mVendorTagOps);
- if (res != OK) {
- SET_ERR_L("Unable to set tag ops: %s (%d)",
- strerror(-res), res);
- device->common.close(&device->common);
- return res;
- }
- }
-
/** Start up status tracker thread */
mStatusTracker = new StatusTracker(this);
res = mStatusTracker->run(String8::format("C3Dev-%d-Status", mId).string());
@@ -271,7 +255,9 @@ status_t Camera3Device::disconnect() {
mStatusTracker.clear();
if (mHal3Device != NULL) {
+ ATRACE_BEGIN("camera3->close");
mHal3Device->common.close(&mHal3Device->common);
+ ATRACE_END();
mHal3Device = NULL;
}
@@ -298,6 +284,53 @@ bool Camera3Device::tryLockSpinRightRound(Mutex& lock) {
return gotLock;
}
+ssize_t Camera3Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
+ // TODO: replace below with availableStreamConfiguration for HAL3.2+.
+ camera_metadata_ro_entry availableJpegSizes =
+ mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
+ if (availableJpegSizes.count == 0 || availableJpegSizes.count % 2 != 0) {
+ ALOGE("%s: Camera %d: Can't find find valid available jpeg sizes in static metadata!",
+ __FUNCTION__, mId);
+ return BAD_VALUE;
+ }
+
+ // Get max jpeg size (area-wise).
+ int32_t maxJpegWidth = 0, maxJpegHeight = 0;
+ bool foundMax = false;
+ for (size_t i = 0; i < availableJpegSizes.count; i += 2) {
+ if ((availableJpegSizes.data.i32[i] * availableJpegSizes.data.i32[i + 1])
+ > (maxJpegWidth * maxJpegHeight)) {
+ maxJpegWidth = availableJpegSizes.data.i32[i];
+ maxJpegHeight = availableJpegSizes.data.i32[i + 1];
+ foundMax = true;
+ }
+ }
+ if (!foundMax) {
+ return BAD_VALUE;
+ }
+
+ // Get max jpeg buffer size
+ ssize_t maxJpegBufferSize = 0;
+ camera_metadata_ro_entry jpegMaxSize = mDeviceInfo.find(ANDROID_JPEG_MAX_SIZE);
+ if (jpegMaxSize.count == 0) {
+ ALOGE("%s: Camera %d: Can't find maximum JPEG size in static metadata!", __FUNCTION__, mId);
+ return BAD_VALUE;
+ }
+ maxJpegBufferSize = jpegMaxSize.data.i32[0];
+
+ // Calculate final jpeg buffer size for the given resolution.
+ float scaleFactor = ((float) (width * height)) / (maxJpegWidth * maxJpegHeight);
+ ssize_t jpegBufferSize = scaleFactor * maxJpegBufferSize;
+ // Bound the buffer size to [kMinJpegBufferSize, maxJpegBufferSize].
+ if (jpegBufferSize > maxJpegBufferSize) {
+ jpegBufferSize = maxJpegBufferSize;
+ } else if (jpegBufferSize < kMinJpegBufferSize) {
+ jpegBufferSize = kMinJpegBufferSize;
+ }
+
+ return jpegBufferSize;
+}
+
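A rough worked example of the scaling above, using hypothetical numbers rather than values from any particular HAL:

    // Suppose the largest available JPEG size is 4000x3000 and
    // ANDROID_JPEG_MAX_SIZE reports 10000000 bytes. For a 2000x1500 stream:
    //   scaleFactor    = (2000 * 1500) / (4000.0f * 3000) = 0.25
    //   jpegBufferSize = 0.25 * 10000000                  = 2500000 bytes
    // which already lies inside [kMinJpegBufferSize, maxJpegBufferSize], so
    // neither clamp fires; a tiny thumbnail-sized stream would instead be
    // raised to kMinJpegBufferSize (256KB plus the blob header).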
status_t Camera3Device::dump(int fd, const Vector<String16> &args) {
ATRACE_CALL();
(void)args;
@@ -386,14 +419,7 @@ const CameraMetadata& Camera3Device::info() const {
return mDeviceInfo;
}
-status_t Camera3Device::capture(CameraMetadata &request) {
- ATRACE_CALL();
- status_t res;
- Mutex::Autolock il(mInterfaceLock);
- Mutex::Autolock l(mLock);
-
- // TODO: take ownership of the request
-
+status_t Camera3Device::checkStatusOkToCaptureLocked() {
switch (mStatus) {
case STATUS_ERROR:
CLOGE("Device has encountered a serious error");
@@ -402,7 +428,6 @@ status_t Camera3Device::capture(CameraMetadata &request) {
CLOGE("Device not initialized");
return INVALID_OPERATION;
case STATUS_UNCONFIGURED:
- // May be lazily configuring streams, will check during setup
case STATUS_CONFIGURED:
case STATUS_ACTIVE:
// OK
@@ -411,71 +436,119 @@ status_t Camera3Device::capture(CameraMetadata &request) {
SET_ERR_L("Unexpected status: %d", mStatus);
return INVALID_OPERATION;
}
+ return OK;
+}
- sp<CaptureRequest> newRequest = setUpRequestLocked(request);
- if (newRequest == NULL) {
- CLOGE("Can't create capture request");
+status_t Camera3Device::convertMetadataListToRequestListLocked(
+ const List<const CameraMetadata> &metadataList, RequestList *requestList) {
+ if (requestList == NULL) {
+ CLOGE("requestList cannot be NULL.");
return BAD_VALUE;
}
- res = mRequestThread->queueRequest(newRequest);
- if (res == OK) {
- waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
- if (res != OK) {
- SET_ERR_L("Can't transition to active in %f seconds!",
- kActiveTimeout/1e9);
+ int32_t burstId = 0;
+ for (List<const CameraMetadata>::const_iterator it = metadataList.begin();
+ it != metadataList.end(); ++it) {
+ sp<CaptureRequest> newRequest = setUpRequestLocked(*it);
+ if (newRequest == 0) {
+ CLOGE("Can't create capture request");
+ return BAD_VALUE;
}
- ALOGV("Camera %d: Capture request enqueued", mId);
+
+ // Setup burst Id and request Id
+ newRequest->mResultExtras.burstId = burstId++;
+ if (it->exists(ANDROID_REQUEST_ID)) {
+ if (it->find(ANDROID_REQUEST_ID).count == 0) {
+ CLOGE("RequestID entry exists; but must not be empty in metadata");
+ return BAD_VALUE;
+ }
+ newRequest->mResultExtras.requestId = it->find(ANDROID_REQUEST_ID).data.i32[0];
+ } else {
+ CLOGE("RequestID does not exist in metadata");
+ return BAD_VALUE;
+ }
+
+ requestList->push_back(newRequest);
+
+ ALOGV("%s: requestId = %" PRId32, __FUNCTION__, newRequest->mResultExtras.requestId);
}
- return res;
+ return OK;
}
+status_t Camera3Device::capture(CameraMetadata &request, int64_t* /*lastFrameNumber*/) {
+ ATRACE_CALL();
+
+ List<const CameraMetadata> requests;
+ requests.push_back(request);
+ return captureList(requests, /*lastFrameNumber*/NULL);
+}
-status_t Camera3Device::setStreamingRequest(const CameraMetadata &request) {
+status_t Camera3Device::submitRequestsHelper(
+ const List<const CameraMetadata> &requests, bool repeating,
+ /*out*/
+ int64_t *lastFrameNumber) {
ATRACE_CALL();
- status_t res;
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
- switch (mStatus) {
- case STATUS_ERROR:
- CLOGE("Device has encountered a serious error");
- return INVALID_OPERATION;
- case STATUS_UNINITIALIZED:
- CLOGE("Device not initialized");
- return INVALID_OPERATION;
- case STATUS_UNCONFIGURED:
- // May be lazily configuring streams, will check during setup
- case STATUS_CONFIGURED:
- case STATUS_ACTIVE:
- // OK
- break;
- default:
- SET_ERR_L("Unexpected status: %d", mStatus);
- return INVALID_OPERATION;
+ status_t res = checkStatusOkToCaptureLocked();
+ if (res != OK) {
+ // error logged by previous call
+ return res;
}
- sp<CaptureRequest> newRepeatingRequest = setUpRequestLocked(request);
- if (newRepeatingRequest == NULL) {
- CLOGE("Can't create repeating request");
- return BAD_VALUE;
+ RequestList requestList;
+
+ res = convertMetadataListToRequestListLocked(requests, /*out*/&requestList);
+ if (res != OK) {
+ // error logged by previous call
+ return res;
}
- RequestList newRepeatingRequests;
- newRepeatingRequests.push_back(newRepeatingRequest);
+ if (repeating) {
+ res = mRequestThread->setRepeatingRequests(requestList, lastFrameNumber);
+ } else {
+ res = mRequestThread->queueRequestList(requestList, lastFrameNumber);
+ }
- res = mRequestThread->setRepeatingRequests(newRepeatingRequests);
if (res == OK) {
- waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
+ waitUntilStateThenRelock(/*active*/true, kActiveTimeout);
if (res != OK) {
SET_ERR_L("Can't transition to active in %f seconds!",
kActiveTimeout/1e9);
}
- ALOGV("Camera %d: Repeating request set", mId);
+ ALOGV("Camera %d: Capture request %" PRId32 " enqueued", mId,
+ (*(requestList.begin()))->mResultExtras.requestId);
+ } else {
+ CLOGE("Cannot queue request. Impossible.");
+ return BAD_VALUE;
}
+
return res;
}
+status_t Camera3Device::captureList(const List<const CameraMetadata> &requests,
+ int64_t *lastFrameNumber) {
+ ATRACE_CALL();
+
+ return submitRequestsHelper(requests, /*repeating*/false, lastFrameNumber);
+}
+
+status_t Camera3Device::setStreamingRequest(const CameraMetadata &request,
+ int64_t* /*lastFrameNumber*/) {
+ ATRACE_CALL();
+
+ List<const CameraMetadata> requests;
+ requests.push_back(request);
+ return setStreamingRequestList(requests, /*lastFrameNumber*/NULL);
+}
+
+status_t Camera3Device::setStreamingRequestList(const List<const CameraMetadata> &requests,
+ int64_t *lastFrameNumber) {
+ ATRACE_CALL();
+
+ return submitRequestsHelper(requests, /*repeating*/true, lastFrameNumber);
+}
sp<Camera3Device::CaptureRequest> Camera3Device::setUpRequestLocked(
const CameraMetadata &request) {
@@ -497,7 +570,7 @@ sp<Camera3Device::CaptureRequest> Camera3Device::setUpRequestLocked(
return newRequest;
}
-status_t Camera3Device::clearStreamingRequest() {
+status_t Camera3Device::clearStreamingRequest(int64_t *lastFrameNumber) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
@@ -519,7 +592,8 @@ status_t Camera3Device::clearStreamingRequest() {
return INVALID_OPERATION;
}
ALOGV("Camera %d: Clearing repeating request", mId);
- return mRequestThread->clearRepeatingRequests();
+
+ return mRequestThread->clearRepeatingRequests(lastFrameNumber);
}
status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) {
@@ -714,8 +788,17 @@ status_t Camera3Device::createStream(sp<ANativeWindow> consumer,
sp<Camera3OutputStream> newStream;
if (format == HAL_PIXEL_FORMAT_BLOB) {
+ ssize_t jpegBufferSize = getJpegBufferSize(width, height);
+ if (jpegBufferSize > 0) {
+ ALOGV("%s: Overwrite Jpeg output buffer size from %zu to %zu",
+ __FUNCTION__, size, jpegBufferSize);
+ } else {
+ SET_ERR_L("Invalid jpeg buffer size %zd", jpegBufferSize);
+ return BAD_VALUE;
+ }
+
newStream = new Camera3OutputStream(mNextStreamId, consumer,
- width, height, size, format);
+ width, height, jpegBufferSize, format);
} else {
newStream = new Camera3OutputStream(mNextStreamId, consumer,
width, height, format);
@@ -840,16 +923,20 @@ status_t Camera3Device::deleteStream(int id) {
}
sp<Camera3StreamInterface> deletedStream;
+ ssize_t outputStreamIdx = mOutputStreams.indexOfKey(id);
if (mInputStream != NULL && id == mInputStream->getId()) {
deletedStream = mInputStream;
mInputStream.clear();
} else {
- ssize_t idx = mOutputStreams.indexOfKey(id);
- if (idx == NAME_NOT_FOUND) {
+ if (outputStreamIdx == NAME_NOT_FOUND) {
CLOGE("Stream %d does not exist", id);
return BAD_VALUE;
}
- deletedStream = mOutputStreams.editValueAt(idx);
+ }
+
+ // Delete output stream or the output part of a bi-directional stream.
+ if (outputStreamIdx != NAME_NOT_FOUND) {
+ deletedStream = mOutputStreams.editValueAt(outputStreamIdx);
mOutputStreams.removeItem(id);
}
@@ -918,6 +1005,10 @@ status_t Camera3Device::waitUntilDrained() {
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
+ return waitUntilDrainedLocked();
+}
+
+status_t Camera3Device::waitUntilDrainedLocked() {
switch (mStatus) {
case STATUS_UNINITIALIZED:
case STATUS_UNCONFIGURED:
@@ -1030,7 +1121,7 @@ status_t Camera3Device::waitForNextFrame(nsecs_t timeout) {
return OK;
}
-status_t Camera3Device::getNextFrame(CameraMetadata *frame) {
+status_t Camera3Device::getNextResult(CaptureResult *frame) {
ATRACE_CALL();
Mutex::Autolock l(mOutputLock);
@@ -1038,8 +1129,14 @@ status_t Camera3Device::getNextFrame(CameraMetadata *frame) {
return NOT_ENOUGH_DATA;
}
- CameraMetadata &result = *(mResultQueue.begin());
- frame->acquire(result);
+ if (frame == NULL) {
+ ALOGE("%s: argument cannot be NULL", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ CaptureResult &result = *(mResultQueue.begin());
+ frame->mResultExtras = result.mResultExtras;
+ frame->mMetadata.acquire(result.mMetadata);
mResultQueue.erase(mResultQueue.begin());
return OK;
@@ -1117,14 +1214,25 @@ status_t Camera3Device::pushReprocessBuffer(int reprocessStreamId,
return INVALID_OPERATION;
}
-status_t Camera3Device::flush() {
+status_t Camera3Device::flush(int64_t *frameNumber) {
ATRACE_CALL();
ALOGV("%s: Camera %d: Flushing all requests", __FUNCTION__, mId);
Mutex::Autolock il(mInterfaceLock);
- Mutex::Autolock l(mLock);
- mRequestThread->clear();
- return mHal3Device->ops->flush(mHal3Device);
+ {
+ Mutex::Autolock l(mLock);
+ mRequestThread->clear(/*out*/frameNumber);
+ }
+
+ status_t res;
+ if (mHal3Device->common.version >= CAMERA_DEVICE_API_VERSION_3_1) {
+ res = mHal3Device->ops->flush(mHal3Device);
+ } else {
+ Mutex::Autolock l(mLock);
+ res = waitUntilDrainedLocked();
+ }
+
+ return res;
}
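A hypothetical caller-side sketch of the new out-parameter; device is an assumption:

    int64_t lastFrameNumber = -1;
    if (device->flush(&lastFrameNumber) == OK) {
        // Per the CameraDeviceBase comment, this is the last frame number of
        // the repeating request that was in flight before the flush.
        ALOGV("Flushed; repeating request ends at frame %" PRId64,
                lastFrameNumber);
    }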
/**
@@ -1392,13 +1500,13 @@ void Camera3Device::setErrorStateLockedV(const char *fmt, va_list args) {
* In-flight request management
*/
-status_t Camera3Device::registerInFlight(int32_t frameNumber,
- int32_t requestId, int32_t numBuffers) {
+status_t Camera3Device::registerInFlight(uint32_t frameNumber,
+ int32_t numBuffers, CaptureResultExtras resultExtras) {
ATRACE_CALL();
Mutex::Autolock l(mInFlightLock);
ssize_t res;
- res = mInFlightMap.add(frameNumber, InFlightRequest(requestId, numBuffers));
+ res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras));
if (res < 0) return res;
return OK;
@@ -1410,8 +1518,8 @@ status_t Camera3Device::registerInFlight(int32_t frameNumber,
* to the output frame queue
*/
bool Camera3Device::processPartial3AQuirk(
- int32_t frameNumber, int32_t requestId,
- const CameraMetadata& partial) {
+ uint32_t frameNumber,
+ const CameraMetadata& partial, const CaptureResultExtras& resultExtras) {
// Check if all 3A states are present
// The full list of fields is
@@ -1460,7 +1568,7 @@ bool Camera3Device::processPartial3AQuirk(
ALOGVV("%s: Camera %d: Frame %d, Request ID %d: AF mode %d, AWB mode %d, "
"AF state %d, AE state %d, AWB state %d, "
"AF trigger %d, AE precapture trigger %d",
- __FUNCTION__, mId, frameNumber, requestId,
+ __FUNCTION__, mId, frameNumber, resultExtras.requestId,
afMode, awbMode,
afState, aeState, awbState,
afTriggerId, aeTriggerId);
@@ -1475,58 +1583,63 @@ bool Camera3Device::processPartial3AQuirk(
Mutex::Autolock l(mOutputLock);
- CameraMetadata& min3AResult =
- *mResultQueue.insert(
- mResultQueue.end(),
- CameraMetadata(kMinimal3AResultEntries, /*dataCapacity*/ 0));
-
- if (!insert3AResult(min3AResult, ANDROID_REQUEST_FRAME_COUNT,
- &frameNumber, frameNumber)) {
+ CaptureResult captureResult;
+ captureResult.mResultExtras = resultExtras;
+ captureResult.mMetadata = CameraMetadata(kMinimal3AResultEntries, /*dataCapacity*/ 0);
+ // TODO: change this to sp<CaptureResult>. This will need other changes, including,
+ // but not limited to CameraDeviceBase::getNextResult
+ CaptureResult& min3AResult =
+ *mResultQueue.insert(mResultQueue.end(), captureResult);
+
+ if (!insert3AResult(min3AResult.mMetadata, ANDROID_REQUEST_FRAME_COUNT,
+ // TODO: This is problematic casting. Need to fix CameraMetadata.
+ reinterpret_cast<int32_t*>(&frameNumber), frameNumber)) {
return false;
}
- if (!insert3AResult(min3AResult, ANDROID_REQUEST_ID,
+ int32_t requestId = resultExtras.requestId;
+ if (!insert3AResult(min3AResult.mMetadata, ANDROID_REQUEST_ID,
&requestId, frameNumber)) {
return false;
}
static const uint8_t partialResult = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
- if (!insert3AResult(min3AResult, ANDROID_QUIRKS_PARTIAL_RESULT,
+ if (!insert3AResult(min3AResult.mMetadata, ANDROID_QUIRKS_PARTIAL_RESULT,
&partialResult, frameNumber)) {
return false;
}
- if (!insert3AResult(min3AResult, ANDROID_CONTROL_AF_MODE,
+ if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AF_MODE,
&afMode, frameNumber)) {
return false;
}
- if (!insert3AResult(min3AResult, ANDROID_CONTROL_AWB_MODE,
+ if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AWB_MODE,
&awbMode, frameNumber)) {
return false;
}
- if (!insert3AResult(min3AResult, ANDROID_CONTROL_AE_STATE,
+ if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AE_STATE,
&aeState, frameNumber)) {
return false;
}
- if (!insert3AResult(min3AResult, ANDROID_CONTROL_AF_STATE,
+ if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AF_STATE,
&afState, frameNumber)) {
return false;
}
- if (!insert3AResult(min3AResult, ANDROID_CONTROL_AWB_STATE,
+ if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AWB_STATE,
&awbState, frameNumber)) {
return false;
}
- if (!insert3AResult(min3AResult, ANDROID_CONTROL_AF_TRIGGER_ID,
+ if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AF_TRIGGER_ID,
&afTriggerId, frameNumber)) {
return false;
}
- if (!insert3AResult(min3AResult, ANDROID_CONTROL_AE_PRECAPTURE_ID,
+ if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AE_PRECAPTURE_ID,
&aeTriggerId, frameNumber)) {
return false;
}
@@ -1538,7 +1651,7 @@ bool Camera3Device::processPartial3AQuirk(
template<typename T>
bool Camera3Device::get3AResult(const CameraMetadata& result, int32_t tag,
- T* value, int32_t frameNumber) {
+ T* value, uint32_t frameNumber) {
(void) frameNumber;
camera_metadata_ro_entry_t entry;
@@ -1563,7 +1676,7 @@ bool Camera3Device::get3AResult(const CameraMetadata& result, int32_t tag,
template<typename T>
bool Camera3Device::insert3AResult(CameraMetadata& result, int32_t tag,
- const T* value, int32_t frameNumber) {
+ const T* value, uint32_t frameNumber) {
if (result.update(tag, value, 1) != NO_ERROR) {
mResultQueue.erase(--mResultQueue.end(), mResultQueue.end());
SET_ERR("Frame %d: Failed to set %s in partial metadata",
@@ -1590,11 +1703,12 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
}
bool partialResultQuirk = false;
CameraMetadata collectedQuirkResult;
+ CaptureResultExtras resultExtras;
- // Get capture timestamp from list of in-flight requests, where it was added
- // by the shutter notification for this frame. Then update the in-flight
- // status and remove the in-flight entry if all result data has been
- // received.
+ // Get capture timestamp and resultExtras from list of in-flight requests,
+ // where it was added by the shutter notification for this frame.
+ // Then update the in-flight status and remove the in-flight entry if
+ // all result data has been received.
nsecs_t timestamp = 0;
{
Mutex::Autolock l(mInFlightLock);
@@ -1605,6 +1719,10 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
return;
}
InFlightRequest &request = mInFlightMap.editValueAt(idx);
+ ALOGVV("%s: got InFlightRequest requestId = %" PRId32 ", frameNumber = %" PRId64
+ ", burstId = %" PRId32,
+ __FUNCTION__, request.resultExtras.requestId, request.resultExtras.frameNumber,
+ request.resultExtras.burstId);
// Check if this result carries only partial metadata
if (mUsePartialResultQuirk && result->result != NULL) {
@@ -1626,13 +1744,15 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
if (!request.partialResultQuirk.haveSent3A) {
request.partialResultQuirk.haveSent3A =
processPartial3AQuirk(frameNumber,
- request.requestId,
- request.partialResultQuirk.collectedResult);
+ request.partialResultQuirk.collectedResult,
+ request.resultExtras);
}
}
}
timestamp = request.captureTimestamp;
+ resultExtras = request.resultExtras;
+
/**
* One of the following must happen before it's legal to call process_capture_result,
* unless partial metadata is being provided:
@@ -1668,8 +1788,10 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
return;
}
- // Check if everything has arrived for this result (buffers and metadata)
- if (request.haveResultMetadata && request.numBuffersLeft == 0) {
+ // Check if everything has arrived for this result (buffers and metadata), remove it from
+ // InFlightMap if both arrived or HAL reports error for this request (i.e. during flush).
+ if ((request.requestStatus != OK) ||
+ (request.haveResultMetadata && request.numBuffersLeft == 0)) {
ATRACE_ASYNC_END("frame capture", frameNumber);
mInFlightMap.removeItemsAt(idx, 1);
}
@@ -1689,19 +1811,21 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
gotResult = true;
- if (frameNumber != mNextResultFrameNumber) {
+ // TODO: need to track errors for tighter bounds on expected frame number
+ if (frameNumber < mNextResultFrameNumber) {
SET_ERR("Out-of-order capture result metadata submitted! "
"(got frame number %d, expecting %d)",
frameNumber, mNextResultFrameNumber);
return;
}
- mNextResultFrameNumber++;
+ mNextResultFrameNumber = frameNumber + 1;
- CameraMetadata captureResult;
- captureResult = result->result;
+ CaptureResult captureResult;
+ captureResult.mResultExtras = resultExtras;
+ captureResult.mMetadata = result->result;
- if (captureResult.update(ANDROID_REQUEST_FRAME_COUNT,
- (int32_t*)&frameNumber, 1) != OK) {
+ if (captureResult.mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
+ (int32_t*)&frameNumber, 1) != OK) {
SET_ERR("Failed to set frame# in metadata (%d)",
frameNumber);
gotResult = false;
@@ -1712,15 +1836,15 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
// Append any previous partials to form a complete result
if (mUsePartialResultQuirk && !collectedQuirkResult.isEmpty()) {
- captureResult.append(collectedQuirkResult);
+ captureResult.mMetadata.append(collectedQuirkResult);
}
- captureResult.sort();
+ captureResult.mMetadata.sort();
// Check that there's a timestamp in the result metadata
camera_metadata_entry entry =
- captureResult.find(ANDROID_SENSOR_TIMESTAMP);
+ captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
if (entry.count == 0) {
SET_ERR("No timestamp provided by HAL for frame %d!",
frameNumber);
@@ -1734,9 +1858,13 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
if (gotResult) {
// Valid result, insert into queue
- CameraMetadata& queuedResult =
- *mResultQueue.insert(mResultQueue.end(), CameraMetadata());
- queuedResult.swap(captureResult);
+ List<CaptureResult>::iterator queuedResult =
+ mResultQueue.insert(mResultQueue.end(), CaptureResult(captureResult));
+ ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
+ ", burstId = %" PRId32, __FUNCTION__,
+ queuedResult->mResultExtras.requestId,
+ queuedResult->mResultExtras.frameNumber,
+ queuedResult->mResultExtras.burstId);
}
} // scope for mOutputLock
@@ -1762,8 +1890,6 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
}
-
-
void Camera3Device::notify(const camera3_notify_msg *msg) {
ATRACE_CALL();
NotificationListener *listener;
@@ -1790,18 +1916,29 @@ void Camera3Device::notify(const camera3_notify_msg *msg) {
mId, __FUNCTION__, msg->message.error.frame_number,
streamId, msg->message.error.error_code);
+ CaptureResultExtras resultExtras;
// Set request error status for the request in the in-flight tracking
{
Mutex::Autolock l(mInFlightLock);
ssize_t idx = mInFlightMap.indexOfKey(msg->message.error.frame_number);
if (idx >= 0) {
- mInFlightMap.editValueAt(idx).requestStatus = msg->message.error.error_code;
+ InFlightRequest &r = mInFlightMap.editValueAt(idx);
+ r.requestStatus = msg->message.error.error_code;
+ resultExtras = r.resultExtras;
+ } else {
+ resultExtras.frameNumber = msg->message.error.frame_number;
+ ALOGE("Camera %d: %s: cannot find in-flight request on frame %" PRId64
+ " error", mId, __FUNCTION__, resultExtras.frameNumber);
}
}
if (listener != NULL) {
- listener->notifyError(msg->message.error.error_code,
- msg->message.error.frame_number, streamId);
+ if (msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
+ listener->notifyError(ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+ resultExtras);
+ }
+ } else {
+ ALOGE("Camera %d: %s: no listener available", mId, __FUNCTION__);
}
break;
}
@@ -1812,16 +1949,17 @@ void Camera3Device::notify(const camera3_notify_msg *msg) {
// Verify ordering of shutter notifications
{
Mutex::Autolock l(mOutputLock);
- if (frameNumber != mNextShutterFrameNumber) {
+ // TODO: need to track errors for tighter bounds on expected frame number.
+ if (frameNumber < mNextShutterFrameNumber) {
SET_ERR("Shutter notification out-of-order. Expected "
"notification for frame %d, got frame %d",
mNextShutterFrameNumber, frameNumber);
break;
}
- mNextShutterFrameNumber++;
+ mNextShutterFrameNumber = frameNumber + 1;
}
- int32_t requestId = -1;
+ CaptureResultExtras resultExtras;
// Set timestamp for the request in the in-flight tracking
// and get the request ID to send upstream
@@ -1831,7 +1969,7 @@ void Camera3Device::notify(const camera3_notify_msg *msg) {
if (idx >= 0) {
InFlightRequest &r = mInFlightMap.editValueAt(idx);
r.captureTimestamp = timestamp;
- requestId = r.requestId;
+ resultExtras = r.resultExtras;
}
}
if (idx < 0) {
@@ -1840,10 +1978,10 @@ void Camera3Device::notify(const camera3_notify_msg *msg) {
break;
}
ALOGVV("Camera %d: %s: Shutter fired for frame %d (id %d) at %" PRId64,
- mId, __FUNCTION__, frameNumber, requestId, timestamp);
+ mId, __FUNCTION__, frameNumber, resultExtras.requestId, timestamp);
// Call listener, if any
if (listener != NULL) {
- listener->notifyShutter(requestId, timestamp);
+ listener->notifyShutter(resultExtras, timestamp);
}
break;
}
@@ -1865,6 +2003,7 @@ CameraMetadata Camera3Device::getLatestRequestLocked() {
return retVal;
}
+
/**
* RequestThread inner class methods
*/
@@ -1881,7 +2020,8 @@ Camera3Device::RequestThread::RequestThread(wp<Camera3Device> parent,
mDoPause(false),
mPaused(true),
mFrameNumber(0),
- mLatestRequestId(NAME_NOT_FOUND) {
+ mLatestRequestId(NAME_NOT_FOUND),
+ mRepeatingLastFrameNumber(NO_IN_FLIGHT_REPEATING_FRAMES) {
mStatusId = statusTracker->addComponent();
}
@@ -1890,10 +2030,22 @@ void Camera3Device::RequestThread::configurationComplete() {
mReconfigured = true;
}
-status_t Camera3Device::RequestThread::queueRequest(
- sp<CaptureRequest> request) {
+status_t Camera3Device::RequestThread::queueRequestList(
+ List<sp<CaptureRequest> > &requests,
+ /*out*/
+ int64_t *lastFrameNumber) {
Mutex::Autolock l(mRequestLock);
- mRequestQueue.push_back(request);
+ for (List<sp<CaptureRequest> >::iterator it = requests.begin(); it != requests.end();
+ ++it) {
+ mRequestQueue.push_back(*it);
+ }
+
+ if (lastFrameNumber != NULL) {
+ *lastFrameNumber = mFrameNumber + mRequestQueue.size() - 1;
+ ALOGV("%s: requestId %d, mFrameNumber %" PRId32 ", lastFrameNumber %" PRId64 ".",
+ __FUNCTION__, (*(requests.begin()))->mResultExtras.requestId, mFrameNumber,
+ *lastFrameNumber);
+ }
unpauseForNewRequests();
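To make the lastFrameNumber arithmetic concrete, a hypothetical state (numbers are illustrative only):

    // If mFrameNumber is 100, the queue already holds 2 earlier requests, and
    // a 3-request burst is appended, the burst is assigned frame numbers 102,
    // 103 and 104 as the request thread dequeues it, so
    //   *lastFrameNumber = 100 + 5 - 1 = 104.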
@@ -1957,28 +2109,43 @@ status_t Camera3Device::RequestThread::queueTriggerLocked(
}
status_t Camera3Device::RequestThread::setRepeatingRequests(
- const RequestList &requests) {
+ const RequestList &requests,
+ /*out*/
+ int64_t *lastFrameNumber) {
Mutex::Autolock l(mRequestLock);
+ if (lastFrameNumber != NULL) {
+ *lastFrameNumber = mRepeatingLastFrameNumber;
+ }
mRepeatingRequests.clear();
mRepeatingRequests.insert(mRepeatingRequests.begin(),
requests.begin(), requests.end());
unpauseForNewRequests();
+ mRepeatingLastFrameNumber = NO_IN_FLIGHT_REPEATING_FRAMES;
return OK;
}
-status_t Camera3Device::RequestThread::clearRepeatingRequests() {
+status_t Camera3Device::RequestThread::clearRepeatingRequests(/*out*/int64_t *lastFrameNumber) {
Mutex::Autolock l(mRequestLock);
mRepeatingRequests.clear();
+ if (lastFrameNumber != NULL) {
+ *lastFrameNumber = mRepeatingLastFrameNumber;
+ }
+ mRepeatingLastFrameNumber = NO_IN_FLIGHT_REPEATING_FRAMES;
return OK;
}
-status_t Camera3Device::RequestThread::clear() {
+status_t Camera3Device::RequestThread::clear(/*out*/int64_t *lastFrameNumber) {
Mutex::Autolock l(mRequestLock);
+ ALOGV("RequestThread::%s:", __FUNCTION__);
mRepeatingRequests.clear();
mRequestQueue.clear();
mTriggerMap.clear();
+ if (lastFrameNumber != NULL) {
+ *lastFrameNumber = mRepeatingLastFrameNumber;
+ }
+ mRepeatingLastFrameNumber = NO_IN_FLIGHT_REPEATING_FRAMES;
return OK;
}
@@ -2030,6 +2197,7 @@ bool Camera3Device::RequestThread::threadLoop() {
// Create request to HAL
camera3_capture_request_t request = camera3_capture_request_t();
+ request.frame_number = nextRequest->mResultExtras.frameNumber;
Vector<camera3_stream_buffer_t> outputBuffers;
// Get the request ID, if any
@@ -2050,7 +2218,7 @@ bool Camera3Device::RequestThread::threadLoop() {
if (res < 0) {
SET_ERR("RequestThread: Unable to insert triggers "
"(capture request %d, HAL device: %s (%d)",
- (mFrameNumber+1), strerror(-res), res);
+ request.frame_number, strerror(-res), res);
cleanUpFailedRequest(request, nextRequest, outputBuffers);
return false;
}
@@ -2068,7 +2236,7 @@ bool Camera3Device::RequestThread::threadLoop() {
if (res != OK) {
SET_ERR("RequestThread: Unable to insert dummy trigger IDs "
"(capture request %d, HAL device: %s (%d)",
- (mFrameNumber+1), strerror(-res), res);
+ request.frame_number, strerror(-res), res);
cleanUpFailedRequest(request, nextRequest, outputBuffers);
return false;
}
@@ -2092,7 +2260,7 @@ bool Camera3Device::RequestThread::threadLoop() {
if (e.count > 0) {
ALOGV("%s: Request (frame num %d) had AF trigger 0x%x",
__FUNCTION__,
- mFrameNumber+1,
+ request.frame_number,
e.data.u8[0]);
}
}
@@ -2134,8 +2302,6 @@ bool Camera3Device::RequestThread::threadLoop() {
request.num_output_buffers++;
}
- request.frame_number = mFrameNumber++;
-
// Log request in the in-flight queue
sp<Camera3Device> parent = mParent.promote();
if (parent == NULL) {
@@ -2144,8 +2310,13 @@ bool Camera3Device::RequestThread::threadLoop() {
return false;
}
- res = parent->registerInFlight(request.frame_number, requestId,
- request.num_output_buffers);
+ res = parent->registerInFlight(request.frame_number,
+ request.num_output_buffers, nextRequest->mResultExtras);
+ ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
+ ", burstId = %" PRId32 ".",
+ __FUNCTION__,
+ nextRequest->mResultExtras.requestId, nextRequest->mResultExtras.frameNumber,
+ nextRequest->mResultExtras.burstId);
if (res != OK) {
SET_ERR("RequestThread: Unable to register new in-flight request:"
" %s (%d)", strerror(-res), res);
@@ -2222,6 +2393,7 @@ CameraMetadata Camera3Device::RequestThread::getLatestRequest() const {
return mLatestRequest;
}
+
void Camera3Device::RequestThread::cleanUpFailedRequest(
camera3_capture_request_t &request,
sp<CaptureRequest> &nextRequest,
@@ -2263,6 +2435,9 @@ sp<Camera3Device::CaptureRequest>
++firstRequest,
requests.end());
// No need to wait any longer
+
+ mRepeatingLastFrameNumber = mFrameNumber + requests.size() - 1;
+
break;
}
@@ -2314,6 +2489,9 @@ sp<Camera3Device::CaptureRequest>
mReconfigured = false;
}
+ if (nextRequest != NULL) {
+ nextRequest->mResultExtras.frameNumber = mFrameNumber++;
+ }
return nextRequest;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 468f641..00ae771 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -24,6 +24,8 @@
#include <utils/Thread.h>
#include <utils/KeyedVector.h>
#include <hardware/camera3.h>
+#include <camera/CaptureResult.h>
+#include <camera/camera2/ICameraDeviceUser.h>
#include "common/CameraDeviceBase.h"
#include "device3/StatusTracker.h"
@@ -54,7 +56,7 @@ class Camera3StreamInterface;
}
/**
- * CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_3_0
+ * CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_3_0 or higher.
*/
class Camera3Device :
public CameraDeviceBase,
@@ -78,15 +80,21 @@ class Camera3Device :
// Capture and setStreamingRequest will configure streams if currently in
// idle state
- virtual status_t capture(CameraMetadata &request);
- virtual status_t setStreamingRequest(const CameraMetadata &request);
- virtual status_t clearStreamingRequest();
+ virtual status_t capture(CameraMetadata &request, int64_t *lastFrameNumber = NULL);
+ virtual status_t captureList(const List<const CameraMetadata> &requests,
+ int64_t *lastFrameNumber = NULL);
+ virtual status_t setStreamingRequest(const CameraMetadata &request,
+ int64_t *lastFrameNumber = NULL);
+ virtual status_t setStreamingRequestList(const List<const CameraMetadata> &requests,
+ int64_t *lastFrameNumber = NULL);
+ virtual status_t clearStreamingRequest(int64_t *lastFrameNumber = NULL);
virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout);
// Actual stream creation/deletion is delayed until first request is submitted
// If adding streams while actively capturing, will pause device before adding
- // stream, reconfiguring device, and unpausing.
+ // stream, reconfiguring device, and unpausing. Note that, for JPEG streams, the
+ // buffer size may be overwritten by a more accurate value calculated by Camera3Device.
virtual status_t createStream(sp<ANativeWindow> consumer,
uint32_t width, uint32_t height, int format, size_t size,
int *id);
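
The new optional lastFrameNumber out-parameters let a caller learn which frame number the last affected request will carry, so results can be sequenced against a cancel or flush. A minimal caller-side sketch, assuming a valid pointer to a CameraDeviceBase-derived device; onRepeatingRequestDone() is a hypothetical notification, not part of this patch:

    int64_t lastFrameNumber = -1;
    status_t res = device->clearStreamingRequest(&lastFrameNumber);
    if (res == OK && lastFrameNumber >= 0) {
        // Results up to and including lastFrameNumber still belong to the
        // repeating request; anything later comes from subsequent capture() calls.
        onRepeatingRequestDone(lastFrameNumber);   // hypothetical callback
    }
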
@@ -116,7 +124,7 @@ class Camera3Device :
virtual status_t setNotifyCallback(NotificationListener *listener);
virtual bool willNotify3A();
virtual status_t waitForNextFrame(nsecs_t timeout);
- virtual status_t getNextFrame(CameraMetadata *frame);
+ virtual status_t getNextResult(CaptureResult *frame);
virtual status_t triggerAutofocus(uint32_t id);
virtual status_t triggerCancelAutofocus(uint32_t id);
@@ -125,7 +133,7 @@ class Camera3Device :
virtual status_t pushReprocessBuffer(int reprocessStreamId,
buffer_handle_t *buffer, wp<BufferReleasedListener> listener);
- virtual status_t flush();
+ virtual status_t flush(int64_t *lastFrameNumber = NULL);
// Methods called by subclasses
void notifyStatus(bool idle); // updates from StatusTracker
@@ -137,6 +145,8 @@ class Camera3Device :
static const nsecs_t kShutdownTimeout = 5000000000; // 5 sec
static const nsecs_t kActiveTimeout = 500000000; // 500 ms
struct RequestTrigger;
+ // Minimum JPEG buffer size: 256 KB + blob header
+ static const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
// A lock to enforce serialization on the input/configure side
// of the public interface.
@@ -157,7 +167,6 @@ class Camera3Device :
camera3_device_t *mHal3Device;
CameraMetadata mDeviceInfo;
- vendor_tag_query_ops_t mVendorTagOps;
enum Status {
STATUS_ERROR,
@@ -199,9 +208,20 @@ class Camera3Device :
sp<camera3::Camera3Stream> mInputStream;
Vector<sp<camera3::Camera3OutputStreamInterface> >
mOutputStreams;
+ CaptureResultExtras mResultExtras;
};
typedef List<sp<CaptureRequest> > RequestList;
+ status_t checkStatusOkToCaptureLocked();
+
+ status_t convertMetadataListToRequestListLocked(
+ const List<const CameraMetadata> &metadataList,
+ /*out*/
+ RequestList *requestList);
+
+ status_t submitRequestsHelper(const List<const CameraMetadata> &requests, bool repeating,
+ int64_t *lastFrameNumber = NULL);
+
/**
* Get the last request submitted to the hal by the request thread.
*
@@ -237,6 +257,13 @@ class Camera3Device :
status_t waitUntilStateThenRelock(bool active, nsecs_t timeout);
/**
+ * Implementation of waitUntilDrained. On success, will transition to IDLE state.
+ *
+ * Need to be called with mLock and mInterfaceLock held.
+ */
+ status_t waitUntilDrainedLocked();
+
+ /**
* Do common work for setting up a streaming or single capture request.
* On success, will transition to ACTIVE if in IDLE.
*/
@@ -270,6 +297,12 @@ class Camera3Device :
*/
bool tryLockSpinRightRound(Mutex& lock);
+ /**
+ * Get the JPEG buffer size for a given JPEG resolution.
+ * Negative values are error codes.
+ */
+ ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const;
+
struct RequestTrigger {
// Metadata tag number, e.g. android.control.aePrecaptureTrigger
uint32_t metadataTag;
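
For reference, a minimal sketch of how a per-resolution JPEG buffer size could be derived from the new kMinJpegBufferSize. The scaling against a device maximum (maxWidth, maxHeight, maxJpegBufferSize) is an assumption for illustration only; the actual getJpegBufferSize() implementation lives in Camera3Device.cpp and may differ:

    #include <hardware/camera3.h>   // for camera3_jpeg_blob

    static ssize_t jpegBufferSizeSketch(uint32_t width, uint32_t height,
                                        uint32_t maxWidth, uint32_t maxHeight,
                                        ssize_t maxJpegBufferSize) {
        const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
        // Scale the device's largest blob size by the requested resolution,
        // then clamp to [kMinJpegBufferSize, maxJpegBufferSize].
        float scale = (static_cast<float>(width) * height) /
                      (static_cast<float>(maxWidth) * maxHeight);
        ssize_t size = static_cast<ssize_t>(scale * maxJpegBufferSize);
        if (size < kMinJpegBufferSize) size = kMinJpegBufferSize;
        if (size > maxJpegBufferSize)  size = maxJpegBufferSize;
        return size;
    }
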
@@ -308,15 +341,21 @@ class Camera3Device :
* on either. Use waitUntilPaused to wait until request queue
* has emptied out.
*/
- status_t setRepeatingRequests(const RequestList& requests);
- status_t clearRepeatingRequests();
+ status_t setRepeatingRequests(const RequestList& requests,
+ /*out*/
+ int64_t *lastFrameNumber = NULL);
+ status_t clearRepeatingRequests(/*out*/
+ int64_t *lastFrameNumber = NULL);
- status_t queueRequest(sp<CaptureRequest> request);
+ status_t queueRequestList(List<sp<CaptureRequest> > &requests,
+ /*out*/
+ int64_t *lastFrameNumber = NULL);
/**
* Remove all queued and repeating requests, and pending triggers
*/
- status_t clear();
+ status_t clear(/*out*/
+ int64_t *lastFrameNumber = NULL);
/**
* Queue a trigger to be dispatched with the next outgoing
@@ -429,6 +468,8 @@ class Camera3Device :
TriggerMap mTriggerMap;
TriggerMap mTriggerRemovedMap;
TriggerMap mTriggerReplacedMap;
+
+ int64_t mRepeatingLastFrameNumber;
};
sp<RequestThread> mRequestThread;
@@ -437,8 +478,6 @@ class Camera3Device :
*/
struct InFlightRequest {
- // android.request.id for the request
- int requestId;
// Set by notify() SHUTTER call.
nsecs_t captureTimestamp;
int requestStatus;
@@ -447,6 +486,7 @@ class Camera3Device :
// Decremented by calls to process_capture_result with valid output
// buffers
int numBuffersLeft;
+ CaptureResultExtras resultExtras;
// Fields used by the partial result quirk only
struct PartialResultQuirkInFlight {
@@ -462,20 +502,26 @@ class Camera3Device :
// Default constructor needed by KeyedVector
InFlightRequest() :
- requestId(0),
captureTimestamp(0),
requestStatus(OK),
haveResultMetadata(false),
numBuffersLeft(0) {
}
- InFlightRequest(int id, int numBuffers) :
- requestId(id),
+ InFlightRequest(int numBuffers) :
captureTimestamp(0),
requestStatus(OK),
haveResultMetadata(false),
numBuffersLeft(numBuffers) {
}
+
+ InFlightRequest(int numBuffers, CaptureResultExtras extras) :
+ captureTimestamp(0),
+ requestStatus(OK),
+ haveResultMetadata(false),
+ numBuffersLeft(numBuffers),
+ resultExtras(extras) {
+ }
};
// Map from frame number to the in-flight request state
typedef KeyedVector<uint32_t, InFlightRequest> InFlightMap;
@@ -483,25 +529,25 @@ class Camera3Device :
Mutex mInFlightLock; // Protects mInFlightMap
InFlightMap mInFlightMap;
- status_t registerInFlight(int32_t frameNumber, int32_t requestId,
- int32_t numBuffers);
+ status_t registerInFlight(uint32_t frameNumber,
+ int32_t numBuffers, CaptureResultExtras resultExtras);
/**
* For the partial result quirk, check if all 3A state fields are available
* and if so, queue up 3A-only result to the client. Returns true if 3A
* is sent.
*/
- bool processPartial3AQuirk(int32_t frameNumber, int32_t requestId,
- const CameraMetadata& partial);
+ bool processPartial3AQuirk(uint32_t frameNumber,
+ const CameraMetadata& partial, const CaptureResultExtras& resultExtras);
// Helpers for reading and writing 3A metadata into to/from partial results
template<typename T>
bool get3AResult(const CameraMetadata& result, int32_t tag,
- T* value, int32_t frameNumber);
+ T* value, uint32_t frameNumber);
template<typename T>
bool insert3AResult(CameraMetadata &result, int32_t tag, const T* value,
- int32_t frameNumber);
+ uint32_t frameNumber);
/**
* Tracking for idle detection
*/
@@ -518,7 +564,7 @@ class Camera3Device :
uint32_t mNextResultFrameNumber;
uint32_t mNextShutterFrameNumber;
- List<CameraMetadata> mResultQueue;
+ List<CaptureResult> mResultQueue;
Condition mResultSignal;
NotificationListener *mListener;
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index d662cc2..50a2c10 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -34,7 +34,8 @@ Camera3IOStreamBase::Camera3IOStreamBase(int id, camera3_stream_type_t type,
Camera3Stream(id, type,
width, height, maxSize, format),
mTotalBufferCount(0),
- mDequeuedBufferCount(0),
+ mHandoutTotalBufferCount(0),
+ mHandoutOutputBufferCount(0),
mFrameCount(0),
mLastTimestamp(0) {
@@ -55,8 +56,8 @@ bool Camera3IOStreamBase::hasOutstandingBuffersLocked() const {
nsecs_t signalTime = mCombinedFence->getSignalTime();
ALOGV("%s: Stream %d: Has %zu outstanding buffers,"
" buffer signal time is %" PRId64,
- __FUNCTION__, mId, mDequeuedBufferCount, signalTime);
- if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) {
+ __FUNCTION__, mId, mHandoutTotalBufferCount, signalTime);
+ if (mHandoutTotalBufferCount > 0 || signalTime == INT64_MAX) {
return true;
}
return false;
@@ -75,7 +76,7 @@ void Camera3IOStreamBase::dump(int fd, const Vector<String16> &args) const {
lines.appendFormat(" Frames produced: %d, last timestamp: %" PRId64 " ns\n",
mFrameCount, mLastTimestamp);
lines.appendFormat(" Total buffers: %zu, currently dequeued: %zu\n",
- mTotalBufferCount, mDequeuedBufferCount);
+ mTotalBufferCount, mHandoutTotalBufferCount);
write(fd, lines.string(), lines.size());
}
@@ -104,6 +105,14 @@ size_t Camera3IOStreamBase::getBufferCountLocked() {
return mTotalBufferCount;
}
+size_t Camera3IOStreamBase::getHandoutOutputBufferCountLocked() {
+ return mHandoutOutputBufferCount;
+}
+
+size_t Camera3IOStreamBase::getHandoutInputBufferCountLocked() {
+ return (mHandoutTotalBufferCount - mHandoutOutputBufferCount);
+}
+
status_t Camera3IOStreamBase::disconnectLocked() {
switch (mState) {
case STATE_IN_RECONFIG:
@@ -117,9 +126,9 @@ status_t Camera3IOStreamBase::disconnectLocked() {
return -ENOTCONN;
}
- if (mDequeuedBufferCount > 0) {
+ if (mHandoutTotalBufferCount > 0) {
ALOGE("%s: Can't disconnect with %zu buffers still dequeued!",
- __FUNCTION__, mDequeuedBufferCount);
+ __FUNCTION__, mHandoutTotalBufferCount);
return INVALID_OPERATION;
}
@@ -130,7 +139,8 @@ void Camera3IOStreamBase::handoutBufferLocked(camera3_stream_buffer &buffer,
buffer_handle_t *handle,
int acquireFence,
int releaseFence,
- camera3_buffer_status_t status) {
+ camera3_buffer_status_t status,
+ bool output) {
/**
* Note that all fences are now owned by HAL.
*/
@@ -144,14 +154,25 @@ void Camera3IOStreamBase::handoutBufferLocked(camera3_stream_buffer &buffer,
buffer.status = status;
// Inform tracker about becoming busy
- if (mDequeuedBufferCount == 0 && mState != STATE_IN_CONFIG &&
+ if (mHandoutTotalBufferCount == 0 && mState != STATE_IN_CONFIG &&
mState != STATE_IN_RECONFIG) {
+ /**
+ * Avoid a spurious IDLE->ACTIVE->IDLE transition when using buffers
+ * before/after register_stream_buffers during initial configuration
+ * or re-configuration.
+ *
+ * TODO: IN_CONFIG and IN_RECONFIG checks only make sense for <HAL3.2
+ */
sp<StatusTracker> statusTracker = mStatusTracker.promote();
if (statusTracker != 0) {
statusTracker->markComponentActive(mStatusId);
}
}
- mDequeuedBufferCount++;
+ mHandoutTotalBufferCount++;
+
+ if (output) {
+ mHandoutOutputBufferCount++;
+ }
}
status_t Camera3IOStreamBase::getBufferPreconditionCheckLocked() const {
@@ -165,7 +186,7 @@ status_t Camera3IOStreamBase::getBufferPreconditionCheckLocked() const {
// Only limit dequeue amount when fully configured
if (mState == STATE_CONFIGURED &&
- mDequeuedBufferCount == camera3_stream::max_buffers) {
+ mHandoutTotalBufferCount == camera3_stream::max_buffers) {
ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous"
" buffers (%d)", __FUNCTION__, mId,
camera3_stream::max_buffers);
@@ -183,7 +204,7 @@ status_t Camera3IOStreamBase::returnBufferPreconditionCheckLocked() const {
__FUNCTION__, mId, mState);
return INVALID_OPERATION;
}
- if (mDequeuedBufferCount == 0) {
+ if (mHandoutTotalBufferCount == 0) {
ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__,
mId);
return INVALID_OPERATION;
@@ -221,9 +242,20 @@ status_t Camera3IOStreamBase::returnAnyBufferLocked(
mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
}
- mDequeuedBufferCount--;
- if (mDequeuedBufferCount == 0 && mState != STATE_IN_CONFIG &&
+ if (output) {
+ mHandoutOutputBufferCount--;
+ }
+
+ mHandoutTotalBufferCount--;
+ if (mHandoutTotalBufferCount == 0 && mState != STATE_IN_CONFIG &&
mState != STATE_IN_RECONFIG) {
+ /**
+ * Avoid a spurious IDLE->ACTIVE->IDLE transition when using buffers
+ * before/after register_stream_buffers during initial configuration
+ * or re-configuration.
+ *
+ * TODO: IN_CONFIG and IN_RECONFIG checks only make sense for <HAL3.2
+ */
ALOGV("%s: Stream %d: All buffers returned; now idle", __FUNCTION__,
mId);
sp<StatusTracker> statusTracker = mStatusTracker.promote();
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index fcb9d04..a35c290 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -48,7 +48,10 @@ class Camera3IOStreamBase :
protected:
size_t mTotalBufferCount;
// sum of input and output buffers that are currently acquired by HAL
- size_t mDequeuedBufferCount;
+ size_t mHandoutTotalBufferCount;
+ // number of output buffers that are currently acquired by HAL. This will be
+ // redundant once camera3 streams are no longer bidirectional.
+ size_t mHandoutOutputBufferCount;
Condition mBufferReturnedSignal;
uint32_t mFrameCount;
// Last received output buffer's timestamp
@@ -76,6 +79,10 @@ class Camera3IOStreamBase :
virtual size_t getBufferCountLocked();
+ virtual size_t getHandoutOutputBufferCountLocked();
+
+ virtual size_t getHandoutInputBufferCountLocked();
+
virtual status_t getEndpointUsage(uint32_t *usage) = 0;
status_t getBufferPreconditionCheckLocked() const;
@@ -92,7 +99,8 @@ class Camera3IOStreamBase :
buffer_handle_t *handle,
int acquire_fence,
int release_fence,
- camera3_buffer_status_t status);
+ camera3_buffer_status_t status,
+ bool output);
}; // class Camera3IOStreamBase
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 5aa9a3e..319be1d 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -81,7 +81,7 @@ status_t Camera3InputStream::getInputBufferLocked(
* in which case we reassign it to acquire_fence
*/
handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
- /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK);
+ /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK, /*output*/false);
mBuffersInFlight.push_back(bufferItem);
return OK;
@@ -199,14 +199,36 @@ status_t Camera3InputStream::configureQueueLocked() {
assert(mMaxSize == 0);
assert(camera3_stream::format != HAL_PIXEL_FORMAT_BLOB);
- mTotalBufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS +
- camera3_stream::max_buffers;
- mDequeuedBufferCount = 0;
+ mHandoutTotalBufferCount = 0;
mFrameCount = 0;
if (mConsumer.get() == 0) {
- sp<BufferQueue> bq = new BufferQueue();
- mConsumer = new BufferItemConsumer(bq, camera3_stream::usage,
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+
+ int minUndequeuedBuffers = 0;
+ res = producer->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
+ if (res != OK || minUndequeuedBuffers < 0) {
+ ALOGE("%s: Stream %d: Could not query min undequeued buffers (error %d, bufCount %d)",
+ __FUNCTION__, mId, res, minUndequeuedBuffers);
+ return res;
+ }
+ size_t minBufs = static_cast<size_t>(minUndequeuedBuffers);
+ /*
+ * We promise never to 'acquire' more than camera3_stream::max_buffers
+ * at any one time.
+ *
+ * Boost the number up to meet the minimum required buffer count.
+ *
+ * (Note that this sets consumer-side buffer count only,
+ * and not the sum of producer+consumer side as in other camera streams).
+ */
+ mTotalBufferCount = camera3_stream::max_buffers > minBufs ?
+ camera3_stream::max_buffers : minBufs;
+ // TODO: somehow set the total buffer count when producer connects?
+
+ mConsumer = new BufferItemConsumer(consumer, camera3_stream::usage,
mTotalBufferCount);
mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
}
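
A short worked example of the consumer-side count computed above, with assumed values: if the HAL advertises max_buffers = 2 but the producer reports NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS = 4, the input stream now allocates 4 buffers, where the removed code would have allocated MIN_UNDEQUEUED_BUFFERS + max_buffers:

    size_t maxBuffers    = 2;   // camera3_stream::max_buffers (assumed)
    size_t minUndequeued = 4;   // queried NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS (assumed)
    size_t totalBufferCount = maxBuffers > minUndequeued ? maxBuffers
                                                         : minUndequeued;   // == 4
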
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 681d684..ae49467 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -44,6 +44,8 @@ class Camera3InputStream : public Camera3IOStreamBase {
virtual void dump(int fd, const Vector<String16> &args) const;
+ // TODO: expose an interface to get the IGraphicBufferProducer
+
private:
typedef BufferItemConsumer::BufferItem BufferItem;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 682755d..7ec649b 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -119,7 +119,7 @@ status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer) {
* in which case we reassign it to acquire_fence
*/
handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
- /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK);
+ /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK, /*output*/true);
return OK;
}
@@ -324,7 +324,7 @@ status_t Camera3OutputStream::configureQueueLocked() {
}
mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers;
- mDequeuedBufferCount = 0;
+ mHandoutTotalBufferCount = 0;
mFrameCount = 0;
mLastTimestamp = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 70406f1..7645a2a 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -23,6 +23,8 @@
#include "device3/Camera3Stream.h"
#include "device3/StatusTracker.h"
+#include <cutils/properties.h>
+
namespace android {
namespace camera3 {
@@ -137,6 +139,7 @@ camera3_stream* Camera3Stream::startConfiguration() {
if (mState == STATE_CONSTRUCTED) {
mState = STATE_IN_CONFIG;
} else { // mState == STATE_CONFIGURED
+ LOG_ALWAYS_FATAL_IF(mState != STATE_CONFIGURED, "Invalid state: 0x%x", mState);
mState = STATE_IN_RECONFIG;
}
@@ -209,8 +212,30 @@ status_t Camera3Stream::finishConfiguration(camera3_device *hal3Device) {
status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
+ status_t res = OK;
+
+ // This function should only be called once the stream has been configured.
+ if (mState != STATE_CONFIGURED) {
+ ALOGE("%s: Stream %d: Can't get buffers if stream is not in CONFIGURED state %d",
+ __FUNCTION__, mId, mState);
+ return INVALID_OPERATION;
+ }
+
+ // Wait for a buffer to be returned if we have hit the dequeue limit.
+ if (getHandoutOutputBufferCountLocked() == camera3_stream::max_buffers) {
+ ALOGV("%s: Already dequeued max output buffers (%d), wait for next returned one.",
+ __FUNCTION__, camera3_stream::max_buffers);
+ res = mOutputBufferReturnedSignal.waitRelative(mLock, kWaitForBufferDuration);
+ if (res != OK) {
+ if (res == TIMED_OUT) {
+ ALOGE("%s: wait for output buffer return timed out after %lldms", __FUNCTION__,
+ kWaitForBufferDuration / 1000000LL);
+ }
+ return res;
+ }
+ }
- status_t res = getBufferLocked(buffer);
+ res = getBufferLocked(buffer);
if (res == OK) {
fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true);
}
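
The new max-buffers back-pressure in getBuffer()/returnBuffer() boils down to a Condition handshake under mLock. A condensed, self-contained sketch of that pattern (simplified names and a standalone counter; not a drop-in for the stream code):

    #include <utils/Mutex.h>
    #include <utils/Condition.h>
    #include <utils/Errors.h>
    #include <utils/Timers.h>

    static android::Mutex gLock;
    static android::Condition gBufferReturnedSignal;
    static size_t gHandedOut = 0;
    static const size_t kMaxBuffers = 4;                        // stands in for camera3_stream::max_buffers
    static const nsecs_t kWaitForBufferDuration = 3000000000LL; // 3 s, same as the patch

    android::status_t getBufferSketch() {
        android::Mutex::Autolock l(gLock);
        if (gHandedOut == kMaxBuffers) {
            // Block until returnBufferSketch() signals, or fail with TIMED_OUT after 3 s.
            android::status_t res =
                    gBufferReturnedSignal.waitRelative(gLock, kWaitForBufferDuration);
            if (res != android::OK) return res;
        }
        gHandedOut++;
        return android::OK;
    }

    void returnBufferSketch() {
        android::Mutex::Autolock l(gLock);
        gHandedOut--;
        gBufferReturnedSignal.signal();   // wake one waiter in getBufferSketch()
    }
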
@@ -223,9 +248,18 @@ status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
ATRACE_CALL();
Mutex::Autolock l(mLock);
+ /**
+ * TODO: Check that the state is valid first.
+ *
+ * <HAL3.2 IN_CONFIG and IN_RECONFIG in addition to CONFIGURED.
+ * >= HAL3.2 CONFIGURED only
+ *
+ * Do this for getBuffer as well.
+ */
status_t res = returnBufferLocked(buffer, timestamp);
if (res == OK) {
fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/true);
+ mOutputBufferReturnedSignal.signal();
}
return res;
@@ -234,8 +268,30 @@ status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
status_t Camera3Stream::getInputBuffer(camera3_stream_buffer *buffer) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
+ status_t res = OK;
+
+ // This function should only be called once the stream has been configured.
+ if (mState != STATE_CONFIGURED) {
+ ALOGE("%s: Stream %d: Can't get input buffers if stream is not in CONFIGURED state %d",
+ __FUNCTION__, mId, mState);
+ return INVALID_OPERATION;
+ }
+
+ // Wait for a buffer to be returned if we have hit the dequeue limit.
+ if (getHandoutInputBufferCountLocked() == camera3_stream::max_buffers) {
+ ALOGV("%s: Already dequeued max input buffers (%d), wait for next returned one.",
+ __FUNCTION__, camera3_stream::max_buffers);
+ res = mInputBufferReturnedSignal.waitRelative(mLock, kWaitForBufferDuration);
+ if (res != OK) {
+ if (res == TIMED_OUT) {
+ ALOGE("%s: wait for input buffer return timed out after %lldms", __FUNCTION__,
+ kWaitForBufferDuration / 1000000LL);
+ }
+ return res;
+ }
+ }
- status_t res = getInputBufferLocked(buffer);
+ res = getInputBufferLocked(buffer);
if (res == OK) {
fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/false);
}
@@ -250,6 +306,7 @@ status_t Camera3Stream::returnInputBuffer(const camera3_stream_buffer &buffer) {
status_t res = returnInputBufferLocked(buffer);
if (res == OK) {
fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/false);
+ mInputBufferReturnedSignal.signal();
}
return res;
}
@@ -314,12 +371,46 @@ status_t Camera3Stream::disconnect() {
status_t Camera3Stream::registerBuffersLocked(camera3_device *hal3Device) {
ATRACE_CALL();
+
+ /**
+ * >= CAMERA_DEVICE_API_VERSION_3_2:
+ *
+ * camera3_device_t->ops->register_stream_buffers() is not called and must
+ * be NULL.
+ */
+ if (hal3Device->common.version >= CAMERA_DEVICE_API_VERSION_3_2) {
+ ALOGV("%s: register_stream_buffers unused as of HAL3.2", __FUNCTION__);
+
+ /**
+ * Skip the NULL check if camera.dev.register_stream is 1.
+ *
+ * For development-validation purposes only.
+ *
+ * TODO: Remove the property check before shipping L (b/13914251).
+ */
+ char value[PROPERTY_VALUE_MAX] = { '\0', };
+ property_get("camera.dev.register_stream", value, "0");
+ int propInt = atoi(value);
+
+ if (propInt == 0 && hal3Device->ops->register_stream_buffers != NULL) {
+ ALOGE("%s: register_stream_buffers is deprecated in HAL3.2; "
+ "must be set to NULL in camera3_device::ops", __FUNCTION__);
+ return INVALID_OPERATION;
+ } else {
+ ALOGD("%s: Skipping NULL check for deprecated register_stream_buffers", __FUNCTION__);
+ }
+
+ return OK;
+ } else {
+ ALOGV("%s: register_stream_buffers using deprecated code path", __FUNCTION__);
+ }
+
status_t res;
size_t bufferCount = getBufferCountLocked();
Vector<buffer_handle_t*> buffers;
- buffers.insertAt(NULL, 0, bufferCount);
+ buffers.insertAt(/*prototype_item*/NULL, /*index*/0, bufferCount);
camera3_stream_buffer_set bufferSet = camera3_stream_buffer_set();
bufferSet.stream = this;
@@ -327,7 +418,7 @@ status_t Camera3Stream::registerBuffersLocked(camera3_device *hal3Device) {
bufferSet.buffers = buffers.editArray();
Vector<camera3_stream_buffer_t> streamBuffers;
- streamBuffers.insertAt(camera3_stream_buffer_t(), 0, bufferCount);
+ streamBuffers.insertAt(camera3_stream_buffer_t(), /*index*/0, bufferCount);
// Register all buffers with the HAL. This means getting all the buffers
// from the stream, providing them to the HAL with the
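
For development validation of the check above: with the property at its default of 0, a HAL that reports CAMERA_DEVICE_API_VERSION_3_2 or later but still exports register_stream_buffers is rejected with INVALID_OPERATION. Setting the property to a nonzero value before opening the camera only tolerates the non-NULL pointer; buffers are still not registered. On a build where system properties are writable (e.g. userdebug/eng), this would look like:

    adb shell setprop camera.dev.register_stream 1
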
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 6eeb721..14f5387 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -82,6 +82,23 @@ namespace camera3 {
* STATE_CONFIGURED => STATE_CONSTRUCTED:
* When disconnect() is called after making sure stream is idle with
* waitUntilIdle().
+ *
+ * Status Tracking:
+ * Each stream is tracked by StatusTracker as a separate component,
+ * depending on the handed out buffer count. The state must be STATE_CONFIGURED
+ * in order for the component to be marked.
+ *
+ * It's marked in one of two ways:
+ *
+ * - ACTIVE: One or more buffers have been handed out (with #getBuffer).
+ * - IDLE: All buffers have been returned (with #returnBuffer), and their
+ * respective release_fence(s) have been signaled.
+ *
+ * A typical use case is output streams. When the HAL has any buffers
+ * dequeued, the stream is marked ACTIVE. When the HAL returns all buffers
+ * (e.g. if no capture requests are active), the stream is marked IDLE.
+ * In this use case, the app consumer does not affect the component status.
+ *
*/
class Camera3Stream :
protected camera3_stream,
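
A toy model of the ACTIVE/IDLE bookkeeping described in the comment above, mirroring what Camera3IOStreamBase does when handing out and taking back buffers; StatusTracker itself is not re-declared here, so MarkActive/MarkIdle stand in for its calls:

    #include <cstddef>
    #include <cstdio>

    static size_t handedOut = 0;

    static void MarkActive() { std::printf("stream -> ACTIVE\n"); }
    static void MarkIdle()   { std::printf("stream -> IDLE\n");   }

    static void onBufferHandedOut() {
        if (handedOut == 0) MarkActive();   // first outstanding buffer
        handedOut++;
    }

    static void onBufferReturned() {
        handedOut--;
        if (handedOut == 0) MarkIdle();     // all buffers (and their fences) back
    }
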
@@ -262,6 +279,12 @@ class Camera3Stream :
// Get the total number of buffers in the queue
virtual size_t getBufferCountLocked() = 0;
+ // Get handout output buffer count.
+ virtual size_t getHandoutOutputBufferCountLocked() = 0;
+
+ // Get handout input buffer count.
+ virtual size_t getHandoutInputBufferCountLocked() = 0;
+
// Get the usage flags for the other endpoint, or return
// INVALID_OPERATION if they cannot be obtained.
virtual status_t getEndpointUsage(uint32_t *usage) = 0;
@@ -274,6 +297,9 @@ class Camera3Stream :
private:
uint32_t oldUsage;
uint32_t oldMaxBuffers;
+ Condition mOutputBufferReturnedSignal;
+ Condition mInputBufferReturnedSignal;
+ static const nsecs_t kWaitForBufferDuration = 3000000000LL; // 3000 ms
// Gets all buffers from endpoint and registers them with the HAL.
status_t registerBuffersLocked(camera3_device *hal3Device);
diff --git a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
index 44d8188..05b3d1f 100644
--- a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
@@ -111,15 +111,17 @@ struct TimestampFinder : public RingBufferConsumer::RingBufferComparator {
} // namespace anonymous
Camera3ZslStream::Camera3ZslStream(int id, uint32_t width, uint32_t height,
- int depth) :
+ int bufferCount) :
Camera3OutputStream(id, CAMERA3_STREAM_BIDIRECTIONAL,
width, height,
HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
- mDepth(depth) {
+ mDepth(bufferCount) {
- sp<BufferQueue> bq = new BufferQueue();
- mProducer = new RingBufferConsumer(bq, GRALLOC_USAGE_HW_CAMERA_ZSL, depth);
- mConsumer = new Surface(bq);
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mProducer = new RingBufferConsumer(consumer, GRALLOC_USAGE_HW_CAMERA_ZSL, bufferCount);
+ mConsumer = new Surface(producer);
}
Camera3ZslStream::~Camera3ZslStream() {
@@ -174,7 +176,7 @@ status_t Camera3ZslStream::getInputBufferLocked(camera3_stream_buffer *buffer) {
* in which case we reassign it to acquire_fence
*/
handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
- /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK);
+ /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK, /*output*/false);
mBuffersInFlight.push_back(bufferItem);
diff --git a/services/camera/libcameraservice/device3/Camera3ZslStream.h b/services/camera/libcameraservice/device3/Camera3ZslStream.h
index c7f4490..6721832 100644
--- a/services/camera/libcameraservice/device3/Camera3ZslStream.h
+++ b/services/camera/libcameraservice/device3/Camera3ZslStream.h
@@ -37,10 +37,10 @@ class Camera3ZslStream :
public Camera3OutputStream {
public:
/**
- * Set up a ZSL stream of a given resolution. Depth is the number of buffers
+ * Set up a ZSL stream of a given resolution. bufferCount is the number of buffers
* cached within the stream that can be retrieved for input.
*/
- Camera3ZslStream(int id, uint32_t width, uint32_t height, int depth);
+ Camera3ZslStream(int id, uint32_t width, uint32_t height, int bufferCount);
~Camera3ZslStream();
virtual void dump(int fd, const Vector<String16> &args) const;
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index b4ad824..a03736d 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -64,7 +64,7 @@ class RingBufferConsumer : public ConsumerBase,
// bufferCount parameter specifies how many buffers can be pinned for user
// access at the same time.
RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer, uint32_t consumerUsage,
- int bufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS);
+ int bufferCount);
virtual ~RingBufferConsumer();