Diffstat (limited to 'services/camera/libcameraservice/api1/client2')
-rw-r--r--  services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp  |   8
-rw-r--r--  services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp   |  24
-rw-r--r--  services/camera/libcameraservice/api1/client2/CaptureSequencer.h     |   5
-rw-r--r--  services/camera/libcameraservice/api1/client2/FrameProcessor.cpp     |   8
-rw-r--r--  services/camera/libcameraservice/api1/client2/FrameProcessor.h       |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/JpegProcessor.cpp      |   8
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.cpp         | 215
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.h           |  13
-rw-r--r--  services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp |   8
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor.cpp       |  17
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor.h         |   3
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp      |  18
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor3.h        |   4
13 files changed, 154 insertions(+), 179 deletions(-)
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index d2ac79c..c266213 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -110,11 +110,13 @@ status_t CallbackProcessor::updateStream(const Parameters &params) {
if (!mCallbackToApp && mCallbackConsumer == 0) {
// Create CPU buffer queue endpoint, since app hasn't given us one
// Make it async to avoid disconnect deadlocks
- sp<BufferQueue> bq = new BufferQueue();
- mCallbackConsumer = new CpuConsumer(bq, kCallbackHeapCount);
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mCallbackConsumer = new CpuConsumer(consumer, kCallbackHeapCount);
mCallbackConsumer->setFrameAvailableListener(this);
mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer"));
- mCallbackWindow = new Surface(bq);
+ mCallbackWindow = new Surface(producer);
}
if (mCallbackStreamId != NO_STREAM) {
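
The same producer/consumer split recurs in JpegProcessor, StreamingProcessor, and ZslProcessor below. As a rough sketch of the new endpoint pattern (the function and consumer name here are illustrative, not from the patch, and the Android libgui headers are assumed):

    #include <gui/BufferQueue.h>
    #include <gui/CpuConsumer.h>
    #include <gui/Surface.h>
    #include <utils/String8.h>

    using namespace android;

    // BufferQueue::createBufferQueue() now hands back separate producer and
    // consumer endpoints instead of a single BufferQueue object.
    static void createCallbackEndpoints(sp<CpuConsumer>* cpuConsumer,
                                        sp<Surface>* window,
                                        size_t heapCount) {
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);

        // The consumer end feeds the CPU-side reader; the producer end is
        // wrapped in a Surface and handed to the camera as an output window.
        *cpuConsumer = new CpuConsumer(consumer, heapCount);
        (*cpuConsumer)->setName(String8("Example::CallbackConsumer"));
        *window = new Surface(producer);
    }
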
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index f5c28ed..8268f65 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -106,13 +106,12 @@ void CaptureSequencer::notifyAutoExposure(uint8_t newState, int triggerId) {
}
}
-void CaptureSequencer::onFrameAvailable(int32_t requestId,
- const CameraMetadata &frame) {
- ALOGV("%s: Listener found new frame", __FUNCTION__);
+void CaptureSequencer::onResultAvailable(const CaptureResult &result) {
ATRACE_CALL();
+ ALOGV("%s: New result available.", __FUNCTION__);
Mutex::Autolock l(mInputMutex);
- mNewFrameId = requestId;
- mNewFrame = frame;
+ mNewFrameId = result.mResultExtras.requestId;
+ mNewFrame = result.mMetadata;
if (!mNewFrameReceived) {
mNewFrameReceived = true;
mNewFrameSignal.signal();
@@ -585,12 +584,15 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCaptureWait(
entry = mNewFrame.find(ANDROID_SENSOR_TIMESTAMP);
if (entry.count == 0) {
ALOGE("No timestamp field in capture frame!");
- }
- if (entry.data.i64[0] != mCaptureTimestamp) {
- ALOGW("Mismatched capture timestamps: Metadata frame %" PRId64 ","
- " captured buffer %" PRId64,
- entry.data.i64[0],
- mCaptureTimestamp);
+ } else if (entry.count == 1) {
+ if (entry.data.i64[0] != mCaptureTimestamp) {
+ ALOGW("Mismatched capture timestamps: Metadata frame %" PRId64 ","
+ " captured buffer %" PRId64,
+ entry.data.i64[0],
+ mCaptureTimestamp);
+ }
+ } else {
+ ALOGE("Timestamp metadata is malformed!");
}
client->removeFrameListener(mCaptureId, mCaptureId + 1, this);
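
A minimal sketch of what the reworked listener does with a CaptureResult, assuming the types declared in camera/CaptureResult.h (the free-standing helpers and their names are invented for illustration):

    #include <camera/CameraMetadata.h>
    #include <camera/CaptureResult.h>
    #include <utils/Timers.h>

    using namespace android;

    // The listener now receives the whole CaptureResult; request id and
    // metadata are extracted from it rather than passed as two arguments.
    static void recordNewResult(const CaptureResult& result,
                                int32_t* newFrameId,
                                CameraMetadata* newFrame) {
        *newFrameId = result.mResultExtras.requestId;
        *newFrame = result.mMetadata;
    }

    // The timestamp is only dereferenced when exactly one entry is present,
    // mirroring the stricter check added above.
    static bool timestampMatches(const CameraMetadata& frame,
                                 nsecs_t captureTimestamp) {
        camera_metadata_ro_entry_t entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
        if (entry.count != 1) return false;   // missing or malformed entry
        return entry.data.i64[0] == captureTimestamp;
    }
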
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
index 9fb4ee7..d42ab13 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
@@ -24,6 +24,7 @@
#include <utils/Mutex.h>
#include <utils/Condition.h>
#include "camera/CameraMetadata.h"
+#include "camera/CaptureResult.h"
#include "Parameters.h"
#include "FrameProcessor.h"
@@ -61,8 +62,8 @@ class CaptureSequencer:
// Notifications about AE state changes
void notifyAutoExposure(uint8_t newState, int triggerId);
- // Notifications from the frame processor
- virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame);
+ // Notification from the frame processor
+ virtual void onResultAvailable(const CaptureResult &result);
// Notifications from the JPEG processor
void onCaptureAvailable(nsecs_t timestamp, sp<MemoryBase> captureBuffer);
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index dd5b27c..69bea24 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -55,7 +55,7 @@ FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
FrameProcessor::~FrameProcessor() {
}
-bool FrameProcessor::processSingleFrame(CameraMetadata &frame,
+bool FrameProcessor::processSingleFrame(CaptureResult &frame,
const sp<CameraDeviceBase> &device) {
sp<Camera2Client> client = mClient.promote();
@@ -66,19 +66,19 @@ bool FrameProcessor::processSingleFrame(CameraMetadata &frame,
bool partialResult = false;
if (mUsePartialQuirk) {
camera_metadata_entry_t entry;
- entry = frame.find(ANDROID_QUIRKS_PARTIAL_RESULT);
+ entry = frame.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
if (entry.count > 0 &&
entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
partialResult = true;
}
}
- if (!partialResult && processFaceDetect(frame, client) != OK) {
+ if (!partialResult && processFaceDetect(frame.mMetadata, client) != OK) {
return false;
}
if (mSynthesize3ANotify) {
- process3aState(frame, client);
+ process3aState(frame.mMetadata, client);
}
return FrameProcessorBase::processSingleFrame(frame, device);
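
A hedged sketch of the quirk check against the result's embedded metadata (the helper below is not part of the patch):

    #include <camera/CameraMetadata.h>
    #include <camera/CaptureResult.h>

    using namespace android;

    // True when the HAL has flagged this result as partial via the
    // ANDROID_QUIRKS_PARTIAL_RESULT quirk, so face detect / 3A processing
    // is deferred until the final result arrives.
    static bool isPartialQuirkResult(const CaptureResult& frame) {
        camera_metadata_ro_entry_t entry =
                frame.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
        return entry.count > 0 &&
                entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
    }
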
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index 856ad32..514bd1a 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -51,7 +51,7 @@ class FrameProcessor : public FrameProcessorBase {
void processNewFrames(const sp<Camera2Client> &client);
- virtual bool processSingleFrame(CameraMetadata &frame,
+ virtual bool processSingleFrame(CaptureResult &frame,
const sp<CameraDeviceBase> &device);
status_t processFaceDetect(const CameraMetadata &frame,
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index 2de7a2b..964d278 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -83,11 +83,13 @@ status_t JpegProcessor::updateStream(const Parameters &params) {
if (mCaptureConsumer == 0) {
// Create CPU buffer queue endpoint
- sp<BufferQueue> bq = new BufferQueue();
- mCaptureConsumer = new CpuConsumer(bq, 1);
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mCaptureConsumer = new CpuConsumer(consumer, 1);
mCaptureConsumer->setFrameAvailableListener(this);
mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
- mCaptureWindow = new Surface(bq);
+ mCaptureWindow = new Surface(producer);
// Create memory for API consumption
mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0,
"Camera2Client::CaptureHeap");
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 07654c0..5bfb969 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -16,7 +16,7 @@
#define LOG_TAG "Camera2-Parameters"
#define ATRACE_TAG ATRACE_TAG_CAMERA
-// #define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <utils/Trace.h>
@@ -92,6 +92,26 @@ status_t Parameters::initialize(const CameraMetadata *info) {
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
if (!availableFpsRanges.count) return NO_INIT;
+ previewFpsRange[0] = availableFpsRanges.data.i32[0];
+ previewFpsRange[1] = availableFpsRanges.data.i32[1];
+
+ params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,
+ String8::format("%d,%d",
+ previewFpsRange[0] * kFpsToApiScale,
+ previewFpsRange[1] * kFpsToApiScale));
+
+ {
+ String8 supportedPreviewFpsRange;
+ for (size_t i=0; i < availableFpsRanges.count; i += 2) {
+ if (i != 0) supportedPreviewFpsRange += ",";
+ supportedPreviewFpsRange += String8::format("(%d,%d)",
+ availableFpsRanges.data.i32[i] * kFpsToApiScale,
+ availableFpsRanges.data.i32[i+1] * kFpsToApiScale);
+ }
+ params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+ supportedPreviewFpsRange);
+ }
+
previewFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
params.set(CameraParameters::KEY_PREVIEW_FORMAT,
formatEnumToString(previewFormat)); // NV21
@@ -159,9 +179,6 @@ status_t Parameters::initialize(const CameraMetadata *info) {
supportedPreviewFormats);
}
- previewFpsRange[0] = availableFpsRanges.data.i32[0];
- previewFpsRange[1] = availableFpsRanges.data.i32[1];
-
// PREVIEW_FRAME_RATE / SUPPORTED_PREVIEW_FRAME_RATES are deprecated, but
// still have to do something sane for them
@@ -170,27 +187,6 @@ status_t Parameters::initialize(const CameraMetadata *info) {
params.set(CameraParameters::KEY_PREVIEW_FRAME_RATE,
previewFps);
- // PREVIEW_FPS_RANGE
- // -- Order matters. Set range after single value to so that a roundtrip
- // of setParameters(getParameters()) would keep the FPS range in higher
- // order.
- params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,
- String8::format("%d,%d",
- previewFpsRange[0] * kFpsToApiScale,
- previewFpsRange[1] * kFpsToApiScale));
-
- {
- String8 supportedPreviewFpsRange;
- for (size_t i=0; i < availableFpsRanges.count; i += 2) {
- if (i != 0) supportedPreviewFpsRange += ",";
- supportedPreviewFpsRange += String8::format("(%d,%d)",
- availableFpsRanges.data.i32[i] * kFpsToApiScale,
- availableFpsRanges.data.i32[i+1] * kFpsToApiScale);
- }
- params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
- supportedPreviewFpsRange);
- }
-
{
SortedVector<int32_t> sortedPreviewFrameRates;
@@ -470,7 +466,7 @@ status_t Parameters::initialize(const CameraMetadata *info) {
supportedAntibanding);
}
- sceneMode = ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
+ sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
params.set(CameraParameters::KEY_SCENE_MODE,
CameraParameters::SCENE_MODE_AUTO);
@@ -486,7 +482,7 @@ status_t Parameters::initialize(const CameraMetadata *info) {
if (addComma) supportedSceneModes += ",";
addComma = true;
switch (availableSceneModes.data.u8[i]) {
- case ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED:
+ case ANDROID_CONTROL_SCENE_MODE_DISABLED:
noSceneModes = true;
break;
case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
@@ -668,13 +664,13 @@ status_t Parameters::initialize(const CameraMetadata *info) {
focusState = ANDROID_CONTROL_AF_STATE_INACTIVE;
shadowFocusMode = FOCUS_MODE_INVALID;
- camera_metadata_ro_entry_t max3aRegions =
- staticInfo(ANDROID_CONTROL_MAX_REGIONS, 1, 1);
- if (!max3aRegions.count) return NO_INIT;
+ camera_metadata_ro_entry_t max3aRegions = staticInfo(ANDROID_CONTROL_MAX_REGIONS,
+ Parameters::NUM_REGION, Parameters::NUM_REGION);
+ if (max3aRegions.count != Parameters::NUM_REGION) return NO_INIT;
int32_t maxNumFocusAreas = 0;
if (focusMode != Parameters::FOCUS_MODE_FIXED) {
- maxNumFocusAreas = max3aRegions.data.i32[0];
+ maxNumFocusAreas = max3aRegions.data.i32[Parameters::REGION_AF];
}
params.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, maxNumFocusAreas);
params.set(CameraParameters::KEY_FOCUS_AREAS,
@@ -734,7 +730,7 @@ status_t Parameters::initialize(const CameraMetadata *info) {
meteringAreas.add(Parameters::Area(0, 0, 0, 0, 0));
params.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS,
- max3aRegions.data.i32[0]);
+ max3aRegions.data.i32[Parameters::REGION_AE]);
params.set(CameraParameters::KEY_METERING_AREAS,
"(0,0,0,0,0)");
@@ -1088,7 +1084,7 @@ camera_metadata_ro_entry_t Parameters::staticInfo(uint32_t tag,
status_t Parameters::set(const String8& paramString) {
status_t res;
- CameraParameters2 newParams(paramString);
+ CameraParameters newParams(paramString);
// TODO: Currently ignoring any changes to supposedly read-only parameters
// such as supported preview sizes, etc. Should probably produce an error if
@@ -1131,73 +1127,29 @@ status_t Parameters::set(const String8& paramString) {
// RECORDING_HINT (always supported)
validatedParams.recordingHint = boolFromString(
newParams.get(CameraParameters::KEY_RECORDING_HINT) );
- IF_ALOGV() { // Avoid unused variable warning
- bool recordingHintChanged =
- validatedParams.recordingHint != recordingHint;
- if (recordingHintChanged) {
- ALOGV("%s: Recording hint changed to %d",
- __FUNCTION__, validatedParams.recordingHint);
- }
- }
+ bool recordingHintChanged = validatedParams.recordingHint != recordingHint;
+ ALOGV_IF(recordingHintChanged, "%s: Recording hint changed to %d",
+ __FUNCTION__, recordingHintChanged);
// PREVIEW_FPS_RANGE
+ bool fpsRangeChanged = false;
+ int32_t lastSetFpsRange[2];
- /**
- * Use the single FPS value if it was set later than the range.
- * Otherwise, use the range value.
- */
- bool fpsUseSingleValue;
- {
- const char *fpsRange, *fpsSingle;
-
- fpsRange = newParams.get(CameraParameters::KEY_PREVIEW_FRAME_RATE);
- fpsSingle = newParams.get(CameraParameters::KEY_PREVIEW_FPS_RANGE);
-
- /**
- * Pick either the range or the single key if only one was set.
- *
- * If both are set, pick the one that has greater set order.
- */
- if (fpsRange == NULL && fpsSingle == NULL) {
- ALOGE("%s: FPS was not set. One of %s or %s must be set.",
- __FUNCTION__, CameraParameters::KEY_PREVIEW_FRAME_RATE,
- CameraParameters::KEY_PREVIEW_FPS_RANGE);
- return BAD_VALUE;
- } else if (fpsRange == NULL) {
- fpsUseSingleValue = true;
- ALOGV("%s: FPS range not set, using FPS single value",
- __FUNCTION__);
- } else if (fpsSingle == NULL) {
- fpsUseSingleValue = false;
- ALOGV("%s: FPS single not set, using FPS range value",
- __FUNCTION__);
- } else {
- int fpsKeyOrder;
- res = newParams.compareSetOrder(
- CameraParameters::KEY_PREVIEW_FRAME_RATE,
- CameraParameters::KEY_PREVIEW_FPS_RANGE,
- &fpsKeyOrder);
- LOG_ALWAYS_FATAL_IF(res != OK, "Impossibly bad FPS keys");
-
- fpsUseSingleValue = (fpsKeyOrder > 0);
+ params.getPreviewFpsRange(&lastSetFpsRange[0], &lastSetFpsRange[1]);
+ lastSetFpsRange[0] /= kFpsToApiScale;
+ lastSetFpsRange[1] /= kFpsToApiScale;
- }
-
- ALOGV("%s: Preview FPS value is used from '%s'",
- __FUNCTION__, fpsUseSingleValue ? "single" : "range");
- }
newParams.getPreviewFpsRange(&validatedParams.previewFpsRange[0],
&validatedParams.previewFpsRange[1]);
-
validatedParams.previewFpsRange[0] /= kFpsToApiScale;
validatedParams.previewFpsRange[1] /= kFpsToApiScale;
- // Ignore the FPS range if the FPS single has higher precedence
- if (!fpsUseSingleValue) {
- ALOGV("%s: Preview FPS range (%d, %d)", __FUNCTION__,
- validatedParams.previewFpsRange[0],
- validatedParams.previewFpsRange[1]);
+ // Compare the FPS range value from the last set() to the current set()
+ // to determine if the client has changed it
+ if (validatedParams.previewFpsRange[0] != lastSetFpsRange[0] ||
+ validatedParams.previewFpsRange[1] != lastSetFpsRange[1]) {
+ fpsRangeChanged = true;
camera_metadata_ro_entry_t availablePreviewFpsRanges =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
for (i = 0; i < availablePreviewFpsRanges.count; i += 2) {
@@ -1248,13 +1200,14 @@ status_t Parameters::set(const String8& paramString) {
}
}
- // PREVIEW_FRAME_RATE Deprecated
- // - Use only if the single FPS value was set later than the FPS range
- if (fpsUseSingleValue) {
+ // PREVIEW_FRAME_RATE Deprecated, only use if the preview fps range is
+ // unchanged this time. The single-value FPS is the same as the minimum of
+ // the range. To detect whether the application has changed the value of
+ // previewFps, compare against their last-set preview FPS.
+ if (!fpsRangeChanged) {
int previewFps = newParams.getPreviewFrameRate();
- ALOGV("%s: Preview FPS single value requested: %d",
- __FUNCTION__, previewFps);
- {
+ int lastSetPreviewFps = params.getPreviewFrameRate();
+ if (previewFps != lastSetPreviewFps || recordingHintChanged) {
camera_metadata_ro_entry_t availableFrameRates =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
/**
@@ -1323,35 +1276,6 @@ status_t Parameters::set(const String8& paramString) {
}
}
- /**
- * Update Preview FPS and Preview FPS ranges based on
- * what we actually set.
- *
- * This updates the API-visible (Camera.Parameters#getParameters) values of
- * the FPS fields, not only the internal versions.
- *
- * Order matters: The value that was set last takes precedence.
- * - If the client does a setParameters(getParameters()) we retain
- * the same order for preview FPS.
- */
- if (!fpsUseSingleValue) {
- // Set fps single, then fps range (range wins)
- newParams.setPreviewFrameRate(
- fpsFromRange(/*min*/validatedParams.previewFpsRange[0],
- /*max*/validatedParams.previewFpsRange[1]));
- newParams.setPreviewFpsRange(
- validatedParams.previewFpsRange[0] * kFpsToApiScale,
- validatedParams.previewFpsRange[1] * kFpsToApiScale);
- } else {
- // Set fps range, then fps single (single wins)
- newParams.setPreviewFpsRange(
- validatedParams.previewFpsRange[0] * kFpsToApiScale,
- validatedParams.previewFpsRange[1] * kFpsToApiScale);
- // Set this to the same value, but with higher priority
- newParams.setPreviewFrameRate(
- newParams.getPreviewFrameRate());
- }
-
// PICTURE_SIZE
newParams.getPictureSize(&validatedParams.pictureWidth,
&validatedParams.pictureHeight);
@@ -1522,7 +1446,7 @@ status_t Parameters::set(const String8& paramString) {
newParams.get(CameraParameters::KEY_SCENE_MODE) );
if (validatedParams.sceneMode != sceneMode &&
validatedParams.sceneMode !=
- ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) {
+ ANDROID_CONTROL_SCENE_MODE_DISABLED) {
camera_metadata_ro_entry_t availableSceneModes =
staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES);
for (i = 0; i < availableSceneModes.count; i++) {
@@ -1537,7 +1461,7 @@ status_t Parameters::set(const String8& paramString) {
}
}
bool sceneModeSet =
- validatedParams.sceneMode != ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
+ validatedParams.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED;
// FLASH_MODE
if (sceneModeSet) {
@@ -1667,10 +1591,11 @@ status_t Parameters::set(const String8& paramString) {
// FOCUS_AREAS
res = parseAreas(newParams.get(CameraParameters::KEY_FOCUS_AREAS),
&validatedParams.focusingAreas);
- size_t max3aRegions =
- (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS, 1, 1).data.i32[0];
+ size_t maxAfRegions = (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS,
+ Parameters::NUM_REGION, Parameters::NUM_REGION).
+ data.i32[Parameters::REGION_AF];
if (res == OK) res = validateAreas(validatedParams.focusingAreas,
- max3aRegions, AREA_KIND_FOCUS);
+ maxAfRegions, AREA_KIND_FOCUS);
if (res != OK) {
ALOGE("%s: Requested focus areas are malformed: %s",
__FUNCTION__, newParams.get(CameraParameters::KEY_FOCUS_AREAS));
@@ -1700,10 +1625,13 @@ status_t Parameters::set(const String8& paramString) {
newParams.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK));
// METERING_AREAS
+ size_t maxAeRegions = (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS,
+ Parameters::NUM_REGION, Parameters::NUM_REGION).
+ data.i32[Parameters::REGION_AE];
res = parseAreas(newParams.get(CameraParameters::KEY_METERING_AREAS),
&validatedParams.meteringAreas);
if (res == OK) {
- res = validateAreas(validatedParams.meteringAreas, max3aRegions,
+ res = validateAreas(validatedParams.meteringAreas, maxAeRegions,
AREA_KIND_METERING);
}
if (res != OK) {
@@ -1852,7 +1780,7 @@ status_t Parameters::updateRequest(CameraMetadata *request) const {
// (face detection statistics and face priority scene mode). Map from one
// to the other.
bool sceneModeActive =
- sceneMode != (uint8_t)ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
+ sceneMode != (uint8_t)ANDROID_CONTROL_SCENE_MODE_DISABLED;
uint8_t reqControlMode = ANDROID_CONTROL_MODE_AUTO;
if (enableFaceDetect || sceneModeActive) {
reqControlMode = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
@@ -1864,7 +1792,7 @@ status_t Parameters::updateRequest(CameraMetadata *request) const {
uint8_t reqSceneMode =
sceneModeActive ? sceneMode :
enableFaceDetect ? (uint8_t)ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
- (uint8_t)ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
+ (uint8_t)ANDROID_CONTROL_SCENE_MODE_DISABLED;
res = request->update(ANDROID_CONTROL_SCENE_MODE,
&reqSceneMode, 1);
if (res != OK) return res;
@@ -1985,6 +1913,23 @@ status_t Parameters::updateRequest(CameraMetadata *request) const {
reqMeteringAreas, reqMeteringAreasSize);
if (res != OK) return res;
+ // Set awb regions to be the same as the metering regions if allowed
+ size_t maxAwbRegions = (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS,
+ Parameters::NUM_REGION, Parameters::NUM_REGION).
+ data.i32[Parameters::REGION_AWB];
+ if (maxAwbRegions > 0) {
+ if (maxAwbRegions >= meteringAreas.size()) {
+ res = request->update(ANDROID_CONTROL_AWB_REGIONS,
+ reqMeteringAreas, reqMeteringAreasSize);
+ } else {
+ // Ensure the awb regions are zeroed if the region count is too high.
+ int32_t zeroedAwbAreas[5] = {0, 0, 0, 0, 0};
+ res = request->update(ANDROID_CONTROL_AWB_REGIONS,
+ zeroedAwbAreas, sizeof(zeroedAwbAreas)/sizeof(int32_t));
+ }
+ if (res != OK) return res;
+ }
+
delete[] reqMeteringAreas;
/* don't include jpeg thumbnail size - it's valid for
@@ -2225,9 +2170,9 @@ int Parameters::abModeStringToEnum(const char *abMode) {
int Parameters::sceneModeStringToEnum(const char *sceneMode) {
return
!sceneMode ?
- ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED :
+ ANDROID_CONTROL_SCENE_MODE_DISABLED :
!strcmp(sceneMode, CameraParameters::SCENE_MODE_AUTO) ?
- ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED :
+ ANDROID_CONTROL_SCENE_MODE_DISABLED :
!strcmp(sceneMode, CameraParameters::SCENE_MODE_ACTION) ?
ANDROID_CONTROL_SCENE_MODE_ACTION :
!strcmp(sceneMode, CameraParameters::SCENE_MODE_PORTRAIT) ?
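
Taken together, the Parameters.cpp changes drop CameraParameters2's set-order tracking and decide FPS precedence by comparing against the previously applied values. A condensed sketch under that assumption (function name invented for illustration):

    #include <camera/CameraParameters.h>

    using namespace android;

    // Returns true if the client supplied a preview FPS range different from
    // the one applied by the previous set(); kFpsToApiScale converts between
    // API units (fps * 1000) and internal fps values.
    static bool previewFpsRangeChanged(const CameraParameters& lastParams,
                                       const CameraParameters& newParams,
                                       int kFpsToApiScale) {
        int lastRange[2];
        int newRange[2];
        lastParams.getPreviewFpsRange(&lastRange[0], &lastRange[1]);
        newParams.getPreviewFpsRange(&newRange[0], &newRange[1]);
        for (int i = 0; i < 2; i++) {
            lastRange[i] /= kFpsToApiScale;
            newRange[i] /= kFpsToApiScale;
        }
        return newRange[0] != lastRange[0] || newRange[1] != lastRange[1];
    }
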
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index da07ccf..60c4687 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -25,7 +25,6 @@
#include <utils/Vector.h>
#include <utils/KeyedVector.h>
#include <camera/CameraParameters.h>
-#include <camera/CameraParameters2.h>
#include <camera/CameraMetadata.h>
namespace android {
@@ -33,7 +32,7 @@ namespace camera2 {
/**
* Current camera state; this is the full state of the Camera under the old
- * camera API (contents of the CameraParameters2 object in a more-efficient
+ * camera API (contents of the CameraParameters object in a more-efficient
* format, plus other state). The enum values are mostly based off the
* corresponding camera2 enums, not the camera1 strings. A few are defined here
* if they don't cleanly map to camera2 values.
@@ -114,6 +113,14 @@ struct Parameters {
bool autoExposureLock;
bool autoWhiteBalanceLock;
+ // 3A region types, for use with ANDROID_CONTROL_MAX_REGIONS
+ enum region_t {
+ REGION_AE = 0,
+ REGION_AWB,
+ REGION_AF,
+ NUM_REGION // Number of region types
+ } region;
+
Vector<Area> meteringAreas;
int zoom;
@@ -129,7 +136,7 @@ struct Parameters {
LIGHTFX_HDR
} lightFx;
- CameraParameters2 params;
+ CameraParameters params;
String8 paramsFlattened;
// These parameters are also part of the camera API-visible state, but not
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 77ae7ec..2064e2c 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -319,13 +319,15 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters &params) {
// Create CPU buffer queue endpoint. We need one more buffer here so that we can
// always acquire and free a buffer when the heap is full; otherwise the consumer
// will have buffers in flight we'll never clear out.
- sp<BufferQueue> bq = new BufferQueue();
- mRecordingConsumer = new BufferItemConsumer(bq,
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mRecordingConsumer = new BufferItemConsumer(consumer,
GRALLOC_USAGE_HW_VIDEO_ENCODER,
mRecordingHeapCount + 1);
mRecordingConsumer->setFrameAvailableListener(this);
mRecordingConsumer->setName(String8("Camera2-RecordingConsumer"));
- mRecordingWindow = new Surface(bq);
+ mRecordingWindow = new Surface(producer);
newConsumer = true;
// Allocate memory later, since we don't know buffer size until receipt
}
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 130f81a..2a2a5af 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -73,18 +73,19 @@ void ZslProcessor::onFrameAvailable() {
}
}
-void ZslProcessor::onFrameAvailable(int32_t /*requestId*/,
- const CameraMetadata &frame) {
+void ZslProcessor::onResultAvailable(const CaptureResult &result) {
+ ATRACE_CALL();
+ ALOGV("%s:", __FUNCTION__);
Mutex::Autolock l(mInputMutex);
camera_metadata_ro_entry_t entry;
- entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+ entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
nsecs_t timestamp = entry.data.i64[0];
(void)timestamp;
ALOGVV("Got preview frame for timestamp %" PRId64, timestamp);
if (mState != RUNNING) return;
- mFrameList.editItemAt(mFrameListHead) = frame;
+ mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
mFrameListHead = (mFrameListHead + 1) % kFrameListDepth;
findMatchesLocked();
@@ -130,13 +131,15 @@ status_t ZslProcessor::updateStream(const Parameters &params) {
if (mZslConsumer == 0) {
// Create CPU buffer queue endpoint
- sp<BufferQueue> bq = new BufferQueue();
- mZslConsumer = new BufferItemConsumer(bq,
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mZslConsumer = new BufferItemConsumer(consumer,
GRALLOC_USAGE_HW_CAMERA_ZSL,
kZslBufferDepth);
mZslConsumer->setFrameAvailableListener(this);
mZslConsumer->setName(String8("Camera2Client::ZslConsumer"));
- mZslWindow = new Surface(bq);
+ mZslWindow = new Surface(producer);
}
if (mZslStreamId != NO_STREAM) {
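
The stored result metadata goes into a fixed-depth ring buffer that the ZSL matching code scans later; a sketch of that step (the depth constant and names here are invented, and frameList is assumed to be pre-sized to kFrameListDepth entries):

    #include <camera/CameraMetadata.h>
    #include <utils/Vector.h>

    using namespace android;

    static const size_t kFrameListDepth = 12;

    // The newest result metadata overwrites the oldest slot in the ring.
    static void pushResultMetadata(Vector<CameraMetadata>& frameList,
                                   size_t& frameListHead,
                                   const CameraMetadata& metadata) {
        frameList.editItemAt(frameListHead) = metadata;
        frameListHead = (frameListHead + 1) % kFrameListDepth;
    }
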
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index 6d3cb85..f4cf0c8 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -24,6 +24,7 @@
#include <utils/Condition.h>
#include <gui/BufferItemConsumer.h>
#include <camera/CameraMetadata.h>
+#include <camera/CaptureResult.h>
#include "common/CameraDeviceBase.h"
#include "api1/client2/ZslProcessorInterface.h"
@@ -54,7 +55,7 @@ class ZslProcessor:
// From mZslConsumer
virtual void onFrameAvailable();
// From FrameProcessor
- virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame);
+ virtual void onResultAvailable(const CaptureResult &result);
virtual void onBufferReleased(buffer_handle_t *handle);
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
index 2fce2b6..1dcb718 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
@@ -63,18 +63,19 @@ ZslProcessor3::~ZslProcessor3() {
deleteStream();
}
-void ZslProcessor3::onFrameAvailable(int32_t /*requestId*/,
- const CameraMetadata &frame) {
+void ZslProcessor3::onResultAvailable(const CaptureResult &result) {
+ ATRACE_CALL();
+ ALOGV("%s:", __FUNCTION__);
Mutex::Autolock l(mInputMutex);
camera_metadata_ro_entry_t entry;
- entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+ entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
nsecs_t timestamp = entry.data.i64[0];
(void)timestamp;
ALOGVV("Got preview metadata for timestamp %" PRId64, timestamp);
if (mState != RUNNING) return;
- mFrameList.editItemAt(mFrameListHead) = frame;
+ mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
mFrameListHead = (mFrameListHead + 1) % kFrameListDepth;
}
@@ -275,6 +276,15 @@ status_t ZslProcessor3::pushToReprocess(int32_t requestId) {
return INVALID_OPERATION;
}
+ // Flush device to clear out all in-flight requests pending in HAL.
+ res = client->getCameraDevice()->flush();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Failed to flush device: "
+ "%s (%d)",
+ __FUNCTION__, client->getCameraId(), strerror(-res), res);
+ return res;
+ }
+
// Update JPEG settings
{
SharedParameters::Lock l(client->getParameters());
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
index d2f8322..4c52a64 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
@@ -50,8 +50,8 @@ class ZslProcessor3 :
ZslProcessor3(sp<Camera2Client> client, wp<CaptureSequencer> sequencer);
~ZslProcessor3();
- // From FrameProcessor
- virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame);
+ // From FrameProcessor::FilteredListener
+ virtual void onResultAvailable(const CaptureResult &result);
/**
****************************************