Diffstat (limited to 'services/camera/libcameraservice')
-rw-r--r--  services/camera/libcameraservice/Android.mk                       |   1
-rw-r--r--  services/camera/libcameraservice/CameraService.cpp                |   9
-rw-r--r--  services/camera/libcameraservice/api1/Camera2Client.cpp           |  22
-rw-r--r--  services/camera/libcameraservice/api1/CameraClient.cpp            |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/FrameProcessor.cpp  | 181
-rw-r--r--  services/camera/libcameraservice/api1/client2/FrameProcessor.h    |  30
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.cpp      |  56
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.h        |   3
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor.cpp    |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp   |   2
-rw-r--r--  services/camera/libcameraservice/api2/CameraDeviceClient.cpp      |   3
-rw-r--r--  services/camera/libcameraservice/common/FrameProcessorBase.cpp    |  17
-rw-r--r--  services/camera/libcameraservice/common/FrameProcessorBase.h      |   7
-rw-r--r--  services/camera/libcameraservice/device2/Camera2Device.cpp        |   9
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Device.cpp        | 256
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Device.h          |  31
-rw-r--r--  services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp  |   4
-rw-r--r--  services/camera/libcameraservice/device3/Camera3ZslStream.cpp     |   2
-rw-r--r--  services/camera/libcameraservice/gui/RingBufferConsumer.cpp       |  10
-rw-r--r--  services/camera/libcameraservice/utils/CameraTraces.cpp           |  94
-rw-r--r--  services/camera/libcameraservice/utils/CameraTraces.h             |  66
21 files changed, 664 insertions(+), 143 deletions(-)
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index d23f8b9..51ba698 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -35,6 +35,7 @@ LOCAL_SRC_FILES:= \
device3/Camera3ZslStream.cpp \
device3/StatusTracker.cpp \
gui/RingBufferConsumer.cpp \
+ utils/CameraTraces.cpp \
LOCAL_SHARED_LIBRARIES:= \
libui \
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 34a5b15..87027f7 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -42,6 +42,7 @@
#include "api1/Camera2Client.h"
#include "api_pro/ProCamera2Client.h"
#include "api2/CameraDeviceClient.h"
+#include "utils/CameraTraces.h"
#include "CameraDeviceFactory.h"
namespace android {
@@ -1041,13 +1042,13 @@ void CameraService::BasicClient::opChanged(int32_t op, const String16& packageNa
// ----------------------------------------------------------------------------
Mutex* CameraService::Client::getClientLockFromCookie(void* user) {
- return gCameraService->getClientLockById((int) user);
+ return gCameraService->getClientLockById((int)(intptr_t) user);
}
// Provide client pointer for callbacks. Client lock returned from getClientLockFromCookie should
// be acquired for this to be safe
CameraService::Client* CameraService::Client::getClientFromCookie(void* user) {
- BasicClient *basicClient = gCameraService->getClientByIdUnsafe((int) user);
+ BasicClient *basicClient = gCameraService->getClientByIdUnsafe((int)(intptr_t) user);
// OK: only CameraClient calls this, and they already cast anyway.
Client* client = static_cast<Client*>(basicClient);
@@ -1219,6 +1220,10 @@ status_t CameraService::dump(int fd, const Vector<String16>& args) {
if (locked) mServiceLock.unlock();
+ // Dump camera traces if there were any
+ write(fd, "\n", 1);
+ camera3::CameraTraces::dump(fd, args);
+
// change logging level
int n = args.size();
for (int i = 0; i + 1 < n; i++) {
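Aside: the (int)(intptr_t) double cast above exists because the camera ID is carried through a void* callback cookie, and a direct pointer-to-int cast narrows on LP64 targets. A minimal standalone sketch of that round trip (helper names here are illustrative, not from the patch):

    #include <cstdint>
    #include <cstdio>

    // The cookie is just an integer widened to pointer size; narrowing it back
    // through intptr_t keeps 32-bit and 64-bit builds both warning-free.
    static void callback(void* cookie) {
        int cameraId = static_cast<int>(reinterpret_cast<std::intptr_t>(cookie));
        std::printf("camera id from cookie: %d\n", cameraId);
    }

    int main() {
        int cameraId = 1;
        void* cookie = reinterpret_cast<void*>(static_cast<std::intptr_t>(cameraId));
        callback(cookie);  // prints 1 on both LP32 and LP64
        return 0;
    }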
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index df3b162..ba1e772 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -18,6 +18,7 @@
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+#include <inttypes.h>
#include <utils/Log.h>
#include <utils/Trace.h>
@@ -76,13 +77,15 @@ status_t Camera2Client::initialize(camera_module_t *module)
return res;
}
- SharedParameters::Lock l(mParameters);
+ {
+ SharedParameters::Lock l(mParameters);
- res = l.mParameters.initialize(&(mDevice->info()));
- if (res != OK) {
- ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
- return NO_INIT;
+ res = l.mParameters.initialize(&(mDevice->info()));
+ if (res != OK) {
+ ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
+ __FUNCTION__, mCameraId, strerror(-res), res);
+ return NO_INIT;
+ }
}
String8 threadName;
@@ -135,6 +138,7 @@ status_t Camera2Client::initialize(camera_module_t *module)
mCallbackProcessor->run(threadName.string());
if (gLogLevel >= 1) {
+ SharedParameters::Lock l(mParameters);
ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
mCameraId);
ALOGD("%s", l.mParameters.paramsFlattened.string());
@@ -190,7 +194,7 @@ status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
result.appendFormat(" GPS lat x long x alt: %f x %f x %f\n",
p.gpsCoordinates[0], p.gpsCoordinates[1],
p.gpsCoordinates[2]);
- result.appendFormat(" GPS timestamp: %lld\n",
+ result.appendFormat(" GPS timestamp: %" PRId64 "\n",
p.gpsTimestamp);
result.appendFormat(" GPS processing method: %s\n",
p.gpsProcessingMethod.string());
@@ -353,6 +357,10 @@ status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
result.appendFormat(" meteringCropRegion\n");
haveQuirk = true;
}
+ if (p.quirks.partialResults) {
+ result.appendFormat(" usePartialResult\n");
+ haveQuirk = true;
+ }
if (!haveQuirk) {
result.appendFormat(" none\n");
}
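Aside: the dump() fix above switches from %lld to the <inttypes.h> PRId64 macro because int64_t maps to long on LP64 but long long on LP32, so no hard-coded length modifier is portable. A small sketch (the variable name mirrors the patch; the rest is illustrative):

    #include <cinttypes>
    #include <cstdio>

    int main() {
        std::int64_t gpsTimestamp = 1379980800LL;
        // PRId64 expands to the right conversion specifier ("lld" or "ld")
        // for this platform's int64_t, keeping the format string portable.
        std::printf("GPS timestamp: %" PRId64 "\n", gpsTimestamp);
        return 0;
    }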
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index bd6805d..30b7bb8 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -85,7 +85,7 @@ status_t CameraClient::initialize(camera_module_t *module) {
mHardware->setCallbacks(notifyCallback,
dataCallback,
dataCallbackTimestamp,
- (void *)mCameraId);
+ (void *)(uintptr_t)mCameraId);
// Enable zoom, error, focus, and metadata messages by default
enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS |
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index c34cb12..19acae4 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -29,13 +29,27 @@ namespace android {
namespace camera2 {
FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
- wp<Camera2Client> client) :
+ sp<Camera2Client> client) :
FrameProcessorBase(device),
mClient(client),
- mLastFrameNumberOfFaces(0) {
+ mLastFrameNumberOfFaces(0),
+ mLast3AFrameNumber(-1) {
sp<CameraDeviceBase> d = device.promote();
mSynthesize3ANotify = !(d->willNotify3A());
+
+ {
+ SharedParameters::Lock l(client->getParameters());
+ mUsePartialQuirk = l.mParameters.quirks.partialResults;
+
+ // Initialize starting 3A state
+ m3aState.afTriggerId = l.mParameters.afTriggerCounter;
+ m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
+ // Check if lens is fixed-focus
+ if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
+ m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
+ }
+ }
}
FrameProcessor::~FrameProcessor() {
@@ -49,20 +63,25 @@ bool FrameProcessor::processSingleFrame(CameraMetadata &frame,
return false;
}
- if (processFaceDetect(frame, client) != OK) {
+ bool partialResult = false;
+ if (mUsePartialQuirk) {
+ camera_metadata_entry_t entry;
+ entry = frame.find(ANDROID_QUIRKS_PARTIAL_RESULT);
+ if (entry.count > 0 &&
+ entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
+ partialResult = true;
+ }
+ }
+
+ if (!partialResult && processFaceDetect(frame, client) != OK) {
return false;
}
if (mSynthesize3ANotify) {
- // Ignoring missing fields for now
process3aState(frame, client);
}
- if (!FrameProcessorBase::processSingleFrame(frame, device)) {
- return false;
- }
-
- return true;
+ return FrameProcessorBase::processSingleFrame(frame, device);
}
status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
@@ -198,86 +217,75 @@ status_t FrameProcessor::process3aState(const CameraMetadata &frame,
ATRACE_CALL();
camera_metadata_ro_entry_t entry;
- int mId = client->getCameraId();
+ int cameraId = client->getCameraId();
entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
int32_t frameNumber = entry.data.i32[0];
+ // Don't send 3A notifications for the same frame number twice
+ if (frameNumber <= mLast3AFrameNumber) {
+ ALOGV("%s: Already sent 3A for frame number %d, skipping",
+ __FUNCTION__, frameNumber);
+ return OK;
+ }
+
+ mLast3AFrameNumber = frameNumber;
+
// Get 3A states from result metadata
bool gotAllStates = true;
AlgState new3aState;
- entry = frame.find(ANDROID_CONTROL_AE_STATE);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: No AE state provided by HAL for frame %d!",
- __FUNCTION__, mId, frameNumber);
- gotAllStates = false;
- } else {
- new3aState.aeState =
- static_cast<camera_metadata_enum_android_control_ae_state>(
- entry.data.u8[0]);
- }
+ // TODO: Also use AE mode, AE trigger ID
- entry = frame.find(ANDROID_CONTROL_AF_STATE);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: No AF state provided by HAL for frame %d!",
- __FUNCTION__, mId, frameNumber);
- gotAllStates = false;
- } else {
- new3aState.afState =
- static_cast<camera_metadata_enum_android_control_af_state>(
- entry.data.u8[0]);
- }
+ gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AF_MODE,
+ &new3aState.afMode, frameNumber, cameraId);
- entry = frame.find(ANDROID_CONTROL_AWB_STATE);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: No AWB state provided by HAL for frame %d!",
- __FUNCTION__, mId, frameNumber);
- gotAllStates = false;
- } else {
- new3aState.awbState =
- static_cast<camera_metadata_enum_android_control_awb_state>(
- entry.data.u8[0]);
- }
+ gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AWB_MODE,
+ &new3aState.awbMode, frameNumber, cameraId);
- int32_t afTriggerId = 0;
- entry = frame.find(ANDROID_CONTROL_AF_TRIGGER_ID);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: No AF trigger ID provided by HAL for frame %d!",
- __FUNCTION__, mId, frameNumber);
- gotAllStates = false;
- } else {
- afTriggerId = entry.data.i32[0];
- }
+ gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AE_STATE,
+ &new3aState.aeState, frameNumber, cameraId);
- int32_t aeTriggerId = 0;
- entry = frame.find(ANDROID_CONTROL_AE_PRECAPTURE_ID);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: No AE precapture trigger ID provided by HAL"
- " for frame %d!",
- __FUNCTION__, mId, frameNumber);
- gotAllStates = false;
- } else {
- aeTriggerId = entry.data.i32[0];
- }
+ gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AF_STATE,
+ &new3aState.afState, frameNumber, cameraId);
+
+ gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AWB_STATE,
+ &new3aState.awbState, frameNumber, cameraId);
+
+ gotAllStates &= get3aResult<int32_t>(frame, ANDROID_CONTROL_AF_TRIGGER_ID,
+ &new3aState.afTriggerId, frameNumber, cameraId);
+
+ gotAllStates &= get3aResult<int32_t>(frame, ANDROID_CONTROL_AE_PRECAPTURE_ID,
+ &new3aState.aeTriggerId, frameNumber, cameraId);
if (!gotAllStates) return BAD_VALUE;
if (new3aState.aeState != m3aState.aeState) {
- ALOGV("%s: AE state changed from 0x%x to 0x%x",
- __FUNCTION__, m3aState.aeState, new3aState.aeState);
- client->notifyAutoExposure(new3aState.aeState, aeTriggerId);
+ ALOGV("%s: Camera %d: AE state %d->%d",
+ __FUNCTION__, cameraId,
+ m3aState.aeState, new3aState.aeState);
+ client->notifyAutoExposure(new3aState.aeState, new3aState.aeTriggerId);
}
- if (new3aState.afState != m3aState.afState) {
- ALOGV("%s: AF state changed from 0x%x to 0x%x",
- __FUNCTION__, m3aState.afState, new3aState.afState);
- client->notifyAutoFocus(new3aState.afState, afTriggerId);
+
+ if (new3aState.afState != m3aState.afState ||
+ new3aState.afMode != m3aState.afMode ||
+ new3aState.afTriggerId != m3aState.afTriggerId) {
+ ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
+ __FUNCTION__, cameraId,
+ m3aState.afState, new3aState.afState,
+ m3aState.afMode, new3aState.afMode,
+ m3aState.afTriggerId, new3aState.afTriggerId);
+ client->notifyAutoFocus(new3aState.afState, new3aState.afTriggerId);
}
- if (new3aState.awbState != m3aState.awbState) {
- ALOGV("%s: AWB state changed from 0x%x to 0x%x",
- __FUNCTION__, m3aState.awbState, new3aState.awbState);
- client->notifyAutoWhitebalance(new3aState.awbState, aeTriggerId);
+ if (new3aState.awbState != m3aState.awbState ||
+ new3aState.awbMode != m3aState.awbMode) {
+ ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
+ __FUNCTION__, cameraId,
+ m3aState.awbState, new3aState.awbState,
+ m3aState.awbMode, new3aState.awbMode);
+ client->notifyAutoWhitebalance(new3aState.awbState,
+ new3aState.aeTriggerId);
}
m3aState = new3aState;
@@ -285,6 +293,39 @@ status_t FrameProcessor::process3aState(const CameraMetadata &frame,
return OK;
}
+template<typename Src, typename T>
+bool FrameProcessor::get3aResult(const CameraMetadata& result, int32_t tag,
+ T* value, int32_t frameNumber, int cameraId) {
+ camera_metadata_ro_entry_t entry;
+ if (value == NULL) {
+ ALOGE("%s: Camera %d: Value to write to is NULL",
+ __FUNCTION__, cameraId);
+ return false;
+ }
+
+ entry = result.find(tag);
+ if (entry.count == 0) {
+ ALOGE("%s: Camera %d: No %s provided by HAL for frame %d!",
+ __FUNCTION__, cameraId,
+ get_camera_metadata_tag_name(tag), frameNumber);
+ return false;
+ } else {
+ switch(sizeof(Src)){
+ case sizeof(uint8_t):
+ *value = static_cast<T>(entry.data.u8[0]);
+ break;
+ case sizeof(int32_t):
+ *value = static_cast<T>(entry.data.i32[0]);
+ break;
+ default:
+ ALOGE("%s: Camera %d: Unsupported source",
+ __FUNCTION__, cameraId);
+ return false;
+ }
+ }
+ return true;
+}
+
void FrameProcessor::callbackFaceDetection(sp<Camera2Client> client,
const camera_frame_metadata &metadata) {
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index 2a17d45..856ad32 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -39,7 +39,7 @@ namespace camera2 {
*/
class FrameProcessor : public FrameProcessorBase {
public:
- FrameProcessor(wp<CameraDeviceBase> device, wp<Camera2Client> client);
+ FrameProcessor(wp<CameraDeviceBase> device, sp<Camera2Client> client);
~FrameProcessor();
private:
@@ -61,18 +61,44 @@ class FrameProcessor : public FrameProcessorBase {
status_t process3aState(const CameraMetadata &frame,
const sp<Camera2Client> &client);
+ // Helper for process3aState
+ template<typename Src, typename T>
+ bool get3aResult(const CameraMetadata& result, int32_t tag, T* value,
+ int32_t frameNumber, int cameraId);
+
+
struct AlgState {
+ // TODO: also track AE mode
+ camera_metadata_enum_android_control_af_mode afMode;
+ camera_metadata_enum_android_control_awb_mode awbMode;
+
camera_metadata_enum_android_control_ae_state aeState;
camera_metadata_enum_android_control_af_state afState;
camera_metadata_enum_android_control_awb_state awbState;
+ int32_t afTriggerId;
+ int32_t aeTriggerId;
+
+ // These defaults need to match those in Parameters.cpp
AlgState() :
+ afMode(ANDROID_CONTROL_AF_MODE_AUTO),
+ awbMode(ANDROID_CONTROL_AWB_MODE_AUTO),
aeState(ANDROID_CONTROL_AE_STATE_INACTIVE),
afState(ANDROID_CONTROL_AF_STATE_INACTIVE),
- awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE) {
+ awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE),
+ afTriggerId(0),
+ aeTriggerId(0) {
}
} m3aState;
+ // Whether the partial result quirk is enabled for this device
+ bool mUsePartialQuirk;
+
+ // Track most recent frame number for which 3A notifications were sent for.
+ // Used to filter against sending 3A notifications for the same frame
+ // several times.
+ int32_t mLast3AFrameNumber;
+
// Emit FaceDetection event to java if faces changed
void callbackFaceDetection(sp<Camera2Client> client,
const camera_frame_metadata &metadata);
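Aside: a standalone analogue of the sizeof(Src) dispatch used by the new get3aResult() helper, with std types standing in for CameraMetadata; all names below are illustrative:

    #include <cstdint>
    #include <cstdio>

    struct Entry {            // stand-in for camera_metadata_ro_entry_t
        int count;
        const uint8_t* u8;
        const int32_t* i32;
    };

    // The width of Src decides whether the 8-bit or 32-bit payload is read,
    // and the value is cast to whatever enum/integer type the caller wants.
    template<typename Src, typename T>
    bool readFirst(const Entry& entry, T* value) {
        if (value == nullptr || entry.count == 0) return false;
        switch (sizeof(Src)) {
            case sizeof(uint8_t): *value = static_cast<T>(entry.u8[0]);  break;
            case sizeof(int32_t): *value = static_cast<T>(entry.i32[0]); break;
            default:              return false;
        }
        return true;
    }

    int main() {
        uint8_t states[]   = { 3 };    // e.g. an AF state enum value
        int32_t triggers[] = { 42 };   // e.g. an AF trigger ID
        Entry e{1, states, triggers};

        int afState = 0, afTriggerId = 0;
        readFirst<uint8_t>(e, &afState);      // reads the u8 payload
        readFirst<int32_t>(e, &afTriggerId);  // reads the i32 payload
        std::printf("afState=%d afTriggerId=%d\n", afState, afTriggerId);
        return 0;
    }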
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 8a4e75c..08af566 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -183,8 +183,7 @@ status_t Parameters::initialize(const CameraMetadata *info) {
// still have to do something sane for them
// NOTE: Not scaled like FPS range values are.
- previewFps = fpsFromRange(previewFpsRange[0], previewFpsRange[1]);
- lastSetPreviewFps = previewFps;
+ int previewFps = fpsFromRange(previewFpsRange[0], previewFpsRange[1]);
params.set(CameraParameters::KEY_PREVIEW_FRAME_RATE,
previewFps);
@@ -1047,6 +1046,11 @@ status_t Parameters::buildQuirks() {
ALOGV_IF(quirks.meteringCropRegion, "Camera %d: Quirk meteringCropRegion"
" enabled", cameraId);
+ entry = info->find(ANDROID_QUIRKS_USE_PARTIAL_RESULT);
+ quirks.partialResults = (entry.count != 0 && entry.data.u8[0] == 1);
+ ALOGV_IF(quirks.partialResults, "Camera %d: Quirk usePartialResult"
+ " enabled", cameraId);
+
return OK;
}
@@ -1129,13 +1133,22 @@ status_t Parameters::set(const String8& paramString) {
// PREVIEW_FPS_RANGE
bool fpsRangeChanged = false;
+ int32_t lastSetFpsRange[2];
+
+ params.getPreviewFpsRange(&lastSetFpsRange[0], &lastSetFpsRange[1]);
+ lastSetFpsRange[0] /= kFpsToApiScale;
+ lastSetFpsRange[1] /= kFpsToApiScale;
+
newParams.getPreviewFpsRange(&validatedParams.previewFpsRange[0],
&validatedParams.previewFpsRange[1]);
validatedParams.previewFpsRange[0] /= kFpsToApiScale;
validatedParams.previewFpsRange[1] /= kFpsToApiScale;
- if (validatedParams.previewFpsRange[0] != previewFpsRange[0] ||
- validatedParams.previewFpsRange[1] != previewFpsRange[1]) {
+ // Compare the FPS range value from the last set() to the current set()
+ // to determine if the client has changed it
+ if (validatedParams.previewFpsRange[0] != lastSetFpsRange[0] ||
+ validatedParams.previewFpsRange[1] != lastSetFpsRange[1]) {
+
fpsRangeChanged = true;
camera_metadata_ro_entry_t availablePreviewFpsRanges =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
@@ -1153,16 +1166,6 @@ status_t Parameters::set(const String8& paramString) {
validatedParams.previewFpsRange[1]);
return BAD_VALUE;
}
- validatedParams.previewFps =
- fpsFromRange(validatedParams.previewFpsRange[0],
- validatedParams.previewFpsRange[1]);
-
- // Update our last-seen single preview FPS, needed for disambiguating
- // when the application is intending to use the deprecated single-FPS
- // setting vs. the range FPS setting
- validatedParams.lastSetPreviewFps = newParams.getPreviewFrameRate();
-
- newParams.setPreviewFrameRate(validatedParams.previewFps);
}
// PREVIEW_FORMAT
@@ -1200,12 +1203,11 @@ status_t Parameters::set(const String8& paramString) {
// PREVIEW_FRAME_RATE Deprecated, only use if the preview fps range is
// unchanged this time. The single-value FPS is the same as the minimum of
// the range. To detect whether the application has changed the value of
- // previewFps, compare against their last-set preview FPS instead of the
- // single FPS we may have synthesized from a range FPS set.
+ // previewFps, compare against their last-set preview FPS.
if (!fpsRangeChanged) {
- validatedParams.previewFps = newParams.getPreviewFrameRate();
- if (validatedParams.previewFps != lastSetPreviewFps ||
- recordingHintChanged) {
+ int previewFps = newParams.getPreviewFrameRate();
+ int lastSetPreviewFps = params.getPreviewFrameRate();
+ if (previewFps != lastSetPreviewFps || recordingHintChanged) {
camera_metadata_ro_entry_t availableFrameRates =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
/**
@@ -1218,8 +1220,8 @@ status_t Parameters::set(const String8& paramString) {
* Either way, in case of multiple ranges, break the tie by
* selecting the smaller range.
*/
- int targetFps = validatedParams.previewFps;
- // all ranges which have targetFps
+
+ // all ranges which have previewFps
Vector<Range> candidateRanges;
for (i = 0; i < availableFrameRates.count; i+=2) {
Range r = {
@@ -1227,13 +1229,13 @@ status_t Parameters::set(const String8& paramString) {
availableFrameRates.data.i32[i+1]
};
- if (r.min <= targetFps && targetFps <= r.max) {
+ if (r.min <= previewFps && previewFps <= r.max) {
candidateRanges.push(r);
}
}
if (candidateRanges.isEmpty()) {
ALOGE("%s: Requested preview frame rate %d is not supported",
- __FUNCTION__, validatedParams.previewFps);
+ __FUNCTION__, previewFps);
return BAD_VALUE;
}
// most applicable range with targetFps
@@ -1272,14 +1274,6 @@ status_t Parameters::set(const String8& paramString) {
validatedParams.previewFpsRange[1],
validatedParams.recordingHint);
}
- newParams.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,
- String8::format("%d,%d",
- validatedParams.previewFpsRange[0] * kFpsToApiScale,
- validatedParams.previewFpsRange[1] * kFpsToApiScale));
- // Update our last-seen single preview FPS, needed for disambiguating
- // when the application is intending to use the deprecated single-FPS
- // setting vs. the range FPS setting
- validatedParams.lastSetPreviewFps = validatedParams.previewFps;
}
// PICTURE_SIZE
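Aside: a sketch (not AOSP code) of the change-detection rule the Parameters rewrite moves to: the last-set FPS values are re-read from the stored parameter string and compared against the incoming ones, instead of being cached in dedicated members:

    #include <cstdio>

    struct FpsRange { int min, max; };

    static bool fpsRangeChanged(const FpsRange& lastSet, const FpsRange& incoming) {
        return incoming.min != lastSet.min || incoming.max != lastSet.max;
    }

    int main() {
        FpsRange lastSet{15, 30};   // parsed from the stored params (scaled down)
        FpsRange incoming{15, 30};  // parsed from the new set() string

        if (!fpsRangeChanged(lastSet, incoming)) {
            // Range untouched: fall back to the deprecated single preview FPS,
            // again comparing the new value against the last-set one.
            int lastSetPreviewFps = 30, previewFps = 30;
            std::printf("single-FPS path, changed=%d\n",
                        previewFps != lastSetPreviewFps);
        }
        return 0;
    }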
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index bcbdb99..32dbd42 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -46,8 +46,6 @@ struct Parameters {
int previewWidth, previewHeight;
int32_t previewFpsRange[2];
- int lastSetPreviewFps; // the last single FPS value seen in a set call
- int previewFps; // deprecated, here only for tracking changes
int previewFormat;
int previewTransform; // set by CAMERA_CMD_SET_DISPLAY_ORIENTATION
@@ -209,6 +207,7 @@ struct Parameters {
bool triggerAfWithAuto;
bool useZslFormat;
bool meteringCropRegion;
+ bool partialResults;
} quirks;
/**
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 4207ba9..453d54c 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -540,7 +540,7 @@ void ZslProcessor::dumpZslQueue(int fd) const {
if (entry.count > 0) frameAeState = entry.data.u8[0];
}
String8 result =
- String8::format(" %d: b: %lld\tf: %lld, AE state: %d", i,
+ String8::format(" %zu: b: %lld\tf: %lld, AE state: %d", i,
bufferTimestamp, frameTimestamp, frameAeState);
ALOGV("%s", result.string());
if (fd != -1) {
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
index 776ebe2..6b4e57a 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
@@ -355,7 +355,7 @@ void ZslProcessor3::dumpZslQueue(int fd) const {
if (entry.count > 0) frameAeState = entry.data.u8[0];
}
String8 result =
- String8::format(" %d: b: %lld\tf: %lld, AE state: %d", i,
+ String8::format(" %zu: b: %lld\tf: %lld, AE state: %d", i,
bufferTimestamp, frameTimestamp, frameAeState);
ALOGV("%s", result.string());
if (fd != -1) {
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 72126c1..1cdf8dc 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -81,7 +81,8 @@ status_t CameraDeviceClient::initialize(camera_module_t *module)
mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID,
FRAME_PROCESSOR_LISTENER_MAX_ID,
- /*listener*/this);
+ /*listener*/this,
+ /*quirkSendPartials*/true);
return OK;
}
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index 52906ee..f2064fb 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -37,11 +37,11 @@ FrameProcessorBase::~FrameProcessorBase() {
}
status_t FrameProcessorBase::registerListener(int32_t minId,
- int32_t maxId, wp<FilteredListener> listener) {
+ int32_t maxId, wp<FilteredListener> listener, bool quirkSendPartials) {
Mutex::Autolock l(mInputMutex);
ALOGV("%s: Registering listener for frame id range %d - %d",
__FUNCTION__, minId, maxId);
- RangeListener rListener = { minId, maxId, listener };
+ RangeListener rListener = { minId, maxId, listener, quirkSendPartials };
mRangeListeners.push_back(rListener);
return OK;
}
@@ -145,6 +145,16 @@ status_t FrameProcessorBase::processListeners(const CameraMetadata &frame,
ATRACE_CALL();
camera_metadata_ro_entry_t entry;
+ // Quirks: Don't deliver partial results to listeners that don't want them
+ bool quirkIsPartial = false;
+ entry = frame.find(ANDROID_QUIRKS_PARTIAL_RESULT);
+ if (entry.count != 0 &&
+ entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
+ ALOGV("%s: Camera %d: Not forwarding partial result to listeners",
+ __FUNCTION__, device->getId());
+ quirkIsPartial = true;
+ }
+
entry = frame.find(ANDROID_REQUEST_ID);
if (entry.count == 0) {
ALOGE("%s: Camera %d: Error reading frame id",
@@ -160,7 +170,8 @@ status_t FrameProcessorBase::processListeners(const CameraMetadata &frame,
List<RangeListener>::iterator item = mRangeListeners.begin();
while (item != mRangeListeners.end()) {
if (requestId >= item->minId &&
- requestId < item->maxId) {
+ requestId < item->maxId &&
+ (!quirkIsPartial || item->quirkSendPartials) ) {
sp<FilteredListener> listener = item->listener.promote();
if (listener == 0) {
item = mRangeListeners.erase(item);
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.h b/services/camera/libcameraservice/common/FrameProcessorBase.h
index 4d80ebf..89b608a 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.h
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.h
@@ -44,9 +44,11 @@ class FrameProcessorBase: public Thread {
};
// Register a listener for a range of IDs [minId, maxId). Multiple listeners
- // can be listening to the same range
+ // can be listening to the same range.
+ // QUIRK: sendPartials controls whether partial results will be sent.
status_t registerListener(int32_t minId, int32_t maxId,
- wp<FilteredListener> listener);
+ wp<FilteredListener> listener,
+ bool quirkSendPartials = true);
status_t removeListener(int32_t minId, int32_t maxId,
wp<FilteredListener> listener);
@@ -64,6 +66,7 @@ class FrameProcessorBase: public Thread {
int32_t minId;
int32_t maxId;
wp<FilteredListener> listener;
+ bool quirkSendPartials;
};
List<RangeListener> mRangeListeners;
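Aside: a standalone sketch, with illustrative names, of the listener filter this adds to processListeners(): a partial result is only delivered to listeners that registered with quirkSendPartials == true.

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct RangeListener {
        int32_t minId;
        int32_t maxId;
        bool quirkSendPartials;
    };

    static bool shouldDeliver(const RangeListener& l, int32_t requestId,
                              bool quirkIsPartial) {
        return requestId >= l.minId && requestId < l.maxId &&
               (!quirkIsPartial || l.quirkSendPartials);
    }

    int main() {
        std::vector<RangeListener> listeners = {
            {0, 100, /*quirkSendPartials*/ true},   // wants early 3A metadata
            {0, 100, /*quirkSendPartials*/ false},  // only wants final results
        };
        int32_t requestId = 7;
        bool quirkIsPartial = true;  // frame carried ANDROID_QUIRKS_PARTIAL_RESULT
        for (const auto& l : listeners) {
            std::printf("deliver=%d\n", shouldDeliver(l, requestId, quirkIsPartial));
        }
        return 0;
    }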
diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
index 2bc1a8a..dc97c47 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.cpp
+++ b/services/camera/libcameraservice/device2/Camera2Device.cpp
@@ -25,6 +25,7 @@
#define ALOGVV(...) ((void)0)
#endif
+#include <inttypes.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <utils/Timers.h>
@@ -822,7 +823,7 @@ status_t Camera2Device::MetadataQueue::dump(int fd,
result.append(" Stream slot: Empty\n");
write(fd, result.string(), result.size());
} else {
- result.appendFormat(" Stream slot: %d entries\n",
+ result.appendFormat(" Stream slot: %zu entries\n",
mStreamSlot.size());
int i = 0;
for (List<camera_metadata_t*>::iterator r = mStreamSlot.begin();
@@ -837,7 +838,7 @@ status_t Camera2Device::MetadataQueue::dump(int fd,
result = " Main queue is empty\n";
write(fd, result.string(), result.size());
} else {
- result = String8::format(" Main queue has %d entries:\n",
+ result = String8::format(" Main queue has %zu entries:\n",
mEntries.size());
int i = 0;
for (List<camera_metadata_t*>::iterator r = mEntries.begin();
@@ -1214,11 +1215,11 @@ status_t Camera2Device::StreamAdapter::dump(int fd,
ATRACE_CALL();
String8 result = String8::format(" Stream %d: %d x %d, format 0x%x\n",
mId, mWidth, mHeight, mFormat);
- result.appendFormat(" size %d, usage 0x%x, requested format 0x%x\n",
+ result.appendFormat(" size %zu, usage 0x%x, requested format 0x%x\n",
mSize, mUsage, mFormatRequested);
result.appendFormat(" total buffers: %d, dequeued buffers: %d\n",
mTotalBuffers, mActiveBuffers);
- result.appendFormat(" frame count: %d, last timestamp %lld\n",
+ result.appendFormat(" frame count: %d, last timestamp %" PRId64 "\n",
mFrameCount, mLastTimestamp);
write(fd, result.string(), result.size());
return OK;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 6f2dc85..3dbc1b0 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -41,6 +41,7 @@
#include <utils/Trace.h>
#include <utils/Timers.h>
+#include "utils/CameraTraces.h"
#include "device3/Camera3Device.h"
#include "device3/Camera3OutputStream.h"
#include "device3/Camera3InputStream.h"
@@ -54,6 +55,7 @@ Camera3Device::Camera3Device(int id):
mId(id),
mHal3Device(NULL),
mStatus(STATUS_UNINITIALIZED),
+ mUsePartialResultQuirk(false),
mNextResultFrameNumber(0),
mNextShutterFrameNumber(0),
mListener(NULL)
@@ -192,6 +194,15 @@ status_t Camera3Device::initialize(camera_module_t *module)
mNeedConfig = true;
mPauseStateNotify = false;
+ /** Check for quirks */
+
+ // Will the HAL be sending in early partial result metadata?
+ camera_metadata_entry partialResultsQuirk =
+ mDeviceInfo.find(ANDROID_QUIRKS_USE_PARTIAL_RESULT);
+ if (partialResultsQuirk.count > 0 && partialResultsQuirk.data.u8[0] == 1) {
+ mUsePartialResultQuirk = true;
+ }
+
return OK;
}
@@ -1363,6 +1374,10 @@ void Camera3Device::setErrorStateLockedV(const char *fmt, va_list args) {
// But only do error state transition steps for the first error
if (mStatus == STATUS_ERROR || mStatus == STATUS_UNINITIALIZED) return;
+ // Save stack trace. View by dumping it later.
+ CameraTraces::saveTrace();
+ // TODO: consider adding errorCause and client pid/procname
+
mErrorCause = errorCause;
mRequestThread->setPaused(true);
@@ -1386,6 +1401,175 @@ status_t Camera3Device::registerInFlight(int32_t frameNumber,
}
/**
+ * QUIRK(partial results)
+ * Check if all 3A fields are ready, and send off a partial 3A-only result
+ * to the output frame queue
+ */
+bool Camera3Device::processPartial3AQuirk(
+ int32_t frameNumber, int32_t requestId,
+ const CameraMetadata& partial) {
+
+ // Check if all 3A states are present
+ // The full list of fields is
+ // android.control.afMode
+ // android.control.awbMode
+ // android.control.aeState
+ // android.control.awbState
+ // android.control.afState
+ // android.control.afTriggerID
+ // android.control.aePrecaptureID
+ // TODO: Add android.control.aeMode
+
+ bool gotAllStates = true;
+
+ uint8_t afMode;
+ uint8_t awbMode;
+ uint8_t aeState;
+ uint8_t afState;
+ uint8_t awbState;
+ int32_t afTriggerId;
+ int32_t aeTriggerId;
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_MODE,
+ &afMode, frameNumber);
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AWB_MODE,
+ &awbMode, frameNumber);
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AE_STATE,
+ &aeState, frameNumber);
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_STATE,
+ &afState, frameNumber);
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AWB_STATE,
+ &awbState, frameNumber);
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_TRIGGER_ID,
+ &afTriggerId, frameNumber);
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AE_PRECAPTURE_ID,
+ &aeTriggerId, frameNumber);
+
+ if (!gotAllStates) return false;
+
+ ALOGVV("%s: Camera %d: Frame %d, Request ID %d: AF mode %d, AWB mode %d, "
+ "AF state %d, AE state %d, AWB state %d, "
+ "AF trigger %d, AE precapture trigger %d",
+ __FUNCTION__, mId, frameNumber, requestId,
+ afMode, awbMode,
+ afState, aeState, awbState,
+ afTriggerId, aeTriggerId);
+
+ // Got all states, so construct a minimal result to send
+ // In addition to the above fields, this means adding in
+ // android.request.frameCount
+ // android.request.requestId
+ // android.quirks.partialResult
+
+ const size_t kMinimal3AResultEntries = 10;
+
+ Mutex::Autolock l(mOutputLock);
+
+ CameraMetadata& min3AResult =
+ *mResultQueue.insert(
+ mResultQueue.end(),
+ CameraMetadata(kMinimal3AResultEntries, /*dataCapacity*/ 0));
+
+ if (!insert3AResult(min3AResult, ANDROID_REQUEST_FRAME_COUNT,
+ &frameNumber, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_REQUEST_ID,
+ &requestId, frameNumber)) {
+ return false;
+ }
+
+ static const uint8_t partialResult = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
+ if (!insert3AResult(min3AResult, ANDROID_QUIRKS_PARTIAL_RESULT,
+ &partialResult, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AF_MODE,
+ &afMode, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AWB_MODE,
+ &awbMode, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AE_STATE,
+ &aeState, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AF_STATE,
+ &afState, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AWB_STATE,
+ &awbState, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AF_TRIGGER_ID,
+ &afTriggerId, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AE_PRECAPTURE_ID,
+ &aeTriggerId, frameNumber)) {
+ return false;
+ }
+
+ mResultSignal.signal();
+
+ return true;
+}
+
+template<typename T>
+bool Camera3Device::get3AResult(const CameraMetadata& result, int32_t tag,
+ T* value, int32_t frameNumber) {
+ (void) frameNumber;
+
+ camera_metadata_ro_entry_t entry;
+
+ entry = result.find(tag);
+ if (entry.count == 0) {
+ ALOGVV("%s: Camera %d: Frame %d: No %s provided by HAL!", __FUNCTION__,
+ mId, frameNumber, get_camera_metadata_tag_name(tag));
+ return false;
+ }
+
+ if (sizeof(T) == sizeof(uint8_t)) {
+ *value = entry.data.u8[0];
+ } else if (sizeof(T) == sizeof(int32_t)) {
+ *value = entry.data.i32[0];
+ } else {
+ ALOGE("%s: Unexpected type", __FUNCTION__);
+ return false;
+ }
+ return true;
+}
+
+template<typename T>
+bool Camera3Device::insert3AResult(CameraMetadata& result, int32_t tag,
+ const T* value, int32_t frameNumber) {
+ if (result.update(tag, value, 1) != NO_ERROR) {
+ mResultQueue.erase(--mResultQueue.end(), mResultQueue.end());
+ SET_ERR("Frame %d: Failed to set %s in partial metadata",
+ frameNumber, get_camera_metadata_tag_name(tag));
+ return false;
+ }
+ return true;
+}
+
+/**
* Camera HAL device callback methods
*/
@@ -1400,6 +1584,8 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
frameNumber);
return;
}
+ bool partialResultQuirk = false;
+ CameraMetadata collectedQuirkResult;
// Get capture timestamp from list of in-flight requests, where it was added
// by the shutter notification for this frame. Then update the in-flight
@@ -1415,24 +1601,58 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
return;
}
InFlightRequest &request = mInFlightMap.editValueAt(idx);
+
+ // Check if this result carries only partial metadata
+ if (mUsePartialResultQuirk && result->result != NULL) {
+ camera_metadata_ro_entry_t partialResultEntry;
+ res = find_camera_metadata_ro_entry(result->result,
+ ANDROID_QUIRKS_PARTIAL_RESULT, &partialResultEntry);
+ if (res != NAME_NOT_FOUND &&
+ partialResultEntry.count > 0 &&
+ partialResultEntry.data.u8[0] ==
+ ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
+ // A partial result. Flag this as such, and collect this
+ // set of metadata into the in-flight entry.
+ partialResultQuirk = true;
+ request.partialResultQuirk.collectedResult.append(
+ result->result);
+ request.partialResultQuirk.collectedResult.erase(
+ ANDROID_QUIRKS_PARTIAL_RESULT);
+ // Fire off a 3A-only result if possible
+ if (!request.partialResultQuirk.haveSent3A) {
+ request.partialResultQuirk.haveSent3A =
+ processPartial3AQuirk(frameNumber,
+ request.requestId,
+ request.partialResultQuirk.collectedResult);
+ }
+ }
+ }
+
timestamp = request.captureTimestamp;
/**
- * One of the following must happen before it's legal to call process_capture_result:
+ * One of the following must happen before it's legal to call process_capture_result,
+ * unless partial metadata is being provided:
* - CAMERA3_MSG_SHUTTER (expected during normal operation)
* - CAMERA3_MSG_ERROR (expected during flush)
*/
- if (request.requestStatus == OK && timestamp == 0) {
+ if (request.requestStatus == OK && timestamp == 0 && !partialResultQuirk) {
SET_ERR("Called before shutter notify for frame %d",
frameNumber);
return;
}
- if (result->result != NULL) {
+ // Did we get the (final) result metadata for this capture?
+ if (result->result != NULL && !partialResultQuirk) {
if (request.haveResultMetadata) {
SET_ERR("Called multiple times with metadata for frame %d",
frameNumber);
return;
}
+ if (mUsePartialResultQuirk &&
+ !request.partialResultQuirk.collectedResult.isEmpty()) {
+ collectedQuirkResult.acquire(
+ request.partialResultQuirk.collectedResult);
+ }
request.haveResultMetadata = true;
}
@@ -1444,6 +1664,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
return;
}
+ // Check if everything has arrived for this result (buffers and metadata)
if (request.haveResultMetadata && request.numBuffersLeft == 0) {
ATRACE_ASYNC_END("frame capture", frameNumber);
mInFlightMap.removeItemsAt(idx, 1);
@@ -1458,9 +1679,12 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
}
// Process the result metadata, if provided
- if (result->result != NULL) {
+ bool gotResult = false;
+ if (result->result != NULL && !partialResultQuirk) {
Mutex::Autolock l(mOutputLock);
+ gotResult = true;
+
if (frameNumber != mNextResultFrameNumber) {
SET_ERR("Out-of-order capture result metadata submitted! "
"(got frame number %d, expecting %d)",
@@ -1469,19 +1693,26 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
}
mNextResultFrameNumber++;
- CameraMetadata &captureResult =
- *mResultQueue.insert(mResultQueue.end(), CameraMetadata());
-
+ CameraMetadata captureResult;
captureResult = result->result;
+
if (captureResult.update(ANDROID_REQUEST_FRAME_COUNT,
(int32_t*)&frameNumber, 1) != OK) {
SET_ERR("Failed to set frame# in metadata (%d)",
frameNumber);
+ gotResult = false;
} else {
ALOGVV("%s: Camera %d: Set frame# in metadata (%d)",
__FUNCTION__, mId, frameNumber);
}
+ // Append any previous partials to form a complete result
+ if (mUsePartialResultQuirk && !collectedQuirkResult.isEmpty()) {
+ captureResult.append(collectedQuirkResult);
+ }
+
+ captureResult.sort();
+
// Check that there's a timestamp in the result metadata
camera_metadata_entry entry =
@@ -1489,10 +1720,19 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
if (entry.count == 0) {
SET_ERR("No timestamp provided by HAL for frame %d!",
frameNumber);
+ gotResult = false;
} else if (timestamp != entry.data.i64[0]) {
SET_ERR("Timestamp mismatch between shutter notify and result"
" metadata for frame %d (%lld vs %lld respectively)",
frameNumber, timestamp, entry.data.i64[0]);
+ gotResult = false;
+ }
+
+ if (gotResult) {
+ // Valid result, insert into queue
+ CameraMetadata& queuedResult =
+ *mResultQueue.insert(mResultQueue.end(), CameraMetadata());
+ queuedResult.swap(captureResult);
}
} // scope for mOutputLock
@@ -1512,7 +1752,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
// Finally, signal any waiters for new frames
- if (result->result != NULL) {
+ if (gotResult) {
mResultSignal.signal();
}
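Aside: an illustrative standalone analogue (std containers, made-up names) of the per-frame bookkeeping Camera3Device now performs for the partial result quirk: partial metadata accumulates on the in-flight entry, a 3A-only notification is issued at most once, and the collected partials are appended to the final result before it is queued.

    #include <cstdint>
    #include <map>
    #include <string>
    #include <vector>

    struct InFlight {
        bool haveSent3A = false;
        std::vector<std::string> collectedPartials;  // stand-in for CameraMetadata
    };

    struct Tracker {
        std::map<int32_t, InFlight> inFlight;

        void onPartial(int32_t frame, const std::string& partial) {
            InFlight& r = inFlight[frame];
            r.collectedPartials.push_back(partial);
            if (!r.haveSent3A) {
                // In the real code this only flips once all 3A fields arrived.
                r.haveSent3A = true;
            }
        }

        std::vector<std::string> onFinal(int32_t frame, const std::string& result) {
            InFlight r = inFlight[frame];
            inFlight.erase(frame);
            std::vector<std::string> full = r.collectedPartials;  // earlier partials
            full.push_back(result);                               // plus final data
            return full;
        }
    };

    int main() {
        Tracker t;
        t.onPartial(0, "3A states");
        t.onFinal(0, "full capture metadata");
        return 0;
    }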
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 12252c8..468f641 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -188,6 +188,9 @@ class Camera3Device :
// Need to hold on to stream references until configure completes.
Vector<sp<camera3::Camera3StreamInterface> > mDeletedStreams;
+ // Whether quirk ANDROID_QUIRKS_USE_PARTIAL_RESULT is enabled
+ bool mUsePartialResultQuirk;
+
/**** End scope for mLock ****/
class CaptureRequest : public LightRefBase<CaptureRequest> {
@@ -445,6 +448,18 @@ class Camera3Device :
// buffers
int numBuffersLeft;
+ // Fields used by the partial result quirk only
+ struct PartialResultQuirkInFlight {
+ // Set by process_capture_result once 3A has been sent to clients
+ bool haveSent3A;
+ // Result metadata collected so far, when partial results are in use
+ CameraMetadata collectedResult;
+
+ PartialResultQuirkInFlight():
+ haveSent3A(false) {
+ }
+ } partialResultQuirk;
+
// Default constructor needed by KeyedVector
InFlightRequest() :
requestId(0),
@@ -472,6 +487,22 @@ class Camera3Device :
int32_t numBuffers);
/**
+ * For the partial result quirk, check if all 3A state fields are available
+ * and if so, queue up 3A-only result to the client. Returns true if 3A
+ * is sent.
+ */
+ bool processPartial3AQuirk(int32_t frameNumber, int32_t requestId,
+ const CameraMetadata& partial);
+
+ // Helpers for reading and writing 3A metadata into to/from partial results
+ template<typename T>
+ bool get3AResult(const CameraMetadata& result, int32_t tag,
+ T* value, int32_t frameNumber);
+
+ template<typename T>
+ bool insert3AResult(CameraMetadata &result, int32_t tag, const T* value,
+ int32_t frameNumber);
+ /**
* Tracking for idle detection
*/
sp<camera3::StatusTracker> mStatusTracker;
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index da51228..42e02d8 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -70,12 +70,12 @@ void Camera3IOStreamBase::dump(int fd, const Vector<String16> &args) const {
lines.appendFormat(" Dims: %d x %d, format 0x%x\n",
camera3_stream::width, camera3_stream::height,
camera3_stream::format);
- lines.appendFormat(" Max size: %d\n", mMaxSize);
+ lines.appendFormat(" Max size: %zu\n", mMaxSize);
lines.appendFormat(" Usage: %d, max HAL buffers: %d\n",
camera3_stream::usage, camera3_stream::max_buffers);
lines.appendFormat(" Frames produced: %d, last timestamp: %lld ns\n",
mFrameCount, mLastTimestamp);
- lines.appendFormat(" Total buffers: %d, currently dequeued: %d\n",
+ lines.appendFormat(" Total buffers: %zu, currently dequeued: %zu\n",
mTotalBufferCount, mDequeuedBufferCount);
write(fd, lines.string(), lines.size());
}
diff --git a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
index 04f5dc5..1a54923 100644
--- a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
@@ -271,7 +271,7 @@ void Camera3ZslStream::dump(int fd, const Vector<String16> &args) const {
Camera3IOStreamBase::dump(fd, args);
lines = String8();
- lines.appendFormat(" Input buffers pending: %d, in flight %d\n",
+ lines.appendFormat(" Input buffers pending: %zu, in flight %zu\n",
mInputBufferQueue.size(), mBuffersInFlight.size());
write(fd, lines.string(), lines.size());
}
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index ebc7ea7..9a6dc28 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -21,11 +21,11 @@
#include <gui/RingBufferConsumer.h>
-#define BI_LOGV(x, ...) ALOGV("[%s] "x, mName.string(), ##__VA_ARGS__)
-#define BI_LOGD(x, ...) ALOGD("[%s] "x, mName.string(), ##__VA_ARGS__)
-#define BI_LOGI(x, ...) ALOGI("[%s] "x, mName.string(), ##__VA_ARGS__)
-#define BI_LOGW(x, ...) ALOGW("[%s] "x, mName.string(), ##__VA_ARGS__)
-#define BI_LOGE(x, ...) ALOGE("[%s] "x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGV(x, ...) ALOGV("[%s] " x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGD(x, ...) ALOGD("[%s] " x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGI(x, ...) ALOGI("[%s] " x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGW(x, ...) ALOGW("[%s] " x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGE(x, ...) ALOGE("[%s] " x, mName.string(), ##__VA_ARGS__)
#undef assert
#define assert(x) ALOG_ASSERT((x), #x)
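Aside: the added space in the BI_LOG* macros matters because C++11 parses "literal"x as a user-defined-literal suffix, so string-literal concatenation with a macro parameter needs whitespace in between. A minimal illustration (macro names are made up):

    #include <cstdio>

    // OK: "[tag] " and the expanded fmt are two adjacent string literals,
    // which the compiler concatenates.
    #define TAGGED_OK(fmt, ...)  std::printf("[tag] " fmt, ##__VA_ARGS__)
    // #define TAGGED_BAD(fmt, ...) std::printf("[tag] "fmt, ##__VA_ARGS__)
    //   ^ rejected or warned about under -std=c++11: looks like a literal suffix.

    int main() {
        TAGGED_OK("value=%d\n", 42);  // expands to "[tag] " "value=%d\n"
        return 0;
    }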
diff --git a/services/camera/libcameraservice/utils/CameraTraces.cpp b/services/camera/libcameraservice/utils/CameraTraces.cpp
new file mode 100644
index 0000000..346e15f
--- /dev/null
+++ b/services/camera/libcameraservice/utils/CameraTraces.cpp
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraTraces"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include "utils/CameraTraces.h"
+#include <utils/ProcessCallStack.h>
+
+#include <utils/Mutex.h>
+#include <utils/List.h>
+
+#include <utils/Log.h>
+#include <cutils/trace.h>
+
+namespace android {
+namespace camera3 {
+
+struct CameraTracesImpl {
+ Mutex tracesLock;
+ List<ProcessCallStack> pcsList;
+}; // class CameraTraces::Impl;
+
+static CameraTracesImpl gImpl;
+CameraTracesImpl& CameraTraces::sImpl = gImpl;
+
+void CameraTraces::saveTrace() {
+ ALOGV("%s: begin", __FUNCTION__);
+ ATRACE_BEGIN("CameraTraces::saveTrace");
+ Mutex::Autolock al(sImpl.tracesLock);
+
+ List<ProcessCallStack>& pcsList = sImpl.pcsList;
+
+ // Insert new ProcessCallStack, and immediately crawl all the threads
+ pcsList.push_front(ProcessCallStack());
+ ProcessCallStack& pcs = *pcsList.begin();
+ pcs.update();
+
+ if (pcsList.size() > MAX_TRACES) {
+ // Prune list periodically and discard oldest entry
+ pcsList.erase(--pcsList.end());
+ }
+
+ IF_ALOGV() {
+ pcs.log(LOG_TAG, ANDROID_LOG_VERBOSE);
+ }
+
+ ALOGD("Process trace saved. Use dumpsys media.camera to view.");
+
+ ATRACE_END();
+}
+
+status_t CameraTraces::dump(int fd, const Vector<String16> &args __attribute__((unused))) {
+ ALOGV("%s: fd = %d", __FUNCTION__, fd);
+ Mutex::Autolock al(sImpl.tracesLock);
+ List<ProcessCallStack>& pcsList = sImpl.pcsList;
+
+ if (fd < 0) {
+ ALOGW("%s: Negative FD (%d)", __FUNCTION__, fd);
+ return BAD_VALUE;
+ }
+
+ fdprintf(fd, "Camera traces (%zu):\n", pcsList.size());
+
+ if (pcsList.empty()) {
+ fdprintf(fd, " No camera traces collected.\n");
+ }
+
+ // Print newest items first
+ List<ProcessCallStack>::iterator it, end;
+ for (it = pcsList.begin(), end = pcsList.end(); it != end; ++it) {
+ const ProcessCallStack& pcs = *it;
+ pcs.dump(fd, DUMP_INDENT);
+ }
+
+ return OK;
+}
+
+}; // namespace camera3
+}; // namespace android
diff --git a/services/camera/libcameraservice/utils/CameraTraces.h b/services/camera/libcameraservice/utils/CameraTraces.h
new file mode 100644
index 0000000..d10dbc9
--- /dev/null
+++ b/services/camera/libcameraservice/utils/CameraTraces.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_TRACES_H_
+#define ANDROID_SERVERS_CAMERA_TRACES_H_
+
+#include <utils/Errors.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+
+namespace android {
+namespace camera3 {
+
+class CameraTracesImpl;
+
+// Collect a list of the process's stack traces
+class CameraTraces {
+public:
+ /**
+ * Save the current stack trace for each thread in the process. At most
+ * MAX_TRACES will be saved, after which the oldest traces will be discarded.
+ *
+ * <p>Use CameraTraces::dump to print out the traces.</p>
+ */
+ static void saveTrace();
+
+ /**
+ * Prints all saved traces to the specified file descriptor.
+ *
+ * <p>Each line is indented by DUMP_INDENT spaces.</p>
+ */
+ static status_t dump(int fd, const Vector<String16>& args);
+
+private:
+ enum {
+ // Don't collect more than 100 traces. Discard oldest.
+ MAX_TRACES = 100,
+
+ // Insert 2 spaces when dumping the traces
+ DUMP_INDENT = 2,
+ };
+
+ CameraTraces();
+ ~CameraTraces();
+ CameraTraces(CameraTraces& rhs);
+
+ static CameraTracesImpl& sImpl;
+}; // class CameraTraces
+
+}; // namespace camera3
+}; // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_TRACES_H_
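Aside: a standalone sketch of the bounded, most-recent-first history CameraTraces keeps (std::list instead of android::List; names are illustrative): new traces are pushed to the front, and the oldest entry is dropped once the cap is exceeded.

    #include <cstddef>
    #include <cstdio>
    #include <list>
    #include <string>
    #include <utility>

    class BoundedHistory {
    public:
        explicit BoundedHistory(std::size_t maxEntries) : mMax(maxEntries) {}

        void save(std::string entry) {
            mEntries.push_front(std::move(entry));
            if (mEntries.size() > mMax) {
                mEntries.pop_back();  // discard the oldest entry
            }
        }

        void dump(FILE* out) const {
            std::fprintf(out, "Entries (%zu):\n", mEntries.size());
            for (const auto& e : mEntries) {   // newest first
                std::fprintf(out, "  %s\n", e.c_str());
            }
        }

    private:
        std::size_t mMax;
        std::list<std::string> mEntries;
    };

    int main() {
        BoundedHistory history(/*maxEntries*/ 2);
        history.save("trace #1");
        history.save("trace #2");
        history.save("trace #3");   // evicts "trace #1"
        history.dump(stdout);
        return 0;
    }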