Diffstat (limited to 'services/camera/libcameraservice/api1')
-rw-r--r--  services/camera/libcameraservice/api1/Camera2Client.cpp                |  77
-rw-r--r--  services/camera/libcameraservice/api1/client2/BurstCapture.cpp         |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/BurstCapture.h           |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp    |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/CallbackProcessor.h      |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/FrameProcessor.cpp       |  49
-rw-r--r--  services/camera/libcameraservice/api1/client2/JpegProcessor.cpp        |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/JpegProcessor.h          |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.cpp           | 110
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.h             |  30
-rw-r--r--  services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp   |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/StreamingProcessor.h     |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor.cpp         |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor.h           |   2
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp        |   2
15 files changed, 191 insertions(+), 97 deletions(-)
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index ec1a341..60939f9 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -420,12 +420,20 @@ void Camera2Client::disconnect() {
ALOGV("Camera %d: Waiting for threads", mCameraId);
- mStreamingProcessor->join();
- mFrameProcessor->join();
- mCaptureSequencer->join();
- mJpegProcessor->join();
- mZslProcessorThread->join();
- mCallbackProcessor->join();
+ {
+ // Don't wait with lock held, in case the other threads need to
+ // complete callbacks that re-enter Camera2Client
+ mBinderSerializationLock.unlock();
+
+ mStreamingProcessor->join();
+ mFrameProcessor->join();
+ mCaptureSequencer->join();
+ mJpegProcessor->join();
+ mZslProcessorThread->join();
+ mCallbackProcessor->join();
+
+ mBinderSerializationLock.lock();
+ }
ALOGV("Camera %d: Deleting streams", mCameraId);
@@ -922,13 +930,6 @@ void Camera2Client::stopPreviewL() {
"stop preview: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
}
- {
- // Ideally we should recover the override after recording stopped, but
- // right now recording stream will live until here, so we are forced to
- // recover here. TODO: find a better way to handle that (b/17495165)
- SharedParameters::Lock l(mParameters);
- l.mParameters.recoverOverriddenJpegSize();
- }
// no break
case Parameters::WAITING_FOR_PREVIEW_WINDOW: {
SharedParameters::Lock l(mParameters);
@@ -1199,6 +1200,28 @@ void Camera2Client::stopRecording() {
mCameraService->playSound(CameraService::SOUND_RECORDING);
+ // Remove recording stream to prevent it from slowing down takePicture later
+ if (!l.mParameters.recordingHint && l.mParameters.isJpegSizeOverridden()) {
+ res = stopStream();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
+ __FUNCTION__, mCameraId, strerror(-res), res);
+ }
+ res = mDevice->waitUntilDrained();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
+ __FUNCTION__, mCameraId, strerror(-res), res);
+ }
+ // Clean up recording stream
+ res = mStreamingProcessor->deleteRecordingStream();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to delete recording stream before "
+ "stop preview: %s (%d)",
+ __FUNCTION__, mCameraId, strerror(-res), res);
+ }
+ l.mParameters.recoverOverriddenJpegSize();
+ }
+
res = startPreviewL(l.mParameters, true);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to return to preview",
@@ -1381,6 +1404,34 @@ status_t Camera2Client::takePicture(int msgType) {
return res;
}
l.mParameters.state = Parameters::STILL_CAPTURE;
+
+ // Remove recording stream to prevent video snapshot jpeg logic kicking in
+ if (l.mParameters.isJpegSizeOverridden() &&
+ mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
+ res = mStreamingProcessor->togglePauseStream(/*pause*/true);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
+ __FUNCTION__, mCameraId, strerror(-res), res);
+ }
+ res = mDevice->waitUntilDrained();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
+ __FUNCTION__, mCameraId, strerror(-res), res);
+ }
+ // Clean up recording stream
+ res = mStreamingProcessor->deleteRecordingStream();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to delete recording stream before "
+ "stop preview: %s (%d)",
+ __FUNCTION__, mCameraId, strerror(-res), res);
+ }
+ res = mStreamingProcessor->togglePauseStream(/*pause*/false);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
+ __FUNCTION__, mCameraId, strerror(-res), res);
+ }
+ l.mParameters.recoverOverriddenJpegSize();
+ }
break;
case Parameters::RECORD:
// Good to go for video snapshot
diff --git a/services/camera/libcameraservice/api1/client2/BurstCapture.cpp b/services/camera/libcameraservice/api1/client2/BurstCapture.cpp
index 0bfdfd4..5502dcb 100644
--- a/services/camera/libcameraservice/api1/client2/BurstCapture.cpp
+++ b/services/camera/libcameraservice/api1/client2/BurstCapture.cpp
@@ -44,7 +44,7 @@ status_t BurstCapture::start(Vector<CameraMetadata> &/*metadatas*/,
return INVALID_OPERATION;
}
-void BurstCapture::onFrameAvailable() {
+void BurstCapture::onFrameAvailable(const BufferItem &/*item*/) {
ALOGV("%s", __FUNCTION__);
Mutex::Autolock l(mInputMutex);
if(!mInputChanged) {
diff --git a/services/camera/libcameraservice/api1/client2/BurstCapture.h b/services/camera/libcameraservice/api1/client2/BurstCapture.h
index ea321fd..c3b7722 100644
--- a/services/camera/libcameraservice/api1/client2/BurstCapture.h
+++ b/services/camera/libcameraservice/api1/client2/BurstCapture.h
@@ -39,7 +39,7 @@ public:
BurstCapture(wp<Camera2Client> client, wp<CaptureSequencer> sequencer);
virtual ~BurstCapture();
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
virtual status_t start(Vector<CameraMetadata> &metadatas, int32_t firstCaptureId);
protected:
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index bf3318e..eadaa00 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -46,7 +46,7 @@ CallbackProcessor::~CallbackProcessor() {
deleteStream();
}
-void CallbackProcessor::onFrameAvailable() {
+void CallbackProcessor::onFrameAvailable(const BufferItem& /*item*/) {
Mutex::Autolock l(mInputMutex);
if (!mCallbackAvailable) {
mCallbackAvailable = true;
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
index 613f5be..7fdc329 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
@@ -44,7 +44,7 @@ class CallbackProcessor:
CallbackProcessor(sp<Camera2Client> client);
~CallbackProcessor();
- void onFrameAvailable();
+ void onFrameAvailable(const BufferItem& item);
// Set to NULL to disable the direct-to-app callback window
status_t setCallbackWindow(sp<ANativeWindow> callbackWindow);
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 312a78c..40d53b3 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -168,6 +168,19 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
faceIds = entry.data.i32;
}
+ entry = frame.find(ANDROID_SCALER_CROP_REGION);
+ if (entry.count < 4) {
+ ALOGE("%s: Camera %d: Unable to read crop region (count = %d)",
+ __FUNCTION__, client->getCameraId(), entry.count);
+ return res;
+ }
+
+ Parameters::CropRegion scalerCrop = {
+ static_cast<float>(entry.data.i32[0]),
+ static_cast<float>(entry.data.i32[1]),
+ static_cast<float>(entry.data.i32[2]),
+ static_cast<float>(entry.data.i32[3])};
+
faces.setCapacity(metadata.number_of_faces);
size_t maxFaces = metadata.number_of_faces;
@@ -183,26 +196,30 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
camera_face_t face;
- face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
- face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
- face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
- face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);
+ face.rect[0] = l.mParameters.arrayXToNormalizedWithCrop(
+ faceRects[i*4 + 0], scalerCrop);
+ face.rect[1] = l.mParameters.arrayYToNormalizedWithCrop(
+ faceRects[i*4 + 1], scalerCrop);
+ face.rect[2] = l.mParameters.arrayXToNormalizedWithCrop(
+ faceRects[i*4 + 2], scalerCrop);
+ face.rect[3] = l.mParameters.arrayYToNormalizedWithCrop(
+ faceRects[i*4 + 3], scalerCrop);
face.score = faceScores[i];
if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
face.id = faceIds[i];
- face.left_eye[0] =
- l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
- face.left_eye[1] =
- l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
- face.right_eye[0] =
- l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
- face.right_eye[1] =
- l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
- face.mouth[0] =
- l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
- face.mouth[1] =
- l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
+ face.left_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
+ faceLandmarks[i*6 + 0], scalerCrop);
+ face.left_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
+ faceLandmarks[i*6 + 1], scalerCrop);
+ face.right_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
+ faceLandmarks[i*6 + 2], scalerCrop);
+ face.right_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
+ faceLandmarks[i*6 + 3], scalerCrop);
+ face.mouth[0] = l.mParameters.arrayXToNormalizedWithCrop(
+ faceLandmarks[i*6 + 4], scalerCrop);
+ face.mouth[1] = l.mParameters.arrayYToNormalizedWithCrop(
+ faceLandmarks[i*6 + 5], scalerCrop);
} else {
face.id = 0;
face.left_eye[0] = face.left_eye[1] = -2000;
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index b433781..2772267 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -51,7 +51,7 @@ JpegProcessor::~JpegProcessor() {
deleteStream();
}
-void JpegProcessor::onFrameAvailable() {
+void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) {
Mutex::Autolock l(mInputMutex);
if (!mCaptureAvailable) {
mCaptureAvailable = true;
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.h b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
index b2c05df..2040b30 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
@@ -47,7 +47,7 @@ class JpegProcessor:
~JpegProcessor();
// CpuConsumer listener implementation
- void onFrameAvailable();
+ void onFrameAvailable(const BufferItem& item);
status_t updateStream(const Parameters &params);
status_t deleteStream();
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 7b90d28..4f4cfb0 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -596,6 +596,10 @@ status_t Parameters::initialize(const CameraMetadata *info, int deviceVersion) {
supportedSceneModes +=
CameraParameters::SCENE_MODE_BARCODE;
break;
+ case ANDROID_CONTROL_SCENE_MODE_HDR:
+ supportedSceneModes +=
+ CameraParameters::SCENE_MODE_HDR;
+ break;
default:
ALOGW("%s: Camera %d: Unknown scene mode value: %d",
__FUNCTION__, cameraId,
@@ -2203,6 +2207,10 @@ status_t Parameters::recoverOverriddenJpegSize() {
return OK;
}
+bool Parameters::isJpegSizeOverridden() {
+ return pictureSizeOverriden;
+}
+
const char* Parameters::getStateName(State state) {
#define CASE_ENUM_TO_CHAR(x) case x: return(#x); break;
switch(state) {
@@ -2382,6 +2390,8 @@ int Parameters::sceneModeStringToEnum(const char *sceneMode) {
ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT :
!strcmp(sceneMode, CameraParameters::SCENE_MODE_BARCODE) ?
ANDROID_CONTROL_SCENE_MODE_BARCODE:
+ !strcmp(sceneMode, CameraParameters::SCENE_MODE_HDR) ?
+ ANDROID_CONTROL_SCENE_MODE_HDR:
-1;
}
@@ -2619,75 +2629,71 @@ int Parameters::normalizedYToCrop(int y) const {
return (y + 1000) * (previewCrop.height - 1) / 2000;
}
-int Parameters::arrayXToCrop(int x) const {
- CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW);
- return x - previewCrop.left;
-}
-
-int Parameters::arrayYToCrop(int y) const {
- CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW);
- return y - previewCrop.top;
-}
+int Parameters::normalizedXToArray(int x) const {
-int Parameters::cropXToNormalized(int x) const {
- CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW);
- return x * 2000 / (previewCrop.width - 1) - 1000;
-}
+ // Work-around for HAL pre-scaling the coordinates themselves
+ if (quirks.meteringCropRegion) {
+ return (x + 1000) * (fastInfo.arrayWidth - 1) / 2000;
+ }
-int Parameters::cropYToNormalized(int y) const {
- CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW);
- return y * 2000 / (previewCrop.height - 1) - 1000;
+ return cropXToArray(normalizedXToCrop(x));
}
-int Parameters::arrayXToNormalized(int width) const {
- int ret = cropXToNormalized(arrayXToCrop(width));
-
- ALOG_ASSERT(ret >= -1000, "Calculated normalized value out of "
- "lower bounds %d", ret);
- ALOG_ASSERT(ret <= 1000, "Calculated normalized value out of "
- "upper bounds %d", ret);
-
+int Parameters::normalizedYToArray(int y) const {
// Work-around for HAL pre-scaling the coordinates themselves
if (quirks.meteringCropRegion) {
- return width * 2000 / (fastInfo.arrayWidth - 1) - 1000;
+ return (y + 1000) * (fastInfo.arrayHeight - 1) / 2000;
}
- return ret;
+ return cropYToArray(normalizedYToCrop(y));
}
-int Parameters::arrayYToNormalized(int height) const {
- int ret = cropYToNormalized(arrayYToCrop(height));
- ALOG_ASSERT(ret >= -1000, "Calculated normalized value out of lower bounds"
- " %d", ret);
- ALOG_ASSERT(ret <= 1000, "Calculated normalized value out of upper bounds"
- " %d", ret);
+Parameters::CropRegion Parameters::calculatePreviewCrop(
+ const CropRegion &scalerCrop) const {
+ float left, top, width, height;
+ float previewAspect = static_cast<float>(previewWidth) / previewHeight;
+ float cropAspect = scalerCrop.width / scalerCrop.height;
- // Work-around for HAL pre-scaling the coordinates themselves
- if (quirks.meteringCropRegion) {
- return height * 2000 / (fastInfo.arrayHeight - 1) - 1000;
+ if (previewAspect > cropAspect) {
+ width = scalerCrop.width;
+ height = cropAspect * scalerCrop.height / previewAspect;
+
+ left = scalerCrop.left;
+ top = scalerCrop.top + (scalerCrop.height - height) / 2;
+ } else {
+ width = previewAspect * scalerCrop.width / cropAspect;
+ height = scalerCrop.height;
+
+ left = scalerCrop.left + (scalerCrop.width - width) / 2;
+ top = scalerCrop.top;
}
- return ret;
-}
+ CropRegion previewCrop = {left, top, width, height};
-int Parameters::normalizedXToArray(int x) const {
+ return previewCrop;
+}
+int Parameters::arrayXToNormalizedWithCrop(int x,
+ const CropRegion &scalerCrop) const {
// Work-around for HAL pre-scaling the coordinates themselves
if (quirks.meteringCropRegion) {
- return (x + 1000) * (fastInfo.arrayWidth - 1) / 2000;
+ return x * 2000 / (fastInfo.arrayWidth - 1) - 1000;
+ } else {
+ CropRegion previewCrop = calculatePreviewCrop(scalerCrop);
+ return (x - previewCrop.left) * 2000 / (previewCrop.width - 1) - 1000;
}
-
- return cropXToArray(normalizedXToCrop(x));
}
-int Parameters::normalizedYToArray(int y) const {
+int Parameters::arrayYToNormalizedWithCrop(int y,
+ const CropRegion &scalerCrop) const {
// Work-around for HAL pre-scaling the coordinates themselves
if (quirks.meteringCropRegion) {
- return (y + 1000) * (fastInfo.arrayHeight - 1) / 2000;
+ return y * 2000 / (fastInfo.arrayHeight - 1) - 1000;
+ } else {
+ CropRegion previewCrop = calculatePreviewCrop(scalerCrop);
+ return (y - previewCrop.top) * 2000 / (previewCrop.height - 1) - 1000;
}
-
- return cropYToArray(normalizedYToCrop(y));
}
status_t Parameters::getFilteredSizes(Size limit, Vector<Size> *sizes) {
@@ -2954,6 +2960,10 @@ status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov)
staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2);
if (!sensorSize.count) return NO_INIT;
+ camera_metadata_ro_entry_t pixelArraySize =
+ staticInfo(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 2, 2);
+ if (!pixelArraySize.count) return NO_INIT;
+
float arrayAspect = static_cast<float>(fastInfo.arrayWidth) /
fastInfo.arrayHeight;
float stillAspect = static_cast<float>(pictureWidth) / pictureHeight;
@@ -3003,6 +3013,16 @@ status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov)
vertCropFactor = (arrayAspect < stillAspect) ?
(arrayAspect / stillAspect) : 1.f;
}
+
+ /**
+ * Convert the crop factors w.r.t the active array size to the crop factors
+ * w.r.t the pixel array size.
+ */
+ horizCropFactor *= (static_cast<float>(fastInfo.arrayWidth) /
+ pixelArraySize.data.i32[0]);
+ vertCropFactor *= (static_cast<float>(fastInfo.arrayHeight) /
+ pixelArraySize.data.i32[1]);
+
ALOGV("Horiz crop factor: %f, vert crop fact: %f",
horizCropFactor, vertCropFactor);
/**
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index e4e9a92..e628a7e 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -268,6 +268,8 @@ struct Parameters {
status_t overrideJpegSizeByVideoSize();
// Recover overridden jpeg size. Called during stopRecording.
status_t recoverOverriddenJpegSize();
+ // if video snapshot size is currently overridden
+ bool isJpegSizeOverridden();
// Calculate the crop region rectangle based on current stream sizes
struct CropRegion {
@@ -327,13 +329,17 @@ struct Parameters {
// Note that this doesn't apply to the (deprecated) single FPS value.
static const int kFpsToApiScale = 1000;
- // Transform between (-1000,-1000)-(1000,1000) normalized coords from camera
- // API and HAL2 (0,0)-(activePixelArray.width/height) coordinates
- int arrayXToNormalized(int width) const;
- int arrayYToNormalized(int height) const;
+ // Transform from (-1000,-1000)-(1000,1000) normalized coords from camera
+ // API to HAL2 (0,0)-(activePixelArray.width/height) coordinates
int normalizedXToArray(int x) const;
int normalizedYToArray(int y) const;
+ // Transform from HAL3 (0,0)-(activePixelArray.width/height) coordinates to
+ // (-1000,-1000)-(1000,1000) normalized coordinates given a scaler crop
+ // region.
+ int arrayXToNormalizedWithCrop(int x, const CropRegion &scalerCrop) const;
+ int arrayYToNormalizedWithCrop(int y, const CropRegion &scalerCrop) const;
+
struct Range {
int min;
int max;
@@ -343,20 +349,20 @@ struct Parameters {
private:
- // Convert between HAL2 sensor array coordinates and
- // viewfinder crop-region relative array coordinates
+ // Convert from viewfinder crop-region relative array coordinates
+ // to HAL2 sensor array coordinates
int cropXToArray(int x) const;
int cropYToArray(int y) const;
- int arrayXToCrop(int x) const;
- int arrayYToCrop(int y) const;
- // Convert between viewfinder crop-region relative array coordinates
- // and camera API (-1000,1000)-(1000,1000) normalized coords
- int cropXToNormalized(int x) const;
- int cropYToNormalized(int y) const;
+ // Convert from camera API (-1000,1000)-(1000,1000) normalized coords
+ // to viewfinder crop-region relative array coordinates
int normalizedXToCrop(int x) const;
int normalizedYToCrop(int y) const;
+ // Given a scaler crop region, calculate preview crop region based on
+ // preview aspect ratio.
+ CropRegion calculatePreviewCrop(const CropRegion &scalerCrop) const;
+
Vector<Size> availablePreviewSizes;
Vector<Size> availableVideoSizes;
// Get size list (that are no larger than limit) from static metadata.
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 9e7fff8..470624b 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -635,7 +635,7 @@ status_t StreamingProcessor::incrementStreamingIds() {
return OK;
}
-void StreamingProcessor::onFrameAvailable() {
+void StreamingProcessor::onFrameAvailable(const BufferItem& /*item*/) {
ATRACE_CALL();
Mutex::Autolock l(mMutex);
if (!mRecordingFrameAvailable) {
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
index 8466af4..1d679a4 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
@@ -80,7 +80,7 @@ class StreamingProcessor:
status_t incrementStreamingIds();
// Callback for new recording frames from HAL
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
// Callback from stagefright which returns used recording frames
void releaseRecordingFrame(const sp<IMemory>& mem);
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 8f78103..8b7e4b4 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -66,7 +66,7 @@ ZslProcessor::~ZslProcessor() {
disconnect();
}
-void ZslProcessor::onFrameAvailable() {
+void ZslProcessor::onFrameAvailable(const BufferItem& /*item*/) {
Mutex::Autolock l(mInputMutex);
if (!mZslBufferAvailable) {
mZslBufferAvailable = true;
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index b6533cf..2099c38 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -53,7 +53,7 @@ class ZslProcessor:
~ZslProcessor();
// From mZslConsumer
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
// From FrameProcessor
virtual void onResultAvailable(const CaptureResult &result);
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
index f110b66..470a6d6 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
@@ -592,7 +592,7 @@ nsecs_t ZslProcessor3::getCandidateTimestampLocked(size_t* metadataIdx) const {
if (afState != ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
afState != ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED &&
afState != ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
- ALOGW("%s: ZSL queue frame AF state is %d is not good for capture, skip it",
+ ALOGVV("%s: ZSL queue frame AF state is %d is not good for capture, skip it",
__FUNCTION__, afState);
continue;
}