summaryrefslogtreecommitdiffstats
path: root/camera/OMXCameraAdapter
diff options
context:
space:
mode:
authorZiyan <jaraidaniel@gmail.com>2016-01-03 14:19:34 +0100
committerDániel Járai <jaraidaniel@gmail.com>2016-01-03 06:57:39 -0800
commite0c5a929875a3f858926d7fec7e236d6db1006a3 (patch)
tree175f7cedf1c33e09e5ede924348530827637e4fb /camera/OMXCameraAdapter
parent0b9a81bcf50f519eaaf7933984ec975f275f0530 (diff)
downloaddevice_samsung_tuna-e0c5a929875a3f858926d7fec7e236d6db1006a3.zip
device_samsung_tuna-e0c5a929875a3f858926d7fec7e236d6db1006a3.tar.gz
device_samsung_tuna-e0c5a929875a3f858926d7fec7e236d6db1006a3.tar.bz2
Add camera sources in-tree
From hardware/ti/omap4 revision e57f2b6. Change-Id: I2e6164fdf6c0e2a2e75aee3bba3fe58a9c470add
Diffstat (limited to 'camera/OMXCameraAdapter')
-rw-r--r--camera/OMXCameraAdapter/OMX3A.cpp2161
-rw-r--r--camera/OMXCameraAdapter/OMXAlgo.cpp1355
-rw-r--r--camera/OMXCameraAdapter/OMXCameraAdapter.cpp4534
-rw-r--r--camera/OMXCameraAdapter/OMXCapabilities.cpp2513
-rw-r--r--camera/OMXCameraAdapter/OMXCapture.cpp2098
-rw-r--r--camera/OMXCameraAdapter/OMXDCC.cpp227
-rw-r--r--camera/OMXCameraAdapter/OMXDccDataSave.cpp361
-rw-r--r--camera/OMXCameraAdapter/OMXDefaults.cpp87
-rw-r--r--camera/OMXCameraAdapter/OMXExif.cpp838
-rw-r--r--camera/OMXCameraAdapter/OMXFD.cpp504
-rw-r--r--camera/OMXCameraAdapter/OMXFocus.cpp892
-rw-r--r--camera/OMXCameraAdapter/OMXMetadata.cpp181
-rw-r--r--camera/OMXCameraAdapter/OMXReprocess.cpp382
-rw-r--r--camera/OMXCameraAdapter/OMXZoom.cpp289
14 files changed, 16422 insertions, 0 deletions
diff --git a/camera/OMXCameraAdapter/OMX3A.cpp b/camera/OMXCameraAdapter/OMX3A.cpp
new file mode 100644
index 0000000..8e6437a
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMX3A.cpp
@@ -0,0 +1,2161 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMX3A.cpp
+*
+* This file contains functionality for handling 3A configurations.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+
+#include <cutils/properties.h>
+
+#define METERING_AREAS_RANGE 0xFF
+
+static const char PARAM_SEP[] = ",";
+
+namespace Ti {
+namespace Camera {
+
+const SceneModesEntry* OMXCameraAdapter::getSceneModeEntry(const char* name,
+ OMX_SCENEMODETYPE scene) {
+ const SceneModesEntry* cameraLUT = NULL;
+ const SceneModesEntry* entry = NULL;
+ unsigned int numEntries = 0;
+
+ // 1. Find camera's scene mode LUT
+ for (unsigned int i = 0; i < ARRAY_SIZE(CameraToSensorModesLUT); i++) {
+ if (strcmp(CameraToSensorModesLUT[i].name, name) == 0) {
+ cameraLUT = CameraToSensorModesLUT[i].Table;
+ numEntries = CameraToSensorModesLUT[i].size;
+ break;
+ }
+ }
+
+ // 2. Find scene mode entry in table
+ if (!cameraLUT) {
+ goto EXIT;
+ }
+
+ for (unsigned int i = 0; i < numEntries; i++) {
+ if(cameraLUT[i].scene == scene) {
+ entry = cameraLUT + i;
+ break;
+ }
+ }
+ EXIT:
+ return entry;
+}
+
/**
 * Parse every 3A-related key from the incoming CameraParameters, store the
 * values that changed into mParameters3A, and set the matching bits in
 * mPending3Asettings so the pending settings can be applied to the OMX
 * component later. A few settings (preset scene mode while previewing,
 * focus lock) are applied immediately instead of being deferred.
 *
 * @param params incoming parameter set from the application
 * @param state  current adapter state, used to decide whether a preset
 *               scene mode can be applied right away
 * @return NO_ERROR on success; -EINVAL when more metering areas are
 *         requested than the platform supports
 */
status_t OMXCameraAdapter::setParameters3A(const android::CameraParameters &params,
                                           BaseCameraAdapter::AdapterState state)
{
    status_t ret = NO_ERROR;
    int mode = 0;
    const char *str = NULL;
    int varint = 0;
    BaseCameraAdapter::AdapterState nextState;
    BaseCameraAdapter::getNextState(nextState);

    LOG_FUNCTION_NAME;

    android::AutoMutex lock(m3ASettingsUpdateLock);

    // --- Scene mode -------------------------------------------------------
    str = params.get(android::CameraParameters::KEY_SCENE_MODE);
    mode = getLUTvalue_HALtoOMX( str, SceneLUT);
    if ( mFirstTimeInit || ((str != NULL) && ( mParameters3A.SceneMode != mode )) ) {
        if ( 0 <= mode ) {
            mParameters3A.SceneMode = mode;
            if ((mode == OMX_Manual) && (mFirstTimeInit == false)){//Auto mode
                mFirstTimeInit = true;
            }
            if ((mode != OMX_Manual) &&
                (state & PREVIEW_ACTIVE) && !(nextState & CAPTURE_ACTIVE)) {
                // if setting preset scene mode, previewing, and not in the middle of capture
                // set preset scene mode immediately instead of in next FBD
                // for feedback params to work properly since they need to be read
                // by application in subsequent getParameters()
                ret |= setScene(mParameters3A);
                // re-apply EV compensation after setting scene mode since it probably reset it
                if(mParameters3A.EVCompensation) {
                    setEVCompensation(mParameters3A);
                }
                // NOTE(review): this early return skips parsing of all the
                // remaining 3A keys for this call — confirm that is intended.
                return ret;
            } else {
                mPending3Asettings |= SetSceneMode;
            }
        } else {
            mParameters3A.SceneMode = OMX_Manual;
        }
        CAMHAL_LOGVB("SceneMode %d", mParameters3A.SceneMode);
    }

#ifdef OMAP_ENHANCEMENT
    // --- Exposure mode and manual exposure/gain (TI extension) ------------
    if ( (str = params.get(TICameraParameters::KEY_EXPOSURE_MODE)) != NULL ) {
        mode = getLUTvalue_HALtoOMX(str, ExpLUT);
        if ( mParameters3A.Exposure != mode ) {
            // If either the new or the old exposure mode is manual set also
            // the SetManualExposure flag to call setManualExposureVal where
            // the auto gain and exposure flags are configured
            if ( mParameters3A.Exposure == OMX_ExposureControlOff ||
                 mode == OMX_ExposureControlOff ) {
                mPending3Asettings |= SetManualExposure;
            }
            mParameters3A.Exposure = mode;
            CAMHAL_LOGDB("Exposure mode %d", mode);
            if ( 0 <= mParameters3A.Exposure ) {
                mPending3Asettings |= SetExpMode;
            }
        }
        // Manual exposure/gain values are only consumed while the exposure
        // control is off (i.e. fully manual exposure).
        if ( mode == OMX_ExposureControlOff ) {
            mode = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE);
            if ( mParameters3A.ManualExposure != mode ) {
                mParameters3A.ManualExposure = mode;
                CAMHAL_LOGDB("Manual Exposure = %d", mode);
                mPending3Asettings |= SetManualExposure;
            }
            mode = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT);
            if ( mParameters3A.ManualExposureRight != mode ) {
                mParameters3A.ManualExposureRight = mode;
                CAMHAL_LOGDB("Manual Exposure right = %d", mode);
                mPending3Asettings |= SetManualExposure;
            }
            mode = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO);
            if ( mParameters3A.ManualGain != mode ) {
                mParameters3A.ManualGain = mode;
                CAMHAL_LOGDB("Manual Gain = %d", mode);
                mPending3Asettings |= SetManualExposure;
            }
            mode = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT);
            if ( mParameters3A.ManualGainRight != mode ) {
                mParameters3A.ManualGainRight = mode;
                CAMHAL_LOGDB("Manual Gain right = %d", mode);
                mPending3Asettings |= SetManualExposure;
            }
        }
    }
#endif

    // --- White balance ----------------------------------------------------
    str = params.get(android::CameraParameters::KEY_WHITE_BALANCE);
    mode = getLUTvalue_HALtoOMX( str, WBalLUT);
    if (mFirstTimeInit || ((str != NULL) && (mode != mParameters3A.WhiteBallance)))
        {
        mParameters3A.WhiteBallance = mode;
        CAMHAL_LOGDB("Whitebalance mode %d", mode);
        if ( 0 <= mParameters3A.WhiteBallance )
            {
            mPending3Asettings |= SetWhiteBallance;
            }
        }

#ifdef OMAP_ENHANCEMENT
    // --- Contrast / sharpness / saturation / brightness (TI extensions) ---
    // The first three are stored with their UI offset removed.
    varint = params.getInt(TICameraParameters::KEY_CONTRAST);
    if ( 0 <= varint )
        {
        if ( mFirstTimeInit ||
             ( (mParameters3A.Contrast  + CONTRAST_OFFSET) != varint ) )
            {
            mParameters3A.Contrast = varint - CONTRAST_OFFSET;
            CAMHAL_LOGDB("Contrast %d", mParameters3A.Contrast);
            mPending3Asettings |= SetContrast;
            }
        }

    varint = params.getInt(TICameraParameters::KEY_SHARPNESS);
    if ( 0 <= varint )
        {
        if ( mFirstTimeInit ||
             ((mParameters3A.Sharpness + SHARPNESS_OFFSET) != varint ))
            {
            mParameters3A.Sharpness = varint - SHARPNESS_OFFSET;
            CAMHAL_LOGDB("Sharpness %d", mParameters3A.Sharpness);
            mPending3Asettings |= SetSharpness;
            }
        }

    varint = params.getInt(TICameraParameters::KEY_SATURATION);
    if ( 0 <= varint )
        {
        if ( mFirstTimeInit ||
             ((mParameters3A.Saturation + SATURATION_OFFSET) != varint ) )
            {
            mParameters3A.Saturation = varint - SATURATION_OFFSET;
            CAMHAL_LOGDB("Saturation %d", mParameters3A.Saturation);
            mPending3Asettings |= SetSaturation;
            }
        }

    varint = params.getInt(TICameraParameters::KEY_BRIGHTNESS);
    if ( 0 <= varint )
        {
        if ( mFirstTimeInit ||
             (( mParameters3A.Brightness != varint )) )
            {
            mParameters3A.Brightness = (unsigned) varint;
            CAMHAL_LOGDB("Brightness %d", mParameters3A.Brightness);
            mPending3Asettings |= SetBrightness;
            }
        }
#endif

    // --- Antibanding (flicker) --------------------------------------------
    str = params.get(android::CameraParameters::KEY_ANTIBANDING);
    mode = getLUTvalue_HALtoOMX(str,FlickerLUT);
    if ( mFirstTimeInit || ( ( str != NULL ) && ( mParameters3A.Flicker != mode ) ))
        {
        mParameters3A.Flicker = mode;
        CAMHAL_LOGDB("Flicker %d", mParameters3A.Flicker);
        if ( 0 <= mParameters3A.Flicker )
            {
            mPending3Asettings |= SetFlicker;
            }
        }

#ifdef OMAP_ENHANCEMENT
    // --- ISO (TI extension) -----------------------------------------------
    str = params.get(TICameraParameters::KEY_ISO);
    mode = getLUTvalue_HALtoOMX(str, IsoLUT);
    CAMHAL_LOGVB("ISO mode arrived in HAL : %s", str);
    if ( mFirstTimeInit || ( ( str != NULL ) && ( mParameters3A.ISO != mode )) )
        {
        mParameters3A.ISO = mode;
        CAMHAL_LOGDB("ISO %d", mParameters3A.ISO);
        if ( 0 <= mParameters3A.ISO )
            {
            mPending3Asettings |= SetISO;
            }
        }
#endif

    // --- Focus mode -------------------------------------------------------
    str = params.get(android::CameraParameters::KEY_FOCUS_MODE);
    mode = getLUTvalue_HALtoOMX(str, FocusLUT);
    if ( (mFirstTimeInit || ((str != NULL) && (mParameters3A.Focus != mode))))
        {
        mPending3Asettings |= SetFocus;

        mParameters3A.Focus = mode;

        // if focus mode is set to infinity...update focus distance immediately
        if (mode == OMX_IMAGE_FocusControlAutoInfinity) {
            updateFocusDistances(mParameters);
        }

        CAMHAL_LOGDB("Focus %x", mParameters3A.Focus);
        }

    // --- EV compensation --------------------------------------------------
    str = params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION);
    varint = params.getInt(android::CameraParameters::KEY_EXPOSURE_COMPENSATION);
    if ( mFirstTimeInit || (str && (mParameters3A.EVCompensation != varint))) {
        CAMHAL_LOGDB("Setting EV Compensation to %d", varint);
        mParameters3A.EVCompensation = varint;
        mPending3Asettings |= SetEVCompensation;
    }

    // --- Flash mode (unknown values fall back to auto) --------------------
    str = params.get(android::CameraParameters::KEY_FLASH_MODE);
    mode = getLUTvalue_HALtoOMX( str, FlashLUT);
    if (  mFirstTimeInit || (( str != NULL ) && ( mParameters3A.FlashMode != mode )) )
        {
        if ( 0 <= mode )
            {
            mParameters3A.FlashMode = mode;
            mPending3Asettings |= SetFlash;
            }
        else
            {
            mParameters3A.FlashMode = OMX_IMAGE_FlashControlAuto;
            }
        }

    CAMHAL_LOGVB("Flash Setting %s", str);
    CAMHAL_LOGVB("FlashMode %d", mParameters3A.FlashMode);

    // --- Color effect -----------------------------------------------------
    str = params.get(android::CameraParameters::KEY_EFFECT);
    mode = getLUTvalue_HALtoOMX( str, EffLUT);
    if (  mFirstTimeInit || (( str != NULL ) && ( mParameters3A.Effect != mode )) )
        {
        mParameters3A.Effect = mode;
        CAMHAL_LOGDB("Effect %d", mParameters3A.Effect);
        if ( 0 <= mParameters3A.Effect )
            {
            mPending3Asettings |= SetEffect;
            }
        }

    // --- AE lock (only honored when the platform reports support) ---------
    str = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED);
    if ( (str != NULL) && (!strcmp(str, android::CameraParameters::TRUE)) )
      {
        OMX_BOOL lock = OMX_FALSE;
        mUserSetExpLock = OMX_FALSE;
        str = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
        if (str && ((strcmp(str, android::CameraParameters::TRUE)) == 0))
          {
            CAMHAL_LOGVA("Locking Exposure");
            lock = OMX_TRUE;
            mUserSetExpLock = OMX_TRUE;
          }
        else
          {
            CAMHAL_LOGVA("UnLocking Exposure");
          }

        if (mParameters3A.ExposureLock != lock)
          {
            mParameters3A.ExposureLock = lock;
            CAMHAL_LOGDB("ExposureLock %d", lock);
            mPending3Asettings |= SetExpLock;
          }
      }

    // --- AWB lock (only honored when the platform reports support) --------
    str = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED);
    if ( (str != NULL) && (!strcmp(str, android::CameraParameters::TRUE)) )
      {
        OMX_BOOL lock = OMX_FALSE;
        mUserSetWbLock = OMX_FALSE;
        str = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
        if (str && ((strcmp(str, android::CameraParameters::TRUE)) == 0))
          {
            CAMHAL_LOGVA("Locking WhiteBalance");
            lock = OMX_TRUE;
            mUserSetWbLock = OMX_TRUE;
          }
        else
          {
            CAMHAL_LOGVA("UnLocking WhiteBalance");
          }
        if (mParameters3A.WhiteBalanceLock != lock)
          {
            mParameters3A.WhiteBalanceLock = lock;
            CAMHAL_LOGDB("WhiteBalanceLock %d", lock);
            mPending3Asettings |= SetWBLock;
          }
      }

    // --- Focus lock — applied immediately, not deferred -------------------
    str = params.get(TICameraParameters::KEY_AUTO_FOCUS_LOCK);
    if (str && (strcmp(str, android::CameraParameters::TRUE) == 0) && (mParameters3A.FocusLock != OMX_TRUE)) {
        CAMHAL_LOGVA("Locking Focus");
        mParameters3A.FocusLock = OMX_TRUE;
        setFocusLock(mParameters3A);
    } else if (str && (strcmp(str, android::CameraParameters::FALSE) == 0) && (mParameters3A.FocusLock != OMX_FALSE)) {
        CAMHAL_LOGVA("UnLocking Focus");
        mParameters3A.FocusLock = OMX_FALSE;
        setFocusLock(mParameters3A);
    }

    // --- Metering areas ---------------------------------------------------
    str = params.get(android::CameraParameters::KEY_METERING_AREAS);
    if ( (str != NULL) ) {
        size_t MAX_METERING_AREAS;
        android::Vector<android::sp<CameraArea> > tempAreas;

        MAX_METERING_AREAS = atoi(params.get(android::CameraParameters::KEY_MAX_NUM_METERING_AREAS));

        android::AutoMutex lock(mMeteringAreasLock);

        ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);

        CAMHAL_LOGVB("areAreasDifferent? = %d",
                     CameraArea::areAreasDifferent(mMeteringAreas, tempAreas));

        if ( (NO_ERROR == ret) && CameraArea::areAreasDifferent(mMeteringAreas, tempAreas) ) {
            mMeteringAreas.clear();
            mMeteringAreas = tempAreas;

            if ( MAX_METERING_AREAS >= mMeteringAreas.size() ) {
                CAMHAL_LOGDB("Setting Metering Areas %s",
                        params.get(android::CameraParameters::KEY_METERING_AREAS));

                mPending3Asettings |= SetMeteringAreas;
            } else {
                CAMHAL_LOGEB("Metering areas supported %d, metering areas set %d",
                             MAX_METERING_AREAS, mMeteringAreas.size());
                ret = -EINVAL;
            }
        }
    }

#ifndef MOTOROLA_CAMERA
// TI extensions for enable/disable algos
    declareParameter3ABool(params, TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA,
                           mParameters3A.AlgoExternalGamma, SetAlgoExternalGamma, "External Gamma");
    declareParameter3ABool(params, TICameraParameters::KEY_ALGO_NSF1,
                           mParameters3A.AlgoNSF1, SetAlgoNSF1, "NSF1");
    declareParameter3ABool(params, TICameraParameters::KEY_ALGO_NSF2,
                           mParameters3A.AlgoNSF2, SetAlgoNSF2, "NSF2");
    declareParameter3ABool(params, TICameraParameters::KEY_ALGO_SHARPENING,
                           mParameters3A.AlgoSharpening, SetAlgoSharpening, "Sharpening");
    declareParameter3ABool(params, TICameraParameters::KEY_ALGO_THREELINCOLORMAP,
                           mParameters3A.AlgoThreeLinColorMap, SetAlgoThreeLinColorMap, "ThreeLinColorMap");
    declareParameter3ABool(params, TICameraParameters::KEY_ALGO_GIC, mParameters3A.AlgoGIC, SetAlgoGIC, "GIC");

    // Gamma table
    str = params.get(TICameraParameters::KEY_GAMMA_TABLE);
    updateGammaTable(str);
#endif

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+#ifndef MOTOROLA_CAMERA
/**
 * Parse a gamma-table string parameter and stage it for the OMX component.
 *
 * Expected format (derived from the parsing below): three parenthesized
 * planes in R,G,B order, each containing exactly OMX_TI_GAMMATABLE_SIZE
 * "offset:slope" pairs separated by commas, e.g.
 *   "(o:s,o:s,...,o:s)(...)(...)"
 * On any parse error the SetGammaTable pending bit stays cleared; note that
 * mParameters3A.mGammaTable has already been zeroed by then, so a partial
 * parse leaves the staged table cleared rather than untouched.
 *
 * @param gamma raw parameter string; NULL is ignored (no state change)
 */
void OMXCameraAdapter::updateGammaTable(const char* gamma)
{
    unsigned int plane = 0;
    unsigned int i = 0;
    bool gamma_changed = false;
    const char *a = gamma;
    // Per-plane destination arrays, indexed in the same R,G,B order as the
    // input string.
    OMX_TI_GAMMATABLE_ELEM_TYPE *elem[3] = { mParameters3A.mGammaTable.pR,
                                             mParameters3A.mGammaTable.pG,
                                             mParameters3A.mGammaTable.pB};

    if (!gamma) return;

    mPending3Asettings &= ~SetGammaTable;
    memset(&mParameters3A.mGammaTable, 0, sizeof(mParameters3A.mGammaTable));
    for (plane = 0; plane < 3; plane++) {
        a = strchr(a, '(');
        if (NULL != a) {
            a++;
            for (i = 0; i < OMX_TI_GAMMATABLE_SIZE; i++) {
                char *b;
                int newVal;
                // First number of the pair: the offset.
                newVal = strtod(a, &b);
                if (newVal != elem[plane][i].nOffset) {
                    elem[plane][i].nOffset = newVal;
                    gamma_changed = true;
                }
                // After the offset only ':' may follow (to introduce the
                // slope); ',' or ')' here means a malformed pair.
                a = strpbrk(b, ",:)");
                if ((NULL != a) && (':' == *a)) {
                    a++;
                } else if ((NULL != a) && (',' == *a)){
                    a++;
                    break;
                } else if ((NULL != a) && (')' == *a)){
                    a++;
                    break;
                } else {
                    CAMHAL_LOGE("Error while parsing values");
                    gamma_changed = false;
                    break;
                }
                // Second number of the pair: the slope.
                newVal = strtod(a, &b);
                if (newVal != elem[plane][i].nSlope) {
                    elem[plane][i].nSlope = newVal;
                    gamma_changed = true;
                }
                // After the slope, ',' continues with the next pair and ')'
                // closes the plane (valid only on the last pair, checked
                // below); ':' is malformed.
                a = strpbrk(b, ",:)");
                if ((NULL != a) && (',' == *a)) {
                    a++;
                } else if ((NULL != a) && (':' == *a)){
                    a++;
                    break;
                } else if ((NULL != a) && (')' == *a)){
                    a++;
                    break;
                } else {
                    CAMHAL_LOGE("Error while parsing values");
                    gamma_changed = false;
                    break;
                }
            }
            // The ')' break must happen exactly on the last element; any
            // other exit index means the plane had too few/many pairs.
            if ((OMX_TI_GAMMATABLE_SIZE - 1) != i) {
                CAMHAL_LOGE("Error while parsing values (incorrect count %u)", i);
                gamma_changed = false;
                break;
            }
        } else {
            CAMHAL_LOGE("Error while parsing planes (%u)", plane);
            gamma_changed = false;
            break;
        }
    }

    if (gamma_changed) {
        mPending3Asettings |= SetGammaTable;
    }
}
+#endif
+
+void OMXCameraAdapter::declareParameter3ABool(const android::CameraParameters &params, const char *key,
+ OMX_BOOL &current_setting, E3ASettingsFlags pending,
+ const char *msg)
+{
+ OMX_BOOL val = OMX_TRUE;
+ const char *str = params.get(key);
+
+ if (str && ((strcmp(str, android::CameraParameters::FALSE)) == 0))
+ {
+ CAMHAL_LOGVB("Disabling %s", msg);
+ val = OMX_FALSE;
+ }
+ else
+ {
+ CAMHAL_LOGVB("Enabling %s", msg);
+ }
+ if (current_setting != val)
+ {
+ current_setting = val;
+ CAMHAL_LOGDB("%s %s", msg, current_setting ? "enabled" : "disabled");
+ mPending3Asettings |= pending;
+ }
+}
+
+int OMXCameraAdapter::getLUTvalue_HALtoOMX(const char * HalValue, LUTtype LUT)
+{
+ int LUTsize = LUT.size;
+ if( HalValue )
+ for(int i = 0; i < LUTsize; i++)
+ if( 0 == strcmp(LUT.Table[i].userDefinition, HalValue) )
+ return LUT.Table[i].omxDefinition;
+
+ return -ENOENT;
+}
+
+const char* OMXCameraAdapter::getLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT)
+{
+ int LUTsize = LUT.size;
+ for(int i = 0; i < LUTsize; i++)
+ if( LUT.Table[i].omxDefinition == OMXValue )
+ return LUT.Table[i].userDefinition;
+
+ return NULL;
+}
+
+int OMXCameraAdapter::getMultipleLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT, char * supported)
+{
+ int num = 0;
+ int remaining_size;
+ int LUTsize = LUT.size;
+ for(int i = 0; i < LUTsize; i++)
+ if( LUT.Table[i].omxDefinition == OMXValue )
+ {
+ num++;
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ remaining_size = ((((int)MAX_PROP_VALUE_LENGTH - 1 - (int)strlen(supported)) < 0) ? 0 : (MAX_PROP_VALUE_LENGTH - 1 - strlen(supported)));
+ strncat(supported, LUT.Table[i].userDefinition, remaining_size);
+ }
+
+ return num;
+}
+
+status_t OMXCameraAdapter::setExposureMode(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_EXPOSURECONTROLTYPE exp;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&exp, OMX_CONFIG_EXPOSURECONTROLTYPE);
+ exp.nPortIndex = OMX_ALL;
+ exp.eExposureControl = (OMX_EXPOSURECONTROLTYPE)Gen3A.Exposure;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposure,
+ &exp);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring exposure mode 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Camera exposure mode configured successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+static bool isFlashDisabled() {
+#if (PROPERTY_VALUE_MAX < 5)
+#error "PROPERTY_VALUE_MAX must be at least 5"
+#endif
+
+ // Ignore flash_off system property for user build.
+ char buildType[PROPERTY_VALUE_MAX];
+ if (property_get("ro.build.type", buildType, NULL) &&
+ !strcasecmp(buildType, "user")) {
+ return false;
+ }
+
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("camera.flash_off", value, NULL) &&
+ (!strcasecmp(value, android::CameraParameters::TRUE) || !strcasecmp(value, "1"))) {
+ CAMHAL_LOGW("flash is disabled for testing purpose");
+ return true;
+ }
+
+ return false;
+}
+
/**
 * Apply the staged manual exposure/gain values (left and right sensor
 * configs) to the OMX component, or restore auto shutter/sensitivity when
 * the exposure mode is not fully manual.
 *
 * Performs a read-modify-write: both exposure configs are fetched first so
 * that only the shutter/sensitivity fields are changed.
 *
 * @param Gen3A staged 3A settings (Exposure, ManualExposure[Right],
 *              ManualGain[Right] are read)
 * @return NO_INIT when the component is invalid, otherwise the translated
 *         OMX error of the first failing Get/Set call
 */
status_t OMXCameraAdapter::setManualExposureVal(Gen3A_settings& Gen3A) {
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_CONFIG_EXPOSUREVALUETYPE expVal;
    OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE expValRight;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState ) {
        CAMHAL_LOGEA("OMX component is in invalid state");
        return NO_INIT;
    }

    OMX_INIT_STRUCT_PTR (&expVal, OMX_CONFIG_EXPOSUREVALUETYPE);
    OMX_INIT_STRUCT_PTR (&expValRight, OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE);
    expVal.nPortIndex = OMX_ALL;
    expValRight.nPortIndex = OMX_ALL;

    // Fetch current left config, then the TI right-sensor extension config;
    // the second read only happens if the first succeeded.
    eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
                           OMX_IndexConfigCommonExposureValue,
                           &expVal);
    if ( OMX_ErrorNone == eError ) {
        eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
                               (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
                               &expValRight);
    }
    if ( OMX_ErrorNone != eError ) {
        CAMHAL_LOGEB("OMX_GetConfig error 0x%x (manual exposure values)", eError);
        return Utils::ErrorUtils::omxToAndroidError(eError);
    }

    if ( Gen3A.Exposure != OMX_ExposureControlOff ) {
        // Not in manual exposure mode: let the component control both
        // shutter speed and sensitivity.
        expVal.bAutoShutterSpeed = OMX_TRUE;
        expVal.bAutoSensitivity = OMX_TRUE;
    } else {
        expVal.bAutoShutterSpeed = OMX_FALSE;
        expVal.nShutterSpeedMsec = Gen3A.ManualExposure;
        expValRight.nShutterSpeedMsec = Gen3A.ManualExposureRight;
        // Manual gain only applies when both sensors have a positive gain;
        // otherwise sensitivity stays automatic.
        if ( Gen3A.ManualGain <= 0 || Gen3A.ManualGainRight <= 0 ) {
            expVal.bAutoSensitivity = OMX_TRUE;
        } else {
            expVal.bAutoSensitivity = OMX_FALSE;
            expVal.nSensitivity = Gen3A.ManualGain;
            expValRight.nSensitivity = Gen3A.ManualGainRight;
        }
    }

    // Write both configs back, mirroring the read order.
    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                           OMX_IndexConfigCommonExposureValue,
                           &expVal);
    if ( OMX_ErrorNone == eError ) {
        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                               (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
                               &expValRight);
    }

    if ( OMX_ErrorNone != eError ) {
        CAMHAL_LOGEB("Error 0x%x while configuring manual exposure values", eError);
    } else {
        CAMHAL_LOGDA("Camera manual exposure values configured successfully");
    }

    LOG_FUNCTION_NAME_EXIT;

    return Utils::ErrorUtils::omxToAndroidError(eError);
}
+
+status_t OMXCameraAdapter::setFlashMode(Gen3A_settings& Gen3A)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_PARAM_FLASHCONTROLTYPE flash;
+ OMX_CONFIG_FOCUSASSISTTYPE focusAssist;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&flash, OMX_IMAGE_PARAM_FLASHCONTROLTYPE);
+ flash.nPortIndex = OMX_ALL;
+
+ if (isFlashDisabled()) {
+ flash.eFlashControl = ( OMX_IMAGE_FLASHCONTROLTYPE ) OMX_IMAGE_FlashControlOff;
+ } else {
+ flash.eFlashControl = ( OMX_IMAGE_FLASHCONTROLTYPE ) Gen3A.FlashMode;
+ }
+
+ CAMHAL_LOGDB("Configuring flash mode 0x%x", flash.eFlashControl);
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_IndexConfigFlashControl,
+ &flash);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring flash mode 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Camera flash mode configured successfully");
+ }
+
+ if ( OMX_ErrorNone == eError )
+ {
+ OMX_INIT_STRUCT_PTR (&focusAssist, OMX_CONFIG_FOCUSASSISTTYPE);
+ focusAssist.nPortIndex = OMX_ALL;
+ if ( flash.eFlashControl == OMX_IMAGE_FlashControlOff )
+ {
+ focusAssist.bFocusAssist = OMX_FALSE;
+ }
+ else
+ {
+ focusAssist.bFocusAssist = OMX_TRUE;
+ }
+
+ CAMHAL_LOGDB("Configuring AF Assist mode 0x%x", focusAssist.bFocusAssist);
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_IndexConfigFocusAssist,
+ &focusAssist);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring AF Assist mode 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Camera AF Assist mode configured successfully");
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::getFlashMode(Gen3A_settings& Gen3A)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_PARAM_FLASHCONTROLTYPE flash;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState ) {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&flash, OMX_IMAGE_PARAM_FLASHCONTROLTYPE);
+ flash.nPortIndex = OMX_ALL;
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_IndexConfigFlashControl,
+ &flash);
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while getting flash mode 0x%x", eError);
+ } else {
+ Gen3A.FlashMode = flash.eFlashControl;
+ CAMHAL_LOGDB("Gen3A.FlashMode 0x%x", Gen3A.FlashMode);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
/**
 * Apply the staged focus mode to the OMX component.
 *
 * First selects the algorithm priority: face detection (when running) takes
 * precedence over touch-AF regions, which take precedence over none. Then,
 * if no autofocus is currently in progress, configures the autofocus-enable
 * flag and the focus control mode. For infinity focus the 3A focus lock is
 * released first so the lens can be driven to the infinity position.
 *
 * @param Gen3A staged 3A settings (Focus is read)
 * @return NO_INIT when the component is invalid, otherwise the translated
 *         OMX error of the last config call
 */
status_t OMXCameraAdapter::setFocusMode(Gen3A_settings& Gen3A)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focus;
    // NOTE(review): top/left/width/height/weight are never used in this
    // function — candidates for removal.
    size_t top, left, width, height, weight;
    OMX_CONFIG_BOOLEANTYPE bOMX;

    LOG_FUNCTION_NAME;

    BaseCameraAdapter::AdapterState state;
    BaseCameraAdapter::getState(state);

    if ( OMX_StateInvalid == mComponentState )
        {
        CAMHAL_LOGEA("OMX component is in invalid state");
        return NO_INIT;
        }


    ///Face detection takes precedence over touch AF
    if ( mFaceDetectionRunning )
        {
        //Disable region priority first
        setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);

        //Enable face algorithm priority for focus
        setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , true);

        //Do normal focus afterwards
        ////FIXME: Check if the extended focus control is needed? this overrides caf
        //focusControl.eFocusControl = ( OMX_IMAGE_FOCUSCONTROLTYPE ) OMX_IMAGE_FocusControlExtended;
        }
    else if ( (!mFocusAreas.isEmpty()) && (!mFocusAreas.itemAt(0)->isZeroArea()) )
        {

        //Disable face priority first
        setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, false);

        //Enable region algorithm priority
        setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, true);


        //Do normal focus afterwards
        //FIXME: Check if the extended focus control is needed? this overrides caf
        //focus.eFocusControl = ( OMX_IMAGE_FOCUSCONTROLTYPE ) OMX_IMAGE_FocusControlExtended;

        }
    else
        {

        //Disable both region and face priority
        setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);

        setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, false);

        }

    // NOTE(review): ret is never assigned after initialization, so the
    // NO_ERROR check below always passes; only the AF_ACTIVE guard matters.
    if ( NO_ERROR == ret && ((state & AF_ACTIVE) == 0) )
        {
        OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);

        if ( Gen3A.Focus == OMX_IMAGE_FocusControlAutoInfinity)
            {
            // Don't lock at infinity, otherwise the AF cannot drive
            // the lens at infinity position
            if( set3ALock(mUserSetExpLock, mUserSetWbLock, OMX_FALSE) != NO_ERROR)
                {
                CAMHAL_LOGEA("Error Applying 3A locks");
                } else {
                CAMHAL_LOGDA("Focus locked. Applied focus locks successfully");
                }
            }
        if ( Gen3A.Focus == OMX_IMAGE_FocusControlAuto ||
             Gen3A.Focus == OMX_IMAGE_FocusControlAutoInfinity)
            {
            // Run focus scanning if switching to continuous infinity focus mode
            bOMX.bEnabled = OMX_TRUE;
            }
        else
            {
            bOMX.bEnabled = OMX_FALSE;
            }

        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                               (OMX_INDEXTYPE)OMX_TI_IndexConfigAutofocusEnable,
                               &bOMX);

        OMX_INIT_STRUCT_PTR (&focus, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
        focus.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
        focus.eFocusControl = (OMX_IMAGE_FOCUSCONTROLTYPE)Gen3A.Focus;

        CAMHAL_LOGDB("Configuring focus mode 0x%x", focus.eFocusControl);
        eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, OMX_IndexConfigFocusControl, &focus);
        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error while configuring focus mode 0x%x", eError);
            }
        else
            {
            CAMHAL_LOGDA("Camera focus mode configured successfully");
            }
        }

    LOG_FUNCTION_NAME_EXIT;

    return Utils::ErrorUtils::omxToAndroidError(eError);
}
+
+status_t OMXCameraAdapter::getFocusMode(Gen3A_settings& Gen3A)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focus;
+ size_t top, left, width, height, weight;
+
+ LOG_FUNCTION_NAME;
+
+ if (OMX_StateInvalid == mComponentState) {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&focus, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
+ focus.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigFocusControl, &focus);
+
+ if (OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while configuring focus mode 0x%x", eError);
+ } else {
+ Gen3A.Focus = focus.eFocusControl;
+ CAMHAL_LOGDB("Gen3A.Focus 0x%x", Gen3A.Focus);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setScene(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_SCENEMODETYPE scene;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&scene, OMX_CONFIG_SCENEMODETYPE);
+ scene.nPortIndex = OMX_ALL;
+ scene.eSceneMode = ( OMX_SCENEMODETYPE ) Gen3A.SceneMode;
+
+ CAMHAL_LOGDB("Configuring scene mode 0x%x", scene.eSceneMode);
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSceneMode,
+ &scene);
+
+ if (OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while configuring scene mode 0x%x", eError);
+ } else {
+ CAMHAL_LOGDA("Camera scene configured successfully");
+ if (Gen3A.SceneMode != OMX_Manual) {
+ // Get preset scene mode feedback
+ getFocusMode(Gen3A);
+ getFlashMode(Gen3A);
+ getWBMode(Gen3A);
+
+ // TODO(XXX): Re-enable these for mainline
+ // getSharpness(Gen3A);
+ // getSaturation(Gen3A);
+ // getISO(Gen3A);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setEVCompensation(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_EXPOSUREVALUETYPE expValues;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&expValues, OMX_CONFIG_EXPOSUREVALUETYPE);
+ expValues.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expValues);
+ CAMHAL_LOGDB("old EV Compensation for OMX = 0x%x", (int)expValues.xEVCompensation);
+ CAMHAL_LOGDB("EV Compensation for HAL = %d", Gen3A.EVCompensation);
+
+ expValues.xEVCompensation = ( Gen3A.EVCompensation * ( 1 << Q16_OFFSET ) ) / 10;
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expValues);
+ CAMHAL_LOGDB("new EV Compensation for OMX = 0x%x", (int)expValues.xEVCompensation);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring EV Compensation 0x%x error = 0x%x",
+ ( unsigned int ) expValues.xEVCompensation,
+ eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("EV Compensation 0x%x configured successfully",
+ ( unsigned int ) expValues.xEVCompensation);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Reads the current EV compensation value from the OMX component.
+ *
+ * The component reports the value in Q16 fixed point; it is converted back
+ * to the HAL representation (EV * 10) and stored in Gen3A.EVCompensation.
+ *
+ * @param Gen3A 3A settings structure that receives the EV compensation.
+ * @return NO_INIT if the component is in an invalid state, otherwise the
+ *         Android error translated from the OMX result.
+ */
+status_t OMXCameraAdapter::getEVCompensation(Gen3A_settings& Gen3A)
+{
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_CONFIG_EXPOSUREVALUETYPE expValues;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&expValues, OMX_CONFIG_EXPOSUREVALUETYPE);
+    expValues.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+    // Bug fix: the OMX_GetConfig result was previously discarded, so the
+    // error check below always saw OMX_ErrorNone.
+    eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+                           OMX_IndexConfigCommonExposureValue,
+                           &expValues);
+
+    if ( OMX_ErrorNone != eError ) {
+        CAMHAL_LOGEB("Error while getting EV Compensation error = 0x%x", eError);
+    } else {
+        // Convert Q16 fixed point back to HAL units (EV * 10).
+        Gen3A.EVCompensation = (10 * expValues.xEVCompensation) / (1 << Q16_OFFSET);
+        CAMHAL_LOGDB("Gen3A.EVCompensation 0x%x", Gen3A.EVCompensation);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Applies the white balance mode stored in Gen3A to the OMX component.
+ *
+ * Face and region algorithm priorities for white balance are disabled
+ * first so the explicitly selected mode takes effect.
+ *
+ * @param Gen3A 3A settings carrying the requested WhiteBallance mode.
+ * @return NO_INIT if the component is in an invalid state, otherwise the
+ *         Android error translated from the OMX result.
+ */
+status_t OMXCameraAdapter::setWBMode(Gen3A_settings& Gen3A)
+{
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_CONFIG_WHITEBALCONTROLTYPE wb;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+        }
+
+    OMX_INIT_STRUCT_PTR (&wb, OMX_CONFIG_WHITEBALCONTROLTYPE);
+    wb.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    wb.eWhiteBalControl = ( OMX_WHITEBALCONTROLTYPE ) Gen3A.WhiteBallance;
+
+    // disable face and region priorities
+    setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, false);
+    setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false);
+
+    eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+                            OMX_IndexConfigCommonWhiteBalance,
+                            &wb);
+    if ( OMX_ErrorNone != eError )
+        {
+        CAMHAL_LOGEB("Error while configuring Whitebalance mode 0x%x error = 0x%x",
+                     ( unsigned int ) wb.eWhiteBalControl,
+                     eError);
+        }
+    else
+        {
+        CAMHAL_LOGDB("Whitebalance mode 0x%x configured successfully",
+                     ( unsigned int ) wb.eWhiteBalControl);
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    // Consistency fix: translate the OMX error into an Android status_t like
+    // the sibling 3A setters, instead of returning the raw OMX_ERRORTYPE.
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Reads the current white balance mode from the OMX component into
+ * Gen3A.WhiteBallance.
+ *
+ * @param Gen3A 3A settings structure that receives the mode.
+ * @return NO_INIT if the component is in an invalid state, otherwise the
+ *         Android error translated from the OMX result.
+ */
+status_t OMXCameraAdapter::getWBMode(Gen3A_settings& Gen3A)
+{
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_CONFIG_WHITEBALCONTROLTYPE wb;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&wb, OMX_CONFIG_WHITEBALCONTROLTYPE);
+    wb.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+    eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+                           OMX_IndexConfigCommonWhiteBalance,
+                           &wb);
+
+    if (OMX_ErrorNone != eError) {
+        CAMHAL_LOGEB("Error while getting Whitebalance mode error = 0x%x", eError);
+    } else {
+        Gen3A.WhiteBallance = wb.eWhiteBalControl;
+        CAMHAL_LOGDB("Gen3A.WhiteBallance 0x%x", Gen3A.WhiteBallance);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    // Consistency fix: return an Android status_t like the other 3A
+    // getters, instead of the raw OMX_ERRORTYPE.
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Programs the anti-flicker (banding) cancellation mode from Gen3A.Flicker
+ * into the preview port via OMX_IndexConfigFlickerCancel.
+ */
+status_t OMXCameraAdapter::setFlicker(Gen3A_settings& Gen3A)
+{
+    OMX_CONFIG_FLICKERCANCELTYPE flickerCfg;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&flickerCfg, OMX_CONFIG_FLICKERCANCELTYPE);
+    flickerCfg.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    flickerCfg.eFlickerCancel = (OMX_COMMONFLICKERCANCELTYPE) Gen3A.Flicker;
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           (OMX_INDEXTYPE)OMX_IndexConfigFlickerCancel,
+                           &flickerCfg);
+    if ( OMX_ErrorNone == eError ) {
+        CAMHAL_LOGDB("Flicker mode 0x%x configured successfully",
+                     ( unsigned int ) flickerCfg.eFlickerCancel);
+    } else {
+        CAMHAL_LOGEB("Error while configuring Flicker mode 0x%x error = 0x%x",
+                     ( unsigned int ) flickerCfg.eFlickerCancel,
+                     eError);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Pushes the brightness level from Gen3A.Brightness to the preview port
+ * via OMX_IndexConfigCommonBrightness.
+ */
+status_t OMXCameraAdapter::setBrightness(Gen3A_settings& Gen3A)
+{
+    OMX_CONFIG_BRIGHTNESSTYPE brightnessCfg;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&brightnessCfg, OMX_CONFIG_BRIGHTNESSTYPE);
+    brightnessCfg.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    brightnessCfg.nBrightness = Gen3A.Brightness;
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           OMX_IndexConfigCommonBrightness,
+                           &brightnessCfg);
+    if ( OMX_ErrorNone == eError ) {
+        CAMHAL_LOGDB("Brightness 0x%x configured successfully",
+                     ( unsigned int ) brightnessCfg.nBrightness);
+    } else {
+        CAMHAL_LOGEB("Error while configuring Brightness 0x%x error = 0x%x",
+                     ( unsigned int ) brightnessCfg.nBrightness,
+                     eError);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Applies the contrast level stored in Gen3A to the preview port via
+ * OMX_IndexConfigCommonContrast.
+ *
+ * @param Gen3A 3A settings carrying the requested Contrast level.
+ * @return NO_INIT if the component is in an invalid state, otherwise the
+ *         Android error translated from the OMX result.
+ */
+status_t OMXCameraAdapter::setContrast(Gen3A_settings& Gen3A)
+{
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_CONFIG_CONTRASTTYPE contrast;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+        }
+
+    OMX_INIT_STRUCT_PTR (&contrast, OMX_CONFIG_CONTRASTTYPE);
+    contrast.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    contrast.nContrast = Gen3A.Contrast;
+
+    eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+                            OMX_IndexConfigCommonContrast,
+                            &contrast);
+    if ( OMX_ErrorNone != eError )
+        {
+        CAMHAL_LOGEB("Error while configuring Contrast 0x%x error = 0x%x",
+                     ( unsigned int ) contrast.nContrast,
+                     eError);
+        }
+    else
+        {
+        CAMHAL_LOGDB("Contrast 0x%x configured successfully",
+                     ( unsigned int ) contrast.nContrast);
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    // Consistency fix: translate the OMX error into an Android status_t like
+    // the sibling 3A setters, instead of returning the raw OMX_ERRORTYPE.
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Programs the sharpening level from Gen3A.Sharpness via
+ * OMX_IndexConfigSharpeningLevel. A level of 0 selects automatic
+ * sharpening (bAuto = OMX_TRUE); any other value is applied manually.
+ */
+status_t OMXCameraAdapter::setSharpness(Gen3A_settings& Gen3A)
+{
+    OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE sharpCfg;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&sharpCfg, OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE);
+    sharpCfg.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    sharpCfg.nLevel = Gen3A.Sharpness;
+    // Level 0 means "let the component decide".
+    sharpCfg.bAuto = ( sharpCfg.nLevel == 0 ) ? OMX_TRUE : OMX_FALSE;
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           (OMX_INDEXTYPE)OMX_IndexConfigSharpeningLevel,
+                           &sharpCfg);
+    if ( OMX_ErrorNone == eError ) {
+        CAMHAL_LOGDB("Sharpness 0x%x configured successfully",
+                     ( unsigned int ) sharpCfg.nLevel);
+    } else {
+        CAMHAL_LOGEB("Error while configuring Sharpness 0x%x error = 0x%x",
+                     ( unsigned int ) sharpCfg.nLevel,
+                     eError);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Reads the current sharpening level from the OMX component into
+ * Gen3A.Sharpness.
+ *
+ * @param Gen3A 3A settings structure that receives the level.
+ * @return NO_INIT if the component is in an invalid state, otherwise the
+ *         Android error translated from the OMX result.
+ */
+status_t OMXCameraAdapter::getSharpness(Gen3A_settings& Gen3A)
+{
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE procSharpness;
+
+    LOG_FUNCTION_NAME;
+
+    if (OMX_StateInvalid == mComponentState) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&procSharpness, OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE);
+    procSharpness.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+    eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+                           (OMX_INDEXTYPE)OMX_IndexConfigSharpeningLevel,
+                           &procSharpness);
+
+    if (OMX_ErrorNone != eError) {
+        // Log message fix: this is a getter, not a configuration call.
+        CAMHAL_LOGEB("Error while getting Sharpness error = 0x%x", eError);
+    } else {
+        Gen3A.Sharpness = procSharpness.nLevel;
+        CAMHAL_LOGDB("Gen3A.Sharpness 0x%x", Gen3A.Sharpness);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Applies the saturation level from Gen3A.Saturation to the preview port
+ * via OMX_IndexConfigCommonSaturation.
+ */
+status_t OMXCameraAdapter::setSaturation(Gen3A_settings& Gen3A)
+{
+    OMX_CONFIG_SATURATIONTYPE satCfg;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&satCfg, OMX_CONFIG_SATURATIONTYPE);
+    satCfg.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    satCfg.nSaturation = Gen3A.Saturation;
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           OMX_IndexConfigCommonSaturation,
+                           &satCfg);
+    if ( OMX_ErrorNone == eError ) {
+        CAMHAL_LOGDB("Saturation 0x%x configured successfully",
+                     ( unsigned int ) satCfg.nSaturation);
+    } else {
+        CAMHAL_LOGEB("Error while configuring Saturation 0x%x error = 0x%x",
+                     ( unsigned int ) satCfg.nSaturation,
+                     eError);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Reads the current saturation level from the OMX component into
+ * Gen3A.Saturation.
+ */
+status_t OMXCameraAdapter::getSaturation(Gen3A_settings& Gen3A)
+{
+    OMX_CONFIG_SATURATIONTYPE satCfg;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME;
+
+    if (OMX_StateInvalid == mComponentState) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&satCfg, OMX_CONFIG_SATURATIONTYPE);
+    satCfg.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+    eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+                           OMX_IndexConfigCommonSaturation,
+                           &satCfg);
+    if (OMX_ErrorNone == eError) {
+        Gen3A.Saturation = satCfg.nSaturation;
+        CAMHAL_LOGDB("Gen3A.Saturation 0x%x", Gen3A.Saturation);
+    } else {
+        CAMHAL_LOGEB("Error while getting Saturation error = 0x%x", eError);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Applies the ISO sensitivity from Gen3A.ISO to both sensor pipelines.
+ *
+ * The current exposure configuration is fetched first (left sensor via
+ * OMX_IndexConfigCommonExposureValue, right via
+ * OMX_TI_IndexConfigRightExposureValue), modified, and written back.
+ * ISO 0 selects automatic sensitivity; any other value is applied as a
+ * fixed sensitivity to both sensors. When manual exposure is active
+ * (OMX_ExposureControlOff) gain handling is owned by
+ * setManualExposureVal(), so this function is a no-op.
+ */
+status_t OMXCameraAdapter::setISO(Gen3A_settings& Gen3A)
+{
+    OMX_CONFIG_EXPOSUREVALUETYPE expLeft;
+    OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE expRight;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    // In case of manual exposure Gain is applied from setManualExposureVal
+    if ( Gen3A.Exposure == OMX_ExposureControlOff ) {
+        return NO_ERROR;
+    }
+
+    OMX_INIT_STRUCT_PTR (&expLeft, OMX_CONFIG_EXPOSUREVALUETYPE);
+    OMX_INIT_STRUCT_PTR (&expRight, OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE);
+    expLeft.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    expRight.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+    // Read back both exposure configurations before modifying them.
+    eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+                           OMX_IndexConfigCommonExposureValue,
+                           &expLeft);
+    if ( OMX_ErrorNone == eError ) {
+        eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+                               (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+                               &expRight);
+    }
+    if ( OMX_ErrorNone != eError ) {
+        CAMHAL_LOGEB("OMX_GetConfig error 0x%x (manual exposure values)", eError);
+        return Utils::ErrorUtils::omxToAndroidError(eError);
+    }
+
+    if ( 0 == Gen3A.ISO ) {
+        // ISO 0 requests automatic sensitivity.
+        expLeft.bAutoSensitivity = OMX_TRUE;
+    } else {
+        expLeft.bAutoSensitivity = OMX_FALSE;
+        expLeft.nSensitivity = Gen3A.ISO;
+        expRight.nSensitivity = expLeft.nSensitivity;
+    }
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           OMX_IndexConfigCommonExposureValue,
+                           &expLeft);
+    if ( OMX_ErrorNone == eError ) {
+        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                               (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+                               &expRight);
+    }
+
+    if ( OMX_ErrorNone == eError ) {
+        CAMHAL_LOGDB("ISO 0x%x configured successfully",
+                     ( unsigned int ) expLeft.nSensitivity);
+    } else {
+        CAMHAL_LOGEB("Error while configuring ISO 0x%x error = 0x%x",
+                     ( unsigned int ) expLeft.nSensitivity,
+                     eError);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Reads the current ISO sensitivity from the OMX component into Gen3A.ISO.
+ *
+ * @param Gen3A 3A settings structure that receives the sensitivity.
+ * @return NO_INIT if the component is in an invalid state, otherwise the
+ *         Android error translated from the OMX result.
+ */
+status_t OMXCameraAdapter::getISO(Gen3A_settings& Gen3A)
+{
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_CONFIG_EXPOSUREVALUETYPE expValues;
+
+    LOG_FUNCTION_NAME;
+
+    if (OMX_StateInvalid == mComponentState) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&expValues, OMX_CONFIG_EXPOSUREVALUETYPE);
+    expValues.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+    // Bug fix: the OMX_GetConfig result was previously discarded, so the
+    // error check below always saw OMX_ErrorNone.
+    eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+                           OMX_IndexConfigCommonExposureValue,
+                           &expValues);
+
+    if (OMX_ErrorNone != eError) {
+        CAMHAL_LOGEB("Error while getting ISO error = 0x%x", eError);
+    } else {
+        Gen3A.ISO = expValues.nSensitivity;
+        CAMHAL_LOGDB("Gen3A.ISO %d", Gen3A.ISO);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Applies the image filter effect from Gen3A.Effect to the preview port
+ * via OMX_IndexConfigCommonImageFilter.
+ */
+status_t OMXCameraAdapter::setEffect(Gen3A_settings& Gen3A)
+{
+    OMX_CONFIG_IMAGEFILTERTYPE filterCfg;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&filterCfg, OMX_CONFIG_IMAGEFILTERTYPE);
+    filterCfg.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    filterCfg.eImageFilter = (OMX_IMAGEFILTERTYPE) Gen3A.Effect;
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           OMX_IndexConfigCommonImageFilter,
+                           &filterCfg);
+    if ( OMX_ErrorNone == eError ) {
+        CAMHAL_LOGDB("Effect 0x%x configured successfully",
+                     ( unsigned int ) filterCfg.eImageFilter);
+    } else {
+        CAMHAL_LOGEB("Error while configuring Effect 0x%x error = 0x%x",
+                     ( unsigned int ) filterCfg.eImageFilter,
+                     eError);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Applies the white balance lock state from Gen3A.WhiteBalanceLock via
+ * OMX_IndexConfigImageWhiteBalanceLock.
+ */
+status_t OMXCameraAdapter::setWhiteBalanceLock(Gen3A_settings& Gen3A)
+{
+    OMX_IMAGE_CONFIG_LOCKTYPE lockCfg;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&lockCfg, OMX_IMAGE_CONFIG_LOCKTYPE);
+    lockCfg.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    lockCfg.bLock = Gen3A.WhiteBalanceLock;
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           (OMX_INDEXTYPE)OMX_IndexConfigImageWhiteBalanceLock,
+                           &lockCfg);
+    if ( OMX_ErrorNone == eError ) {
+        CAMHAL_LOGDB("WhiteBalance Lock configured successfully %d ", lockCfg.bLock);
+    } else {
+        CAMHAL_LOGEB("Error while configuring WhiteBalance Lock error = 0x%x", eError);
+    }
+
+    LOG_FUNCTION_NAME_EXIT
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Applies the auto-exposure lock state from Gen3A.ExposureLock via
+ * OMX_IndexConfigImageExposureLock.
+ */
+status_t OMXCameraAdapter::setExposureLock(Gen3A_settings& Gen3A)
+{
+    OMX_IMAGE_CONFIG_LOCKTYPE lockCfg;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&lockCfg, OMX_IMAGE_CONFIG_LOCKTYPE);
+    lockCfg.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    lockCfg.bLock = Gen3A.ExposureLock;
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           (OMX_INDEXTYPE)OMX_IndexConfigImageExposureLock,
+                           &lockCfg);
+    if ( OMX_ErrorNone == eError ) {
+        CAMHAL_LOGDB("Exposure Lock configured successfully %d ", lockCfg.bLock);
+    } else {
+        CAMHAL_LOGEB("Error while configuring Exposure Lock error = 0x%x", eError);
+    }
+
+    LOG_FUNCTION_NAME_EXIT
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Applies the focus lock state from Gen3A.FocusLock via
+ * OMX_IndexConfigImageFocusLock.
+ */
+status_t OMXCameraAdapter::setFocusLock(Gen3A_settings& Gen3A)
+{
+    OMX_IMAGE_CONFIG_LOCKTYPE lockCfg;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&lockCfg, OMX_IMAGE_CONFIG_LOCKTYPE);
+    lockCfg.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    lockCfg.bLock = Gen3A.FocusLock;
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           (OMX_INDEXTYPE)OMX_IndexConfigImageFocusLock,
+                           &lockCfg);
+    if ( OMX_ErrorNone == eError ) {
+        CAMHAL_LOGDB("Focus Lock configured successfully %d ", lockCfg.bLock);
+    } else {
+        CAMHAL_LOGEB("Error while configuring Focus Lock error = 0x%x", eError);
+    }
+
+    LOG_FUNCTION_NAME_EXIT
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Applies the requested exposure, white balance and focus lock states.
+ *
+ * The requested toggles are stored in mParameters3A first; then the current
+ * lock state of each algorithm is read back from the component and the
+ * matching set*Lock() helper is called only when the hardware state differs
+ * from the requested one, avoiding redundant OMX_SetConfig calls. On any
+ * OMX_GetConfig failure the remaining locks are skipped (goto EXIT) and the
+ * translated error is returned.
+ *
+ * @param toggleExp   requested auto-exposure lock state
+ * @param toggleWb    requested auto-white-balance lock state
+ * @param toggleFocus requested focus lock state
+ * @return Android error translated from the first failing OMX call,
+ *         NO_INIT if the component is in an invalid state.
+ */
+status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_BOOL toggleFocus)
+{
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_IMAGE_CONFIG_LOCKTYPE lock;
+
+    LOG_FUNCTION_NAME
+
+    if ( OMX_StateInvalid == mComponentState )
+      {
+      CAMHAL_LOGEA("OMX component is in invalid state");
+      return NO_INIT;
+      }
+
+    OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE);
+    lock.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+    // Record the requested states up-front; the set*Lock() helpers below
+    // read their values from mParameters3A.
+    mParameters3A.ExposureLock = toggleExp;
+    mParameters3A.FocusLock = toggleFocus;
+    mParameters3A.WhiteBalanceLock = toggleWb;
+
+    // Query current exposure lock state.
+    eError = OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
+                            (OMX_INDEXTYPE)OMX_IndexConfigImageExposureLock,
+                            &lock);
+
+    if ( OMX_ErrorNone != eError )
+      {
+      CAMHAL_LOGEB("Error GetConfig Exposure Lock error = 0x%x", eError);
+      goto EXIT;
+      }
+    else
+      {
+      CAMHAL_LOGDA("Exposure Lock GetConfig successfull");
+
+      /* Apply locks only when not applied already */
+      if ( lock.bLock  != toggleExp )
+        {
+        setExposureLock(mParameters3A);
+        }
+
+      }
+
+    // Re-initialize the shared struct before each query.
+    OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE);
+    lock.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    eError = OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
+                            (OMX_INDEXTYPE)OMX_IndexConfigImageFocusLock,
+                            &lock);
+
+    if ( OMX_ErrorNone != eError )
+      {
+      CAMHAL_LOGEB("Error GetConfig Focus Lock error = 0x%x", eError);
+      goto EXIT;
+      }
+    else
+      {
+      CAMHAL_LOGDB("Focus Lock GetConfig successfull bLock(%d)", lock.bLock);
+
+      /* Apply locks only when not applied already */
+      if ( lock.bLock  != toggleFocus )
+        {
+        setFocusLock(mParameters3A);
+        }
+      }
+
+    OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE);
+    lock.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+    eError = OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
+                            (OMX_INDEXTYPE)OMX_IndexConfigImageWhiteBalanceLock,
+                            &lock);
+
+    if ( OMX_ErrorNone != eError )
+      {
+      CAMHAL_LOGEB("Error GetConfig WhiteBalance Lock error = 0x%x", eError);
+      goto EXIT;
+      }
+    else
+      {
+      CAMHAL_LOGDA("WhiteBalance Lock GetConfig successfull");
+
+      /* Apply locks only when not applied already */
+      if ( lock.bLock != toggleWb )
+        {
+        setWhiteBalanceLock(mParameters3A);
+        }
+
+      }
+ EXIT:
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Configures the exposure metering areas on the OMX component.
+ *
+ * The areas stored in mMeteringAreas are converted from preview-frame
+ * coordinates into the 3A coordinate space, normalized to
+ * METERING_AREAS_RANGE, packed into an OMX_ALGOAREASTYPE structure and
+ * handed to the component through a page-aligned shared buffer
+ * (OMX_TI_IndexConfigAlgoAreas).
+ *
+ * @param Gen3A 3A settings (areas themselves come from mMeteringAreas).
+ * @return NO_ERROR on success, NO_INIT for an invalid component state,
+ *         -ENOMEM on allocation failure, -EINVAL if OMX_SetConfig fails.
+ */
+status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    CameraBuffer *bufferlist = NULL;
+    OMX_ALGOAREASTYPE *meteringAreas = NULL;
+    OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
+    OMXCameraPortParameters *mPreviewData = NULL;
+    int areasSize = 0;
+
+    LOG_FUNCTION_NAME
+
+    android::AutoMutex lock(mMeteringAreasLock);
+
+    if ( OMX_StateInvalid == mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+        }
+
+    mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+    // Round the allocation up to a whole number of 4 KiB pages as required
+    // for the shared buffer exchanged with the OMX component.
+    areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
+    bufferlist = mMemMgr.allocateBufferList(0, 0, NULL, areasSize, 1);
+
+    // Bug fix: validate the list itself before dereferencing bufferlist[0].
+    if ( NULL == bufferlist )
+        {
+        CAMHAL_LOGEB("Error allocating buffer list for metering areas %d", eError);
+        return -ENOMEM;
+        }
+
+    meteringAreas = (OMX_ALGOAREASTYPE *)bufferlist[0].opaque;
+    if ( NULL == meteringAreas )
+        {
+        CAMHAL_LOGEB("Error allocating buffer for metering areas %d", eError);
+        // Bug fix: free the buffer list through EXIT instead of leaking it.
+        ret = -ENOMEM;
+        goto EXIT;
+        }
+
+    OMX_INIT_STRUCT_PTR (meteringAreas, OMX_ALGOAREASTYPE);
+
+    meteringAreas->nPortIndex = OMX_ALL;
+    meteringAreas->nNumAreas = mMeteringAreas.size();
+    meteringAreas->nAlgoAreaPurpose = OMX_AlgoAreaExposure;
+
+    for ( unsigned int n = 0; n < mMeteringAreas.size(); n++)
+        {
+        int widthDivisor = 1;
+        int heightDivisor = 1;
+
+        // Stereo layouts pack two views into one frame; each view gets half
+        // the frame in the packed dimension.
+        if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottom) {
+            heightDivisor = 2;
+        }
+        if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutLeftRight) {
+            widthDivisor = 2;
+        }
+
+        // transform the coordinates to 3A-type coordinates
+        mMeteringAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth/widthDivisor,
+                                            (size_t)mPreviewData->mHeight/heightDivisor,
+                                            (size_t&)meteringAreas->tAlgoAreas[n].nTop,
+                                            (size_t&)meteringAreas->tAlgoAreas[n].nLeft,
+                                            (size_t&)meteringAreas->tAlgoAreas[n].nWidth,
+                                            (size_t&)meteringAreas->tAlgoAreas[n].nHeight);
+
+        // Normalize to the [0, METERING_AREAS_RANGE] coordinate space.
+        meteringAreas->tAlgoAreas[n].nLeft =
+                ( meteringAreas->tAlgoAreas[n].nLeft * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
+        meteringAreas->tAlgoAreas[n].nTop =
+                ( meteringAreas->tAlgoAreas[n].nTop* METERING_AREAS_RANGE ) / mPreviewData->mHeight;
+        meteringAreas->tAlgoAreas[n].nWidth =
+                ( meteringAreas->tAlgoAreas[n].nWidth * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
+        meteringAreas->tAlgoAreas[n].nHeight =
+                ( meteringAreas->tAlgoAreas[n].nHeight * METERING_AREAS_RANGE ) / mPreviewData->mHeight;
+
+        meteringAreas->tAlgoAreas[n].nPriority = mMeteringAreas.itemAt(n)->getWeight();
+
+        CAMHAL_LOGDB("Metering area %d : top = %d left = %d width = %d height = %d prio = %d",
+                     n, (int)meteringAreas->tAlgoAreas[n].nTop, (int)meteringAreas->tAlgoAreas[n].nLeft,
+                     (int)meteringAreas->tAlgoAreas[n].nWidth, (int)meteringAreas->tAlgoAreas[n].nHeight,
+                     (int)meteringAreas->tAlgoAreas[n].nPriority);
+
+        }
+
+    OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
+
+    sharedBuffer.nPortIndex = OMX_ALL;
+    sharedBuffer.nSharedBuffSize = areasSize;
+    sharedBuffer.pSharedBuff = (OMX_U8 *)camera_buffer_get_omx_ptr (&bufferlist[0]);
+
+    if ( NULL == sharedBuffer.pSharedBuff )
+        {
+        CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
+        ret = -ENOMEM;
+        goto EXIT;
+        }
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           (OMX_INDEXTYPE) OMX_TI_IndexConfigAlgoAreas, &sharedBuffer);
+
+    if ( OMX_ErrorNone != eError )
+        {
+        CAMHAL_LOGEB("Error while setting Focus Areas configuration 0x%x", eError);
+        ret = -EINVAL;
+        }
+    else
+        {
+        CAMHAL_LOGDA("Metering Areas SetConfig successfull.");
+        }
+
+ EXIT:
+    if (NULL != bufferlist)
+        {
+        mMemMgr.freeBufferList(bufferlist);
+        }
+
+    return ret;
+}
+
+//TI extensions for enable/disable algos
+//TI extensions for enable/disable algos
+/**
+ * Writes the logical inverse of 'data' to a boolean OMX config index.
+ * Used for TI "disable" indices where enabling a feature means clearing
+ * the disable flag. Any value other than OMX_TRUE/OMX_FALSE is rejected.
+ */
+status_t OMXCameraAdapter::setParameter3ABoolInvert(const OMX_INDEXTYPE omx_idx,
+                                                    const OMX_BOOL data, const char *msg)
+{
+    if ( OMX_TRUE == data ) {
+        return setParameter3ABool(omx_idx, OMX_FALSE, msg);
+    }
+    if ( OMX_FALSE == data ) {
+        return setParameter3ABool(omx_idx, OMX_TRUE, msg);
+    }
+    return BAD_VALUE;
+}
+
+/**
+ * Writes a boolean OMX configuration (OMX_CONFIG_BOOLEANTYPE) at the
+ * given index; 'msg' is only used to label the log output.
+ */
+status_t OMXCameraAdapter::setParameter3ABool(const OMX_INDEXTYPE omx_idx,
+                                              const OMX_BOOL data, const char *msg)
+{
+    OMX_CONFIG_BOOLEANTYPE boolCfg;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&boolCfg, OMX_CONFIG_BOOLEANTYPE);
+    boolCfg.bEnabled = data;
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           omx_idx,
+                           &boolCfg);
+    if ( OMX_ErrorNone == eError ) {
+        CAMHAL_LOGDB("%s configured successfully %d ", msg, boolCfg.bEnabled);
+    } else {
+        CAMHAL_LOGEB("Error while configuring %s error = 0x%x", msg, eError);
+    }
+
+    LOG_FUNCTION_NAME_EXIT
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+#ifndef MOTOROLA_CAMERA
+// Enables/disables use of an externally supplied gamma table
+// (OMX_TI_IndexConfigExternalGamma); no inversion needed for this index.
+status_t OMXCameraAdapter::setAlgoExternalGamma(Gen3A_settings& Gen3A)
+{
+    return setParameter3ABool((OMX_INDEXTYPE) OMX_TI_IndexConfigExternalGamma, Gen3A.AlgoExternalGamma, "External Gamma");
+}
+
+// Toggles the NSF1 noise filter; the OMX index is a "disable" flag, so the
+// requested enable state is inverted before being written.
+status_t OMXCameraAdapter::setAlgoNSF1(Gen3A_settings& Gen3A)
+{
+    return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableNSF1, Gen3A.AlgoNSF1, "NSF1");
+}
+
+// Toggles the NSF2 noise filter; the OMX index is a "disable" flag, so the
+// requested enable state is inverted before being written.
+status_t OMXCameraAdapter::setAlgoNSF2(Gen3A_settings& Gen3A)
+{
+    return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableNSF2, Gen3A.AlgoNSF2, "NSF2");
+}
+
+// Toggles the sharpening algorithm; the OMX index is a "disable" flag, so
+// the requested enable state is inverted before being written.
+status_t OMXCameraAdapter::setAlgoSharpening(Gen3A_settings& Gen3A)
+{
+    return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableSharpening, Gen3A.AlgoSharpening, "Sharpening");
+}
+
+// Toggles the three-linear color map conversion; the OMX index is a
+// "disable" flag, so the requested enable state is inverted.
+status_t OMXCameraAdapter::setAlgoThreeLinColorMap(Gen3A_settings& Gen3A)
+{
+    return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableThreeLinColorMap, Gen3A.AlgoThreeLinColorMap, "Color Conversion");
+}
+
+// Toggles green imbalance correction; the OMX index is a "disable" flag,
+// so the requested enable state is inverted before being written.
+status_t OMXCameraAdapter::setAlgoGIC(Gen3A_settings& Gen3A)
+{
+    return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableGIC, Gen3A.AlgoGIC, "Green Inballance Correction");
+}
+
+/**
+ * Downloads the gamma table stored in mParameters3A.mGammaTable to the
+ * OMX component through a page-aligned shared buffer
+ * (OMX_TI_IndexConfigGammaTable). The temporary buffer is always released
+ * at EXIT, on both success and failure paths.
+ *
+ * @param Gen3A 3A settings (the table itself is read from mParameters3A).
+ * @return NO_ERROR on success, NO_INIT for an invalid component state,
+ *         NO_MEMORY on allocation failure, BAD_VALUE if OMX_SetConfig fails.
+ */
+status_t OMXCameraAdapter::setGammaTable(Gen3A_settings& Gen3A)
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    CameraBuffer *bufferlist = NULL;
+    OMX_TI_CONFIG_GAMMATABLE_TYPE *gammaTable = NULL;
+    OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
+    int tblSize = 0;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        ret = NO_INIT;
+        goto EXIT;
+    }
+
+    // Round up to a whole number of 4 KiB pages for the shared buffer.
+    tblSize = ((sizeof(OMX_TI_CONFIG_GAMMATABLE_TYPE)+4095)/4096)*4096;
+    bufferlist = mMemMgr.allocateBufferList(0, 0, NULL, tblSize, 1);
+    if (NULL == bufferlist) {
+        CAMHAL_LOGEB("Error allocating buffer for gamma table");
+        ret = NO_MEMORY;
+        goto EXIT;
+    }
+    gammaTable = (OMX_TI_CONFIG_GAMMATABLE_TYPE *)bufferlist[0].mapped;
+    if (NULL == gammaTable) {
+        CAMHAL_LOGEB("Error allocating buffer for gamma table (wrong data pointer)");
+        ret = NO_MEMORY;
+        goto EXIT;
+    }
+
+    // Copy the cached table into the shared buffer for the component.
+    memcpy(gammaTable, &mParameters3A.mGammaTable, sizeof(OMX_TI_CONFIG_GAMMATABLE_TYPE));
+
+#ifdef CAMERAHAL_DEBUG
+    // Dump the full R/G/B offset:slope pairs for debugging builds only.
+    {
+        android::String8 DmpR;
+        android::String8 DmpG;
+        android::String8 DmpB;
+        for (unsigned int i=0; i<OMX_TI_GAMMATABLE_SIZE;i++) {
+            DmpR.appendFormat(" %d:%d;", (int)gammaTable->pR[i].nOffset, (int)(int)gammaTable->pR[i].nSlope);
+            DmpG.appendFormat(" %d:%d;", (int)gammaTable->pG[i].nOffset, (int)(int)gammaTable->pG[i].nSlope);
+            DmpB.appendFormat(" %d:%d;", (int)gammaTable->pB[i].nOffset, (int)(int)gammaTable->pB[i].nSlope);
+        }
+        CAMHAL_LOGE("Gamma table R:%s", DmpR.string());
+        CAMHAL_LOGE("Gamma table G:%s", DmpG.string());
+        CAMHAL_LOGE("Gamma table B:%s", DmpB.string());
+    }
+#endif
+
+    OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
+    sharedBuffer.nPortIndex = OMX_ALL;
+    sharedBuffer.nSharedBuffSize = sizeof(OMX_TI_CONFIG_GAMMATABLE_TYPE);
+    sharedBuffer.pSharedBuff = (OMX_U8 *)camera_buffer_get_omx_ptr (&bufferlist[0]);
+    if ( NULL == sharedBuffer.pSharedBuff ) {
+        CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
+        ret = NO_MEMORY;
+        goto EXIT;
+    }
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           (OMX_INDEXTYPE) OMX_TI_IndexConfigGammaTable, &sharedBuffer);
+    if ( OMX_ErrorNone != eError ) {
+        CAMHAL_LOGEB("Error while setting Gamma Table configuration 0x%x", eError);
+        ret = BAD_VALUE;
+        goto EXIT;
+    } else {
+        CAMHAL_LOGDA("Gamma Table SetConfig successfull.");
+    }
+
+EXIT:
+
+    // Common cleanup: the shared buffer is only needed for the config call.
+    if (NULL != bufferlist) {
+        mMemMgr.freeBufferList(bufferlist);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+#endif
+
+/**
+ * Applies all pending 3A settings recorded in the mPending3Asettings
+ * bitmask, clearing each bit as its setter runs.
+ *
+ * Scene mode has priority: if SetSceneMode is pending it is applied first
+ * (with EV compensation re-applied afterwards) and the function returns.
+ * While a non-manual scene is active, only a whitelisted subset of
+ * settings (EV, focus, locks, white balance, flash) may be applied.
+ *
+ * @param Gen3A the 3A settings snapshot to apply.
+ * @return NO_ERROR on success; otherwise the OR of the individual
+ *         setters' error codes.
+ */
+status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
+{
+    status_t ret = NO_ERROR;
+    unsigned int currSett; // 32 bit
+    int portIndex;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(m3ASettingsUpdateLock);
+
+    /*
+     * Scenes have a priority during the process
+     * of applying 3A related parameters.
+     * They can override pretty much all other 3A
+     * settings and similarly get overridden when
+     * for instance the focus mode gets switched.
+     * There is only one exception to this rule,
+     * the manual a.k.a. auto scene.
+     */
+    if (SetSceneMode & mPending3Asettings) {
+        mPending3Asettings &= ~SetSceneMode;
+        ret |= setScene(Gen3A);
+        // re-apply EV compensation after setting scene mode since it probably reset it
+        if(Gen3A.EVCompensation) {
+            setEVCompensation(Gen3A);
+        }
+        return ret;
+    } else if (OMX_Manual != Gen3A.SceneMode) {
+        // only certain settings are allowed when scene mode is set
+        mPending3Asettings &= (SetEVCompensation | SetFocus | SetWBLock |
+                               SetExpLock | SetWhiteBallance | SetFlash);
+        if ( mPending3Asettings == 0 ) return NO_ERROR;
+    }
+
+    // Walk every bit of the pending mask and dispatch to its setter.
+    for( currSett = 1; currSett < E3aSettingMax; currSett <<= 1)
+        {
+        if( currSett & mPending3Asettings )
+            {
+            switch( currSett )
+                {
+                case SetEVCompensation:
+                    {
+                    ret |= setEVCompensation(Gen3A);
+                    break;
+                    }
+
+                case SetWhiteBallance:
+                    {
+                    ret |= setWBMode(Gen3A);
+                    break;
+                    }
+
+                case SetFlicker:
+                    {
+                    ret |= setFlicker(Gen3A);
+                    break;
+                    }
+
+                case SetBrightness:
+                    {
+                    ret |= setBrightness(Gen3A);
+                    break;
+                    }
+
+                case SetContrast:
+                    {
+                    ret |= setContrast(Gen3A);
+                    break;
+                    }
+
+                case SetSharpness:
+                    {
+                    ret |= setSharpness(Gen3A);
+                    break;
+                    }
+
+                case SetSaturation:
+                    {
+                    ret |= setSaturation(Gen3A);
+                    break;
+                    }
+
+                case SetISO:
+                    {
+                    ret |= setISO(Gen3A);
+                    break;
+                    }
+
+                case SetEffect:
+                    {
+                    ret |= setEffect(Gen3A);
+                    break;
+                    }
+
+                case SetFocus:
+                    {
+                    ret |= setFocusMode(Gen3A);
+                    break;
+                    }
+
+                case SetExpMode:
+                    {
+                    ret |= setExposureMode(Gen3A);
+                    break;
+                    }
+
+                case SetManualExposure: {
+                    ret |= setManualExposureVal(Gen3A);
+                    break;
+                    }
+
+                case SetFlash:
+                    {
+                    ret |= setFlashMode(Gen3A);
+                    break;
+                    }
+
+                case SetExpLock:
+                    {
+                    ret |= setExposureLock(Gen3A);
+                    break;
+                    }
+
+                case SetWBLock:
+                    {
+                    ret |= setWhiteBalanceLock(Gen3A);
+                    break;
+                    }
+                case SetMeteringAreas:
+                    {
+                    ret |= setMeteringAreas(Gen3A);
+                    }
+                    break;
+
+#ifndef MOTOROLA_CAMERA
+                //TI extensions for enable/disable algos
+                case SetAlgoExternalGamma:
+                    {
+                    ret |= setAlgoExternalGamma(Gen3A);
+                    }
+                    break;
+
+                case SetAlgoNSF1:
+                    {
+                    ret |= setAlgoNSF1(Gen3A);
+                    }
+                    break;
+
+                case SetAlgoNSF2:
+                    {
+                    ret |= setAlgoNSF2(Gen3A);
+                    }
+                    break;
+
+                case SetAlgoSharpening:
+                    {
+                    ret |= setAlgoSharpening(Gen3A);
+                    }
+                    break;
+
+                case SetAlgoThreeLinColorMap:
+                    {
+                    ret |= setAlgoThreeLinColorMap(Gen3A);
+                    }
+                    break;
+
+                case SetAlgoGIC:
+                    {
+                    ret |= setAlgoGIC(Gen3A);
+                    }
+                    break;
+
+                case SetGammaTable:
+                    {
+                    ret |= setGammaTable(Gen3A);
+                    }
+                    break;
+#endif
+
+                default:
+                    CAMHAL_LOGEB("this setting (0x%x) is still not supported in CameraAdapter ",
+                                 currSett);
+                    break;
+                }
+                // Clear the bit regardless of the setter's outcome so a
+                // failing setting is not retried forever.
+                mPending3Asettings &= ~currSett;
+            }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXAlgo.cpp b/camera/OMXCameraAdapter/OMXAlgo.cpp
new file mode 100644
index 0000000..72ce1f6
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXAlgo.cpp
@@ -0,0 +1,1355 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXAlgo.cpp
+*
+* This file contains functionality for handling algorithm configurations.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+
+#undef TRUE
+
+namespace Ti {
+namespace Camera {
+
+status_t OMXCameraAdapter::setParametersAlgo(const android::CameraParameters &params,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+ const char *valstr = NULL;
+ const char *valManualStr = NULL;
+ const char *oldstr = NULL;
+ OMXCameraPortParameters *cap;
+ BrightnessMode gbce = BRIGHTNESS_OFF;
+ BrightnessMode glbce = BRIGHTNESS_OFF;
+
+ LOG_FUNCTION_NAME;
+
+ CaptureMode capMode;
+ CAMHAL_LOGDB("Capture mode %s", params.get(TICameraParameters::KEY_CAP_MODE));
+ if ( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL ) {
+ if (strcmp(valstr, (const char *) TICameraParameters::HIGH_PERFORMANCE_MODE) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::EXPOSURE_BRACKETING) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::ZOOM_BRACKETING) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_MODE) == 0) {
+ capMode = OMXCameraAdapter::HIGH_QUALITY;
+ mCapabilitiesOpMode = MODE_HIGH_QUALITY;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_ZSL_MODE) == 0) {
+ capMode = OMXCameraAdapter::HIGH_QUALITY_ZSL;
+ mCapabilitiesOpMode = MODE_ZEROSHUTTERLAG;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0) {
+ capMode = OMXCameraAdapter::VIDEO_MODE;
+ mCapabilitiesOpMode = MODE_VIDEO;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE_HQ) == 0) {
+ capMode = OMXCameraAdapter::VIDEO_MODE_HQ;
+ mCapabilitiesOpMode = MODE_VIDEO_HIGH_QUALITY;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::CP_CAM_MODE) == 0) {
+ capMode = OMXCameraAdapter::CP_CAM;
+ mCapabilitiesOpMode = MODE_CPCAM;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::TEMP_BRACKETING) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else {
+ capMode = OMXCameraAdapter::HIGH_QUALITY;
+ mCapabilitiesOpMode = MODE_HIGH_QUALITY;
+ }
+
+ } else {
+ capMode = OMXCameraAdapter::HIGH_QUALITY;
+ mCapabilitiesOpMode = MODE_HIGH_QUALITY;
+ }
+
+ if ( mSensorIndex == 2 ) {
+ mCapabilitiesOpMode = MODE_STEREO;
+ }
+
+ if ( mCapMode != capMode ) {
+ mCapMode = capMode;
+ mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetCapMode;
+ }
+
+ CAMHAL_LOGDB("Capture Mode set %d", mCapMode);
+
+ /// Configure IPP, LDCNSF, GBCE and GLBCE only in HQ mode
+ IPPMode ipp;
+ if((mCapMode == OMXCameraAdapter::HIGH_QUALITY) || (mCapMode == OMXCameraAdapter::HIGH_QUALITY_ZSL)
+ || (mCapMode == OMXCameraAdapter::VIDEO_MODE)
+ || (mCapMode == OMXCameraAdapter::CP_CAM))
+ {
+ if ( (valstr = params.get(TICameraParameters::KEY_IPP)) != NULL )
+ {
+ if (strcmp(valstr, (const char *) TICameraParameters::IPP_LDCNSF) == 0)
+ {
+ ipp = OMXCameraAdapter::IPP_LDCNSF;
+ }
+ else if (strcmp(valstr, (const char *) TICameraParameters::IPP_LDC) == 0)
+ {
+ ipp = OMXCameraAdapter::IPP_LDC;
+ }
+ else if (strcmp(valstr, (const char *) TICameraParameters::IPP_NSF) == 0)
+ {
+ ipp = OMXCameraAdapter::IPP_NSF;
+ }
+ else if (strcmp(valstr, (const char *) TICameraParameters::IPP_NONE) == 0)
+ {
+ ipp = OMXCameraAdapter::IPP_NONE;
+ }
+ else
+ {
+ ipp = OMXCameraAdapter::IPP_NONE;
+ }
+ }
+ else
+ {
+ ipp = OMXCameraAdapter::IPP_NONE;
+ }
+
+ CAMHAL_LOGVB("IPP Mode set %d", ipp);
+
+ if (((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) ) {
+ if (strcmp(valstr, android::CameraParameters::TRUE ) == 0) {
+ gbce = BRIGHTNESS_ON;
+ } else {
+ gbce = BRIGHTNESS_OFF;
+ }
+
+ if ( gbce != mGBCE ) {
+ mGBCE = gbce;
+ setGBCE(mGBCE);
+ }
+
+ } else if(mFirstTimeInit) {
+ //Disable GBCE by default
+ setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
+ }
+
+ if ( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL ) {
+
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ glbce = BRIGHTNESS_ON;
+ } else {
+ glbce = BRIGHTNESS_OFF;
+ }
+
+ if ( glbce != mGLBCE ) {
+ mGLBCE = glbce;
+ setGLBCE(mGLBCE);
+ }
+
+ } else if(mFirstTimeInit) {
+ //Disable GLBCE by default
+ setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
+ }
+
+ } else {
+ ipp = OMXCameraAdapter::IPP_NONE;
+ }
+
+ if ( mIPP != ipp )
+ {
+ mIPP = ipp;
+ mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetLDC;
+ mPendingPreviewSettings |= SetNSF;
+ }
+
+ ///Set VNF Configuration
+ bool vnfEnabled = false;
+ valstr = params.get(TICameraParameters::KEY_VNF);
+ if (valstr && strcmp(valstr, android::CameraParameters::TRUE) == 0)
+ {
+ CAMHAL_LOGDA("VNF Enabled");
+ vnfEnabled = true;
+ }
+ else
+ {
+ CAMHAL_LOGDA("VNF Disabled");
+ vnfEnabled = false;
+ }
+
+ if ( mVnfEnabled != vnfEnabled )
+ {
+ mVnfEnabled = vnfEnabled;
+ mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetVNF;
+ }
+
+ ///Set VSTAB Configuration
+ bool vstabEnabled = false;
+ valstr = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ if (valstr && strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDA("VSTAB Enabled");
+ vstabEnabled = true;
+ }
+ else
+ {
+ CAMHAL_LOGDA("VSTAB Disabled");
+ vstabEnabled = false;
+ }
+
+ if ( mVstabEnabled != vstabEnabled )
+ {
+ mVstabEnabled = vstabEnabled;
+ mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetVSTAB;
+ }
+
+ //A work-around for a failing call to OMX flush buffers
+ if ( ( capMode = OMXCameraAdapter::VIDEO_MODE ) &&
+ ( mVstabEnabled ) )
+ {
+ mOMXStateSwitch = true;
+ }
+
+#ifdef OMAP_ENHANCEMENT
+
+ //Set Auto Convergence Mode
+ valstr = params.get((const char *) TICameraParameters::KEY_AUTOCONVERGENCE_MODE);
+ valManualStr = params.get(TICameraParameters::KEY_MANUAL_CONVERGENCE);
+
+ cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+ if (cap->mFrameLayoutType != OMX_TI_StereoFrameLayout2D) {
+ if ((valstr != NULL) || (valManualStr != NULL)) {
+ setAutoConvergence(valstr, valManualStr, params);
+ if (valstr != NULL) {
+ CAMHAL_LOGDB("AutoConvergenceMode %s", valstr);
+ }
+ if (valManualStr != NULL) {
+ CAMHAL_LOGDB("Manual Convergence %s", valManualStr);
+ }
+ }
+
+ //Set Mechanical Misalignment Correction
+ valstr = params.get(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION);
+ if ( valstr != NULL ) {
+ setMechanicalMisalignmentCorrection(strcmp(valstr, android::CameraParameters::TRUE) == 0);
+ CAMHAL_LOGDB("Mechanical Misalignment Correction %s", valstr);
+ }
+ }
+
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+// Set AutoConvergence
+status_t OMXCameraAdapter::setAutoConvergence(const char *pValstr, const char *pValManualstr, const android::CameraParameters &params)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_CONVERGENCETYPE ACParams;
+ const char *str = NULL;
+ android::Vector<android::sp<CameraArea> > tempAreas;
+ int mode;
+ int changed = 0;
+
+ LOG_FUNCTION_NAME;
+
+ if ( pValManualstr != NULL ) {
+ OMX_S32 manualConvergence = (OMX_S32)strtol(pValManualstr ,0 ,0);
+
+ if (mManualConv != manualConvergence) {
+ mManualConv = manualConvergence;
+ changed = 1;
+ }
+ }
+
+ if ( pValstr != NULL ) {
+ mode = getLUTvalue_HALtoOMX(pValstr, mAutoConvergenceLUT);
+
+ if ( NAME_NOT_FOUND == mode ) {
+ CAMHAL_LOGEB("Wrong convergence mode: %s", pValstr);
+ LOG_FUNCTION_NAME_EXIT;
+ return mode;
+ }
+
+ if ( mAutoConv != static_cast<OMX_TI_AUTOCONVERGENCEMODETYPE> (mode) ) {
+ mAutoConv = static_cast<OMX_TI_AUTOCONVERGENCEMODETYPE> (mode);
+ changed = 1;
+ }
+ }
+
+ if ( OMX_TI_AutoConvergenceModeFocusFaceTouch == mAutoConv ) {
+ android::AutoMutex lock(mTouchAreasLock);
+
+ str = params.get(android::CameraParameters::KEY_METERING_AREAS);
+
+ if ( NULL != str ) {
+ ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
+ } else {
+ CAMHAL_LOGEB("Touch areas not received in %s",
+ android::CameraParameters::KEY_METERING_AREAS);
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ if ( CameraArea::areAreasDifferent(mTouchAreas, tempAreas) ) {
+ mTouchAreas.clear();
+ mTouchAreas = tempAreas;
+ changed = 1;
+ }
+ }
+
+ if (!changed) {
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ OMXCameraPortParameters * mPreviewData;
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+ ACParams.nSize = (OMX_U32)sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
+ ACParams.nVersion = mLocalVersionParam;
+ ACParams.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
+ &ACParams);
+
+ ACParams.eACMode = mAutoConv;
+ ACParams.nManualConverence = mManualConv;
+
+ if (1 == mTouchAreas.size()) {
+ int widthDivisor = 1;
+ int heightDivisor = 1;
+
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottom) {
+ heightDivisor = 2;
+ }
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutLeftRight) {
+ widthDivisor = 2;
+ }
+
+ // transform the coordinates to 3A-type coordinates
+ mTouchAreas.itemAt(0)->transfrom((size_t)mPreviewData->mWidth/widthDivisor,
+ (size_t)mPreviewData->mHeight/heightDivisor,
+ (size_t&) ACParams.nACProcWinStartY,
+ (size_t&) ACParams.nACProcWinStartX,
+ (size_t&) ACParams.nACProcWinWidth,
+ (size_t&) ACParams.nACProcWinHeight);
+ }
+
+ CAMHAL_LOGDB("nSize %d", (int)ACParams.nSize);
+ CAMHAL_LOGDB("nPortIndex %d", (int)ACParams.nPortIndex);
+ CAMHAL_LOGDB("nManualConverence %d", (int)ACParams.nManualConverence);
+ CAMHAL_LOGDB("eACMode %d", (int)ACParams.eACMode);
+ CAMHAL_LOGDB("nACProcWinStartX %d", (int)ACParams.nACProcWinStartX);
+ CAMHAL_LOGDB("nACProcWinStartY %d", (int)ACParams.nACProcWinStartY);
+ CAMHAL_LOGDB("nACProcWinWidth %d", (int)ACParams.nACProcWinWidth);
+ CAMHAL_LOGDB("nACProcWinHeight %d", (int)ACParams.nACProcWinHeight);
+ CAMHAL_LOGDB("bACStatus %d", (int)ACParams.bACStatus);
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
+ &ACParams);
+
+ if ( eError != OMX_ErrorNone ) {
+ CAMHAL_LOGEB("Error while setting AutoConvergence 0x%x", eError);
+ ret = BAD_VALUE;
+ } else {
+ CAMHAL_LOGDA("AutoConvergence applied successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::enableVideoNoiseFilter(bool enable)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_VIDEONOISEFILTERTYPE vnfCfg;
+
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&vnfCfg, OMX_PARAM_VIDEONOISEFILTERTYPE);
+
+ if ( enable )
+ {
+ CAMHAL_LOGDA("VNF is enabled");
+ vnfCfg.eMode = OMX_VideoNoiseFilterModeOn;
+ }
+ else
+ {
+ CAMHAL_LOGDA("VNF is disabled");
+ vnfCfg.eMode = OMX_VideoNoiseFilterModeOff;
+ }
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexParamVideoNoiseFilter,
+ &vnfCfg);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring video noise filter 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Video noise filter is configured successfully");
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
// Enables or disables video stabilization (VSTAB). Two OMX knobs are
// touched: the component-wide frame stabilisation parameter
// (OMX_IndexParamFrameStabilisation) and the per-port frame stabilisation
// config (OMX_IndexConfigCommonFrameStabilisation) on the preview port.
// Returns NO_ERROR on success, -1 if any OMX call fails.
status_t OMXCameraAdapter::enableVideoStabilization(bool enable)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_CONFIG_FRAMESTABTYPE frameStabCfg;


    LOG_FUNCTION_NAME;

    // Step 1: component-wide VSTAB parameter.
    if ( NO_ERROR == ret ) {
        OMX_CONFIG_BOOLEANTYPE vstabp;
        OMX_INIT_STRUCT_PTR (&vstabp, OMX_CONFIG_BOOLEANTYPE);
        if(enable) {
            vstabp.bEnabled = OMX_TRUE;
        } else {
            vstabp.bEnabled = OMX_FALSE;
        }

        eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
                                  (OMX_INDEXTYPE)OMX_IndexParamFrameStabilisation,
                                  &vstabp);
        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while configuring video stabilization param 0x%x", eError);
            ret = -1;
        } else {
            CAMHAL_LOGDA("Video stabilization param configured successfully");
        }

    }

    // Step 2: per-port VSTAB config on the preview port. Skipped entirely
    // if step 1 failed.
    if ( NO_ERROR == ret ) {

        OMX_INIT_STRUCT_PTR (&frameStabCfg, OMX_CONFIG_FRAMESTABTYPE);


        eError =  OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
                                ( OMX_INDEXTYPE ) OMX_IndexConfigCommonFrameStabilisation,
                                &frameStabCfg);
        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while getting video stabilization mode 0x%x",
                         (unsigned int)eError);
            ret = -1;
        }
        // NOTE(review): even when the GetConfig above fails, execution falls
        // through and still attempts the SetConfig below with a freshly
        // initialized struct — presumably intentional best-effort; confirm.

        CAMHAL_LOGDB("VSTAB Port Index = %d", (int)frameStabCfg.nPortIndex);

        frameStabCfg.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
        if ( enable ) {
            CAMHAL_LOGDA("VSTAB is enabled");
            frameStabCfg.bStab = OMX_TRUE;
        } else {
            CAMHAL_LOGDA("VSTAB is disabled");
            frameStabCfg.bStab = OMX_FALSE;

        }

        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                               ( OMX_INDEXTYPE ) OMX_IndexConfigCommonFrameStabilisation,
                               &frameStabCfg);
        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while configuring video stabilization mode 0x%x", eError);
            ret = -1;
        } else {
            CAMHAL_LOGDA("Video stabilization mode configured successfully");
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+status_t OMXCameraAdapter::setGBCE(OMXCameraAdapter::BrightnessMode mode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE bControl;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&bControl, OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE);
+
+ bControl.nPortIndex = OMX_ALL;
+
+ switch ( mode )
+ {
+ case OMXCameraAdapter::BRIGHTNESS_ON:
+ {
+ bControl.eControl = OMX_TI_BceModeOn;
+ break;
+ }
+ case OMXCameraAdapter::BRIGHTNESS_AUTO:
+ {
+ bControl.eControl = OMX_TI_BceModeAuto;
+ break;
+ }
+ case OMXCameraAdapter::BRIGHTNESS_OFF:
+ default:
+ {
+ bControl.eControl = OMX_TI_BceModeOff;
+ break;
+ }
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigGlobalBrightnessContrastEnhance,
+ &bControl);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while setting GBCE 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("GBCE configured successfully 0x%x", mode);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
// Configures local brightness/contrast enhancement (GLBCE) on all ports.
// Mirrors setGBCE() but targets the *local* enhancement index.
// NOTE(review): like setGBCE(), an OMX_SetConfig failure here is logged but
// not reflected in the return value — callers always see NO_ERROR unless
// the component was in the invalid state.
status_t OMXCameraAdapter::setGLBCE(OMXCameraAdapter::BrightnessMode mode)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE bControl;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState ) {
        CAMHAL_LOGEA("OMX component is in invalid state");
        ret = -EINVAL;
    }

    if ( NO_ERROR == ret ) {
        OMX_INIT_STRUCT_PTR (&bControl, OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE);
        bControl.nPortIndex = OMX_ALL;

        // Unknown modes degrade to OFF.
        switch ( mode ) {
        case OMXCameraAdapter::BRIGHTNESS_ON:
        {
            bControl.eControl = OMX_TI_BceModeOn;
            break;
        }
        case OMXCameraAdapter::BRIGHTNESS_AUTO:
        {
            bControl.eControl = OMX_TI_BceModeAuto;
            break;
        }
        case OMXCameraAdapter::BRIGHTNESS_OFF:
        default:
        {
            bControl.eControl = OMX_TI_BceModeOff;
            break;
        }
        }

        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                               ( OMX_INDEXTYPE ) OMX_TI_IndexConfigLocalBrightnessContrastEnhance,
                               &bControl);
        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while configure GLBCE 0x%x", eError);
        } else {
            CAMHAL_LOGDA("GLBCE configured successfully");
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
// Programs the OMX camera operating mode that matches the adapter-level
// capture mode, then applies mode-dependent configs: single-preview mode
// (CP-CAM only) and chromatic aberration correction (always disabled here).
// Returns NO_ERROR, BAD_VALUE for an unknown mode, or a translated OMX error.
status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_CONFIG_CAMOPERATINGMODETYPE camMode;
    OMX_CONFIG_BOOLEANTYPE bCAC;
    OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE singlePrevMode;

    LOG_FUNCTION_NAME;

    //CAC is disabled by default
    OMX_INIT_STRUCT_PTR (&bCAC, OMX_CONFIG_BOOLEANTYPE);
    OMX_INIT_STRUCT_PTR (&singlePrevMode, OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE);
    bCAC.bEnabled = OMX_FALSE;

    if ( NO_ERROR == ret ) {

        // Map the adapter capture mode onto the OMX operating mode. The
        // stereo sensor overrides the requested mode.
        OMX_INIT_STRUCT_PTR (&camMode, OMX_CONFIG_CAMOPERATINGMODETYPE);
        if ( mSensorIndex == OMX_TI_StereoSensor ) {
            if ( OMXCameraAdapter::VIDEO_MODE == mode ) {
                CAMHAL_LOGDA("Camera mode: STEREO VIDEO");
                camMode.eCamOperatingMode = OMX_TI_StereoVideo;
            } else {
                CAMHAL_LOGDA("Camera mode: STEREO");
                camMode.eCamOperatingMode = OMX_CaptureStereoImageCapture;
            }
        } else if ( OMXCameraAdapter::HIGH_SPEED == mode ) {
            CAMHAL_LOGDA("Camera mode: HIGH SPEED");
            camMode.eCamOperatingMode = OMX_CaptureImageHighSpeedTemporalBracketing;
        } else if ( OMXCameraAdapter::CP_CAM == mode ) {
            CAMHAL_LOGDA("Camera mode: CP CAM");
            camMode.eCamOperatingMode = OMX_TI_CPCam;
            // TODO(XXX): Hardcode for now until we implement re-proc pipe
            singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
        } else if( OMXCameraAdapter::HIGH_QUALITY == mode ) {
            CAMHAL_LOGDA("Camera mode: HIGH QUALITY");
            camMode.eCamOperatingMode = OMX_CaptureImageProfileBase;
        } else if( OMXCameraAdapter::HIGH_QUALITY_ZSL== mode ) {
            const char* valstr = NULL;  // (unused in this branch)
            CAMHAL_LOGDA("Camera mode: HIGH QUALITY_ZSL");
            camMode.eCamOperatingMode = OMX_TI_CaptureImageProfileZeroShutterLag;

#ifdef CAMERAHAL_TUNA
            // NOTE(review): zslHistoryLen is not declared in this function and
            // is never passed to OMX here — presumably a class member applied
            // elsewhere in the tuna build; confirm it is declared/consumed
            // when CAMERAHAL_TUNA is defined.
            if ( !mIternalRecordingHint ) {
                zslHistoryLen.nHistoryLen = 5;
            }
#endif

        } else if( OMXCameraAdapter::VIDEO_MODE == mode ) {
            CAMHAL_LOGDA("Camera mode: VIDEO MODE");
            camMode.eCamOperatingMode = OMX_CaptureVideo;
        } else if( OMXCameraAdapter::VIDEO_MODE_HQ == mode ) {
            CAMHAL_LOGDA("Camera mode: VIDEO MODE HQ");
            camMode.eCamOperatingMode = OMX_CaptureHighQualityVideo;
        } else {
            CAMHAL_LOGEA("Camera mode: INVALID mode passed!");
            return BAD_VALUE;
        }

        if( NO_ERROR == ret ) {
            eError =  OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
                                       ( OMX_INDEXTYPE ) OMX_IndexCameraOperatingMode,
                                       &camMode);
            if ( OMX_ErrorNone != eError ) {
                CAMHAL_LOGEB("Error while configuring camera mode 0x%x", eError);
                ret = Utils::ErrorUtils::omxToAndroidError(eError);
            } else {
                CAMHAL_LOGDA("Camera mode configured successfully");
            }
        }

        if((NO_ERROR == ret) && (OMXCameraAdapter::CP_CAM == mode)) {
            //Configure Single Preview Mode
            eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                    ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSinglePreviewMode,
                                    &singlePrevMode);
            if ( OMX_ErrorNone != eError ) {
                CAMHAL_LOGEB("Error while configuring single preview mode 0x%x", eError);
                ret = Utils::ErrorUtils::omxToAndroidError(eError);
            } else {
                CAMHAL_LOGDA("single preview mode configured successfully");
            }
        }


        if( NO_ERROR == ret ) {
            //Configure CAC
            eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                    ( OMX_INDEXTYPE ) OMX_IndexConfigChromaticAberrationCorrection,
                                    &bCAC);
            if ( OMX_ErrorNone != eError ) {
                CAMHAL_LOGEB("Error while configuring CAC 0x%x", eError);
                ret = Utils::ErrorUtils::omxToAndroidError(eError);
            } else {
                CAMHAL_LOGDA("CAC configured successfully");
            }
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+status_t OMXCameraAdapter::setLDC(OMXCameraAdapter::IPPMode mode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_BOOLEANTYPE bOMX;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateLoaded != mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is not in loaded state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
+
+ switch ( mode )
+ {
+ case OMXCameraAdapter::IPP_LDCNSF:
+ case OMXCameraAdapter::IPP_LDC:
+ {
+ bOMX.bEnabled = OMX_TRUE;
+ break;
+ }
+ case OMXCameraAdapter::IPP_NONE:
+ case OMXCameraAdapter::IPP_NSF:
+ default:
+ {
+ bOMX.bEnabled = OMX_FALSE;
+ break;
+ }
+ }
+
+ CAMHAL_LOGVB("Configuring LDC mode 0x%x", bOMX.bEnabled);
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexParamLensDistortionCorrection,
+ &bOMX);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEA("Error while setting LDC");
+ ret = -1;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setNSF(OMXCameraAdapter::IPPMode mode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_ISONOISEFILTERTYPE nsf;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateLoaded != mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is not in loaded state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&nsf, OMX_PARAM_ISONOISEFILTERTYPE);
+ nsf.nPortIndex = OMX_ALL;
+
+ switch ( mode )
+ {
+ case OMXCameraAdapter::IPP_LDCNSF:
+ case OMXCameraAdapter::IPP_NSF:
+ {
+ nsf.eMode = OMX_ISONoiseFilterModeOn;
+ break;
+ }
+ case OMXCameraAdapter::IPP_LDC:
+ case OMXCameraAdapter::IPP_NONE:
+ default:
+ {
+ nsf.eMode = OMX_ISONoiseFilterModeOff;
+ break;
+ }
+ }
+
+ CAMHAL_LOGVB("Configuring NSF mode 0x%x", nsf.eMode);
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_IndexParamHighISONoiseFiler,
+ &nsf);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEA("Error while setting NSF");
+ ret = -1;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setImageQuality(unsigned int quality)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_PARAM_QFACTORTYPE jpegQualityConf;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT(jpegQualityConf, OMX_IMAGE_PARAM_QFACTORTYPE);
+ jpegQualityConf.nQFactor = quality;
+ jpegQualityConf.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamQFactor,
+ &jpegQualityConf);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring jpeg Quality 0x%x", eError);
+ ret = -1;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
// Configures the EXIF thumbnail (size, quality, compression) on the image
// capture port. A zero width or height requests an absent EXIF thumbnail
// (CTS requirement), which is expressed by selecting the smallest supported
// resolution with OMX_IMAGE_CodingUnused.
//
// @param width   requested thumbnail width (0 => no thumbnail)
// @param height  requested thumbnail height (0 => no thumbnail)
// @param quality JPEG quality for the thumbnail (used only when non-zero size)
// @return NO_ERROR on success, -EINVAL in invalid component state, -1 on an
//         OMX failure.
status_t OMXCameraAdapter::setThumbnailParams(unsigned int width,
                                              unsigned int height,
                                              unsigned int quality)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_PARAM_THUMBNAILTYPE thumbConf;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState ) {
        CAMHAL_LOGEA("OMX component is in invalid state");
        ret = -EINVAL;
    }

    if ( NO_ERROR == ret ) {
        OMX_INIT_STRUCT(thumbConf, OMX_PARAM_THUMBNAILTYPE);
        thumbConf.nPortIndex = mCameraAdapterParameters.mImagePortIndex;

        // Fetch current settings first: the zero-size branch below keeps the
        // fetched nQuality untouched.
        eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
                                  ( OMX_INDEXTYPE ) OMX_IndexParamThumbnail,
                                  &thumbConf);
        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while retrieving thumbnail size 0x%x", eError);
            ret = -1;
        }
        // NOTE(review): even after a failed GetParameter (ret == -1), the code
        // proceeds to SetParameter below with a zero-initialized struct —
        // presumably best-effort; confirm this is intended.

        //CTS Requirement: width or height equal to zero should
        //result in absent EXIF thumbnail
        if ( ( 0 == width ) || ( 0 == height ) ) {
            thumbConf.nWidth = mThumbRes[0].width;
            thumbConf.nHeight = mThumbRes[0].height;
            thumbConf.eCompressionFormat = OMX_IMAGE_CodingUnused;
        } else {
            thumbConf.nWidth = width;
            thumbConf.nHeight = height;
            thumbConf.nQuality = quality;
            thumbConf.eCompressionFormat = OMX_IMAGE_CodingJPEG;
        }

        CAMHAL_LOGDB("Thumbnail width = %d, Thumbnail Height = %d", width, height);

        eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
                                  ( OMX_INDEXTYPE ) OMX_IndexParamThumbnail,
                                  &thumbConf);
        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while configuring thumbnail size 0x%x", eError);
            ret = -1;
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
// Enables or disables face- or region-priority for a bitmask of 3A
// algorithms (AWB / AE / AF). The cached member struct (mFacePriority or
// mRegionPriority) is mutated first and then pushed to the component, so
// flags set on earlier calls persist across calls.
//
// @param priority FACE_PRIORITY or REGION_PRIORITY; any other value is a
//                 silent no-op (returns OK).
// @param algo     bitmask of WHITE_BALANCE_ALGO | EXPOSURE_ALGO | FOCUS_ALGO
// @param enable   desired state for the selected algorithms
// @return the OMX error translated to an Android status code
status_t OMXCameraAdapter::setAlgoPriority(AlgoPriority priority,
                                           Algorithm3A algo,
                                           bool enable)
{
    OMX_ERRORTYPE eError = OMX_ErrorNone;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState ) {
        CAMHAL_LOGEA("OMX component is in invalid state");
        return NO_INIT;
    }

    if ( FACE_PRIORITY == priority ) {

        if ( algo & WHITE_BALANCE_ALGO ) {
            if ( enable ) {
                mFacePriority.bAwbFaceEnable =  OMX_TRUE;
            } else {
                mFacePriority.bAwbFaceEnable =  OMX_FALSE;
            }
        }

        if ( algo & EXPOSURE_ALGO ) {
            if ( enable ) {
                mFacePriority.bAeFaceEnable =  OMX_TRUE;
            } else {
                mFacePriority.bAeFaceEnable =  OMX_FALSE;
            }
        }

        if ( algo & FOCUS_ALGO ) {
            if ( enable ) {
                mFacePriority.bAfFaceEnable =  OMX_TRUE;
            } else {
                mFacePriority.bAfFaceEnable =  OMX_FALSE;
            }
        }

        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                               ( OMX_INDEXTYPE ) OMX_TI_IndexConfigFacePriority3a,
                               &mFacePriority);
        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while configuring face priority 0x%x", eError);
        } else {
            CAMHAL_LOGDB("Face priority for algorithms set successfully 0x%x, 0x%x, 0x%x",
                         mFacePriority.bAfFaceEnable,
                         mFacePriority.bAeFaceEnable,
                         mFacePriority.bAwbFaceEnable);
        }

    } else if ( REGION_PRIORITY == priority ) {

        if ( algo & WHITE_BALANCE_ALGO ) {
            if ( enable ) {
                mRegionPriority.bAwbRegionEnable= OMX_TRUE;
            } else {
                mRegionPriority.bAwbRegionEnable = OMX_FALSE;
            }
        }

        if ( algo & EXPOSURE_ALGO ) {
            if ( enable ) {
                mRegionPriority.bAeRegionEnable = OMX_TRUE;
            } else {
                mRegionPriority.bAeRegionEnable = OMX_FALSE;
            }
        }

        if ( algo & FOCUS_ALGO ) {
            if ( enable ) {
                mRegionPriority.bAfRegionEnable = OMX_TRUE;
            } else {
                mRegionPriority.bAfRegionEnable = OMX_FALSE;
            }
        }

        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                               ( OMX_INDEXTYPE ) OMX_TI_IndexConfigRegionPriority3a,
                               &mRegionPriority);
        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while configuring region priority 0x%x", eError);
        } else {
            CAMHAL_LOGDB("Region priority for algorithms set successfully 0x%x, 0x%x, 0x%x",
                         mRegionPriority.bAfRegionEnable,
                         mRegionPriority.bAeRegionEnable,
                         mRegionPriority.bAwbRegionEnable);
        }

    }

    LOG_FUNCTION_NAME_EXIT;

    return Utils::ErrorUtils::omxToAndroidError(eError);
}
+
+status_t OMXCameraAdapter::setPictureRotation(unsigned int degree)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_ROTATIONTYPE rotation;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -1;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT(rotation, OMX_CONFIG_ROTATIONTYPE);
+ rotation.nRotation = degree;
+ rotation.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonRotate,
+ &rotation);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring rotation 0x%x", eError);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
// Programs the sensor rotation on the preview port. Order matters here:
// for heights >= 720 the port must already be configured with *some*
// resolution before rotation can be set, so a temporary VGA format is
// applied first, then the rotation, then the real resolution.
//
// @param degree sensor mount rotation to apply
// @return NO_ERROR on success or the first failing status from setFormat /
//         setSensorQuirks.
status_t OMXCameraAdapter::setSensorOrientation(unsigned int degree)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_CONFIG_ROTATIONTYPE sensorOrientation;
    int tmpHeight, tmpWidth;
    OMXCameraPortParameters *mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];

    LOG_FUNCTION_NAME;
    if ( OMX_StateInvalid == mComponentState ) {
        CAMHAL_LOGEA("OMX component is in invalid state");
        ret = -1;
    }

    /* Set Temproary Port resolution.
     * For resolution with height >= 720,
     * resolution cannot be set without configuring orientation.
     * So we first set a temp resolution. We have used VGA
     */
    if ( mPreviewData->mHeight >= 720 ) {
        // Stash the real resolution, configure VGA, then restore the stashed
        // values (they are re-applied to the port further below).
        tmpHeight = mPreviewData->mHeight;
        tmpWidth = mPreviewData->mWidth;
        mPreviewData->mWidth = 640;
        mPreviewData->mHeight = 480;

        ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData);
        if ( NO_ERROR != ret ) {
            CAMHAL_LOGEB("Error while configuring format 0x%x", ret);
            return ret;
        }

        mPreviewData->mWidth = tmpWidth;
        mPreviewData->mHeight = tmpHeight;
        mPreviewPortInitialized = true;
    }
    else if (!mPreviewPortInitialized) {
        // Smaller resolutions still need the port configured once before
        // rotation can be applied.
        ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData);
        if ( NO_ERROR != ret ) {
            CAMHAL_LOGEB("Error while configuring format 0x%x", ret);
            return ret;
        }
        mPreviewPortInitialized = true;
    }

    /* Now set Required Orientation*/
    if ( NO_ERROR == ret ) {
        OMX_INIT_STRUCT(sensorOrientation, OMX_CONFIG_ROTATIONTYPE);
        sensorOrientation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
        sensorOrientation.nRotation = degree;
        // NOTE(review): an OMX_SetConfig failure here is logged but not
        // propagated; execution continues to the final format step.
        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                               OMX_IndexConfigCommonRotate,
                               &sensorOrientation);
        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while configuring rotation 0x%x", eError);
        }
        CAMHAL_LOGVB(" Currently Sensor Orientation is set to : %d",
                     ( unsigned int ) sensorOrientation.nRotation);
        CAMHAL_LOGVB(" Sensor Configured for Port : %d",
                     ( unsigned int ) sensorOrientation.nPortIndex);
    }

    /* Now set the required resolution as requested */
    if ( NO_ERROR == ret ) {
        bool portConfigured = false;
        // setSensorQuirks may fully configure the port itself for some
        // sensor/rotation combinations; portConfigured reports that.
        ret = setSensorQuirks(degree,
                              mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex],
                              portConfigured);
        if ( NO_ERROR != ret ) {
            CAMHAL_LOGEB("Error while configuring setSensorQuirks 0x%x", ret);
            return ret;
        }

        if ( !portConfigured ) {
            ret = setFormat (mCameraAdapterParameters.mPrevPortIndex,
                             mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]);
            if ( NO_ERROR != ret ) {
                CAMHAL_LOGEB("Error while configuring format 0x%x", ret);
                return ret;
            }

            // Another WA: Setting the port definition will reset the VFR
            // configuration.
            setVFramerate(mPreviewData->mMinFrameRate,
                          mPreviewData->mMaxFrameRate);
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
// Configures the variable frame rate (VFR) range. Frame rates are plain
// fps values and are converted to OMX Q16 fixed point (<<16) before being
// sent. Requires that the port format has already been set (mSetFormatDone),
// otherwise returns NO_INIT.
//
// @param minFrameRate lower bound of the VFR range, in fps
// @param maxFrameRate upper bound of the VFR range, in fps
// @return NO_ERROR on success, -EINVAL in invalid component state, NO_INIT
//         before setFormat, -1 if the OMX call fails.
status_t OMXCameraAdapter::setVFramerate(OMX_U32 minFrameRate, OMX_U32 maxFrameRate)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_TI_CONFIG_VARFRMRANGETYPE vfr;
    OMXCameraPortParameters * mPreviewData =
        &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState ) {
        CAMHAL_LOGEA("OMX component is in invalid state");
        ret = -EINVAL;
    }

    if ( !mSetFormatDone ) {
        return NO_INIT;
    }

    if ( NO_ERROR == ret ) {
        OMX_INIT_STRUCT_PTR (&vfr, OMX_TI_CONFIG_VARFRMRANGETYPE);

        // Q16 fixed-point conversion expected by the OMX interface.
        vfr.xMin = minFrameRate<<16;
        vfr.xMax = maxFrameRate<<16;

        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                               (OMX_INDEXTYPE)OMX_TI_IndexConfigVarFrmRange,
                               &vfr);
        if(OMX_ErrorNone != eError) {
            CAMHAL_LOGEB("Error while setting VFR min = %d, max = %d, error = 0x%x",
                         ( unsigned int ) minFrameRate,
                         ( unsigned int ) maxFrameRate,
                         eError);
            ret = -1;
        } else {
            CAMHAL_LOGDB("VFR Configured Successfully [%d:%d]",
                         ( unsigned int ) minFrameRate,
                         ( unsigned int ) maxFrameRate);
        }
    }

    // NOTE(review): no LOG_FUNCTION_NAME_EXIT here, unlike the sibling
    // setters — exit tracing for this function is absent by construction.
    return ret;
}
+
+status_t OMXCameraAdapter::setMechanicalMisalignmentCorrection(const bool enable)
+{
+ status_t ret = NO_ERROR;
+#ifndef MOTOROLA_CAMERA
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_MM mm;
+
+ LOG_FUNCTION_NAME;
+
+ mm.nVersion = mLocalVersionParam;
+ mm.nSize = sizeof(OMX_TI_CONFIG_MM);
+ mm.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ mm.bMM = enable ? OMX_TRUE : OMX_FALSE;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigMechanicalMisalignment,
+ &mm);
+
+ if(OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while enabling mechanical misalignment correction. error = 0x%x", eError);
+ ret = -1;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+#endif
+
+ return ret;
+}
+
+#ifdef MOTOROLA_CAMERA
// This function is used when setting LedFlash Intensity
// (Motorola-only). Reads the current LED intensity config, overwrites the
// flash intensity field with the requested value and writes it back,
// preserving the torch intensity field.
//
// @param nLedFlashIntensP requested flash LED intensity
// @return NO_ERROR on success, -1 on invalid component state or OMX failure
status_t OMXCameraAdapter::setLedFlash(int nLedFlashIntensP)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_CONFIG_LEDINTESITY LedIntensity;

    LOG_FUNCTION_NAME

    if ( OMX_StateInvalid == mComponentState ) {
        CAMHAL_LOGEA("OMX component is in invalid state");
        ret = -1;
    }

    if ( NO_ERROR == ret ) {
        OMX_INIT_STRUCT_PTR (&LedIntensity, OMX_CONFIG_LEDINTESITY);

        // Read-modify-write so the torch intensity field is kept intact.
        eError =  OMX_GetConfig(mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE) OMX_IndexConfigLedIntensity, &LedIntensity);
        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("OMXGetConfig() returned error 0x%x on index==%08x", eError, OMX_IndexConfigLedIntensity);
            ret = -1;
        }
        if (NO_ERROR == ret) {
            // NOTE(review): logged at error level although purely informational.
            CAMHAL_LOGEB("old LedIntensity.nLedFlashIntens is: %d ", LedIntensity.nLedFlashIntens);

            LedIntensity.nLedFlashIntens = (OMX_U32)nLedFlashIntensP;
            CAMHAL_LOGDB("new LedIntensity.nLedFlashIntens is: %d ", LedIntensity.nLedFlashIntens);
            eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE) OMX_IndexConfigLedIntensity, &LedIntensity);

            if ( OMX_ErrorNone != eError ) {
                CAMHAL_LOGEB("Error while configuring LedFlash Intensity. "
                             "OMXSetConfig() returned error 0x%x", eError);
                ret = -1;
            }
        }

    }

    LOG_FUNCTION_NAME_EXIT

    return ret;
}
+
+// This function is used when setting LedTorch Intensity
+status_t OMXCameraAdapter::setLedTorch(int nLedTorchIntensP)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_LEDINTESITY LedIntensity;
+ char value[PROPERTY_VALUE_MAX];
+ unsigned int torchIntensity = DEFAULT_INTENSITY;
+
+ LOG_FUNCTION_NAME
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -1;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&LedIntensity, OMX_CONFIG_LEDINTESITY);
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE) OMX_IndexConfigLedIntensity, &LedIntensity);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("OMXGetConfig() returned error 0x%x", eError);
+ ret = -1;
+ }
+ if (NO_ERROR == ret)
+ {
+ CAMHAL_LOGEB("old LedIntensity.nLedTorchIntens is: %d ", LedIntensity.nLedTorchIntens);
+
+ // read product specific torvh value
+ if (property_get("ro.media.capture.torchIntensity", value, 0) > 0)
+ {
+ torchIntensity = atoi(value);
+ if ((torchIntensity < 0) || (torchIntensity > DEFAULT_INTENSITY))
+ {
+ torchIntensity = DEFAULT_INTENSITY;
+ }
+ }
+ else
+ {
+ torchIntensity = nLedTorchIntensP;
+ }
+
+ LedIntensity.nLedTorchIntens = (OMX_U32)torchIntensity;
+ CAMHAL_LOGEB("new LedIntensity.nLedTorchIntens is: %d ", LedIntensity.nLedTorchIntens);
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE) OMX_IndexConfigLedIntensity, &LedIntensity);
+
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring LedTorch Intensity. "
+ "OMXSetConfig() returned error 0x%x", eError);
+ ret = -1;
+ }
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
+#endif
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
new file mode 100644
index 0000000..a52654a
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
@@ -0,0 +1,4534 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXCameraAdapter.cpp
+*
+* This file maps the Camera Hardware Interface to OMX.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#ifndef USES_LEGACY_DOMX_DCC
+#include "OMXDCC.h"
+#endif
+#include "ErrorUtils.h"
+#include "TICameraParameters.h"
+#include <signal.h>
+#include <math.h>
+
+#include <cutils/properties.h>
+#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
+static int mDebugFps = 0;
+static int mDebugFcs = 0;
+
+#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);}
+
+namespace Ti {
+namespace Camera {
+
+#ifdef CAMERAHAL_OMX_PROFILING
+
+const char OMXCameraAdapter::DEFAULT_PROFILE_PATH[] = "/data/dbg/profile_data.bin";
+
+#endif
+
+//frames skipped before recalculating the framerate
+#define FPS_PERIOD 30
+
+android::Mutex gAdapterLock;
+/*--------------------Camera Adapter Class STARTS here-----------------------------*/
+
+// One-time adapter initialization: brings up the OMX core, acquires the
+// camera component handle, enables the preview port, selects the sensor and
+// resets all adapter runtime state to safe defaults. Also starts the command
+// handler and OMX callback handler threads.
+// @param caps capability set for the selected camera; stored in mCapabilities.
+// @return android error translated from the OMX result; on failure the
+//         partially initialized OMX state is torn down via
+//         performCleanupAfterError().
+status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
+{
+ LOG_FUNCTION_NAME;
+
+ char value[PROPERTY_VALUE_MAX];
+ const char *mountOrientationString = NULL;
+
+ // Debug switches read from system properties.
+ property_get("debug.camera.showfps", value, "0");
+ mDebugFps = atoi(value);
+ property_get("debug.camera.framecounts", value, "0");
+ mDebugFcs = atoi(value);
+
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ property_get("debug.camera.profile", value, "0");
+ mDebugProfile = atoi(value);
+
+#endif
+
+ // NOTE(review): osalError is set but never used in this function.
+ TIMM_OSAL_ERRORTYPE osalError = OMX_ErrorNone;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ status_t ret = NO_ERROR;
+
+ mLocalVersionParam.s.nVersionMajor = 0x1;
+ mLocalVersionParam.s.nVersionMinor = 0x1;
+ mLocalVersionParam.s.nRevision = 0x0 ;
+ mLocalVersionParam.s.nStep = 0x0;
+
+ mPending3Asettings = 0;//E3AsettingsAll;
+ mPendingCaptureSettings = 0;
+ mPendingPreviewSettings = 0;
+ mPendingReprocessSettings = 0;
+
+ ret = mMemMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
+
+ // The init semaphore must be idle; a non-zero count indicates a leftover
+ // event from a previous (failed) initialization.
+ if ( 0 != mInitSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mInitSem semaphore count %d", mInitSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ ///Update the preview and image capture port indexes
+ mCameraAdapterParameters.mPrevPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW;
+ // temp changed in order to build OMX_CAMERA_PORT_VIDEO_OUT_IMAGE;
+ mCameraAdapterParameters.mImagePortIndex = OMX_CAMERA_PORT_IMAGE_OUT_IMAGE;
+ mCameraAdapterParameters.mMeasurementPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT;
+ //currently not supported use preview port instead
+ mCameraAdapterParameters.mVideoPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_VIDEO;
+ mCameraAdapterParameters.mVideoInPortIndex = OMX_CAMERA_PORT_VIDEO_IN_VIDEO;
+
+ // Bring up the OMX core; torn down again in performCleanupAfterError().
+ eError = OMX_Init();
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_Init() failed, error: 0x%x", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+ mOmxInitialized = true;
+
+ // Initialize the callback handles
+ OMX_CALLBACKTYPE callbacks;
+ callbacks.EventHandler = Camera::OMXCameraAdapterEventHandler;
+ callbacks.EmptyBufferDone = Camera::OMXCameraAdapterEmptyBufferDone;
+ callbacks.FillBufferDone = Camera::OMXCameraAdapterFillBufferDone;
+
+ ///Get the handle to the OMX Component
+ eError = OMXCameraAdapter::OMXCameraGetHandle(&mCameraAdapterParameters.mHandleComp, this, callbacks);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
+ }
+ GOTO_EXIT_IF((eError != OMX_ErrorNone), eError);
+
+ mComponentState = OMX_StateLoaded;
+
+ CAMHAL_LOGVB("OMX_GetHandle -0x%x sensor_index = %lu", eError, mSensorIndex);
+ initDccFileDataSave(&mCameraAdapterParameters.mHandleComp, mCameraAdapterParameters.mPrevPortIndex);
+
+ // Disable all ports first; only the preview port is re-enabled below.
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ OMX_ALL,
+ NULL);
+
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortDisable) -0x%x", eError);
+ }
+ GOTO_EXIT_IF((eError != OMX_ErrorNone), eError);
+
+ // Register for port enable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mInitSem);
+ if(ret != NO_ERROR) {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ // Enable PREVIEW Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ // Wait for the port enable event to occur
+ ret = mInitSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret ) {
+ CAMHAL_LOGDA("-Port enable event arrived");
+ } else {
+ // Timed out: unregister the stale event before bailing out.
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Timeout for enabling preview port expired!");
+ goto EXIT;
+ }
+
+ // Select the sensor
+ OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
+ OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
+ sensorSelect.eSensor = (OMX_SENSORSELECT) mSensorIndex;
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSensorSelect, &sensorSelect);
+ // NOTE(review): on failure this returns without running the EXIT cleanup
+ // path (performCleanupAfterError), unlike the error paths above - confirm
+ // this is intentional.
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while selecting the sensor index as %d - 0x%x", mSensorIndex, eError);
+ return BAD_VALUE;
+ } else {
+ CAMHAL_LOGDB("Sensor %d selected successfully", mSensorIndex);
+ }
+
+#ifdef CAMERAHAL_DEBUG
+
+ printComponentVersion(mCameraAdapterParameters.mHandleComp);
+
+#endif
+
+ // Reset all adapter runtime state to safe defaults.
+ mBracketingEnabled = false;
+ mZoomBracketingEnabled = false;
+ mBracketingBuffersQueuedCount = 0;
+ mBracketingRange = 1;
+ mLastBracetingBufferIdx = 0;
+ mBracketingBuffersQueued = NULL;
+ mOMXStateSwitch = false;
+ mBracketingSet = false;
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ mRawCapture = false;
+ mYuvCapture = false;
+#endif
+
+ mCaptureSignalled = false;
+ mCaptureConfigured = false;
+ mReprocConfigured = false;
+ mRecording = false;
+ mWaitingForSnapshot = false;
+ mPictureFormatFromClient = NULL;
+
+ mCapabilitiesOpMode = MODE_MAX;
+ mCapMode = INITIAL_MODE;
+ mIPP = IPP_NULL;
+ mVstabEnabled = false;
+ mVnfEnabled = false;
+ mBurstFrames = 1;
+ mFlushShotConfigQueue = false;
+ mPictureQuality = 100;
+ mCurrentZoomIdx = 0;
+ mTargetZoomIdx = 0;
+ mPreviousZoomIndx = 0;
+ mReturnZoomStatus = false;
+ mZoomInc = 1;
+ mZoomParameterIdx = 0;
+ mExposureBracketingValidEntries = 0;
+ mZoomBracketingValidEntries = 0;
+ mSensorOverclock = false;
+ mAutoConv = OMX_TI_AutoConvergenceModeMax;
+ mManualConv = 0;
+
+#ifdef CAMERAHAL_TUNA
+ mIternalRecordingHint = false;
+#endif
+
+ mDeviceOrientation = 0;
+ mFaceOrientation = 0;
+ mCapabilities = caps;
+ mZoomUpdating = false;
+ mZoomUpdate = false;
+ mGBCE = BRIGHTNESS_OFF;
+ mGLBCE = BRIGHTNESS_OFF;
+ mParameters3A.ExposureLock = OMX_FALSE;
+ mParameters3A.WhiteBalanceLock = OMX_FALSE;
+
+ mEXIFData.mGPSData.mAltitudeValid = false;
+ mEXIFData.mGPSData.mDatestampValid = false;
+ mEXIFData.mGPSData.mLatValid = false;
+ mEXIFData.mGPSData.mLongValid = false;
+ mEXIFData.mGPSData.mMapDatumValid = false;
+ mEXIFData.mGPSData.mProcMethodValid = false;
+ mEXIFData.mGPSData.mVersionIdValid = false;
+ mEXIFData.mGPSData.mTimeStampValid = false;
+ mEXIFData.mModelValid = false;
+ mEXIFData.mMakeValid = false;
+
+ mCapturedFrames = 0;
+ mBurstFramesAccum = 0;
+ mBurstFramesQueued = 0;
+
+ //update the mDeviceOrientation with the sensor mount orientation.
+ //So that the face detect will work before onOrientationEvent()
+ //get triggered.
+ CAMHAL_ASSERT(mCapabilities);
+ mountOrientationString = mCapabilities->get(CameraProperties::ORIENTATION_INDEX);
+ CAMHAL_ASSERT(mountOrientationString);
+ mDeviceOrientation = atoi(mountOrientationString);
+ mFaceOrientation = atoi(mountOrientationString);
+
+ // Sensor index 2 is the stereo/S3D device; high speed mode applies to the
+ // regular sensors only - TODO confirm against OMXCapabilities.
+ if (mSensorIndex != 2) {
+ mCapabilities->setMode(MODE_HIGH_SPEED);
+ }
+
+ if (mCapabilities->get(CameraProperties::SUPPORTED_ZOOM_STAGES) != NULL) {
+ mMaxZoomSupported = mCapabilities->getInt(CameraProperties::SUPPORTED_ZOOM_STAGES) + 1;
+ } else {
+ mMaxZoomSupported = 1;
+ }
+
+ // initialize command handling thread
+ if(mCommandHandler.get() == NULL)
+ mCommandHandler = new CommandHandler(this);
+
+ if ( NULL == mCommandHandler.get() )
+ {
+ CAMHAL_LOGEA("Couldn't create command handler");
+ return NO_MEMORY;
+ }
+
+ ret = mCommandHandler->run("CallbackThread", android::PRIORITY_URGENT_DISPLAY);
+ if ( ret != NO_ERROR )
+ {
+ // INVALID_OPERATION means the thread is already running (re-init case).
+ if( ret == INVALID_OPERATION){
+ CAMHAL_LOGDA("command handler thread already runnning!!");
+ ret = NO_ERROR;
+ } else {
+ CAMHAL_LOGEA("Couldn't run command handlerthread");
+ return ret;
+ }
+ }
+
+ // initialize omx callback handling thread
+ if(mOMXCallbackHandler.get() == NULL)
+ mOMXCallbackHandler = new OMXCallbackHandler(this);
+
+ if ( NULL == mOMXCallbackHandler.get() )
+ {
+ CAMHAL_LOGEA("Couldn't create omx callback handler");
+ return NO_MEMORY;
+ }
+
+ ret = mOMXCallbackHandler->run("OMXCallbackThread", android::PRIORITY_URGENT_DISPLAY);
+ if ( ret != NO_ERROR )
+ {
+ if( ret == INVALID_OPERATION){
+ CAMHAL_LOGDA("omx callback handler thread already runnning!!");
+ ret = NO_ERROR;
+ } else {
+ CAMHAL_LOGEA("Couldn't run omx callback handler thread");
+ return ret;
+ }
+ }
+
+ OMX_INIT_STRUCT_PTR (&mRegionPriority, OMX_TI_CONFIG_3A_REGION_PRIORITY);
+ OMX_INIT_STRUCT_PTR (&mFacePriority, OMX_TI_CONFIG_3A_FACE_PRIORITY);
+ mRegionPriority.nPortIndex = OMX_ALL;
+ mFacePriority.nPortIndex = OMX_ALL;
+
+ //Setting this flag ensures that the first setParameter call will apply all
+ //3A settings and will not conditionally apply based on current values.
+ mFirstTimeInit = true;
+
+ //Flag to avoid calling setVFramerate() before OMX_SetParameter(OMX_IndexParamPortDefinition)
+ //Ducati will return an error otherwise.
+ mSetFormatDone = false;
+
+ memset(mExposureBracketingValues, 0, EXP_BRACKET_RANGE*sizeof(int));
+ memset(mZoomBracketingValues, 0, ZOOM_BRACKET_RANGE*sizeof(int));
+ mMeasurementEnabled = false;
+ mFaceDetectionRunning = false;
+ mFaceDetectionPaused = false;
+ mFDSwitchAlgoPriority = false;
+
+ metadataLastAnalogGain = -1;
+ metadataLastExposureTime = -1;
+
+ memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex], 0, sizeof(OMXCameraPortParameters));
+ memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex], 0, sizeof(OMXCameraPortParameters));
+ memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex], 0, sizeof(OMXCameraPortParameters));
+ memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex], 0, sizeof(OMXCameraPortParameters));
+
+ // initialize 3A defaults
+ mParameters3A.Effect = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EFFECT, EffLUT);
+ mParameters3A.FlashMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FLASH_MODE, FlashLUT);
+ mParameters3A.SceneMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_SCENE_MODE, SceneLUT);
+ mParameters3A.EVCompensation = atoi(OMXCameraAdapter::DEFAULT_EV_COMPENSATION);
+ mParameters3A.Focus = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FOCUS_MODE, FocusLUT);
+ mParameters3A.ISO = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ISO_MODE, IsoLUT);
+ mParameters3A.Flicker = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ANTIBANDING, FlickerLUT);
+ mParameters3A.Brightness = atoi(OMXCameraAdapter::DEFAULT_BRIGHTNESS);
+ mParameters3A.Saturation = atoi(OMXCameraAdapter::DEFAULT_SATURATION) - SATURATION_OFFSET;
+ mParameters3A.Sharpness = atoi(OMXCameraAdapter::DEFAULT_SHARPNESS) - SHARPNESS_OFFSET;
+ mParameters3A.Contrast = atoi(OMXCameraAdapter::DEFAULT_CONTRAST) - CONTRAST_OFFSET;
+ mParameters3A.WhiteBallance = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_WB, WBalLUT);
+ mParameters3A.Exposure = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EXPOSURE_MODE, ExpLUT);
+ mParameters3A.ExposureLock = OMX_FALSE;
+ mParameters3A.FocusLock = OMX_FALSE;
+ mParameters3A.WhiteBalanceLock = OMX_FALSE;
+
+ mParameters3A.ManualExposure = 0;
+ mParameters3A.ManualExposureRight = 0;
+ mParameters3A.ManualGain = 0;
+ mParameters3A.ManualGainRight = 0;
+
+ mParameters3A.AlgoExternalGamma = OMX_FALSE;
+ mParameters3A.AlgoNSF1 = OMX_TRUE;
+ mParameters3A.AlgoNSF2 = OMX_TRUE;
+ mParameters3A.AlgoSharpening = OMX_TRUE;
+ mParameters3A.AlgoThreeLinColorMap = OMX_TRUE;
+ mParameters3A.AlgoGIC = OMX_TRUE;
+ memset(&mParameters3A.mGammaTable, 0, sizeof(mParameters3A.mGammaTable));
+
+ LOG_FUNCTION_NAME_EXIT;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+
+ EXIT:
+
+ // Error path: tear down the partially initialized OMX component/core.
+ CAMHAL_LOGDB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+void OMXCameraAdapter::performCleanupAfterError()
+{
+ if(mCameraAdapterParameters.mHandleComp)
+ {
+ ///Free the OMX component handle in case of error
+ OMX_FreeHandle(mCameraAdapterParameters.mHandleComp);
+ mCameraAdapterParameters.mHandleComp = NULL;
+ }
+
+ ///De-init the OMX
+ OMX_Deinit();
+ mComponentState = OMX_StateInvalid;
+ mOmxInitialized = false;
+}
+
+OMXCameraAdapter::OMXCameraPortParameters *OMXCameraAdapter::getPortParams(CameraFrame::FrameType frameType)
+{
+ OMXCameraAdapter::OMXCameraPortParameters *ret = NULL;
+
+ switch ( frameType )
+ {
+ case CameraFrame::IMAGE_FRAME:
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ break;
+ case CameraFrame::RAW_FRAME:
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if (mRawCapture) {
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+ } else {
+#endif
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ }
+#endif
+ break;
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ case CameraFrame::SNAPSHOT_FRAME:
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ break;
+ case CameraFrame::FRAME_DATA_SYNC:
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
+ break;
+ default:
+ break;
+ };
+
+ return ret;
+}
+
+// Returns the OMX buffer header backing 'frameBuf' to the component so it can
+// be refilled with a frame of the given type. For capture (image/raw) frames
+// this also enforces the burst-capture accounting: when no more frames are
+// expected the buffer is parked IDLE and end-of-capture is signalled instead
+// of re-queuing.
+// @param frameBuf buffer previously delivered to the client.
+// @param frameType frame type, used to resolve the owning port.
+// @return NO_ERROR on success; NO_INIT if preview is not active; -EINVAL for
+//         a NULL buffer or unknown frame type; OMX error otherwise.
+status_t OMXCameraAdapter::fillThisBuffer(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMXCameraPortParameters *port = NULL;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ BaseCameraAdapter::AdapterState state;
+ BaseCameraAdapter::getState(state);
+ bool isCaptureFrame = false;
+
+ if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE )
+ {
+ return NO_INIT;
+ }
+
+ if ( NULL == frameBuf )
+ {
+ return -EINVAL;
+ }
+
+ isCaptureFrame = (CameraFrame::IMAGE_FRAME == frameType) ||
+ (CameraFrame::RAW_FRAME == frameType);
+
+ if ( NO_ERROR == ret )
+ {
+ port = getPortParams(frameType);
+ if ( NULL == port )
+ {
+ CAMHAL_LOGEB("Invalid frameType 0x%x", frameType);
+ ret = -EINVAL;
+ }
+ }
+
+ if ( NO_ERROR == ret ) {
+ // Find the port buffer header whose app-private pointer matches the
+ // client buffer, then hand it back to the component.
+ for ( int i = 0 ; i < port->mNumBufs ; i++) {
+ if ((CameraBuffer *) port->mBufferHeader[i]->pAppPrivate == frameBuf) {
+ if ( isCaptureFrame && !mBracketingEnabled ) {
+ // Burst accounting must be serialized with the capture path.
+ android::AutoMutex lock(mBurstLock);
+ if ((1 > mCapturedFrames) && !mBracketingEnabled && (mCapMode != CP_CAM)) {
+ // Signal end of image capture
+ if ( NULL != mEndImageCaptureCallback) {
+ mEndImageCaptureCallback(mEndCaptureData);
+ }
+ port->mStatus[i] = OMXCameraPortParameters::IDLE;
+ return NO_ERROR;
+ } else if (mBurstFramesQueued >= mBurstFramesAccum) {
+ // The burst already has enough buffers in flight.
+ port->mStatus[i] = OMXCameraPortParameters::IDLE;
+ return NO_ERROR;
+ }
+ mBurstFramesQueued++;
+ }
+ port->mStatus[i] = OMXCameraPortParameters::FILL;
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp, port->mBufferHeader[i]);
+ if ( eError != OMX_ErrorNone )
+ {
+ CAMHAL_LOGEB("OMX_FillThisBuffer 0x%x", eError);
+ goto EXIT;
+ }
+ // Track how many buffers the remote (Ducati) side currently holds.
+ mFramesWithDucati++;
+ break;
+ }
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ //Since fillthisbuffer is called asynchronously, make sure to signal error to the app
+ mErrorNotifier->errorNotify(CAMERA_ERROR_HARD);
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+void OMXCameraAdapter::setParamS3D(OMX_U32 port, const char *valstr)
+{
+ OMXCameraPortParameters *cap;
+
+ LOG_FUNCTION_NAME;
+
+ cap = &mCameraAdapterParameters.mCameraPortParams[port];
+ if (valstr != NULL)
+ {
+ if (strcmp(valstr, TICameraParameters::S3D_TB_FULL) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutTopBottom;
+ }
+ else if (strcmp(valstr, TICameraParameters::S3D_SS_FULL) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutLeftRight;
+ }
+ else if (strcmp(valstr, TICameraParameters::S3D_TB_SUBSAMPLED) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutTopBottomSubsample;
+ }
+ else if (strcmp(valstr, TICameraParameters::S3D_SS_SUBSAMPLED) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutLeftRightSubsample;
+ }
+ else
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayout2D;
+ }
+ }
+ else
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayout2D;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+// Applies the client CameraParameters to the adapter: preview pixel format,
+// size and frame-rate range on the preview port, then delegates to the
+// per-feature helpers (capture, 3A, algo, focus, FD, zoom, EXIF). The last
+// accepted parameter set is cached in mParams.
+// @param params parameter set coming from the camera HAL client.
+// @return accumulated status of all sub-setters (NO_ERROR on full success).
+status_t OMXCameraAdapter::setParameters(const android::CameraParameters &params)
+{
+ LOG_FUNCTION_NAME;
+
+ int mode = 0;
+ status_t ret = NO_ERROR;
+ bool updateImagePortParams = false;
+ int minFramerate, maxFramerate, frameRate;
+ const char *valstr = NULL;
+ int w, h;
+ OMX_COLOR_FORMATTYPE pixFormat;
+ BaseCameraAdapter::AdapterState state;
+ BaseCameraAdapter::getState(state);
+
+ ///@todo Include more camera parameters
+ // NOTE(review): YUV420P and YUV422I are also mapped to the semi-planar
+ // (NV12) OMX format here - confirm this is intentional.
+ if ( (valstr = params.getPreviewFormat()) != NULL ) {
+ if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0 ||
+ strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ CAMHAL_LOGDA("YUV420SP format selected");
+ pixFormat = OMX_COLOR_FormatYUV420PackedSemiPlanar;
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ CAMHAL_LOGDA("RGB565 format selected");
+ pixFormat = OMX_COLOR_Format16bitRGB565;
+ } else {
+ CAMHAL_LOGDA("Invalid format, CbYCrY format selected as default");
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ }
+ } else {
+ CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY");
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ }
+
+ OMXCameraPortParameters *cap;
+ cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+ params.getPreviewSize(&w, &h);
+ frameRate = params.getPreviewFrameRate();
+
+ // The fps range comes as "min,max" scaled by VFR_SCALE; the preview
+ // frame rate is pinned to the range maximum.
+ const char *frameRateRange = params.get(TICameraParameters::KEY_PREVIEW_FRAME_RATE_RANGE);
+ bool fpsRangeParsed = CameraHal::parsePair(frameRateRange, &minFramerate, &maxFramerate, ',');
+ CAMHAL_ASSERT(fpsRangeParsed);
+
+ minFramerate /= CameraHal::VFR_SCALE;
+ maxFramerate /= CameraHal::VFR_SCALE;
+
+ frameRate = maxFramerate;
+
+ // Only push a new variable frame rate to the component when it changed.
+ if ( ( cap->mMinFrameRate != (OMX_U32) minFramerate ) ||
+ ( cap->mMaxFrameRate != (OMX_U32) maxFramerate ) ) {
+ cap->mMinFrameRate = minFramerate;
+ cap->mMaxFrameRate = maxFramerate;
+ setVFramerate(cap->mMinFrameRate, cap->mMaxFrameRate);
+ }
+
+ cap->mColorFormat = pixFormat;
+ cap->mWidth = w;
+ cap->mHeight = h;
+ cap->mFrameRate = frameRate;
+
+ CAMHAL_LOGVB("Prev: cap.mColorFormat = %d", (int)cap->mColorFormat);
+ CAMHAL_LOGVB("Prev: cap.mWidth = %d", (int)cap->mWidth);
+ CAMHAL_LOGVB("Prev: cap.mHeight = %d", (int)cap->mHeight);
+ CAMHAL_LOGVB("Prev: cap.mFrameRate = %d", (int)cap->mFrameRate);
+
+ ///mStride is set from setBufs() while passing the APIs
+ cap->mStride = 4096;
+ cap->mBufSize = cap->mStride * cap->mHeight;
+
+
+ // Full HD at high fps needs sensor overclocking; crossing that boundary
+ // in either direction requires an OMX state switch.
+ if ( ( cap->mWidth >= 1920 ) &&
+ ( cap->mHeight >= 1080 ) &&
+ ( cap->mFrameRate >= FRAME_RATE_FULL_HD ) &&
+ ( !mSensorOverclock ) )
+ {
+ mOMXStateSwitch = true;
+ }
+ else if ( ( ( cap->mWidth < 1920 ) ||
+ ( cap->mHeight < 1080 ) ||
+ ( cap->mFrameRate < FRAME_RATE_FULL_HD ) ) &&
+ ( mSensorOverclock ) )
+ {
+ mOMXStateSwitch = true;
+ }
+
+#ifdef CAMERAHAL_TUNA
+ // Absent or non-"false" recording hint enables the internal hint.
+ valstr = params.get(TICameraParameters::KEY_RECORDING_HINT);
+ if (!valstr || (valstr && (strcmp(valstr, android::CameraParameters::FALSE)))) {
+ mIternalRecordingHint = false;
+ } else {
+ mIternalRecordingHint = true;
+ }
+#endif
+
+#ifdef OMAP_ENHANCEMENT
+ if ( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
+ {
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0)
+ {
+ mMeasurementEnabled = true;
+ }
+ else if (strcmp(valstr, android::CameraParameters::FALSE) == 0)
+ {
+ mMeasurementEnabled = false;
+ }
+ else
+ {
+ mMeasurementEnabled = false;
+ }
+ }
+ else
+ {
+ //Disable measurement data by default
+ mMeasurementEnabled = false;
+ }
+#endif
+
+#ifdef OMAP_ENHANCEMENT_S3D
+ setParamS3D(mCameraAdapterParameters.mPrevPortIndex,
+ params.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT));
+#endif
+
+ // Delegate feature-specific parameters; errors are OR-ed into 'ret'.
+ ret |= setParametersCapture(params, state);
+
+ ret |= setParameters3A(params, state);
+
+ ret |= setParametersAlgo(params, state);
+
+ ret |= setParametersFocus(params, state);
+
+ ret |= setParametersFD(params, state);
+
+ ret |= setParametersZoom(params, state);
+
+ ret |= setParametersEXIF(params, state);
+
+#ifdef MOTOROLA_CAMERA
+ CAMHAL_LOGDA("Start setting of Motorola specific parameters");
+ if (NULL != params.get(TICameraParameters::KEY_MOT_LEDFLASH)) {
+ setLedFlash((unsigned int) (params.getInt(TICameraParameters::KEY_MOT_LEDFLASH)));
+ }
+ if (NULL != params.get(TICameraParameters::KEY_MOT_LEDTORCH)) {
+ setLedTorch((unsigned int) (params.getInt(TICameraParameters::KEY_MOT_LEDTORCH)));
+ }
+#endif
+
+ // Cache the accepted parameters; later calls diff against these.
+ mParams = params;
+ mFirstTimeInit = false;
+
+ if ( MODE_MAX != mCapabilitiesOpMode ) {
+ mCapabilities->setMode(mCapabilitiesOpMode);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+void saveFile(unsigned char *buff, int width, int height, int format) {
+ static int counter = 1;
+ int fd = -1;
+ char fn[256];
+
+ LOG_FUNCTION_NAME;
+
+ fn[0] = 0;
+ sprintf(fn, "/preview%03d.yuv", counter);
+ fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+ if(fd < 0) {
+ CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
+ return;
+ }
+
+ CAMHAL_LOGVB("Copying from 0x%x, size=%d x %d", buff, width, height);
+
+ //method currently supports only nv12 dumping
+ int stride = width;
+ uint8_t *bf = (uint8_t*) buff;
+ for(int i=0;i<height;i++)
+ {
+ write(fd, bf, width);
+ bf += 4096;
+ }
+
+ for(int i=0;i<height/2;i++)
+ {
+ write(fd, bf, stride);
+ bf += 4096;
+ }
+
+ close(fd);
+
+
+ counter++;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+static status_t saveBufferToFile(const void *buf, int size, const char *filename)
+{
+ if (size < 0) {
+ CAMHAL_LOGE("Wrong buffer size: %d", size);
+ return BAD_VALUE;
+ }
+
+ const int fd = open(filename, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0644);
+ if (fd < 0) {
+ CAMHAL_LOGE("ERROR: %s, Unable to save raw file", strerror(fd));
+ return BAD_VALUE;
+ }
+
+ if (write(fd, buf, size) != (signed)size) {
+ CAMHAL_LOGE("ERROR: Unable to write to raw file: %s ", strerror(errno));
+ close(fd);
+ return NO_MEMORY;
+ }
+
+ CAMHAL_LOGD("buffer=%p, size=%d stored at %s", buf, size, filename);
+
+ close(fd);
+ return OK;
+}
+#endif
+
+
+// Reports the adapter's current dynamic state back into 'params': scene-mode
+// derived 3A values, focus distances, current ISO, smooth-zoom progress,
+// 3A lock status, and capability lists that can change with operating mode.
+// @param params parameter set filled in for the camera HAL client.
+void OMXCameraAdapter::getParameters(android::CameraParameters& params)
+{
+ status_t ret = NO_ERROR;
+ OMX_CONFIG_EXPOSUREVALUETYPE exp;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ BaseCameraAdapter::AdapterState state;
+ BaseCameraAdapter::getState(state);
+ const char *valstr = NULL;
+ LOG_FUNCTION_NAME;
+
+ // In a non-manual scene mode the scene preset dictates focus, flash and
+ // white balance; report those derived values (only if supported).
+ if( mParameters3A.SceneMode != OMX_Manual ) {
+ const char *valstr_supported = NULL;
+
+ if (mCapabilities) {
+ const SceneModesEntry* entry = NULL;
+ entry = getSceneModeEntry(mCapabilities->get(CameraProperties::CAMERA_NAME),
+ (OMX_SCENEMODETYPE) mParameters3A.SceneMode);
+ if(entry) {
+ mParameters3A.Focus = entry->focus;
+ mParameters3A.FlashMode = entry->flash;
+ mParameters3A.WhiteBallance = entry->wb;
+ }
+ }
+
+ valstr = getLUTvalue_OMXtoHAL(mParameters3A.WhiteBallance, WBalLUT);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
+ if (valstr && valstr_supported && strstr(valstr_supported, valstr))
+ params.set(android::CameraParameters::KEY_WHITE_BALANCE , valstr);
+
+ valstr = getLUTvalue_OMXtoHAL(mParameters3A.FlashMode, FlashLUT);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_FLASH_MODES);
+ if (valstr && valstr_supported && strstr(valstr_supported, valstr))
+ params.set(android::CameraParameters::KEY_FLASH_MODE, valstr);
+
+ // Outside video modes, auto focus is reported as continuous-picture.
+ if ((mParameters3A.Focus == OMX_IMAGE_FocusControlAuto) &&
+ ( (mCapMode != OMXCameraAdapter::VIDEO_MODE) &&
+ (mCapMode != OMXCameraAdapter::VIDEO_MODE_HQ) ) ) {
+ valstr = android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
+ } else {
+ valstr = getLUTvalue_OMXtoHAL(mParameters3A.Focus, FocusLUT);
+ }
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
+ if (valstr && valstr_supported && strstr(valstr_supported, valstr))
+ params.set(android::CameraParameters::KEY_FOCUS_MODE, valstr);
+ }
+
+ //Query focus distances only when focus is running
+ if ( ( AF_ACTIVE & state ) ||
+ ( NULL == mParameters.get(android::CameraParameters::KEY_FOCUS_DISTANCES) ) )
+ {
+ updateFocusDistances(params);
+ }
+ else
+ {
+ // Otherwise re-report the cached distances.
+ params.set(android::CameraParameters::KEY_FOCUS_DISTANCES,
+ mParameters.get(android::CameraParameters::KEY_FOCUS_DISTANCES));
+ }
+
+#ifdef OMAP_ENHANCEMENT
+ // Report the sensitivity (ISO) currently chosen by the component.
+ OMX_INIT_STRUCT_PTR (&exp, OMX_CONFIG_EXPOSUREVALUETYPE);
+ exp.nPortIndex = OMX_ALL;
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &exp);
+ if ( OMX_ErrorNone == eError )
+ {
+ params.set(TICameraParameters::KEY_CURRENT_ISO, exp.nSensitivity);
+ }
+ else
+ {
+ CAMHAL_LOGEB("OMX error 0x%x, while retrieving current ISO value", eError);
+ }
+#endif
+
+ {
+ android::AutoMutex lock(mZoomLock);
+ //Immediate zoom should not be avaialable while smooth zoom is running
+ if ( ZOOM_ACTIVE & state )
+ {
+ // Report the smooth-zoom progress one step at a time; once the
+ // target index is reached, leave the smooth-zoom state.
+ if ( mZoomParameterIdx != mCurrentZoomIdx )
+ {
+ mZoomParameterIdx += mZoomInc;
+ }
+ params.set(android::CameraParameters::KEY_ZOOM, mZoomParameterIdx);
+ if ( ( mCurrentZoomIdx == mTargetZoomIdx ) &&
+ ( mZoomParameterIdx == mCurrentZoomIdx ) )
+ {
+
+ if ( NO_ERROR == ret )
+ {
+
+ ret = BaseCameraAdapter::setState(CAMERA_STOP_SMOOTH_ZOOM);
+
+ if ( NO_ERROR == ret )
+ {
+ ret = BaseCameraAdapter::commitState();
+ }
+ else
+ {
+ ret |= BaseCameraAdapter::rollbackState();
+ }
+
+ }
+
+ }
+
+ CAMHAL_LOGDB("CameraParameters Zoom = %d", mCurrentZoomIdx);
+ }
+ else
+ {
+ params.set(android::CameraParameters::KEY_ZOOM, mCurrentZoomIdx);
+ }
+ }
+
+ //Populate current lock status
+ if ( mUserSetExpLock || mParameters3A.ExposureLock ) {
+ params.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
+ android::CameraParameters::TRUE);
+ } else {
+ params.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
+ android::CameraParameters::FALSE);
+ }
+
+ if ( mUserSetWbLock || mParameters3A.WhiteBalanceLock ) {
+ params.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
+ android::CameraParameters::TRUE);
+ } else {
+ params.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
+ android::CameraParameters::FALSE);
+ }
+
+ // Update Picture size capabilities dynamically
+ params.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+ mCapabilities->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
+
+ // Update framerate capabilities dynamically
+ params.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+ mCapabilities->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+
+ params.set(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED,
+ mCapabilities->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT));
+
+ params.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+ mCapabilities->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+
+ params.set(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED,
+ mCapabilities->get(CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED));
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+// Sets up an OMX tunnel between the camera video output port and a video
+// encoder: enables the video port, programs its resolution/format and (on
+// non-Motorola builds) the VTC slice height, then tunnels it to the encoder.
+// @param SliceHeight encoder slice height in lines; must be non-zero.
+// @param EncoderHandle OMX handle of the encoder component, passed as an
+//        integer and cast back to a pointer - TODO confirm this is safe on
+//        64-bit builds.
+// @param width/height video frame dimensions.
+// @return NO_ERROR on success, BAD_VALUE/UNKNOWN_ERROR/NO_INIT on failure.
+status_t OMXCameraAdapter::setupTunnel(uint32_t SliceHeight, uint32_t EncoderHandle, uint32_t width, uint32_t height) {
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_HANDLETYPE *encoderHandle = (OMX_HANDLETYPE *)EncoderHandle;
+
+ CAMHAL_LOGDB("\n %s: SliceHeight:%d, EncoderHandle:%d width:%d height:%d \n", __FUNCTION__, SliceHeight, EncoderHandle, width, height);
+
+ if (SliceHeight == 0){
+ CAMHAL_LOGEA("\n\n #### Encoder Slice Height Not received, Dont Setup Tunnel $$$$\n\n");
+ return BAD_VALUE;
+ }
+
+ if (encoderHandle == NULL) {
+ CAMHAL_LOGEA("Encoder Handle not set \n\n");
+ return BAD_VALUE;
+ }
+
+ // The init semaphore must be idle before it can be reused for the port
+ // enable handshake below.
+ if ( 0 != mInitSem.Count() ) {
+ CAMHAL_LOGEB("Error mInitSem semaphore count %d", mInitSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ // Register for port enable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ mInitSem);
+ if(ret != NO_ERROR) {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ // Enable VIDEO Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ // Wait for the port enable event to occur
+ ret = mInitSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret ) {
+ CAMHAL_LOGDA("-Port enable event arrived");
+ } else {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+ // NOTE(review): the message below says "preview port" but this is the
+ // video port being enabled.
+ CAMHAL_LOGEA("Timeout for enabling preview port expired!");
+ return UNKNOWN_ERROR;
+ }
+
+ //Set the Video Port Params
+ // Read-modify-write the port definition with the tunnel resolution.
+ OMX_PARAM_PORTDEFINITIONTYPE portCheck;
+ OMX_INIT_STRUCT_PTR (&portCheck, OMX_PARAM_PORTDEFINITIONTYPE);
+ portCheck.nPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_VIDEO;
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetParameter OMX_IndexParamPortDefinition Error - %x", eError);
+ }
+
+ portCheck.format.video.nFrameWidth = width;
+ portCheck.format.video.nFrameHeight = height;
+ portCheck.format.video.eColorFormat = OMX_COLOR_FormatYUV420PackedSemiPlanar;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter OMX_IndexParamPortDefinition Error- %x", eError);
+ }
+
+#ifndef MOTOROLA_CAMERA
+ //Slice Configuration
+ OMX_TI_PARAM_VTCSLICE VTCSlice;
+ OMX_INIT_STRUCT_PTR(&VTCSlice, OMX_TI_PARAM_VTCSLICE);
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE)OMX_TI_IndexParamVtcSlice, &VTCSlice);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetParameter OMX_TI_IndexParamVtcSlice Error - %x", eError);
+ }
+
+ VTCSlice.nSliceHeight = SliceHeight;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE)OMX_TI_IndexParamVtcSlice, &VTCSlice);
+ if (OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_SetParameter on OMX_TI_IndexParamVtcSlice returned error: 0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ // Finally tunnel camera video output (as supplier) to encoder input 0.
+ eError = OMX_SetupTunnel(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mVideoPortIndex, encoderHandle, 0);
+ if (OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_SetupTunnel returned error: 0x%x", eError);
+ return BAD_VALUE;
+ }
+#endif
+
+ return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::setSensorQuirks(int orientation,
+ OMXCameraPortParameters &portParams,
+ bool &portConfigured)
+{
+ status_t overclockStatus = NO_ERROR;
+ int sensorID = -1;
+ size_t overclockWidth;
+ size_t overclockHeight;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_PORTDEFINITIONTYPE portCheck;
+
+ LOG_FUNCTION_NAME;
+
+ portConfigured = false;
+ OMX_INIT_STRUCT_PTR (&portCheck, OMX_PARAM_PORTDEFINITIONTYPE);
+
+ portCheck.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition,
+ &portCheck);
+
+ if ( eError != OMX_ErrorNone ) {
+ CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ if ( ( orientation == 90 ) || ( orientation == 270 ) ) {
+ overclockWidth = 1080;
+ overclockHeight = 1920;
+ } else {
+ overclockWidth = 1920;
+ overclockHeight = 1080;
+ }
+
+ sensorID = mCapabilities->getInt(CameraProperties::CAMERA_SENSOR_ID);
+ if( ( ( sensorID == SENSORID_IMX060 ) &&
+ ( portParams.mWidth >= overclockWidth ) &&
+ ( portParams.mHeight >= overclockHeight ) &&
+ ( portParams.mFrameRate >= FRAME_RATE_FULL_HD ) ) ||
+ (( sensorID == SENSORID_OV14825) &&
+ ( portParams.mFrameRate >= FRAME_RATE_HIGH_HD ))||
+ ( ( sensorID == SENSORID_OV5640 ) &&
+ ( portParams.mWidth >= overclockWidth ) &&
+ ( portParams.mHeight >= overclockHeight ) ) ) {
+ overclockStatus = setSensorOverclock(true);
+ } else {
+
+ //WA: If the next port resolution doesn't require
+ // sensor overclocking, but the previous resolution
+ // needed it, then we have to first set new port
+ // resolution and then disable sensor overclocking.
+ if( ( ( sensorID == SENSORID_IMX060 ) &&
+ ( portCheck.format.video.nFrameWidth >= overclockWidth ) &&
+ ( portCheck.format.video.nFrameHeight >= overclockHeight ) &&
+ ( ( portCheck.format.video.xFramerate >> 16 ) >= FRAME_RATE_FULL_HD ) ) ||
+ (( sensorID == SENSORID_OV14825) &&
+ (( portCheck.format.video.xFramerate >> 16) >= FRAME_RATE_HIGH_HD ))||
+ ( ( sensorID == SENSORID_OV5640 ) &&
+ ( portCheck.format.video.nFrameWidth >= overclockWidth ) &&
+ ( portCheck.format.video.nFrameHeight >= overclockHeight ) ) ) {
+ status_t ret = setFormat(mCameraAdapterParameters.mPrevPortIndex,
+ portParams);
+ if ( NO_ERROR != ret ) {
+ return ret;
+ }
+
+ // Another WA: Setting the port definition will reset the VFR
+ // configuration.
+ setVFramerate(portParams.mMinFrameRate, portParams.mMaxFrameRate);
+
+ portConfigured = true;
+ }
+
+ overclockStatus = setSensorOverclock(false);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return overclockStatus;
+}
+status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &portParams)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ size_t bufferCount;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_PORTDEFINITIONTYPE portCheck;
+
+ OMX_INIT_STRUCT_PTR (&portCheck, OMX_PARAM_PORTDEFINITIONTYPE);
+
+ portCheck.nPortIndex = port;
+
+ eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ if (OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port) {
+ portCheck.format.video.nFrameWidth = portParams.mWidth;
+ portCheck.format.video.nFrameHeight = portParams.mHeight;
+ portCheck.format.video.eColorFormat = portParams.mColorFormat;
+ portCheck.format.video.nStride = portParams.mStride;
+
+ portCheck.format.video.xFramerate = portParams.mFrameRate<<16;
+ portCheck.nBufferSize = portParams.mStride * portParams.mHeight;
+ portCheck.nBufferCountActual = portParams.mNumBufs;
+ mFocusThreshold = FOCUS_THRESHOLD * portParams.mFrameRate;
+ // Used for RAW capture
+ } else if (OMX_CAMERA_PORT_VIDEO_OUT_VIDEO == port) {
+ portCheck.format.video.nFrameWidth = portParams.mWidth;
+ portCheck.format.video.nFrameHeight = portParams.mHeight;
+ portCheck.format.video.eColorFormat = OMX_COLOR_FormatRawBayer10bit; // portParams.mColorFormat;
+ portCheck.nBufferCountActual = 1; // portParams.mNumBufs;
+ } else if (OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port) {
+ portCheck.format.image.nFrameWidth = portParams.mWidth;
+ portCheck.format.image.nFrameHeight = portParams.mHeight;
+ if (OMX_COLOR_FormatUnused == portParams.mColorFormat) {
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ if (mCodingMode == CodingJPEG) {
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
+ } else if (mCodingMode == CodingJPS) {
+ portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingJPS;
+ } else if (mCodingMode == CodingMPO) {
+ portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingMPO;
+ } else {
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
+ }
+ } else {
+ portCheck.format.image.eColorFormat = portParams.mColorFormat;
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
+ }
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ // RAW + YUV Capture
+ if (mYuvCapture) {
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
+ }
+#endif
+ //Stride for 1D tiler buffer is zero
+ portCheck.format.image.nStride = 0;
+ portCheck.nBufferCountActual = portParams.mNumBufs;
+ } else if (OMX_CAMERA_PORT_VIDEO_IN_VIDEO == port) {
+ portCheck.format.video.nFrameWidth = portParams.mWidth;
+ portCheck.format.video.nStride = portParams.mStride;
+ portCheck.format.video.nFrameHeight = portParams.mHeight;
+ portCheck.format.video.eColorFormat = portParams.mColorFormat;
+ portCheck.format.video.xFramerate = 30 << 16;
+ portCheck.nBufferCountActual = portParams.mNumBufs;
+ } else {
+ CAMHAL_LOGEB("Unsupported port index (%lu)", port);
+ }
+
+ if (( mSensorIndex == OMX_TI_StereoSensor ) && (OMX_CAMERA_PORT_VIDEO_OUT_VIDEO != port)) {
+ ret = setS3DFrameLayout(port);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEA("Error configuring stereo 3D frame layout");
+ return ret;
+ }
+ }
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ /* check if parameters are set correctly by calling GetParameter() */
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ portParams.mBufSize = portCheck.nBufferSize;
+ portParams.mStride = portCheck.format.image.nStride;
+
+ if (OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port) {
+ CAMHAL_LOGDB("\n *** IMG Width = %ld", portCheck.format.image.nFrameWidth);
+ CAMHAL_LOGDB("\n *** IMG Height = %ld", portCheck.format.image.nFrameHeight);
+
+ CAMHAL_LOGDB("\n *** IMG IMG FMT = %x", portCheck.format.image.eColorFormat);
+ CAMHAL_LOGDB("\n *** IMG portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n *** IMG portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n *** IMG portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n *** IMG portCheck.format.image.nStride = %ld\n",
+ portCheck.format.image.nStride);
+ } else if (OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port) {
+ CAMHAL_LOGDB("\n *** PRV Width = %ld", portCheck.format.video.nFrameWidth);
+ CAMHAL_LOGDB("\n *** PRV Height = %ld", portCheck.format.video.nFrameHeight);
+
+ CAMHAL_LOGDB("\n *** PRV IMG FMT = %x", portCheck.format.video.eColorFormat);
+ CAMHAL_LOGDB("\n *** PRV portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n *** PRV portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n *** PRV portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n ***PRV portCheck.format.video.nStride = %ld\n",
+ portCheck.format.video.nStride);
+ } else {
+ CAMHAL_LOGDB("\n *** VID Width = %ld", portCheck.format.video.nFrameWidth);
+ CAMHAL_LOGDB("\n *** VID Height = %ld", portCheck.format.video.nFrameHeight);
+
+ CAMHAL_LOGDB("\n *** VID IMG FMT = %x", portCheck.format.video.eColorFormat);
+ CAMHAL_LOGDB("\n *** VID portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n *** VID portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n *** VID portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n *** VID portCheck.format.video.nStride = %ld\n",
+ portCheck.format.video.nStride);
+ }
+
+ mSetFormatDone = true;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+
+ EXIT:
+
+ CAMHAL_LOGEB("Exiting function %s because of eError = 0x%x", __FUNCTION__, eError);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::flushBuffers(OMX_U32 nPort)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ if ( 0 != mFlushSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mFlushSem semaphore count %d", mFlushSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ OMXCameraPortParameters * mPreviewData = NULL;
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[nPort];
+
+ ///Register for the FLUSH event
+ ///This method just inserts a message in Event Q, which is checked in the callback
+ ///The sempahore passed is signalled by the callback
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandFlush,
+ nPort,
+ mFlushSem);
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Send FLUSH command to preview port
+ eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp,
+ OMX_CommandFlush,
+ nPort,
+ NULL);
+
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandFlush)-0x%x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ CAMHAL_LOGDA("Waiting for flush event");
+
+ ///Wait for the FLUSH event to occur
+ ret = mFlushSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+ //If somethiing bad happened while we wait
+ if (mComponentState == OMX_StateInvalid)
+ {
+ CAMHAL_LOGEA("Invalid State after Flush Exitting!!!");
+ goto EXIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Flush event received");
+ }
+ else
+ {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandFlush,
+ nPort,
+ NULL);
+ CAMHAL_LOGDA("Flush event timeout expired");
+ goto EXIT;
+ }
+
+ mOMXCallbackHandler->flush();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+ EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+///API to give the buffers to Adapter
+status_t OMXCameraAdapter::useBuffers(CameraMode mode, CameraBuffer * bufArr, int num, size_t length, unsigned int queueable)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ switch(mode)
+ {
+ case CAMERA_PREVIEW:
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersPreview(bufArr, num);
+ break;
+
+ case CAMERA_IMAGE_CAPTURE:
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersCapture(bufArr, num);
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mNumBufs = num;
+ break;
+
+ case CAMERA_VIDEO:
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersRawCapture(bufArr, num);
+ break;
+
+ case CAMERA_MEASUREMENT:
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersPreviewData(bufArr, num);
+ break;
+
+ case CAMERA_REPROCESS:
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersReprocess(bufArr, num);
+ break;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::UseBuffersPreviewData(CameraBuffer * bufArr, int num)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters * measurementData = NULL;
+ android::AutoMutex lock(mPreviewDataBufferLock);
+
+ LOG_FUNCTION_NAME;
+
+ if ( mComponentState != OMX_StateLoaded )
+ {
+ CAMHAL_LOGEA("Calling UseBuffersPreviewData() when not in LOADED state");
+ return BAD_VALUE;
+ }
+
+ if ( NULL == bufArr )
+ {
+ CAMHAL_LOGEA("NULL pointer passed for buffArr");
+ return BAD_VALUE;
+ }
+
+ if ( 0 != mUsePreviewDataSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mUsePreviewDataSem semaphore count %d", mUsePreviewDataSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
+ measurementData->mNumBufs = num ;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ///Register for port enable event on measurement port
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mMeasurementPortIndex,
+ mUsePreviewDataSem);
+
+ if ( ret == NO_ERROR )
+ {
+ CAMHAL_LOGDB("Registering for event %d", ret);
+ }
+ else
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ///Enable MEASUREMENT Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mMeasurementPortIndex,
+ NULL);
+
+ if ( eError == OMX_ErrorNone )
+ {
+ CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError);
+ goto EXIT;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = mUsePreviewDataSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+ //If somethiing bad happened while we wait
+ if (mComponentState == OMX_StateInvalid)
+ {
+ CAMHAL_LOGEA("Invalid State after measurement port enable Exitting!!!");
+ goto EXIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Port enable event arrived on measurement port");
+ }
+ else
+ {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mMeasurementPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Timeout expoired during port enable on measurement port");
+ goto EXIT;
+ }
+
+ CAMHAL_LOGDA("Port enable event arrived on measurement port");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
// Queues an asynchronous request to move the OMX component into the
// EXECUTING state. The actual transition is performed by
// doSwitchToExecuting() on the command-handler thread.
//
// NOTE: mStateSwitchLock is deliberately locked here and NOT released before
// returning — doSwitchToExecuting() unlocks it on all of its exit paths.
// Callers must not hold mStateSwitchLock when invoking this method.
//
// Returns the result of enqueueing the command message.
status_t OMXCameraAdapter::switchToExecuting()
{
    status_t ret = NO_ERROR;
    Utils::Message msg;

    LOG_FUNCTION_NAME;

    // Lock is handed off to doSwitchToExecuting(), which unlocks it.
    mStateSwitchLock.lock();
    msg.command = CommandHandler::CAMERA_SWITCH_TO_EXECUTING;
    msg.arg1 = mErrorNotifier;
    ret = mCommandHandler->put(&msg);

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+status_t OMXCameraAdapter::doSwitchToExecuting()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ LOG_FUNCTION_NAME;
+
+ if ( (mComponentState == OMX_StateExecuting) || (mComponentState == OMX_StateInvalid) ){
+ CAMHAL_LOGDA("Already in OMX_Executing state or OMX_StateInvalid state");
+ mStateSwitchLock.unlock();
+ return NO_ERROR;
+ }
+
+ if ( 0 != mSwitchToExecSem.Count() ){
+ CAMHAL_LOGEB("Error mSwitchToExecSem semaphore count %d", mSwitchToExecSem.Count());
+ goto EXIT;
+ }
+
+ ///Register for Preview port DISABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mSwitchToExecSem);
+ if ( NO_ERROR != ret ){
+ CAMHAL_LOGEB("Error in registering Port Disable for event %d", ret);
+ goto EXIT;
+ }
+ ///Disable Preview Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ ret = mSwitchToExecSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if (ret != NO_ERROR){
+ CAMHAL_LOGEB("Timeout PREVIEW PORT DISABLE %d", ret);
+ }
+
+ CAMHAL_LOGVB("PREV PORT DISABLED %d", ret);
+
+ ///Register for IDLE state switch event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ mSwitchToExecSem);
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error in IDLE STATE SWITCH %d", ret);
+ goto EXIT;
+ }
+ eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp ,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ NULL);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ ret = mSwitchToExecSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if (ret != NO_ERROR){
+ CAMHAL_LOGEB("Timeout IDLE STATE SWITCH %d", ret);
+ goto EXIT;
+ }
+ mComponentState = OMX_StateIdle;
+ CAMHAL_LOGVB("OMX_SendCommand(OMX_StateIdle) 0x%x", eError);
+
+ ///Register for EXECUTING state switch event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateExecuting,
+ mSwitchToExecSem);
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error in EXECUTING STATE SWITCH %d", ret);
+ goto EXIT;
+ }
+ eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp ,
+ OMX_CommandStateSet,
+ OMX_StateExecuting,
+ NULL);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ ret = mSwitchToExecSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if (ret != NO_ERROR){
+ CAMHAL_LOGEB("Timeout EXEC STATE SWITCH %d", ret);
+ goto EXIT;
+ }
+ mComponentState = OMX_StateExecuting;
+ CAMHAL_LOGVB("OMX_SendCommand(OMX_StateExecuting) 0x%x", eError);
+
+ mStateSwitchLock.unlock();
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+
+ EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ mStateSwitchLock.unlock();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::switchToIdle() {
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mIdleStateSwitchLock);
+
+ if ( mComponentState == OMX_StateIdle || mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGDA("Already in OMX_StateIdle, OMX_Loaded state or OMX_StateInvalid state");
+ return NO_ERROR;
+ }
+
+ if ( 0 != mSwitchToLoadedSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mSwitchToLoadedSem semaphore count %d", mSwitchToLoadedSem.Count());
+ goto EXIT;
+ }
+
+ ///Register for EXECUTING state transition.
+ ///This method just inserts a message in Event Q, which is checked in the callback
+ ///The sempahore passed is signalled by the callback
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ mSwitchToLoadedSem);
+
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ NULL);
+
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_StateIdle) - %x", eError);
+ }
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ ///Wait for the EXECUTING ->IDLE transition to arrive
+
+ CAMHAL_LOGDA("EXECUTING->IDLE state changed");
+ ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+ //If somethiing bad happened while we wait
+ if (mComponentState == OMX_StateInvalid)
+ {
+ CAMHAL_LOGEA("Invalid State after EXECUTING->IDLE Exitting!!!");
+ goto EXIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("EXECUTING->IDLE state changed");
+ }
+ else
+ {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ NULL);
+ CAMHAL_LOGEA("Timeout expired on EXECUTING->IDLE state change");
+ goto EXIT;
+ }
+
+ mComponentState = OMX_StateIdle;
+
+ return NO_ERROR;
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+
+
+status_t OMXCameraAdapter::prevPortEnable() {
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ ///Register for Preview port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mSwitchToLoadedSem);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Enable Preview Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+
+
+ CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandStateSet) 0x%x", eError);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ CAMHAL_LOGDA("Enabling Preview port");
+ ///Wait for state to switch to idle
+ ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+ //If somethiing bad happened while we wait
+ if (mComponentState == OMX_StateInvalid)
+ {
+ CAMHAL_LOGEA("Invalid State after Enabling Preview port Exitting!!!");
+ goto EXIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Preview port enabled!");
+ }
+ else
+ {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Preview enable timedout");
+
+ goto EXIT;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
// Transitions the OMX component to the LOADED state (going through IDLE
// first if necessary) and, unless bPortEnableRequired is false, re-enables
// the preview port afterwards via prevPortEnable().
//
// When bPortEnableRequired is false, all OMX buffer headers for the preview
// port (and the measurement port, if measurement is enabled) are freed
// between sending the LOADED command and waiting for its completion — the
// IDLE->LOADED transition cannot complete while the ports still own buffers.
//
// Returns NO_ERROR on success (combined with the translated OMX result);
// on failure, clears the available preview buffers and performs error
// cleanup.
status_t OMXCameraAdapter::switchToLoaded(bool bPortEnableRequired) {
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;

    LOG_FUNCTION_NAME;

    android::AutoMutex lock(mStateSwitchLock);
    if ( mComponentState == OMX_StateLoaded  || mComponentState == OMX_StateInvalid) {
        CAMHAL_LOGDA("Already in OMX_Loaded state or OMX_StateInvalid state");
        return NO_ERROR;
    }

    // LOADED can only be reached from IDLE; go through IDLE first.
    if ( mComponentState != OMX_StateIdle) {
        ret = switchToIdle();
        if (ret != NO_ERROR) return ret;
    }

    // A non-zero count means a previous transition never consumed its signal.
    if ( 0 != mSwitchToLoadedSem.Count() ) {
        CAMHAL_LOGEB("Error mSwitchToLoadedSem semaphore count %d", mSwitchToLoadedSem.Count());
        goto EXIT;
    }

    ///Register for LOADED state transition.
    ///This method just inserts a message in Event Q, which is checked in the callback
    ///The sempahore passed is signalled by the callback
    ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
                           OMX_EventCmdComplete,
                           OMX_CommandStateSet,
                           OMX_StateLoaded,
                           mSwitchToLoadedSem);

    if(ret!=NO_ERROR)
        {
        CAMHAL_LOGEB("Error in registering for event %d", ret);
        goto EXIT;
        }

    eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp,
                              OMX_CommandStateSet,
                              OMX_StateLoaded,
                              NULL);

    if(eError!=OMX_ErrorNone)
        {
        CAMHAL_LOGEB("OMX_SendCommand(OMX_StateLoaded) - %x", eError);
        }
    GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);

    if ( !bPortEnableRequired ) {
        // Free all port buffers now: the IDLE->LOADED transition requested
        // above will not complete until the ports have released them.
        OMXCameraPortParameters *mCaptureData , *mPreviewData, *measurementData;
        mCaptureData = mPreviewData = measurementData = NULL;

        mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
        mCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
        measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];

        ///Free the OMX Buffers
        for ( int i = 0 ; i < mPreviewData->mNumBufs ; i++ ) {
            eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
                                    mCameraAdapterParameters.mPrevPortIndex,
                                    mPreviewData->mBufferHeader[i]);

            if(eError!=OMX_ErrorNone) {
                CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
            }
            GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
        }

        if ( mMeasurementEnabled ) {

            // Measurement (ancillary data) buffers are freed the same way.
            for ( int i = 0 ; i < measurementData->mNumBufs ; i++ ) {
                eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
                                        mCameraAdapterParameters.mMeasurementPortIndex,
                                        measurementData->mBufferHeader[i]);
                if(eError!=OMX_ErrorNone) {
                    CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
                }
                GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
            }

            {
                android::AutoMutex lock(mPreviewDataBufferLock);
                mPreviewDataBuffersAvailable.clear();
            }

        }
    }

    CAMHAL_LOGDA("Switching IDLE->LOADED state");
    ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);

    //If somethiing bad happened while we wait
    if (mComponentState == OMX_StateInvalid)
        {
        CAMHAL_LOGEA("Invalid State after IDLE->LOADED Exitting!!!");
        goto EXIT;
        }

    if ( NO_ERROR == ret )
        {
        CAMHAL_LOGDA("IDLE->LOADED state changed");
        }
    else
        {
        // Timed out: drop the registration so a late event does not signal
        // a reused semaphore.
        ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
                           OMX_EventCmdComplete,
                           OMX_CommandStateSet,
                           OMX_StateLoaded,
                           NULL);
        CAMHAL_LOGEA("Timeout expired on IDLE->LOADED state change");
        goto EXIT;
        }

    mComponentState = OMX_StateLoaded;
    if (bPortEnableRequired == true) {
        prevPortEnable();
    }

    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));

EXIT:
    CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
    {
        android::AutoMutex lock(mPreviewBufferLock);
        ///Clear all the available preview buffers
        mPreviewBuffersAvailable.clear();
    }
    performCleanupAfterError();
    LOG_FUNCTION_NAME_EXIT;
    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
+
+status_t OMXCameraAdapter::UseBuffersPreview(CameraBuffer * bufArr, int num)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ int tmpHeight, tmpWidth;
+
+ LOG_FUNCTION_NAME;
+
+ if(!bufArr)
+ {
+ CAMHAL_LOGEA("NULL pointer passed for buffArr");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ OMXCameraPortParameters * mPreviewData = NULL;
+ OMXCameraPortParameters *measurementData = NULL;
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
+ mPreviewData->mNumBufs = num ;
+
+ if ( 0 != mUsePreviewSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mUsePreviewSem semaphore count %d", mUsePreviewSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ if(mPreviewData->mNumBufs != num)
+ {
+ CAMHAL_LOGEA("Current number of buffers doesnt equal new num of buffers passed!");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ mStateSwitchLock.lock();
+
+ if ( mComponentState == OMX_StateLoaded ) {
+
+ if (mPendingPreviewSettings & SetLDC) {
+ mPendingPreviewSettings &= ~SetLDC;
+ ret = setLDC(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setLDC() failed %d", ret);
+ }
+ }
+
+ if (mPendingPreviewSettings & SetNSF) {
+ mPendingPreviewSettings &= ~SetNSF;
+ ret = setNSF(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setNSF() failed %d", ret);
+ }
+ }
+
+ if (mPendingPreviewSettings & SetCapMode) {
+ mPendingPreviewSettings &= ~SetCapMode;
+ ret = setCaptureMode(mCapMode);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
+ }
+ }
+
+ if( (mCapMode == OMXCameraAdapter::VIDEO_MODE) ||
+ (mCapMode == OMXCameraAdapter::VIDEO_MODE_HQ) ) {
+
+ if (mPendingPreviewSettings & SetVNF) {
+ mPendingPreviewSettings &= ~SetVNF;
+ ret = enableVideoNoiseFilter(mVnfEnabled);
+ if ( NO_ERROR != ret){
+ CAMHAL_LOGEB("Error configuring VNF %x", ret);
+ }
+ }
+
+ if (mPendingPreviewSettings & SetVSTAB) {
+ mPendingPreviewSettings &= ~SetVSTAB;
+ ret = enableVideoStabilization(mVstabEnabled);
+ if ( NO_ERROR != ret) {
+ CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
+ }
+ }
+
+ }
+ }
+
+ ret = setSensorOrientation(mSensorOrientation);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error configuring Sensor Orientation %x", ret);
+ mSensorOrientation = 0;
+ }
+
+ if ( mComponentState == OMX_StateLoaded )
+ {
+ ///Register for IDLE state switch event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ mUsePreviewSem);
+
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Once we get the buffers, move component state to idle state and pass the buffers to OMX comp using UseBuffer
+ eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp ,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ NULL);
+
+ CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandStateSet) 0x%x", eError);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ mComponentState = OMX_StateIdle;
+ }
+ else
+ {
+ ///Register for Preview port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mUsePreviewSem);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Enable Preview Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ }
+
+
+ ///Configure DOMX to use either gralloc handles or vptrs
+ OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+ OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
+
+ domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ domxUseGrallocHandles.bEnable = OMX_TRUE;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ OMX_BUFFERHEADERTYPE *pBufferHdr;
+ for(int index=0;index<num;index++) {
+ OMX_U8 *ptr;
+
+ ptr = (OMX_U8 *)camera_buffer_get_omx_ptr (&bufArr[index]);
+ eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
+ &pBufferHdr,
+ mCameraAdapterParameters.mPrevPortIndex,
+ 0,
+ mPreviewData->mBufSize,
+ ptr);
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_UseBuffer-0x%x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ pBufferHdr->pAppPrivate = (OMX_PTR)&bufArr[index];
+ pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufferHdr->nVersion.s.nRevision = 0 ;
+ pBufferHdr->nVersion.s.nStep = 0;
+ mPreviewData->mBufferHeader[index] = pBufferHdr;
+ }
+
+ if ( mMeasurementEnabled )
+ {
+
+ for( int i = 0; i < num; i++ )
+ {
+ OMX_BUFFERHEADERTYPE *pBufHdr;
+ OMX_U8 *ptr;
+
+ ptr = (OMX_U8 *)camera_buffer_get_omx_ptr (&mPreviewDataBuffers[i]);
+ eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
+ &pBufHdr,
+ mCameraAdapterParameters.mMeasurementPortIndex,
+ 0,
+ measurementData->mBufSize,
+ ptr);
+
+ if ( eError == OMX_ErrorNone )
+ {
+ pBufHdr->pAppPrivate = (OMX_PTR *)&mPreviewDataBuffers[i];
+ pBufHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufHdr->nVersion.s.nRevision = 0 ;
+ pBufHdr->nVersion.s.nStep = 0;
+ measurementData->mBufferHeader[i] = pBufHdr;
+ }
+ else
+ {
+ CAMHAL_LOGEB("OMX_UseBuffer -0x%x", eError);
+ ret = BAD_VALUE;
+ break;
+ }
+ }
+
+ }
+
+ CAMHAL_LOGDA("Registering preview buffers");
+
+ ret = mUsePreviewSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+ //If somethiing bad happened while we wait
+ if (mComponentState == OMX_StateInvalid)
+ {
+ CAMHAL_LOGEA("Invalid State after Registering preview buffers Exitting!!!");
+ goto EXIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Preview buffer registration successfull");
+ }
+ else
+ {
+ if ( mComponentState == OMX_StateLoaded )
+ {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ NULL);
+ }
+ else
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ }
+ CAMHAL_LOGEA("Timeout expired on preview buffer registration");
+ goto EXIT;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+ ///If there is any failure, we reach here.
+ ///Here, we do any resource freeing and convert from OMX error code to Camera Hal error code
+EXIT:
+ mStateSwitchLock.unlock();
+
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
/**
 * Moves the OMX camera component from IDLE to EXECUTING and primes the
 * preview pipeline.
 *
 * Expects preview buffers to have been registered beforehand (while the
 * component transitioned LOADED->IDLE). Queues every preview buffer — and,
 * when measurement is enabled, every measurement buffer — back to the
 * component via OMX_FillThisBuffer, then resets the FPS/frame-count
 * bookkeeping used to gate takePicture/autoFocus.
 *
 * @return NO_ERROR on success; NO_INIT if the start-preview semaphore is in
 *         an unexpected state; otherwise an OMX error translated to an
 *         Android status code. On failure, performCleanupAfterError() runs.
 */
status_t OMXCameraAdapter::startPreview()
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMXCameraPortParameters *mPreviewData = NULL;
    OMXCameraPortParameters *measurementData = NULL;

    LOG_FUNCTION_NAME;

    // A non-zero count means a previous state transition never completed;
    // proceeding would desynchronize the event/semaphore bookkeeping.
    if( 0 != mStartPreviewSem.Count() )
        {
        CAMHAL_LOGEB("Error mStartPreviewSem semaphore count %d", mStartPreviewSem.Count());
        ret = NO_INIT;
        goto EXIT;
        }

    // Enable all preview mode extra data.
    if ( OMX_ErrorNone == eError) {
        ret |= setExtraData(true, mCameraAdapterParameters.mPrevPortIndex, OMX_AncillaryData);
#ifdef OMAP_ENHANCEMENT_CPCAM
        ret |= setExtraData(true, OMX_ALL, OMX_TI_VectShotInfo);
#endif
#ifdef CAMERAHAL_OMX_PROFILING
        if ( UNLIKELY( mDebugProfile ) ) {
            ret |= setExtraData(true, OMX_ALL, OMX_TI_ProfilerData);
        }
#endif
    }

    mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
    measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];

    if( OMX_StateIdle == mComponentState )
        {
        ///Register for EXECUTING state transition.
        ///This method just inserts a message in Event Q, which is checked in the callback
        ///The sempahore passed is signalled by the callback
        ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
                               OMX_EventCmdComplete,
                               OMX_CommandStateSet,
                               OMX_StateExecuting,
                               mStartPreviewSem);

        if(ret!=NO_ERROR)
            {
            CAMHAL_LOGEB("Error in registering for event %d", ret);
            goto EXIT;
            }

        ///Switch to EXECUTING state
        eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
                                 OMX_CommandStateSet,
                                 OMX_StateExecuting,
                                 NULL);

        if(eError!=OMX_ErrorNone)
            {
            CAMHAL_LOGEB("OMX_SendCommand(OMX_StateExecuting)-0x%x", eError);
            }

        CAMHAL_LOGDA("+Waiting for component to go into EXECUTING state");
        ret = mStartPreviewSem.WaitTimeout(OMX_CMD_TIMEOUT);

        //If somethiing bad happened while we wait
        if (mComponentState == OMX_StateInvalid)
          {
            CAMHAL_LOGEA("Invalid State after IDLE_EXECUTING Exitting!!!");
            goto EXIT;
          }

        if ( NO_ERROR == ret )
            {
            CAMHAL_LOGDA("+Great. Component went into executing state!!");
            }
        else
            {
            // Timed out: withdraw the registration so a late completion
            // cannot signal a semaphore nobody is waiting on.
            ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
                               OMX_EventCmdComplete,
                               OMX_CommandStateSet,
                               OMX_StateExecuting,
                               NULL);
            CAMHAL_LOGDA("Timeout expired on executing state switch!");
            goto EXIT;
            }

        mComponentState = OMX_StateExecuting;

        }

    // NOTE(review): this releases the state-switch lock taken earlier in the
    // preview-setup sequence (outside this function) — confirm against the
    // UseBuffersPreview path before refactoring.
    mStateSwitchLock.unlock();

    //Queue all the buffers on preview port
    for(int index=0;index< mPreviewData->mMaxQueueable;index++)
        {
        CAMHAL_LOGDB("Queuing buffer on Preview port - 0x%x", (uint32_t)mPreviewData->mBufferHeader[index]->pBuffer);
        mPreviewData->mStatus[index] = OMXCameraPortParameters::FILL;
        eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
                    (OMX_BUFFERHEADERTYPE*)mPreviewData->mBufferHeader[index]);
        if(eError!=OMX_ErrorNone)
            {
            CAMHAL_LOGEB("OMX_FillThisBuffer-0x%x", eError);
            }
        mFramesWithDucati++;
#ifdef CAMERAHAL_DEBUG
        {
            android::AutoMutex locker(mBuffersWithDucatiLock);
            mBuffersWithDucati.add((int)mPreviewData->mBufferHeader[index]->pBuffer,1);
        }
#endif
        GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
        }

    if ( mMeasurementEnabled )
        {

        for(int index=0;index< mPreviewData->mNumBufs;index++)
            {
            CAMHAL_LOGDB("Queuing buffer on Measurement port - 0x%x", (uint32_t) measurementData->mBufferHeader[index]->pBuffer);
            measurementData->mStatus[index] = OMXCameraPortParameters::FILL;
            eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
                            (OMX_BUFFERHEADERTYPE*) measurementData->mBufferHeader[index]);
            if(eError!=OMX_ErrorNone)
                {
                CAMHAL_LOGEB("OMX_FillThisBuffer-0x%x", eError);
                }
            GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
            }

        }

    setFocusCallback(true);

    //reset frame rate estimates
    mFPS = 0.0f;
    mLastFPS = 0.0f;
    // start frame count from 0. i.e first frame after
    // startPreview will be the 0th reference frame
    // this way we will wait for second frame until
    // takePicture/autoFocus is allowed to run. we
    // are seeing SetConfig/GetConfig fail after
    // calling after the first frame and not failing
    // after the second frame
    mFrameCount = -1;
    mLastFrameCount = 0;
    mIter = 1;
    mLastFPSTime = systemTime();
    mTunnelDestroyed = false;

    LOG_FUNCTION_NAME_EXIT;

    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));

    EXIT:

    CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
    performCleanupAfterError();
    mStateSwitchLock.unlock();
    LOG_FUNCTION_NAME_EXIT;

    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));

}
+
/**
 * Tears down the preview stream without fully unloading the component:
 * waits for at least one preview frame (stopping before the first frame can
 * wedge the component in an IDLE->LOADED timeout), disables AF assist,
 * disables the image port, flushes buffers and switches the component to
 * IDLE.
 *
 * @return NO_ERROR on success; ALREADY_EXISTS if preview was never actually
 *         started (LOADED_PREVIEW_STATE); NO_INIT if called outside
 *         EXECUTING state; otherwise a translated OMX error.
 */
status_t OMXCameraAdapter::destroyTunnel()
{
    LOG_FUNCTION_NAME;

    OMX_ERRORTYPE eError = OMX_ErrorNone;
    status_t ret = NO_ERROR;

    OMXCameraPortParameters *mCaptureData , *mPreviewData, *measurementData;
    mCaptureData = mPreviewData = measurementData = NULL;

    mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
    mCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
    measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];

    if (mAdapterState == LOADED_PREVIEW_STATE) {
        // Something happened in CameraHal between UseBuffers and startPreview
        // this means that state switch is still locked..so we need to unlock else
        // deadlock will occur on the next start preview
        mStateSwitchLock.unlock();
        return ALREADY_EXISTS;
    }

    if ( mComponentState != OMX_StateExecuting )
        {
        CAMHAL_LOGEA("Calling StopPreview() when not in EXECUTING state");
        LOG_FUNCTION_NAME_EXIT;
        return NO_INIT;
        }

    {
        android::AutoMutex lock(mFrameCountMutex);
        // we should wait for the first frame to come before trying to stopPreview...if not
        // we might put OMXCamera in a bad state (IDLE->LOADED timeout). Seeing this a lot
        // after a capture
        if (mFrameCount < 1) {
            // I want to wait for at least two frames....
            mFrameCount = -1;

            // first frame may time some time to come...so wait for an adequate amount of time
            // which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover.
            ret = mFirstFrameCondition.waitRelative(mFrameCountMutex,
                                                    (nsecs_t) 2 * OMX_CAPTURE_TIMEOUT * 1000);
        }
        // even if we timeout waiting for the first frame...go ahead with trying to stop preview
        // signal anybody that might be waiting
        mFrameCount = 0;
        mFirstFrameCondition.broadcast();
    }

    {
        // Wake any thread blocked on an in-flight autofocus request.
        android::AutoMutex lock(mDoAFMutex);
        mDoAFCond.broadcast();
    }

    // Disable AF assist before tearing the pipeline down.
    OMX_CONFIG_FOCUSASSISTTYPE focusAssist;
    OMX_INIT_STRUCT_PTR (&focusAssist, OMX_CONFIG_FOCUSASSISTTYPE);
    focusAssist.nPortIndex = OMX_ALL;
    focusAssist.bFocusAssist = OMX_FALSE;
    CAMHAL_LOGDB("Configuring AF Assist mode 0x%x", focusAssist.bFocusAssist);
    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                           (OMX_INDEXTYPE) OMX_IndexConfigFocusAssist,
                           &focusAssist);
    if ( OMX_ErrorNone != eError )
        {
        CAMHAL_LOGEB("Error while configuring AF Assist mode 0x%x", eError);
        }
    else
        {
        CAMHAL_LOGDA("Camera AF Assist mode configured successfully");
        }

    // A non-zero count indicates a stale, unconsumed stop event.
    if ( 0 != mStopPreviewSem.Count() )
        {
        CAMHAL_LOGEB("Error mStopPreviewSem semaphore count %d", mStopPreviewSem.Count());
        LOG_FUNCTION_NAME_EXIT;
        return NO_INIT;
        }

    ret = disableImagePort();
    if ( NO_ERROR != ret ) {
        CAMHAL_LOGEB("disable image port failed 0x%x", ret);
        goto EXIT;
    }

    CAMHAL_LOGDB("Average framerate: %f", mFPS);

    //Avoid state switching of the OMX Component
    ret = flushBuffers();
    if ( NO_ERROR != ret )
        {
        CAMHAL_LOGEB("Flush Buffers failed 0x%x", ret);
        goto EXIT;
        }

    switchToIdle();

    mTunnelDestroyed = true;
    LOG_FUNCTION_NAME_EXIT;
    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));

EXIT:
    CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
    {
        android::AutoMutex lock(mPreviewBufferLock);
        ///Clear all the available preview buffers
        mPreviewBuffersAvailable.clear();
    }
    performCleanupAfterError();
    LOG_FUNCTION_NAME_EXIT;
    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));

}
+
+status_t OMXCameraAdapter::stopPreview() {
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ status_t ret = NO_ERROR;
+
+#ifdef CAMERAHAL_OMX_PROFILING
+ ret |= setExtraData(false, OMX_ALL, OMX_TI_ProfilerData);
+#endif
+ if (mTunnelDestroyed == false){
+ ret = destroyTunnel();
+ if (ret == ALREADY_EXISTS) {
+ // Special case to handle invalid stopping preview in LOADED_PREVIEW_STATE
+ return NO_ERROR;
+ }
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGEB(" destroyTunnel returned error ");
+ return ret;
+ }
+ }
+
+ mTunnelDestroyed = false;
+
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
+ ///Clear all the available preview buffers
+ mPreviewBuffersAvailable.clear();
+ }
+
+ switchToLoaded();
+
+ mFirstTimeInit = true;
+ mPendingCaptureSettings = 0;
+ mPendingReprocessSettings = 0;
+ mFramesWithDucati = 0;
+ mFramesWithDisplay = 0;
+ mFramesWithEncoder = 0;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::setSensorOverclock(bool enable)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_BOOLEANTYPE bOMX;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateLoaded != mComponentState )
+ {
+ CAMHAL_LOGDA("OMX component is not in loaded state");
+ return ret;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
+
+ if ( enable )
+ {
+ bOMX.bEnabled = OMX_TRUE;
+ }
+ else
+ {
+ bOMX.bEnabled = OMX_FALSE;
+ }
+
+ CAMHAL_LOGDB("Configuring Sensor overclock mode 0x%x", bOMX.bEnabled);
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, ( OMX_INDEXTYPE ) OMX_TI_IndexParamSensorOverClockMode, &bOMX);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while setting Sensor overclock 0x%x", eError);
+ }
+ else
+ {
+ mSensorOverclock = enable;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::printComponentVersion(OMX_HANDLETYPE handle)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_VERSIONTYPE compVersion;
+ char compName[OMX_MAX_STRINGNAME_SIZE];
+ char *currentUUID = NULL;
+ size_t offset = 0;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == handle )
+ {
+ CAMHAL_LOGEB("Invalid OMX Handle =0x%x", ( unsigned int ) handle);
+ ret = -EINVAL;
+ }
+
+ mCompUUID[0] = 0;
+
+ if ( NO_ERROR == ret )
+ {
+ eError = OMX_GetComponentVersion(handle,
+ compName,
+ &compVersion,
+ &mCompRevision,
+ &mCompUUID
+ );
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("OMX_GetComponentVersion returned 0x%x", eError);
+ ret = BAD_VALUE;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGVB("OMX Component name: [%s]", compName);
+ CAMHAL_LOGVB("OMX Component version: [%u]", ( unsigned int ) compVersion.nVersion);
+ CAMHAL_LOGVB("Spec version: [%u]", ( unsigned int ) mCompRevision.nVersion);
+ CAMHAL_LOGVB("Git Commit ID: [%s]", mCompUUID);
+ currentUUID = ( char * ) mCompUUID;
+ }
+
+ if ( NULL != currentUUID )
+ {
+ offset = strlen( ( const char * ) mCompUUID) + 1;
+ if ( (int)currentUUID + (int)offset - (int)mCompUUID < OMX_MAX_STRINGNAME_SIZE )
+ {
+ currentUUID += offset;
+ CAMHAL_LOGVB("Git Branch: [%s]", currentUUID);
+ }
+ else
+ {
+ ret = BAD_VALUE;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ offset = strlen( ( const char * ) currentUUID) + 1;
+
+ if ( (int)currentUUID + (int)offset - (int)mCompUUID < OMX_MAX_STRINGNAME_SIZE )
+ {
+ currentUUID += offset;
+ CAMHAL_LOGVB("Build date and time: [%s]", currentUUID);
+ }
+ else
+ {
+ ret = BAD_VALUE;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ offset = strlen( ( const char * ) currentUUID) + 1;
+
+ if ( (int)currentUUID + (int)offset - (int)mCompUUID < OMX_MAX_STRINGNAME_SIZE )
+ {
+ currentUUID += offset;
+ CAMHAL_LOGVB("Build description: [%s]", currentUUID);
+ }
+ else
+ {
+ ret = BAD_VALUE;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setS3DFrameLayout(OMX_U32 port) const
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_FRAMELAYOUTTYPE frameLayout;
+ const OMXCameraPortParameters *cap =
+ &mCameraAdapterParameters.mCameraPortParams[port];
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR (&frameLayout, OMX_TI_FRAMELAYOUTTYPE);
+ frameLayout.nPortIndex = port;
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamStereoFrmLayout, &frameLayout);
+ if (eError != OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("Error while getting S3D frame layout: 0x%x", eError);
+ return -EINVAL;
+ }
+
+ if (cap->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottomSubsample)
+ {
+ frameLayout.eFrameLayout = OMX_TI_StereoFrameLayoutTopBottom;
+ frameLayout.nSubsampleRatio = 2;
+ }
+ else if (cap->mFrameLayoutType ==
+ OMX_TI_StereoFrameLayoutLeftRightSubsample)
+ {
+ frameLayout.eFrameLayout = OMX_TI_StereoFrameLayoutLeftRight;
+ frameLayout.nSubsampleRatio = 2;
+ }
+ else
+ {
+ frameLayout.eFrameLayout = cap->mFrameLayoutType;
+ frameLayout.nSubsampleRatio = 1;
+ }
+ frameLayout.nSubsampleRatio = frameLayout.nSubsampleRatio << 7;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamStereoFrmLayout, &frameLayout);
+ if (eError != OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("Error while setting S3D frame layout: 0x%x", eError);
+ return -EINVAL;
+ }
+ else
+ {
+ CAMHAL_LOGDB("S3D frame layout %d applied successfully on port %lu",
+ frameLayout.eFrameLayout, port);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::autoFocus()
+{
+ status_t ret = NO_ERROR;
+ Utils::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+ {
+ android::AutoMutex lock(mFrameCountMutex);
+ if (mFrameCount < 1) {
+ // first frame may time some time to come...so wait for an adequate amount of time
+ // which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover.
+ ret = mFirstFrameCondition.waitRelative(mFrameCountMutex,
+ (nsecs_t) 2 * OMX_CAPTURE_TIMEOUT * 1000);
+ if ((NO_ERROR != ret) || (mFrameCount == 0)) {
+ goto EXIT;
+ }
+ }
+ }
+
+ msg.command = CommandHandler::CAMERA_PERFORM_AUTOFOCUS;
+ msg.arg1 = mErrorNotifier;
+ ret = mCommandHandler->put(&msg);
+
+ EXIT:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::takePicture()
+{
+ status_t ret = NO_ERROR;
+ Utils::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+ if (mNextState != REPROCESS_STATE) {
+ android::AutoMutex lock(mFrameCountMutex);
+ if (mFrameCount < 1) {
+ // first frame may time some time to come...so wait for an adequate amount of time
+ // which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover.
+ ret = mFirstFrameCondition.waitRelative(mFrameCountMutex,
+ (nsecs_t) 2 * OMX_CAPTURE_TIMEOUT * 1000);
+ if ((NO_ERROR != ret) || (mFrameCount == 0)) {
+ goto EXIT;
+ }
+ }
+ }
+
+ // TODO(XXX): re-using take picture to kick off reprocessing pipe
+ // Need to rethink this approach during reimplementation
+ if (mNextState == REPROCESS_STATE) {
+ msg.command = CommandHandler::CAMERA_START_REPROCESS;
+ } else {
+ msg.command = CommandHandler::CAMERA_START_IMAGE_CAPTURE;
+ }
+
+ msg.arg1 = mErrorNotifier;
+ msg.arg2 = cacheCaptureParameters();
+ ret = mCommandHandler->put(&msg);
+
+ EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
// Starting video capture needs no OMX-specific work here; delegate to the
// base adapter's implementation.
status_t OMXCameraAdapter::startVideoCapture()
{
    return BaseCameraAdapter::startVideoCapture();
}
+
// Stopping video capture needs no OMX-specific work here; delegate to the
// base adapter's implementation.
status_t OMXCameraAdapter::stopVideoCapture()
{
    return BaseCameraAdapter::stopVideoCapture();
}
+
//API to get the frame size required to be allocated. This size is used to override the size passed
//by camera service when VSTAB/VNF is turned ON for example
/**
 * Returns (via out-params) the buffer dimensions the component requires for
 * the current preview configuration.
 *
 * While the component is still in LOADED state, any pending preview
 * settings (LDC, NSF, capture mode, and — in video modes — VNF/VSTAB) are
 * flushed to the component first, since they can change the required
 * dimensions. The sensor orientation is applied unconditionally; on
 * failure it is reset to 0.
 *
 * @param width  [out] required buffer width; untouched on query failure.
 * @param height [out] required buffer height; untouched on query failure.
 * @return NO_ERROR on success, otherwise the first configuration error.
 */
status_t OMXCameraAdapter::getFrameSize(size_t &width, size_t &height)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_CONFIG_RECTTYPE tFrameDim;

    LOG_FUNCTION_NAME;

    OMX_INIT_STRUCT_PTR (&tFrameDim, OMX_CONFIG_RECTTYPE);
    tFrameDim.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;

    // A pending state switch must complete (back to LOADED) before the
    // settings below can be applied.
    if ( mOMXStateSwitch )
        {
        ret = switchToLoaded(true);
        if ( NO_ERROR != ret )
            {
            CAMHAL_LOGEB("switchToLoaded() failed 0x%x", ret);
            goto exit;
            }

        mOMXStateSwitch = false;
        }

    if ( OMX_StateLoaded == mComponentState )
        {

        // Each pending flag is cleared before its setter runs, so a failed
        // setter is not retried on the next call.
        if (mPendingPreviewSettings & SetLDC) {
            mPendingPreviewSettings &= ~SetLDC;
            ret = setLDC(mIPP);
            if ( NO_ERROR != ret ) {
                CAMHAL_LOGEB("setLDC() failed %d", ret);
                LOG_FUNCTION_NAME_EXIT;
                goto exit;
            }
        }

        if (mPendingPreviewSettings & SetNSF) {
            mPendingPreviewSettings &= ~SetNSF;
            ret = setNSF(mIPP);
            if ( NO_ERROR != ret ) {
                CAMHAL_LOGEB("setNSF() failed %d", ret);
                LOG_FUNCTION_NAME_EXIT;
                goto exit;
            }
        }

        if (mPendingPreviewSettings & SetCapMode) {
            mPendingPreviewSettings &= ~SetCapMode;
            ret = setCaptureMode(mCapMode);
            if ( NO_ERROR != ret ) {
                CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
            }
        }

        // VNF/VSTAB only apply to video capture modes.
        if((mCapMode == OMXCameraAdapter::VIDEO_MODE) ||
           (mCapMode == OMXCameraAdapter::VIDEO_MODE_HQ) ) {

            if (mPendingPreviewSettings & SetVNF) {
                mPendingPreviewSettings &= ~SetVNF;
                ret = enableVideoNoiseFilter(mVnfEnabled);
                if ( NO_ERROR != ret){
                    CAMHAL_LOGEB("Error configuring VNF %x", ret);
                }
            }

            if (mPendingPreviewSettings & SetVSTAB) {
                mPendingPreviewSettings &= ~SetVSTAB;
                ret = enableVideoStabilization(mVstabEnabled);
                if ( NO_ERROR != ret) {
                    CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
                }
            }

        }
        }

    ret = setSensorOrientation(mSensorOrientation);
    if ( NO_ERROR != ret )
        {
        CAMHAL_LOGEB("Error configuring Sensor Orientation %x", ret);
        mSensorOrientation = 0;
        }

    if ( NO_ERROR == ret )
        {
        // Query the component for the 2D buffer allocation dimensions it
        // actually needs (may exceed the nominal preview size).
        eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, ( OMX_INDEXTYPE ) OMX_TI_IndexParam2DBufferAllocDimension, &tFrameDim);
        if ( OMX_ErrorNone == eError)
            {
            width = tFrameDim.nWidth;
            height = tFrameDim.nHeight;
            }
        }

exit:

    CAMHAL_LOGDB("Required frame size %dx%d", width, height);
    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+status_t OMXCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+ OMX_PARAM_PORTDEFINITIONTYPE portCheck;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateLoaded != mComponentState )
+ {
+ CAMHAL_LOGEA("Calling getFrameDataSize() when not in LOADED state");
+ dataFrameSize = 0;
+ ret = BAD_VALUE;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR(&portCheck, OMX_PARAM_PORTDEFINITIONTYPE);
+ portCheck.nPortIndex = mCameraAdapterParameters.mMeasurementPortIndex;
+
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, OMX_IndexParamPortDefinition, &portCheck);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("OMX_GetParameter on OMX_IndexParamPortDefinition returned: 0x%x", eError);
+ dataFrameSize = 0;
+ ret = BAD_VALUE;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ portCheck.nBufferCountActual = bufferCount;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, OMX_IndexParamPortDefinition, &portCheck);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("OMX_SetParameter on OMX_IndexParamPortDefinition returned: 0x%x", eError);
+ dataFrameSize = 0;
+ ret = BAD_VALUE;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, OMX_IndexParamPortDefinition, &portCheck);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("OMX_GetParameter on OMX_IndexParamPortDefinition returned: 0x%x", eError);
+ ret = BAD_VALUE;
+ }
+ else
+ {
+ mCameraAdapterParameters.mCameraPortParams[portCheck.nPortIndex].mBufSize = portCheck.nBufferSize;
+ dataFrameSize = portCheck.nBufferSize;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void OMXCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
+{
+ LOG_FUNCTION_NAME;
+
+ static const unsigned int DEGREES_TILT_IGNORE = 45;
+
+ // if tilt angle is greater than DEGREES_TILT_IGNORE
+ // we are going to ignore the orientation returned from
+ // sensor. the orientation returned from sensor is not
+ // reliable. Value of DEGREES_TILT_IGNORE may need adjusting
+ if (tilt > DEGREES_TILT_IGNORE) {
+ return;
+ }
+
+ int mountOrientation = 0;
+ bool isFront = false;
+ if (mCapabilities) {
+ const char * const mountOrientationString =
+ mCapabilities->get(CameraProperties::ORIENTATION_INDEX);
+ if (mountOrientationString) {
+ mountOrientation = atoi(mountOrientationString);
+ }
+
+ const char * const facingString = mCapabilities->get(CameraProperties::FACING_INDEX);
+ if (facingString) {
+ isFront = strcmp(facingString, TICameraParameters::FACING_FRONT) == 0;
+ }
+ }
+
+ // direction is a constant sign for facing, meaning the rotation direction relative to device
+ // +1 (clockwise) for back sensor and -1 (counter-clockwise) for front sensor
+ const int direction = isFront ? -1 : 1;
+
+ int rotation = mountOrientation + direction*orientation;
+
+ // crop the calculated value to [0..360) range
+ while ( rotation < 0 ) rotation += 360;
+ rotation %= 360;
+
+ if (rotation != mDeviceOrientation) {
+ mDeviceOrientation = rotation;
+
+ // restart face detection with new rotation
+ setFaceDetectionOrientation(mDeviceOrientation);
+ }
+ CAMHAL_LOGVB("orientation = %d tilt = %d device_orientation = %d", orientation, tilt, mDeviceOrientation);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/* Application callback Functions */
+/*========================================================*/
+/* @ fn SampleTest_EventHandler :: Application callback */
+/*========================================================*/
+OMX_ERRORTYPE OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData)
+{
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGDB("Event %d", eEvent);
+
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ OMXCameraAdapter *oca = (OMXCameraAdapter*)pAppData;
+ ret = oca->OMXCameraAdapterEventHandler(hComponent, eEvent, nData1, nData2, pEventData);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+/* Application callback Functions */
+/*========================================================*/
+/* @ fn SampleTest_EventHandler :: Application callback */
+/*========================================================*/
+OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData)
+{
+
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ CAMHAL_LOGDB("+OMX_Event %x, %d %d", eEvent, (int)nData1, (int)nData2);
+
+ switch (eEvent) {
+ case OMX_EventCmdComplete:
+ CAMHAL_LOGDB("+OMX_EventCmdComplete %d %d", (int)nData1, (int)nData2);
+
+ if (OMX_CommandStateSet == nData1) {
+ mCameraAdapterParameters.mState = (OMX_STATETYPE) nData2;
+
+ } else if (OMX_CommandFlush == nData1) {
+ CAMHAL_LOGDB("OMX_CommandFlush received for port %d", (int)nData2);
+
+ } else if (OMX_CommandPortDisable == nData1) {
+ CAMHAL_LOGDB("OMX_CommandPortDisable received for port %d", (int)nData2);
+
+ } else if (OMX_CommandPortEnable == nData1) {
+ CAMHAL_LOGDB("OMX_CommandPortEnable received for port %d", (int)nData2);
+
+ } else if (OMX_CommandMarkBuffer == nData1) {
+ ///This is not used currently
+ }
+
+ CAMHAL_LOGDA("-OMX_EventCmdComplete");
+ break;
+
+ case OMX_EventIndexSettingChanged:
+ CAMHAL_LOGDB("OMX_EventIndexSettingChanged event received data1 0x%x, data2 0x%x",
+ ( unsigned int ) nData1, ( unsigned int ) nData2);
+ break;
+
+ case OMX_EventError:
+ CAMHAL_LOGDB("OMX interface failed to execute OMX command %d", (int)nData1);
+ CAMHAL_LOGDA("See OMX_INDEXTYPE for reference");
+ if ( NULL != mErrorNotifier && ( ( OMX_U32 ) OMX_ErrorHardware == nData1 ) && mComponentState != OMX_StateInvalid)
+ {
+ CAMHAL_LOGEA("***Got Fatal Error Notification***\n");
+ mComponentState = OMX_StateInvalid;
+ /*
+ Remove any unhandled events and
+ unblock any waiting semaphores
+ */
+ if ( !mEventSignalQ.isEmpty() )
+ {
+ for (unsigned int i = 0 ; i < mEventSignalQ.size(); i++ )
+ {
+ CAMHAL_LOGEB("***Removing %d EVENTS***** \n", mEventSignalQ.size());
+ //remove from queue and free msg
+ Utils::Message *msg = mEventSignalQ.itemAt(i);
+ if ( NULL != msg )
+ {
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
+ if ( sem )
+ {
+ sem->Signal();
+ }
+ free(msg);
+ }
+ }
+ mEventSignalQ.clear();
+ }
+ ///Report Error to App
+ mErrorNotifier->errorNotify(CAMERA_ERROR_FATAL);
+ }
+ break;
+
+ case OMX_EventMark:
+ break;
+
+ case OMX_EventPortSettingsChanged:
+ break;
+
+ case OMX_EventBufferFlag:
+ break;
+
+ case OMX_EventResourcesAcquired:
+ break;
+
+ case OMX_EventComponentResumed:
+ break;
+
+ case OMX_EventDynamicResourcesAvailable:
+ break;
+
+ case OMX_EventPortFormatDetected:
+ break;
+
+ default:
+ break;
+ }
+
+ ///Signal to the thread(s) waiting that the event has occured
+ SignalEvent(hComponent, eEvent, nData1, nData2, pEventData);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return eError;
+
+ EXIT:
+
+ CAMHAL_LOGEB("Exiting function %s because of eError=%x", __FUNCTION__, eError);
+ LOG_FUNCTION_NAME_EXIT;
+ return eError;
+}
+
/**
 * Matches an incoming OMX event against the registered-waiter queue and
 * signals (then removes) the first matching entry's semaphore.
 *
 * Unmatched focus-status notifications fall through to the OMX callback
 * handler so focus updates are never lost.
 *
 * @return OMX_ErrorNone always.
 */
OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
                                            OMX_IN OMX_EVENTTYPE eEvent,
                                            OMX_IN OMX_U32 nData1,
                                            OMX_IN OMX_U32 nData2,
                                            OMX_IN OMX_PTR pEventData)
{
    android::AutoMutex lock(mEventLock);
    Utils::Message *msg;
    bool eventSignalled = false;

    LOG_FUNCTION_NAME;

    if ( !mEventSignalQ.isEmpty() )
        {
        CAMHAL_LOGDA("Event queue not empty");

        for ( unsigned int i = 0 ; i < mEventSignalQ.size() ; i++ )
            {
            msg = mEventSignalQ.itemAt(i);
            if ( NULL != msg )
                {
                // NOTE(review): the first disjunct means any entry with a
                // non-zero command matches EVERY event type, with filtering
                // done only via arg1/arg2; and since OMX_EventCmdComplete
                // is 0, entries registered for it rely on the second
                // disjunct. This mirrors RemoveEvent() and looks
                // intentional upstream — confirm before "fixing" to &&.
                if( ( msg->command != 0 || msg->command == ( unsigned int ) ( eEvent ) )
                    && ( !msg->arg1 || ( OMX_U32 ) msg->arg1 == nData1 )
                    && ( !msg->arg2 || ( OMX_U32 ) msg->arg2 == nData2 )
                    && msg->arg3)
                    {
                    Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
                    CAMHAL_LOGDA("Event matched, signalling sem");
                    mEventSignalQ.removeAt(i);
                    //Signal the semaphore provided
                    sem->Signal();
                    free(msg);
                    eventSignalled = true;
                    break;
                    }
                }
            }
        }
    else
        {
        CAMHAL_LOGDA("Event queue empty!!!");
        }

    // Special handling for any unregistered events
    if (!eventSignalled) {
        // Handling for focus callback
        if ((nData2 == OMX_IndexConfigCommonFocusStatus) &&
            (eEvent == (OMX_EVENTTYPE) OMX_EventIndexSettingChanged)) {
            Utils::Message msg;
            msg.command = OMXCallbackHandler::CAMERA_FOCUS_STATUS;
            msg.arg1 = NULL;
            msg.arg2 = NULL;
            mOMXCallbackHandler->put(&msg);
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return OMX_ErrorNone;
}
+
/**
 * Removes (without signalling) the first queue entry matching the given
 * event — used to withdraw a registration after a wait timed out so a late
 * completion cannot signal a semaphore nobody is waiting on.
 *
 * @return OMX_ErrorNone always (a missing entry is not an error).
 */
OMX_ERRORTYPE OMXCameraAdapter::RemoveEvent(OMX_IN OMX_HANDLETYPE hComponent,
                                            OMX_IN OMX_EVENTTYPE eEvent,
                                            OMX_IN OMX_U32 nData1,
                                            OMX_IN OMX_U32 nData2,
                                            OMX_IN OMX_PTR pEventData)
{
    android::AutoMutex lock(mEventLock);
    Utils::Message *msg;
    LOG_FUNCTION_NAME;

    if ( !mEventSignalQ.isEmpty() )
        {
        CAMHAL_LOGDA("Event queue not empty");

        for ( unsigned int i = 0 ; i < mEventSignalQ.size() ; i++ )
            {
            msg = mEventSignalQ.itemAt(i);
            if ( NULL != msg )
                {
                // NOTE(review): matching predicate deliberately mirrors
                // SignalEvent() — see the note there about the
                // `command != 0 ||` disjunct before changing either.
                if( ( msg->command != 0 || msg->command == ( unsigned int ) ( eEvent ) )
                    && ( !msg->arg1 || ( OMX_U32 ) msg->arg1 == nData1 )
                    && ( !msg->arg2 || ( OMX_U32 ) msg->arg2 == nData2 )
                    && msg->arg3)
                    {
                    Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
                    CAMHAL_LOGDA("Event matched, signalling sem");
                    mEventSignalQ.removeAt(i);
                    free(msg);
                    break;
                    }
                }
            }
        }
    else
        {
        CAMHAL_LOGEA("Event queue empty!!!");
        }
    LOG_FUNCTION_NAME_EXIT;

    return OMX_ErrorNone;
}
+
+
+status_t OMXCameraAdapter::RegisterForEvent(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN Utils::Semaphore &semaphore)
+{
+ status_t ret = NO_ERROR;
+ ssize_t res;
+ android::AutoMutex lock(mEventLock);
+
+ LOG_FUNCTION_NAME;
+ Utils::Message * msg = ( struct Utils::Message * ) malloc(sizeof(struct Utils::Message));
+ if ( NULL != msg )
+ {
+ msg->command = ( unsigned int ) eEvent;
+ msg->arg1 = ( void * ) nData1;
+ msg->arg2 = ( void * ) nData2;
+ msg->arg3 = ( void * ) &semaphore;
+ msg->arg4 = ( void * ) hComponent;
+ res = mEventSignalQ.add(msg);
+ if ( NO_MEMORY == res )
+ {
+ CAMHAL_LOGEA("No ressources for inserting OMX events");
+ free(msg);
+ ret = -ENOMEM;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/*========================================================*/
+/* @ fn SampleTest_EmptyBufferDone :: Application callback*/
+/*========================================================*/
+OMX_ERRORTYPE OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
+{
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ OMXCameraAdapter *oca = (OMXCameraAdapter*)pAppData;
+ eError = oca->OMXCameraAdapterEmptyBufferDone(hComponent, pBuffHeader);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return eError;
+}
+
+
+/*========================================================*/
+/* @ fn SampleTest_EmptyBufferDone :: Application callback*/
+/*========================================================*/
+OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
+{
+
+ LOG_FUNCTION_NAME;
+ status_t stat = NO_ERROR;
+ status_t res1, res2;
+ OMXCameraPortParameters *pPortParam;
+ CameraFrame::FrameType typeOfFrame = CameraFrame::ALL_FRAMES;
+ unsigned int refCount = 0;
+ unsigned int mask = 0xFFFF;
+ CameraFrame cameraFrame;
+ OMX_TI_PLATFORMPRIVATE *platformPrivate;
+
+ res1 = res2 = NO_ERROR;
+
+ if (!pBuffHeader || !pBuffHeader->pBuffer) {
+ CAMHAL_LOGE("NULL Buffer from OMX");
+ return OMX_ErrorNone;
+ }
+
+ pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nInputPortIndex]);
+ platformPrivate = (OMX_TI_PLATFORMPRIVATE*) pBuffHeader->pPlatformPrivate;
+
+ if (pBuffHeader->nInputPortIndex == OMX_CAMERA_PORT_VIDEO_IN_VIDEO) {
+ typeOfFrame = CameraFrame::REPROCESS_INPUT_FRAME;
+ mask = (unsigned int)CameraFrame::REPROCESS_INPUT_FRAME;
+
+ stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return OMX_ErrorNone;
+}
+
+static void debugShowFPS()
+{
+ static int mFrameCount = 0;
+ static int mLastFrameCount = 0;
+ static nsecs_t mLastFpsTime = 0;
+ static float mFps = 0;
+ mFrameCount++;
+ if (!(mFrameCount & 0x1F)) {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ CAMHAL_LOGI("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ }
+ // XXX: mFPS has the value we want
+}
+
+/*========================================================*/
+/* @ fn SampleTest_FillBufferDone :: Application callback*/
+/*========================================================*/
+OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
+{
+ Utils::Message msg;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ if (UNLIKELY(mDebugFps)) {
+ debugShowFPS();
+ }
+
+ OMXCameraAdapter *adapter = ( OMXCameraAdapter * ) pAppData;
+ if ( NULL != adapter )
+ {
+ msg.command = OMXCameraAdapter::OMXCallbackHandler::CAMERA_FILL_BUFFER_DONE;
+ msg.arg1 = ( void * ) hComponent;
+ msg.arg2 = ( void * ) pBuffHeader;
+ adapter->mOMXCallbackHandler->put(&msg);
+ }
+
+ return eError;
+}
+
+#ifdef CAMERAHAL_OMX_PROFILING
+
+status_t OMXCameraAdapter::storeProfilingData(OMX_BUFFERHEADERTYPE* pBuffHeader) {
+ OMX_TI_PLATFORMPRIVATE *platformPrivate = NULL;
+ OMX_OTHER_EXTRADATATYPE *extraData = NULL;
+ FILE *fd = NULL;
+
+ LOG_FUNCTION_NAME
+
+ if ( UNLIKELY( mDebugProfile ) ) {
+
+ platformPrivate = static_cast<OMX_TI_PLATFORMPRIVATE *> (pBuffHeader->pPlatformPrivate);
+ extraData = getExtradata(platformPrivate,
+ static_cast<OMX_EXTRADATATYPE> (OMX_TI_ProfilerData));
+
+ if ( NULL != extraData ) {
+ if( extraData->eType == static_cast<OMX_EXTRADATATYPE> (OMX_TI_ProfilerData) ) {
+
+ fd = fopen(DEFAULT_PROFILE_PATH, "ab");
+ if ( NULL != fd ) {
+ fwrite(extraData->data, 1, extraData->nDataSize, fd);
+ fclose(fd);
+ } else {
+ return -errno;
+ }
+
+ } else {
+ return NOT_ENOUGH_DATA;
+ }
+ } else {
+ return NOT_ENOUGH_DATA;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return NO_ERROR;
+}
+
+#endif
+
+/*========================================================*/
+/* @ fn SampleTest_FillBufferDone :: Application callback*/
+/*========================================================*/
/**
 * Member handler for OMX FillBufferDone events.
 *
 * Runs on the OMXCallbackHandler thread (dispatched from the static
 * trampoline). Routes the filled buffer to subscribers depending on which
 * output port it came from:
 *  - preview port:     preview/snapshot/video frames, preview metadata,
 *                      zoom stepping and deferred 3A application;
 *  - measurement port: FRAME_DATA_SYNC frames;
 *  - image port:       still captures (JPEG-encodable YUV or RAW);
 *  - video port:       RAW capture data (optionally saved to file).
 *
 * On a send failure the buffer is returned to the port via returnFrame();
 * on an early bail-out (EXIT label) the error notifier is informed.
 *
 * @param hComponent  OMX component handle (unused here).
 * @param pBuffHeader Filled buffer header.
 * @return OMX_ErrorBadParameter for a NULL header, otherwise OMX_ErrorNone.
 */
OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
                                   OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
{

    status_t stat = NO_ERROR;
    status_t res1, res2;
    OMXCameraPortParameters *pPortParam;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    CameraFrame::FrameType typeOfFrame = CameraFrame::ALL_FRAMES;
    unsigned int refCount = 0;
    BaseCameraAdapter::AdapterState state, nextState;
    BaseCameraAdapter::getState(state);
    BaseCameraAdapter::getNextState(nextState);
    android::sp<CameraMetadataResult> metadataResult = NULL;
    unsigned int mask = 0xFFFF;
    CameraFrame cameraFrame;
    OMX_OTHER_EXTRADATATYPE *extraData;
    OMX_TI_ANCILLARYDATATYPE *ancillaryData = NULL;
    bool snapshotFrame = false;

    if ( NULL == pBuffHeader ) {
        return OMX_ErrorBadParameter;
    }

#ifdef CAMERAHAL_OMX_PROFILING

    storeProfilingData(pBuffHeader);

#endif

    res1 = res2 = NO_ERROR;

    if ( !pBuffHeader || !pBuffHeader->pBuffer ) {
        CAMHAL_LOGEA("NULL Buffer from OMX");
        return OMX_ErrorNone;
    }

    pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nOutputPortIndex]);

    // Find buffer and mark it as filled
    for (int i = 0; i < pPortParam->mNumBufs; i++) {
        if (pPortParam->mBufferHeader[i] == pBuffHeader) {
            pPortParam->mStatus[i] = OMXCameraPortParameters::DONE;
        }
    }

    if (pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW)
        {

        // Drop preview buffers that arrive after preview was stopped.
        if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE )
            {
            return OMX_ErrorNone;
            }

        if ( mWaitingForSnapshot ) {
            extraData = getExtradata(pBuffHeader->pPlatformPrivate,
                                     (OMX_EXTRADATATYPE) OMX_AncillaryData);

            // Ancillary data tells us whether this preview buffer is the
            // sensor's snapshot view; if so, re-apply focus settings.
            if ( NULL != extraData ) {
                ancillaryData = (OMX_TI_ANCILLARYDATATYPE*) extraData->data;
                if ((OMX_2D_Snap == ancillaryData->eCameraView)
                    || (OMX_3D_Left_Snap == ancillaryData->eCameraView)
                    || (OMX_3D_Right_Snap == ancillaryData->eCameraView)) {
                    snapshotFrame = OMX_TRUE;
                } else {
                    snapshotFrame = OMX_FALSE;
                }
                mPending3Asettings |= SetFocus;
            }
        }

        ///Prepare the frames to be sent - initialize CameraFrame object and reference count
        // TODO(XXX): ancillary data for snapshot frame is not being sent for video snapshot
        //            if we are waiting for a snapshot and in video mode...go ahead and send
        //            this frame as a snapshot
        if( mWaitingForSnapshot &&  (mCapturedFrames > 0) &&
            (snapshotFrame || (mCapMode == VIDEO_MODE) || (mCapMode == VIDEO_MODE_HQ ) ))
            {
            typeOfFrame = CameraFrame::SNAPSHOT_FRAME;
            mask = (unsigned int)CameraFrame::SNAPSHOT_FRAME;

            // video snapshot gets ancillary data and wb info from last snapshot frame
            mCaptureAncillaryData = ancillaryData;
            mWhiteBalanceData = NULL;
            extraData = getExtradata(pBuffHeader->pPlatformPrivate,
                                     (OMX_EXTRADATATYPE) OMX_WhiteBalance);
            if ( NULL != extraData )
                {
                mWhiteBalanceData = (OMX_TI_WHITEBALANCERESULTTYPE*) extraData->data;
                }
            }
        else
            {
            typeOfFrame = CameraFrame::PREVIEW_FRAME_SYNC;
            mask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;
            }

        if (mRecording)
            {
            // Preview buffers double as video frames while recording.
            mask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
            mFramesWithEncoder++;
            }

        //CAMHAL_LOGV("FBD pBuffer = 0x%x", pBuffHeader->pBuffer);

        if( mWaitingForSnapshot )
          {
            // In high-speed/video modes there is no dedicated snapshot
            // frame, so fire the shutter callback from here instead.
            if ( !mBracketingEnabled &&
                 ((HIGH_SPEED == mCapMode) ||
                  (VIDEO_MODE == mCapMode) ||
                  (VIDEO_MODE_HQ == mCapMode)) )
              {
                notifyShutterSubscribers();
              }
          }

        stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
        mFramesWithDisplay++;

        mFramesWithDucati--;

#ifdef CAMERAHAL_DEBUG
        {
            android::AutoMutex locker(mBuffersWithDucatiLock);
            if(mBuffersWithDucati.indexOfKey((uint32_t)pBuffHeader->pBuffer)<0)
                {
                CAMHAL_LOGE("Buffer was never with Ducati!! %p", pBuffHeader->pBuffer);
                for(unsigned int i=0;i<mBuffersWithDucati.size();i++) CAMHAL_LOGE("0x%x", mBuffersWithDucati.keyAt(i));
                }
            mBuffersWithDucati.removeItem((int)pBuffHeader->pBuffer);
        }
#endif

        if(mDebugFcs)
            CAMHAL_LOGEB("C[%d] D[%d] E[%d]", mFramesWithDucati, mFramesWithDisplay, mFramesWithEncoder);

        recalculateFPS();

        // Extract per-frame metadata (faces, exposure, ...) and fan it out.
        createPreviewMetadata(pBuffHeader, metadataResult, pPortParam->mWidth, pPortParam->mHeight);
        if ( NULL != metadataResult.get() ) {
            notifyMetadataSubscribers(metadataResult);
            metadataResult.clear();
        }

        {
            android::AutoMutex lock(mFaceDetectionLock);
            if ( mFDSwitchAlgoPriority ) {

                 //Disable region priority and enable face priority for AF
                 setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);
                 setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , true);

                 //Disable Region priority and enable Face priority
                 setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, false);
                 setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, true);
                 mFDSwitchAlgoPriority = false;
            }
        }

        sniffDccFileDataSave(pBuffHeader);

        stat |= advanceZoom();

        // On the fly update to 3A settings not working
        // Do not update 3A here if we are in the middle of a capture
        // or in the middle of transitioning to it
        if( mPending3Asettings &&
                ( (nextState & CAPTURE_ACTIVE) == 0 ) &&
                ( (state & CAPTURE_ACTIVE) == 0 ) ) {
            apply3Asettings(mParameters3A);
        }

        }
    else if( pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT )
        {
        typeOfFrame = CameraFrame::FRAME_DATA_SYNC;
        mask = (unsigned int)CameraFrame::FRAME_DATA_SYNC;

        stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
       }
    else if( pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_IMAGE_OUT_IMAGE )
    {
        OMX_COLOR_FORMATTYPE pixFormat;
        const char *valstr = NULL;

        pixFormat = pPortParam->mColorFormat;

        // Decide how this capture buffer should be presented:
        //  - unused color format  -> already-encoded image (e.g. JPEG)
        //  - UYVY + JPEG request  -> needs software JPEG encode
        //  - anything else        -> raw frame
        if ( OMX_COLOR_FormatUnused == pixFormat )
            {
            typeOfFrame = CameraFrame::IMAGE_FRAME;
            mask = (unsigned int) CameraFrame::IMAGE_FRAME;
        } else if ( pixFormat == OMX_COLOR_FormatCbYCrY &&
                  ((mPictureFormatFromClient &&
                          !strcmp(mPictureFormatFromClient,
                                  android::CameraParameters::PIXEL_FORMAT_JPEG)) ||
                   !mPictureFormatFromClient) ) {
            // signals to callbacks that this needs to be coverted to jpeg
            // before returning to framework
            typeOfFrame = CameraFrame::IMAGE_FRAME;
            mask = (unsigned int) CameraFrame::IMAGE_FRAME;
            cameraFrame.mQuirks |= CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG;
            cameraFrame.mQuirks |= CameraFrame::FORMAT_YUV422I_UYVY;

            // populate exif data and pass to subscribers via quirk
            // subscriber is in charge of freeing exif data
            ExifElementsTable* exif = new ExifElementsTable();
            setupEXIF_libjpeg(exif, mCaptureAncillaryData, mWhiteBalanceData);
            cameraFrame.mQuirks |= CameraFrame::HAS_EXIF_DATA;
            cameraFrame.mCookie2 = (void*) exif;
        } else {
            typeOfFrame = CameraFrame::RAW_FRAME;
            mask = (unsigned int) CameraFrame::RAW_FRAME;
        }

        pPortParam->mImageType = typeOfFrame;

        if((mCapturedFrames>0) && !mCaptureSignalled)
            {
            // First capture buffer: unblock anyone waiting in startImageCapture.
            mCaptureSignalled = true;
            mCaptureSem.Signal();
            }

        if( ( CAPTURE_ACTIVE & state ) != CAPTURE_ACTIVE )
            {
            goto EXIT;
            }

        {
            android::AutoMutex lock(mBracketingLock);
            if ( mBracketingEnabled )
                {
                doBracketing(pBuffHeader, typeOfFrame);
                return eError;
                }
        }

        if (mZoomBracketingEnabled) {
            doZoom(mZoomBracketingValues[mCurrentZoomBracketing]);
            CAMHAL_LOGDB("Current Zoom Bracketing: %d", mZoomBracketingValues[mCurrentZoomBracketing]);
            mCurrentZoomBracketing++;
            if (mCurrentZoomBracketing == ARRAY_SIZE(mZoomBracketingValues)) {
                mZoomBracketingEnabled = false;
            }
        }

        if ( 1 > mCapturedFrames )
            {
            goto EXIT;
            }

#ifdef OMAP_ENHANCEMENT_CPCAM
        if ( NULL != mSharedAllocator ) {
            cameraFrame.mMetaData = new CameraMetadataResult(getMetaData(pBuffHeader->pPlatformPrivate, mSharedAllocator));
        }
#endif

        CAMHAL_LOGDB("Captured Frames: %d", mCapturedFrames);

        mCapturedFrames--;

#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
        if (mYuvCapture) {
            struct timeval timeStampUsec;
            gettimeofday(&timeStampUsec, NULL);

            time_t saveTime;
            time(&saveTime);
            const struct tm * const timeStamp = gmtime(&saveTime);

            char filename[256];
            snprintf(filename,256, "%s/yuv_%d_%d_%d_%lu.yuv",
                    kYuvImagesOutputDirPath,
                    timeStamp->tm_hour,
                    timeStamp->tm_min,
                    timeStamp->tm_sec,
                    timeStampUsec.tv_usec);

            const status_t saveBufferStatus = saveBufferToFile(((CameraBuffer*)pBuffHeader->pAppPrivate)->mapped,
                                              pBuffHeader->nFilledLen, filename);

            if (saveBufferStatus != OK) {
                CAMHAL_LOGE("ERROR: %d, while saving yuv!", saveBufferStatus);
            } else {
                CAMHAL_LOGD("yuv_%d_%d_%d_%lu.yuv successfully saved in %s",
                        timeStamp->tm_hour,
                        timeStamp->tm_min,
                        timeStamp->tm_sec,
                        timeStampUsec.tv_usec,
                        kYuvImagesOutputDirPath);
            }
        }
#endif

        stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
#ifdef OMAP_ENHANCEMENT_CPCAM
        if ( NULL != cameraFrame.mMetaData.get() ) {
            cameraFrame.mMetaData.clear();
        }
#endif

        }
    else if (pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_VIDEO) {
        typeOfFrame = CameraFrame::RAW_FRAME;
        pPortParam->mImageType = typeOfFrame;
        {
            android::AutoMutex lock(mLock);
            if( ( CAPTURE_ACTIVE & state ) != CAPTURE_ACTIVE ) {
                goto EXIT;
            }
        }

        CAMHAL_LOGD("RAW buffer done on video port, length = %d", pBuffHeader->nFilledLen);

        mask = (unsigned int) CameraFrame::RAW_FRAME;

#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
        if ( mRawCapture ) {
            struct timeval timeStampUsec;
            gettimeofday(&timeStampUsec, NULL);

            time_t saveTime;
            time(&saveTime);
            const struct tm * const timeStamp = gmtime(&saveTime);

            char filename[256];
            snprintf(filename,256, "%s/raw_%d_%d_%d_%lu.raw",
                     kRawImagesOutputDirPath,
                     timeStamp->tm_hour,
                     timeStamp->tm_min,
                     timeStamp->tm_sec,
                     timeStampUsec.tv_usec);

            const status_t saveBufferStatus = saveBufferToFile( ((CameraBuffer*)pBuffHeader->pAppPrivate)->mapped,
                                               pBuffHeader->nFilledLen, filename);

            if (saveBufferStatus != OK) {
                CAMHAL_LOGE("ERROR: %d , while saving raw!", saveBufferStatus);
            } else {
                CAMHAL_LOGD("raw_%d_%d_%d_%lu.raw successfully saved in %s",
                            timeStamp->tm_hour,
                            timeStamp->tm_min,
                            timeStamp->tm_sec,
                            timeStampUsec.tv_usec,
                            kRawImagesOutputDirPath);
                stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
            }
        }
#endif
    } else {
        CAMHAL_LOGEA("Frame received for non-(preview/capture/measure) port. This is yet to be supported");
        goto EXIT;
    }

    if ( NO_ERROR != stat )
        {
        CameraBuffer *camera_buffer;

        camera_buffer = (CameraBuffer *)pBuffHeader->pAppPrivate;

        // Subscribers refused the frame — hand the buffer straight back to
        // the port so the pipeline keeps flowing.
        CAMHAL_LOGDB("sendFrameToSubscribers error: %d", stat);
        returnFrame(camera_buffer, typeOfFrame);
        }

    return eError;

    EXIT:

    CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, stat, eError);

    if ( NO_ERROR != stat )
        {
        if ( NULL != mErrorNotifier )
            {
            mErrorNotifier->errorNotify(CAMERA_ERROR_UNKNOWN);
            }
        }

    return eError;
}
+
+status_t OMXCameraAdapter::recalculateFPS()
+{
+ float currentFPS;
+
+ {
+ android::AutoMutex lock(mFrameCountMutex);
+ mFrameCount++;
+ if (mFrameCount == 1) {
+ mFirstFrameCondition.broadcast();
+ }
+ }
+
+ if ( ( mFrameCount % FPS_PERIOD ) == 0 )
+ {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFPSTime;
+ currentFPS = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFPSTime = now;
+ mLastFrameCount = mFrameCount;
+
+ if ( 1 == mIter )
+ {
+ mFPS = currentFPS;
+ }
+ else
+ {
+ //cumulative moving average
+ mFPS = mLastFPS + (currentFPS - mLastFPS)/mIter;
+ }
+
+ mLastFPS = mFPS;
+ mIter++;
+ }
+
+ return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, unsigned int mask, OMXCameraPortParameters *port)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == port)
+ {
+ CAMHAL_LOGEA("Invalid portParam");
+ return -EINVAL;
+ }
+
+ if ( NULL == pBuffHeader )
+ {
+ CAMHAL_LOGEA("Invalid Buffer header");
+ return -EINVAL;
+ }
+
+ android::AutoMutex lock(mSubscriberLock);
+
+ //frame.mFrameType = typeOfFrame;
+ frame.mFrameMask = mask;
+ frame.mBuffer = (CameraBuffer *)pBuffHeader->pAppPrivate;
+ frame.mLength = pBuffHeader->nFilledLen;
+ frame.mAlignment = port->mStride;
+ frame.mOffset = pBuffHeader->nOffset;
+ frame.mWidth = port->mWidth;
+ frame.mHeight = port->mHeight;
+ frame.mYuv[0] = NULL;
+ frame.mYuv[1] = NULL;
+
+ if ( onlyOnce && mRecording )
+ {
+ mTimeSourceDelta = (pBuffHeader->nTimeStamp * 1000) - systemTime(SYSTEM_TIME_MONOTONIC);
+ onlyOnce = false;
+ }
+
+ frame.mTimestamp = (pBuffHeader->nTimeStamp * 1000) - mTimeSourceDelta;
+
+ ret = setInitFrameRefCount(frame.mBuffer, mask);
+
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
+ } else {
+ ret = sendFrameToSubscribers(&frame);
+ }
+
+ CAMHAL_LOGVB("B 0x%x T %llu", frame.mBuffer, pBuffHeader->nTimeStamp);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+bool OMXCameraAdapter::CommandHandler::Handler()
+{
+ Utils::Message msg;
+ volatile int forever = 1;
+ status_t stat;
+ ErrorNotifier *errorNotify = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ while ( forever )
+ {
+ stat = NO_ERROR;
+ CAMHAL_LOGDA("Handler: waiting for messsage...");
+ Utils::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
+ {
+ android::AutoMutex lock(mLock);
+ mCommandMsgQ.get(&msg);
+ }
+ CAMHAL_LOGDB("msg.command = %d", msg.command);
+ switch ( msg.command ) {
+ case CommandHandler::CAMERA_START_IMAGE_CAPTURE:
+ {
+ OMXCameraAdapter::CachedCaptureParameters* cap_params =
+ static_cast<OMXCameraAdapter::CachedCaptureParameters*>(msg.arg2);
+ stat = mCameraAdapter->startImageCapture(false, cap_params);
+ delete cap_params;
+ break;
+ }
+ case CommandHandler::CAMERA_PERFORM_AUTOFOCUS:
+ {
+ stat = mCameraAdapter->doAutoFocus();
+ break;
+ }
+ case CommandHandler::COMMAND_EXIT:
+ {
+ CAMHAL_LOGDA("Exiting command handler");
+ forever = 0;
+ break;
+ }
+ case CommandHandler::CAMERA_SWITCH_TO_EXECUTING:
+ {
+ stat = mCameraAdapter->doSwitchToExecuting();
+ break;
+ }
+ case CommandHandler::CAMERA_START_REPROCESS:
+ {
+ OMXCameraAdapter::CachedCaptureParameters* cap_params =
+ static_cast<OMXCameraAdapter::CachedCaptureParameters*>(msg.arg2);
+ stat = mCameraAdapter->startReprocess();
+ stat = mCameraAdapter->startImageCapture(false, cap_params);
+ delete cap_params;
+ break;
+ }
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return false;
+}
+
/**
 * Main loop of the OMX callback-handler thread.
 *
 * Drains mCommandMsgQ, dispatching fill-buffer-done and focus-status work
 * onto the adapter, until an exit command is received. The mIsProcessed /
 * mCondition pair implements the handshake used by flush(): mIsProcessed is
 * cleared while a message is in flight and set (with a signal) once the
 * queue has been fully drained.
 *
 * @return false — the thread exits once the loop terminates.
 */
bool OMXCameraAdapter::OMXCallbackHandler::Handler()
{
    Utils::Message msg;
    volatile int forever = 1;
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    while(forever){
        Utils::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
        {
            android::AutoMutex lock(mLock);
            mCommandMsgQ.get(&msg);
            // A message is now in flight: flush() must wait until the queue
            // is drained again.
            mIsProcessed = false;
        }

        switch ( msg.command ) {
            case OMXCallbackHandler::CAMERA_FILL_BUFFER_DONE:
            {
                // Result is intentionally ignored; errors are handled inside.
                ret = mCameraAdapter->OMXCameraAdapterFillBufferDone(( OMX_HANDLETYPE ) msg.arg1,
                                                                    ( OMX_BUFFERHEADERTYPE *) msg.arg2);
                break;
            }
            case OMXCallbackHandler::CAMERA_FOCUS_STATUS:
            {
                mCameraAdapter->handleFocusCallback();
                break;
            }
            // NOTE(review): the exit token is matched against
            // CommandHandler::COMMAND_EXIT, not OMXCallbackHandler's own
            // enumerator — presumably both enums share the same value;
            // confirm before changing either enum.
            case CommandHandler::COMMAND_EXIT:
            {
                CAMHAL_LOGDA("Exiting OMX callback handler");
                forever = 0;
                break;
            }
        }

        {
            android::AutoMutex locker(mLock);
            CAMHAL_UNUSED(locker);

            // Queue drained? Tell any flush() waiter it can proceed.
            mIsProcessed = mCommandMsgQ.isEmpty();
            if ( mIsProcessed )
                mCondition.signal();
        }
    }

    // force the condition to wake
    {
        android::AutoMutex locker(mLock);
        CAMHAL_UNUSED(locker);

        mIsProcessed = true;
        mCondition.signal();
    }

    LOG_FUNCTION_NAME_EXIT;
    return false;
}
+
+void OMXCameraAdapter::OMXCallbackHandler::flush()
+{
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ if ( mIsProcessed )
+ return;
+
+ mCondition.wait(mLock);
+}
+
+status_t OMXCameraAdapter::setExtraData(bool enable, OMX_U32 nPortIndex, OMX_EXT_EXTRADATATYPE eType) {
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_EXTRADATATYPE extraDataControl;
+
+ LOG_FUNCTION_NAME;
+
+ if ( ( OMX_StateInvalid == mComponentState ) ||
+ ( NULL == mCameraAdapterParameters.mHandleComp ) ) {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return -EINVAL;
+ }
+
+ OMX_INIT_STRUCT_PTR (&extraDataControl, OMX_CONFIG_EXTRADATATYPE);
+
+ extraDataControl.nPortIndex = nPortIndex;
+ extraDataControl.eExtraDataType = eType;
+#ifdef CAMERAHAL_TUNA
+ extraDataControl.eCameraView = OMX_2D;
+#endif
+
+ if (enable) {
+ extraDataControl.bEnable = OMX_TRUE;
+ } else {
+ extraDataControl.bEnable = OMX_FALSE;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_IndexConfigOtherExtraDataControl,
+ &extraDataControl);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+OMX_OTHER_EXTRADATATYPE *OMXCameraAdapter::getExtradata(const OMX_PTR ptrPrivate, OMX_EXTRADATATYPE type) const
+{
+ if ( NULL != ptrPrivate ) {
+ const OMX_TI_PLATFORMPRIVATE *platformPrivate = (const OMX_TI_PLATFORMPRIVATE *) ptrPrivate;
+
+ CAMHAL_LOGVB("Size = %d, sizeof = %d, pAuxBuf = 0x%x, pAuxBufSize= %d, pMetaDataBufer = 0x%x, nMetaDataSize = %d",
+ platformPrivate->nSize,
+ sizeof(OMX_TI_PLATFORMPRIVATE),
+ platformPrivate->pAuxBuf1,
+ platformPrivate->pAuxBufSize1,
+ platformPrivate->pMetaDataBuffer,
+ platformPrivate->nMetaDataSize);
+ if ( sizeof(OMX_TI_PLATFORMPRIVATE) == platformPrivate->nSize ) {
+ if ( 0 < platformPrivate->nMetaDataSize ) {
+ OMX_U32 remainingSize = platformPrivate->nMetaDataSize;
+ OMX_OTHER_EXTRADATATYPE *extraData = (OMX_OTHER_EXTRADATATYPE *) platformPrivate->pMetaDataBuffer;
+ if ( NULL != extraData ) {
+ while ( extraData->eType && extraData->nDataSize && extraData->data &&
+ (remainingSize >= extraData->nSize)) {
+ if ( type == extraData->eType ) {
+ return extraData;
+ }
+ remainingSize -= extraData->nSize;
+ extraData = (OMX_OTHER_EXTRADATATYPE*) ((char*)extraData + extraData->nSize);
+ }
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE pMetaDataBuffer is NULL");
+ }
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE nMetaDataSize is size is %d",
+ ( unsigned int ) platformPrivate->nMetaDataSize);
+ }
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE size mismatch: expected = %d, received = %d",
+ ( unsigned int ) sizeof(OMX_TI_PLATFORMPRIVATE),
+ ( unsigned int ) platformPrivate->nSize);
+ }
+ } else {
+ CAMHAL_LOGEA("Invalid OMX_TI_PLATFORMPRIVATE");
+ }
+
+ // Required extradata type wasn't found
+ return NULL;
+}
+
+OMXCameraAdapter::CachedCaptureParameters* OMXCameraAdapter::cacheCaptureParameters() {
+ CachedCaptureParameters* params = new CachedCaptureParameters();
+
+ params->mPendingCaptureSettings = mPendingCaptureSettings;
+ params->mPictureRotation = mPictureRotation;
+ memcpy(params->mExposureBracketingValues,
+ mExposureBracketingValues,
+ sizeof(mExposureBracketingValues));
+ memcpy(params->mExposureGainBracketingValues,
+ mExposureGainBracketingValues,
+ sizeof(mExposureGainBracketingValues));
+ memcpy(params->mExposureGainBracketingModes,
+ mExposureGainBracketingModes,
+ sizeof(mExposureGainBracketingModes));
+ params->mExposureBracketingValidEntries = mExposureBracketingValidEntries;
+ params->mExposureBracketMode = mExposureBracketMode;
+ params->mBurstFrames = mBurstFrames;
+ params->mFlushShotConfigQueue = mFlushShotConfigQueue;
+
+ return params;
+}
+
+OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index)
+{
+ LOG_FUNCTION_NAME;
+
+ mOmxInitialized = false;
+ mComponentState = OMX_StateInvalid;
+ mSensorIndex = sensor_index;
+ mPictureRotation = 0;
+ // Initial values
+ mTimeSourceDelta = 0;
+ onlyOnce = true;
+ mDccData.pData = NULL;
+
+ mInitSem.Create(0);
+ mFlushSem.Create(0);
+ mUsePreviewDataSem.Create(0);
+ mUsePreviewSem.Create(0);
+ mUseCaptureSem.Create(0);
+ mUseReprocessSem.Create(0);
+ mStartPreviewSem.Create(0);
+ mStopPreviewSem.Create(0);
+ mStartCaptureSem.Create(0);
+ mStopCaptureSem.Create(0);
+ mStopReprocSem.Create(0);
+ mSwitchToLoadedSem.Create(0);
+ mCaptureSem.Create(0);
+
+ mSwitchToExecSem.Create(0);
+
+ mCameraAdapterParameters.mHandleComp = 0;
+
+ mUserSetExpLock = OMX_FALSE;
+ mUserSetWbLock = OMX_FALSE;
+
+ mFramesWithDucati = 0;
+ mFramesWithDisplay = 0;
+ mFramesWithEncoder = 0;
+
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ mDebugProfile = 0;
+
+#endif
+
+ mPreviewPortInitialized = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
/**
 * Destructor — tears the adapter down in a deliberate order:
 *  1. return the component to OMX Loaded state,
 *  2. persist/close DCC data and free the component handle, then OMX_Deinit,
 *  3. wake every waiter still registered in the event queue,
 *  4. stop the command-handler and OMX-callback-handler threads.
 * The order matters: the threads are stopped last so they can still drain
 * messages produced during component teardown.
 */
OMXCameraAdapter::~OMXCameraAdapter()
{
    LOG_FUNCTION_NAME;

    android::AutoMutex lock(gAdapterLock);

    // return to OMX Loaded state
    switchToLoaded();

    if ( mOmxInitialized ) {
        saveDccFileDataSave();

        closeDccFileDataSave();
        // deinit the OMX
        if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid ) {
            // free the handle for the Camera component
            if ( mCameraAdapterParameters.mHandleComp ) {
                OMX_FreeHandle(mCameraAdapterParameters.mHandleComp);
                mCameraAdapterParameters.mHandleComp = NULL;
            }
        }

        OMX_Deinit();
        mOmxInitialized = false;
    }

    //Remove any unhandled events
    if ( !mEventSignalQ.isEmpty() )
      {
        for (unsigned int i = 0 ; i < mEventSignalQ.size() ; i++ )
          {
            Utils::Message *msg = mEventSignalQ.itemAt(i);
            //remove from queue and free msg
            if ( NULL != msg )
              {
                // Wake the waiter registered for this event so it does not
                // block forever, then release the queued message.
                Utils::Semaphore *sem  = (Utils::Semaphore*) msg->arg3;
                sem->Signal();
                free(msg);

              }
          }
        mEventSignalQ.clear();
      }

    //Exit and free ref to command handling thread
    if ( NULL != mCommandHandler.get() )
    {
        Utils::Message msg;
        msg.command = CommandHandler::COMMAND_EXIT;
        msg.arg1 = mErrorNotifier;
        mCommandHandler->clearCommandQ();
        mCommandHandler->put(&msg);
        mCommandHandler->requestExitAndWait();
        mCommandHandler.clear();
    }

    //Exit and free ref to callback handling thread
    if ( NULL != mOMXCallbackHandler.get() )
    {
        Utils::Message msg;
        msg.command = OMXCallbackHandler::COMMAND_EXIT;
        //Clear all messages pending first
        mOMXCallbackHandler->clearCommandQ();
        mOMXCallbackHandler->put(&msg);
        mOMXCallbackHandler->requestExitAndWait();
        mOMXCallbackHandler.clear();
    }

    LOG_FUNCTION_NAME_EXIT;
}
+
+extern "C" CameraAdapter* OMXCameraAdapter_Factory(size_t sensor_index)
+{
+ CameraAdapter *adapter = NULL;
+ android::AutoMutex lock(gAdapterLock);
+
+ LOG_FUNCTION_NAME;
+
+ adapter = new OMXCameraAdapter(sensor_index);
+ if ( adapter ) {
+ CAMHAL_LOGDB("New OMX Camera adapter instance created for sensor %d",sensor_index);
+ } else {
+ CAMHAL_LOGEA("OMX Camera adapter create failed for sensor index = %d!",sensor_index);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return adapter;
+}
+
+OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData,
+ const OMX_CALLBACKTYPE & callbacks)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorUndefined;
+
+ for ( int i = 0; i < 5; ++i ) {
+ if ( i > 0 ) {
+ // sleep for 100 ms before next attempt
+ usleep(100000);
+ }
+
+ // setup key parameters to send to Ducati during init
+ OMX_CALLBACKTYPE oCallbacks = callbacks;
+
+ // get handle
+ eError = OMX_GetHandle(handle, (OMX_STRING)"OMX.TI.DUCATI1.VIDEO.CAMERA", pAppData, &oCallbacks);
+ if ( eError == OMX_ErrorNone ) {
+ return OMX_ErrorNone;
+ }
+
+ CAMHAL_LOGEB("OMX_GetHandle() failed, error: 0x%x", eError);
+ }
+
+ *handle = 0;
+ return eError;
+}
+
+
+class CapabilitiesHandler
+{
+public:
+ CapabilitiesHandler()
+ {
+ mComponent = 0;
+ }
+
+ const OMX_HANDLETYPE & component() const
+ {
+ return mComponent;
+ }
+
+ OMX_HANDLETYPE & componentRef()
+ {
+ return mComponent;
+ }
+
+ status_t fetchCapabiltiesForMode(OMX_CAMOPERATINGMODETYPE mode,
+ int sensorId,
+ CameraProperties::Properties * properties)
+ {
+ OMX_CONFIG_CAMOPERATINGMODETYPE camMode;
+
+ OMX_INIT_STRUCT_PTR (&camMode, OMX_CONFIG_CAMOPERATINGMODETYPE);
+ camMode.eCamOperatingMode = mode;
+
+ OMX_ERRORTYPE eError = OMX_SetParameter(component(),
+ ( OMX_INDEXTYPE ) OMX_IndexCameraOperatingMode,
+ &camMode);
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGE("Error while configuring camera mode in CameraAdapter_Capabilities 0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ // get and fill capabilities
+ OMXCameraAdapter::getCaps(sensorId, properties, component());
+
+ return NO_ERROR;
+ }
+
+ status_t fetchCapabilitiesForSensor(int sensorId,
+ CameraProperties::Properties * properties)
+ {
+ // sensor select
+ OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
+ OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
+ sensorSelect.eSensor = (OMX_SENSORSELECT)sensorId;
+
+ CAMHAL_LOGD("Selecting sensor %d...", sensorId);
+ const OMX_ERRORTYPE sensorSelectError = OMX_SetConfig(component(),
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigSensorSelect, &sensorSelect);
+ CAMHAL_LOGD("Selecting sensor %d... DONE", sensorId);
+
+ if ( sensorSelectError != OMX_ErrorNone ) {
+ CAMHAL_LOGD("Max supported sensor number reached: %d", sensorId);
+ return BAD_VALUE;
+ }
+
+ status_t err = NO_ERROR;
+ if ( sensorId == 2 ) {
+ CAMHAL_LOGD("Camera mode: STEREO");
+ properties->setMode(MODE_STEREO);
+ err = fetchCapabiltiesForMode(OMX_CaptureStereoImageCapture,
+ sensorId,
+ properties);
+ } else {
+ CAMHAL_LOGD("Camera MONO");
+
+ CAMHAL_LOGD("Camera mode: HQ ");
+ properties->setMode(MODE_HIGH_QUALITY);
+ err = fetchCapabiltiesForMode(OMX_CaptureImageProfileBase,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: VIDEO ");
+ properties->setMode(MODE_VIDEO);
+ err = fetchCapabiltiesForMode(OMX_CaptureVideo,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: ZSL ");
+ properties->setMode(MODE_ZEROSHUTTERLAG);
+ err = fetchCapabiltiesForMode(OMX_TI_CaptureImageProfileZeroShutterLag,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: HS ");
+ properties->setMode(MODE_HIGH_SPEED);
+ err = fetchCapabiltiesForMode(OMX_CaptureImageHighSpeedTemporalBracketing,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ CAMHAL_LOGD("Camera mode: CPCAM ");
+ properties->setMode(MODE_CPCAM);
+ err = fetchCapabiltiesForMode(OMX_TI_CPCam,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+#endif
+
+#ifdef CAMERAHAL_OMAP5_CAPTURE_MODES
+
+ CAMHAL_LOGD("Camera mode: VIDEO HQ ");
+ properties->setMode(MODE_VIDEO_HIGH_QUALITY);
+ err = fetchCapabiltiesForMode(OMX_CaptureHighQualityVideo,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+#endif
+
+ }
+
+ return err;
+ }
+
+private:
+ OMX_HANDLETYPE mComponent;
+ OMX_STATETYPE mState;
+};
+
/**
 * C-linkage capability-enumeration entry point used by CameraHal.
 *
 * Brings up OMX, grabs a temporary camera component handle, then selects
 * sensors 0,1,... in turn and fills one Properties slot per detected sensor
 * until sensor-select fails (no more sensors) or max_camera is reached.
 * The component handle and OMX core are always released before returning.
 *
 * @param properties_array Destination array of per-camera property sets.
 * @param starting_camera  First index in properties_array to fill.
 * @param max_camera       Upper bound on total cameras (array capacity).
 * @param supportedCameras Out: number of sensors successfully probed.
 * @return NO_ERROR on success, BAD_VALUE for a NULL array, otherwise the
 *         translated OMX error.
 */
extern "C" status_t OMXCameraAdapter_Capabilities(
        CameraProperties::Properties * const properties_array,
        const int starting_camera, const int max_camera, int & supportedCameras)
{
    LOG_FUNCTION_NAME;

    supportedCameras = 0;

    int num_cameras_supported = 0;
    OMX_ERRORTYPE eError = OMX_ErrorNone;

    android::AutoMutex lock(gAdapterLock);

    if (!properties_array) {
        CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
        LOG_FUNCTION_NAME_EXIT;
        return BAD_VALUE;
    }

    eError = OMX_Init();
    if (eError != OMX_ErrorNone) {
        CAMHAL_LOGEB("Error OMX_Init -0x%x", eError);
        return Utils::ErrorUtils::omxToAndroidError(eError);
    }

    // Enumeration-only handle: no callbacks are needed.
    CapabilitiesHandler handler;
    OMX_CALLBACKTYPE callbacks;
    callbacks.EventHandler = 0;
    callbacks.EmptyBufferDone = 0;
    callbacks.FillBufferDone = 0;

    // NOTE(review): this goto jumps over the DCCHandler declaration below
    // when USES_LEGACY_DOMX_DCC is undefined — presumably legal only
    // because the shipping build defines it (or DCCHandler is trivially
    // constructible); confirm before touching the control flow here.
    eError = OMXCameraAdapter::OMXCameraGetHandle(&handler.componentRef(), &handler, callbacks);
    if (eError != OMX_ErrorNone) {
        CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
        goto EXIT;
    }

#ifndef USES_LEGACY_DOMX_DCC
    DCCHandler dcc_handler;
    dcc_handler.loadDCC(handler.componentRef());
#endif

    // Continue selecting sensor and then querying OMX Camera for it's capabilities
    // When sensor select returns an error, we know to break and stop
    while (eError == OMX_ErrorNone &&
           (starting_camera + num_cameras_supported) < max_camera) {

        const int sensorId = num_cameras_supported;
        CameraProperties::Properties * properties = properties_array + starting_camera + sensorId;
        const status_t err = handler.fetchCapabilitiesForSensor(sensorId, properties);

        if ( err != NO_ERROR )
            break;

        num_cameras_supported++;
        CAMHAL_LOGEB("Number of OMX Cameras detected = %d \n",num_cameras_supported);
    }

    // clean up
    if(handler.component()) {
        CAMHAL_LOGD("Freeing the component...");
        OMX_FreeHandle(handler.component());
        CAMHAL_LOGD("Freeing the component... DONE");
        handler.componentRef() = NULL;
    }

 EXIT:
    CAMHAL_LOGD("Deinit...");
    OMX_Deinit();
    CAMHAL_LOGD("Deinit... DONE");

    if ( eError != OMX_ErrorNone )
    {
        CAMHAL_LOGE("Error: 0x%x", eError);
        LOG_FUNCTION_NAME_EXIT;
        return Utils::ErrorUtils::omxToAndroidError(eError);
    }

    supportedCameras = num_cameras_supported;

    LOG_FUNCTION_NAME_EXIT;

    return NO_ERROR;
}
+
+} // namespace Camera
+} // namespace Ti
+
+
+/*--------------------Camera Adapter Class ENDS here-----------------------------*/
+
diff --git a/camera/OMXCameraAdapter/OMXCapabilities.cpp b/camera/OMXCameraAdapter/OMXCapabilities.cpp
new file mode 100644
index 0000000..72a2380
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXCapabilities.cpp
@@ -0,0 +1,2513 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXCap.cpp
+*
+* This file implements the OMX Capabilities feature.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+#include "TICameraParameters.h"
+
+namespace Ti {
+namespace Camera {
+
+/************************************
+ * global constants and variables
+ *************************************/
+
+#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
+
+// Separator between entries in CameraParameters list values.
+static const char PARAM_SEP[] = ",";
+// Ducati reports frame rates in Q8 fixed point; shift right by this to get fps.
+static const uint32_t VFR_OFFSET = 8;
+// NOTE(review): declared as 'char'; an int/size_t would better express a
+// string-length bound -- confirm whether this constant is used before changing.
+static const char FPS_STR_MAX_LEN = 10;
+
+// Step sizes / limits used when exporting manual exposure and gain ranges.
+static const unsigned int MANUAL_EXPOSURE_STEP = 1;
+static const unsigned int MANUAL_GAIN_ISO_MIN = 100;
+static const unsigned int MANUAL_GAIN_ISO_STEP = 100;
+
+// Sensor IDs -- these values have to match new_sensor_MSP.h on the coprocessor
+// side (see the note above mSensorNames below).
+const int OMXCameraAdapter::SENSORID_IMX060 = 300;
+const int OMXCameraAdapter::SENSORID_OV5650 = 301;
+const int OMXCameraAdapter::SENSORID_OV5640 = 302;
+const int OMXCameraAdapter::SENSORID_OV14825 = 304;
+const int OMXCameraAdapter::SENSORID_S5K4E1GA = 305;
+const int OMXCameraAdapter::SENSORID_S5K6A1GX03 = 306;
+const int OMXCameraAdapter::SENSORID_OV8830 = 310;
+const int OMXCameraAdapter::SENSORID_OV2722 = 311;
+const int OMXCameraAdapter::SENSORID_OV9726 = 312;
+
+
+// Advertised frame-rate limits in fps; FPS_MAX_EXTENDED covers high-speed modes.
+const int OMXCameraAdapter::FPS_MIN = 5;
+const int OMXCameraAdapter::FPS_MAX = 30;
+const int OMXCameraAdapter::FPS_MAX_EXTENDED = 60;
+
+// Converts a Ducati Q8 frame rate into Android VFR units (fps * VFR_SCALE).
+inline static int androidFromDucatiFrameRate(OMX_U32 frameRate) {
+    return (frameRate >> VFR_OFFSET) * CameraHal::VFR_SCALE;
+}
+
+/**** look up tables to translate OMX Caps to Parameter ****/
+
+// Still-capture resolutions advertised in 2D mode. Each table below is
+// filtered at runtime against the min/max resolution window reported by the
+// OMX component (see encodeSizeCap / encodeSizeCap3D).
+const CapResolution OMXCameraAdapter::mImageCapRes [] = {
+    { 4416, 3312, "4416x3312" },
+    { 4032, 3024, "4032x3024" },
+    { 4000, 3000, "4000x3000" },
+    { 3648, 2736, "3648x2736" },
+    { 3264, 2448, "3264x2448" },
+    { 2608, 1960, "2608x1960" },
+    { 2592, 1944, "2592x1944" },
+    { 2592, 1728, "2592x1728" },
+    { 2592, 1458, "2592x1458" },
+    { 2400, 1350, "2400x1350" },
+    { 2304, 1296, "2304x1296" },
+    { 2240, 1344, "2240x1344" },
+    { 2160, 1440, "2160x1440" },
+    { 2112, 1728, "2112x1728" },
+    { 2112, 1188, "2112x1188" },
+    { 2048, 1536, "2048x1536" },
+    { 2016, 1512, "2016x1512" },
+    { 2016, 1134, "2016x1134" },
+    { 2000, 1600, "2000x1600" },
+    { 1920, 1080, "1920x1080" },
+    { 1600, 1200, "1600x1200" },
+    { 1600, 900, "1600x900" },
+    { 1536, 864, "1536x864" },
+    { 1408, 792, "1408x792" },
+    { 1344, 756, "1344x756" },
+    { 1280, 1024, "1280x1024" },
+    { 1280, 720, "1280x720" },
+    { 1152, 864, "1152x864" },
+    { 1280, 960, "1280x960" },
+    { 1024, 768, "1024x768" },
+    { 640, 480, "640x480" },
+    { 320, 240, "320x240" },
+};
+
+// Still-capture resolutions for full side-by-side stereo (width doubled).
+const CapResolution OMXCameraAdapter::mImageCapResSS [] = {
+    { 4032*2, 3024, "8064x3024" },
+    { 3648*2, 2736, "7296x2736" },
+    { 3264*2, 2448, "6528x2448" },
+    { 2592*2, 1944, "5184x1944" },
+    { 2048*2, 1536, "4096x1536" },
+    { 1600*2, 1200, "3200x1200" },
+    { 1280*2, 960, "2560x960" },
+    { 1280*2, 720, "2560x720" },
+    { 1024*2, 768, "2048x768" },
+    { 640*2, 480, "1280x480" },
+    { 320*2, 240, "640x240" },
+};
+
+// Still-capture resolutions for full top-bottom stereo (height doubled).
+const CapResolution OMXCameraAdapter::mImageCapResTB [] = {
+    { 4032, 3024*2, "4032x6048" },
+    { 3648, 2736*2, "3648x5472" },
+    { 3264, 2448*2, "3264x4896" },
+    { 2592, 1944*2, "2592x3888" },
+    { 2048, 1536*2, "2048x3072" },
+    { 1600, 1200*2, "1600x2400" },
+    { 1280, 960*2, "1280x1920" },
+    { 1280, 720*2, "1280x1440" },
+    { 1024, 768*2, "1024x1536" },
+    { 640, 480*2, "640x960" },
+    { 320, 240*2, "320x480" },
+};
+
+// Landscape preview resolutions (2D).
+const CapResolution OMXCameraAdapter::mPreviewRes [] = {
+    { 1920, 1080, "1920x1080" },
+    { 1280, 720, "1280x720" },
+    { 960, 720, "960x720" },
+    { 800, 480, "800x480" },
+    { 720, 576, "720x576" },
+    { 720, 480, "720x480" },
+    { 768, 576, "768x576" },
+    { 640, 480, "640x480" },
+    { 320, 240, "320x240" },
+    { 352, 288, "352x288" },
+    { 240, 160, "240x160" },
+    { 176, 144, "176x144" },
+    { 160, 120, "160x120" },
+    { 128, 96, "128x96" },
+};
+
+// Portrait preview resolutions, matched against the rotated-preview window.
+const CapResolution OMXCameraAdapter::mPreviewPortraitRes [] = {
+    //Portrait resolutions
+    { 1088, 1920, "1088x1920" },
+    { 720, 1280, "720x1280" },
+    { 480, 800, "480x800" },
+    { 576, 720, "576x720" },
+    { 576, 768, "576x768" },
+    { 480, 720, "480x720" },
+    { 480, 640, "480x640" },
+    { 288, 352, "288x352" },
+    { 240, 320, "240x320" },
+    { 160, 240, "160x240" },
+    { 144, 176, "144x176" },
+    { 120, 160, "120x160"},
+    { 96, 128, "96x128" }
+};
+
+// Preview resolutions for full side-by-side stereo (width doubled).
+const CapResolution OMXCameraAdapter::mPreviewResSS [] = {
+    { 1920*2, 1080, "3840x1080" },
+    { 1280*2, 720, "2560x720" },
+    { 800*2, 480, "1600x480" },
+    { 720*2, 576, "1440x576" },
+    { 720*2, 480, "1440x480" },
+    { 768*2, 576, "1536x576" },
+    { 640*2, 480, "1280x480" },
+    { 320*2, 240, "640x240" },
+    { 352*2, 288, "704x288" },
+    { 240*2, 160, "480x160" },
+    { 176*2, 144, "352x144" },
+    { 128*2, 96, "256x96" }
+};
+
+// Preview resolutions for full top-bottom stereo (height doubled).
+const CapResolution OMXCameraAdapter::mPreviewResTB [] = {
+    { 1920, 1080*2, "1920x2160" },
+    { 1280, 720*2, "1280x1440" },
+    { 800, 480*2, "800x960" },
+    { 720, 576*2, "720x1152" },
+    { 720, 480*2, "720x960" },
+    { 768, 576*2, "768x1152" },
+    { 640, 480*2, "640x960" },
+    { 320, 240*2, "320x480" },
+    { 352, 288*2, "352x576" },
+    { 240, 160*2, "240x320" },
+    { 176, 144*2, "176x288" },
+    { 128, 96*2, "128x192" },
+};
+
+// JPEG thumbnail resolutions ("0x0" is appended separately in
+// insertThumbSizes to satisfy CTS).
+const CapResolution OMXCameraAdapter::mThumbRes [] = {
+    { 640, 480, "640x480" },
+    { 160, 120, "160x120" },
+    { 200, 120, "200x120" },
+    { 320, 240, "320x240" },
+    { 512, 384, "512x384" },
+    { 352, 144, "352x144" },
+    { 176, 144, "176x144" },
+    { 96, 96, "96x96" },
+};
+
+// OMX color formats -> Android parameter names. Note that
+// OMX_COLOR_FormatYUV420SemiPlanar is advertised as both YUV420SP and
+// YUV420P: the YV12 case is handled by programming the port with NV21
+// (see insertPreviewFormats).
+const CapPixelformat OMXCameraAdapter::mPixelformats [] = {
+    { OMX_COLOR_FormatCbYCrY, android::CameraParameters::PIXEL_FORMAT_YUV422I },
+    { OMX_COLOR_FormatYUV420SemiPlanar, android::CameraParameters::PIXEL_FORMAT_YUV420SP },
+    { OMX_COLOR_Format16bitRGB565, android::CameraParameters::PIXEL_FORMAT_RGB565 },
+    { OMX_COLOR_FormatYUV420SemiPlanar, android::CameraParameters::PIXEL_FORMAT_YUV420P },
+    { OMX_COLOR_FormatUnused, TICameraParameters::PIXEL_FORMAT_UNUSED },
+    { OMX_COLOR_FormatRawBayer10bit, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB },
+};
+
+// Stereo frame-layout parameter names -> OMX layout enums.
+const userToOMX_LUT OMXCameraAdapter::mFrameLayout [] = {
+    { TICameraParameters::S3D_NONE, OMX_TI_StereoFrameLayout2D },
+    { TICameraParameters::S3D_TB_FULL, OMX_TI_StereoFrameLayoutTopBottom },
+    { TICameraParameters::S3D_SS_FULL, OMX_TI_StereoFrameLayoutLeftRight },
+    { TICameraParameters::S3D_TB_SUBSAMPLED, OMX_TI_StereoFrameLayoutTopBottomSubsample },
+    { TICameraParameters::S3D_SS_SUBSAMPLED, OMX_TI_StereoFrameLayoutLeftRightSubsample },
+};
+
+const LUTtype OMXCameraAdapter::mLayoutLUT = {
+    ARRAY_SIZE(mFrameLayout),
+    mFrameLayout
+};
+
+// Compressed picture coding formats (JPEG plus TI stereo JPS/MPO variants).
+const CapCodingFormat OMXCameraAdapter::mImageCodingFormat [] = {
+    { OMX_IMAGE_CodingJPEG, android::CameraParameters::PIXEL_FORMAT_JPEG },
+    { (OMX_IMAGE_CODINGTYPE)OMX_TI_IMAGE_CodingJPS, TICameraParameters::PIXEL_FORMAT_JPS },
+    { (OMX_IMAGE_CODINGTYPE)OMX_TI_IMAGE_CodingMPO, TICameraParameters::PIXEL_FORMAT_MPO },
+};
+
+// Fixed frame rates (fps) offered in addition to the hardware VFR ranges;
+// filtered against the hardware limits in encodeFrameRates.
+const CapFramerate OMXCameraAdapter::mFramerates [] = {
+    { 60, "60" },
+    { 30, "30" },
+    { 24, "24" },
+    { 20, "20" },
+    { 15, "15" },
+    { 10, "10" },
+};
+
+// Zoom stages: Q16 fixed-point zoom factor and its ratio*100 string
+// (65536 = 1.00x ... 524288 = 8.00x). Entries beyond the component's
+// xMaxWidthZoom are dropped at runtime (see encodeZoomCap).
+const CapZoom OMXCameraAdapter::mZoomStages [] = {
+    { 65536, "100" },
+    { 68157, "104" },
+    { 70124, "107" },
+    { 72745, "111" },
+    { 75366, "115" },
+    { 77988, "119" },
+    { 80609, "123" },
+    { 83231, "127" },
+    { 86508, "132" },
+    { 89784, "137" },
+    { 92406, "141" },
+    { 95683, "146" },
+    { 99615, "152" },
+    { 102892, "157" },
+    { 106168, "162" },
+    { 110100, "168" },
+    { 114033, "174" },
+    { 117965, "180" },
+    { 122552, "187" },
+    { 126484, "193" },
+    { 131072, "200" },
+    { 135660, "207" },
+    { 140247, "214" },
+    { 145490, "222" },
+    { 150733, "230" },
+    { 155976, "238" },
+    { 161219, "246" },
+    { 167117, "255" },
+    { 173015, "264" },
+    { 178913, "273" },
+    { 185467, "283" },
+    { 192020, "293" },
+    { 198574, "303" },
+    { 205783, "314" },
+    { 212992, "325" },
+    { 220201, "336" },
+    { 228065, "348" },
+    { 236585, "361" },
+    { 244449, "373" },
+    { 252969, "386" },
+    { 262144, "400" },
+    { 271319, "414" },
+    { 281149, "429" },
+    { 290980, "444" },
+    { 300810, "459" },
+    { 311951, "476" },
+    { 322437, "492" },
+    { 334234, "510" },
+    { 346030, "528" },
+    { 357827, "546" },
+    { 370934, "566" },
+    { 384041, "586" },
+    { 397148, "606" },
+    { 411566, "628" },
+    { 425984, "650" },
+    { 441057, "673" },
+    { 456131, "696" },
+    { 472515, "721" },
+    { 488899, "746" },
+    { 506593, "773" },
+    { 524288, "800" },
+};
+
+// ISO stages (0 = auto); entries above the sensor's nSensitivityMax are
+// dropped at runtime (see encodeISOCap).
+const CapISO OMXCameraAdapter::mISOStages [] = {
+    { 0, "auto" },
+    { 100, "100" },
+    { 200, "200"},
+    { 400, "400" },
+    { 800, "800" },
+    { 1000, "1000" },
+    { 1200, "1200" },
+    { 1600, "1600" },
+};
+
+// mapped values have to match with new_sensor_MSP.h
+const CapU32 OMXCameraAdapter::mSensorNames [] = {
+    { SENSORID_IMX060, "IMX060" },
+    { SENSORID_OV5650, "OV5650" },
+    { SENSORID_OV5640, "OV5640" },
+    { SENSORID_OV14825, "OV14825"},
+    { SENSORID_S5K4E1GA, "S5K4E1GA"},
+    { SENSORID_S5K6A1GX03, "S5K6A1GX03" },
+    { SENSORID_OV8830, "OV8830" },
+    { SENSORID_OV2722, "OV2722" },
+    { SENSORID_OV9726, "OV9726" }
+    // TODO(XXX): need to account for S3D camera later
+};
+
+// Stereo auto-convergence mode names -> OMX enums.
+const userToOMX_LUT OMXCameraAdapter::mAutoConvergence [] = {
+    { TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE, OMX_TI_AutoConvergenceModeDisable },
+    { TICameraParameters::AUTOCONVERGENCE_MODE_FRAME, OMX_TI_AutoConvergenceModeFrame },
+    { TICameraParameters::AUTOCONVERGENCE_MODE_CENTER, OMX_TI_AutoConvergenceModeCenter },
+    { TICameraParameters::AUTOCONVERGENCE_MODE_TOUCH, OMX_TI_AutoConvergenceModeFocusFaceTouch },
+    { TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL, OMX_TI_AutoConvergenceModeManual }
+};
+
+const LUTtype OMXCameraAdapter::mAutoConvergenceLUT = {
+    ARRAY_SIZE(mAutoConvergence),
+    mAutoConvergence
+};
+
+// Bracketing mode names -> OMX bracket enums.
+const userToOMX_LUT OMXCameraAdapter::mBracketingModes [] = {
+    { TICameraParameters::TEMP_BRACKETING , OMX_BracketTemporal },
+    { TICameraParameters::EXPOSURE_BRACKETING , OMX_BracketExposureRelativeInEV }
+};
+
+const LUTtype OMXCameraAdapter::mBracketingModesLUT = {
+    ARRAY_SIZE(mBracketingModes),
+    mBracketingModes
+};
+
+// values for supported camera facing direction
+const CapU32 OMXCameraAdapter::mFacing [] = {
+    { OMX_TI_SENFACING_BACK , TICameraParameters::FACING_BACK },
+    { OMX_TI_SENFACING_FRONT, TICameraParameters::FACING_FRONT},
+};
+
+/*****************************************
+ * internal static function declarations
+ *****************************************/
+
+/**** Utility functions to help translate OMX Caps to Parameter ****/
+
+// Appends to 'buffer' (comma separated) the parameter name of every table
+// entry whose OMX image coding format matches 'format'. The destination is
+// assumed to hold MAX_PROP_VALUE_LENGTH bytes; appends are clamped to the
+// space that is still free so the buffer can never overflow.
+// Returns NO_ERROR, or -EINVAL when 'buffer' or 'cap' is NULL.
+status_t OMXCameraAdapter::encodeImageCodingFormatCap(OMX_IMAGE_CODINGTYPE format,
+                                                      const CapCodingFormat *cap,
+                                                      size_t capCount,
+                                                      char * buffer) {
+
+    LOG_FUNCTION_NAME;
+
+    if ( ( NULL == buffer ) || ( NULL == cap ) ) {
+        CAMHAL_LOGEA("Invalid input arguments");
+        LOG_FUNCTION_NAME_EXIT;
+        return -EINVAL;
+    }
+
+    for ( size_t i = 0 ; i < capCount ; i++ ) {
+        if ( format != cap[i].imageCodingFormat ) {
+            continue;
+        }
+        // Clamp each append to the free space left in the destination.
+        size_t used = strlen(buffer);
+        size_t room = ( used < MAX_PROP_VALUE_LENGTH - 1 ) ? ( MAX_PROP_VALUE_LENGTH - 1 - used ) : 0;
+        if ( buffer[0] != '\0' ) {
+            strncat(buffer, PARAM_SEP, room);
+            used = strlen(buffer);
+            room = ( used < MAX_PROP_VALUE_LENGTH - 1 ) ? ( MAX_PROP_VALUE_LENGTH - 1 - used ) : 0;
+        }
+        strncat(buffer, cap[i].param, room);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+// Appends to 'buffer' (comma separated) the Android parameter name of every
+// table entry matching the given OMX pixel format.
+// @param format     OMX pixel format to translate
+// @param cap        translation table
+// @param capCount   number of entries in 'cap'
+// @param buffer     NUL-terminated destination string (appended to)
+// @param bufferSize total size of 'buffer' in bytes
+// @return NO_ERROR, or -EINVAL on NULL arguments
+status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
+                                                const CapPixelformat *cap,
+                                                size_t capCount,
+                                                char * buffer,
+                                                size_t bufferSize)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    if ( ( NULL == buffer ) || ( NULL == cap ) ) {
+        CAMHAL_LOGEA("Invalid input arguments");
+        return -EINVAL;
+    }
+
+    // Bound each strncat() by the space actually remaining in 'buffer'.
+    // The previous code passed the total buffer size as the limit, which
+    // bounds the *source* length and can overflow a partially filled buffer.
+    for ( unsigned int i = 0 ; i < capCount ; i++ ) {
+        if ( format == cap[i].pixelformat ) {
+            size_t len = strlen(buffer);
+            size_t room = ( len + 1 < bufferSize ) ? ( bufferSize - len - 1 ) : 0;
+            if ( len > 0 ) {
+                strncat(buffer, PARAM_SEP, room);
+                len = strlen(buffer);
+                room = ( len + 1 < bufferSize ) ? ( bufferSize - len - 1 ) : 0;
+            }
+            strncat(buffer, cap[i].param, room);
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Builds the list of supported FPS ranges for the Android API.
+// Collects the fixed min/max rates, the variable-FPS modes reported by the
+// component (clamped into [minFrameRate, maxFrameRate]), and the hard-coded
+// fixed rates from 'fixedFrameRates' that the hardware window covers.
+// The result is sorted (Android API requires ordering by max, then min) and
+// de-duplicated in place. All rates are in fps * CameraHal::VFR_SCALE units.
+void OMXCameraAdapter::encodeFrameRates(const int minFrameRate, const int maxFrameRate,
+        const OMX_TI_CAPTYPE & caps, const CapFramerate * const fixedFrameRates,
+        const int frameRateCount, android::Vector<FpsRange> & fpsRanges) {
+    LOG_FUNCTION_NAME;
+
+    if ( minFrameRate == maxFrameRate ) {
+        // single fixed frame rate supported
+        fpsRanges.add(FpsRange(minFrameRate, maxFrameRate));
+        return;
+    }
+
+    // insert min and max frame rates
+    fpsRanges.add(FpsRange(minFrameRate, minFrameRate));
+    fpsRanges.add(FpsRange(maxFrameRate, maxFrameRate));
+
+    // insert variable frame rates
+    for ( int i = 0; i < static_cast<int>(caps.ulPrvVarFPSModesCount); ++i ) {
+        const FpsRange fpsRange = FpsRange(
+                max(androidFromDucatiFrameRate(caps.tPrvVarFPSModes[i].nVarFPSMin), minFrameRate),
+                min(androidFromDucatiFrameRate(caps.tPrvVarFPSModes[i].nVarFPSMax), maxFrameRate));
+
+        if ( fpsRange.isFixed() ) {
+            // this range is either min or max fixed frame rate, already added above
+            continue;
+        }
+
+        fpsRanges.add(fpsRange);
+    }
+
+    // insert fixed frame rates
+    for ( int i = 0; i < frameRateCount; ++i ) {
+        const int fixedFrameRate = fixedFrameRates[i].num * CameraHal::VFR_SCALE;
+
+        if ( fixedFrameRate < minFrameRate || fixedFrameRate > maxFrameRate ) {
+            // not supported by hardware
+            continue;
+        }
+
+        const FpsRange fpsRange = FpsRange(fixedFrameRate, fixedFrameRate);
+        fpsRanges.add(fpsRange);
+    }
+
+    // sort first by max, then by min, according to Android API requirements
+    fpsRanges.sort(FpsRange::compare);
+
+    // remove duplicated frame rates
+    // (the index only advances when no element was removed, so runs of more
+    // than two duplicates are also collapsed)
+    for ( int i = 0; i < static_cast<int>(fpsRanges.size()) - 1; ) {
+        const FpsRange & current = fpsRanges.itemAt(i);
+        const FpsRange & next = fpsRanges.itemAt(i + 1);
+        if ( current == next ) {
+            fpsRanges.removeAt(i + 1);
+        } else {
+            i++;
+        }
+    }
+}
+
+// Appends to 'buffer' (comma separated) every zoom ratio the hardware can
+// reach (entries with a Q16 factor <= maxZoom) and returns the number of
+// stages appended.
+// @return the stage count; 0 when no stage fits or on invalid arguments.
+size_t OMXCameraAdapter::encodeZoomCap(OMX_S32 maxZoom,
+                                       const CapZoom *cap,
+                                       size_t capCount,
+                                       char * buffer,
+                                       size_t bufferSize)
+{
+    size_t ret = 0;
+
+    LOG_FUNCTION_NAME;
+
+    if ( (NULL == buffer) || (NULL == cap) ) {
+        CAMHAL_LOGEA("Invalid input arguments");
+        // The return type is size_t: the previous 'return -EINVAL' wrapped
+        // around to a huge positive stage count, making callers advertise
+        // bogus zoom support. Zero stages correctly means "no zoom".
+        return 0;
+    }
+
+    for ( unsigned int i = 0; i < capCount; i++ ) {
+        if ( cap[i].num <= maxZoom ) {
+            // Bound each append by the space remaining in the destination
+            // (the previous code passed the total buffer size to strncat).
+            size_t len = strlen(buffer);
+            size_t room = ( len + 1 < bufferSize ) ? ( bufferSize - len - 1 ) : 0;
+            if ( len > 0 ) {
+                strncat(buffer, PARAM_SEP, room);
+                len = strlen(buffer);
+                room = ( len + 1 < bufferSize ) ? ( bufferSize - len - 1 ) : 0;
+            }
+            strncat(buffer, cap[i].param, room);
+            ret++;
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Appends to 'buffer' (comma separated) every ISO stage that does not exceed
+// the sensor's maximum sensitivity.
+// @param maxISO     maximum ISO sensitivity reported by the component
+// @param cap        ISO stage table ("auto" has num == 0 and always fits)
+// @param capCount   number of entries in 'cap'
+// @param buffer     NUL-terminated destination string (appended to)
+// @param bufferSize total size of 'buffer' in bytes
+// @return NO_ERROR, or -EINVAL on NULL arguments
+status_t OMXCameraAdapter::encodeISOCap(OMX_U32 maxISO,
+                                        const CapISO *cap,
+                                        size_t capCount,
+                                        char * buffer,
+                                        size_t bufferSize)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    if ( (NULL == buffer) || (NULL == cap) ) {
+        CAMHAL_LOGEA("Invalid input arguments");
+        return -EINVAL;
+    }
+
+    // Bound each strncat() by the space actually remaining in 'buffer'; the
+    // previous code passed the total buffer size, which can overflow a
+    // partially filled buffer.
+    for ( unsigned int i = 0; i < capCount; i++ ) {
+        if ( cap[i].num <= maxISO) {
+            size_t len = strlen(buffer);
+            size_t room = ( len + 1 < bufferSize ) ? ( bufferSize - len - 1 ) : 0;
+            if ( len > 0 ) {
+                strncat(buffer, PARAM_SEP, room);
+                len = strlen(buffer);
+                room = ( len + 1 < bufferSize ) ? ( bufferSize - len - 1 ) : 0;
+            }
+            strncat(buffer, cap[i].param, room);
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Appends to 'buffer' (comma separated) every resolution from 'cap' that
+// fits inside the [min .. max] window reported by the OMX component.
+// @param res        component resolution window (min/max width and height)
+// @param cap        resolution table to filter
+// @param capCount   number of entries in 'cap'
+// @param buffer     NUL-terminated destination string (appended to)
+// @param bufferSize total size of 'buffer' in bytes
+// @return NO_ERROR, or -EINVAL on NULL arguments
+status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res,
+                                         const CapResolution *cap,
+                                         size_t capCount,
+                                         char * buffer,
+                                         size_t bufferSize)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    if ( (NULL == buffer) || (NULL == cap) ) {
+        CAMHAL_LOGEA("Invalid input arguments");
+        return -EINVAL;
+    }
+
+    // Bound each strncat() by the space actually remaining in 'buffer'; the
+    // previous code passed the total buffer size, which can overflow a
+    // partially filled buffer.
+    for ( unsigned int i = 0 ; i < capCount ; i++ ) {
+        if ( (cap[i].width <= res.nWidthMax) &&
+             (cap[i].height <= res.nHeightMax) &&
+             (cap[i].width >= res.nWidthMin) &&
+             (cap[i].height >= res.nHeightMin) ) {
+            size_t len = strlen(buffer);
+            size_t room = ( len + 1 < bufferSize ) ? ( bufferSize - len - 1 ) : 0;
+            if ( len > 0 ) {
+                strncat(buffer, PARAM_SEP, room);
+                len = strlen(buffer);
+                room = ( len + 1 < bufferSize ) ? ( bufferSize - len - 1 ) : 0;
+            }
+            strncat(buffer, cap[i].param, room);
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// 3D variant of encodeSizeCap: additionally requires the total pixel count
+// (width * height, covering both stereo views) to stay within the
+// component's nMaxResInPixels limit.
+// @return NO_ERROR, or -EINVAL on NULL arguments
+status_t OMXCameraAdapter::encodeSizeCap3D(OMX_TI_CAPRESTYPE &res,
+                                           const CapResolution *cap,
+                                           size_t capCount,
+                                           char * buffer,
+                                           size_t bufferSize)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    if ( (NULL == buffer) || (NULL == cap) ) {
+        CAMHAL_LOGEA("Invalid input arguments");
+        return -EINVAL;
+    }
+
+    // Bound each strncat() by the space actually remaining in 'buffer'; the
+    // previous code passed the total buffer size, which can overflow a
+    // partially filled buffer.
+    for ( unsigned int i = 0 ; i < capCount ; i++ ) {
+        if ( (cap[i].width <= res.nWidthMax) &&
+             (cap[i].height <= res.nHeightMax) &&
+             (cap[i].width >= res.nWidthMin) &&
+             (cap[i].height >= res.nHeightMin) &&
+             (cap[i].width * cap[i].height <= res.nMaxResInPixels) ) {
+            size_t len = strlen(buffer);
+            size_t room = ( len + 1 < bufferSize ) ? ( bufferSize - len - 1 ) : 0;
+            if ( len > 0 ) {
+                strncat(buffer, PARAM_SEP, room);
+                len = strlen(buffer);
+                room = ( len + 1 < bufferSize ) ? ( bufferSize - len - 1 ) : 0;
+            }
+            strncat(buffer, cap[i].param, room);
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Publishes the supported still-picture sizes. The capture frame layouts
+// reported by the component decide whether the 2D table or the stereo
+// (top-bottom / side-by-side / subsampled) tables are filtered against the
+// component's resolution window. For unsupported stereo flavors the
+// property is set to the (still empty) buffer, i.e. cleared.
+status_t OMXCameraAdapter::insertImageSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    status_t ret = NO_ERROR;
+    char supported[MAX_PROP_VALUE_LENGTH];
+    int s3d_detected = 0;
+    int s3d_ss_detected = 0;
+    int s3d_tb_detected = 0;
+
+    LOG_FUNCTION_NAME;
+
+    // Detect which stereo capture layouts (if any) the component reports.
+    for ( unsigned int i = 0 ; i < caps.ulCapFrameLayoutCount; i++ ) {
+        if (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottom)
+        {
+            s3d_tb_detected = 1;
+        }
+        else if (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRight)
+        {
+            s3d_ss_detected = 1;
+        }
+        else if ( (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottomSubsample)
+                || (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRightSubsample) )
+        {
+            s3d_detected = 1;
+        }
+    }
+
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+    // Check if we are in 2d mode
+    if (!s3d_ss_detected && !s3d_tb_detected && !s3d_detected)
+    {
+        ret = encodeSizeCap(caps.tImageResRange,
+                            mImageCapRes,
+                            ARRAY_SIZE(mImageCapRes),
+                            supported,
+                            MAX_PROP_VALUE_LENGTH);
+
+        if ( NO_ERROR != ret ) {
+            CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+        } else {
+            params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported);
+        }
+        // Maximum picture dimensions are only published in 2D mode.
+        params->set(CameraProperties::MAX_PICTURE_WIDTH, caps.tImageResRange.nWidthMax);
+        params->set(CameraProperties::MAX_PICTURE_HEIGHT, caps.tImageResRange.nHeightMax);
+    }
+    else // 3d mode
+    {
+        if (s3d_tb_detected)
+        {
+            ret = encodeSizeCap3D(caps.tImageResRange,
+                                  mImageCapResTB,
+                                  ARRAY_SIZE(mImageCapResTB),
+                                  supported,
+                                  MAX_PROP_VALUE_LENGTH);
+
+            if ( NO_ERROR != ret ) {
+                CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+            } else {
+                params->set(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES, supported);
+            }
+        }
+        else
+        {
+            params->set(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES, supported);
+        }
+
+        memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+        if (s3d_ss_detected)
+        {
+            ret = encodeSizeCap3D(caps.tImageResRange,
+                                  mImageCapResSS,
+                                  ARRAY_SIZE(mImageCapResSS),
+                                  supported,
+                                  MAX_PROP_VALUE_LENGTH);
+
+            if ( NO_ERROR != ret ) {
+                CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+            } else {
+                params->set(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES, supported);
+            }
+        }
+        else
+        {
+            params->set(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES, supported);
+        }
+
+        memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+        if (s3d_detected)
+        {
+            // Subsampled stereo fits a whole stereo pair into a normal frame,
+            // so the plain 2D capture table is used here.
+            ret = encodeSizeCap3D(caps.tImageResRange,
+                                  mImageCapRes,
+                                  ARRAY_SIZE(mImageCapRes),
+                                  supported,
+                                  MAX_PROP_VALUE_LENGTH);
+
+            if ( NO_ERROR != ret ) {
+                CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+            } else {
+                params->set(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES, supported);
+            }
+        } else {
+            params->set(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES, supported);
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Publishes the supported preview sizes. In 2D mode both landscape and
+// portrait resolutions are encoded into the same list (portrait sizes are
+// checked against the rotated-preview window); the property is only set when
+// both encoding passes succeed. In 3D mode the per-layout lists are set,
+// with unsupported flavors receiving the (empty) buffer.
+status_t OMXCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    status_t ret = NO_ERROR;
+    char supported[MAX_PROP_VALUE_LENGTH];
+    int s3d_detected = 0;
+    int s3d_ss_detected = 0;
+    int s3d_tb_detected = 0;
+
+    LOG_FUNCTION_NAME;
+
+    // Detect which stereo preview layouts (if any) the component reports.
+    for ( unsigned int i = 0 ; i < caps.ulPrvFrameLayoutCount; i++ ) {
+        if (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottom)
+        {
+            s3d_tb_detected = 1;
+        }
+        else if (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRight)
+        {
+            s3d_ss_detected = 1;
+        }
+        else if ( (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottomSubsample)
+                   || (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRightSubsample) )
+        {
+            s3d_detected = 1;
+        }
+    }
+
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+    // Check if we are in 2d mode
+    if (!s3d_ss_detected && !s3d_tb_detected && !s3d_detected)
+    {
+        ret = encodeSizeCap(caps.tPreviewResRange,
+                            mPreviewRes,
+                            ARRAY_SIZE(mPreviewRes),
+                            supported,
+                            MAX_PROP_VALUE_LENGTH);
+
+        if ( NO_ERROR != ret ) {
+            CAMHAL_LOGEB("Error inserting supported Landscape preview sizes 0x%x", ret);
+            return ret;
+        }
+
+        /* Insert portrait resolutions after verifying portrait capability support */
+        ret = encodeSizeCap(caps.tRotatedPreviewResRange,
+                            mPreviewPortraitRes,
+                            ARRAY_SIZE(mPreviewPortraitRes),
+                            supported,
+                            MAX_PROP_VALUE_LENGTH);
+
+        if ( NO_ERROR != ret ) {
+            CAMHAL_LOGEB("Error inserting supported Potrait preview sizes 0x%x", ret);
+        } else {
+            params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported);
+        }
+    }
+    else // 3d mode
+    {
+        if (s3d_tb_detected)
+        {
+            ret = encodeSizeCap3D(caps.tPreviewResRange,
+                                  mPreviewResTB,
+                                  ARRAY_SIZE(mPreviewResTB),
+                                  supported,
+                                  MAX_PROP_VALUE_LENGTH);
+            if ( NO_ERROR != ret ) {
+                CAMHAL_LOGEB("Error inserting supported 3D TB preview sizes 0x%x", ret);
+                return ret;
+            } else {
+                params->set(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES, supported);
+            }
+        }
+        else
+        {
+            params->set(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES, supported);
+        }
+
+        memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+        if (s3d_ss_detected)
+        {
+            ret = encodeSizeCap3D(caps.tPreviewResRange,
+                                  mPreviewResSS,
+                                  ARRAY_SIZE(mPreviewResSS),
+                                  supported,
+                                  MAX_PROP_VALUE_LENGTH);
+            if ( NO_ERROR != ret ) {
+                CAMHAL_LOGEB("Error inserting supported 3D SS preview sizes 0x%x", ret);
+                return ret;
+            } else {
+                params->set(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES, supported);
+            }
+        }
+        else
+        {
+            params->set(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES, supported);
+        }
+
+        memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+        if (s3d_detected)
+        {
+            // Subsampled stereo uses the plain 2D preview table.
+            ret = encodeSizeCap3D(caps.tPreviewResRange,
+                                  mPreviewRes,
+                                  ARRAY_SIZE(mPreviewRes),
+                                  supported,
+                                  MAX_PROP_VALUE_LENGTH);
+
+            if ( NO_ERROR != ret ) {
+                CAMHAL_LOGEB("Error inserting supported preview sizes 0x%x", ret);
+                return ret;
+            } else {
+                params->set(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, supported);
+            }
+        }
+        else
+        {
+            params->set(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, supported);
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Publishes the supported video sizes, which mirror the 2D preview
+// resolutions filtered against the component's preview window.
+status_t OMXCameraAdapter::insertVideoSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    char supported[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(supported, '\0', sizeof(supported));
+
+    const status_t status = encodeSizeCap(caps.tPreviewResRange,
+                                          mPreviewRes,
+                                          ARRAY_SIZE(mPreviewRes),
+                                          supported,
+                                          MAX_PROP_VALUE_LENGTH);
+
+    if ( status == NO_ERROR ) {
+        params->set(CameraProperties::SUPPORTED_VIDEO_SIZES, supported);
+    } else {
+        CAMHAL_LOGEB("Error inserting supported video sizes 0x%x", status);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return status;
+}
+
+// Publishes the supported JPEG thumbnail sizes, always including "0x0"
+// (thumbnail disabled) as CTS requires.
+status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    status_t ret = NO_ERROR;
+    char supported[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+    ret = encodeSizeCap(caps.tThumbResRange,
+                        mThumbRes,
+                        ARRAY_SIZE(mThumbRes),
+                        supported,
+                        MAX_PROP_VALUE_LENGTH);
+
+    if ( NO_ERROR != ret ) {
+        CAMHAL_LOGEB("Error inserting supported thumbnail sizes 0x%x", ret);
+    } else {
+        //CTS Requirement: 0x0 should always be supported
+        // Bound the appends by the space actually remaining: the previous
+        // code passed MAX_PROP_NAME_LENGTH to strncat(), which limits the
+        // *source* length, not the destination space.
+        size_t len = strlen(supported);
+        size_t room = ( len + 1 < MAX_PROP_VALUE_LENGTH ) ? ( MAX_PROP_VALUE_LENGTH - len - 1 ) : 0;
+        if ( len > 0 ) {
+            strncat(supported, PARAM_SEP, room);
+            len = strlen(supported);
+            room = ( len + 1 < MAX_PROP_VALUE_LENGTH ) ? ( MAX_PROP_VALUE_LENGTH - len - 1 ) : 0;
+        }
+        strncat(supported, "0x0", room);
+        params->set(CameraProperties::SUPPORTED_THUMBNAIL_SIZES, supported);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Publishes the supported zoom ratios and stage count, and flags whether
+// (smooth) zoom is supported at all.
+status_t OMXCameraAdapter::insertZoomStages(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    status_t ret = NO_ERROR;
+    char supported[MAX_PROP_VALUE_LENGTH];
+    size_t zoomStageCount = 0;
+
+    LOG_FUNCTION_NAME;
+
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+    zoomStageCount = encodeZoomCap(caps.xMaxWidthZoom,
+                                   mZoomStages,
+                                   ARRAY_SIZE(mZoomStages),
+                                   supported,
+                                   MAX_PROP_VALUE_LENGTH);
+
+    params->set(CameraProperties::SUPPORTED_ZOOM_RATIOS, supported);
+
+    if ( 0 == zoomStageCount ) {
+        // The previous code computed 'zoomStageCount - 1' unconditionally;
+        // with zoomStageCount == 0 that size_t expression underflows and a
+        // bogus huge stage count was published. Report 0 stages instead.
+        params->set(CameraProperties::SUPPORTED_ZOOM_STAGES, 0);
+        params->set(CameraProperties::ZOOM_SUPPORTED, android::CameraParameters::FALSE);
+        params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, android::CameraParameters::FALSE);
+    } else {
+        // Stage 0 is 1.0x, so CTS expects count - 1 additional stages.
+        params->set(CameraProperties::SUPPORTED_ZOOM_STAGES, (int) (zoomStageCount - 1));
+        params->set(CameraProperties::ZOOM_SUPPORTED, android::CameraParameters::TRUE);
+        params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, android::CameraParameters::TRUE);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Publishes the supported picture formats: raw pixel formats first, then
+// the compressed coding formats (JPEG/JPS/MPO), all in one list.
+status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    status_t ret = NO_ERROR;
+    char supported[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(supported, '\0', sizeof(supported));
+
+    for ( OMX_U32 i = 0 ; i < caps.ulImageFormatCount ; i++ ) {
+        ret = encodePixelformatCap(caps.eImageFormats[i],
+                                   mPixelformats,
+                                   ARRAY_SIZE(mPixelformats),
+                                   supported,
+                                   MAX_PROP_VALUE_LENGTH);
+
+        if ( NO_ERROR != ret ) {
+            CAMHAL_LOGEB("Error inserting supported picture formats 0x%x", ret);
+            break;
+        }
+    }
+
+    // Skipped when the pixel-format pass failed: the previous code ran this
+    // loop regardless and overwrote 'ret', silently masking those errors.
+    for ( OMX_U32 i = 0; ( NO_ERROR == ret ) && ( i < caps.ulImageCodingFormatCount ) ; i++) {
+        ret = encodeImageCodingFormatCap(caps.eImageCodingFormat[i],
+                                         mImageCodingFormat,
+                                         ARRAY_SIZE(mImageCodingFormat),
+                                         supported);
+
+        if ( NO_ERROR != ret ) {
+            CAMHAL_LOGEB("Error inserting supported picture formats 0x%x", ret);
+            break;
+        }
+    }
+
+    if ( NO_ERROR == ret ) {
+        params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, supported);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Publishes the supported preview formats, always adding YV12: the preview
+// port is programmed with NV21 when the application selects YV12.
+status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    status_t status = NO_ERROR;
+    char formatList[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(formatList, '\0', sizeof(formatList));
+
+    // Translate every OMX preview format into its Android parameter name.
+    for ( int idx = 0 ; idx < caps.ulPreviewFormatCount ; idx++ ) {
+        status = encodePixelformatCap(caps.ePreviewFormats[idx],
+                                      mPixelformats,
+                                      ARRAY_SIZE(mPixelformats),
+                                      formatList,
+                                      MAX_PROP_VALUE_LENGTH);
+        if ( NO_ERROR != status ) {
+            CAMHAL_LOGEB("Error inserting supported preview formats 0x%x", status);
+            break;
+        }
+    }
+
+    if ( NO_ERROR == status ) {
+        // need to advertise we support YV12 format
+        // We will program preview port with NV21 when we see application set YV12
+        if ( formatList[0] != '\0' ) {
+            strncat(formatList, PARAM_SEP, 1);
+        }
+        strncat(formatList, android::CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
+        params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, formatList);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return status;
+}
+
+// Publishes supported FPS ranges and fixed preview frame rates, in both the
+// normal ([FPS_MIN .. FPS_MAX]) and extended ([FPS_MIN .. FPS_MAX_EXTENDED])
+// variants. Ducati normally reports Q8 frame rates, but some firmwares (JEM
+// Amazon) report plain integers -- hence the '>= 50' heuristic: values below
+// 50 are treated as plain fps and shifted into Q8 first.
+status_t OMXCameraAdapter::insertFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // collect supported normal frame rates
+    {
+        android::Vector<FpsRange> fpsRanges;
+
+        // HASH: Fix JEM Amazon Ducati xFramerates which are [1 .. 30] vs [256 .. 7680]
+        int minFrameRate = -1;
+        if (caps.xFramerateMin >= 50)
+            minFrameRate = max<int>(FPS_MIN * CameraHal::VFR_SCALE, androidFromDucatiFrameRate(caps.xFramerateMin));
+        else
+            minFrameRate = max<int>(FPS_MIN * CameraHal::VFR_SCALE, androidFromDucatiFrameRate(caps.xFramerateMin << 8));
+        int maxFrameRate = -1;
+        if (caps.xFramerateMax >= 50)
+            maxFrameRate = min<int>(FPS_MAX * CameraHal::VFR_SCALE, androidFromDucatiFrameRate(caps.xFramerateMax));
+        else
+            maxFrameRate = min<int>(FPS_MAX * CameraHal::VFR_SCALE, androidFromDucatiFrameRate(caps.xFramerateMax << 8));
+
+        if ( minFrameRate > maxFrameRate ) {
+            CAMHAL_LOGE("Invalid frame rate range: [%d .. %d]", caps.xFramerateMin, caps.xFramerateMax);
+            return BAD_VALUE;
+        }
+
+        encodeFrameRates(minFrameRate, maxFrameRate, caps, mFramerates, ARRAY_SIZE(mFramerates), fpsRanges);
+
+        // populate variable frame rates
+        char supported[MAX_PROP_VALUE_LENGTH];
+        char defaultRange[MAX_PROP_VALUE_LENGTH];
+
+        memset(supported, 0, sizeof(supported));
+        memset(defaultRange, 0, sizeof(defaultRange));
+
+        // NOTE(review): the strcat() below is unbounded; it relies on
+        // MAX_PROP_VALUE_LENGTH being large enough for every range -- confirm.
+        for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+            const FpsRange & fpsRange = fpsRanges.itemAt(i);
+            if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+            char tmp[MAX_PROP_VALUE_LENGTH];
+            snprintf(tmp, sizeof(tmp) - 1, "(%d,%d)", fpsRange.min(), fpsRange.max());
+            strcat(supported, tmp);
+        }
+
+        // default range = the largest one (last element after sorting by max, min)
+        const FpsRange & defaultFpsRange = fpsRanges.itemAt(fpsRanges.size() - 1);
+        snprintf(defaultRange, sizeof(defaultRange) - 1, "%d,%d", defaultFpsRange.min(), defaultFpsRange.max());
+
+        CAMHAL_LOGD("Supported framerate ranges: %s", supported);
+        CAMHAL_LOGD("Default framerate range: [%s]", defaultRange);
+
+        params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported);
+        params->set(CameraProperties::FRAMERATE_RANGE, defaultRange);
+
+        // populate fixed frame rates
+        memset(supported, 0, sizeof(supported));
+        memset(defaultRange, 0, sizeof(defaultRange));
+
+        for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+            const FpsRange & fpsRange = fpsRanges.itemAt(i);
+            if ( fpsRange.isFixed() && (fpsRange.min()%CameraHal::VFR_SCALE) == 0 ) {
+                if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+                char tmp[MAX_PROP_VALUE_LENGTH];
+                snprintf(tmp, sizeof(tmp) - 1, "%d", fpsRange.min()/CameraHal::VFR_SCALE);
+                strcat(supported, tmp);
+            }
+        }
+
+        CAMHAL_LOGD("Supported preview framerates: %s", supported);
+        params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, supported);
+
+        // insert default frame rate only if it is fixed
+        if ( defaultFpsRange.isFixed() && (defaultFpsRange.min()%CameraHal::VFR_SCALE) == 0 ) {
+            snprintf(defaultRange, sizeof(defaultRange) - 1, "%d", defaultFpsRange.min()/CameraHal::VFR_SCALE);
+            params->set(CameraProperties::PREVIEW_FRAME_RATE, defaultRange);
+        }
+    }
+
+    // collect supported extended frame rates
+    {
+        android::Vector<FpsRange> fpsRanges;
+
+        // HASH: Fix JEM Amazon Ducati xFramerates which are [1 .. 30] vs [256 .. 7680]
+        int minFrameRate = -1;
+        if (caps.xFramerateMin >= 50)
+            minFrameRate = max<int>(FPS_MIN * CameraHal::VFR_SCALE, androidFromDucatiFrameRate(caps.xFramerateMin));
+        else
+            minFrameRate = max<int>(FPS_MIN * CameraHal::VFR_SCALE, androidFromDucatiFrameRate(caps.xFramerateMin << 8));
+        int maxFrameRate = -1;
+        if (caps.xFramerateMax >= 50)
+            maxFrameRate = min<int>(FPS_MAX_EXTENDED * CameraHal::VFR_SCALE, androidFromDucatiFrameRate(caps.xFramerateMax));
+        else
+            maxFrameRate = min<int>(FPS_MAX_EXTENDED * CameraHal::VFR_SCALE, androidFromDucatiFrameRate(caps.xFramerateMax << 8));
+
+        encodeFrameRates(minFrameRate, maxFrameRate, caps, mFramerates, ARRAY_SIZE(mFramerates), fpsRanges);
+
+        // populate variable frame rates
+        char supported[MAX_PROP_VALUE_LENGTH];
+        // NOTE(review): sizeof(supported) - 1 leaves the final byte unset;
+        // harmless as long as the string stays NUL-terminated, but plain
+        // sizeof(supported) would be safer.
+        memset(supported, 0, sizeof(supported) - 1);
+
+        for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+            const FpsRange & fpsRange = fpsRanges.itemAt(i);
+            if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+            char tmp[MAX_PROP_VALUE_LENGTH];
+            snprintf(tmp, sizeof(tmp) - 1, "(%d,%d)", fpsRange.min(), fpsRange.max());
+            strcat(supported, tmp);
+        }
+
+        CAMHAL_LOGD("Supported framerate ranges extended: %s", supported);
+        params->set(CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED, supported);
+
+        // populate fixed frame rates
+        memset(supported, 0, sizeof(supported) - 1);
+
+        for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+            const FpsRange & fpsRange = fpsRanges.itemAt(i);
+            if ( fpsRange.isFixed() && (fpsRange.min()%CameraHal::VFR_SCALE) == 0 ) {
+                if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+                char tmp[MAX_PROP_VALUE_LENGTH];
+                snprintf(tmp, sizeof(tmp) - 1, "%d", fpsRange.min()/CameraHal::VFR_SCALE);
+                strcat(supported, tmp);
+            }
+        }
+
+        CAMHAL_LOGD("Supported extended preview framerates: %s", supported);
+        params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT, supported);
+    }
+
+    return OK;
+}
+
+status_t OMXCameraAdapter::insertEVs(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Publish the EV compensation range reported by the OMX component.
+    // The raw values are scaled by 10 before being exported.
+    char buffer[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(buffer, '\0', MAX_PROP_VALUE_LENGTH);
+
+    snprintf(buffer, MAX_PROP_VALUE_LENGTH, "%d", static_cast<int>(caps.xEVCompensationMin * 10));
+    params->set(CameraProperties::SUPPORTED_EV_MIN, buffer);
+
+    snprintf(buffer, MAX_PROP_VALUE_LENGTH, "%d", static_cast<int>(caps.xEVCompensationMax * 10));
+    params->set(CameraProperties::SUPPORTED_EV_MAX, buffer);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Encode the supported ISO stages (bounded by the sensor's maximum
+    // sensitivity) into a comma separated list and publish it.
+    char isoList[MAX_PROP_VALUE_LENGTH];
+    status_t status;
+
+    LOG_FUNCTION_NAME;
+
+    memset(isoList, '\0', MAX_PROP_VALUE_LENGTH);
+
+    status = encodeISOCap(caps.nSensitivityMax,
+                          mISOStages,
+                          ARRAY_SIZE(mISOStages),
+                          isoList,
+                          MAX_PROP_VALUE_LENGTH);
+    if ( status == NO_ERROR ) {
+        params->set(CameraProperties::SUPPORTED_ISO_VALUES, isoList);
+    } else {
+        CAMHAL_LOGEB("Error inserting supported ISO modes 0x%x", status);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return status;
+}
+
+status_t OMXCameraAdapter::insertIPPModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Advertise the supported image post-processing pipelines. "off" is
+    // unconditional; LDC, NSF and their combination depend on the caps.
+    char modes[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(modes, '\0', MAX_PROP_VALUE_LENGTH);
+
+    // Off is always supported
+    strncat(modes, TICameraParameters::IPP_NONE, MAX_PROP_NAME_LENGTH);
+
+    const bool ldc = ( caps.bLensDistortionCorrectionSupported != OMX_FALSE );
+    const bool nsf = ( caps.bISONoiseFilterSupported != OMX_FALSE );
+
+    if ( ldc ) {
+        strncat(modes, PARAM_SEP, 1);
+        strncat(modes, TICameraParameters::IPP_LDC, MAX_PROP_NAME_LENGTH);
+    }
+
+    if ( nsf ) {
+        strncat(modes, PARAM_SEP, 1);
+        strncat(modes, TICameraParameters::IPP_NSF, MAX_PROP_NAME_LENGTH);
+    }
+
+    // combined LDC+NSF pipeline needs both features
+    if ( ldc && nsf ) {
+        strncat(modes, PARAM_SEP, 1);
+        strncat(modes, TICameraParameters::IPP_LDCNSF, MAX_PROP_NAME_LENGTH);
+    }
+
+    params->set(CameraProperties::SUPPORTED_IPP_MODES, modes);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertWBModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Translate each OMX white balance mode into its HAL string via the
+    // lookup table and publish the comma separated result.
+    char modes[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(modes, '\0', MAX_PROP_VALUE_LENGTH);
+
+    for ( unsigned int i = 0; i < caps.ulWhiteBalanceCount; ++i ) {
+        const char *name = getLUTvalue_OMXtoHAL(caps.eWhiteBalanceModes[i], WBalLUT);
+        if ( name == NULL ) {
+            continue; // no HAL equivalent for this mode
+        }
+        if ( modes[0] != '\0' ) {
+            strncat(modes, PARAM_SEP, 1);
+        }
+        strncat(modes, name, MAX_PROP_NAME_LENGTH);
+    }
+
+    params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, modes);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertEffects(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Translate each OMX color effect into its HAL string and publish the
+    // comma separated list.
+    char effects[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(effects, '\0', MAX_PROP_VALUE_LENGTH);
+
+    for ( unsigned int i = 0; i < caps.ulColorEffectCount; ++i ) {
+        const char *name = getLUTvalue_OMXtoHAL(caps.eColorEffects[i], EffLUT);
+        if ( name == NULL ) {
+            continue; // no HAL equivalent for this effect
+        }
+        if ( effects[0] != '\0' ) {
+            strncat(effects, PARAM_SEP, 1);
+        }
+        strncat(effects, name, MAX_PROP_NAME_LENGTH);
+    }
+
+    params->set(CameraProperties::SUPPORTED_EFFECTS, effects);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertExpModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Translate each OMX exposure mode into its HAL string and publish the
+    // comma separated list.
+    char modes[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(modes, '\0', MAX_PROP_VALUE_LENGTH);
+
+    for ( unsigned int i = 0; i < caps.ulExposureModeCount; ++i ) {
+        const char *name = getLUTvalue_OMXtoHAL(caps.eExposureModes[i], ExpLUT);
+        if ( name == NULL ) {
+            continue; // no HAL equivalent for this mode
+        }
+        if ( modes[0] != '\0' ) {
+            strncat(modes, PARAM_SEP, 1);
+        }
+        strncat(modes, name, MAX_PROP_NAME_LENGTH);
+    }
+
+    params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, modes);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertManualExpRanges(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+    // Publish manual exposure and manual gain (ISO) ranges. A degenerate or
+    // inverted range from the component is exported as 0/0/0, which tells
+    // the upper layers that the corresponding manual control is unavailable.
+    char value[MAX_PROP_VALUE_LENGTH];
+    int expMin = 0, expMax = 0, expStep = 0;
+    int isoMin = 0, isoMax = 0, isoStep = 0;
+
+    LOG_FUNCTION_NAME;
+
+    if (caps.nManualExpMin <= caps.nManualExpMax) {
+        expMin  = (int) caps.nManualExpMin;
+        expMax  = (int) caps.nManualExpMax;
+        expStep = (int) MANUAL_EXPOSURE_STEP;
+    }
+
+    snprintf(value, MAX_PROP_VALUE_LENGTH, "%d", expMin);
+    params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN, value);
+
+    snprintf(value, MAX_PROP_VALUE_LENGTH, "%d", expMax);
+    params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX, value);
+
+    snprintf(value, MAX_PROP_VALUE_LENGTH, "%d", expStep);
+    params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP, value);
+
+    if (MANUAL_GAIN_ISO_MIN <= caps.nSensitivityMax) {
+        isoMin  = (int) MANUAL_GAIN_ISO_MIN;
+        isoMax  = (int) caps.nSensitivityMax;
+        isoStep = (int) MANUAL_GAIN_ISO_STEP;
+    }
+
+    snprintf(value, MAX_PROP_VALUE_LENGTH, "%d", isoMin);
+    params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN, value);
+
+    snprintf(value, MAX_PROP_VALUE_LENGTH, "%d", isoMax);
+    params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX, value);
+
+    snprintf(value, MAX_PROP_VALUE_LENGTH, "%d", isoStep);
+    params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP, value);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertFlashModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Translate the OMX flash modes into HAL strings; fall back to the
+    // default flash mode when the component reports none.
+    char modes[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(modes, '\0', MAX_PROP_VALUE_LENGTH);
+
+    for ( unsigned int i = 0; i < caps.ulFlashCount; ++i ) {
+        const char *name = getLUTvalue_OMXtoHAL(caps.eFlashModes[i], FlashLUT);
+        if ( name == NULL ) {
+            continue; // no HAL equivalent for this mode
+        }
+        if ( modes[0] != '\0' ) {
+            strncat(modes, PARAM_SEP, 1);
+        }
+        strncat(modes, name, MAX_PROP_NAME_LENGTH);
+    }
+
+    if ( modes[0] == '\0' ) {
+        strncpy(modes, DEFAULT_FLASH_MODE, MAX_PROP_NAME_LENGTH);
+    }
+
+    params->set(CameraProperties::SUPPORTED_FLASH_MODES, modes);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertSceneModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Translate each OMX scene mode into its HAL string and publish the
+    // comma separated list.
+    char modes[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(modes, '\0', MAX_PROP_VALUE_LENGTH);
+
+    for ( unsigned int i = 0; i < caps.ulSceneCount; ++i ) {
+        const char *name = getLUTvalue_OMXtoHAL(caps.eSceneModes[i], SceneLUT);
+        if ( name == NULL ) {
+            continue; // no HAL equivalent for this mode
+        }
+        if ( modes[0] != '\0' ) {
+            strncat(modes, PARAM_SEP, 1);
+        }
+        strncat(modes, name, MAX_PROP_NAME_LENGTH);
+    }
+
+    params->set(CameraProperties::SUPPORTED_SCENE_MODES, modes);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Gather every HAL focus mode mapped from the OMX focus controls.
+    char modes[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(modes, '\0', MAX_PROP_VALUE_LENGTH);
+
+    for ( unsigned int i = 0; i < caps.ulFocusModeCount; ++i ) {
+        // one OMX control can map to several HAL modes, hence "multiple"
+        getMultipleLUTvalue_OMXtoHAL(caps.eFocusModes[i], FocusLUT, modes);
+    }
+
+    // A fixed-focus sensor reports a single "off" control; advertise it to
+    // applications as the standard "infinity" focus mode instead.
+    const bool fixedFocus = ( caps.ulFocusModeCount == 1 ) &&
+                            ( caps.eFocusModes[0] == OMX_IMAGE_FocusControlOff );
+    if ( fixedFocus ) {
+        if ( modes[0] != '\0' ) {
+            strncat(modes, PARAM_SEP, 1);
+        }
+        strncat(modes, android::CameraParameters::FOCUS_MODE_INFINITY, MAX_PROP_NAME_LENGTH);
+    }
+
+    params->set(CameraProperties::SUPPORTED_FOCUS_MODES, modes);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertFlickerModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Translate each OMX flicker (antibanding) mode into its HAL string and
+    // publish the comma separated list.
+    char modes[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(modes, '\0', MAX_PROP_VALUE_LENGTH);
+
+    for ( unsigned int i = 0; i < caps.ulFlickerCount; ++i ) {
+        const char *name = getLUTvalue_OMXtoHAL(caps.eFlicker[i], FlickerLUT);
+        if ( name == NULL ) {
+            continue; // no HAL equivalent for this mode
+        }
+        if ( modes[0] != '\0' ) {
+            strncat(modes, PARAM_SEP, 1);
+        }
+        strncat(modes, name, MAX_PROP_NAME_LENGTH);
+    }
+
+    params->set(CameraProperties::SUPPORTED_ANTIBANDING, modes);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Publish the maximum number of focus and metering (exposure) areas the
+    // 3A algorithms can track simultaneously.
+    // Fixes: removed unused local 'p'; replaced unbounded sprintf with
+    // snprintf and cast the OMX_U32 counts to int to match the "%d" format.
+    status_t ret = NO_ERROR;
+    char supported[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+    snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.ulAlgoAreasFocusCount);
+    params->set(CameraProperties::MAX_FOCUS_AREAS, supported);
+    CAMHAL_LOGDB("Maximum supported focus areas %s", supported);
+
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+    snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.ulAlgoAreasExposureCount);
+    params->set(CameraProperties::MAX_NUM_METERING_AREAS, supported);
+    CAMHAL_LOGDB("Maximum supported exposure areas %s", supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+status_t OMXCameraAdapter::insertVNFSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+    // Report whether the component implements video noise filtering.
+    LOG_FUNCTION_NAME;
+
+    params->set(CameraProperties::VNF_SUPPORTED,
+                ( OMX_TRUE == caps.bVideoNoiseFilterSupported ) ?
+                android::CameraParameters::TRUE : android::CameraParameters::FALSE);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertVSTABSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+    // Report whether the component implements video stabilization.
+    LOG_FUNCTION_NAME;
+
+    params->set(CameraProperties::VSTAB_SUPPORTED,
+                ( OMX_TRUE == caps.bVideoStabilizationSupported ) ?
+                android::CameraParameters::TRUE : android::CameraParameters::FALSE);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertLocks(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Advertise whether auto-exposure and auto-white-balance locks are
+    // available on this sensor.
+    LOG_FUNCTION_NAME
+
+    params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED,
+                caps.bAELockSupported ? android::CameraParameters::TRUE
+                                      : android::CameraParameters::FALSE);
+
+    params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED,
+                caps.bAWBLockSupported ? android::CameraParameters::TRUE
+                                       : android::CameraParameters::FALSE);
+
+    LOG_FUNCTION_NAME_EXIT
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertSenMount(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Publish sensor identity (name + numeric id) and mounting rotation.
+    const char *name = "UNKNOWN_SENSOR";
+    char buffer[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(buffer, '\0', sizeof(buffer));
+
+    // 1) Look up the sensor name from its numeric id
+    for ( unsigned int i = 0; i < ARRAY_SIZE(mSensorNames); ++i ) {
+        if ( mSensorNames[i].num == caps.tSenMounting.nSenId ) {
+            name = mSensorNames[i].param;
+            break;
+        }
+    }
+    strncat(buffer, name, REMAINING_BYTES(buffer));
+    params->set(CameraProperties::CAMERA_NAME, buffer);
+    params->set(CameraProperties::CAMERA_SENSOR_ID, caps.tSenMounting.nSenId);
+
+    // 2) Assign mounting rotation
+    params->set(CameraProperties::ORIENTATION_INDEX, caps.tSenMounting.nRotation);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertRaw(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+
+    // Publish the dimensions of RAW frames. When mechanical misalignment is
+    // supported (used here as the stereo/3D sensor marker, as elsewhere in
+    // this file) the exported RAW height is doubled.
+    // Fixes: removed unused local 'i'; replaced unbounded sprintf with
+    // snprintf.
+    status_t ret = NO_ERROR;
+    char supported[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(supported, '\0', sizeof(supported));
+    snprintf(supported, sizeof(supported), "%d", int(caps.uSenNativeResWidth));
+    params->set(CameraProperties::RAW_WIDTH, supported);
+
+    memset(supported, '\0', sizeof(supported));
+    if (caps.bMechanicalMisalignmentSupported) {
+        snprintf(supported, sizeof(supported), "%d", int(caps.uSenNativeResHeight) * 2);
+    } else {
+        snprintf(supported, sizeof(supported), "%d", int(caps.uSenNativeResHeight));
+    }
+    params->set(CameraProperties::RAW_HEIGHT, supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+status_t OMXCameraAdapter::insertFacing(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Map the OMX sensor facing enum to the HAL facing string.
+    const char *facing = "UNKNOWN_FACING";
+    char buffer[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    memset(buffer, '\0', sizeof(buffer));
+
+    for ( unsigned int i = 0; i < ARRAY_SIZE(mFacing); ++i ) {
+        if ( (OMX_TI_SENFACING_TYPE)mFacing[i].num == caps.tSenMounting.eFacing ) {
+            facing = mFacing[i].param;
+            break;
+        }
+    }
+    strncat(buffer, facing, REMAINING_BYTES(buffer));
+    params->set(CameraProperties::FACING_INDEX, buffer);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertFocalLength(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Publish the focal length in millimeters. The OMX component reports it
+    // in 1/100 mm units, so it is formatted as a fixed-point decimal string.
+    // Fix: the fractional part must be zero-padded to two digits - the old
+    // "%d" formatting turned e.g. 305 (3.05 mm) into "3.5".
+    status_t ret = NO_ERROR;
+    char supported[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    snprintf(supported, sizeof(supported), "%d.%02d",
+             (int) (caps.nFocalLength / 100), (int) (caps.nFocalLength % 100));
+
+    params->set(CameraProperties::FOCAL_LENGTH, supported);
+
+    LOG_FUNCTION_NAME_EXIT
+
+    return ret;
+}
+
+status_t OMXCameraAdapter::insertAutoConvergenceModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Publish the supported stereo auto-convergence modes, translated from
+    // the OMX enums via the lookup table.
+    // Fix: removed the unused outer 'i' declaration that was shadowed by
+    // the loop variable.
+    status_t ret = NO_ERROR;
+    char supported[MAX_PROP_VALUE_LENGTH];
+    const char *p;
+
+    LOG_FUNCTION_NAME;
+
+    memset(supported, '\0', sizeof(supported));
+
+    for ( unsigned int i = 0 ; i < caps.ulAutoConvModesCount; i++ ) {
+        p = getLUTvalue_OMXtoHAL(caps.eAutoConvModes[i], mAutoConvergenceLUT);
+        if ( NULL != p ) {
+            if (supported[0] != '\0') {
+                strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+            }
+            strncat(supported, p, REMAINING_BYTES(supported));
+        }
+    }
+    params->set(CameraProperties::AUTOCONVERGENCE_MODE_VALUES, supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+status_t OMXCameraAdapter::insertManualConvergenceRange(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Publish the manual convergence range for stereo capture. The step is
+    // exported as 1 when the range is non-degenerate, 0 otherwise.
+    char value[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    snprintf(value, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.nManualConvMin);
+    params->set(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN, value);
+
+    snprintf(value, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.nManualConvMax);
+    params->set(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX, value);
+
+    snprintf(value, MAX_PROP_VALUE_LENGTH, "%d", (int) (caps.nManualConvMax != caps.nManualConvMin));
+    params->set(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_STEP, value);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertMechanicalMisalignmentCorrection(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Report whether mechanical misalignment correction is available.
+    LOG_FUNCTION_NAME;
+
+    if ( caps.bMechanicalMisalignmentSupported == OMX_TRUE ) {
+        params->set(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED,
+                    android::CameraParameters::TRUE);
+    } else {
+        params->set(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED,
+                    android::CameraParameters::FALSE);
+    }
+
+    return OK;
+}
+
+status_t OMXCameraAdapter::insertCaptureModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Build the list of capture modes. Stereo (3D) sensors - detected via
+    // mechanical misalignment support, as elsewhere in this file - only get
+    // the basic modes; 2D sensors additionally get the performance/ZSL/
+    // bracketing modes plus any build-configured extras.
+    char modes[MAX_PROP_VALUE_LENGTH];
+    const char *entry;
+
+    LOG_FUNCTION_NAME;
+
+    memset(modes, '\0', sizeof(modes));
+
+    // common to both 2D and 3D sensors
+    strncat(modes, TICameraParameters::HIGH_QUALITY_MODE, REMAINING_BYTES(modes));
+    strncat(modes, PARAM_SEP, REMAINING_BYTES(modes));
+    strncat(modes, TICameraParameters::VIDEO_MODE, REMAINING_BYTES(modes));
+
+    if (!caps.bMechanicalMisalignmentSupported) {
+        // 2D sensor: the full set of capture modes is available
+        strncat(modes, PARAM_SEP, REMAINING_BYTES(modes));
+        strncat(modes, TICameraParameters::HIGH_PERFORMANCE_MODE, REMAINING_BYTES(modes));
+        strncat(modes, PARAM_SEP, REMAINING_BYTES(modes));
+        strncat(modes, TICameraParameters::HIGH_QUALITY_ZSL_MODE, REMAINING_BYTES(modes));
+#ifdef OMAP_ENHANCEMENT_CPCAM
+        strncat(modes, PARAM_SEP, REMAINING_BYTES(modes));
+        strncat(modes, TICameraParameters::CP_CAM_MODE, REMAINING_BYTES(modes));
+#endif
+#ifdef CAMERAHAL_OMAP5_CAPTURE_MODES
+        strncat(modes, PARAM_SEP, REMAINING_BYTES(modes));
+        strncat(modes, TICameraParameters::VIDEO_MODE_HQ, REMAINING_BYTES(modes));
+#endif
+        strncat(modes, PARAM_SEP, REMAINING_BYTES(modes));
+        strncat(modes, TICameraParameters::ZOOM_BRACKETING, REMAINING_BYTES(modes));
+    }
+
+    // append any bracketing modes the component reports
+    for ( unsigned int i = 0 ; i < caps.ulBracketingModesCount; i++ ) {
+        entry = getLUTvalue_OMXtoHAL(caps.eBracketingModes[i], mBracketingModesLUT);
+        if ( NULL != entry ) {
+            if (modes[0] != '\0') {
+                strncat(modes, PARAM_SEP, REMAINING_BYTES(modes));
+            }
+            strncat(modes, entry, REMAINING_BYTES(modes));
+        }
+    }
+
+    params->set(CameraProperties::CAP_MODE_VALUES, modes);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertLayout(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Publish the supported stereo frame layouts, separately for the
+    // preview and capture streams, translated via the layout lookup table.
+    // Fix: removed the unused outer 'i' declaration that was shadowed by
+    // both loop variables.
+    status_t ret = NO_ERROR;
+    char supported[MAX_PROP_VALUE_LENGTH];
+    const char *p;
+
+    LOG_FUNCTION_NAME;
+
+    // preview frame layouts
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+    for ( unsigned int i = 0 ; i < caps.ulPrvFrameLayoutCount; i++ ) {
+        p = getLUTvalue_OMXtoHAL(caps.ePrvFrameLayout[i], mLayoutLUT);
+        if ( NULL != p ) {
+            if (supported[0] != '\0') {
+                strncat(supported, PARAM_SEP, 1);
+            }
+            strncat(supported, p, MAX_PROP_NAME_LENGTH);
+        }
+    }
+    params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES, supported);
+
+    // capture frame layouts
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+    for ( unsigned int i = 0 ; i < caps.ulCapFrameLayoutCount; i++ ) {
+        p = getLUTvalue_OMXtoHAL(caps.eCapFrameLayout[i], mLayoutLUT);
+        if ( NULL != p ) {
+            if (supported[0] != '\0') {
+                strncat(supported, PARAM_SEP, 1);
+            }
+            strncat(supported, p, MAX_PROP_NAME_LENGTH);
+        }
+    }
+    params->set(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES, supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+status_t OMXCameraAdapter::insertVideoSnapshotSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Report whether still captures can be taken while video is recording.
+    LOG_FUNCTION_NAME;
+
+    params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED,
+                caps.bStillCapDuringVideoSupported ?
+                android::CameraParameters::TRUE : android::CameraParameters::FALSE);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertGBCESupported(CameraProperties::Properties* params,
+                                               const OMX_TI_CAPTYPE &caps)
+{
+    // Report GBCE (global brightness/contrast enhancement) availability.
+    LOG_FUNCTION_NAME;
+
+    params->set(CameraProperties::SUPPORTED_GBCE,
+                caps.bGbceSupported ? android::CameraParameters::TRUE
+                                    : android::CameraParameters::FALSE);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertGLBCESupported(CameraProperties::Properties* params,
+                                                const OMX_TI_CAPTYPE &caps)
+{
+    // Report GLBCE (local brightness/contrast enhancement) availability.
+    LOG_FUNCTION_NAME;
+
+    params->set(CameraProperties::SUPPORTED_GLBCE,
+                caps.bGlbceSupported ? android::CameraParameters::TRUE
+                                     : android::CameraParameters::FALSE);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Choose default values for every camera parameter. This must run after
+    // all other inserters: several defaults below are validated against the
+    // supported-value lists those inserters published.
+    status_t ret = NO_ERROR;
+    char *pos, *str, *def;
+    char temp[MAX_PROP_VALUE_LENGTH];
+
+    LOG_FUNCTION_NAME;
+
+    // NOTE(review): the params->get() results below feed strstr()/strncpy()
+    // unchecked; a missing *_VALUES property would dereference NULL -
+    // confirm insertLayout()/insertFramerates() always ran first.
+    /* If default is supported - set it, else - set first supported */
+    if (strstr(params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES), DEFAULT_S3D_PREVIEW_LAYOUT)) {
+        strncpy(temp, DEFAULT_S3D_PREVIEW_LAYOUT, MAX_PROP_VALUE_LENGTH - 1);
+    } else {
+        strncpy(temp, params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES),
+                MAX_PROP_VALUE_LENGTH - 1);
+        // keep only the first entry of the comma separated list
+        if ((pos = strstr(temp, PARAM_SEP))) {
+            *pos = '\0';
+        }
+    }
+    params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT, temp);
+
+    // The chosen preview layout selects which preview size list applies
+    if (!strcmp(TICameraParameters::S3D_TB_FULL, temp)) {
+        params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, params->get(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+    } else if (!strcmp(TICameraParameters::S3D_SS_FULL, temp)) {
+        params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, params->get(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+    } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, temp))
+            || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, temp))) {
+        params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, params->get(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+    }
+
+    /* If default is supported - set it, else - set first supported */
+    if (strstr(params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES), DEFAULT_S3D_PICTURE_LAYOUT)) {
+        strncpy(temp, DEFAULT_S3D_PICTURE_LAYOUT, MAX_PROP_VALUE_LENGTH - 1);
+    } else {
+        strncpy(temp, params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES),
+                MAX_PROP_VALUE_LENGTH - 1);
+        // keep only the first entry of the comma separated list
+        if ((pos = strstr(temp, PARAM_SEP))) {
+            *pos = '\0';
+        }
+    }
+    params->set(CameraProperties::S3D_CAP_FRAME_LAYOUT, temp);
+
+    // The chosen capture layout selects which picture size list applies
+    if (!strcmp(TICameraParameters::S3D_TB_FULL, temp)) {
+        params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, params->get(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
+    } else if (!strcmp(TICameraParameters::S3D_SS_FULL, temp)) {
+        params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, params->get(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
+    } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, temp))
+            || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, temp))) {
+        params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, params->get(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
+    }
+
+    params->set(CameraProperties::ANTIBANDING, DEFAULT_ANTIBANDING);
+    params->set(CameraProperties::BRIGHTNESS, DEFAULT_BRIGHTNESS);
+    params->set(CameraProperties::CONTRAST, DEFAULT_CONTRAST);
+    params->set(CameraProperties::EFFECT, DEFAULT_EFFECT);
+    params->set(CameraProperties::EV_COMPENSATION, DEFAULT_EV_COMPENSATION);
+    params->set(CameraProperties::SUPPORTED_EV_STEP, DEFAULT_EV_STEP);
+    params->set(CameraProperties::EXPOSURE_MODE, DEFAULT_EXPOSURE_MODE);
+    params->set(CameraProperties::FLASH_MODE, DEFAULT_FLASH_MODE);
+    // Use the preferred focus mode only if the sensor actually supports it
+    pos = strstr(params->get(CameraProperties::SUPPORTED_FOCUS_MODES), DEFAULT_FOCUS_MODE_PREFERRED);
+    if ( NULL != pos )
+        {
+        params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE_PREFERRED);
+        }
+    else
+        {
+        params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE);
+        }
+    params->set(CameraProperties::IPP, DEFAULT_IPP);
+    params->set(CameraProperties::GBCE, android::CameraParameters::FALSE);
+    params->set(CameraProperties::GLBCE, android::CameraParameters::FALSE);
+    params->set(CameraProperties::ISO_MODE, DEFAULT_ISO_MODE);
+    params->set(CameraProperties::JPEG_QUALITY, DEFAULT_JPEG_QUALITY);
+    params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, DEFAULT_THUMBNAIL_QUALITY);
+    params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, DEFAULT_THUMBNAIL_SIZE);
+    params->set(CameraProperties::PICTURE_FORMAT, DEFAULT_PICTURE_FORMAT);
+
+    // Default picture size depends on the capture layout chosen above
+    if (!strcmp(params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT),
+                TICameraParameters::S3D_TB_FULL)) {
+        params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_TB_SIZE);
+    } else if (!strcmp(params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT),
+                TICameraParameters::S3D_SS_FULL)) {
+        params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SS_SIZE);
+    } else {
+        params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+    }
+
+    // Default preview size depends on the preview layout chosen above
+    if (!strcmp(params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT),
+                TICameraParameters::S3D_TB_FULL)) {
+        params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_TB_SIZE);
+    } else if (!strcmp(params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT),
+                TICameraParameters::S3D_SS_FULL)) {
+        params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SS_SIZE);
+    } else {
+        params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+    }
+
+    params->set(CameraProperties::PREVIEW_FORMAT, DEFAULT_PREVIEW_FORMAT);
+
+    /* Set default value if supported, otherwise set max supported value */
+    // Walk the comma separated framerate list: stop at DEFAULT_FRAMERATE if
+    // present, otherwise keep the numerically largest entry.
+    strncpy(temp, params->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES),
+            MAX_PROP_VALUE_LENGTH - 1);
+    def = str = temp;
+    while (1) {
+        if ((pos = strstr(str, PARAM_SEP))) {
+            *pos = '\0';
+        }
+        if (!strcmp(str, DEFAULT_FRAMERATE)) {
+            def = str;
+            break;
+        }
+        if (atoi(str) > atoi(def)) {
+            def = str;
+        }
+        if (pos == NULL) {
+            break;
+        }
+        str = pos + strlen(PARAM_SEP);
+    }
+    params->set(CameraProperties::PREVIEW_FRAME_RATE, def);
+
+    params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS);
+    params->set(CameraProperties::REQUIRED_IMAGE_BUFS, DEFAULT_NUM_PIC_BUFS);
+    params->set(CameraProperties::SATURATION, DEFAULT_SATURATION);
+    params->set(CameraProperties::SCENE_MODE, DEFAULT_SCENE_MODE);
+    params->set(CameraProperties::SHARPNESS, DEFAULT_SHARPNESS);
+    params->set(CameraProperties::VSTAB, DEFAULT_VSTAB);
+    params->set(CameraProperties::VNF, DEFAULT_VNF);
+    params->set(CameraProperties::WHITEBALANCE, DEFAULT_WB);
+    params->set(CameraProperties::ZOOM, DEFAULT_ZOOM);
+    params->set(CameraProperties::MAX_FD_HW_FACES, DEFAULT_MAX_FD_HW_FACES);
+    params->set(CameraProperties::MAX_FD_SW_FACES, DEFAULT_MAX_FD_SW_FACES);
+    params->set(CameraProperties::AUTO_EXPOSURE_LOCK, DEFAULT_AE_LOCK);
+    params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK, DEFAULT_AWB_LOCK);
+    params->set(CameraProperties::HOR_ANGLE, DEFAULT_HOR_ANGLE);
+    params->set(CameraProperties::VER_ANGLE, DEFAULT_VER_ANGLE);
+    params->set(CameraProperties::VIDEO_SIZE, DEFAULT_VIDEO_SIZE);
+    params->set(CameraProperties::SENSOR_ORIENTATION, DEFAULT_SENSOR_ORIENTATION);
+    params->set(CameraProperties::AUTOCONVERGENCE_MODE, DEFAULT_AUTOCONVERGENCE_MODE);
+    params->set(CameraProperties::MANUAL_CONVERGENCE, DEFAULT_MANUAL_CONVERGENCE);
+    params->set(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION, DEFAULT_MECHANICAL_MISALIGNMENT_CORRECTION_MODE);
+
+    // EXIF make/model come from build properties, capitalized
+    char property[PROPERTY_VALUE_MAX];
+    property_get("ro.product.manufacturer",
+                 property,
+                 DEFAULT_EXIF_MAKE);
+    property[0] = toupper(property[0]);
+    params->set(CameraProperties::EXIF_MAKE, property);
+    property_get("ro.product.model",
+                 property,
+                 DEFAULT_EXIF_MODEL);
+    property[0] = toupper(property[0]);
+    params->set(CameraProperties::EXIF_MODEL, property);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+    // Populate every capability-derived property. Each inserter runs only
+    // if all previous ones succeeded, so the first failure aborts the chain
+    // and its status code is returned.
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    if ( NO_ERROR == ret ) ret = insertImageSizes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertPreviewSizes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertThumbSizes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertZoomStages(params, caps);
+    if ( NO_ERROR == ret ) ret = insertImageFormats(params, caps);
+    if ( NO_ERROR == ret ) ret = insertPreviewFormats(params, caps);
+    if ( NO_ERROR == ret ) ret = insertFramerates(params, caps);
+    if ( NO_ERROR == ret ) ret = insertEVs(params, caps);
+    if ( NO_ERROR == ret ) ret = insertISOModes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertIPPModes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertWBModes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertEffects(params, caps);
+    if ( NO_ERROR == ret ) ret = insertExpModes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertManualExpRanges(params, caps);
+    if ( NO_ERROR == ret ) ret = insertFlashModes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertSceneModes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertFocusModes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertFlickerModes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertSenMount(params, caps);
+    if ( NO_ERROR == ret ) ret = insertLocks(params, caps);
+    if ( NO_ERROR == ret ) ret = insertAreas(params, caps);
+    if ( NO_ERROR == ret ) ret = insertFacing(params, caps);
+    if ( NO_ERROR == ret ) ret = insertFocalLength(params, caps);
+    if ( NO_ERROR == ret ) ret = insertAutoConvergenceModes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertManualConvergenceRange(params, caps);
+    if ( NO_ERROR == ret ) ret = insertMechanicalMisalignmentCorrection(params, caps);
+    if ( NO_ERROR == ret ) ret = insertRaw(params, caps);
+    if ( NO_ERROR == ret ) ret = insertCaptureModes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertLayout(params, caps);
+    if ( NO_ERROR == ret ) ret = insertVideoSnapshotSupported(params, caps);
+    if ( NO_ERROR == ret ) ret = insertVSTABSupported(params, caps);
+    if ( NO_ERROR == ret ) ret = insertVNFSupported(params, caps);
+    if ( NO_ERROR == ret ) ret = insertVideoSizes(params, caps);
+    if ( NO_ERROR == ret ) ret = insertGBCESupported(params, caps);
+    if ( NO_ERROR == ret ) ret = insertGLBCESupported(params, caps);
+
+    //NOTE: Ensure that we always call insertDefaults after inserting the supported capabilities
+    //as there are checks inside insertDefaults to make sure a certain default is supported
+    // or not
+    if ( NO_ERROR == ret ) ret = insertDefaults(params, caps);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+
+bool OMXCameraAdapter::_checkOmxTiCap(const OMX_TI_CAPTYPE & caps)
+{
+// Sanity-check every <count, array> pair in OMX_TI_CAPTYPE. The insert*
+// helpers index each array up to its count, so a count that is negative or
+// larger than the array would cause out-of-bounds reads. Returns false on
+// the first invalid pair (the macro expands to an early return).
+#define CAMHAL_CHECK_OMX_TI_CAP(countVar, arrayVar) \
+    do { \
+        const int count = static_cast<int>(caps.countVar); \
+        const int maxSize = CAMHAL_SIZE_OF_ARRAY(caps.arrayVar); \
+        if ( count < 0 || count > maxSize ) \
+        { \
+            CAMHAL_LOGE("OMX_TI_CAPTYPE verification failed"); \
+            CAMHAL_LOGE("   variable: OMX_TI_CAPTYPE::" #countVar \
+                    ", value: %d, max allowed: %d", \
+                    count, maxSize); \
+            return false; \
+        } \
+    } while (0)
+
+    CAMHAL_CHECK_OMX_TI_CAP(ulPreviewFormatCount, ePreviewFormats);
+    CAMHAL_CHECK_OMX_TI_CAP(ulImageFormatCount, eImageFormats);
+    CAMHAL_CHECK_OMX_TI_CAP(ulWhiteBalanceCount, eWhiteBalanceModes);
+    CAMHAL_CHECK_OMX_TI_CAP(ulColorEffectCount, eColorEffects);
+    CAMHAL_CHECK_OMX_TI_CAP(ulFlickerCount, eFlicker);
+    CAMHAL_CHECK_OMX_TI_CAP(ulExposureModeCount, eExposureModes);
+    CAMHAL_CHECK_OMX_TI_CAP(ulFocusModeCount, eFocusModes);
+    CAMHAL_CHECK_OMX_TI_CAP(ulSceneCount, eSceneModes);
+    CAMHAL_CHECK_OMX_TI_CAP(ulFlashCount, eFlashModes);
+    CAMHAL_CHECK_OMX_TI_CAP(ulPrvVarFPSModesCount, tPrvVarFPSModes);
+    CAMHAL_CHECK_OMX_TI_CAP(ulCapVarFPSModesCount, tCapVarFPSModes);
+    CAMHAL_CHECK_OMX_TI_CAP(ulAutoConvModesCount, eAutoConvModes);
+    CAMHAL_CHECK_OMX_TI_CAP(ulBracketingModesCount, eBracketingModes);
+    CAMHAL_CHECK_OMX_TI_CAP(ulImageCodingFormatCount, eImageCodingFormat);
+    CAMHAL_CHECK_OMX_TI_CAP(ulPrvFrameLayoutCount, ePrvFrameLayout);
+    CAMHAL_CHECK_OMX_TI_CAP(ulCapFrameLayoutCount, eCapFrameLayout);
+
+#undef CAMHAL_CHECK_OMX_TI_CAP
+
+    // all counts are within their array bounds
+    return true;
+}
+
+
/**
 * Logs a human-readable dump of an OMX_TI_CAPTYPE capability structure.
 *
 * The structure is validated with _checkOmxTiCap() first; nothing is
 * printed for a structure whose count fields are out of range.
 *
 * @param sensorId id of the sensor the capabilities belong to (used only
 *                 as a label in the log output)
 * @param caps     capability structure to dump
 * @return true when the dump was printed, false when validation failed
 */
bool OMXCameraAdapter::_dumpOmxTiCap(const int sensorId, const OMX_TI_CAPTYPE & caps)
{
    if ( !_checkOmxTiCap(caps) )
    {
        CAMHAL_LOGE("OMX_TI_CAPTYPE structure is invalid");
        return false;
    }

    CAMHAL_LOGD("===================================================");
    CAMHAL_LOGD("---- Dumping OMX capabilities for sensor id: %d ----", sensorId);

    // Supported preview and still image pixel formats.
    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulPreviewFormatCount = %d", int(caps.ulPreviewFormatCount));
    for ( int i = 0; i < int(caps.ulPreviewFormatCount); ++i )
        CAMHAL_LOGD("  ePreviewFormats[%2d] = %d", i, int(caps.ePreviewFormats[i]));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulImageFormatCount = %d", int(caps.ulImageFormatCount));
    for ( int i = 0; i < int(caps.ulImageFormatCount); ++i )
        CAMHAL_LOGD("  eImageFormats[%2d] = %d", i, int(caps.eImageFormats[i]));

    // Resolution ranges: preview, rotated preview, still image, thumbnail.
    CAMHAL_LOGD("");
    CAMHAL_LOGD("tPreviewResRange.nWidthMin = %d", int(caps.tPreviewResRange.nWidthMin));
    CAMHAL_LOGD("tPreviewResRange.nHeightMin = %d", int(caps.tPreviewResRange.nHeightMin));
    CAMHAL_LOGD("tPreviewResRange.nWidthMax = %d", int(caps.tPreviewResRange.nWidthMax));
    CAMHAL_LOGD("tPreviewResRange.nHeightMax = %d", int(caps.tPreviewResRange.nHeightMax));
    CAMHAL_LOGD("tPreviewResRange.nMaxResInPixels = %d", int(caps.tPreviewResRange.nMaxResInPixels));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("tRotatedPreviewResRange.nWidthMin = %d", int(caps.tRotatedPreviewResRange.nWidthMin));
    CAMHAL_LOGD("tRotatedPreviewResRange.nHeightMin = %d", int(caps.tRotatedPreviewResRange.nHeightMin));
    CAMHAL_LOGD("tRotatedPreviewResRange.nWidthMax = %d", int(caps.tRotatedPreviewResRange.nWidthMax));
    CAMHAL_LOGD("tRotatedPreviewResRange.nHeightMax = %d", int(caps.tRotatedPreviewResRange.nHeightMax));
    CAMHAL_LOGD("tRotatedPreviewResRange.nMaxResInPixels = %d", int(caps.tRotatedPreviewResRange.nMaxResInPixels));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("tImageResRange.nWidthMin = %d", int(caps.tImageResRange.nWidthMin));
    CAMHAL_LOGD("tImageResRange.nHeightMin = %d", int(caps.tImageResRange.nHeightMin));
    CAMHAL_LOGD("tImageResRange.nWidthMax = %d", int(caps.tImageResRange.nWidthMax));
    CAMHAL_LOGD("tImageResRange.nHeightMax = %d", int(caps.tImageResRange.nHeightMax));
    CAMHAL_LOGD("tImageResRange.nMaxResInPixels = %d", int(caps.tImageResRange.nMaxResInPixels));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("tThumbResRange.nWidthMin = %d", int(caps.tThumbResRange.nWidthMin));
    CAMHAL_LOGD("tThumbResRange.nHeightMin = %d", int(caps.tThumbResRange.nHeightMin));
    CAMHAL_LOGD("tThumbResRange.nWidthMax = %d", int(caps.tThumbResRange.nWidthMax));
    CAMHAL_LOGD("tThumbResRange.nHeightMax = %d", int(caps.tThumbResRange.nHeightMax));
    CAMHAL_LOGD("tThumbResRange.nMaxResInPixels = %d", int(caps.tThumbResRange.nMaxResInPixels));

    // 3A-related capabilities: white balance, effects, flicker, exposure.
    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulWhiteBalanceCount = %d", int(caps.ulWhiteBalanceCount));
    for ( int i = 0; i < int(caps.ulWhiteBalanceCount); ++i )
        CAMHAL_LOGD("  eWhiteBalanceModes[%2d] = 0x%08x", i, int(caps.eWhiteBalanceModes[i]));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulColorEffectCount = %d", int(caps.ulColorEffectCount));
    for ( int i = 0; i < int(caps.ulColorEffectCount); ++i )
        CAMHAL_LOGD("  eColorEffects[%2d] = 0x%08x", i, int(caps.eColorEffects[i]));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("xMaxWidthZoom = %d", int(caps.xMaxWidthZoom));
    CAMHAL_LOGD("xMaxHeightZoom = %d", int(caps.xMaxHeightZoom));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulFlickerCount = %d", int(caps.ulFlickerCount));
    for ( int i = 0; i < int(caps.ulFlickerCount); ++i )
        CAMHAL_LOGD("  eFlicker[%2d] = %d", i, int(caps.eFlicker[i]));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulExposureModeCount = %d", int(caps.ulExposureModeCount));
    for ( int i = 0; i < int(caps.ulExposureModeCount); ++i )
        CAMHAL_LOGD("  eExposureModes[%2d] = 0x%08x", i, int(caps.eExposureModes[i]));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("bLensDistortionCorrectionSupported = %d", int(caps.bLensDistortionCorrectionSupported));
    CAMHAL_LOGD("bISONoiseFilterSupported = %d", int(caps.bISONoiseFilterSupported));
    CAMHAL_LOGD("xEVCompensationMin = %d", int(caps.xEVCompensationMin));
    CAMHAL_LOGD("xEVCompensationMax = %d", int(caps.xEVCompensationMax));
    CAMHAL_LOGD("nSensitivityMax = %d", int(caps.nSensitivityMax));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulFocusModeCount = %d", int(caps.ulFocusModeCount));
    for ( int i = 0; i < int(caps.ulFocusModeCount); ++i )
        CAMHAL_LOGD("  eFocusModes[%2d] = 0x%08x", i, int(caps.eFocusModes[i]));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulSceneCount = %d", int(caps.ulSceneCount));
    for ( int i = 0; i < int(caps.ulSceneCount); ++i )
        CAMHAL_LOGD("  eSceneModes[%2d] = %d", i, int(caps.eSceneModes[i]));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulFlashCount = %d", int(caps.ulFlashCount));
    for ( int i = 0; i < int(caps.ulFlashCount); ++i )
        CAMHAL_LOGD("  eFlashModes[%2d] = %d", i, int(caps.eFlashModes[i]));

    CAMHAL_LOGD("xFramerateMin = %d", int(caps.xFramerateMin));
    CAMHAL_LOGD("xFramerateMax = %d", int(caps.xFramerateMax));
    CAMHAL_LOGD("bContrastSupported = %d", int(caps.bContrastSupported));
    CAMHAL_LOGD("bSaturationSupported = %d", int(caps.bSaturationSupported));
    CAMHAL_LOGD("bBrightnessSupported = %d", int(caps.bBrightnessSupported));
    CAMHAL_LOGD("bProcessingLevelSupported = %d", int(caps.bProcessingLevelSupported));
    CAMHAL_LOGD("bQFactorSupported = %d", int(caps.bQFactorSupported));

    // Variable frame-rate ranges for preview and capture.
    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulPrvVarFPSModesCount = %d", int(caps.ulPrvVarFPSModesCount));
    for ( int i = 0; i < int(caps.ulPrvVarFPSModesCount); ++i )
    {
        CAMHAL_LOGD("  tPrvVarFPSModes[%d].nVarFPSMin = %d", i, int(caps.tPrvVarFPSModes[i].nVarFPSMin));
        CAMHAL_LOGD("  tPrvVarFPSModes[%d].nVarFPSMax = %d", i, int(caps.tPrvVarFPSModes[i].nVarFPSMax));
    }

    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulCapVarFPSModesCount = %d", int(caps.ulCapVarFPSModesCount));
    for ( int i = 0; i < int(caps.ulCapVarFPSModesCount); ++i )
    {
        CAMHAL_LOGD("  tCapVarFPSModes[%d].nVarFPSMin = %d", i, int(caps.tCapVarFPSModes[i].nVarFPSMin));
        CAMHAL_LOGD("  tCapVarFPSModes[%d].nVarFPSMax = %d", i, int(caps.tCapVarFPSModes[i].nVarFPSMax));
    }

    // Physical sensor mounting information (rotation, mirroring, facing).
    CAMHAL_LOGD("");
    CAMHAL_LOGD("tSenMounting.nSenId = %d", int(caps.tSenMounting.nSenId));
    CAMHAL_LOGD("tSenMounting.nRotation = %d", int(caps.tSenMounting.nRotation));
    CAMHAL_LOGD("tSenMounting.bMirror = %d", int(caps.tSenMounting.bMirror));
    CAMHAL_LOGD("tSenMounting.bFlip = %d", int(caps.tSenMounting.bFlip));
    CAMHAL_LOGD("tSenMounting.eFacing = %d", int(caps.tSenMounting.eFacing));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulAutoConvModesCount = %d", int(caps.ulAutoConvModesCount));
    for ( int i = 0; i < int(caps.ulAutoConvModesCount); ++i )
        CAMHAL_LOGD("  eAutoConvModes[%2d] = %d", i, int(caps.eAutoConvModes[i]));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulBracketingModesCount = %d", int(caps.ulBracketingModesCount));
    for ( int i = 0; i < int(caps.ulBracketingModesCount); ++i )
        CAMHAL_LOGD("  eBracketingModes[%2d] = %d", i, int(caps.eBracketingModes[i]));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("bGbceSupported = %d", int(caps.bGbceSupported));
    CAMHAL_LOGD("bRawJpegSupported = %d", int(caps.bRawJpegSupported));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulImageCodingFormatCount = %d", int(caps.ulImageCodingFormatCount));
    for ( int i = 0; i < int(caps.ulImageCodingFormatCount); ++i )
        CAMHAL_LOGD("  eImageCodingFormat[%2d] = %d", i, int(caps.eImageCodingFormat[i]));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("uSenNativeResWidth = %d", int(caps.uSenNativeResWidth));
    CAMHAL_LOGD("uSenNativeResHeight = %d", int(caps.uSenNativeResHeight));
    CAMHAL_LOGD("ulAlgoAreasFocusCount = %d", int(caps.ulAlgoAreasFocusCount));
    CAMHAL_LOGD("ulAlgoAreasExposureCount = %d", int(caps.ulAlgoAreasExposureCount));
    CAMHAL_LOGD("bAELockSupported = %d", int(caps.bAELockSupported));
    CAMHAL_LOGD("bAWBLockSupported = %d", int(caps.bAWBLockSupported));
    CAMHAL_LOGD("bAFLockSupported = %d", int(caps.bAFLockSupported));
    CAMHAL_LOGD("nFocalLength = %d", int(caps.nFocalLength));

    // Stereo (S3D) frame layouts for preview and capture.
    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulPrvFrameLayoutCount = %d", int(caps.ulPrvFrameLayoutCount));
    for ( int i = 0; i < int(caps.ulPrvFrameLayoutCount); ++i )
        CAMHAL_LOGD("  ePrvFrameLayout[%2d] = %d", i, int(caps.ePrvFrameLayout[i]));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("ulCapFrameLayoutCount = %d", int(caps.ulCapFrameLayoutCount));
    for ( int i = 0; i < int(caps.ulCapFrameLayoutCount); ++i )
        CAMHAL_LOGD("  eCapFrameLayout[%2d] = %d", i, int(caps.eCapFrameLayout[i]));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("bVideoNoiseFilterSupported = %d", int(caps.bVideoNoiseFilterSupported ));
    CAMHAL_LOGD("bVideoStabilizationSupported = %d", int(caps.bVideoStabilizationSupported ));
    CAMHAL_LOGD("bStillCapDuringVideoSupported = %d", int(caps.bStillCapDuringVideoSupported ));
    CAMHAL_LOGD("bMechanicalMisalignmentSupported = %d", int(caps.bMechanicalMisalignmentSupported));
    CAMHAL_LOGD("bFacePrioritySupported = %d", int(caps.bFacePrioritySupported ));
    CAMHAL_LOGD("bRegionPrioritySupported = %d", int(caps.bRegionPrioritySupported ));
    CAMHAL_LOGD("bGlbceSupported = %d", int(caps.bGlbceSupported));

    // Manual control ranges (convergence, exposure, image adjustments).
    CAMHAL_LOGD("");
    CAMHAL_LOGD("nManualConvMin = %d", int(caps.nManualConvMin ));
    CAMHAL_LOGD("nManualConvMax = %d", int(caps.nManualConvMax ));
    CAMHAL_LOGD("nManualExpMin = %d", int(caps.nManualExpMin ));
    CAMHAL_LOGD("nManualExpMax = %d", int(caps.nManualExpMax ));
    CAMHAL_LOGD("nBrightnessMin = %d", int(caps.nBrightnessMin ));
    CAMHAL_LOGD("nBrightnessMax = %d", int(caps.nBrightnessMax ));
    CAMHAL_LOGD("nContrastMin = %d", int(caps.nContrastMin ));
    CAMHAL_LOGD("nContrastMax = %d", int(caps.nContrastMax ));
    CAMHAL_LOGD("nSharpnessMin = %d", int(caps.nSharpnessMin ));
    CAMHAL_LOGD("nSharpnessMax = %d", int(caps.nSharpnessMax ));
    CAMHAL_LOGD("nSaturationMin = %d", int(caps.nSaturationMin ));
    CAMHAL_LOGD("nSaturationMax = %d", int(caps.nSaturationMax ));

    CAMHAL_LOGD("");
    CAMHAL_LOGD("------------------- end of dump -------------------");
    CAMHAL_LOGD("===================================================");

    return true;
}
+
/*****************************************
 * publicly exposed function definitions
 *****************************************/
+
+status_t OMXCameraAdapter::getCaps(const int sensorId, CameraProperties::Properties* params, OMX_HANDLETYPE handle)
+{
+ status_t ret = NO_ERROR;
+ int caps_size = 0;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ CameraBuffer *bufferlist;
+ OMX_TI_CAPTYPE* caps;
+ OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
+ MemoryManager memMgr;
+
+ LOG_FUNCTION_NAME;
+
+ ret = memMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
+
+ // allocate tiler (or ion) buffer for caps (size is always a multiple of 4K)
+ caps_size = ((sizeof(OMX_TI_CAPTYPE)+4095)/4096)*4096;
+ bufferlist = memMgr.allocateBufferList(0, 0, NULL, caps_size, 1);
+ caps = (OMX_TI_CAPTYPE*) bufferlist[0].opaque;
+
+ if (!caps) {
+ CAMHAL_LOGEB("Error allocating buffer for caps %d", eError);
+ ret = -ENOMEM;
+ goto EXIT;
+ }
+
+ // initialize structures to be passed to OMX Camera
+ OMX_INIT_STRUCT_PTR (caps, OMX_TI_CAPTYPE);
+ caps->nPortIndex = OMX_ALL;
+
+ OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
+ sharedBuffer.nPortIndex = OMX_ALL;
+ sharedBuffer.nSharedBuffSize = caps_size;
+ sharedBuffer.pSharedBuff = (OMX_U8 *) camera_buffer_get_omx_ptr (&bufferlist[0]);
+
+ // Get capabilities from OMX Camera
+ eError = OMX_GetConfig(handle, (OMX_INDEXTYPE) OMX_TI_IndexConfigCamCapabilities, &sharedBuffer);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error during capabilities query 0x%x", eError);
+ ret = UNKNOWN_ERROR;
+ goto EXIT;
+ } else {
+ CAMHAL_LOGDA("OMX capability query success");
+ }
+
+#ifdef CAMERAHAL_PIRANHA
+ char hwrotation[PROPERTY_VALUE_MAX];
+ if (property_get("ro.sf.hwrotation", hwrotation, 0) > 0) {
+ if (caps->tSenMounting.nSenId == 306) { // front camera
+ caps->tSenMounting.nRotation = atoi(hwrotation);
+ } else { // back camera
+ caps->tSenMounting.nRotation = 360 - atoi(hwrotation);
+ }
+ }
+ // missing camera caps
+ if (caps->tSenMounting.nSenId == 306) {
+ caps->tPreviewResRange.nWidthMax = 640;
+ caps->tPreviewResRange.nHeightMax = 480;
+ caps->nFocalLength = 130;
+ } else {
+ caps->nFocalLength = 279;
+ }
+#endif
+
+#ifdef CAMERAHAL_DEBUG
+ _dumpOmxTiCap(sensorId, *caps);
+#endif
+
+ // Translate and insert Ducati capabilities to CameraProperties
+ if ( NO_ERROR == ret ) {
+ ret = insertCapabilities(params, *caps);
+ }
+
+ CAMHAL_LOGDB("sen mount id=%u", (unsigned int)caps->tSenMounting.nSenId);
+ CAMHAL_LOGDB("facing id=%u", (unsigned int)caps->tSenMounting.eFacing);
+
+ EXIT:
+ if (bufferlist) {
+ memMgr.freeBufferList(bufferlist);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXCapture.cpp b/camera/OMXCameraAdapter/OMXCapture.cpp
new file mode 100644
index 0000000..afc335e
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXCapture.cpp
@@ -0,0 +1,2098 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXCapture.cpp
+*
+* This file contains functionality for handling image capture.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+
+
+namespace Ti {
+namespace Camera {
+
/**
 * Parses all still-capture related parameters from the client and stores
 * them in the adapter, recording in the mPendingCaptureSettings bitmask
 * which OMX settings have changed and must be (re)applied before the
 * next capture.
 *
 * @param params incoming camera parameters from the client
 * @param state  current adapter state (not referenced by this method)
 * @return NO_ERROR (ret is never set to an error on this path)
 */
status_t OMXCameraAdapter::setParametersCapture(const android::CameraParameters &params,
                                                BaseCameraAdapter::AdapterState state)
{
    status_t ret = NO_ERROR;
    const char *str = NULL;
    int w, h;
    OMX_COLOR_FORMATTYPE pixFormat;
    CodingMode codingMode = mCodingMode;
    const char *valstr = NULL;
    int varint = 0;
    OMX_TI_STEREOFRAMELAYOUTTYPE capFrmLayout;
    bool inCaptureState = false;  // NOTE(review): never read in this method

    LOG_FUNCTION_NAME;

    // All settings below target the image (still capture) port.
    OMXCameraPortParameters *cap;
    cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];

    // Stereo frame layout: a layout change requires the port format to be
    // reprogrammed, so compare before/after the setParamS3D() call.
    capFrmLayout = cap->mFrameLayoutType;
    setParamS3D(mCameraAdapterParameters.mImagePortIndex,
                params.get(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT));
    if (capFrmLayout != cap->mFrameLayoutType) {
        mPendingCaptureSettings |= SetFormat;
    }

    // Picture size: any change also forces a port format update.
    params.getPictureSize(&w, &h);

    if ( ( w != ( int ) cap->mWidth ) ||
          ( h != ( int ) cap->mHeight ) )
        {
        mPendingCaptureSettings |= SetFormat;
        }

    cap->mWidth = w;
    cap->mHeight = h;
    //TODO: Support more pixelformats
    //cap->mStride = 2;

    CAMHAL_LOGVB("Image: cap.mWidth = %d", (int)cap->mWidth);
    CAMHAL_LOGVB("Image: cap.mHeight = %d", (int)cap->mHeight);

    // Picture format: map the client string to an OMX color format and/or
    // a coding mode (JPEG/JPS/MPO are compressed "unused" color formats).
    // Unknown or missing formats fall back to JPEG.
    if ((valstr = params.getPictureFormat()) != NULL) {
        if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
            CAMHAL_LOGDA("CbYCrY format selected");
            pixFormat = OMX_COLOR_FormatCbYCrY;
            mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_YUV422I;
        } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
            CAMHAL_LOGDA("YUV420SP format selected");
            pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
            mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
        } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
            CAMHAL_LOGDA("RGB565 format selected");
            pixFormat = OMX_COLOR_Format16bitRGB565;
            mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_RGB565;
        } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_JPEG) == 0) {
            CAMHAL_LOGDA("JPEG format selected");
            pixFormat = OMX_COLOR_FormatUnused;
            codingMode = CodingJPEG;
            mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_JPEG;
        } else if (strcmp(valstr, TICameraParameters::PIXEL_FORMAT_JPS) == 0) {
            CAMHAL_LOGDA("JPS format selected");
            pixFormat = OMX_COLOR_FormatUnused;
            codingMode = CodingJPS;
            mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_JPS;
        } else if (strcmp(valstr, TICameraParameters::PIXEL_FORMAT_MPO) == 0) {
            CAMHAL_LOGDA("MPO format selected");
            pixFormat = OMX_COLOR_FormatUnused;
            codingMode = CodingMPO;
            mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_MPO;
        } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
            CAMHAL_LOGDA("RAW Picture format selected");
            pixFormat = OMX_COLOR_FormatRawBayer10bit;
            mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
        } else {
            CAMHAL_LOGEA("Invalid format, JPEG format selected as default");
            pixFormat = OMX_COLOR_FormatUnused;
            codingMode = CodingJPEG;
            mPictureFormatFromClient = NULL;
        }
    } else {
        CAMHAL_LOGEA("Picture format is NULL, defaulting to JPEG");
        pixFormat = OMX_COLOR_FormatUnused;
        codingMode = CodingJPEG;
        mPictureFormatFromClient = NULL;
    }

#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
    // Debug aid: when the raw/yuv dump directories exist on the device,
    // enable raw (and optionally parallel YUV) capture dumping.
    mRawCapture = false;
    mYuvCapture = false;

    valstr = params.get(TICameraParameters::KEY_CAP_MODE);
    if ( (!valstr || strcmp(valstr, TICameraParameters::HIGH_QUALITY_MODE) == 0) &&
            access(kRawImagesOutputDirPath, F_OK) != -1 ) {
        mRawCapture = true;
    }

    if (mRawCapture && (access(kYuvImagesOutputDirPath, F_OK) != -1)) {
        pixFormat = OMX_COLOR_FormatCbYCrY;
        mYuvCapture = true;
    }
#endif
    // JPEG capture is not supported in video mode by OMX Camera
    // Set capture format to yuv422i...jpeg encode will
    // be done on A9
    valstr = params.get(TICameraParameters::KEY_CAP_MODE);
    if ( (valstr && ( strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0 ||
                      strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE_HQ) == 0 ) ) &&
            (pixFormat == OMX_COLOR_FormatUnused) ) {
        CAMHAL_LOGDA("Capturing in video mode...selecting yuv422i");
        pixFormat = OMX_COLOR_FormatCbYCrY;
    }

    // Commit format/coding changes and flag the port for reconfiguration.
    if (pixFormat != cap->mColorFormat || codingMode != mCodingMode) {
        mPendingCaptureSettings |= SetFormat;
        cap->mColorFormat = pixFormat;
        mCodingMode = codingMode;
    }

#ifdef OMAP_ENHANCEMENT
    // Temporal bracketing on/off: any transition requires the burst and
    // exposure-bracketing config to be re-sent.
    str = params.get(TICameraParameters::KEY_TEMP_BRACKETING);
    if ( ( str != NULL ) &&
         ( strcmp(str, android::CameraParameters::TRUE) == 0 ) ) {

        if ( !mBracketingSet ) {
            mPendingCaptureSettings |= SetBurstExpBracket;
        }

        mBracketingSet = true;
    } else {

        if ( mBracketingSet ) {
            mPendingCaptureSettings |= SetBurstExpBracket;
        }

        mBracketingSet = false;
    }

    // Exposure bracketing: exposure-only range, or (exposure,gain) pairs.
    // CP_CAM always uses vector shot; otherwise the mode depends on which
    // parameter the client supplied.
    if ( (str = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE)) != NULL ) {
        parseExpRange(str, mExposureBracketingValues, NULL,
                      mExposureGainBracketingModes,
                      EXP_BRACKET_RANGE, mExposureBracketingValidEntries);
        if (mCapMode == OMXCameraAdapter::CP_CAM) {
            mExposureBracketMode = OMX_BracketVectorShot;
        } else {
            mExposureBracketMode = OMX_BracketExposureRelativeInEV;
        }
        mPendingCaptureSettings |= SetBurstExpBracket;
    } else if ( (str = params.get(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE)) != NULL) {
        parseExpRange(str, mExposureBracketingValues, mExposureGainBracketingValues,
                      mExposureGainBracketingModes,
                      EXP_BRACKET_RANGE, mExposureBracketingValidEntries);
        if (mCapMode == OMXCameraAdapter::CP_CAM) {
            mExposureBracketMode = OMX_BracketVectorShot;
        } else {
            mExposureBracketMode = OMX_BracketExposureGainAbsolute;
        }
        mPendingCaptureSettings |= SetBurstExpBracket;
    } else {
        // always set queued shot config in CPCAM mode
        if (mCapMode == OMXCameraAdapter::CP_CAM) {
            mExposureBracketMode = OMX_BracketVectorShot;
            mPendingCaptureSettings |= SetBurstExpBracket;
        }
        // if bracketing was previously set...we set again before capturing to clear
        if (mExposureBracketingValidEntries) {
            mPendingCaptureSettings |= SetBurstExpBracket;
            mExposureBracketingValidEntries = 0;
        }
    }

    // Zoom bracketing range (takes effect only while enabled).
    str = params.get(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE);
    if ( NULL != str ) {
        parseExpRange(str, mZoomBracketingValues, NULL, NULL,
                      ZOOM_BRACKET_RANGE, mZoomBracketingValidEntries);
        mCurrentZoomBracketing = 0;
        mZoomBracketingEnabled = true;
    } else {
        if (mZoomBracketingValidEntries) {
            mZoomBracketingValidEntries = 0;
        }
        mZoomBracketingEnabled = false;
    }
#endif

    // Flush config queue
    // If TRUE: Flush queue and abort processing before enqueing
    valstr = params.get(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE);
    if ( NULL != valstr ) {
        if ( 0 == strcmp(valstr, android::CameraParameters::TRUE) ) {
            mFlushShotConfigQueue = true;
        } else if ( 0 == strcmp(valstr, android::CameraParameters::FALSE) ) {
            mFlushShotConfigQueue = false;
        } else {
            CAMHAL_LOGE("Missing flush shot config parameter. Will use current (%s)",
                        mFlushShotConfigQueue ? "true" : "false");
        }
    }

    // JPEG rotation: honored only for 2D frame layouts; stereo captures
    // are never rotated.
    if ( params.getInt(android::CameraParameters::KEY_ROTATION) != -1 )
        {
        if (params.getInt(android::CameraParameters::KEY_ROTATION) != (int) mPictureRotation) {
            mPendingCaptureSettings |= SetRotation;
        }
        if (cap->mFrameLayoutType == OMX_TI_StereoFrameLayout2D) {
            mPictureRotation = params.getInt(android::CameraParameters::KEY_ROTATION);
        } else {
            mPictureRotation = 0;
        }
        }
    else
        {
        if (mPictureRotation) mPendingCaptureSettings |= SetRotation;
        mPictureRotation = 0;
        }

    CAMHAL_LOGVB("Picture Rotation set %d", mPictureRotation);

#ifdef OMAP_ENHANCEMENT
    // Read Sensor Orientation and set it based on perating mode
    varint = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
    if ( varint != -1 )
        {
        mSensorOrientation = varint;
        if (mSensorOrientation == 270 ||mSensorOrientation==90)
            {
            // 90/270 must be mapped to the complementary angle so the
            // Ducati coprocessor applies the counter rotation.
            CAMHAL_LOGEA(" Orientation is 270/90. So setting counter rotation to Ducati");
            mSensorOrientation +=180;
            mSensorOrientation%=360;
            }
        }
    else
        {
        mSensorOrientation = 0;
        }

    CAMHAL_LOGVB("Sensor Orientation set : %d", mSensorOrientation);
#endif

#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
    // Burst frame count; defaults to a single frame when absent/invalid.
    varint = params.getInt(TICameraParameters::KEY_BURST);
    if ( varint >= 1 )
        {
        if (varint != (int) mBurstFrames) {
            mPendingCaptureSettings |= SetBurstExpBracket;
        }
        mBurstFrames = varint;
        }
    else
        {
        if (mBurstFrames != 1) mPendingCaptureSettings |= SetBurstExpBracket;
        mBurstFrames = 1;
        }

    CAMHAL_LOGVB("Burst Frames set %d", mBurstFrames);
#endif

    // JPEG quality, clamped to [MIN_JPEG_QUALITY, MAX_JPEG_QUALITY];
    // out-of-range values reset to maximum quality.
    varint = params.getInt(android::CameraParameters::KEY_JPEG_QUALITY);
    if ( varint >= MIN_JPEG_QUALITY && varint <= MAX_JPEG_QUALITY ) {
        if (varint != mPictureQuality) {
            mPendingCaptureSettings |= SetQuality;
            mPictureQuality = varint;
        }
    } else {
        if (mPictureQuality != MAX_JPEG_QUALITY) {
            mPendingCaptureSettings |= SetQuality;
            mPictureQuality = MAX_JPEG_QUALITY;
        }
    }

    CAMHAL_LOGVB("Picture Quality set %d", mPictureQuality);

    // Thumbnail geometry and quality; negatives fall back to defaults.
    varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
    if ( varint >= 0 ) {
        if (varint != mThumbWidth) {
            mPendingCaptureSettings |= SetThumb;
            mThumbWidth = varint;
        }
    } else {
        if (mThumbWidth != DEFAULT_THUMB_WIDTH) {
            mPendingCaptureSettings |= SetThumb;
            mThumbWidth = DEFAULT_THUMB_WIDTH;
        }
    }

    CAMHAL_LOGVB("Picture Thumb width set %d", mThumbWidth);

    varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
    if ( varint >= 0 ) {
        if (varint != mThumbHeight) {
            mPendingCaptureSettings |= SetThumb;
            mThumbHeight = varint;
        }
    } else {
        if (mThumbHeight != DEFAULT_THUMB_HEIGHT) {
            mPendingCaptureSettings |= SetThumb;
            mThumbHeight = DEFAULT_THUMB_HEIGHT;
        }
    }

    CAMHAL_LOGVB("Picture Thumb height set %d", mThumbHeight);

    varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
    if ( varint >= MIN_JPEG_QUALITY && varint <= MAX_JPEG_QUALITY ) {
        if (varint != mThumbQuality) {
            mPendingCaptureSettings |= SetThumb;
            mThumbQuality = varint;
        }
    } else {
        if (mThumbQuality != MAX_JPEG_QUALITY) {
            mPendingCaptureSettings |= SetThumb;
            mThumbQuality = MAX_JPEG_QUALITY;
        }
    }

    CAMHAL_LOGDB("Thumbnail Quality set %d", mThumbQuality);

    // On first initialization every capture setting must be programmed.
    if (mFirstTimeInit) {
        mPendingCaptureSettings = ECapturesettingsAll;
    }

    // Raw capture dimensions are carried on the video port.
    cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
    cap->mWidth = params.getInt(TICameraParameters::RAW_WIDTH);
    cap->mHeight = params.getInt(TICameraParameters::RAW_HEIGHT);

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+status_t OMXCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+ OMXCameraPortParameters *imgCaptureData = NULL;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret )
+ {
+ imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+
+ // If any settings have changed that need to be set with SetParam,
+ // we will need to disable the port to set them
+ if ((mPendingCaptureSettings & ECaptureParamSettings)) {
+ disableImagePort();
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ }
+
+ if (mPendingCaptureSettings & SetFormat) {
+ ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *imgCaptureData);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ frame.mLength = imgCaptureData->mBufSize;
+ frame.mWidth = imgCaptureData->mWidth;
+ frame.mHeight = imgCaptureData->mHeight;
+ frame.mAlignment = imgCaptureData->mStride;
+ CAMHAL_LOGDB("getPictureBufferSize: width:%u height:%u alignment:%u length:%u",
+ frame.mWidth, frame.mHeight, frame.mAlignment, frame.mLength);
+ }
+ else
+ {
+ CAMHAL_LOGEB("setFormat() failed 0x%x", ret);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+int OMXCameraAdapter::getBracketingValueMode(const char *a, const char *b) const
+{
+ BracketingValueMode bvm = BracketingValueAbsolute;
+
+ if ( (NULL != b) &&
+ (NULL != a) &&
+ (a < b) &&
+ ( (NULL != memchr(a, '+', b - a)) ||
+ (NULL != memchr(a, '-', b - a)) ) ) {
+ bvm = BracketingValueRelative;
+ }
+ return bvm;
+}
+
/**
 * Parses a comma-separated bracketing range string into integer arrays.
 *
 * Supports plain exposure lists as well as (exposure,gain) pairs, each of
 * which may be absolute or relative (signed), optionally suffixed with
 * 'F'/'f' to mark the value as "forced".
 *
 * @param rangeStr     input string (see format examples in the loop below)
 * @param expRange     output: parsed exposure values (required)
 * @param gainRange    output: parsed gain values; when non-NULL the input
 *                     is expected to be (exposure,gain) pairs
 * @param expGainModes output: per-entry BracketingValue* mode, may be NULL
 * @param count        capacity of the output arrays
 * @param validEntries output: number of entries actually parsed
 * @return NO_ERROR on success, -EINVAL on NULL rangeStr/expRange
 */
status_t OMXCameraAdapter::parseExpRange(const char *rangeStr,
                                         int *expRange,
                                         int *gainRange,
                                         int *expGainModes,
                                         size_t count,
                                         size_t &validEntries)
{
    status_t ret = NO_ERROR;
    char *end = NULL;
    const char *startPtr = NULL;
    size_t i = 0;

    LOG_FUNCTION_NAME;

    if ( NULL == rangeStr ){
        return -EINVAL;
    }

    if ( NULL == expRange ){
        return -EINVAL;
    }

    if ( NO_ERROR == ret ) {
        startPtr = rangeStr;
        do {
            // Relative Exposure example: "-30,-10, 0, 10, 30"
            // Absolute Gain ex. (exposure,gain) pairs: "(100,300),(200,300),(400,300),(800,300),(1600,300)"
            // Relative Gain ex. (exposure,gain) pairs: "(-30,+0),(-10, +0),(+0,+0),(+10,+0),(+30,+0)"
            // Forced relative Exposure example: "-30F,-10F, 0F, 10F, 30F"
            // Forced absolute Gain ex. (exposure,gain) pairs: "(100,300)F,(200,300)F,(400,300)F,(800,300)F,(1600,300)F"
            // Forced relative Gain ex. (exposure,gain) pairs: "(-30,+0)F,(-10, +0)F,(+0,+0)F,(+10,+0)F,(+30,+0)F"

            // skip '(' and ','
            while ((*startPtr == '(') || (*startPtr == ',')) startPtr++;

            expRange[i] = (int)strtol(startPtr, &end, 10);

            if (expGainModes) {
                // if gainRange is given rangeStr should be (exposure, gain) pair
                if (gainRange) {
                    // Sign style of the exposure token decides the mode;
                    // an unsigned exposure defers to the gain token's style.
                    int bvm_exp = getBracketingValueMode(startPtr, end);
                    startPtr = end + 1; // for the ','
                    gainRange[i] = (int)strtol(startPtr, &end, 10);

                    if (BracketingValueAbsolute == bvm_exp) {
                        expGainModes[i] = getBracketingValueMode(startPtr, end);
                    } else {
                        expGainModes[i] = bvm_exp;
                    }
                } else {
                    // exposure-only list: values are EV compensations
                    expGainModes[i] = BracketingValueCompensation;
                }
            }
            startPtr = end;

            // skip ')'
            while (*startPtr == ')') startPtr++;

            // Check for "forced" key
            if (expGainModes) {
                while ((*startPtr == 'F') || (*startPtr == 'f')) {
                    // Upgrade the mode to its "forced" variant in place.
                    if ( BracketingValueAbsolute == expGainModes[i] ) {
                        expGainModes[i] = BracketingValueAbsoluteForced;
                    } else if ( BracketingValueRelative == expGainModes[i] ) {
                        expGainModes[i] = BracketingValueRelativeForced;
                    } else if ( BracketingValueCompensation == expGainModes[i] ) {
                        expGainModes[i] = BracketingValueCompensationForced;
                    } else {
                        CAMHAL_LOGE("Unexpected old mode 0x%x", expGainModes[i]);
                    }
                    startPtr++;
                }
            }

            i++;

        } while ((startPtr[0] != '\0') && (i < count));
        validEntries = i;
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+status_t OMXCameraAdapter::doExposureBracketing(int *evValues,
+ int *evValues2,
+ int *evModes2,
+ size_t evCount,
+ size_t frameCount,
+ bool flush,
+ OMX_BRACKETMODETYPE bracketMode)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState ) {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NULL == evValues ) {
+ CAMHAL_LOGEA("Exposure compensation values pointer is invalid");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret ) {
+ if (bracketMode == OMX_BracketVectorShot) {
+ ret = setVectorShot(evValues, evValues2, evModes2, evCount, frameCount, flush, bracketMode);
+ } else {
+ ret = setExposureBracketing(evValues, evValues2, evCount, frameCount, bracketMode);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setVectorStop(bool toPreview)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_VECTSHOTSTOPMETHODTYPE vecShotStop;
+
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR(&vecShotStop, OMX_TI_CONFIG_VECTSHOTSTOPMETHODTYPE);
+
+ vecShotStop.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ if (toPreview) {
+ vecShotStop.eStopMethod = OMX_TI_VECTSHOTSTOPMETHOD_GOTO_PREVIEW;
+ } else {
+ vecShotStop.eStopMethod = OMX_TI_VECTSHOTSTOPMETHOD_WAIT_IN_CAPTURE;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigVectShotStopMethod,
+ &vecShotStop);
+ if (OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while configuring bracket shot 0x%x", eError);
+ } else {
+ CAMHAL_LOGDA("Bracket shot configured successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
/**
 * Prepares the OMX component for vector (queued) shot capture:
 * unlimited-frame capture mode, vector-shot bracketing, and a stop method
 * that keeps the pipeline in capture state between shots.
 *
 * @return NO_ERROR on success, or the translated OMX error
 */
status_t OMXCameraAdapter::initVectorShot()
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_CONFIG_CAPTUREMODETYPE expCapMode;
    OMX_CONFIG_EXTCAPTUREMODETYPE extExpCapMode;

    LOG_FUNCTION_NAME;

    if (NO_ERROR == ret) {
        // Capture mode: do not limit the number of frames, the queued shot
        // configs determine how many are taken.
        OMX_INIT_STRUCT_PTR (&expCapMode, OMX_CONFIG_CAPTUREMODETYPE);
        expCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex;

        expCapMode.bFrameLimited = OMX_FALSE;

        eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                OMX_IndexConfigCaptureMode,
                                &expCapMode);
        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while configuring capture mode 0x%x", eError);
            goto exit;
        } else {
            CAMHAL_LOGDA("Camera capture mode configured successfully");
        }
    }

    if (NO_ERROR == ret) {
        // Extended capture mode: enable bracketing in vector-shot mode.
        OMX_INIT_STRUCT_PTR (&extExpCapMode, OMX_CONFIG_EXTCAPTUREMODETYPE);
        extExpCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex;

        extExpCapMode.bEnableBracketing = OMX_TRUE;
        extExpCapMode.tBracketConfigType.eBracketMode = OMX_BracketVectorShot;

        eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                ( OMX_INDEXTYPE ) OMX_IndexConfigExtCaptureMode,
                                &extExpCapMode);
        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while configuring extended capture mode 0x%x", eError);
            goto exit;
        } else {
            CAMHAL_LOGDA("Extended camera capture mode configured successfully");
        }
    }


    if (NO_ERROR == ret) {
        // set vector stop method to stop in capture
        ret = setVectorStop(false);
    }

 exit:
    LOG_FUNCTION_NAME_EXIT;

    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
+
+/**
+ * Enqueue a list of per-shot configurations (vector shot / CP-CAM burst).
+ *
+ * First queries the component for the free slots in its shot queue and
+ * fails with -ENOSPC if the request does not fit.  The ev arrays are then
+ * packed into OMX_TI_CONFIG_ENQUEUESHOTCONFIGS batches - the struct holds
+ * a fixed number of entries (ARRAY_SIZE(nShotConfig)), hence the chunked
+ * outer loop - and sent via OMX_TI_IndexConfigEnqueueShotConfigs.  If
+ * frameCount exceeds evCount, the last config's nFrames is stretched to
+ * cover the remainder.  evCount == 0 is the plain-burst case: a single
+ * relative (0,0) config repeated frameCount times.
+ *
+ * @param evValues    EV compensation or absolute exposure, per shot
+ * @param evValues2   gain, per shot (used for exposure/gain pairs)
+ * @param evModes2    BracketingValue* mode selector, per shot
+ * @param evCount     number of entries; 0 means plain burst
+ * @param frameCount  total frames to capture
+ * @param flush       flush the component's shot queue before the first batch
+ * @param bracketMode unused here; the caller already dispatched on it
+ * @return NO_ERROR, -ENOSPC when the queue lacks space, or translated OMX error
+ */
+status_t OMXCameraAdapter::setVectorShot(int *evValues,
+                                         int *evValues2,
+                                         int *evModes2,
+                                         size_t evCount,
+                                         size_t frameCount,
+                                         bool flush,
+                                         OMX_BRACKETMODETYPE bracketMode)
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_TI_CONFIG_ENQUEUESHOTCONFIGS enqueueShotConfigs;
+    OMX_TI_CONFIG_QUERYAVAILABLESHOTS queryAvailableShots;
+    bool doFlush = flush;
+
+    LOG_FUNCTION_NAME;
+
+    OMX_INIT_STRUCT_PTR(&enqueueShotConfigs, OMX_TI_CONFIG_ENQUEUESHOTCONFIGS);
+    OMX_INIT_STRUCT_PTR(&queryAvailableShots, OMX_TI_CONFIG_QUERYAVAILABLESHOTS);
+
+    queryAvailableShots.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+    eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+                           (OMX_INDEXTYPE) OMX_TI_IndexConfigQueryAvailableShots,
+                           &queryAvailableShots);
+    if (OMX_ErrorNone != eError) {
+        CAMHAL_LOGE("Error getting available shots 0x%x", eError);
+        goto exit;
+    } else {
+        CAMHAL_LOGD("AVAILABLE SHOTS: %d", queryAvailableShots.nAvailableShots);
+        if (queryAvailableShots.nAvailableShots < evCount) {
+            // TODO(XXX): Need to implement some logic to handle this error
+            CAMHAL_LOGE("Not enough available shots to fulfill this queue request");
+            ret = -ENOSPC;
+            goto exit;
+        }
+    }
+
+    // Send configs in batches of ARRAY_SIZE(nShotConfig); confID walks the
+    // full ev arrays while i indexes within the current batch.
+    for ( unsigned int confID = 0; confID < evCount; ) {
+        unsigned int i;
+        for ( i = 0 ; (i < ARRAY_SIZE(enqueueShotConfigs.nShotConfig)) && (confID < evCount); i++, confID++ ) {
+            CAMHAL_LOGD("%2u: (%7d,%4d) mode: %d", confID, evValues[confID], evValues2[confID], evModes2[confID]);
+            enqueueShotConfigs.nShotConfig[i].nConfigId = confID;
+            enqueueShotConfigs.nShotConfig[i].nFrames = 1;
+            if ( (BracketingValueCompensation == evModes2[confID]) ||
+                 (BracketingValueCompensationForced == evModes2[confID]) ) {
+                // EV compensation
+                enqueueShotConfigs.nShotConfig[i].nEC = evValues[confID];
+                enqueueShotConfigs.nShotConfig[i].nExp = 0;
+                enqueueShotConfigs.nShotConfig[i].nGain = 0;
+            } else {
+                // exposure,gain pair
+                enqueueShotConfigs.nShotConfig[i].nEC = 0;
+                enqueueShotConfigs.nShotConfig[i].nExp = evValues[confID];
+                enqueueShotConfigs.nShotConfig[i].nGain = evValues2[confID];
+            }
+            // Default, then refine per mode below.
+            enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_ABSOLUTE;
+            switch (evModes2[confID]) {
+                case BracketingValueAbsolute: // (exp,gain) pairs directly program sensor values
+                default :
+                    enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_ABSOLUTE;
+                    break;
+                case BracketingValueRelative: // (exp,gain) pairs relative to AE settings and constraints
+                case BracketingValueCompensation: // EV compensation relative to AE settings and constraints
+                    enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_RELATIVE;
+                    break;
+                case BracketingValueAbsoluteForced: // (exp,gain) pairs directly program sensor values
+                    // are forced over constraints due to flicker, etc.
+                    enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_FORCE_ABSOLUTE;
+                    break;
+                case BracketingValueRelativeForced: // (exp, gain) pairs relative to AE settings AND settings
+                case BracketingValueCompensationForced: // EV compensation relative to AE settings and constraints
+                    // are forced over constraints due to flicker, etc.
+                    enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_FORCE_RELATIVE;
+                    break;
+            }
+            enqueueShotConfigs.nShotConfig[i].bNoSnapshot = OMX_FALSE; // TODO: Make this configurable
+        }
+
+        // Repeat last exposure and again
+        if ((confID == evCount) && (evCount > 0) && (frameCount > evCount) && (0 != i)) {
+            enqueueShotConfigs.nShotConfig[i-1].nFrames = frameCount - evCount;
+        }
+
+        enqueueShotConfigs.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+        enqueueShotConfigs.bFlushQueue = doFlush ? OMX_TRUE : OMX_FALSE;
+        enqueueShotConfigs.nNumConfigs = i;
+        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                               ( OMX_INDEXTYPE ) OMX_TI_IndexConfigEnqueueShotConfigs,
+                               &enqueueShotConfigs);
+        if ( OMX_ErrorNone != eError ) {
+            CAMHAL_LOGEB("Error while configuring enqueue shot 0x%x", eError);
+            goto exit;
+        } else {
+            CAMHAL_LOGDA("Enqueue shot configured successfully");
+        }
+        // Flush only first time
+        doFlush = false;
+    }
+
+    // Handle burst capture (no any bracketing) case
+    if (0 == evCount) {
+        CAMHAL_LOGE("Handle burst capture (no any bracketing) case");
+        enqueueShotConfigs.nShotConfig[0].nConfigId = 0;
+        enqueueShotConfigs.nShotConfig[0].nFrames = frameCount;
+        enqueueShotConfigs.nShotConfig[0].nEC = 0;
+        enqueueShotConfigs.nShotConfig[0].nExp = 0;
+        enqueueShotConfigs.nShotConfig[0].nGain = 0;
+        enqueueShotConfigs.nShotConfig[0].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_RELATIVE;
+        enqueueShotConfigs.nShotConfig[0].bNoSnapshot = OMX_FALSE; // TODO: Make this configurable
+        enqueueShotConfigs.nNumConfigs = 1;
+        enqueueShotConfigs.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+        enqueueShotConfigs.bFlushQueue = doFlush ? OMX_TRUE : OMX_FALSE;
+        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                               ( OMX_INDEXTYPE ) OMX_TI_IndexConfigEnqueueShotConfigs,
+                               &enqueueShotConfigs);
+        if ( OMX_ErrorNone != eError ) {
+            CAMHAL_LOGEB("Error while configuring enqueue shot 0x%x", eError);
+            goto exit;
+        } else {
+            CAMHAL_LOGDA("Enqueue shot configured successfully");
+        }
+    }
+
+ exit:
+    LOG_FUNCTION_NAME_EXIT;
+
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+/**
+ * Configure classic exposure bracketing (or a plain frame-limited burst)
+ * on the image port.
+ *
+ * Case split (see also the inline comments below):
+ *  - frameCount > 0, evCount == 0 : HQ burst, frame-limited, no bracketing
+ *  - frameCount > 0, evCount > 0  : HQ exposure bracketing
+ *  - both zero                    : normal HQ capture, no frame limit
+ *
+ * @param evValues    bracketing values; EV steps scaled by 10 for
+ *                    OMX_BracketExposureRelativeInEV, absolute exposure
+ *                    for OMX_BracketExposureGainAbsolute
+ * @param evValues2   gain values, consulted only in the absolute mode
+ * @param evCount     number of bracketing entries
+ * @param frameCount  frame limit for the capture
+ * @param bracketMode OMX bracketing mode to program
+ * @return NO_ERROR, or an error from the (currently absent) state checks
+ *
+ * NOTE(review): OMX_SetConfig failures in this function are only logged;
+ * eError never reaches the return value, unlike the sibling
+ * setVectorShot()/setVectorStop().  Presumably best-effort by design -
+ * worth confirming before relying on the return code.
+ */
+status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
+                                                 int *evValues2,
+                                                 size_t evCount,
+                                                 size_t frameCount,
+                                                 OMX_BRACKETMODETYPE bracketMode)
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_CONFIG_CAPTUREMODETYPE expCapMode;
+    OMX_CONFIG_EXTCAPTUREMODETYPE extExpCapMode;
+
+    LOG_FUNCTION_NAME;
+
+    if ( NO_ERROR == ret )
+        {
+        OMX_INIT_STRUCT_PTR (&expCapMode, OMX_CONFIG_CAPTUREMODETYPE);
+        expCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+        /// If frameCount>0 but evCount<=0, then this is the case of HQ burst.
+        //Otherwise, it is normal HQ capture
+        ///If frameCount>0 and evCount>0 then this is the cause of HQ Exposure bracketing.
+        if ( 0 == evCount && 0 == frameCount )
+            {
+            expCapMode.bFrameLimited = OMX_FALSE;
+            }
+        else
+            {
+            expCapMode.bFrameLimited = OMX_TRUE;
+            expCapMode.nFrameLimit = frameCount;
+            }
+
+        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                               OMX_IndexConfigCaptureMode,
+                               &expCapMode);
+        if ( OMX_ErrorNone != eError )
+            {
+            CAMHAL_LOGEB("Error while configuring capture mode 0x%x", eError);
+            }
+        else
+            {
+            CAMHAL_LOGDA("Camera capture mode configured successfully");
+            }
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        OMX_INIT_STRUCT_PTR (&extExpCapMode, OMX_CONFIG_EXTCAPTUREMODETYPE);
+        extExpCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+        if ( 0 == evCount )
+            {
+            extExpCapMode.bEnableBracketing = OMX_FALSE;
+            }
+        else
+            {
+            extExpCapMode.bEnableBracketing = OMX_TRUE;
+            extExpCapMode.tBracketConfigType.eBracketMode = bracketMode;
+            // NOTE(review): the first value appears to be counted separately
+            // by the component, hence evCount - 1 - confirm against TI docs.
+            extExpCapMode.tBracketConfigType.nNbrBracketingValues = evCount - 1;
+            }
+
+        for ( unsigned int i = 0 ; i < evCount ; i++ )
+            {
+            if (bracketMode == OMX_BracketExposureGainAbsolute) {
+                extExpCapMode.tBracketConfigType.nBracketValues[i] = evValues[i];
+                extExpCapMode.tBracketConfigType.nBracketValues2[i] = evValues2[i];
+            } else {
+                // assuming OMX_BracketExposureRelativeInEV
+                // Convert EV*10 into Q16.16 fixed-point EV units.
+                extExpCapMode.tBracketConfigType.nBracketValues[i] = ( evValues[i] * ( 1 << Q16_OFFSET ) ) / 10;
+            }
+            }
+
+        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                               ( OMX_INDEXTYPE ) OMX_IndexConfigExtCaptureMode,
+                               &extExpCapMode);
+        if ( OMX_ErrorNone != eError )
+            {
+            CAMHAL_LOGEB("Error while configuring extended capture mode 0x%x", eError);
+            }
+        else
+            {
+            CAMHAL_LOGDA("Extended camera capture mode configured successfully");
+            }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Register or unregister for the OMX shutter event callback.
+ *
+ * Requires the component to be in the Executing state.  Both the enable
+ * and disable paths target OMX_TI_IndexConfigShutterCallback on all ports.
+ *
+ * @param enabled true to enable the callback, false to disable it
+ * @return NO_ERROR on success, -1 on wrong state or OMX failure
+ */
+status_t OMXCameraAdapter::setShutterCallback(bool enabled)
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_CONFIG_CALLBACKREQUESTTYPE shutterRequest;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateExecuting != mComponentState ) {
+        CAMHAL_LOGEA("OMX component not in executing state");
+        ret = -1;
+    }
+
+    if ( NO_ERROR == ret ) {
+        OMX_INIT_STRUCT_PTR (&shutterRequest, OMX_CONFIG_CALLBACKREQUESTTYPE);
+        shutterRequest.nPortIndex = OMX_ALL;
+        // The target index is identical for both paths; only bEnable differs.
+        shutterRequest.nIndex = ( OMX_INDEXTYPE ) OMX_TI_IndexConfigShutterCallback;
+        shutterRequest.bEnable = enabled ? OMX_TRUE : OMX_FALSE;
+
+        if ( enabled ) {
+            CAMHAL_LOGDA("Enabling shutter callback");
+        } else {
+            CAMHAL_LOGDA("Disabling shutter callback");
+        }
+
+        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                               ( OMX_INDEXTYPE ) OMX_IndexConfigCallbackRequest,
+                               &shutterRequest);
+        if ( OMX_ErrorNone != eError ) {
+            CAMHAL_LOGEB("Error registering shutter callback 0x%x", eError);
+            ret = -1;
+        } else {
+            CAMHAL_LOGDB("Shutter callback for index 0x%x registered successfully",
+                         OMX_TI_IndexConfigShutterCallback);
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Per-buffer bookkeeping while zero-shutter-lag bracketing is active.
+ *
+ * Marks the buffer that just came back from the component as consumed.
+ * Once the count of queued bracketing buffers reaches zero, re-arms the
+ * next buffer in ring order (returnFrame with a refcount of 1) so the
+ * sliding window of most-recent frames keeps advancing.
+ *
+ * @param pBuffHeader buffer header received from the image port; its
+ *                    pAppPrivate holds the CameraBuffer
+ * @param typeOfFrame frame type used when re-queueing the next buffer
+ * @return NO_ERROR, or -EINVAL on wrong component state / bad buffer index
+ */
+status_t OMXCameraAdapter::doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader,
+                                        CameraFrame::FrameType typeOfFrame)
+{
+    status_t ret = NO_ERROR;
+    int currentBufferIdx, nextBufferIdx;
+    OMXCameraPortParameters * imgCaptureData = NULL;
+
+    LOG_FUNCTION_NAME;
+
+    imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+    if ( OMX_StateExecuting != mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component is not in executing state");
+        ret = -EINVAL;
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        CameraBuffer *buffer = (CameraBuffer *)pBuffHeader->pAppPrivate;
+        currentBufferIdx = buffer->index;
+
+        if ( currentBufferIdx >= imgCaptureData->mNumBufs)
+            {
+            CAMHAL_LOGEB("Invalid bracketing buffer index 0x%x", currentBufferIdx);
+            ret = -EINVAL;
+            }
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        mBracketingBuffersQueued[currentBufferIdx] = false;
+        mBracketingBuffersQueuedCount--;
+
+        // All queued buffers consumed: advance the ring by one and hand the
+        // next buffer back to the component.
+        if ( 0 >= mBracketingBuffersQueuedCount )
+            {
+            nextBufferIdx = ( currentBufferIdx + 1 ) % imgCaptureData->mNumBufs;
+            mBracketingBuffersQueued[nextBufferIdx] = true;
+            mBracketingBuffersQueuedCount++;
+            mLastBracetingBufferIdx = nextBufferIdx;
+            setFrameRefCountByType((CameraBuffer *)imgCaptureData->mBufferHeader[nextBufferIdx]->pAppPrivate, typeOfFrame, 1);
+            returnFrame((CameraBuffer *)imgCaptureData->mBufferHeader[nextBufferIdx]->pAppPrivate, typeOfFrame);
+            }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Deliver all accumulated bracketing frames to frame subscribers.
+ *
+ * Walks the capture-buffer ring once, starting just after the last
+ * bracketing buffer, and sends a callback for every buffer that is not
+ * currently queued to the component (i.e. holds a captured frame).
+ *
+ * @param[out] framesSent number of frames actually dispatched
+ * @return NO_ERROR, or -EINVAL if the component is not executing
+ */
+status_t OMXCameraAdapter::sendBracketFrames(size_t &framesSent)
+{
+    status_t ret = NO_ERROR;
+    int currentBufferIdx;
+    OMXCameraPortParameters * imgCaptureData = NULL;
+
+    LOG_FUNCTION_NAME;
+
+    imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+    framesSent = 0;
+
+    if ( OMX_StateExecuting != mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component is not in executing state");
+        ret = -EINVAL;
+        }
+
+    if ( NO_ERROR == ret )
+        {
+
+        // Single pass around the ring; stops when we are back at the start.
+        currentBufferIdx = mLastBracetingBufferIdx;
+        do
+            {
+            currentBufferIdx++;
+            currentBufferIdx %= imgCaptureData->mNumBufs;
+            if (!mBracketingBuffersQueued[currentBufferIdx] )
+                {
+                CameraFrame cameraFrame;
+                sendCallBacks(cameraFrame,
+                              imgCaptureData->mBufferHeader[currentBufferIdx],
+                              imgCaptureData->mImageType,
+                              imgCaptureData);
+                framesSent++;
+                }
+            } while ( currentBufferIdx != mLastBracetingBufferIdx );
+
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Start zero-shutter-lag style bracketing.
+ *
+ * Allocates the per-buffer queued-state array, marks every capture buffer
+ * as queued, then kicks off image capture in bracketing mode via
+ * startImageCapture(true, ...).  A no-op if bracketing is already enabled.
+ *
+ * @param range number of frames kept in the bracketing window
+ * @return NO_ERROR, -EINVAL on wrong state / no capture buffers, or the
+ *         error from startImageCapture()
+ */
+status_t OMXCameraAdapter::startBracketing(int range)
+{
+    status_t ret = NO_ERROR;
+    OMXCameraPortParameters * imgCaptureData = NULL;
+
+    LOG_FUNCTION_NAME;
+
+    imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+    if ( OMX_StateExecuting != mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component is not in executing state");
+        ret = -EINVAL;
+        }
+
+    {
+        android::AutoMutex lock(mBracketingLock);
+
+        // Already running: nothing to do.
+        if ( mBracketingEnabled )
+            {
+            return ret;
+            }
+    }
+
+    if ( 0 == imgCaptureData->mNumBufs )
+        {
+        CAMHAL_LOGEB("Image capture buffers set to %d", imgCaptureData->mNumBufs);
+        ret = -EINVAL;
+        }
+
+    if ( mPending3Asettings )
+        apply3Asettings(mParameters3A);
+
+    if ( NO_ERROR == ret )
+        {
+        android::AutoMutex lock(mBracketingLock);
+
+        mBracketingRange = range;
+        mBracketingBuffersQueued = new bool[imgCaptureData->mNumBufs];
+        // NOTE(review): operator new only yields NULL under -fno-exceptions;
+        // presumably this HAL is built that way - confirm before removing.
+        if ( NULL == mBracketingBuffersQueued )
+            {
+            CAMHAL_LOGEA("Unable to allocate bracketing management structures");
+            ret = -1;
+            }
+
+        if ( NO_ERROR == ret )
+            {
+            mBracketingBuffersQueuedCount = imgCaptureData->mNumBufs;
+            mBurstFramesAccum = imgCaptureData->mNumBufs;
+            mLastBracetingBufferIdx = mBracketingBuffersQueuedCount - 1;
+
+            for ( int i = 0 ; i < imgCaptureData->mNumBufs ; i++ )
+                {
+                mBracketingBuffersQueued[i] = true;
+                }
+
+            }
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        CachedCaptureParameters* cap_params = cacheCaptureParameters();
+        ret = startImageCapture(true, cap_params);
+        delete cap_params;
+        {
+            android::AutoMutex lock(mBracketingLock);
+
+            if ( NO_ERROR == ret )
+                {
+                mBracketingEnabled = true;
+                }
+            else
+                {
+                mBracketingEnabled = false;
+                }
+        }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Stop bracketing: halt image capture and reset all bracketing state.
+ *
+ * @return the result of stopImageCapture()
+ */
+status_t OMXCameraAdapter::stopBracketing()
+{
+    LOG_FUNCTION_NAME;
+
+    const status_t ret = stopImageCapture();
+
+    android::AutoMutex lock(mBracketingLock);
+
+    // delete[] of a NULL pointer is a no-op, so no guard is required.
+    delete [] mBracketingBuffersQueued;
+    mBracketingBuffersQueued = NULL;
+
+    mBracketingEnabled = false;
+    mBracketingBuffersQueuedCount = 0;
+    mLastBracetingBufferIdx = 0;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Start a still-image capture.
+ *
+ * Sequence: validate preconditions -> pause face detection -> if ZSL
+ * bracketing is live, flush the accumulated frames and return -> apply
+ * pending rotation / bracketing settings from capParams -> (HQ modes)
+ * register for the OMX shutter event -> queue capture buffers, tracking
+ * burst accounting under mBurstLock -> issue OMX_IndexConfigCapturing
+ * (skipped in video modes) -> (HQ modes) wait for the shutter event and
+ * notify subscribers.  Any failure funnels through EXIT for cleanup.
+ *
+ * @param bracketing true when called from startBracketing(); skips the
+ *                   cancel-state check
+ * @param capParams  cached capture settings snapshot; must not be NULL
+ * @return NO_ERROR, NO_INIT/BAD_VALUE on precondition failure, or the
+ *         translated OMX error
+ */
+status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParameters* capParams)
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMXCameraPortParameters * capData = NULL;
+    OMX_CONFIG_BOOLEANTYPE bOMX;
+    size_t bracketingSent = 0;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mImageCaptureLock);
+
+    if(!mCaptureConfigured)
+        {
+        ///Image capture was cancelled before we could start
+        return NO_ERROR;
+        }
+
+    if ( 0 != mStartCaptureSem.Count() )
+        {
+        CAMHAL_LOGEB("Error mStartCaptureSem semaphore count %d", mStartCaptureSem.Count());
+        return NO_INIT;
+        }
+
+    if ( !bracketing ) {
+        if ((getNextState() & (CAPTURE_ACTIVE|BRACKETING_ACTIVE)) == 0) {
+            CAMHAL_LOGDA("trying starting capture when already canceled");
+            return NO_ERROR;
+        }
+    }
+
+    if (!capParams) {
+        CAMHAL_LOGE("Invalid cached parameters sent!");
+        return BAD_VALUE;
+    }
+
+    // Camera framework doesn't expect face callbacks once capture is triggered
+    pauseFaceDetection(true);
+
+    //During bracketing image capture is already active
+    {
+        android::AutoMutex lock(mBracketingLock);
+        if ( mBracketingEnabled )
+            {
+            //Stop bracketing, activate normal burst for the remaining images
+            mBracketingEnabled = false;
+            ret = sendBracketFrames(bracketingSent);
+
+            // Check if we accumulated enough buffers
+            if ( bracketingSent < ( mBracketingRange - 1 ) )
+                {
+                mCapturedFrames = mBracketingRange + ( ( mBracketingRange - 1 ) - bracketingSent );
+                }
+            else
+                {
+                mCapturedFrames = mBracketingRange;
+                }
+            mBurstFramesQueued = 0;
+            mBurstFramesAccum = mCapturedFrames;
+
+            if(ret != NO_ERROR)
+                goto EXIT;
+            else
+                return ret;
+            }
+    }
+
+    if ( NO_ERROR == ret ) {
+        if (capParams->mPendingCaptureSettings & SetRotation) {
+            mPendingCaptureSettings &= ~SetRotation;
+            ret = setPictureRotation(mPictureRotation);
+            if ( NO_ERROR != ret ) {
+                CAMHAL_LOGEB("Error configuring image rotation %x", ret);
+            }
+        }
+
+        if (capParams->mPendingCaptureSettings & SetBurstExpBracket) {
+            mPendingCaptureSettings &= ~SetBurstExpBracket;
+            // With a bracketing window already armed, pass zero counts so
+            // only the mode/flush settings are (re)programmed.
+            if ( mBracketingSet ) {
+                ret = doExposureBracketing(capParams->mExposureBracketingValues,
+                                           capParams->mExposureGainBracketingValues,
+                                           capParams->mExposureGainBracketingModes,
+                                           0,
+                                           0,
+                                           capParams->mFlushShotConfigQueue,
+                                           capParams->mExposureBracketMode);
+            } else {
+                ret = doExposureBracketing(capParams->mExposureBracketingValues,
+                                           capParams->mExposureGainBracketingValues,
+                                           capParams->mExposureGainBracketingModes,
+                                           capParams->mExposureBracketingValidEntries,
+                                           capParams->mBurstFrames,
+                                           capParams->mFlushShotConfigQueue,
+                                           capParams->mExposureBracketMode);
+            }
+
+            if ( ret != NO_ERROR ) {
+                CAMHAL_LOGEB("setExposureBracketing() failed %d", ret);
+                goto EXIT;
+            }
+        }
+    }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+    CameraHal::PPM("startImageCapture bracketing configs done: ", &mStartCapture);
+#endif
+
+    capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+    //OMX shutter callback events are only available in hq mode
+    if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode)) {
+        if ( NO_ERROR == ret )
+            {
+            ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+                                   (OMX_EVENTTYPE) OMX_EventIndexSettingChanged,
+                                   OMX_ALL,
+                                   OMX_TI_IndexConfigShutterCallback,
+                                   mStartCaptureSem);
+            }
+
+        if ( NO_ERROR == ret )
+            {
+            ret = setShutterCallback(true);
+            }
+
+    }
+
+    if (mPending3Asettings) {
+        apply3Asettings(mParameters3A);
+    }
+
+    if (ret == NO_ERROR) {
+        int index = 0;
+        int queued = 0;
+        android::AutoMutex lock(mBurstLock);
+
+        if (capParams->mFlushShotConfigQueue) {
+            // reset shot queue
+            mCapturedFrames = mBurstFrames;
+            mBurstFramesAccum = mBurstFrames;
+            mBurstFramesQueued = 0;
+            for ( int index = 0 ; index < capData->mNumBufs ; index++ ) {
+                if (OMXCameraPortParameters::FILL == capData->mStatus[index]) {
+                    mBurstFramesQueued++;
+                }
+            }
+        } else {
+            mCapturedFrames += mBurstFrames;
+            mBurstFramesAccum += mBurstFrames;
+        }
+        CAMHAL_LOGD("mBurstFramesQueued = %d mBurstFramesAccum = %d index = %d "
+                    "capData->mNumBufs = %d queued = %d capData->mMaxQueueable = %d",
+                    mBurstFramesQueued,mBurstFramesAccum,index,
+                    capData->mNumBufs,queued,capData->mMaxQueueable);
+        CAMHAL_LOGD("%d", (mBurstFramesQueued < mBurstFramesAccum)
+                          && (index < capData->mNumBufs)
+                          && (queued < capData->mMaxQueueable));
+        // Queue idle buffers until the burst quota, the buffer pool, or the
+        // port's queueable limit is exhausted, whichever comes first.
+        while ((mBurstFramesQueued < mBurstFramesAccum) &&
+               (index < capData->mNumBufs) &&
+               (queued < capData->mMaxQueueable)) {
+            if (capData->mStatus[index] == OMXCameraPortParameters::IDLE) {
+                CAMHAL_LOGDB("Queuing buffer on Capture port - %p",
+                             capData->mBufferHeader[index]->pBuffer);
+                capData->mStatus[index] = OMXCameraPortParameters::FILL;
+                eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+                                            (OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]);
+                GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+                mBurstFramesQueued++;
+                queued++;
+            } else if (OMXCameraPortParameters::FILL == capData->mStatus[index]) {
+                CAMHAL_LOGE("Not queueing index = %d", index);
+                queued++;
+            }
+            index++;
+        }
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+        if (mRawCapture) {
+            capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+
+            ///Queue all the buffers on capture port
+            for ( int index = 0 ; index < capData->mNumBufs ; index++ ) {
+                CAMHAL_LOGDB("Queuing buffer on Video port (for RAW capture) - 0x%x", ( unsigned int ) capData->mBufferHeader[index]->pBuffer);
+                capData->mStatus[index] = OMXCameraPortParameters::FILL;
+                eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+                                            (OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]);
+
+                GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+            }
+        }
+#endif
+
+        mWaitingForSnapshot = true;
+        mCaptureSignalled = false;
+
+        // Capturing command is not needed when capturing in video mode
+        // Only need to queue buffers on image ports
+        if ( ( mCapMode != VIDEO_MODE ) && ( mCapMode != VIDEO_MODE_HQ ) ) {
+            OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
+            bOMX.bEnabled = OMX_TRUE;
+
+            /// sending Capturing Command to the component
+            eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                                   OMX_IndexConfigCapturing,
+                                   &bOMX);
+
+            CAMHAL_LOGDB("Capture set - 0x%x", eError);
+
+            GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+        }
+    }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+    CameraHal::PPM("startImageCapture image buffers queued and capture enabled: ", &mStartCapture);
+#endif
+
+    //OMX shutter callback events are only available in hq mode
+
+    if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode))
+        {
+        if ( NO_ERROR == ret )
+            {
+            ret = mStartCaptureSem.WaitTimeout(OMX_CAPTURE_TIMEOUT);
+            }
+
+        //If something bad happened while we wait
+        if (mComponentState != OMX_StateExecuting)
+            {
+            CAMHAL_LOGEA("Invalid State after Image Capture Exitting!!!");
+            goto EXIT;
+            }
+
+        if ( NO_ERROR == ret )
+            {
+            CAMHAL_LOGDA("Shutter callback received");
+            notifyShutterSubscribers();
+            }
+        else
+            {
+            // Timed out: deregister before bailing so a late event cannot
+            // signal a stale semaphore.
+            ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+                               (OMX_EVENTTYPE) OMX_EventIndexSettingChanged,
+                               OMX_ALL,
+                               OMX_TI_IndexConfigShutterCallback,
+                               NULL);
+            CAMHAL_LOGEA("Timeout expired on shutter callback");
+            goto EXIT;
+            }
+
+        }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+    CameraHal::PPM("startImageCapture shutter event received: ", &mStartCapture);
+#endif
+
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+    CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+    mWaitingForSnapshot = false;
+    mCaptureSignalled = false;
+    performCleanupAfterError();
+    LOG_FUNCTION_NAME_EXIT;
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+/**
+ * Stop an ongoing still-image capture.
+ *
+ * Sequence: stop reprocess if active -> disarm the shutter callback
+ * (HQ modes) and wake any threads waiting on it, or flush the shot queue
+ * (CP_CAM) -> wait (with timeout) for the capture to drain -> clear
+ * OMX_IndexConfigCapturing (skipped in video modes) -> reset frame
+ * counters, and for CP_CAM also tear down the image port and release the
+ * image buffers.  Re-arms SetFocus so CAF restarts after the capture.
+ *
+ * @return NO_ERROR, or the translated OMX error; failures funnel through
+ *         EXIT, which releases buffers and runs error cleanup
+ */
+status_t OMXCameraAdapter::stopImageCapture()
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_CONFIG_BOOLEANTYPE bOMX;
+    OMXCameraPortParameters *imgCaptureData = NULL;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mImageCaptureLock);
+
+    if (!mCaptureConfigured) {
+        //Capture is not ongoing, return from here
+        return NO_ERROR;
+    }
+
+    if ( 0 != mStopCaptureSem.Count() ) {
+        CAMHAL_LOGEB("Error mStopCaptureSem semaphore count %d", mStopCaptureSem.Count());
+        goto EXIT;
+    }
+
+    // TODO(XXX): Reprocessing is currently piggy-backing capture commands
+    if (mAdapterState == REPROCESS_STATE) {
+        ret = stopReprocess();
+    }
+
+    //Disable the callback first
+    mWaitingForSnapshot = false;
+
+    // OMX shutter callback events are only available in hq mode
+    if ((HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode)) {
+        //Disable the callback first
+        ret = setShutterCallback(false);
+
+        // if anybody is waiting on the shutter callback
+        // signal them and then recreate the semaphore
+        if ( 0 != mStartCaptureSem.Count() ) {
+
+            // NOTE(review): this loop only runs when Count() is negative,
+            // i.e. presumably when threads are blocked on the semaphore
+            // (one signal per waiter) - confirm Count() semantics.
+            for (int i = mStartCaptureSem.Count(); i < 0; i++) {
+                ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+                                   (OMX_EVENTTYPE) OMX_EventIndexSettingChanged,
+                                   OMX_ALL,
+                                   OMX_TI_IndexConfigShutterCallback,
+                                   NULL );
+            }
+            mStartCaptureSem.Create(0);
+        }
+    } else if (CP_CAM == mCapMode) {
+        // Reset shot config queue
+        OMX_TI_CONFIG_ENQUEUESHOTCONFIGS resetShotConfigs;
+        OMX_INIT_STRUCT_PTR(&resetShotConfigs, OMX_TI_CONFIG_ENQUEUESHOTCONFIGS);
+
+        resetShotConfigs.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+        resetShotConfigs.bFlushQueue = OMX_TRUE;
+        resetShotConfigs.nNumConfigs = 0;
+        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                               ( OMX_INDEXTYPE ) OMX_TI_IndexConfigEnqueueShotConfigs,
+                               &resetShotConfigs);
+        if ( OMX_ErrorNone != eError ) {
+            CAMHAL_LOGEB("Error while reset shot config 0x%x", eError);
+            goto EXIT;
+        } else {
+            CAMHAL_LOGDA("Shot config reset successfully");
+        }
+    }
+
+    //Wait here for the capture to be done, in worst case timeout and proceed with cleanup
+    mCaptureSem.WaitTimeout(OMX_CAPTURE_TIMEOUT);
+
+    //If somethiing bad happened while we wait
+    if (mComponentState == OMX_StateInvalid)
+        {
+        CAMHAL_LOGEA("Invalid State Image Capture Stop Exitting!!!");
+        goto EXIT;
+        }
+
+    // Disable image capture
+    // Capturing command is not needed when capturing in video mode
+    if ( ( mCapMode != VIDEO_MODE ) && ( mCapMode != VIDEO_MODE_HQ ) ) {
+        OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
+        bOMX.bEnabled = OMX_FALSE;
+        imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                               OMX_IndexConfigCapturing,
+                               &bOMX);
+        if ( OMX_ErrorNone != eError ) {
+            CAMHAL_LOGDB("Error during SetConfig- 0x%x", eError);
+            ret = -1;
+            goto EXIT;
+        }
+    }
+
+    CAMHAL_LOGDB("Capture set - 0x%x", eError);
+
+    mCaptureSignalled = true; //set this to true if we exited because of timeout
+
+    {
+        android::AutoMutex lock(mFrameCountMutex);
+        mFrameCount = 0;
+        mFirstFrameCondition.broadcast();
+    }
+
+    // Stop is always signalled externally in CPCAM mode
+    // We need to make sure we really stop
+    if ((mCapMode == CP_CAM)) {
+        disableReprocess();
+        disableImagePort();
+        if ( NULL != mReleaseImageBuffersCallback ) {
+            mReleaseImageBuffersCallback(mReleaseData);
+        }
+    }
+
+    // Moving code for below commit here as an optimization for continuous capture,
+    // so focus settings don't have to reapplied after each capture
+    // c78fa2a CameraHAL: Always reset focus mode after capture
+    // Workaround when doing many consecutive shots, CAF wasn't getting restarted.
+    mPending3Asettings |= SetFocus;
+
+    mCapturedFrames = 0;
+    mBurstFramesAccum = 0;
+    mBurstFramesQueued = 0;
+
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+    CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+    //Release image buffers
+    if ( NULL != mReleaseImageBuffersCallback ) {
+        mReleaseImageBuffersCallback(mReleaseData);
+    }
+
+    {
+        android::AutoMutex lock(mFrameCountMutex);
+        mFrameCount = 0;
+        mFirstFrameCondition.broadcast();
+    }
+
+    performCleanupAfterError();
+    LOG_FUNCTION_NAME_EXIT;
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+/**
+ * Disable the OMX image (capture) port and free its buffers.
+ *
+ * Sends OMX_CommandPortDisable, frees every buffer header on the port
+ * (the disable only completes once all buffers are returned), waits for
+ * the port-disable event with a timeout, then releases the Ducati-side
+ * internal buffers.  When RAW capture is enabled, the same teardown is
+ * repeated for the video port.  Marks SetFormat pending, since port
+ * settings do not survive a disable.
+ *
+ * @return NO_ERROR, or the translated OMX error / timeout status
+ */
+status_t OMXCameraAdapter::disableImagePort(){
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMXCameraPortParameters *imgCaptureData = NULL;
+    OMXCameraPortParameters *imgRawCaptureData = NULL;
+
+    if (!mCaptureConfigured) {
+        return NO_ERROR;
+    }
+
+    mCaptureConfigured = false;
+    imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+    imgRawCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex]; // for RAW capture
+
+    ///Register for Image port Disable event
+    ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+                           OMX_EventCmdComplete,
+                           OMX_CommandPortDisable,
+                           mCameraAdapterParameters.mImagePortIndex,
+                           mStopCaptureSem);
+    ///Disable Capture Port
+    eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+                             OMX_CommandPortDisable,
+                             mCameraAdapterParameters.mImagePortIndex,
+                             NULL);
+
+    ///Free all the buffers on capture port
+    if (imgCaptureData) {
+        CAMHAL_LOGDB("Freeing buffer on Capture port - %d", imgCaptureData->mNumBufs);
+        for ( int index = 0 ; index < imgCaptureData->mNumBufs ; index++) {
+            CAMHAL_LOGDB("Freeing buffer on Capture port - 0x%x",
+                         ( unsigned int ) imgCaptureData->mBufferHeader[index]->pBuffer);
+            eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+                                    mCameraAdapterParameters.mImagePortIndex,
+                                    (OMX_BUFFERHEADERTYPE*)imgCaptureData->mBufferHeader[index]);
+
+            GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+        }
+    }
+    CAMHAL_LOGDA("Waiting for port disable");
+    //Wait for the image port enable event
+    ret = mStopCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+    //If somethiing bad happened while we wait
+    if (mComponentState == OMX_StateInvalid)
+        {
+        CAMHAL_LOGEA("Invalid State after Disable Image Port Exitting!!!");
+        goto EXIT;
+        }
+
+    if ( NO_ERROR == ret ) {
+        CAMHAL_LOGDA("Port disabled");
+    } else {
+        // Timed out: deregister so a late event cannot hit a stale semaphore.
+        ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+                           OMX_EventCmdComplete,
+                           OMX_CommandPortDisable,
+                           mCameraAdapterParameters.mImagePortIndex,
+                           NULL);
+        CAMHAL_LOGDA("Timeout expired on port disable");
+        goto EXIT;
+    }
+
+    deinitInternalBuffers(mCameraAdapterParameters.mImagePortIndex);
+
+    // since port settings are not persistent after port is disabled...
+    mPendingCaptureSettings |= SetFormat;
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+
+    if (mRawCapture) {
+        ///Register for Video port Disable event
+        ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+                               OMX_EventCmdComplete,
+                               OMX_CommandPortDisable,
+                               mCameraAdapterParameters.mVideoPortIndex,
+                               mStopCaptureSem);
+        ///Disable RawCapture Port
+        eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+                                 OMX_CommandPortDisable,
+                                 mCameraAdapterParameters.mVideoPortIndex,
+                                 NULL);
+
+        GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+        ///Free all the buffers on RawCapture port
+        if (imgRawCaptureData) {
+            CAMHAL_LOGDB("Freeing buffer on Capture port - %d", imgRawCaptureData->mNumBufs);
+            for ( int index = 0 ; index < imgRawCaptureData->mNumBufs ; index++) {
+                CAMHAL_LOGDB("Freeing buffer on Capture port - 0x%x", ( unsigned int ) imgRawCaptureData->mBufferHeader[index]->pBuffer);
+                eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+                                        mCameraAdapterParameters.mVideoPortIndex,
+                                        (OMX_BUFFERHEADERTYPE*)imgRawCaptureData->mBufferHeader[index]);
+
+                GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+            }
+        }
+        CAMHAL_LOGDA("Waiting for Video port disable");
+        //Wait for the image port enable event
+        mStopCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT);
+        CAMHAL_LOGDA("Video Port disabled");
+    }
+#endif
+
+EXIT:
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+/**
+ * Negotiate the component's internal (Ducati-side) buffer allocation for
+ * the given port.
+ *
+ * First programs the buffer-descriptor type for the port, then iterates
+ * the component's internal buffer requests: each is queried, retargeted
+ * to OMX_TI_BufferTypeHardwareReserved1D at the requested dimensions, and
+ * set back.  OMX_ErrorNoMore from the component means all requests have
+ * been satisfied.  The iteration is capped at one request (arbitrary
+ * limit, see comment below); exceeding it is treated as an error.
+ *
+ * @param portIndex OMX port whose internal buffers are being configured
+ * @return NO_ERROR on success, -EINVAL on any OMX failure or if the
+ *         component asks for more internal buffers than supported
+ */
+status_t OMXCameraAdapter::initInternalBuffers(OMX_U32 portIndex)
+{
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    int index = 0;
+    OMX_TI_PARAM_USEBUFFERDESCRIPTOR bufferdesc;
+
+    /* Indicate to Ducati that we're planning to use dynamically-mapped buffers */
+    // NOTE(review): bEnabled is OMX_FALSE here despite the comment above -
+    // presumably the eBufferType value alone carries the intent; confirm
+    // against the TI OMX extension docs.
+    OMX_INIT_STRUCT_PTR (&bufferdesc, OMX_TI_PARAM_USEBUFFERDESCRIPTOR);
+    bufferdesc.nPortIndex = portIndex;
+    bufferdesc.bEnabled = OMX_FALSE;
+    bufferdesc.eBufferType = OMX_TI_BufferTypePhysicalPageList;
+
+    eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+                              (OMX_INDEXTYPE) OMX_TI_IndexUseBufferDescriptor,
+                              &bufferdesc);
+    if (eError!=OMX_ErrorNone) {
+        CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+        return -EINVAL;
+    }
+
+    CAMHAL_LOGDA("Initializing internal buffers");
+    do {
+        OMX_TI_PARAM_COMPONENTBUFALLOCTYPE bufferalloc;
+        OMX_TI_PARAM_COMPONENTBUFALLOCTYPE bufferallocset;
+        OMX_INIT_STRUCT_PTR (&bufferalloc, OMX_TI_PARAM_COMPONENTBUFALLOCTYPE);
+        bufferalloc.nPortIndex = portIndex;
+        bufferalloc.nIndex = index;
+
+        eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
+                                   (OMX_INDEXTYPE)OMX_TI_IndexParamComponentBufferAllocation,
+                                   &bufferalloc);
+        if (eError == OMX_ErrorNoMore) {
+            // No more internal buffer requests: everything is configured.
+            return NO_ERROR;
+        }
+        if (eError != OMX_ErrorNone) {
+            CAMHAL_LOGE("GetParameter failed error = 0x%x", eError);
+            break;
+        }
+
+        CAMHAL_LOGDB("Requesting buftype %d of size %dx%d",
+                     (int)bufferalloc.eBufType, (int)bufferalloc.nAllocWidth,
+                     (int)bufferalloc.nAllocLines);
+
+        bufferalloc.eBufType = OMX_TI_BufferTypeHardwareReserved1D;
+
+        OMX_INIT_STRUCT_PTR (&bufferallocset, OMX_TI_PARAM_COMPONENTBUFALLOCTYPE);
+        bufferallocset.nPortIndex = portIndex;
+        bufferallocset.nIndex = index;
+        bufferallocset.eBufType = OMX_TI_BufferTypeHardwareReserved1D;
+        bufferallocset.nAllocWidth = bufferalloc.nAllocWidth;
+        bufferallocset.nAllocLines = bufferalloc.nAllocLines;
+
+        eError = OMX_SetParameter (mCameraAdapterParameters.mHandleComp,
+                                   (OMX_INDEXTYPE)OMX_TI_IndexParamComponentBufferAllocation,
+                                   &bufferallocset);
+        if (eError != OMX_ErrorNone) {
+            CAMHAL_LOGE("SetParameter failed, error=%08x", eError);
+            if (eError == OMX_ErrorNoMore) return NO_ERROR;
+            break;
+        }
+
+        index++;
+
+        /* 1 is an arbitrary limit */
+    } while (index < 1);
+
+    CAMHAL_LOGV("Ducati requested too many (>1) internal buffers");
+
+    return -EINVAL;
+}
+
+/**
+ * Revert the internal buffer configuration of the given port back to the
+ * component defaults (buffer-descriptor usage off, default buffer type,
+ * minimal 1x1 allocation).
+ *
+ * @param portIndex OMX port to deinitialize
+ * @return NO_ERROR on success, -EINVAL if either OMX_SetParameter fails
+ */
+status_t OMXCameraAdapter::deinitInternalBuffers(OMX_U32 portIndex)
+{
+    OMX_TI_PARAM_USEBUFFERDESCRIPTOR descParam;
+    OMX_TI_PARAM_COMPONENTBUFALLOCTYPE allocParam;
+    OMX_ERRORTYPE eError;
+
+    // Switch the port back to default (non-descriptor) buffer handling.
+    OMX_INIT_STRUCT_PTR (&descParam, OMX_TI_PARAM_USEBUFFERDESCRIPTOR);
+    descParam.nPortIndex = portIndex;
+    descParam.bEnabled = OMX_FALSE;
+    descParam.eBufferType = OMX_TI_BufferTypeDefault;
+
+    eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+                              (OMX_INDEXTYPE) OMX_TI_IndexUseBufferDescriptor,
+                              &descParam);
+    if (eError != OMX_ErrorNone) {
+        CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+        return -EINVAL;
+    }
+
+    // Reset the component-internal allocation to a minimal default buffer.
+    OMX_INIT_STRUCT_PTR (&allocParam, OMX_TI_PARAM_COMPONENTBUFALLOCTYPE);
+    allocParam.nPortIndex = portIndex;
+    allocParam.eBufType = OMX_TI_BufferTypeDefault;
+    allocParam.nAllocWidth = 1;
+    allocParam.nAllocLines = 1;
+
+    eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+                              (OMX_INDEXTYPE) OMX_TI_IndexParamComponentBufferAllocation,
+                              &allocParam);
+    if (eError != OMX_ErrorNone) {
+        CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+        return -EINVAL;
+    }
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Register the client's still-image buffers with the OMX image port and
+ * (re)configure + enable that port.
+ *
+ * Full reconfiguration (disable port, apply pending format/thumbnail/
+ * quality settings, re-register buffers, enable port) only happens when
+ * pending capture settings, a changed buffer type or a changed buffer
+ * count require it; otherwise only EXIF setup and (for CPCAM) the
+ * single-preview-mode selection run.
+ *
+ * @param bufArr array of capture buffers from the buffer provider
+ * @param num    number of buffers in bufArr (must be > 0)
+ * @return NO_ERROR on success, OMX/Android error code otherwise
+ */
+status_t OMXCameraAdapter::UseBuffersCapture(CameraBuffer * bufArr, int num)
+{
+    LOG_FUNCTION_NAME;
+
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMXCameraPortParameters * imgCaptureData = NULL;
+    // NOTE(review): 'cap' is never used in this function.
+    OMXCameraPortParameters cap;
+
+    imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+    // A non-zero semaphore count means a previous port-enable wait never
+    // completed; bail out rather than corrupt the event bookkeeping.
+    if ( 0 != mUseCaptureSem.Count() )
+        {
+        CAMHAL_LOGEB("Error mUseCaptureSem semaphore count %d", mUseCaptureSem.Count());
+        return BAD_VALUE;
+        }
+
+    CAMHAL_ASSERT(num > 0);
+
+    // if some setting that requires a SetParameter (including
+    // changing buffer types) then we need to disable the port
+    // before being allowed to apply the settings
+    if ((mPendingCaptureSettings & ECaptureParamSettings) ||
+        bufArr[0].type != imgCaptureData->mBufferType ||
+        imgCaptureData->mNumBufs != num) {
+        if (mCaptureConfigured) {
+            disableImagePort();
+            if ( NULL != mReleaseImageBuffersCallback ) {
+                mReleaseImageBuffersCallback(mReleaseData);
+            }
+        }
+
+        imgCaptureData->mBufferType = bufArr[0].type;
+        imgCaptureData->mNumBufs = num;
+
+        CAMHAL_LOGDB("Params Width = %d", (int)imgCaptureData->mWidth);
+        CAMHAL_LOGDB("Params Height = %d", (int)imgCaptureData->mHeight);
+
+        // NOTE(review): the SetFormat/SetThumb failure paths return
+        // directly while the SetQuality failure below goes through the
+        // EXIT cleanup path - confirm whether the direct returns should
+        // also perform cleanup.
+        if (mPendingCaptureSettings & SetFormat) {
+            mPendingCaptureSettings &= ~SetFormat;
+            ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *imgCaptureData);
+            if ( ret != NO_ERROR ) {
+                CAMHAL_LOGEB("setFormat() failed %d", ret);
+                LOG_FUNCTION_NAME_EXIT;
+                return ret;
+            }
+        }
+
+        if (mPendingCaptureSettings & SetThumb) {
+            mPendingCaptureSettings &= ~SetThumb;
+            ret = setThumbnailParams(mThumbWidth, mThumbHeight, mThumbQuality);
+            if ( NO_ERROR != ret) {
+                CAMHAL_LOGEB("Error configuring thumbnail size %x", ret);
+                return ret;
+            }
+        }
+
+        if (mPendingCaptureSettings & SetQuality) {
+            mPendingCaptureSettings &= ~SetQuality;
+            ret = setImageQuality(mPictureQuality);
+            if ( NO_ERROR != ret) {
+                CAMHAL_LOGEB("Error configuring image quality %x", ret);
+                goto EXIT;
+            }
+        }
+
+        // Configure DOMX to use either gralloc handles or vptrs
+        {
+            OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+            OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
+
+            domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+            if (bufArr[0].type == CAMERA_BUFFER_ANW) {
+                CAMHAL_LOGD ("Using ANW Buffers");
+                initInternalBuffers(mCameraAdapterParameters.mImagePortIndex);
+                domxUseGrallocHandles.bEnable = OMX_TRUE;
+            } else {
+                CAMHAL_LOGD ("Using ION Buffers");
+                domxUseGrallocHandles.bEnable = OMX_FALSE;
+            }
+
+            eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+                                      (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+            if (eError!=OMX_ErrorNone) {
+                CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+            }
+            GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+        }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+        CameraHal::PPM("Takepicture image port configuration: ", &bufArr->ppmStamp);
+
+#endif
+
+        // Register for Image port ENABLE event
+        ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+                               OMX_EventCmdComplete,
+                               OMX_CommandPortEnable,
+                               mCameraAdapterParameters.mImagePortIndex,
+                               mUseCaptureSem);
+
+        // Enable Capture Port
+        eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+                                 OMX_CommandPortEnable,
+                                 mCameraAdapterParameters.mImagePortIndex,
+                                 NULL);
+
+        CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
+        GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+        // Hand each client buffer to OMX; headers are stored per-index so
+        // callbacks can map OMX headers back to CameraBuffers.
+        for (int index = 0 ; index < imgCaptureData->mNumBufs ; index++) {
+            OMX_BUFFERHEADERTYPE *pBufferHdr;
+            CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d",
+                         (unsigned int)bufArr[index].opaque,
+                         (int)imgCaptureData->mBufSize);
+
+            eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp,
+                                   &pBufferHdr,
+                                   mCameraAdapterParameters.mImagePortIndex,
+                                   0,
+                                   imgCaptureData->mBufSize,
+                                   (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
+
+            CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
+            GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+            pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+            bufArr[index].index = index;
+            pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+            pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+            pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+            pBufferHdr->nVersion.s.nRevision = 0;
+            pBufferHdr->nVersion.s.nStep = 0;
+            imgCaptureData->mBufferHeader[index] = pBufferHdr;
+            imgCaptureData->mStatus[index] = OMXCameraPortParameters::IDLE;
+        }
+
+        // Wait for the image port enable event
+        CAMHAL_LOGDA("Waiting for port enable");
+        ret = mUseCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+        // If somethiing bad happened while we wait
+        if (mComponentState == OMX_StateInvalid) {
+            CAMHAL_LOGEA("Invalid State after Enable Image Port Exitting!!!");
+            goto EXIT;
+        }
+
+        if (ret != NO_ERROR) {
+            // Timed out: drop the registered event so a late completion
+            // does not post the semaphore behind our back.
+            ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+                               OMX_EventCmdComplete,
+                               OMX_CommandPortEnable,
+                               mCameraAdapterParameters.mImagePortIndex,
+                               NULL);
+            CAMHAL_LOGDA("Timeout expired on port enable");
+            goto EXIT;
+        }
+        CAMHAL_LOGDA("Port enabled");
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+        CameraHal::PPM("Takepicture image port enabled and buffers registered: ", &bufArr->ppmStamp);
+
+#endif
+
+        if (mNextState != LOADED_REPROCESS_CAPTURE_STATE) {
+            // Enable WB and vector shot extra data for metadata
+            setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
+            setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
+        }
+
+        // CPCam mode only supports vector shot
+        // Regular capture is not supported
+        if ( (mCapMode == CP_CAM) && (mNextState != LOADED_REPROCESS_CAPTURE_STATE) ) {
+            initVectorShot();
+        }
+
+        // Queueable buffers start with ref count 0 (free to queue).
+        mCaptureBuffersAvailable.clear();
+        for (unsigned int i = 0; i < imgCaptureData->mMaxQueueable; i++ ) {
+            mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 0);
+        }
+
+        // initial ref count for undeqeueued buffers is 1 since buffer provider
+        // is still holding on to it
+        for (unsigned int i = imgCaptureData->mMaxQueueable; i < imgCaptureData->mNumBufs; i++ ) {
+            mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 1);
+        }
+    }
+
+    if ( NO_ERROR == ret )
+        {
+        ret = setupEXIF();
+        if ( NO_ERROR != ret )
+            {
+            CAMHAL_LOGEB("Error configuring EXIF Buffer %x", ret);
+            }
+        }
+
+    // Choose proper single preview mode for cp capture (reproc or hs)
+    if (( NO_ERROR == ret) && (OMXCameraAdapter::CP_CAM == mCapMode)) {
+        OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE singlePrevMode;
+        OMX_INIT_STRUCT_PTR (&singlePrevMode, OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE);
+        if (mNextState == LOADED_CAPTURE_STATE) {
+            singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
+        } else if (mNextState == LOADED_REPROCESS_CAPTURE_STATE) {
+            singlePrevMode.eMode = OMX_TI_SinglePreviewMode_Reprocess;
+        } else {
+            CAMHAL_LOGE("Wrong state trying to start a capture in CPCAM mode?");
+            singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
+        }
+        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                               (OMX_INDEXTYPE) OMX_TI_IndexConfigSinglePreviewMode,
+                               &singlePrevMode);
+        if ( OMX_ErrorNone != eError ) {
+            CAMHAL_LOGEB("Error while configuring single preview mode 0x%x", eError);
+            ret = Utils::ErrorUtils::omxToAndroidError(eError);
+        } else {
+            CAMHAL_LOGDA("single preview mode configured successfully");
+        }
+    }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+    CameraHal::PPM("Takepicture extra configs on image port done: ", &bufArr->ppmStamp);
+
+#endif
+
+    mCaptureConfigured = true;
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+    // Raw capture goes through the video port, so the image port is not
+    // considered configured in that mode.
+    if (mRawCapture) {
+        mCaptureConfigured = false;
+    }
+#endif
+
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+    CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+    setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
+    // TODO: WA: if domx client disables VectShotInfo metadata on the image port, this causes
+    // VectShotInfo to be disabled internally on preview port also. Remove setting in OMXCapture
+    // setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_VectShotInfo);
+    setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
+    //Release image buffers
+    if ( NULL != mReleaseImageBuffersCallback ) {
+        mReleaseImageBuffersCallback(mReleaseData);
+    }
+    performCleanupAfterError();
+    LOG_FUNCTION_NAME_EXIT;
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+}
+/**
+ * Register the client-supplied raw-capture buffers with the video port
+ * and enable it.
+ *
+ * If a capture is in flight (mWaitingForSnapshot) the video port is first
+ * disabled so the new buffer set can be applied. On any OMX failure the
+ * error notifier is invoked.
+ *
+ * @param bufArr array of raw capture buffers to register
+ * @param num    number of entries in bufArr
+ * @return NO_ERROR on success or when capture is already configured,
+ *         error code otherwise
+ */
+status_t OMXCameraAdapter::UseBuffersRawCapture(CameraBuffer *bufArr, int num)
+{
+    LOG_FUNCTION_NAME;
+
+    // Initialize both status values: 'eError' is inspected at EXIT and
+    // 'ret' is returned, so neither may be left indeterminate.
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMXCameraPortParameters *imgRawCaptureData = NULL;
+    Utils::Semaphore camSem;
+
+    imgRawCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+
+    if (mCaptureConfigured) {
+        return NO_ERROR;
+    }
+
+    camSem.Create();
+
+    // mWaitingForSnapshot is true only when we're in the process of capturing
+    if (mWaitingForSnapshot) {
+        ///Register for Video port Disable event
+        ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+                               (OMX_EVENTTYPE) OMX_EventCmdComplete,
+                               OMX_CommandPortDisable,
+                               mCameraAdapterParameters.mVideoPortIndex,
+                               camSem);
+
+        ///Disable Capture Port
+        eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+                                 OMX_CommandPortDisable,
+                                 mCameraAdapterParameters.mVideoPortIndex,
+                                 NULL);
+
+        CAMHAL_LOGDA("Waiting for port disable");
+        // Wait for the video port disable event
+        camSem.Wait();
+        CAMHAL_LOGDA("Port disabled");
+    }
+
+    imgRawCaptureData->mNumBufs = num;
+
+    CAMHAL_LOGDB("RAW Max sensor width = %d", (int)imgRawCaptureData->mWidth);
+    CAMHAL_LOGDB("RAW Max sensor height = %d", (int)imgRawCaptureData->mHeight);
+
+    ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_VIDEO, *imgRawCaptureData);
+    if (ret != NO_ERROR) {
+        CAMHAL_LOGEB("setFormat() failed %d", ret);
+        LOG_FUNCTION_NAME_EXIT;
+        return ret;
+    }
+
+    ///Register for Video port ENABLE event
+    ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+                           (OMX_EVENTTYPE) OMX_EventCmdComplete,
+                           OMX_CommandPortEnable,
+                           mCameraAdapterParameters.mVideoPortIndex,
+                           camSem);
+
+    ///Enable Video Capture Port
+    eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+                             OMX_CommandPortEnable,
+                             mCameraAdapterParameters.mVideoPortIndex,
+                             NULL);
+
+    // Hand each client buffer to OMX and remember the returned headers.
+    mCaptureBuffersLength = (int)imgRawCaptureData->mBufSize;
+    for ( int index = 0 ; index < imgRawCaptureData->mNumBufs ; index++ ) {
+        OMX_BUFFERHEADERTYPE *pBufferHdr;
+        CAMHAL_LOGDB("OMX_UseBuffer rawCapture address: 0x%x, size = %d ",
+                     (unsigned int)bufArr[index].opaque,
+                     (int)imgRawCaptureData->mBufSize );
+
+        eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
+                                &pBufferHdr,
+                                mCameraAdapterParameters.mVideoPortIndex,
+                                0,
+                                mCaptureBuffersLength,
+                                (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
+        if (eError != OMX_ErrorNone) {
+            CAMHAL_LOGEB("OMX_UseBuffer = 0x%x", eError);
+        }
+
+        GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+        pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+        bufArr[index].index = index;
+        pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+        pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+        pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+        pBufferHdr->nVersion.s.nRevision = 0;
+        pBufferHdr->nVersion.s.nStep = 0;
+        imgRawCaptureData->mBufferHeader[index] = pBufferHdr;
+    }
+
+    // Wait for the video port enable event
+    CAMHAL_LOGDA("Waiting for port enable");
+    camSem.Wait();
+    CAMHAL_LOGDA("Port enabled");
+
+    if (NO_ERROR == ret) {
+        ret = setupEXIF();
+        if ( NO_ERROR != ret ) {
+            CAMHAL_LOGEB("Error configuring EXIF Buffer %x", ret);
+        }
+    }
+
+    mCapturedFrames = mBurstFrames;
+    mBurstFramesQueued = 0;
+    mCaptureConfigured = true;
+
+EXIT:
+
+    if (eError != OMX_ErrorNone) {
+        if ( NULL != mErrorNotifier ) {
+            mErrorNotifier->errorNotify(eError);
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXDCC.cpp b/camera/OMXCameraAdapter/OMXDCC.cpp
new file mode 100644
index 0000000..cbaf2ab
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXDCC.cpp
@@ -0,0 +1,227 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXDCC.cpp
+*
+* This file contains functionality for loading the DCC binaries.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+#include "OMXDCC.h"
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+namespace Ti {
+namespace Camera {
+
+#ifndef MOTOROLA_CAMERA
+// Root directory containing the DCC tuning binaries (writable data partition).
+android::String8 DCCHandler::DCCPath("/data/misc/camera/");
+#else
+// Motorola devices ship the DCC binaries read-only on the system partition.
+android::String8 DCCHandler::DCCPath("/system/etc/omapcam/");
+#endif
+// Set after the first load attempt (success or failure) so the DCC
+// binaries are pushed to the component at most once per process.
+bool DCCHandler::mDCCLoaded = false;
+
+/**
+ * Load the DCC binaries into the given OMX component, at most once per
+ * process lifetime.
+ *
+ * @param hComponent OMX component handle to receive the DCC data
+ * @return Android translation of the OMX error from the first (and only)
+ *         init attempt; NO_ERROR on later calls.
+ */
+status_t DCCHandler::loadDCC(OMX_HANDLETYPE hComponent)
+{
+    if (mDCCLoaded) {
+        // Already attempted - nothing more to do.
+        return Utils::ErrorUtils::omxToAndroidError(OMX_ErrorNone);
+    }
+
+    const OMX_ERRORTYPE dccError = initDCC(hComponent);
+    if (dccError != OMX_ErrorNone) {
+        CAMHAL_LOGE(" Error in DCC Init");
+    }
+
+    // Mark as loaded even on failure so the expensive init is not retried
+    // on every camera start.
+    mDCCLoaded = true;
+
+    return Utils::ErrorUtils::omxToAndroidError(dccError);
+}
+
+/**
+ * Query the component for its DCC URI list, read all matching DCC files
+ * from the filesystem into one page-aligned shared buffer and hand that
+ * buffer to the component.
+ *
+ * @param hComponent OMX component handle
+ * @return OMX_ErrorNone on success, an OMX error code otherwise
+ */
+OMX_ERRORTYPE DCCHandler::initDCC(OMX_HANDLETYPE hComponent)
+{
+    OMX_TI_PARAM_DCCURIINFO param;
+    OMX_U16 nIndex = 0;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    android::Vector<android::String8 *> dccDirs;
+    MemoryManager memMgr;
+    CameraBuffer *dccBuffer = NULL;
+    int dccbuf_size = 0;
+    OMX_INIT_STRUCT_PTR(&param, OMX_TI_PARAM_DCCURIINFO);
+
+    // Collect every DCC URI the component reports; the enumeration ends
+    // when OMX_GetParameter answers OMX_ErrorNoMore.
+    for (nIndex = 0; eError != OMX_ErrorNoMore; nIndex++) {
+        param.nIndex = nIndex;
+        eError = OMX_GetParameter(hComponent,
+                                  ( OMX_INDEXTYPE )OMX_TI_IndexParamDccUriInfo,
+                                  &param);
+
+        if (eError == OMX_ErrorNone) {
+            CAMHAL_LOGD("DCC URI's %s ", param.sDCCURI);
+            android::String8 *dccDir = new android::String8();
+            if ( NULL != dccDir ) {
+                dccDir->clear();
+                dccDir->append(DCCPath);
+                dccDir->append((const char *) param.sDCCURI);
+                dccDir->append("/");
+                dccDirs.add(dccDir);
+            } else {
+                CAMHAL_LOGE("DCC URI not allocated");
+                eError = OMX_ErrorInsufficientResources;
+                goto EXIT;
+            }
+        }
+    }
+
+    // OMX_ErrorNoMore only terminates the enumeration - not a failure.
+    if (eError == OMX_ErrorNoMore) {
+        eError = OMX_ErrorNone;
+    }
+
+    // First pass (NULL buffer): sum up the sizes of all DCC files.
+    dccbuf_size = readDCCdir(NULL, dccDirs);
+    if(dccbuf_size <= 0) {
+        CAMHAL_LOGE("No DCC files found, switching back to default DCC");
+        eError = OMX_ErrorInsufficientResources;
+        goto EXIT;
+    }
+    // Round up to whole 4 KiB pages for the shared buffer allocation.
+    dccbuf_size = ((dccbuf_size + 4095 )/4096)*4096;
+
+    if ( memMgr.initialize() != NO_ERROR ) {
+        CAMHAL_LOGE("DCC memory manager initialization failed!!!");
+        eError = OMX_ErrorInsufficientResources;
+        goto EXIT;
+    }
+
+    dccBuffer = memMgr.allocateBufferList(0, 0, NULL, dccbuf_size, 1);
+    if ( NULL == dccBuffer ) {
+        CAMHAL_LOGE("DCC buffer allocation failed!!!");
+        eError = OMX_ErrorInsufficientResources;
+        goto EXIT;
+    }
+
+    // Second pass: copy the actual file contents into the buffer.
+    dccbuf_size = readDCCdir(dccBuffer[0].mapped, dccDirs);
+    CAMHAL_ASSERT_X(dccbuf_size > 0,"ERROR in copy DCC files into buffer");
+
+    eError = sendDCCBufPtr(hComponent, dccBuffer);
+
+EXIT:
+
+    // Pop from the front until the vector is empty. (The previous
+    // 'for (i = 0; i < dccDirs.size(); i++)' loop shrank the vector while
+    // advancing the index, so it freed only half the entries and leaked
+    // the rest.)
+    while (!dccDirs.isEmpty()) {
+        android::String8 *dccDir = dccDirs.itemAt(0);
+        dccDirs.removeAt(0);
+        delete dccDir;
+    }
+
+    if ( NULL != dccBuffer ) {
+        memMgr.freeBufferList(dccBuffer);
+    }
+
+    return eError;
+}
+
+/**
+ * Hand the shared DCC buffer down to the OMX component (applies to all
+ * ports via OMX_ALL).
+ *
+ * @param hComponent OMX component handle
+ * @param dccBuffer  shared buffer holding the concatenated DCC files
+ * @return result of the OMX_SetParameter call
+ */
+OMX_ERRORTYPE DCCHandler::sendDCCBufPtr(OMX_HANDLETYPE hComponent,
+                                        CameraBuffer *dccBuffer)
+{
+    OMX_TI_CONFIG_SHAREDBUFFER sharedBufParam;
+
+    CAMHAL_ASSERT_X(dccBuffer != NULL,"ERROR invalid DCC buffer");
+
+    OMX_INIT_STRUCT_PTR(&sharedBufParam, OMX_TI_CONFIG_SHAREDBUFFER);
+    sharedBufParam.nPortIndex = OMX_ALL;
+    sharedBufParam.nSharedBuffSize = dccBuffer->size;
+    sharedBufParam.pSharedBuff = (OMX_U8 *) camera_buffer_get_omx_ptr(dccBuffer);
+
+    const OMX_ERRORTYPE eError = OMX_SetParameter(hComponent,
+            ( OMX_INDEXTYPE )OMX_TI_IndexParamDccUriBuffer,
+            &sharedBufParam);
+    if (eError != OMX_ErrorNone) {
+        CAMHAL_LOGEB(" Error in SetParam for DCC Uri Buffer 0x%x", eError);
+    }
+
+    return eError;
+}
+
+/**
+ * Walk every directory in dirPaths and either (buffer == NULL) sum up the
+ * sizes of all readable files, or (buffer != NULL) copy their contents
+ * back-to-back into the supplied buffer.
+ *
+ * Entries whose name starts with '.' are skipped (".", "..", hidden files).
+ *
+ * @param buffer   destination buffer, or NULL for the sizing pass
+ * @param dirPaths directories to scan; each path must end with "/"
+ * @return total number of bytes sized/copied, or 0 if any file failed to
+ *         open or read completely
+ */
+size_t DCCHandler::readDCCdir(OMX_PTR buffer,
+                              const android::Vector<android::String8 *> &dirPaths)
+{
+    FILE *pFile;
+    OMX_S32 lSize;
+    OMX_S32 dcc_buf_size = 0;
+    size_t result;
+    OMX_STRING filename;
+    android::String8 temp;
+    const char *dotdot = "..";
+    DIR *d;
+    struct dirent *dir;
+    OMX_U16 i = 0;
+    status_t stat = NO_ERROR;
+    size_t ret = 0;
+
+    for (i = 0; i < dirPaths.size(); i++) {
+        d = opendir(dirPaths.itemAt(i)->string());
+        if (d) {
+            // read each filename
+            while ((dir = readdir(d)) != NULL) {
+                filename = dir->d_name;
+                temp.clear();
+                temp.append(dirPaths.itemAt(i)->string());
+                temp.append(filename);
+                if ((*filename != *dotdot)) {
+                    pFile = fopen(temp.string(), "rb");
+                    if (pFile == NULL) {
+                        stat = -errno;
+                    } else {
+                        fseek(pFile, 0, SEEK_END);
+                        lSize = ftell(pFile);
+                        rewind(pFile);
+                        if (lSize < 0) {
+                            // ftell failed: flag the error and skip this
+                            // file instead of corrupting the running size.
+                            stat = INVALID_OPERATION;
+                            fclose(pFile);
+                            continue;
+                        }
+                        // buffer is not NULL then copy all the DCC profiles into buffer
+                        // else return the size of the DCC directory.
+                        if (buffer) {
+                            // copy file into the buffer:
+                            result = fread(buffer, 1, lSize, pFile);
+                            if (result != (size_t) lSize) {
+                                stat = INVALID_OPERATION;
+                            }
+                            // Advance the write position. Arithmetic on
+                            // void* is a GCC extension, so cast explicitly.
+                            buffer = (OMX_PTR)((OMX_U8 *)buffer + lSize);
+                        }
+                        // getting the size of the total dcc files available in FS
+                        dcc_buf_size = dcc_buf_size + lSize;
+                        fclose(pFile);
+                    }
+                }
+            }
+            closedir(d);
+        }
+    }
+
+    // Any failure along the way invalidates the whole result.
+    if (stat == NO_ERROR) {
+        ret = dcc_buf_size;
+    }
+
+    return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXDccDataSave.cpp b/camera/OMXCameraAdapter/OMXDccDataSave.cpp
new file mode 100644
index 0000000..7547743
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXDccDataSave.cpp
@@ -0,0 +1,361 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXDccDataSave.cpp
+*
+* This file contains functionality for handling DCC data save
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+
+namespace Ti {
+namespace Camera {
+
+/**
+ * Enable the DCC-data extra data on the given port so the component
+ * starts attaching updated DCC blobs to its buffers, and drop any DCC
+ * data cached from a previous session.
+ *
+ * @param omxHandle pointer to the OMX component handle
+ * @param portIndex port on which to enable the DCC extra data
+ * @return NO_ERROR on success, NO_INIT if the OMX call fails
+ */
+status_t OMXCameraAdapter::initDccFileDataSave(OMX_HANDLETYPE* omxHandle, int portIndex)
+{
+    OMX_CONFIG_EXTRADATATYPE extraDataControl;
+    OMX_ERRORTYPE eError;
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    OMX_INIT_STRUCT_PTR (&extraDataControl, OMX_CONFIG_EXTRADATATYPE);
+    extraDataControl.nPortIndex = portIndex;
+    extraDataControl.eExtraDataType = OMX_TI_DccData;
+    extraDataControl.bEnable = OMX_TRUE;
+
+    eError = OMX_SetConfig(*omxHandle,
+                           ( OMX_INDEXTYPE ) OMX_IndexConfigOtherExtraDataControl,
+                           &extraDataControl);
+    if ( OMX_ErrorNone != eError ) {
+        CAMHAL_LOGEB("Error while configuring dcc data overwrite extra data 0x%x",
+                     eError);
+        ret = NO_INIT;
+    }
+
+    // Forget any previously sniffed DCC payload.
+    // NOTE(review): mDccDataLock is not taken here, unlike in
+    // sniffDccFileDataSave - confirm callers serialize these paths.
+    if (mDccData.pData) {
+        free(mDccData.pData);
+        mDccData.pData = NULL;
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Extract the DCC-data extra data attached to the given buffer header
+ * (if any) and cache a private copy of it in mDccData.
+ *
+ * The variable-length payload is copied into a freshly malloc'd
+ * mDccData.pData; the previously cached copy (if any) is freed first.
+ *
+ * @param pBuffHeader OMX buffer header to inspect; must not be NULL
+ * @return -EINVAL for a NULL buffer header; NO_ERROR otherwise (missing
+ *         extra data or a failed allocation are deliberately not treated
+ *         as errors)
+ */
+status_t OMXCameraAdapter::sniffDccFileDataSave(OMX_BUFFERHEADERTYPE* pBuffHeader)
+{
+    OMX_OTHER_EXTRADATATYPE *extraData;
+    OMX_TI_DCCDATATYPE* dccData;
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    // Serializes access to mDccData against save/close paths.
+    android::AutoMutex lock(mDccDataLock);
+
+    if ( NULL == pBuffHeader ) {
+        CAMHAL_LOGEA("Invalid Buffer header");
+        LOG_FUNCTION_NAME_EXIT;
+        return -EINVAL;
+    }
+
+    extraData = getExtradata(pBuffHeader->pPlatformPrivate,
+                             (OMX_EXTRADATATYPE)OMX_TI_DccData);
+
+    if ( NULL != extraData ) {
+        CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
+                     extraData->nSize,
+                     sizeof(OMX_OTHER_EXTRADATATYPE),
+                     extraData->eType,
+                     extraData->nDataSize,
+                     extraData->nPortIndex,
+                     extraData->nVersion);
+    } else {
+        CAMHAL_LOGVA("Invalid OMX_TI_DCCDATATYPE");
+        LOG_FUNCTION_NAME_EXIT;
+        return NO_ERROR;
+    }
+
+    dccData = ( OMX_TI_DCCDATATYPE * ) extraData->data;
+
+    // NOTE(review): 'data' appears to be an inline array member, so this
+    // check presumably never fires - kept as a defensive guard.
+    if (NULL == dccData) {
+        CAMHAL_LOGVA("OMX_TI_DCCDATATYPE is not found in extra data");
+        LOG_FUNCTION_NAME_EXIT;
+        return NO_ERROR;
+    }
+
+    // Drop the previous cached copy; the memcpy below immediately
+    // replaces the now-stale pData pointer.
+    if (mDccData.pData) {
+        free(mDccData.pData);
+    }
+
+    // Copy the fixed-size header fields of the extra data into our cache.
+    memcpy(&mDccData, dccData, sizeof(mDccData));
+
+    // Payload size = total reported size minus the fixed part that
+    // precedes pData (offsetof-style computation via a null pointer).
+    int dccDataSize = (int)dccData->nSize - (int)(&(((OMX_TI_DCCDATATYPE*)0)->pData));
+
+    mDccData.pData = (OMX_PTR)malloc(dccDataSize);
+
+    if (NULL == mDccData.pData) {
+        CAMHAL_LOGVA("not enough memory for DCC data");
+        LOG_FUNCTION_NAME_EXIT;
+        return NO_ERROR;
+    }
+
+    // Copy the variable-length payload that starts at the pData field.
+    memcpy(mDccData.pData, &(dccData->pData), dccDataSize);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Recursively searches given directory contents for the correct DCC file.
+// The directory must be opened and its stream pointer + path passed
+// as arguments. As this function is called recursively, to avoid excessive
+// stack usage the path param is reused -> this MUST be char array with
+// enough length!!! (260 should suffice). Path must end with "/".
+// The directory must also be closed in the caller function.
+// If the correct camera DCC file is found (based on the OMX measurement data)
+// its file stream pointer is returned. NULL is returned otherwise
+FILE * OMXCameraAdapter::parseDCCsubDir(DIR *pDir, char *path)
+{
+    FILE *pFile;
+    DIR *pSubDir;
+    struct dirent *dirEntry;
+    // Remember where this recursion level's suffix starts so the shared
+    // path buffer can be restored after each entry.
+    int initialPathLength = strlen(path);
+
+    LOG_FUNCTION_NAME;
+
+    /* check each directory entry */
+    while ((dirEntry = readdir(pDir)) != NULL)
+    {
+        // Skips ".", ".." and hidden entries.
+        if (dirEntry->d_name[0] == '.')
+            continue;
+
+        // NOTE(review): strcat assumes the caller's buffer (~260 bytes,
+        // see fopenCameraDCC) is large enough for every nested entry -
+        // a sufficiently deep tree could overflow it. Confirm the DCC
+        // directory layout keeps paths short.
+        strcat(path, dirEntry->d_name);
+        // dirEntry might be sub directory -> check it
+        pSubDir = opendir(path);
+        if (pSubDir) {
+            // dirEntry is sub directory -> parse it
+            strcat(path, "/");
+            pFile = parseDCCsubDir(pSubDir, path);
+            closedir(pSubDir);
+            if (pFile) {
+                // the correct DCC file found!
+                LOG_FUNCTION_NAME_EXIT;
+                return pFile;
+            }
+        } else {
+            // dirEntry is file -> open it
+            pFile = fopen(path, "rb");
+            if (pFile) {
+                // now check if this is the correct DCC file for that camera
+                OMX_U32 dccFileIDword;
+                // The 12-byte file ID is compared word-by-word against the
+                // measurement data starting at nCameraModuleId.
+                OMX_U32 *dccFileDesc = (OMX_U32 *) &mDccData.nCameraModuleId;
+                int i;
+
+                // DCC file ID is 3 4-byte words
+                for (i = 0; i < 3; i++) {
+                    if (fread(&dccFileIDword, sizeof(OMX_U32), 1, pFile) != 1) {
+                        // file too short
+                        break;
+                    }
+                    if (dccFileIDword != dccFileDesc[i]) {
+                        // DCC file ID word i does not match
+                        break;
+                    }
+                }
+
+                fclose(pFile);
+                if (i == 3) {
+                    // the correct DCC file found!
+                    CAMHAL_LOGDB("DCC file to be updated: %s", path);
+                    // reopen it for modification
+                    pFile = fopen(path, "rb+");
+                    if (!pFile)
+                        CAMHAL_LOGEB("ERROR: DCC file %s failed to open for modification", path);
+                    LOG_FUNCTION_NAME_EXIT;
+                    return pFile;
+                }
+            } else {
+                CAMHAL_LOGEB("ERROR: Failed to open file %s for reading", path);
+            }
+        }
+        // restore original path
+        path[initialPathLength] = '\0';
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    // DCC file not found in this directory tree
+    return NULL;
+}
+
+// Finds the DCC file corresponding to the current camera based on the
+// OMX measurement data, opens it and returns the file stream pointer
+// (NULL on error or if file not found).
+// The folder string dccFolderPath must end with "/"
+FILE * OMXCameraAdapter::fopenCameraDCC(const char *dccFolderPath)
+{
+    FILE *pFile;
+    DIR *pDir;
+    char dccPath[260];
+
+    LOG_FUNCTION_NAME;
+
+    // Bounds-check before copying: dccPath is reused (and appended to) by
+    // parseDCCsubDir, so an oversized root path must be rejected instead
+    // of overflowing the fixed-size buffer.
+    if (strlen(dccFolderPath) >= sizeof(dccPath)) {
+        CAMHAL_LOGEB("ERROR: DCC directory path %s is too long", dccFolderPath);
+        LOG_FUNCTION_NAME_EXIT;
+        return NULL;
+    }
+    strcpy(dccPath, dccFolderPath);
+
+    pDir = opendir(dccPath);
+    if (!pDir) {
+        CAMHAL_LOGEB("ERROR: Opening DCC directory %s failed", dccPath);
+        LOG_FUNCTION_NAME_EXIT;
+        return NULL;
+    }
+
+    // Recursively search the tree for the file matching this camera's
+    // measurement data; dccPath doubles as the working path buffer.
+    pFile = parseDCCsubDir(pDir, dccPath);
+    closedir(pDir);
+    if (pFile) {
+        CAMHAL_LOGDB("DCC file %s opened for modification", dccPath);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return pFile;
+}
+
+// Positions the DCC file stream pointer to the correct offset within the
+// correct usecase based on the OMX mesurement data. Returns 0 on success
+status_t OMXCameraAdapter::fseekDCCuseCasePos(FILE *pFile)
+{
+    OMX_U32 dccNumUseCases = 0;
+    OMX_U32 dccUseCaseData[3];
+    OMX_U32 ucIndex;
+
+    LOG_FUNCTION_NAME;
+
+    // The use case table lives at a fixed offset of 80 bytes.
+    if (fseek(pFile, 80, SEEK_SET)) {
+        CAMHAL_LOGEA("ERROR: Unexpected end of DCC file");
+        LOG_FUNCTION_NAME_EXIT;
+        return -EINVAL;
+    }
+
+    if (fread(&dccNumUseCases, sizeof(OMX_U32), 1, pFile) != 1 ||
+        dccNumUseCases == 0) {
+        CAMHAL_LOGEA("ERROR: DCC file contains 0 use cases");
+        LOG_FUNCTION_NAME_EXIT;
+        return -EINVAL;
+    }
+
+    // Scan the 3-word use case descriptors (ID, offset, ...) for ours.
+    for (ucIndex = 0; ucIndex < dccNumUseCases; ucIndex++) {
+        if (fread(dccUseCaseData, sizeof(OMX_U32), 3, pFile) != 3) {
+            CAMHAL_LOGEA("ERROR: Unexpected end of DCC file");
+            LOG_FUNCTION_NAME_EXIT;
+            return -EINVAL;
+        }
+        if (dccUseCaseData[0] == mDccData.nUseCaseId) {
+            // DCC use case match!
+            break;
+        }
+    }
+
+    if (ucIndex == dccNumUseCases) {
+        CAMHAL_LOGEB("ERROR: Use case ID %lu not found in DCC file", mDccData.nUseCaseId);
+        LOG_FUNCTION_NAME_EXIT;
+        return -EINVAL;
+    }
+
+    // dccUseCaseData[1] is the offset to the beginning of the actual use
+    // case from the beginning of the file; mDccData.nOffset is the offset
+    // within that use case of the data to be modified.
+    if (fseek(pFile, dccUseCaseData[1] + mDccData.nOffset, SEEK_SET)) {
+        CAMHAL_LOGEA("ERROR: Error setting the correct offset");
+        LOG_FUNCTION_NAME_EXIT;
+        return -EINVAL;
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+/**
+ * Write the cached DCC payload (if any) back into the matching DCC file
+ * on disk, at the offset of the current use case.
+ *
+ * @return NO_ERROR always; failures are only logged
+ */
+status_t OMXCameraAdapter::saveDccFileDataSave()
+{
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mDccDataLock);
+
+    if (mDccData.pData) {
+        FILE *fd = fopenCameraDCC(DCC_PATH);
+        if (fd) {
+            if (!fseekDCCuseCasePos(fd)) {
+                // Payload size = total size minus the fixed header part
+                // preceding pData (offsetof-style computation).
+                const int dccDataSize = (int)mDccData.nSize - (int)(&(((OMX_TI_DCCDATATYPE*)0)->pData));
+
+                if (fwrite(mDccData.pData, dccDataSize, 1, fd) != 1) {
+                    CAMHAL_LOGEA("ERROR: Writing to DCC file failed");
+                } else {
+                    CAMHAL_LOGDA("DCC file successfully updated");
+                }
+            }
+            fclose(fd);
+        } else {
+            CAMHAL_LOGEA("ERROR: Correct DCC file not found or failed to open for modification");
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+/**
+ * Release the cached DCC payload, if any.
+ *
+ * @return NO_ERROR always
+ */
+status_t OMXCameraAdapter::closeDccFileDataSave()
+{
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mDccDataLock);
+
+    if (mDccData.pData != NULL) {
+        free(mDccData.pData);
+        mDccData.pData = NULL;
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXDefaults.cpp b/camera/OMXCameraAdapter/OMXDefaults.cpp
new file mode 100644
index 0000000..80805b2
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXDefaults.cpp
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXDefaults.cpp
+*
+* This file contains definitions are OMX Camera defaults
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+namespace Ti {
+namespace Camera {
+
+// Expands a numeric macro (e.g. MAX_NUM_FACES_SUPPORTED) into a string
+// literal at compile time via glibc's __STRING helper.
+#define __STRINGIFY(s) __STRING(s)
+
+// OMX Camera defaults
+
+// --- 3A / image tuning defaults ---
+const char OMXCameraAdapter::DEFAULT_ANTIBANDING[] = "auto";
+const char OMXCameraAdapter::DEFAULT_BRIGHTNESS[] = "50";
+const char OMXCameraAdapter::DEFAULT_CONTRAST[] = "100";
+const char OMXCameraAdapter::DEFAULT_EFFECT[] = "none";
+const char OMXCameraAdapter::DEFAULT_EV_COMPENSATION[] = "0";
+const char OMXCameraAdapter::DEFAULT_EV_STEP[] = "0.1";
+const char OMXCameraAdapter::DEFAULT_EXPOSURE_MODE[] = "auto";
+const char OMXCameraAdapter::DEFAULT_FLASH_MODE[] = "off";
+const char OMXCameraAdapter::DEFAULT_FOCUS_MODE_PREFERRED[] = "auto";
+const char OMXCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity";
+// Image post-processing: LDC+NSF by default, except on Motorola builds.
+#ifndef MOTOROLA_CAMERA
+const char OMXCameraAdapter::DEFAULT_IPP[] = "ldc-nsf";
+#else
+const char OMXCameraAdapter::DEFAULT_IPP[] = "off";
+#endif
+const char OMXCameraAdapter::DEFAULT_ISO_MODE[] = "auto";
+// --- Still capture / JPEG defaults ---
+const char OMXCameraAdapter::DEFAULT_JPEG_QUALITY[] = "95";
+const char OMXCameraAdapter::DEFAULT_THUMBNAIL_QUALITY[] = "60";
+const char OMXCameraAdapter::DEFAULT_THUMBNAIL_SIZE[] = "160x120";
+const char OMXCameraAdapter::DEFAULT_PICTURE_FORMAT[] = "jpeg";
+// Stereo (S3D) layouts: tb = top/bottom, ss = side-by-side.
+const char OMXCameraAdapter::DEFAULT_S3D_PICTURE_LAYOUT[] = "tb-full";
+const char OMXCameraAdapter::DEFAULT_PICTURE_SIZE[] = "320x240";
+const char OMXCameraAdapter::DEFAULT_PICTURE_SS_SIZE[] = "640x240";
+const char OMXCameraAdapter::DEFAULT_PICTURE_TB_SIZE[] = "320x480";
+// --- Preview defaults ---
+const char OMXCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv420sp";
+const char OMXCameraAdapter::DEFAULT_FRAMERATE[] = "30";
+const char OMXCameraAdapter::DEFAULT_S3D_PREVIEW_LAYOUT[] = "tb-subsampled";
+const char OMXCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "640x480";
+const char OMXCameraAdapter::DEFAULT_PREVIEW_SS_SIZE[] = "1280x480";
+const char OMXCameraAdapter::DEFAULT_PREVIEW_TB_SIZE[] = "640x960";
+// --- Buffer counts ---
+const char OMXCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6";
+const char OMXCameraAdapter::DEFAULT_NUM_PIC_BUFS[] = "1";
+// --- Misc defaults ---
+const char OMXCameraAdapter::DEFAULT_SATURATION[] = "100";
+const char OMXCameraAdapter::DEFAULT_SCENE_MODE[] = "auto";
+const char OMXCameraAdapter::DEFAULT_SHARPNESS[] = "100";
+const char * OMXCameraAdapter::DEFAULT_VSTAB = android::CameraParameters::FALSE;
+const char * OMXCameraAdapter::DEFAULT_VNF = android::CameraParameters::FALSE;
+const char OMXCameraAdapter::DEFAULT_WB[] = "auto";
+const char OMXCameraAdapter::DEFAULT_ZOOM[] = "0";
+// Hardware face detection limit comes from the build-time macro.
+const char OMXCameraAdapter::DEFAULT_MAX_FD_HW_FACES[] = __STRINGIFY(MAX_NUM_FACES_SUPPORTED);
+const char OMXCameraAdapter::DEFAULT_MAX_FD_SW_FACES[] = "0";
+// Horizontal/vertical view angles in degrees.
+const char OMXCameraAdapter::DEFAULT_HOR_ANGLE[] = "54.8";
+const char OMXCameraAdapter::DEFAULT_VER_ANGLE[] = "42.5";
+const char * OMXCameraAdapter::DEFAULT_AE_LOCK = android::CameraParameters::FALSE;
+const char * OMXCameraAdapter::DEFAULT_AWB_LOCK = android::CameraParameters::FALSE;
+const char OMXCameraAdapter::DEFAULT_VIDEO_SIZE[] = "1920x1080";
+const char OMXCameraAdapter::DEFAULT_SENSOR_ORIENTATION[] = "0";
+// --- Stereo convergence / EXIF defaults ---
+const char OMXCameraAdapter::DEFAULT_AUTOCONVERGENCE_MODE[] = "frame";
+const char OMXCameraAdapter::DEFAULT_MANUAL_CONVERGENCE[] = "0";
+const char * OMXCameraAdapter::DEFAULT_MECHANICAL_MISALIGNMENT_CORRECTION_MODE = android::CameraParameters::TRUE;
+const char OMXCameraAdapter::DEFAULT_EXIF_MAKE[] = "default_make";
+const char OMXCameraAdapter::DEFAULT_EXIF_MODEL[] = "default_model";
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXExif.cpp b/camera/OMXCameraAdapter/OMXExif.cpp
new file mode 100644
index 0000000..b4fde5a
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXExif.cpp
@@ -0,0 +1,838 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXExif.cpp
+*
+* This file contains functionality for handling EXIF insertion.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include <math.h>
+
+namespace Ti {
+namespace Camera {
+
+status_t OMXCameraAdapter::setParametersEXIF(const android::CameraParameters &params,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+ const char *valstr = NULL;
+ double gpsPos;
+
+ LOG_FUNCTION_NAME;
+
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_LATITUDE) ) != NULL )
+ {
+ gpsPos = strtod(valstr, NULL);
+
+ if ( convertGPSCoord(gpsPos,
+ mEXIFData.mGPSData.mLatDeg,
+ mEXIFData.mGPSData.mLatMin,
+ mEXIFData.mGPSData.mLatSec,
+ mEXIFData.mGPSData.mLatSecDiv ) == NO_ERROR )
+ {
+
+ if ( 0 < gpsPos )
+ {
+ strncpy(mEXIFData.mGPSData.mLatRef, GPS_NORTH_REF, GPS_REF_SIZE);
+ }
+ else
+ {
+ strncpy(mEXIFData.mGPSData.mLatRef, GPS_SOUTH_REF, GPS_REF_SIZE);
+ }
+
+ mEXIFData.mGPSData.mLatValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mLatValid = false;
+ }
+ }
+ else
+ {
+ mEXIFData.mGPSData.mLatValid = false;
+ }
+
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_LONGITUDE) ) != NULL )
+ {
+ gpsPos = strtod(valstr, NULL);
+
+ if ( convertGPSCoord(gpsPos,
+ mEXIFData.mGPSData.mLongDeg,
+ mEXIFData.mGPSData.mLongMin,
+ mEXIFData.mGPSData.mLongSec,
+ mEXIFData.mGPSData.mLongSecDiv) == NO_ERROR )
+ {
+
+ if ( 0 < gpsPos )
+ {
+ strncpy(mEXIFData.mGPSData.mLongRef, GPS_EAST_REF, GPS_REF_SIZE);
+ }
+ else
+ {
+ strncpy(mEXIFData.mGPSData.mLongRef, GPS_WEST_REF, GPS_REF_SIZE);
+ }
+
+ mEXIFData.mGPSData.mLongValid= true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mLongValid = false;
+ }
+ }
+ else
+ {
+ mEXIFData.mGPSData.mLongValid = false;
+ }
+
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_ALTITUDE) ) != NULL )
+ {
+ gpsPos = strtod(valstr, NULL);
+ mEXIFData.mGPSData.mAltitude = floor(fabs(gpsPos));
+ if (gpsPos < 0) {
+ mEXIFData.mGPSData.mAltitudeRef = 1;
+ } else {
+ mEXIFData.mGPSData.mAltitudeRef = 0;
+ }
+ mEXIFData.mGPSData.mAltitudeValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mAltitudeValid= false;
+ }
+
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ {
+ long gpsTimestamp = strtol(valstr, NULL, 10);
+ struct tm *timeinfo = gmtime( ( time_t * ) & (gpsTimestamp) );
+ if ( NULL != timeinfo )
+ {
+ mEXIFData.mGPSData.mTimeStampHour = timeinfo->tm_hour;
+ mEXIFData.mGPSData.mTimeStampMin = timeinfo->tm_min;
+ mEXIFData.mGPSData.mTimeStampSec = timeinfo->tm_sec;
+ mEXIFData.mGPSData.mTimeStampValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mTimeStampValid = false;
+ }
+ }
+ else
+ {
+ mEXIFData.mGPSData.mTimeStampValid = false;
+ }
+
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP) ) != NULL )
+ {
+ long gpsDatestamp = strtol(valstr, NULL, 10);
+ struct tm *timeinfo = gmtime( ( time_t * ) & (gpsDatestamp) );
+ if ( NULL != timeinfo )
+ {
+ strftime(mEXIFData.mGPSData.mDatestamp, GPS_DATESTAMP_SIZE, "%Y:%m:%d", timeinfo);
+ mEXIFData.mGPSData.mDatestampValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mDatestampValid = false;
+ }
+ }
+ else
+ {
+ mEXIFData.mGPSData.mDatestampValid = false;
+ }
+
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD) ) != NULL )
+ {
+ strncpy(mEXIFData.mGPSData.mProcMethod, valstr, GPS_PROCESSING_SIZE-1);
+ mEXIFData.mGPSData.mProcMethodValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mProcMethodValid = false;
+ }
+
+ if( ( valstr = params.get(TICameraParameters::KEY_GPS_MAPDATUM) ) != NULL )
+ {
+ strncpy(mEXIFData.mGPSData.mMapDatum, valstr, GPS_MAPDATUM_SIZE-1);
+ mEXIFData.mGPSData.mMapDatumValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mMapDatumValid = false;
+ }
+
+ if( ( valstr = params.get(TICameraParameters::KEY_GPS_VERSION) ) != NULL )
+ {
+ strncpy(mEXIFData.mGPSData.mVersionId, valstr, GPS_VERSION_SIZE-1);
+ mEXIFData.mGPSData.mVersionIdValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mVersionIdValid = false;
+ }
+
+ if( ( valstr = params.get(TICameraParameters::KEY_EXIF_MODEL ) ) != NULL )
+ {
+ CAMHAL_LOGVB("EXIF Model: %s", valstr);
+ strncpy(mEXIFData.mModel, valstr, EXIF_MODEL_SIZE - 1);
+ mEXIFData.mModelValid= true;
+ }
+ else
+ {
+ mEXIFData.mModelValid= false;
+ }
+
+ if( ( valstr = params.get(TICameraParameters::KEY_EXIF_MAKE ) ) != NULL )
+ {
+ CAMHAL_LOGVB("EXIF Make: %s", valstr);
+ strncpy(mEXIFData.mMake, valstr, EXIF_MAKE_SIZE - 1);
+ mEXIFData.mMakeValid = true;
+ }
+ else
+ {
+ mEXIFData.mMakeValid= false;
+ }
+
+
+ if( ( valstr = params.get(android::CameraParameters::KEY_FOCAL_LENGTH) ) != NULL ) {
+ CAMHAL_LOGVB("EXIF Focal length: %s", valstr);
+ ExifElementsTable::stringToRational(valstr,
+ &mEXIFData.mFocalNum,
+ &mEXIFData.mFocalDen);
+ } else {
+ mEXIFData.mFocalNum = 0;
+ mEXIFData.mFocalDen = 0;
+ }
+
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setupEXIF()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
+ OMX_TI_CONFIG_EXIF_TAGS *exifTags;
+ unsigned char *startPtr = NULL;
+ unsigned char *sharedPtr = NULL;
+ struct timeval sTv;
+ struct tm *pTime;
+ OMXCameraPortParameters * capData = NULL;
+ CameraBuffer *memmgr_buf_array;
+ int buf_size = 0;
+
+ LOG_FUNCTION_NAME;
+
+ sharedBuffer.pSharedBuff = NULL;
+ capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
+ sharedBuffer.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ //We allocate the shared buffer dynamically based on the
+ //requirements of the EXIF tags. The additional buffers will
+ //get stored after the EXIF configuration structure and the pointers
+ //will contain offsets within the shared buffer itself.
+ buf_size = sizeof(OMX_TI_CONFIG_EXIF_TAGS) +
+ ( EXIF_MODEL_SIZE ) +
+ ( EXIF_MAKE_SIZE ) +
+ ( EXIF_DATE_TIME_SIZE ) +
+ ( GPS_MAPDATUM_SIZE ) +
+ ( GPS_PROCESSING_SIZE );
+ buf_size = ((buf_size+4095)/4096)*4096;
+ sharedBuffer.nSharedBuffSize = buf_size;
+
+ memmgr_buf_array = mMemMgr.allocateBufferList(0, 0, NULL, buf_size, 1);
+ sharedBuffer.pSharedBuff = (OMX_U8*)camera_buffer_get_omx_ptr(&memmgr_buf_array[0]);
+ startPtr = ( OMX_U8 * ) memmgr_buf_array[0].opaque;
+
+ if ( NULL == startPtr)
+ {
+ CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
+ ret = -1;
+ }
+
+ //Extra data begins right after the EXIF configuration structure.
+ sharedPtr = startPtr + sizeof(OMX_TI_CONFIG_EXIF_TAGS);
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ exifTags = ( OMX_TI_CONFIG_EXIF_TAGS * ) startPtr;
+ OMX_INIT_STRUCT_PTR (exifTags, OMX_TI_CONFIG_EXIF_TAGS);
+ exifTags->nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigExifTags,
+ &sharedBuffer );
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while retrieving EXIF configuration structure 0x%x", eError);
+ ret = -1;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusModel ) &&
+ ( mEXIFData.mModelValid ) )
+ {
+ strncpy(( char * ) sharedPtr,
+ mEXIFData.mModel,
+ EXIF_MODEL_SIZE - 1);
+
+ exifTags->pModelBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
+ exifTags->ulModelBuffSizeBytes = strlen((char*)sharedPtr) + 1;
+ sharedPtr += EXIF_MODEL_SIZE;
+ exifTags->eStatusModel = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusMake) &&
+ ( mEXIFData.mMakeValid ) )
+ {
+ strncpy( ( char * ) sharedPtr,
+ mEXIFData.mMake,
+ EXIF_MAKE_SIZE - 1);
+
+ exifTags->pMakeBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
+ exifTags->ulMakeBuffSizeBytes = strlen((char*)sharedPtr) + 1;
+ sharedPtr += EXIF_MAKE_SIZE;
+ exifTags->eStatusMake = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusFocalLength ))
+ {
+ if (mEXIFData.mFocalNum || mEXIFData.mFocalDen ) {
+ exifTags->ulFocalLength[0] = (OMX_U32) mEXIFData.mFocalNum;
+ exifTags->ulFocalLength[1] = (OMX_U32) mEXIFData.mFocalDen;
+ CAMHAL_LOGVB("exifTags->ulFocalLength = [%u] [%u]",
+ (unsigned int)(exifTags->ulFocalLength[0]),
+ (unsigned int)(exifTags->ulFocalLength[1]));
+ exifTags->eStatusFocalLength = OMX_TI_TagUpdated;
+ }
+ }
+
+ if ( OMX_TI_TagReadWrite == exifTags->eStatusDateTime )
+ {
+ int status = gettimeofday (&sTv, NULL);
+ pTime = localtime (&sTv.tv_sec);
+ if ( ( 0 == status ) && ( NULL != pTime ) )
+ {
+ snprintf(( char * ) sharedPtr, EXIF_DATE_TIME_SIZE,
+ "%04d:%02d:%02d %02d:%02d:%02d",
+ pTime->tm_year + 1900,
+ pTime->tm_mon + 1,
+ pTime->tm_mday,
+ pTime->tm_hour,
+ pTime->tm_min,
+ pTime->tm_sec );
+ }
+
+ exifTags->pDateTimeBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
+ sharedPtr += EXIF_DATE_TIME_SIZE;
+ exifTags->ulDateTimeBuffSizeBytes = EXIF_DATE_TIME_SIZE;
+ exifTags->eStatusDateTime = OMX_TI_TagUpdated;
+ }
+
+ if ( OMX_TI_TagReadWrite == exifTags->eStatusImageWidth )
+ {
+ exifTags->ulImageWidth = capData->mWidth;
+ exifTags->eStatusImageWidth = OMX_TI_TagUpdated;
+ }
+
+ if ( OMX_TI_TagReadWrite == exifTags->eStatusImageHeight )
+ {
+ exifTags->ulImageHeight = capData->mHeight;
+ exifTags->eStatusImageHeight = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsLatitude ) &&
+ ( mEXIFData.mGPSData.mLatValid ) )
+ {
+ exifTags->ulGpsLatitude[0] = abs(mEXIFData.mGPSData.mLatDeg);
+ exifTags->ulGpsLatitude[2] = abs(mEXIFData.mGPSData.mLatMin);
+ exifTags->ulGpsLatitude[4] = abs(mEXIFData.mGPSData.mLatSec);
+ exifTags->ulGpsLatitude[1] = 1;
+ exifTags->ulGpsLatitude[3] = 1;
+ exifTags->ulGpsLatitude[5] = abs(mEXIFData.mGPSData.mLatSecDiv);
+ exifTags->eStatusGpsLatitude = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpslatitudeRef ) &&
+ ( mEXIFData.mGPSData.mLatValid ) )
+ {
+ exifTags->cGpslatitudeRef[0] = ( OMX_S8 ) mEXIFData.mGPSData.mLatRef[0];
+ exifTags->cGpslatitudeRef[1] = '\0';
+ exifTags->eStatusGpslatitudeRef = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsLongitude ) &&
+ ( mEXIFData.mGPSData.mLongValid ) )
+ {
+ exifTags->ulGpsLongitude[0] = abs(mEXIFData.mGPSData.mLongDeg);
+ exifTags->ulGpsLongitude[2] = abs(mEXIFData.mGPSData.mLongMin);
+ exifTags->ulGpsLongitude[4] = abs(mEXIFData.mGPSData.mLongSec);
+ exifTags->ulGpsLongitude[1] = 1;
+ exifTags->ulGpsLongitude[3] = 1;
+ exifTags->ulGpsLongitude[5] = abs(mEXIFData.mGPSData.mLongSecDiv);
+ exifTags->eStatusGpsLongitude = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsLongitudeRef ) &&
+ ( mEXIFData.mGPSData.mLongValid ) )
+ {
+ exifTags->cGpsLongitudeRef[0] = ( OMX_S8 ) mEXIFData.mGPSData.mLongRef[0];
+ exifTags->cGpsLongitudeRef[1] = '\0';
+ exifTags->eStatusGpsLongitudeRef = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsAltitude ) &&
+ ( mEXIFData.mGPSData.mAltitudeValid) )
+ {
+ exifTags->ulGpsAltitude[0] = ( OMX_U32 ) mEXIFData.mGPSData.mAltitude;
+ exifTags->ulGpsAltitude[1] = 1;
+ exifTags->eStatusGpsAltitude = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsAltitudeRef ) &&
+ ( mEXIFData.mGPSData.mAltitudeValid) )
+ {
+ exifTags->ucGpsAltitudeRef = (OMX_U8) mEXIFData.mGPSData.mAltitudeRef;
+ exifTags->eStatusGpsAltitudeRef = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsMapDatum ) &&
+ ( mEXIFData.mGPSData.mMapDatumValid ) )
+ {
+ memcpy(sharedPtr, mEXIFData.mGPSData.mMapDatum, GPS_MAPDATUM_SIZE);
+
+ exifTags->pGpsMapDatumBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
+ exifTags->ulGpsMapDatumBuffSizeBytes = GPS_MAPDATUM_SIZE;
+ exifTags->eStatusGpsMapDatum = OMX_TI_TagUpdated;
+ sharedPtr += GPS_MAPDATUM_SIZE;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsProcessingMethod ) &&
+ ( mEXIFData.mGPSData.mProcMethodValid ) )
+ {
+ exifTags->pGpsProcessingMethodBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
+ memcpy(sharedPtr, ExifAsciiPrefix, sizeof(ExifAsciiPrefix));
+ sharedPtr += sizeof(ExifAsciiPrefix);
+
+ memcpy(sharedPtr,
+ mEXIFData.mGPSData.mProcMethod,
+ ( GPS_PROCESSING_SIZE - sizeof(ExifAsciiPrefix) ) );
+ exifTags->ulGpsProcessingMethodBuffSizeBytes = GPS_PROCESSING_SIZE;
+ exifTags->eStatusGpsProcessingMethod = OMX_TI_TagUpdated;
+ sharedPtr += GPS_PROCESSING_SIZE;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsVersionId ) &&
+ ( mEXIFData.mGPSData.mVersionIdValid ) )
+ {
+ exifTags->ucGpsVersionId[0] = ( OMX_U8 ) mEXIFData.mGPSData.mVersionId[0];
+ exifTags->ucGpsVersionId[1] = ( OMX_U8 ) mEXIFData.mGPSData.mVersionId[1];
+ exifTags->ucGpsVersionId[2] = ( OMX_U8 ) mEXIFData.mGPSData.mVersionId[2];
+ exifTags->ucGpsVersionId[3] = ( OMX_U8 ) mEXIFData.mGPSData.mVersionId[3];
+ exifTags->eStatusGpsVersionId = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsTimeStamp ) &&
+ ( mEXIFData.mGPSData.mTimeStampValid ) )
+ {
+ exifTags->ulGpsTimeStamp[0] = mEXIFData.mGPSData.mTimeStampHour;
+ exifTags->ulGpsTimeStamp[2] = mEXIFData.mGPSData.mTimeStampMin;
+ exifTags->ulGpsTimeStamp[4] = mEXIFData.mGPSData.mTimeStampSec;
+ exifTags->ulGpsTimeStamp[1] = 1;
+ exifTags->ulGpsTimeStamp[3] = 1;
+ exifTags->ulGpsTimeStamp[5] = 1;
+ exifTags->eStatusGpsTimeStamp = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsDateStamp ) &&
+ ( mEXIFData.mGPSData.mDatestampValid ) )
+ {
+ strncpy( ( char * ) exifTags->cGpsDateStamp,
+ ( char * ) mEXIFData.mGPSData.mDatestamp,
+ GPS_DATESTAMP_SIZE );
+ exifTags->eStatusGpsDateStamp = OMX_TI_TagUpdated;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigExifTags,
+ &sharedBuffer );
+
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while setting EXIF configuration 0x%x", eError);
+ ret = -1;
+ }
+ }
+
+ if ( NULL != memmgr_buf_array )
+ {
+ mMemMgr.freeBufferList(memmgr_buf_array);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
/**
 * Fills an ExifElementsTable (libjpeg-based EXIF writer path) from the cached
 * mEXIFData plus per-frame ancillary/white-balance metadata delivered by the
 * OMX component.
 *
 * @param exifTable         destination table; elements are inserted in order.
 * @param pAncillaryData    optional per-frame data (zoom, exposure, ISO, ...).
 * @param pWhiteBalanceData optional WB result, used for the LightSource tag.
 * @return first insertElement() error, or NO_ERROR.
 */
status_t OMXCameraAdapter::setupEXIF_libjpeg(ExifElementsTable* exifTable,
                                             OMX_TI_ANCILLARYDATATYPE* pAncillaryData,
                                             OMX_TI_WHITEBALANCERESULTTYPE* pWhiteBalanceData)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone; // NOTE(review): unused in this function
    struct timeval sTv;
    struct tm *pTime;
    OMXCameraPortParameters * capData = NULL;

    LOG_FUNCTION_NAME;

    // Image port parameters supply the capture dimensions.
    capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];

    // Each insertion is gated on "no previous error"; the first failure
    // short-circuits all following insertions.
    if ((NO_ERROR == ret) && (mEXIFData.mModelValid)) {
        ret = exifTable->insertElement(TAG_MODEL, mEXIFData.mModel);
    }

    if ((NO_ERROR == ret) && (mEXIFData.mMakeValid)) {
        ret = exifTable->insertElement(TAG_MAKE, mEXIFData.mMake);
    }

    // Focal length as a "num/den" rational string.
    if ((NO_ERROR == ret)) {
        if (mEXIFData.mFocalNum || mEXIFData.mFocalDen) {
            char temp_value[256]; // arbitrarily long string
            snprintf(temp_value,
                     sizeof(temp_value)/sizeof(char),
                     "%u/%u",
                     mEXIFData.mFocalNum,
                     mEXIFData.mFocalDen);
            ret = exifTable->insertElement(TAG_FOCALLENGTH, temp_value);

        }
    }

    // Capture timestamp, local time, EXIF "YYYY:MM:DD HH:MM:SS" layout.
    if ((NO_ERROR == ret)) {
        int status = gettimeofday (&sTv, NULL);
        pTime = localtime (&sTv.tv_sec);
        char temp_value[EXIF_DATE_TIME_SIZE + 1];
        if ((0 == status) && (NULL != pTime)) {
            snprintf(temp_value, EXIF_DATE_TIME_SIZE,
                     "%04d:%02d:%02d %02d:%02d:%02d",
                     pTime->tm_year + 1900,
                     pTime->tm_mon + 1,
                     pTime->tm_mday,
                     pTime->tm_hour,
                     pTime->tm_min,
                     pTime->tm_sec );
            ret = exifTable->insertElement(TAG_DATETIME, temp_value);
        }
    }

    // NOTE(review): temp_value[5] only fits four digits plus NUL — a width or
    // height of 5+ digits would be silently truncated by snprintf. Confirm
    // the maximum supported sensor dimensions fit.
    if ((NO_ERROR == ret)) {
        char temp_value[5];
        snprintf(temp_value, sizeof(temp_value)/sizeof(char), "%lu", capData->mWidth);
        ret = exifTable->insertElement(TAG_IMAGE_WIDTH, temp_value);
    }

    if ((NO_ERROR == ret)) {
        char temp_value[5];
        snprintf(temp_value, sizeof(temp_value)/sizeof(char), "%lu", capData->mHeight);
        ret = exifTable->insertElement(TAG_IMAGE_LENGTH, temp_value);
    }

    // GPS latitude/longitude as "deg/1,min/1,sec/secDiv" rational triples,
    // with separate hemisphere reference tags.
    if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mLatValid)) {
        char temp_value[256]; // arbitrarily long string
        snprintf(temp_value,
                 sizeof(temp_value)/sizeof(char) - 1,
                 "%d/%d,%d/%d,%d/%d",
                 abs(mEXIFData.mGPSData.mLatDeg), 1,
                 abs(mEXIFData.mGPSData.mLatMin), 1,
                 abs(mEXIFData.mGPSData.mLatSec), abs(mEXIFData.mGPSData.mLatSecDiv));
        ret = exifTable->insertElement(TAG_GPS_LAT, temp_value);
    }

    if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mLatValid)) {
        ret = exifTable->insertElement(TAG_GPS_LAT_REF, mEXIFData.mGPSData.mLatRef);
    }

    if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mLongValid)) {
        char temp_value[256]; // arbitrarily long string
        snprintf(temp_value,
                 sizeof(temp_value)/sizeof(char) - 1,
                 "%d/%d,%d/%d,%d/%d",
                 abs(mEXIFData.mGPSData.mLongDeg), 1,
                 abs(mEXIFData.mGPSData.mLongMin), 1,
                 abs(mEXIFData.mGPSData.mLongSec), abs(mEXIFData.mGPSData.mLongSecDiv));
        ret = exifTable->insertElement(TAG_GPS_LONG, temp_value);
    }

    if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mLongValid)) {
        ret = exifTable->insertElement(TAG_GPS_LONG_REF, mEXIFData.mGPSData.mLongRef);
    }

    // Altitude magnitude plus above/below sea-level reference flag.
    if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mAltitudeValid)) {
        char temp_value[256]; // arbitrarily long string
        snprintf(temp_value,
                 sizeof(temp_value)/sizeof(char) - 1,
                 "%d/%d",
                 abs( mEXIFData.mGPSData.mAltitude), 1);
        ret = exifTable->insertElement(TAG_GPS_ALT, temp_value);
    }

    if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mAltitudeValid)) {
        char temp_value[5];
        snprintf(temp_value,
                 sizeof(temp_value)/sizeof(char) - 1,
                 "%d", mEXIFData.mGPSData.mAltitudeRef);
        ret = exifTable->insertElement(TAG_GPS_ALT_REF, temp_value);
    }

    if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mMapDatumValid)) {
        ret = exifTable->insertElement(TAG_GPS_MAP_DATUM, mEXIFData.mGPSData.mMapDatum);
    }

    // Processing method is prefixed with the EXIF ASCII character-code marker.
    if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mProcMethodValid)) {
        char temp_value[GPS_PROCESSING_SIZE];

        memcpy(temp_value, ExifAsciiPrefix, sizeof(ExifAsciiPrefix));
        memcpy(temp_value + sizeof(ExifAsciiPrefix),
               mEXIFData.mGPSData.mProcMethod,
               (GPS_PROCESSING_SIZE - sizeof(ExifAsciiPrefix)));
        ret = exifTable->insertElement(TAG_GPS_PROCESSING_METHOD, temp_value);
    }

    if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mVersionIdValid)) {
        char temp_value[256]; // arbitrarily long string
        snprintf(temp_value,
                 sizeof(temp_value)/sizeof(char) - 1,
                 "%d,%d,%d,%d",
                 mEXIFData.mGPSData.mVersionId[0],
                 mEXIFData.mGPSData.mVersionId[1],
                 mEXIFData.mGPSData.mVersionId[2],
                 mEXIFData.mGPSData.mVersionId[3]);
        ret = exifTable->insertElement(TAG_GPS_VERSION_ID, temp_value);
    }

    if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mTimeStampValid)) {
        char temp_value[256]; // arbitrarily long string
        snprintf(temp_value,
                 sizeof(temp_value)/sizeof(char) - 1,
                 "%d/%d,%d/%d,%d/%d",
                 mEXIFData.mGPSData.mTimeStampHour, 1,
                 mEXIFData.mGPSData.mTimeStampMin, 1,
                 mEXIFData.mGPSData.mTimeStampSec, 1);
        ret = exifTable->insertElement(TAG_GPS_TIMESTAMP, temp_value);
    }

    if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mDatestampValid) ) {
        ret = exifTable->insertElement(TAG_GPS_DATESTAMP, mEXIFData.mGPSData.mDatestamp);
    }

    // Orientation tag from the rotation requested for this capture.
    if (NO_ERROR == ret) {
        const char* exif_orient =
            ExifElementsTable::degreesToExifOrientation(mPictureRotation);

        if (exif_orient) {
            ret = exifTable->insertElement(TAG_ORIENTATION, exif_orient);
        }
    }

    // fill in short and ushort tags
    if (NO_ERROR == ret) {
        char temp_value[2];
        temp_value[1] = '\0';

        // AWB: EXIF WhiteBalance tag — 0 = auto, 1 = manual.
        if (mParameters3A.WhiteBallance == OMX_WhiteBalControlAuto) {
            temp_value[0] = '0';
        } else {
            temp_value[0] = '1';
        }
        exifTable->insertElement(TAG_WHITEBALANCE, temp_value);

        // MeteringMode
        // TODO(XXX): only supporting this metering mode at the moment, may change in future
        temp_value[0] = '2';
        exifTable->insertElement(TAG_METERING_MODE, temp_value);

        // ExposureProgram
        // TODO(XXX): only supporting this exposure program at the moment, may change in future
        temp_value[0] = '3';
        exifTable->insertElement(TAG_EXPOSURE_PROGRAM, temp_value);

        // ColorSpace
        temp_value[0] = '1';
        exifTable->insertElement(TAG_COLOR_SPACE, temp_value);

        temp_value[0] = '2';
        exifTable->insertElement(TAG_SENSING_METHOD, temp_value);

        temp_value[0] = '1';
        exifTable->insertElement(TAG_CUSTOM_RENDERED, temp_value);
    }

    // Per-frame ancillary data from the component (presumably fixed-point
    // encodings: zoom /1024, exposure in µs, aperture /100 — TODO confirm
    // against OMX_TI_ANCILLARYDATATYPE documentation).
    if (pAncillaryData && (NO_ERROR == ret)) {
        unsigned int numerator = 0, denominator = 0;
        char temp_value[256];
        unsigned int temp_num = 0;

        // DigitalZoomRatio
        snprintf(temp_value,
                 sizeof(temp_value)/sizeof(char),
                 "%u/%u",
                 pAncillaryData->nDigitalZoomFactor, 1024);
        exifTable->insertElement(TAG_DIGITALZOOMRATIO, temp_value);

        // ExposureTime
        snprintf(temp_value,
                 sizeof(temp_value)/sizeof(char),
                 "%u/%u",
                 pAncillaryData->nExposureTime, 1000000);
        exifTable->insertElement(TAG_EXPOSURETIME, temp_value);

        // ApertureValue and FNumber
        snprintf(temp_value,
                 sizeof(temp_value)/sizeof(char),
                 "%u/%u",
                 pAncillaryData->nApertureValue, 100);
        exifTable->insertElement(TAG_FNUMBER, temp_value);
        exifTable->insertElement(TAG_APERTURE, temp_value);

        // ISO
        snprintf(temp_value,
                 sizeof(temp_value)/sizeof(char),
                 "%u,0,0",
                 pAncillaryData->nCurrentISO);
        exifTable->insertElement(TAG_ISO_EQUIVALENT, temp_value);

        // ShutterSpeed: APEX value = log2(exposure), formatted as a rational.
        snprintf(temp_value,
                 sizeof(temp_value)/sizeof(char),
                 "%f",
                 log(pAncillaryData->nExposureTime) / log(2));
        ExifElementsTable::stringToRational(temp_value, &numerator, &denominator);
        snprintf(temp_value, sizeof(temp_value)/sizeof(char), "%u/%u", numerator, denominator);
        exifTable->insertElement(TAG_SHUTTERSPEED, temp_value);

        // Flash: EXIF bit-field combining fired status with the flash mode.
        if (mParameters3A.FlashMode == OMX_IMAGE_FlashControlAuto) {
            if(pAncillaryData->nFlashStatus) temp_num = 0x19; // Flash fired, auto mode
            else temp_num = 0x18; // Flash did not fire, auto mode
        } else if (mParameters3A.FlashMode == OMX_IMAGE_FlashControlOn) {
            if(pAncillaryData->nFlashStatus) temp_num = 0x9; // Flash fired, compulsory flash mode
            else temp_num = 0x10; // Flash did not fire, compulsory flash mode
        } else if(pAncillaryData->nFlashStatus) {
            temp_num = 0x1; // Flash fired
        } else {
            temp_num = 0x0; // Flash did not fire
        }
        snprintf(temp_value,
                 sizeof(temp_value)/sizeof(char),
                 "%u", temp_num);
        exifTable->insertElement(TAG_FLASH, temp_value);

        // LightSource: map the measured color temperature onto the nearest
        // EXIF illuminant code; a fired flash overrides the mapping.
        if (pWhiteBalanceData) {
            unsigned int lightsource = 0;
            unsigned int colourtemp = pWhiteBalanceData->nColorTemperature;
            bool flash_fired = (temp_num & 0x1); // value from flash above

            // stole this from framework/tools_library/src/tools_sys_exif_tags.c
            if( colourtemp <= 3200 ) {
                lightsource = 3; // Tungsten
            } else if( colourtemp > 3200 && colourtemp <= 4800 ) {
                lightsource = 2; // Fluorescent
            } else if( colourtemp > 4800 && colourtemp <= 5500 ) {
                lightsource = 1; // Daylight
            } else if( colourtemp > 5500 && colourtemp <= 6500 ) {
                lightsource = 9; // Fine weather
            } else if( colourtemp > 6500 ) {
                lightsource = 10; // Cloudy weather
            }

            if(flash_fired) {
                lightsource = 4; // Flash
            }

            snprintf(temp_value,
                     sizeof(temp_value)/sizeof(char),
                     "%u", lightsource);
            exifTable->insertElement(TAG_LIGHT_SOURCE, temp_value);
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+status_t OMXCameraAdapter::convertGPSCoord(double coord,
+ int &deg,
+ int &min,
+ int &sec,
+ int &secDivisor)
+{
+ double tmp;
+
+ LOG_FUNCTION_NAME;
+
+ if ( coord == 0 ) {
+
+ CAMHAL_LOGE("Invalid GPS coordinate");
+
+ return -EINVAL;
+ }
+
+ deg = (int) floor(fabs(coord));
+ tmp = ( fabs(coord) - floor(fabs(coord)) ) * GPS_MIN_DIV;
+ min = (int) floor(tmp);
+ tmp = ( tmp - floor(tmp) ) * ( GPS_SEC_DIV * GPS_SEC_ACCURACY );
+ sec = (int) floor(tmp);
+ secDivisor = GPS_SEC_ACCURACY;
+
+ if( sec >= ( GPS_SEC_DIV * GPS_SEC_ACCURACY ) ) {
+ sec = 0;
+ min += 1;
+ }
+
+ if( min >= 60 ) {
+ min = 0;
+ deg += 1;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXFD.cpp b/camera/OMXCameraAdapter/OMXFD.cpp
new file mode 100644
index 0000000..1a482b2
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXFD.cpp
@@ -0,0 +1,504 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXFD.cpp
+*
+* This file contains functionality for handling face detection.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+namespace Ti {
+namespace Camera {
+
// Threshold applied to detected faces before they are reported upward
// (presumably a minimum confidence score on a 0-100 scale — confirm against
// its use in encodeFaceCoordinates()).
const uint32_t OMXCameraAdapter::FACE_DETECTION_THRESHOLD = 80;
+
+status_t OMXCameraAdapter::setParametersFD(const android::CameraParameters &params,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::startFaceDetection()
+{
+ status_t ret = NO_ERROR;
+
+ android::AutoMutex lock(mFaceDetectionLock);
+
+ ret = setFaceDetection(true, mFaceOrientation);
+ if (ret != NO_ERROR) {
+ goto out;
+ }
+
+ if ( mFaceDetectionRunning ) {
+ mFDSwitchAlgoPriority = true;
+ }
+
+ // Note: White balance will not be face prioritized, since
+ // the algorithm needs full frame statistics, and not face
+ // regions alone.
+
+ faceDetectionNumFacesLastOutput = 0;
+ out:
+ return ret;
+}
+
+status_t OMXCameraAdapter::stopFaceDetection()
+{
+ status_t ret = NO_ERROR;
+ const char *str = NULL;
+ BaseCameraAdapter::AdapterState state;
+ BaseCameraAdapter::getState(state);
+
+ android::AutoMutex lock(mFaceDetectionLock);
+
+ ret = setFaceDetection(false, mFaceOrientation);
+ if (ret != NO_ERROR) {
+ goto out;
+ }
+
+ if ( mFaceDetectionRunning ) {
+ //Enable region priority and disable face priority for AF
+ setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, true);
+ setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , false);
+
+ //Enable Region priority and disable Face priority
+ setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, true);
+ setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, false);
+ }
+
+ if (mPending3Asettings) {
+ apply3Asettings(mParameters3A);
+ }
+
+ faceDetectionNumFacesLastOutput = 0;
+ out:
+ return ret;
+}
+
+void OMXCameraAdapter::pauseFaceDetection(bool pause)
+{
+ android::AutoMutex lock(mFaceDetectionLock);
+ // pausing will only take affect if fd is already running
+ if (mFaceDetectionRunning) {
+ mFaceDetectionPaused = pause;
+ faceDetectionNumFacesLastOutput = 0;
+ }
+}
+
+status_t OMXCameraAdapter::setFaceDetectionOrientation(OMX_U32 orientation)
+{
+ status_t ret = NO_ERROR;
+
+ android::AutoMutex lock(mFaceDetectionLock);
+
+ mFaceOrientation = orientation;
+
+ if (mFaceDetectionRunning) {
+ // restart face detection with new rotation
+ setFaceDetection(true, orientation);
+ }
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_OBJDETECTIONTYPE objDetection;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ if ( orientation > 270 ) {
+ orientation = 0;
+ }
+
+ OMX_INIT_STRUCT_PTR (&objDetection, OMX_CONFIG_OBJDETECTIONTYPE);
+ objDetection.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ objDetection.nDeviceOrientation = orientation;
+ if ( enable )
+ {
+ objDetection.bEnable = OMX_TRUE;
+ }
+ else
+ {
+ objDetection.bEnable = OMX_FALSE;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigImageFaceDetection,
+ &objDetection);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring face detection 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Face detection configured successfully");
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ // TODO(XXX): Should enable/disable FD extra data separately
+ // on each port.
+ ret = setExtraData(enable, OMX_ALL, OMX_FaceDetection);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEA("Error while configuring face detection extra data");
+ }
+ else
+ {
+ CAMHAL_LOGDA("Face detection extra data configured successfully");
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mFaceDetectionRunning = enable;
+ mFaceDetectionPaused = !enable;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
/**
 * Builds the per-frame preview metadata callback payload: face coordinates
 * (when FD is running and not paused) plus general preview metadata.
 *
 * Result-combination contract: NOT_ENOUGH_DATA from either encoder means
 * "nothing changed since last frame" and is harmless; only when BOTH report
 * no change is the result dropped so no redundant callback is sent.
 *
 * @param pBuffHeader   OMX buffer carrying the frame and its extra data.
 * @param result        [out] metadata result; cleared on real errors.
 * @param previewWidth  preview frame width in pixels.
 * @param previewHeight preview frame height in pixels.
 * @return NO_ERROR / NOT_ENOUGH_DATA on success paths, NO_INIT when the
 *         component is not executing, -EINVAL on bad input, NO_MEMORY on
 *         allocation failure.
 */
status_t OMXCameraAdapter::createPreviewMetadata(OMX_BUFFERHEADERTYPE* pBuffHeader,
                                          android::sp<CameraMetadataResult> &result,
                                          size_t previewWidth,
                                          size_t previewHeight)
{
    status_t ret = NO_ERROR;
    status_t faceRet = NO_ERROR;
    status_t metaRet = NO_ERROR;
    OMX_FACEDETECTIONTYPE *faceData = NULL;

    LOG_FUNCTION_NAME;

    if ( OMX_StateExecuting != mComponentState ) {
        CAMHAL_LOGEA("OMX component is not in executing state");
        return NO_INIT;
    }

    if ( NULL == pBuffHeader ) {
        CAMHAL_LOGEA("Invalid Buffer header");
        // NOTE: parses as "return -EINVAL" despite the missing space.
        return-EINVAL;
    }

    // Pull FD results out of the buffer's extra data only while FD is
    // actively producing them; otherwise faceData stays NULL and only the
    // generic preview metadata is encoded.
    if ( mFaceDetectionRunning && !mFaceDetectionPaused ) {
        OMX_OTHER_EXTRADATATYPE *extraData;

        extraData = getExtradata(pBuffHeader->pPlatformPrivate,
                                 (OMX_EXTRADATATYPE)OMX_FaceDetection);

        if ( NULL != extraData ) {
            CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
                         extraData->nSize,
                         sizeof(OMX_OTHER_EXTRADATATYPE),
                         extraData->eType,
                         extraData->nDataSize,
                         extraData->nPortIndex,
                         extraData->nVersion);
        } else {
            CAMHAL_LOGD("FD extra data not found!");
            return -EINVAL;
        }

        faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
        if ( NULL != faceData ) {
            // Struct-size check guards against component/header mismatch.
            if ( sizeof(OMX_FACEDETECTIONTYPE) == faceData->nSize ) {
                // NOTE(review): format string has one %d but six arguments;
                // extras are ignored by the logger.
                CAMHAL_LOGVB("Faces detected %d",
                             faceData->ulFaceCount,
                             faceData->nSize,
                             sizeof(OMX_FACEDETECTIONTYPE),
                             faceData->eCameraView,
                             faceData->nPortIndex,
                             faceData->nVersion);
            } else {
                CAMHAL_LOGEB("OMX_FACEDETECTIONTYPE size mismatch: expected = %d, received = %d",
                             ( unsigned int ) sizeof(OMX_FACEDETECTIONTYPE),
                             ( unsigned int ) faceData->nSize);
                return -EINVAL;
            }
        } else {
            CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE");
            return -EINVAL;
        }
    }

    result = new (std::nothrow) CameraMetadataResult;
    if(NULL == result.get()) {
        ret = NO_MEMORY;
        return ret;
    }

    //Encode face coordinates
    faceRet = encodeFaceCoordinates(faceData, result->getMetadataResult()
                                    , previewWidth, previewHeight);
    if ((NO_ERROR == faceRet) || (NOT_ENOUGH_DATA == faceRet)) {
        // Ignore harmless errors (no error and no update) and go ahead and encode
        // the preview meta data
        metaRet = encodePreviewMetadata(result->getMetadataResult()
                                        , pBuffHeader->pPlatformPrivate);
        if ( (NO_ERROR != metaRet) && (NOT_ENOUGH_DATA != metaRet) ) {
            // Some 'real' error occurred during preview meta data encod, clear metadata
            // result and return correct error code
            result.clear();
            ret = metaRet;
        }
    } else {
        //Some real error occurred during face encoding, clear metadata result
        // and return correct error code
        result.clear();
        ret = faceRet;
    }

    if((NOT_ENOUGH_DATA == faceRet) && (NOT_ENOUGH_DATA == metaRet)) {
        //No point sending the callback if nothing is changed
        result.clear();
        ret = faceRet;
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
+
+/**
+ * Converts OMX face detection results into the Android camera_face_t array
+ * stored in 'metadataResult'. Pixel coordinates are scaled into the
+ * [LEFT, RIGHT] x [TOP, BOTTOM] range Android expects, with a flip applied
+ * when the sensor is mounted 180 degrees relative to the device.
+ *
+ * Returns NO_ERROR on success, NOT_ENOUGH_DATA when the face array is
+ * unchanged since the previous call (callers may skip the callback), or
+ * NO_MEMORY if the output face array could not be allocated.
+ */
+status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData,
+                                                 camera_frame_metadata_t *metadataResult,
+                                                 size_t previewWidth,
+                                                 size_t previewHeight)
+{
+    status_t ret = NO_ERROR;
+    camera_face_t *faces;
+    size_t hRange, vRange;
+    double tmp;
+    bool faceArrayChanged = false;
+
+    LOG_FUNCTION_NAME;
+
+    // Android face coordinates span [LEFT, RIGHT] x [TOP, BOTTOM].
+    hRange = CameraMetadataResult::RIGHT - CameraMetadataResult::LEFT;
+    vRange = CameraMetadataResult::BOTTOM - CameraMetadataResult::TOP;
+
+    android::AutoMutex lock(mFaceDetectionLock);
+
+    // Avoid memory leak if called twice on same CameraMetadataResult
+    if ( (0 < metadataResult->number_of_faces) && (NULL != metadataResult->faces) ) {
+        free(metadataResult->faces);
+        metadataResult->number_of_faces = 0;
+        metadataResult->faces = NULL;
+    }
+
+    if ( (NULL != faceData) && (0 < faceData->ulFaceCount) ) {
+        int orient_mult;
+        int trans_left, trans_top, trans_right, trans_bot;
+
+        faces = ( camera_face_t * ) malloc(sizeof(camera_face_t)*faceData->ulFaceCount);
+        if ( NULL == faces ) {
+            ret = NO_MEMORY;
+            goto out;
+        }
+
+        /**
+         * When device is 180 degrees oriented to the sensor, need to translate
+         * the output from Ducati to what Android expects.
+         * Ducati always gives face coordinates in this form, irrespective of
+         * rotation, i.e (l,t) always represents the point towards the left eye
+         * and top of hair.
+         * (l, t)
+         * ---------------
+         * -   ,,,,,,,   -
+         * -  |       |  -
+         * -  |<a   <a|  -
+         * - (|   ^   |) -
+         * -  |  -=-  |  -
+         * -   \_____/   -
+         * ---------------
+         *               (r, b)
+         *
+         * However, Android expects the coords to be in respect with what the
+         * sensor is viewing, i.e Android expects sensor to see this with (l,t)
+         * and (r,b) like so:
+         * (l, t)
+         * ---------------
+         * -    _____    -
+         * -   /     \   -
+         * -  |  -=-  |  -
+         * - (|   ^   |) -
+         * -  |a>   a>|  -
+         * -  |       |  -
+         * -   ,,,,,,,   -
+         * ---------------
+         *               (r, b)
+         */
+
+        if (mFaceOrientation == 180) {
+            orient_mult = -1;
+            trans_left = 2; // right is now left
+            trans_top = 3; // bottom is now top
+            trans_right = 0; // left is now right
+            trans_bot = 1; // top is now bottom
+        } else {
+            orient_mult = 1;
+            trans_left = 0; // left
+            trans_top = 1; // top
+            trans_right = 2; // right
+            trans_bot = 3; // bottom
+        }
+
+        // 'j' walks the OMX face list, 'i' counts faces that pass filtering.
+        int j = 0, i = 0;
+        for ( ; j < faceData->ulFaceCount ; j++)
+            {
+            OMX_S32 nLeft = 0;
+            OMX_S32 nTop = 0;
+            //Face filtering
+            //For real faces, it is seen that the h/w passes a score >=80
+            //For false faces, we seem to get even a score of 70 sometimes.
+            //In order to avoid any issue at application level, we filter
+            //<=70 score here.
+            if(faceData->tFacePosition[j].nScore <= FACE_DETECTION_THRESHOLD)
+                continue;
+
+            if (mFaceOrientation == 180) {
+                // from sensor pov, the left pos is the right corner of the face in pov of frame
+                nLeft = faceData->tFacePosition[j].nLeft + faceData->tFacePosition[j].nWidth;
+                nTop = faceData->tFacePosition[j].nTop + faceData->tFacePosition[j].nHeight;
+            } else {
+                nLeft = faceData->tFacePosition[j].nLeft;
+                nTop = faceData->tFacePosition[j].nTop;
+            }
+
+            // Scale pixel coordinates into the Android range, centered on 0.
+            tmp = ( double ) nLeft / ( double ) previewWidth;
+            tmp *= hRange;
+            tmp -= hRange/2;
+            faces[i].rect[trans_left] = tmp;
+
+            tmp = ( double ) nTop / ( double )previewHeight;
+            tmp *= vRange;
+            tmp -= vRange/2;
+            faces[i].rect[trans_top] = tmp;
+
+            // orient_mult flips width/height direction for the 180-degree case.
+            tmp = ( double ) faceData->tFacePosition[j].nWidth / ( double ) previewWidth;
+            tmp *= hRange;
+            tmp *= orient_mult;
+            faces[i].rect[trans_right] = faces[i].rect[trans_left] + tmp;
+
+            tmp = ( double ) faceData->tFacePosition[j].nHeight / ( double ) previewHeight;
+            tmp *= vRange;
+            tmp *= orient_mult;
+            faces[i].rect[trans_bot] = faces[i].rect[trans_top] + tmp;
+
+            faces[i].score = faceData->tFacePosition[j].nScore;
+            faces[i].id = 0;
+            // Eye/mouth landmarks are not provided by this extradata.
+            faces[i].left_eye[0] = CameraMetadataResult::INVALID_DATA;
+            faces[i].left_eye[1] = CameraMetadataResult::INVALID_DATA;
+            faces[i].right_eye[0] = CameraMetadataResult::INVALID_DATA;
+            faces[i].right_eye[1] = CameraMetadataResult::INVALID_DATA;
+            faces[i].mouth[0] = CameraMetadataResult::INVALID_DATA;
+            faces[i].mouth[1] = CameraMetadataResult::INVALID_DATA;
+            i++;
+        }
+
+        metadataResult->number_of_faces = i;
+        metadataResult->faces = faces;
+
+        // Compare each face (center and size) against the previous output to
+        // decide whether anything actually moved. Note: this 'i' shadows the
+        // counter above.
+        for (int i = 0; i < metadataResult->number_of_faces; i++)
+        {
+            bool faceChanged = true;
+            int centerX = (faces[i].rect[trans_left] + faces[i].rect[trans_right] ) / 2;
+            int centerY = (faces[i].rect[trans_top] + faces[i].rect[trans_bot] ) / 2;
+
+            int sizeX = (faces[i].rect[trans_right] - faces[i].rect[trans_left] ) ;
+            int sizeY = (faces[i].rect[trans_bot] - faces[i].rect[trans_top] ) ;
+
+            for (int j = 0; j < faceDetectionNumFacesLastOutput; j++)
+            {
+                int tempCenterX = (faceDetectionLastOutput[j].rect[trans_left] +
+                                   faceDetectionLastOutput[j].rect[trans_right] ) / 2;
+                int tempCenterY = (faceDetectionLastOutput[j].rect[trans_top] +
+                                   faceDetectionLastOutput[j].rect[trans_bot] ) / 2;
+                int tempSizeX = (faceDetectionLastOutput[j].rect[trans_right] -
+                                 faceDetectionLastOutput[j].rect[trans_left] ) ;
+                int tempSizeY = (faceDetectionLastOutput[j].rect[trans_bot] -
+                                 faceDetectionLastOutput[j].rect[trans_top] ) ;
+
+                if ( ( tempCenterX == centerX) &&
+                     ( tempCenterY == centerY) ) {
+                    // Found Face.
+                    // Now check size of rectangle
+                    // compare to last output.
+                    if ( ( tempSizeX == sizeX ) &&
+                         ( tempSizeY == sizeY ) ) {
+                        faceChanged = false;
+                    }
+                }
+            }
+            // Send face detection data after some face coordinate changes
+            if (faceChanged) {
+                faceArrayChanged = true;
+            }
+        }
+
+        // Save this output for next iteration
+        for (int i = 0; i < metadataResult->number_of_faces; i++)
+        {
+            faceDetectionLastOutput[i] = faces[i];
+        }
+    } else {
+        metadataResult->number_of_faces = 0;
+        metadataResult->faces = NULL;
+    }
+
+    // Send face detection data after face count changes
+    if (faceDetectionNumFacesLastOutput != metadataResult->number_of_faces) {
+        faceArrayChanged = true;
+    }
+    faceDetectionNumFacesLastOutput = metadataResult->number_of_faces;
+
+    if ( !faceArrayChanged ) {
+        ret = NOT_ENOUGH_DATA;
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+// NOTE(review): the NO_MEMORY path jumps here and thus skips
+// LOG_FUNCTION_NAME_EXIT above.
+out:
+
+    return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXFocus.cpp b/camera/OMXCameraAdapter/OMXFocus.cpp
new file mode 100644
index 0000000..d478035
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXFocus.cpp
@@ -0,0 +1,892 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/**
+* @file OMXFocus.cpp
+*
+* This file contains functionality for handling focus configurations.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+
+// Coordinate range ([0, 0xFF]) the 3A algorithm uses for touch focus areas.
+#define TOUCH_FOCUS_RANGE 0xFF
+// Time (in microseconds) to wait for the autofocus-done callback.
+#define AF_IMAGE_CALLBACK_TIMEOUT 5000000 //5 seconds timeout
+#define AF_VIDEO_CALLBACK_TIMEOUT 2800000 //2.8 seconds timeout
+
+namespace Ti {
+namespace Camera {
+
+// Maximum time to wait for the focus-off status event after canceling AF.
+const nsecs_t OMXCameraAdapter::CANCEL_AF_TIMEOUT = seconds_to_nanoseconds(1);
+
+/**
+ * @brief Applies focus-area related camera parameters.
+ *
+ * Parses KEY_FOCUS_AREAS and, when the area list differs from the current
+ * one, validates it against the advertised KEY_MAX_NUM_FOCUS_AREAS limit
+ * and programs the new touch-focus areas.
+ *
+ * @param params Incoming camera parameters.
+ * @param state  Current adapter state (unused here; kept for interface
+ *               parity with the other setParameters* handlers).
+ * @return NO_ERROR on success, -EINVAL if more areas were set than
+ *         supported, or an error from CameraArea::parseAreas().
+ */
+status_t OMXCameraAdapter::setParametersFocus(const android::CameraParameters &params,
+                                              BaseCameraAdapter::AdapterState state)
+{
+    status_t ret = NO_ERROR;
+    const char *str = NULL;
+    android::Vector<android::sp<CameraArea> > tempAreas;
+    size_t MAX_FOCUS_AREAS;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mFocusAreasLock);
+
+    str = params.get(android::CameraParameters::KEY_FOCUS_AREAS);
+
+    // KEY_MAX_NUM_FOCUS_AREAS may be absent; atoi(NULL) is undefined
+    // behavior, so treat a missing value as "no focus areas supported".
+    const char *maxAreasStr = params.get(android::CameraParameters::KEY_MAX_NUM_FOCUS_AREAS);
+    MAX_FOCUS_AREAS = ( NULL != maxAreasStr ) ? atoi(maxAreasStr) : 0;
+
+    if ( NULL != str ) {
+        ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
+    }
+
+    if ( (NO_ERROR == ret) && CameraArea::areAreasDifferent(mFocusAreas, tempAreas) ) {
+        mFocusAreas.clear();
+        mFocusAreas = tempAreas;
+        if ( MAX_FOCUS_AREAS < mFocusAreas.size() ) {
+            CAMHAL_LOGEB("Focus areas supported %d, focus areas set %d",
+                         MAX_FOCUS_AREAS,
+                         mFocusAreas.size());
+            ret = -EINVAL;
+        }
+        else {
+            if ( !mFocusAreas.isEmpty() ) {
+                setTouchFocus();
+            }
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Starts an autofocus scan and blocks until the AF callback arrives or a
+ * capture-mode-dependent timeout expires.
+ *
+ * Fixed-focus modes (off/infinity/hyperfocal) report success immediately.
+ * For CAF that has already locked, the current status is returned without
+ * forcing a new scan. Face callbacks are paused for the duration of AF.
+ *
+ * Returns NO_ERROR on success (including benign early-outs), -EINVAL on an
+ * invalid component state, or a translated OMX error.
+ */
+status_t OMXCameraAdapter::doAutoFocus()
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl;
+    OMX_PARAM_FOCUSSTATUSTYPE focusStatus;
+    OMX_CONFIG_BOOLEANTYPE bOMX;
+    CameraAdapter::AdapterState state;
+    nsecs_t timeout = 0;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component in Invalid state");
+        returnFocusStatus(false);
+        return -EINVAL;
+        }
+
+    if ( OMX_StateExecuting != mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component not in executing state");
+        returnFocusStatus(false);
+        return NO_ERROR;
+        }
+
+    if( ((AF_ACTIVE & getState()) != AF_ACTIVE) && ((AF_ACTIVE & getNextState()) != AF_ACTIVE) ) {
+        CAMHAL_LOGDA("Auto focus got canceled before doAutoFocus could be called");
+        return NO_ERROR;
+    }
+
+    // AF when fixed focus modes are set should be a no-op.
+    if ( ( mParameters3A.Focus == OMX_IMAGE_FocusControlOff ) ||
+         ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) ||
+         ( mParameters3A.Focus == OMX_IMAGE_FocusControlHyperfocal ) ) {
+        returnFocusStatus(true);
+        return NO_ERROR;
+    }
+
+    // Zero-initialized here so the status checks below are well-defined even
+    // when checkFocus() is not called (non-CAF path).
+    OMX_INIT_STRUCT_PTR (&focusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
+
+    // If the app calls autoFocus, the camera will stop sending face callbacks.
+    pauseFaceDetection(true);
+
+    // This is needed for applying FOCUS_REGION correctly
+    if ( (!mFocusAreas.isEmpty()) && (!mFocusAreas.itemAt(0)->isZeroArea()))
+    {
+        //Disable face priority
+        setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, false);
+
+        //Enable region algorithm priority
+        setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, true);
+    }
+
+    OMX_INIT_STRUCT_PTR (&focusControl, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
+    focusControl.eFocusControl = ( OMX_IMAGE_FOCUSCONTROLTYPE ) mParameters3A.Focus;
+
+    if (mParameters3A.FocusLock) {
+        // this basically means user never called cancelAutoFocus after a scan...
+        // if this is the case we need to unlock AF to ensure we will do a scan
+        if (set3ALock(mUserSetExpLock, mUserSetWbLock, OMX_FALSE) != NO_ERROR) {
+            CAMHAL_LOGEA("Error Unlocking 3A locks");
+        } else {
+            CAMHAL_LOGDA("AE/AWB unlocked successfully");
+        }
+
+    } else if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAuto ) {
+        // In case we have CAF running we should first check the AF status.
+        // If it has managed to lock, then do as usual and return status
+        // immediately.
+        ret = checkFocus(&focusStatus);
+        if ( NO_ERROR != ret ) {
+            CAMHAL_LOGEB("Focus status check failed 0x%x!", ret);
+            return ret;
+        } else {
+            CAMHAL_LOGDB("Focus status check 0x%x!", focusStatus.eFocusStatus);
+        }
+    }
+
+    // Start a scan if CAF has not yet locked, or if a non-CAF focus mode is in
+    // effect; otherwise fall through and report the already-locked status.
+    if ( (focusControl.eFocusControl == OMX_IMAGE_FocusControlAuto &&
+         ( focusStatus.eFocusStatus == OMX_FocusStatusRequest ||
+           focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach ||
+           focusStatus.eFocusStatus == OMX_FocusStatusLost ) ) ||
+        (mParameters3A.Focus != (OMX_IMAGE_FOCUSCONTROLTYPE)OMX_IMAGE_FocusControlAuto) ) {
+        OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
+        bOMX.bEnabled = OMX_TRUE;
+
+        //Enable focus scanning
+        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                               (OMX_INDEXTYPE)OMX_TI_IndexConfigAutofocusEnable,
+                               &bOMX);
+        if ( OMX_ErrorNone != eError ) {
+            return Utils::ErrorUtils::omxToAndroidError(eError);
+        }
+
+        {
+            android::AutoMutex lock(mDoAFMutex);
+
+            // force AF, Ducati will take care of whether CAF
+            // or AF will be performed, depending on light conditions
+            if ( focusControl.eFocusControl == OMX_IMAGE_FocusControlAuto &&
+                 ( focusStatus.eFocusStatus == OMX_FocusStatusRequest ||
+                   focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach ||
+                   focusStatus.eFocusStatus == OMX_FocusStatusLost ) ) {
+                focusControl.eFocusControl = OMX_IMAGE_FocusControlAutoLock;
+            }
+
+            eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                                   OMX_IndexConfigFocusControl,
+                                   &focusControl);
+
+            if ( OMX_ErrorNone != eError ) {
+                CAMHAL_LOGEB("Error while starting focus 0x%x", eError);
+                return INVALID_OPERATION;
+            } else {
+                CAMHAL_LOGDA("Autofocus started successfully");
+            }
+
+            // No need to wait if preview is about to stop
+            getNextState(state);
+            if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE ) {
+                return NO_ERROR;
+            }
+
+            // configure focus timeout based on capture mode
+            timeout = (mCapMode == VIDEO_MODE) || (mCapMode == VIDEO_MODE_HQ) ?
+                            ( ( nsecs_t ) AF_VIDEO_CALLBACK_TIMEOUT * 1000 ) :
+                            ( ( nsecs_t ) AF_IMAGE_CALLBACK_TIMEOUT * 1000 );
+
+
+            // Block until handleFocusCallback() broadcasts or the timeout hits.
+            ret = mDoAFCond.waitRelative(mDoAFMutex, timeout);
+        }
+
+        //If something bad happened while we wait
+        if (mComponentState == OMX_StateInvalid) {
+            CAMHAL_LOGEA("Invalid State after Auto Focus Exitting!!!");
+            return -EINVAL;
+        }
+
+        if(ret != NO_ERROR) {
+            CAMHAL_LOGEA("Autofocus callback timeout expired");
+            ret = returnFocusStatus(true);
+        } else {
+            CAMHAL_LOGDA("Autofocus callback received");
+            ret = returnFocusStatus(false);
+        }
+    } else { // Focus mode in continuous
+        if ( NO_ERROR == ret ) {
+            ret = returnFocusStatus(true);
+            mPending3Asettings |= SetFocus;
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Stops any running autofocus scan by setting focus control to Off.
+ *
+ * A no-op when the component is not executing or when infinity focus is
+ * selected. Returns NO_ERROR on success, -EINVAL on invalid component
+ * state, or a translated OMX error.
+ */
+status_t OMXCameraAdapter::stopAutoFocus()
+{
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component in Invalid state");
+        returnFocusStatus(false);
+        return -EINVAL;
+    }
+
+    if ( OMX_StateExecuting != mComponentState ) {
+        CAMHAL_LOGEA("OMX component not in executing state");
+        return NO_ERROR;
+    }
+
+    if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) {
+        // No need to stop focus if we are in infinity mode. Nothing to stop.
+        return NO_ERROR;
+    }
+
+    OMX_INIT_STRUCT_PTR (&focusControl, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
+    focusControl.eFocusControl = OMX_IMAGE_FocusControlOff;
+
+    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                           OMX_IndexConfigFocusControl,
+                           &focusControl);
+    if ( OMX_ErrorNone != eError ) {
+        CAMHAL_LOGEB("Error while stopping focus 0x%x", eError);
+        return Utils::ErrorUtils::omxToAndroidError(eError);
+    }
+#ifdef CAMERAHAL_TUNA
+    else {
+        // This is a WA. Usually the OMX Camera component should
+        // generate AF status change OMX event fairly quickly
+        // ( after one preview frame ) and this notification should
+        // actually come from 'handleFocusCallback()'.
+        android::AutoMutex lock(mDoAFMutex);
+        mDoAFCond.broadcast();
+    }
+#endif
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return NO_ERROR;
+}
+
+/**
+ * @brief Queries the current OMX focus control mode.
+ *
+ * @param focusMode Filled with the focus control configuration read from
+ *                  the preview port.
+ * @return NO_ERROR on success, NO_INIT if the component is in an invalid
+ *         state, or the translated OMX error from OMX_GetConfig().
+ */
+status_t OMXCameraAdapter::getFocusMode(OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE &focusMode)
+{
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        return NO_INIT;
+    }
+
+    OMX_INIT_STRUCT_PTR (&focusMode, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
+    focusMode.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+    eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+                           OMX_IndexConfigFocusControl,
+                           &focusMode);
+
+    if ( OMX_ErrorNone != eError ) {
+        CAMHAL_LOGEB("Error while retrieving focus mode 0x%x", eError);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+/**
+ * Cancels a running autofocus scan.
+ *
+ * For modes other than CAF, infinity, and off, this stops AF and waits up to
+ * CANCEL_AF_TIMEOUT for the focus-off event from the component. It also
+ * unblocks any thread waiting in doAutoFocus() and resumes face callbacks.
+ *
+ * Returns NO_ERROR on success or a timeout/underlying error otherwise.
+ */
+status_t OMXCameraAdapter::cancelAutoFocus()
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusMode;
+
+    LOG_FUNCTION_NAME;
+
+    ret = getFocusMode(focusMode);
+    if ( NO_ERROR != ret ) {
+        return ret;
+    }
+
+    //Stop the AF only for modes other than CAF, Inifinity or Off
+    if ( ( focusMode.eFocusControl != OMX_IMAGE_FocusControlAuto ) &&
+         ( focusMode.eFocusControl != ( OMX_IMAGE_FOCUSCONTROLTYPE )
+                 OMX_IMAGE_FocusControlAutoInfinity ) &&
+         ( focusMode.eFocusControl != OMX_IMAGE_FocusControlOff ) ) {
+        android::AutoMutex lock(mCancelAFMutex);
+        stopAutoFocus();
+        // Wait for the OMX_FocusStatusOff event signaled by handleFocusCallback().
+        ret = mCancelAFCond.waitRelative(mCancelAFMutex, CANCEL_AF_TIMEOUT);
+        if ( NO_ERROR != ret ) {
+            CAMHAL_LOGE("Cancel AF timeout!");
+        }
+    } else if (focusMode.eFocusControl == OMX_IMAGE_FocusControlAuto) {
+        // This re-enabling of CAF doesn't seem to
+        // be needed any more.
+        // re-apply CAF after unlocking and canceling
+        // mPending3Asettings |= SetFocus;
+    }
+
+    {
+        // Signal to 'doAutoFocus()'
+        android::AutoMutex lock(mDoAFMutex);
+        mDoAFCond.broadcast();
+    }
+
+    // If the apps call #cancelAutoFocus()}, the face callbacks will also resume.
+    pauseFaceDetection(false);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+
+}
+
+/**
+ * Enables or disables OMX event notifications for focus status changes.
+ *
+ * @param enabled true to register for OMX_IndexConfigCommonFocusStatus
+ *                change events, false to unregister.
+ * @return NO_ERROR on success, -EINVAL on invalid component state,
+ *         -1 when the OMX callback-request config fails.
+ */
+status_t OMXCameraAdapter::setFocusCallback(bool enabled)
+{
+    status_t status = NO_ERROR;
+    OMX_ERRORTYPE omxErr = OMX_ErrorNone;
+    OMX_CONFIG_CALLBACKREQUESTTYPE cbRequest;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState ) {
+        CAMHAL_LOGEA("OMX component in Invalid state");
+        status = -EINVAL;
+    }
+
+    if ( OMX_StateExecuting != mComponentState ) {
+        CAMHAL_LOGEA("OMX component not in executing state");
+        return NO_ERROR;
+    }
+
+    if ( NO_ERROR == status ) {
+        OMX_INIT_STRUCT_PTR (&cbRequest, OMX_CONFIG_CALLBACKREQUESTTYPE);
+        cbRequest.nPortIndex = OMX_ALL;
+        cbRequest.nIndex = OMX_IndexConfigCommonFocusStatus;
+        cbRequest.bEnable = enabled ? OMX_TRUE : OMX_FALSE;
+
+        omxErr = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                               (OMX_INDEXTYPE) OMX_IndexConfigCallbackRequest,
+                               &cbRequest);
+        if ( OMX_ErrorNone != omxErr ) {
+            CAMHAL_LOGEB("Error registering focus callback 0x%x", omxErr);
+            status = -1;
+        } else {
+            CAMHAL_LOGDB("Autofocus callback for index 0x%x registered successfully",
+                         OMX_IndexConfigCommonFocusStatus);
+        }
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return status;
+}
+
+/**
+ * Reports the outcome of an autofocus request to subscribers.
+ *
+ * Queries the focus status (unless a timeout already decided the result),
+ * locks AE/AWB after a completed scan, stops AF, refreshes the published
+ * focus distances, transitions the adapter out of the AF state and resumes
+ * face detection callbacks.
+ *
+ * @param timeoutReached true when the AF callback never arrived; the event
+ *                       is then reported as FOCUS_STATUS_FAIL without
+ *                       querying the component.
+ * @return NO_ERROR on success or an error from the status query / state
+ *         machine transitions.
+ */
+status_t OMXCameraAdapter::returnFocusStatus(bool timeoutReached)
+{
+    status_t ret = NO_ERROR;
+    OMX_PARAM_FOCUSSTATUSTYPE eFocusStatus;
+    CameraHalEvent::FocusStatus focusStatus = CameraHalEvent::FOCUS_STATUS_FAIL;
+    BaseCameraAdapter::AdapterState state, nextState;
+    BaseCameraAdapter::getState(state);
+    BaseCameraAdapter::getNextState(nextState);
+
+    LOG_FUNCTION_NAME;
+
+    OMX_INIT_STRUCT(eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
+
+    if( ((AF_ACTIVE & state ) != AF_ACTIVE) && ((AF_ACTIVE & nextState ) != AF_ACTIVE) )
+        {
+        /// We don't send focus callback if focus was not started
+        CAMHAL_LOGDA("Not sending focus callback because focus was not started");
+        return NO_ERROR;
+        }
+
+    if ( NO_ERROR == ret )
+        {
+
+        if ( !timeoutReached )
+            {
+            ret = checkFocus(&eFocusStatus);
+
+            if ( NO_ERROR != ret )
+                {
+                CAMHAL_LOGEA("Focus status check failed!");
+                }
+            }
+        }
+
+    if ( NO_ERROR == ret )
+        {
+
+        if ( timeoutReached )
+            {
+            focusStatus = CameraHalEvent::FOCUS_STATUS_FAIL;
+            }
+        else
+            {
+            switch (eFocusStatus.eFocusStatus)
+                {
+                case OMX_FocusStatusReached:
+                    {
+                    focusStatus = CameraHalEvent::FOCUS_STATUS_SUCCESS;
+                    break;
+                    }
+                case OMX_FocusStatusOff: // AF got canceled
+                    return NO_ERROR;
+                case OMX_FocusStatusUnableToReach:
+                case OMX_FocusStatusRequest:
+                default:
+                    {
+                    focusStatus = CameraHalEvent::FOCUS_STATUS_FAIL;
+                    break;
+                    }
+                }
+            // Lock CAF after AF call
+            if( set3ALock(mUserSetExpLock, mUserSetWbLock, OMX_TRUE) != NO_ERROR) {
+                CAMHAL_LOGEA("Error Applying 3A locks");
+            } else {
+                CAMHAL_LOGDA("Focus locked. Applied focus locks successfully");
+            }
+
+            stopAutoFocus();
+            }
+        //Query current focus distance after AF is complete
+        updateFocusDistances(mParameters);
+        }
+
+    // Leave the AF state regardless of the focus outcome.
+    ret = BaseCameraAdapter::setState(CAMERA_CANCEL_AUTOFOCUS);
+    if ( NO_ERROR == ret )
+        {
+        ret = BaseCameraAdapter::commitState();
+        }
+    else
+        {
+        ret |= BaseCameraAdapter::rollbackState();
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        notifyFocusSubscribers(focusStatus);
+        }
+
+    // After focus, face detection will resume sending face callbacks
+    pauseFaceDetection(false);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * @brief Queries the current focus status from the OMX component.
+ *
+ * @param eFocusStatus Output structure; always zero-initialized before use.
+ * @return NO_ERROR on success (or when the component is simply not
+ *         executing, in which case no query is made and the status is left
+ *         zero-initialized), -EINVAL on a NULL argument or invalid
+ *         component state, -1 when the OMX query fails.
+ */
+status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus)
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME;
+
+    if ( NULL == eFocusStatus )
+        {
+        CAMHAL_LOGEA("Invalid focus status");
+        return -EINVAL;
+        }
+
+    OMX_INIT_STRUCT_PTR (eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
+
+    if ( OMX_StateInvalid == mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component in Invalid state");
+        return -EINVAL;
+        }
+
+    // Note: the previous version reset 'ret' to NO_ERROR here, which could
+    // mask an earlier -EINVAL and still query a non-executing component.
+    if ( OMX_StateExecuting != mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component not in executing state");
+        return NO_ERROR;
+        }
+
+    eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+                           OMX_IndexConfigCommonFocusStatus,
+                           eFocusStatus);
+    if ( OMX_ErrorNone != eError )
+        {
+        CAMHAL_LOGEB("Error while retrieving focus status: 0x%x", eError);
+        ret = -1;
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        CAMHAL_LOGDB("Focus Status: %d", eFocusStatus->eFocusStatus);
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Refreshes KEY_FOCUS_DISTANCES in 'params' with the near/optimal/far
+ * distances currently reported by the OMX component.
+ *
+ * @param params Parameter set to update.
+ * @return NO_ERROR on success, or the error from the query/encode step.
+ */
+status_t OMXCameraAdapter::updateFocusDistances(android::CameraParameters &params)
+{
+    OMX_U32 distNear, distOptimal, distFar;
+    status_t status;
+
+    LOG_FUNCTION_NAME;
+
+    status = getFocusDistances(distNear, distOptimal, distFar);
+    if ( NO_ERROR != status )
+        {
+        CAMHAL_LOGEB("Error in call to getFocusDistances() 0x%x", status);
+        }
+    else
+        {
+        status = addFocusDistances(distNear, distOptimal, distFar, params);
+        if ( NO_ERROR != status )
+            {
+            CAMHAL_LOGEB("Error in call to addFocusDistances() 0x%x", status);
+            }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return status;
+}
+
+/**
+ * Reads the near/optimal/far focus distances from the OMX component's
+ * preview port.
+ *
+ * @param near    Output: near focus distance.
+ * @param optimal Output: optimal focus distance.
+ * @param far     Output: far focus distance.
+ * @return NO_ERROR on success, UNKNOWN_ERROR on invalid component state or
+ *         OMX query failure (outputs untouched on error).
+ */
+status_t OMXCameraAdapter::getFocusDistances(OMX_U32 &near,OMX_U32 &optimal, OMX_U32 &far)
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError;
+
+    OMX_TI_CONFIG_FOCUSDISTANCETYPE focusDist;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        ret = UNKNOWN_ERROR;
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        OMX_INIT_STRUCT_PTR(&focusDist, OMX_TI_CONFIG_FOCUSDISTANCETYPE);
+        focusDist.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+        eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+                               ( OMX_INDEXTYPE ) OMX_TI_IndexConfigFocusDistance,
+                               &focusDist);
+        if ( OMX_ErrorNone != eError )
+            {
+            CAMHAL_LOGEB("Error while querying focus distances 0x%x", eError);
+            ret = UNKNOWN_ERROR;
+            }
+
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        near = focusDist.nFocusDistanceNear;
+        optimal = focusDist.nFocusDistanceOptimal;
+        far = focusDist.nFocusDistanceFar;
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * @brief Encodes a single focus distance value as a string into 'buffer'.
+ *
+ * A distance of 0, or any distance while infinity focus mode is selected,
+ * is encoded as the "Infinity" constant; otherwise the value is divided by
+ * 1000 and printed with three decimals.
+ *
+ * @param dist   Focus distance as reported by the OMX component.
+ * @param buffer Destination character buffer; always NUL-terminated on
+ *               success.
+ * @param length Size of 'buffer' in bytes (must be > 0).
+ * @return NO_ERROR on success, -EINVAL if 'length' is zero.
+ */
+status_t OMXCameraAdapter::encodeFocusDistance(OMX_U32 dist, char *buffer, size_t length)
+{
+    status_t ret = NO_ERROR;
+    uint32_t focusScale = 1000;
+    float distFinal;
+
+    LOG_FUNCTION_NAME;
+
+    if ( 0 == length )
+        {
+        // Not even a terminator fits.
+        return -EINVAL;
+        }
+
+    if(mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity)
+        {
+        dist=0;
+        }
+
+    if ( 0 == dist )
+        {
+        // strncpy() does not NUL-terminate when the source fills the
+        // buffer, so terminate explicitly.
+        strncpy(buffer, android::CameraParameters::FOCUS_DISTANCE_INFINITY, ( length - 1 ));
+        buffer[length - 1] = '\0';
+        }
+    else
+        {
+        distFinal = dist;
+        distFinal /= focusScale;
+        snprintf(buffer, ( length - 1 ) , "%5.3f", distFinal);
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * @brief Encodes the three focus distances and publishes them through
+ *        KEY_FOCUS_DISTANCES in 'params'.
+ *
+ * @param near    Near focus distance from the OMX component.
+ * @param optimal Optimal focus distance from the OMX component.
+ * @param far     Far focus distance from the OMX component.
+ * @param params  Parameter set updated with the "near,optimal,far" string.
+ * @return NO_ERROR on success, or the first encoding error encountered.
+ */
+status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
+                                             OMX_U32 &optimal,
+                                             OMX_U32 &far,
+                                             android::CameraParameters& params)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    if ( NO_ERROR == ret )
+        {
+        ret = encodeFocusDistance(near, mFocusDistNear, FOCUS_DIST_SIZE);
+        if ( NO_ERROR != ret )
+            {
+            CAMHAL_LOGEB("Error encoding near focus distance 0x%x", ret);
+            }
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        ret = encodeFocusDistance(optimal, mFocusDistOptimal, FOCUS_DIST_SIZE);
+        if ( NO_ERROR != ret )
+            {
+            // Was a copy-paste of the "near" message; report the right field.
+            CAMHAL_LOGEB("Error encoding optimal focus distance 0x%x", ret);
+            }
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        ret = encodeFocusDistance(far, mFocusDistFar, FOCUS_DIST_SIZE);
+        if ( NO_ERROR != ret )
+            {
+            // Was a copy-paste of the "near" message; report the right field.
+            CAMHAL_LOGEB("Error encoding far focus distance 0x%x", ret);
+            }
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        snprintf(mFocusDistBuffer, ( FOCUS_DIST_BUFFER_SIZE - 1) ,"%s,%s,%s", mFocusDistNear,
+                                                                             mFocusDistOptimal,
+                                                                             mFocusDistFar);
+
+        params.set(android::CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * @brief Programs the currently configured touch-focus areas into the OMX
+ *        component via a shared buffer.
+ *
+ * Scales each area from preview coordinates into the [0, TOUCH_FOCUS_RANGE]
+ * range the 3A algorithm expects, halving width/height for stereo layouts.
+ *
+ * NOTE(review): callers are expected to ensure mFocusAreas is non-empty
+ * before calling (see setParametersFocus()).
+ *
+ * @return NO_ERROR on success, -1 on invalid component state, -ENOMEM on
+ *         allocation failure, -EINVAL if the OMX config set fails.
+ */
+status_t OMXCameraAdapter::setTouchFocus()
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    OMX_ALGOAREASTYPE *focusAreas;
+    OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
+    CameraBuffer *bufferlist;
+    int areasSize = 0;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        ret = -1;
+        }
+
+    if ( NO_ERROR == ret )
+        {
+
+        // One page-aligned buffer shared with the OMX component.
+        areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
+        bufferlist = mMemMgr.allocateBufferList(0, 0, NULL, areasSize, 1);
+        // Check the list itself before dereferencing bufferlist[0].
+        if ( NULL == bufferlist )
+            {
+            CAMHAL_LOGEA("Error allocating buffer list for focus areas");
+            return -ENOMEM;
+            }
+        focusAreas = (OMX_ALGOAREASTYPE*) bufferlist[0].opaque;
+
+        OMXCameraPortParameters * mPreviewData = NULL;
+        mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+        if (!focusAreas)
+            {
+            CAMHAL_LOGEB("Error allocating buffer for focus areas %d", eError);
+            // Release the list before bailing out so it is not leaked.
+            mMemMgr.freeBufferList(bufferlist);
+            return -ENOMEM;
+            }
+
+        OMX_INIT_STRUCT_PTR (focusAreas, OMX_ALGOAREASTYPE);
+
+        focusAreas->nPortIndex = OMX_ALL;
+        focusAreas->nNumAreas = mFocusAreas.size();
+        focusAreas->nAlgoAreaPurpose = OMX_AlgoAreaFocus;
+
+        // If the area is the special case of (0, 0, 0, 0, 0), then
+        // the algorithm needs nNumAreas to be set to 0,
+        // in order to automatically choose the best fitting areas.
+        if ( mFocusAreas.itemAt(0)->isZeroArea() )
+            {
+            focusAreas->nNumAreas = 0;
+            }
+
+        for ( unsigned int n = 0; n < mFocusAreas.size(); n++) {
+            int widthDivisor = 1;
+            int heightDivisor = 1;
+
+            // Stereo layouts pack two views into one frame; scale per view.
+            if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottom) {
+                heightDivisor = 2;
+            }
+            if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutLeftRight) {
+                widthDivisor = 2;
+            }
+
+            // transform the coordinates to 3A-type coordinates
+            mFocusAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth/widthDivisor,
+                                             (size_t)mPreviewData->mHeight/heightDivisor,
+                                             (size_t&)focusAreas->tAlgoAreas[n].nTop,
+                                             (size_t&)focusAreas->tAlgoAreas[n].nLeft,
+                                             (size_t&)focusAreas->tAlgoAreas[n].nWidth,
+                                             (size_t&)focusAreas->tAlgoAreas[n].nHeight);
+
+            focusAreas->tAlgoAreas[n].nLeft =
+                    ( focusAreas->tAlgoAreas[n].nLeft * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
+            focusAreas->tAlgoAreas[n].nTop =
+                    ( focusAreas->tAlgoAreas[n].nTop* TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
+            focusAreas->tAlgoAreas[n].nWidth =
+                    ( focusAreas->tAlgoAreas[n].nWidth * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
+            focusAreas->tAlgoAreas[n].nHeight =
+                    ( focusAreas->tAlgoAreas[n].nHeight * TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
+            focusAreas->tAlgoAreas[n].nPriority = mFocusAreas.itemAt(n)->getWeight();
+
+            CAMHAL_LOGDB("Focus area %d : top = %d left = %d width = %d height = %d prio = %d",
+                         n, (int)focusAreas->tAlgoAreas[n].nTop, (int)focusAreas->tAlgoAreas[n].nLeft,
+                         (int)focusAreas->tAlgoAreas[n].nWidth, (int)focusAreas->tAlgoAreas[n].nHeight,
+                         (int)focusAreas->tAlgoAreas[n].nPriority);
+        }
+
+        OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
+
+        sharedBuffer.nPortIndex = OMX_ALL;
+        sharedBuffer.nSharedBuffSize = areasSize;
+        sharedBuffer.pSharedBuff = (OMX_U8 *) camera_buffer_get_omx_ptr (&bufferlist[0]);
+
+        if ( NULL == sharedBuffer.pSharedBuff )
+            {
+            CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
+            ret = -ENOMEM;
+            goto EXIT;
+            }
+
+        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                               (OMX_INDEXTYPE) OMX_TI_IndexConfigAlgoAreas, &sharedBuffer);
+
+        if ( OMX_ErrorNone != eError )
+            {
+            CAMHAL_LOGEB("Error while setting Focus Areas configuration 0x%x", eError);
+            ret = -EINVAL;
+            }
+
+    EXIT:
+        if (NULL != bufferlist)
+            {
+            mMemMgr.freeBufferList (bufferlist);
+            }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Handles an OMX focus-status-change event.
+ *
+ * Drops events that arrive outside AF states, signals cancelAutoFocus()
+ * when the status is Off, unblocks doAutoFocus() on end-of-scan, and for
+ * CAF translates the status into a pending/done event for subscribers.
+ */
+void OMXCameraAdapter::handleFocusCallback() {
+    OMX_PARAM_FOCUSSTATUSTYPE eFocusStatus;
+    CameraHalEvent::FocusStatus focusStatus = CameraHalEvent::FOCUS_STATUS_FAIL;
+    status_t ret = NO_ERROR;
+    BaseCameraAdapter::AdapterState nextState, currentState;
+    BaseCameraAdapter::getNextState(nextState);
+    BaseCameraAdapter::getState(currentState);
+
+    // Dropping AF callback if it triggered in non AF state
+    if ((currentState != AF_STATE) && (currentState != AF_ZOOM_STATE) &&
+        (currentState != VIDEO_AF_STATE) && (nextState != VIDEO_AF_STATE) &&
+        (nextState != AF_STATE) && (nextState != AF_ZOOM_STATE)) {
+        return;
+    }
+
+    OMX_INIT_STRUCT(eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
+
+    ret = checkFocus(&eFocusStatus);
+
+    if (NO_ERROR != ret) {
+        CAMHAL_LOGEA("Focus status check failed!");
+        // signal and unblock doAutoFocus
+        if (AF_ACTIVE & nextState) {
+            android::AutoMutex lock(mDoAFMutex);
+            mDoAFCond.broadcast();
+        }
+        return;
+    }
+
+    // Focus was switched off: this is what cancelAutoFocus() waits for.
+    if ( eFocusStatus.eFocusStatus == OMX_FocusStatusOff ) {
+        android::AutoMutex lock(mCancelAFMutex);
+        mCancelAFCond.signal();
+        return;
+    }
+
+    if (eFocusStatus.eFocusStatus != OMX_FocusStatusRequest) {
+        // signal doAutoFocus when a end of scan message comes
+        // ignore start of scan
+        android::AutoMutex lock(mDoAFMutex);
+        mDoAFCond.broadcast();
+    }
+
+    if (mParameters3A.Focus != (OMX_IMAGE_FOCUSCONTROLTYPE) OMX_IMAGE_FocusControlAuto) {
+        CAMHAL_LOGDA("unregistered focus callback when not in CAF or doAutoFocus... not handling");
+        return;
+    }
+
+    // Handling for CAF Callbacks
+    switch (eFocusStatus.eFocusStatus) {
+        case OMX_FocusStatusRequest:
+            focusStatus = CameraHalEvent::FOCUS_STATUS_PENDING;
+            break;
+        case OMX_FocusStatusReached:
+        case OMX_FocusStatusOff:
+        case OMX_FocusStatusUnableToReach:
+        default:
+            focusStatus = CameraHalEvent::FOCUS_STATUS_DONE;
+            break;
+    }
+
+    notifyFocusSubscribers(focusStatus);
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXMetadata.cpp b/camera/OMXCameraAdapter/OMXMetadata.cpp
new file mode 100644
index 0000000..af8c49c
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXMetadata.cpp
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXMetadata.cpp
+*
+* This file contains functionality for handling frame meta-data.
+*
+*/
+
+#undef LOG_TAG
+
+#define LOG_TAG "OMXMetaData"
+
+#include "OMXCameraAdapter.h"
+#include <camera/CameraMetadata.h>
+
+namespace Ti {
+namespace Camera {
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+/**
+ * Packs the OMX extradata attached to a captured frame into a single
+ * camera_memory_t blob laid out as:
+ *   [camera_metadata_t header][face array][LSC gain table]
+ * Offsets to the variable-sized sections are recorded inside the header.
+ *
+ * @param plat_pvt  Platform-private OMX pointer carrying the extradata chain.
+ * @param allocator camera_request_memory callback used to allocate the blob.
+ * @return the allocated blob on success, NULL if allocation fails.
+ */
+camera_memory_t * OMXCameraAdapter::getMetaData(const OMX_PTR plat_pvt,
+                                                camera_request_memory allocator) const
+{
+    camera_memory_t * ret = NULL;
+
+    OMX_OTHER_EXTRADATATYPE *extraData;
+    OMX_FACEDETECTIONTYPE *faceData = NULL;
+    OMX_TI_WHITEBALANCERESULTTYPE * WBdata = NULL;
+    OMX_TI_VECTSHOTINFOTYPE *shotInfo = NULL;
+    OMX_TI_LSCTABLETYPE *lscTbl = NULL;
+    camera_metadata_t *metaData;
+    size_t offset = 0;
+
+    // First pass: probe each extradata section and grow the blob size
+    // for the variable-length parts (faces, LSC table).
+    size_t metaDataSize = sizeof(camera_metadata_t);
+
+    extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_FaceDetection);
+    if ( NULL != extraData ) {
+        faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
+        metaDataSize += faceData->ulFaceCount * sizeof(camera_metadata_face_t);
+    }
+
+    extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_WhiteBalance);
+    if ( NULL != extraData ) {
+        WBdata = ( OMX_TI_WHITEBALANCERESULTTYPE * ) extraData->data;
+    }
+
+    extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_TI_VectShotInfo);
+    if ( NULL != extraData ) {
+        shotInfo = ( OMX_TI_VECTSHOTINFOTYPE * ) extraData->data;
+    }
+
+    extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_TI_LSCTable);
+    if ( NULL != extraData ) {
+        lscTbl = ( OMX_TI_LSCTABLETYPE * ) extraData->data;
+        metaDataSize += OMX_TI_LSC_GAIN_TABLE_SIZE;
+    }
+
+    ret = allocator(-1, metaDataSize, 1, NULL);
+    if ( NULL == ret ) {
+        return NULL;
+    } else {
+        metaData = static_cast<camera_metadata_t *> (ret->data);
+        offset += sizeof(camera_metadata_t);
+    }
+
+    // Faces: copy only detections above the confidence threshold,
+    // packing them directly after the header.
+    if ( NULL != faceData ) {
+        metaData->number_of_faces = 0;
+        int idx = 0;
+        metaData->faces_offset = offset;
+        struct camera_metadata_face *faces = reinterpret_cast<struct camera_metadata_face *> (static_cast<char*>(ret->data) + offset);
+        for ( int j = 0; j < faceData->ulFaceCount ; j++ ) {
+            if(faceData->tFacePosition[j].nScore <= FACE_DETECTION_THRESHOLD) {
+                continue;
+            }
+            idx = metaData->number_of_faces;
+            metaData->number_of_faces++;
+            // TODO: Rework and re-use encodeFaceCoordinates()
+            // NOTE(review): nWidth/nHeight are stored into bottom/right as-is,
+            // i.e. as raw extents rather than absolute edge coordinates -
+            // confirm that metadata consumers expect this layout.
+            faces[idx].left = faceData->tFacePosition[j].nLeft;
+            faces[idx].top = faceData->tFacePosition[j].nTop;
+            faces[idx].bottom = faceData->tFacePosition[j].nWidth;
+            faces[idx].right = faceData->tFacePosition[j].nHeight;
+        }
+        offset += sizeof(camera_metadata_face_t) * metaData->number_of_faces;
+    }
+
+    // White balance result: fixed-size scalar fields in the header.
+    if ( NULL != WBdata ) {
+        metaData->awb_temp = WBdata->nColorTemperature;
+        metaData->gain_b = WBdata->nGainB;
+        metaData->gain_gb = WBdata->nGainGB;
+        metaData->gain_gr = WBdata->nGainGR;
+        metaData->gain_r = WBdata->nGainR;
+        metaData->offset_b = WBdata->nOffsetB;
+        metaData->offset_gb = WBdata->nOffsetGB;
+        metaData->offset_gr = WBdata->nOffsetGR;
+        metaData->offset_r = WBdata->nOffsetR;
+    }
+
+    // LSC gain table: copied verbatim after the face array.
+    if ( NULL != lscTbl ) {
+        metaData->lsc_table_applied = lscTbl->bApplied;
+        metaData->lsc_table_size = OMX_TI_LSC_GAIN_TABLE_SIZE;
+        metaData->lsc_table_offset = offset;
+        uint8_t *lsc_table = reinterpret_cast<uint8_t *> (static_cast<char*>(ret->data) + offset);
+        memcpy(lsc_table, lscTbl->pGainTable, OMX_TI_LSC_GAIN_TABLE_SIZE);
+        offset += metaData->lsc_table_size;
+    }
+
+    // Per-shot exposure/gain vector: fixed-size scalar fields in the header.
+    if ( NULL != shotInfo ) {
+        metaData->frame_number = shotInfo->nFrameNum;
+        metaData->shot_number = shotInfo->nConfigId;
+        metaData->analog_gain = shotInfo->nAGain;
+        metaData->analog_gain_req = shotInfo->nReqGain;
+        metaData->analog_gain_min = shotInfo->nGainMin;
+        metaData->analog_gain_max = shotInfo->nGainMax;
+        metaData->analog_gain_error = shotInfo->nSenAGainErr;
+        metaData->analog_gain_dev = shotInfo->nDevAGain;
+        metaData->exposure_time = shotInfo->nExpTime;
+        metaData->exposure_time_req = shotInfo->nReqExpTime;
+        metaData->exposure_time_min = shotInfo->nExpMin;
+        metaData->exposure_time_max = shotInfo->nExpMax;
+        metaData->exposure_time_dev = shotInfo->nDevExpTime;
+        metaData->exposure_time_error = shotInfo->nSenExpTimeErr;
+        metaData->exposure_compensation_req = shotInfo->nReqEC;
+        metaData->exposure_dev = shotInfo->nDevEV;
+    }
+
+    return ret;
+}
+#endif
+
+/**
+ * Fills the preview frame metadata (analog gain and exposure time) from
+ * the OMX_TI_VECTSHOTINFOTYPE extradata attached to the frame.
+ *
+ * @param meta     Destination preview metadata structure.
+ * @param plat_pvt Platform-private OMX pointer carrying the extradata.
+ * @return NO_ERROR when a value changed since the previous frame,
+ *         NOT_ENOUGH_DATA when both values are unchanged (used to
+ *         suppress duplicate metadata events).
+ */
+status_t OMXCameraAdapter::encodePreviewMetadata(camera_frame_metadata_t *meta, const OMX_PTR plat_pvt)
+{
+    status_t ret = NO_ERROR;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+    OMX_OTHER_EXTRADATATYPE *extraData = NULL;
+
+    extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_TI_VectShotInfo);
+
+    if ( (NULL != extraData) && (NULL != extraData->data) ) {
+        OMX_TI_VECTSHOTINFOTYPE *shotInfo;
+        shotInfo = (OMX_TI_VECTSHOTINFOTYPE*) extraData->data;
+
+        meta->analog_gain = shotInfo->nAGain;
+        meta->exposure_time = shotInfo->nExpTime;
+    } else {
+        // No shot info on this frame - report "unknown" values.
+        meta->analog_gain = -1;
+        meta->exposure_time = -1;
+    }
+
+    // Send metadata event only after any value has been changed
+    if ((metadataLastAnalogGain == meta->analog_gain) &&
+        (metadataLastExposureTime == meta->exposure_time)) {
+        ret = NOT_ENOUGH_DATA;
+    } else {
+        metadataLastAnalogGain = meta->analog_gain;
+        metadataLastExposureTime = meta->exposure_time;
+    }
+#else
+    // no-op in non enhancement mode
+    CAMHAL_UNUSED(meta);
+    CAMHAL_UNUSED(plat_pvt);
+#endif
+
+    return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXReprocess.cpp b/camera/OMXCameraAdapter/OMXReprocess.cpp
new file mode 100644
index 0000000..6fdbe7b
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXReprocess.cpp
@@ -0,0 +1,382 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXReprocess.cpp
+*
+* This file contains functionality for handling reprocessing operations.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+
+
+namespace Ti {
+namespace Camera {
+
+/**
+ * Configures the reprocess (video tap-in) port geometry and color format
+ * from the first input buffer's properties. Marks SetFormat pending when
+ * anything actually changed so the port gets reprogrammed later.
+ *
+ * @param params  Camera parameters (unused here, kept for interface symmetry).
+ * @param buffers Input buffer array; buffers[0] provides geometry/format.
+ * @param state   Current adapter state (unused).
+ * @return NO_ERROR on success, BAD_VALUE for a NULL buffer array.
+ */
+status_t OMXCameraAdapter::setParametersReprocess(const android::CameraParameters &params,
+                                                CameraBuffer* buffers,
+                                                BaseCameraAdapter::AdapterState state)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    if (!buffers) {
+        CAMHAL_LOGE("invalid buffer array");
+        return BAD_VALUE;
+    }
+
+    OMXCameraPortParameters *portData =
+            &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+    const int width  = buffers[0].width;
+    const int height = buffers[0].height;
+    const int stride = buffers[0].stride;
+
+    // Map the buffer's format string onto an OMX color format,
+    // falling back to YUV420SP for NULL or unrecognized values.
+    OMX_COLOR_FORMATTYPE pixFormat;
+    const char* fmt = buffers[0].format;
+    if ((fmt != NULL) && (strcmp(fmt, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)) {
+        CAMHAL_LOGDA("YUV420SP format selected");
+        pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+    } else if ((fmt != NULL) && (strcmp(fmt, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0)) {
+        CAMHAL_LOGDA("RAW Picture format selected");
+        pixFormat = OMX_COLOR_FormatRawBayer10bit;
+    } else if ((fmt != NULL) && (strcmp(fmt, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0)) {
+        CAMHAL_LOGDA("YUV422i Picture format selected");
+        pixFormat = OMX_COLOR_FormatCbYCrY;
+    } else {
+        CAMHAL_LOGDA("Format not supported, selecting YUV420SP by default");
+        pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+    }
+
+    // Reconfigure the port only when geometry or format changed.
+    const bool changed = (width != (int)portData->mWidth) ||
+                         (height != (int)portData->mHeight) ||
+                         (stride != (int)portData->mStride) ||
+                         (pixFormat != portData->mColorFormat);
+    if (changed) {
+        portData->mWidth = width;
+        portData->mHeight = height;
+
+        // Packed two-bytes-per-pixel formats derive stride from width.
+        if ((OMX_COLOR_FormatRawBayer10bit == pixFormat) ||
+                (OMX_COLOR_FormatCbYCrY == pixFormat)) {
+            portData->mStride = width * 2;
+        } else {
+            portData->mStride = stride;
+        }
+
+        portData->mColorFormat = pixFormat;
+
+        mPendingReprocessSettings |= SetFormat;
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Queues all tap-in buffers on the video input port to kick off
+ * reprocessing. A no-op when the reprocess path has not been configured
+ * via UseBuffersReprocess().
+ *
+ * @return NO_ERROR on success, otherwise the OMX error converted to an
+ *         Android error (cleanup is performed on failure).
+ */
+status_t OMXCameraAdapter::startReprocess()
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMXCameraPortParameters * portData = NULL;
+
+    LOG_FUNCTION_NAME;
+    CAMHAL_LOGD ("mReprocConfigured = %d", mReprocConfigured);
+    if (!mReprocConfigured) {
+        return NO_ERROR;
+    }
+
+    portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+    // Fix: this trace previously carried the wrong label ("mReprocConfigured")
+    // while printing mBurstFramesQueued.
+    CAMHAL_LOGD ("mBurstFramesQueued = %d", mBurstFramesQueued);
+    if (NO_ERROR == ret) {
+        android::AutoMutex lock(mBurstLock);
+
+        // Hand every queueable input buffer to the OMX component.
+        for ( int index = 0 ; index < portData->mMaxQueueable ; index++ ) {
+            CAMHAL_LOGDB("Queuing buffer on video input port - %p, offset: %d, length: %d",
+                        portData->mBufferHeader[index]->pBuffer,
+                        portData->mBufferHeader[index]->nOffset,
+                        portData->mBufferHeader[index]->nFilledLen);
+            portData->mStatus[index] = OMXCameraPortParameters::FILL;
+            eError = OMX_EmptyThisBuffer(mCameraAdapterParameters.mHandleComp,
+                    (OMX_BUFFERHEADERTYPE*)portData->mBufferHeader[index]);
+            GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+        }
+    }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+    CameraHal::PPM("startReprocess buffers queued on video port: ", &mStartCapture);
+#endif
+
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+    CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+    performCleanupAfterError();
+    LOG_FUNCTION_NAME_EXIT;
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+/**
+ * Tears down the reprocess tap-in path: disables the video input port,
+ * frees the OMX buffer headers registered on it and releases internal
+ * buffers. A no-op when reprocessing is not configured.
+ *
+ * @return NO_ERROR on success, OMX/timeout error converted to an
+ *         Android error otherwise.
+ */
+status_t OMXCameraAdapter::stopReprocess()
+{
+    LOG_FUNCTION_NAME;
+
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMXCameraPortParameters *portData = NULL;
+
+    if (!mReprocConfigured) {
+        return NO_ERROR;
+    }
+
+    portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+    // Disable port - send command and then free all buffers
+    ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+                           OMX_EventCmdComplete,
+                           OMX_CommandPortDisable,
+                           mCameraAdapterParameters.mVideoInPortIndex,
+                           mStopReprocSem);
+    eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+                             OMX_CommandPortDisable,
+                             mCameraAdapterParameters.mVideoInPortIndex,
+                             NULL);
+    // Buffers must be freed while the disable command is in flight so the
+    // component can complete the transition.
+    if (portData) {
+        CAMHAL_LOGDB("Freeing buffers on reproc port - num: %d", portData->mNumBufs);
+        for (int index = 0 ; index < portData->mNumBufs ; index++) {
+            CAMHAL_LOGDB("Freeing buffer on reproc port - 0x%x",
+                         ( unsigned int ) portData->mBufferHeader[index]->pBuffer);
+            eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+                                    mCameraAdapterParameters.mVideoInPortIndex,
+                                    (OMX_BUFFERHEADERTYPE*)portData->mBufferHeader[index]);
+            GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+        }
+    }
+    CAMHAL_LOGDA("Waiting for port disable");
+    ret = mStopReprocSem.WaitTimeout(OMX_CMD_TIMEOUT);
+    if (mComponentState == OMX_StateInvalid) {
+        CAMHAL_LOGEA("Invalid State after Disable Image Port Exitting!!!");
+        goto EXIT;
+    }
+    if (NO_ERROR == ret) {
+        CAMHAL_LOGDA("Port disabled");
+    } else {
+        // Timed out: drop the registered event so it cannot fire later.
+        ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+                           OMX_EventCmdComplete,
+                           OMX_CommandPortDisable,
+                           mCameraAdapterParameters.mVideoInPortIndex,
+                           NULL);
+        CAMHAL_LOGDA("Timeout expired on port disable");
+        goto EXIT;
+    }
+
+    deinitInternalBuffers(mCameraAdapterParameters.mVideoInPortIndex);
+
+    mReprocConfigured = false;
+
+EXIT:
+    // NOTE: the success path falls through into EXIT as well, so this
+    // error-level trace is emitted even on a clean stop.
+    CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+    LOG_FUNCTION_NAME_EXIT;
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+/**
+ * Placeholder for disabling the reprocess (tap-in) path.
+ *
+ * Currently a no-op kept for interface symmetry with startReprocess() /
+ * stopReprocess(). The previous version carried dead locals and an
+ * unreferenced EXIT label (unused-label warning); behavior is unchanged.
+ *
+ * @return NO_ERROR always.
+ */
+status_t OMXCameraAdapter::disableReprocess(){
+    // no-op..for now
+    return NO_ERROR;
+}
+
+/**
+ * Registers the caller-supplied buffers on the video input (tap-in) port
+ * and enables it for reprocessing.
+ *
+ * Sequence: stop any active capture/reprocess, derive port settings from
+ * the buffers, select gralloc vs. ION buffer passing for DOMX, enable the
+ * port, register each buffer via OMX_UseBuffer, then wait for the port
+ * enable event.
+ *
+ * @param bufArr Input buffer array (must contain at least one buffer).
+ * @param num    Number of buffers in bufArr.
+ * @return NO_ERROR on success, BAD_VALUE for semaphore misuse, or an
+ *         OMX/timeout error converted to an Android error (with cleanup).
+ */
+status_t OMXCameraAdapter::UseBuffersReprocess(CameraBuffer *bufArr, int num)
+{
+    LOG_FUNCTION_NAME;
+
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMXCameraPortParameters *portData = NULL;
+
+    portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+    // A non-zero count means a previous enable never completed/was consumed.
+    if ( 0 != mUseReprocessSem.Count() ) {
+        CAMHAL_LOGEB("Error mUseReprocessSem semaphore count %d", mUseReprocessSem.Count());
+        return BAD_VALUE;
+    }
+
+    CAMHAL_ASSERT(num > 0);
+
+    // Quiesce any in-flight reprocess/capture before reconfiguring the port.
+    if (mAdapterState == REPROCESS_STATE) {
+        stopReprocess();
+    } else if (mAdapterState == CAPTURE_STATE) {
+        stopImageCapture();
+        stopReprocess();
+    }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+    CameraHal::PPM("Reprocess stopping image capture and disabling image port: ", &bufArr->ppmStamp);
+
+#endif
+
+    portData->mNumBufs = num;
+
+    // Configure
+    ret = setParametersReprocess(mParams, bufArr, mAdapterState);
+
+    if (mReprocConfigured) {
+        if (mPendingReprocessSettings & ECaptureParamSettings) {
+            stopReprocess();
+        } else {
+            // Tap in port has been already configured.
+            return NO_ERROR;
+        }
+    }
+
+    if (mPendingReprocessSettings & SetFormat) {
+        mPendingReprocessSettings &= ~SetFormat;
+        ret = setFormat(OMX_CAMERA_PORT_VIDEO_IN_VIDEO, *portData);
+        if ( ret != NO_ERROR ) {
+            CAMHAL_LOGEB("setFormat() failed %d", ret);
+            LOG_FUNCTION_NAME_EXIT;
+            return ret;
+        }
+    }
+
+    // Configure DOMX to use either gralloc handles or vptrs
+    OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+    OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
+
+    domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mVideoInPortIndex;
+    if (bufArr[0].type == CAMERA_BUFFER_ANW) {
+        CAMHAL_LOGD("Using ANW");
+        domxUseGrallocHandles.bEnable = OMX_TRUE;
+
+        // Need to allocate tiler reservation and state we are going to be using
+        // pagelist buffers. Assuming this happens when buffers if from anw
+        initInternalBuffers(mCameraAdapterParameters.mVideoInPortIndex);
+    } else {
+        CAMHAL_LOGD("Using ION");
+        domxUseGrallocHandles.bEnable = OMX_FALSE;
+    }
+    eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+                            (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+    if (eError!=OMX_ErrorNone) {
+        CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+    }
+    GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+    CameraHal::PPM("Reprocess configuration done: ", &bufArr->ppmStamp);
+
+#endif
+
+    // Enable Port
+    ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+                           OMX_EventCmdComplete,
+                           OMX_CommandPortEnable,
+                           mCameraAdapterParameters.mVideoInPortIndex,
+                           mUseReprocessSem);
+    eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+                             OMX_CommandPortEnable,
+                             mCameraAdapterParameters.mVideoInPortIndex,
+                             NULL);
+    GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+    // Register every buffer with the component while the enable command
+    // is in flight; headers are stored for later queueing/freeing.
+    for (int index = 0 ; index < portData->mNumBufs ; index++)
+    {
+        OMX_BUFFERHEADERTYPE *pBufferHdr;
+        CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d",
+                     (unsigned int)bufArr[index].opaque,
+                     (int)portData->mBufSize);
+
+        eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp,
+                               &pBufferHdr,
+                               mCameraAdapterParameters.mVideoInPortIndex,
+                               0,
+                               portData->mBufSize,
+                               (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
+
+        CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
+        GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+        pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+        bufArr[index].index = index;
+        pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+        pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+        pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+        pBufferHdr->nVersion.s.nRevision = 0;
+        pBufferHdr->nVersion.s.nStep = 0;
+        pBufferHdr->nOffset = bufArr[index].offset;
+        pBufferHdr->nFilledLen = bufArr[index].actual_size;
+        portData->mBufferHeader[index] = pBufferHdr;
+    }
+
+    // Wait for port enable event
+    CAMHAL_LOGDA("Waiting for port enable");
+    ret = mUseReprocessSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+    // Error out if somethiing bad happened while we wait
+    if (mComponentState == OMX_StateInvalid) {
+        CAMHAL_LOGEA("Invalid State while trying to enable port for reprocessing");
+        goto EXIT;
+    }
+
+    if (ret == NO_ERROR) {
+        CAMHAL_LOGDA("Port enabled");
+    } else {
+        // Timed out: drop the registered event so it cannot fire later.
+        ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+                           OMX_EventCmdComplete,
+                           OMX_CommandPortEnable,
+                           mCameraAdapterParameters.mVideoInPortIndex,
+                           NULL);
+        CAMHAL_LOGDA("Timeout expired on port enable");
+        goto EXIT;
+    }
+
+    mReprocConfigured = true;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+    CameraHal::PPM("Reprocess video port enabled and buffers registered: ", &bufArr->ppmStamp);
+
+#endif
+
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+    CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+    // Release image buffers
+    if ( NULL != mReleaseImageBuffersCallback ) {
+        mReleaseImageBuffersCallback(mReleaseData);
+    }
+    performCleanupAfterError();
+    LOG_FUNCTION_NAME_EXIT;
+    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXZoom.cpp b/camera/OMXCameraAdapter/OMXZoom.cpp
new file mode 100644
index 0000000..e39a3b0
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXZoom.cpp
@@ -0,0 +1,289 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXZoom.cpp
+*
+* This file contains functionality for handling zoom configurations.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+namespace Ti {
+namespace Camera {
+
+// Digital zoom scale factors in OMX Q16.16 fixed point, indexed by zoom
+// stage: 65536 == 1.0x up to 524288 == 8.0x, roughly geometrically spaced.
+const int32_t OMXCameraAdapter::ZOOM_STEPS [ZOOM_STAGES] = {
+    65536, 68157, 70124, 72745,
+    75366, 77988, 80609, 83231,
+    86508, 89784, 92406, 95683,
+    99615, 102892, 106168, 110100,
+    114033, 117965, 122552, 126484,
+    131072, 135660, 140247, 145490,
+    150733, 155976, 161219, 167117,
+    173015, 178913, 185467, 192020,
+    198574, 205783, 212992, 220201,
+    228065, 236585, 244449, 252969,
+    262144, 271319, 281149, 290980,
+    300810, 311951, 322437, 334234,
+    346030, 357827, 370934, 384041,
+    397148, 411566, 425984, 441057,
+    456131, 472515, 488899, 506593,
+    524288 };
+
+
+/**
+ * Applies the application-requested zoom index from the parameters.
+ *
+ * The index is applied immediately (CTS requires instant zoom); when an
+ * update is already in flight, the request is latched via mZoomUpdate and
+ * picked up later by advanceZoom(). Requests are ignored entirely while
+ * smooth zoom (ZOOM_ACTIVE) is running, and out-of-range values are
+ * silently dropped.
+ *
+ * @param params Camera parameters carrying KEY_ZOOM.
+ * @param state  Current adapter state.
+ * @return NO_ERROR always.
+ */
+status_t OMXCameraAdapter::setParametersZoom(const android::CameraParameters &params,
+                                             BaseCameraAdapter::AdapterState state)
+{
+    status_t ret = NO_ERROR;
+    android::AutoMutex lock(mZoomLock);
+
+    LOG_FUNCTION_NAME;
+
+    //Immediate zoom should not be available while smooth zoom is running
+    if ( ( ZOOM_ACTIVE & state ) != ZOOM_ACTIVE )
+        {
+        int zoom = params.getInt(android::CameraParameters::KEY_ZOOM);
+        if (( zoom >= 0 ) && ( zoom < mMaxZoomSupported )) {
+            mTargetZoomIdx = zoom;
+
+            //Immediate zoom should be applied instantly ( CTS requirement )
+            mCurrentZoomIdx = mTargetZoomIdx;
+            if(!mZoomUpdating) {
+                doZoom(mCurrentZoomIdx);
+                mZoomUpdating = true;
+            } else {
+                // A zoom is already being applied - queue this one.
+                mZoomUpdate = true;
+            }
+
+            CAMHAL_LOGDB("Zoom by App %d", zoom);
+        }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Programs the OMX digital zoom scale factor for the given table index.
+ *
+ * @param index Index into ZOOM_STEPS.
+ * @return NO_ERROR on success or when the index matches the previously
+ *         applied one, -EINVAL for an out-of-range index, -1 on OMX
+ *         failure or invalid component state.
+ */
+status_t OMXCameraAdapter::doZoom(int index)
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+    OMX_CONFIG_SCALEFACTORTYPE zoomControl;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid == mComponentState )
+        {
+        CAMHAL_LOGEA("OMX component is in invalid state");
+        ret = -1;
+        }
+
+    if (( 0 > index) || ((mMaxZoomSupported - 1 ) < index )) {
+        CAMHAL_LOGEB("Zoom index %d out of range", index);
+        ret = -EINVAL;
+    }
+
+    // NOTE: this same-index early return takes precedence over the error
+    // checks above - a repeated index reports NO_ERROR regardless.
+    if (mPreviousZoomIndx == index )
+        {
+        return NO_ERROR;
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        OMX_INIT_STRUCT_PTR (&zoomControl, OMX_CONFIG_SCALEFACTORTYPE);
+        zoomControl.nPortIndex = OMX_ALL;
+        zoomControl.xHeight = ZOOM_STEPS[index];
+        zoomControl.xWidth = ZOOM_STEPS[index];
+
+        eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+                                OMX_IndexConfigCommonDigitalZoom,
+                                &zoomControl);
+        if ( OMX_ErrorNone != eError )
+            {
+            CAMHAL_LOGEB("Error while applying digital zoom 0x%x", eError);
+            ret = -1;
+            }
+        else
+            {
+            CAMHAL_LOGDA("Digital zoom applied successfully");
+            // Remember what was applied so repeats can be skipped.
+            mPreviousZoomIndx = index;
+            }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Advances the zoom state machine by one step. Called per frame.
+ *
+ * Three cases, in order:
+ *  1. A stop was requested (mReturnZoomStatus): take one final step and
+ *     notify subscribers with completion status.
+ *  2. Current != target: step by one while smooth zoom is active
+ *     (ZOOM_ACTIVE), or jump straight to the target otherwise; on
+ *     reaching the target, leave the smooth-zoom state and notify.
+ *  3. Current == target while still in ZOOM_ACTIVE: just leave the
+ *     smooth-zoom state.
+ * Finally, any zoom request latched by setParametersZoom() while an
+ * update was in flight (mZoomUpdate) is applied.
+ *
+ * @return NO_ERROR on success, error from doZoom()/state change otherwise.
+ */
+status_t OMXCameraAdapter::advanceZoom()
+{
+    status_t ret = NO_ERROR;
+    AdapterState state;
+    android::AutoMutex lock(mZoomLock);
+
+    BaseCameraAdapter::getState(state);
+
+    if ( mReturnZoomStatus )
+        {
+        // Stop requested: one final step, then report completion.
+        mCurrentZoomIdx +=mZoomInc;
+        mTargetZoomIdx = mCurrentZoomIdx;
+        mReturnZoomStatus = false;
+        ret = doZoom(mCurrentZoomIdx);
+        notifyZoomSubscribers(mCurrentZoomIdx, true);
+        }
+    else if ( mCurrentZoomIdx != mTargetZoomIdx )
+        {
+        if ( ZOOM_ACTIVE & state )
+            {
+            // Smooth zoom: move one step toward the target.
+            if ( mCurrentZoomIdx < mTargetZoomIdx )
+                {
+                mZoomInc = 1;
+                }
+            else
+                {
+                mZoomInc = -1;
+                }
+
+            mCurrentZoomIdx += mZoomInc;
+            }
+        else
+            {
+            // Immediate zoom: jump straight to the target.
+            mCurrentZoomIdx = mTargetZoomIdx;
+            }
+
+        ret = doZoom(mCurrentZoomIdx);
+
+        if ( ZOOM_ACTIVE & state )
+            {
+            if ( mCurrentZoomIdx == mTargetZoomIdx )
+                {
+                CAMHAL_LOGDB("[Goal Reached] Smooth Zoom notify currentIdx = %d, targetIdx = %d",
+                             mCurrentZoomIdx,
+                             mTargetZoomIdx);
+
+                if ( NO_ERROR == ret )
+                    {
+
+                    ret = BaseCameraAdapter::setState(CAMERA_STOP_SMOOTH_ZOOM);
+
+                    if ( NO_ERROR == ret )
+                        {
+                        ret = BaseCameraAdapter::commitState();
+                        }
+                    else
+                        {
+                        ret |= BaseCameraAdapter::rollbackState();
+                        }
+
+                    }
+                mReturnZoomStatus = false;
+                notifyZoomSubscribers(mCurrentZoomIdx, true);
+                }
+            else
+                {
+                CAMHAL_LOGDB("[Advancing] Smooth Zoom notify currentIdx = %d, targetIdx = %d",
+                             mCurrentZoomIdx,
+                             mTargetZoomIdx);
+                notifyZoomSubscribers(mCurrentZoomIdx, false);
+                }
+            }
+        }
+    else if ( (mCurrentZoomIdx == mTargetZoomIdx ) &&
+              ( ZOOM_ACTIVE & state ) )
+        {
+        // Already at target but still in smooth-zoom state: exit it.
+        ret = BaseCameraAdapter::setState(CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM);
+
+        if ( NO_ERROR == ret )
+            {
+            ret = BaseCameraAdapter::commitState();
+            }
+        else
+            {
+            ret |= BaseCameraAdapter::rollbackState();
+            }
+
+        }
+
+    // Apply any zoom request latched while an update was in flight.
+    if(mZoomUpdate) {
+        doZoom(mTargetZoomIdx);
+        mZoomUpdate = false;
+        mZoomUpdating = true;
+    } else {
+        mZoomUpdating = false;
+    }
+
+    return ret;
+}
+
+/**
+ * Arms a smooth zoom transition toward the given table index.
+ *
+ * Only records the target; the actual stepping is driven frame-by-frame
+ * from advanceZoom().
+ *
+ * @param targetIdx Desired index into ZOOM_STEPS.
+ * @return NO_ERROR on success, -EINVAL for an out-of-range target.
+ */
+status_t OMXCameraAdapter::startSmoothZoom(int targetIdx)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mZoomLock);
+
+    CAMHAL_LOGDB("Start smooth zoom target = %d, mCurrentIdx = %d",
+                 targetIdx,
+                 mCurrentZoomIdx);
+
+    const bool targetInRange = (targetIdx >= 0) && (targetIdx < mMaxZoomSupported);
+    if (!targetInRange) {
+        CAMHAL_LOGEB("Smooth value out of range %d!", targetIdx);
+        ret = -EINVAL;
+    } else {
+        mTargetZoomIdx = targetIdx;
+        mZoomParameterIdx = mCurrentZoomIdx;
+        mReturnZoomStatus = false;
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Requests termination of an ongoing smooth zoom.
+ *
+ * If the target has not been reached yet, arms mReturnZoomStatus so that
+ * advanceZoom() performs one final step and notifies subscribers with the
+ * stop status. Fix: removed a duplicated `mReturnZoomStatus = true;`
+ * assignment present in the original.
+ *
+ * @return NO_ERROR always.
+ */
+status_t OMXCameraAdapter::stopSmoothZoom()
+{
+    status_t ret = NO_ERROR;
+    android::AutoMutex lock(mZoomLock);
+
+    LOG_FUNCTION_NAME;
+
+    if ( mTargetZoomIdx != mCurrentZoomIdx )
+        {
+        // Keep the step direction consistent with the unfinished transition.
+        if ( mCurrentZoomIdx < mTargetZoomIdx )
+            {
+            mZoomInc = 1;
+            }
+        else
+            {
+            mZoomInc = -1;
+            }
+        mReturnZoomStatus = true;
+        CAMHAL_LOGDB("Stop smooth zoom mCurrentZoomIdx = %d, mTargetZoomIdx = %d",
+                     mCurrentZoomIdx,
+                     mTargetZoomIdx);
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+} // namespace Camera
+} // namespace Ti