summary | refs | log | tree | commit | diff | stats
path: root/camera/OMXCameraAdapter
diff options
context:
space:
mode:
authorKyle Repinski <repinski23@gmail.com>2015-01-07 08:39:28 -0600
committerZiyan <jaraidaniel@gmail.com>2015-04-11 20:24:52 +0200
commitc5f4358c48d6e50b2affb3ad6c2c0f59546f5b04 (patch)
tree74070cb6d50ec822d3ef7acdaa390cf2f94c4097 /camera/OMXCameraAdapter
parentc52c76fa1593f374173a818b4de5bd7c51903fbc (diff)
downloaddevice_samsung_tuna-c5f4358c48d6e50b2affb3ad6c2c0f59546f5b04.zip
device_samsung_tuna-c5f4358c48d6e50b2affb3ad6c2c0f59546f5b04.tar.gz
device_samsung_tuna-c5f4358c48d6e50b2affb3ad6c2c0f59546f5b04.tar.bz2
camera: Update camera HAL.
Based on the OmapZoom p-jb-release branch, with 'CameraHal: Camera Capabilities query update' reverted and a bunch of stuff ifdef'd out. It still needs a lot of work; at this point it is a regression, but it has to be done.
Diffstat (limited to 'camera/OMXCameraAdapter')
-rw-r--r--camera/OMXCameraAdapter/OMX3A.cpp604
-rw-r--r--camera/OMXCameraAdapter/OMXAlgo.cpp571
-rw-r--r--[-rwxr-xr-x]camera/OMXCameraAdapter/OMXCameraAdapter.cpp2642
-rw-r--r--camera/OMXCameraAdapter/OMXCapabilities.cpp1977
-rw-r--r--camera/OMXCameraAdapter/OMXCapture.cpp1325
-rw-r--r--camera/OMXCameraAdapter/OMXDccDataSave.cpp361
-rw-r--r--camera/OMXCameraAdapter/OMXDefaults.cpp40
-rw-r--r--camera/OMXCameraAdapter/OMXExif.cpp60
-rw-r--r--camera/OMXCameraAdapter/OMXFD.cpp296
-rw-r--r--camera/OMXCameraAdapter/OMXFocus.cpp232
-rw-r--r--camera/OMXCameraAdapter/OMXMetadata.cpp181
-rw-r--r--camera/OMXCameraAdapter/OMXReprocess.cpp340
-rw-r--r--camera/OMXCameraAdapter/OMXZoom.cpp37
13 files changed, 6547 insertions, 2119 deletions
diff --git a/camera/OMXCameraAdapter/OMX3A.cpp b/camera/OMXCameraAdapter/OMX3A.cpp
index 7ae50e4..e7171f6 100644
--- a/camera/OMXCameraAdapter/OMX3A.cpp
+++ b/camera/OMXCameraAdapter/OMX3A.cpp
@@ -21,24 +21,19 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include "ErrorUtils.h"
#include <cutils/properties.h>
-#undef TRUE
-#undef FALSE
-#define TRUE "true"
-#define FALSE "false"
-
#define METERING_AREAS_RANGE 0xFF
-namespace android {
+static const char PARAM_SEP[] = ",";
+
+namespace Ti {
+namespace Camera {
+
const SceneModesEntry* OMXCameraAdapter::getSceneModeEntry(const char* name,
OMX_SCENEMODETYPE scene) {
const SceneModesEntry* cameraLUT = NULL;
@@ -69,7 +64,7 @@ const SceneModesEntry* OMXCameraAdapter::getSceneModeEntry(const char* name,
return entry;
}
-status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
+status_t OMXCameraAdapter::setParameters3A(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -81,9 +76,9 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(m3ASettingsUpdateLock);
+ android::AutoMutex lock(m3ASettingsUpdateLock);
- str = params.get(CameraParameters::KEY_SCENE_MODE);
+ str = params.get(android::CameraParameters::KEY_SCENE_MODE);
mode = getLUTvalue_HALtoOMX( str, SceneLUT);
if ( mFirstTimeInit || ((str != NULL) && ( mParameters3A.SceneMode != mode )) ) {
if ( 0 <= mode ) {
@@ -113,22 +108,52 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
#ifdef OMAP_ENHANCEMENT
-
- str = params.get(TICameraParameters::KEY_EXPOSURE_MODE);
- mode = getLUTvalue_HALtoOMX( str, ExpLUT);
- if ( ( str != NULL ) && ( mParameters3A.Exposure != mode ))
- {
- mParameters3A.Exposure = mode;
- CAMHAL_LOGDB("Exposure mode %d", mode);
- if ( 0 <= mParameters3A.Exposure )
- {
- mPending3Asettings |= SetExpMode;
+ if ( (str = params.get(TICameraParameters::KEY_EXPOSURE_MODE)) != NULL ) {
+ mode = getLUTvalue_HALtoOMX(str, ExpLUT);
+ if ( mParameters3A.Exposure != mode ) {
+ // If either the new or the old exposure mode is manual set also
+ // the SetManualExposure flag to call setManualExposureVal where
+ // the auto gain and exposure flags are configured
+ if ( mParameters3A.Exposure == OMX_ExposureControlOff ||
+ mode == OMX_ExposureControlOff ) {
+ mPending3Asettings |= SetManualExposure;
+ }
+ mParameters3A.Exposure = mode;
+ CAMHAL_LOGDB("Exposure mode %d", mode);
+ if ( 0 <= mParameters3A.Exposure ) {
+ mPending3Asettings |= SetExpMode;
}
}
-
+ if ( mode == OMX_ExposureControlOff ) {
+ mode = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE);
+ if ( mParameters3A.ManualExposure != mode ) {
+ mParameters3A.ManualExposure = mode;
+ CAMHAL_LOGDB("Manual Exposure = %d", mode);
+ mPending3Asettings |= SetManualExposure;
+ }
+ mode = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT);
+ if ( mParameters3A.ManualExposureRight != mode ) {
+ mParameters3A.ManualExposureRight = mode;
+ CAMHAL_LOGDB("Manual Exposure right = %d", mode);
+ mPending3Asettings |= SetManualExposure;
+ }
+ mode = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO);
+ if ( mParameters3A.ManualGain != mode ) {
+ mParameters3A.ManualGain = mode;
+ CAMHAL_LOGDB("Manual Gain = %d", mode);
+ mPending3Asettings |= SetManualExposure;
+ }
+ mode = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT);
+ if ( mParameters3A.ManualGainRight != mode ) {
+ mParameters3A.ManualGainRight = mode;
+ CAMHAL_LOGDB("Manual Gain right = %d", mode);
+ mPending3Asettings |= SetManualExposure;
+ }
+ }
+ }
#endif
- str = params.get(CameraParameters::KEY_WHITE_BALANCE);
+ str = params.get(android::CameraParameters::KEY_WHITE_BALANCE);
mode = getLUTvalue_HALtoOMX( str, WBalLUT);
if (mFirstTimeInit || ((str != NULL) && (mode != mParameters3A.WhiteBallance)))
{
@@ -141,7 +166,6 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
#ifdef OMAP_ENHANCEMENT
-
varint = params.getInt(TICameraParameters::KEY_CONTRAST);
if ( 0 <= varint )
{
@@ -189,10 +213,9 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
mPending3Asettings |= SetBrightness;
}
}
-
#endif
- str = params.get(CameraParameters::KEY_ANTIBANDING);
+ str = params.get(android::CameraParameters::KEY_ANTIBANDING);
mode = getLUTvalue_HALtoOMX(str,FlickerLUT);
if ( mFirstTimeInit || ( ( str != NULL ) && ( mParameters3A.Flicker != mode ) ))
{
@@ -205,7 +228,6 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
#ifdef OMAP_ENHANCEMENT
-
str = params.get(TICameraParameters::KEY_ISO);
mode = getLUTvalue_HALtoOMX(str, IsoLUT);
CAMHAL_LOGVB("ISO mode arrived in HAL : %s", str);
@@ -218,10 +240,9 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
mPending3Asettings |= SetISO;
}
}
-
#endif
- str = params.get(CameraParameters::KEY_FOCUS_MODE);
+ str = params.get(android::CameraParameters::KEY_FOCUS_MODE);
mode = getLUTvalue_HALtoOMX(str, FocusLUT);
if ( (mFirstTimeInit || ((str != NULL) && (mParameters3A.Focus != mode))))
{
@@ -237,19 +258,15 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
CAMHAL_LOGDB("Focus %x", mParameters3A.Focus);
}
- str = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION);
- varint = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
- if ( mFirstTimeInit ||
- (( str != NULL ) &&
- (mParameters3A.EVCompensation != varint )))
- {
+ str = params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION);
+ varint = params.getInt(android::CameraParameters::KEY_EXPOSURE_COMPENSATION);
+ if ( mFirstTimeInit || (str && (mParameters3A.EVCompensation != varint))) {
CAMHAL_LOGDB("Setting EV Compensation to %d", varint);
-
mParameters3A.EVCompensation = varint;
mPending3Asettings |= SetEVCompensation;
}
- str = params.get(CameraParameters::KEY_FLASH_MODE);
+ str = params.get(android::CameraParameters::KEY_FLASH_MODE);
mode = getLUTvalue_HALtoOMX( str, FlashLUT);
if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.FlashMode != mode )) )
{
@@ -267,7 +284,7 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
CAMHAL_LOGVB("Flash Setting %s", str);
CAMHAL_LOGVB("FlashMode %d", mParameters3A.FlashMode);
- str = params.get(CameraParameters::KEY_EFFECT);
+ str = params.get(android::CameraParameters::KEY_EFFECT);
mode = getLUTvalue_HALtoOMX( str, EffLUT);
if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.Effect != mode )) )
{
@@ -279,13 +296,13 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
- str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED);
- if ( (str != NULL) && (!strcmp(str, "true")) )
+ str = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED);
+ if ( (str != NULL) && (!strcmp(str, android::CameraParameters::TRUE)) )
{
OMX_BOOL lock = OMX_FALSE;
mUserSetExpLock = OMX_FALSE;
- str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
- if (str && ((strcmp(str, "true")) == 0))
+ str = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
+ if (str && ((strcmp(str, android::CameraParameters::TRUE)) == 0))
{
CAMHAL_LOGVA("Locking Exposure");
lock = OMX_TRUE;
@@ -304,13 +321,13 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
- str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED);
- if ( (str != NULL) && (!strcmp(str, "true")) )
+ str = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED);
+ if ( (str != NULL) && (!strcmp(str, android::CameraParameters::TRUE)) )
{
OMX_BOOL lock = OMX_FALSE;
mUserSetWbLock = OMX_FALSE;
- str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
- if (str && ((strcmp(str, "true")) == 0))
+ str = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
+ if (str && ((strcmp(str, android::CameraParameters::TRUE)) == 0))
{
CAMHAL_LOGVA("Locking WhiteBalance");
lock = OMX_TRUE;
@@ -329,24 +346,24 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
str = params.get(TICameraParameters::KEY_AUTO_FOCUS_LOCK);
- if (str && (strcmp(str, TRUE) == 0) && (mParameters3A.FocusLock != OMX_TRUE)) {
+ if (str && (strcmp(str, android::CameraParameters::TRUE) == 0) && (mParameters3A.FocusLock != OMX_TRUE)) {
CAMHAL_LOGVA("Locking Focus");
mParameters3A.FocusLock = OMX_TRUE;
setFocusLock(mParameters3A);
- } else if (str && (strcmp(str, FALSE) == 0) && (mParameters3A.FocusLock != OMX_FALSE)) {
+ } else if (str && (strcmp(str, android::CameraParameters::FALSE) == 0) && (mParameters3A.FocusLock != OMX_FALSE)) {
CAMHAL_LOGVA("UnLocking Focus");
mParameters3A.FocusLock = OMX_FALSE;
setFocusLock(mParameters3A);
}
- str = params.get(CameraParameters::KEY_METERING_AREAS);
+ str = params.get(android::CameraParameters::KEY_METERING_AREAS);
if ( (str != NULL) ) {
size_t MAX_METERING_AREAS;
- Vector< sp<CameraArea> > tempAreas;
+ android::Vector<android::sp<CameraArea> > tempAreas;
- MAX_METERING_AREAS = atoi(params.get(CameraParameters::KEY_MAX_NUM_METERING_AREAS));
+ MAX_METERING_AREAS = atoi(params.get(android::CameraParameters::KEY_MAX_NUM_METERING_AREAS));
- Mutex::Autolock lock(mMeteringAreasLock);
+ android::AutoMutex lock(mMeteringAreasLock);
ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
@@ -359,7 +376,7 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
if ( MAX_METERING_AREAS >= mMeteringAreas.size() ) {
CAMHAL_LOGDB("Setting Metering Areas %s",
- params.get(CameraParameters::KEY_METERING_AREAS));
+ params.get(android::CameraParameters::KEY_METERING_AREAS));
mPending3Asettings |= SetMeteringAreas;
} else {
@@ -370,11 +387,48 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
+// TI extensions for enable/disable algos
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_FIXED_GAMMA,
+ mParameters3A.AlgoFixedGamma, SetAlgoFixedGamma, "Fixed Gamma");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_NSF1,
+ mParameters3A.AlgoNSF1, SetAlgoNSF1, "NSF1");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_NSF2,
+ mParameters3A.AlgoNSF2, SetAlgoNSF2, "NSF2");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_SHARPENING,
+ mParameters3A.AlgoSharpening, SetAlgoSharpening, "Sharpening");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_THREELINCOLORMAP,
+ mParameters3A.AlgoThreeLinColorMap, SetAlgoThreeLinColorMap, "ThreeLinColorMap");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_GIC, mParameters3A.AlgoGIC, SetAlgoGIC, "GIC");
+
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+void OMXCameraAdapter::declareParameter3ABool(const android::CameraParameters &params, const char *key,
+ OMX_BOOL &current_setting, E3ASettingsFlags pending,
+ const char *msg)
+{
+ OMX_BOOL val = OMX_TRUE;
+ const char *str = params.get(key);
+
+ if (str && ((strcmp(str, android::CameraParameters::FALSE)) == 0))
+ {
+ CAMHAL_LOGVB("Disabling %s", msg);
+ val = OMX_FALSE;
+ }
+ else
+ {
+ CAMHAL_LOGVB("Enabling %s", msg);
+ }
+ if (current_setting != val)
+ {
+ current_setting = val;
+ CAMHAL_LOGDB("%s %s", msg, current_setting ? "enabled" : "disabled");
+ mPending3Asettings |= pending;
+ }
+}
+
int OMXCameraAdapter::getLUTvalue_HALtoOMX(const char * HalValue, LUTtype LUT)
{
int LUTsize = LUT.size;
@@ -396,30 +450,23 @@ const char* OMXCameraAdapter::getLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT)
return NULL;
}
-status_t OMXCameraAdapter::init3AParams(Gen3A_settings &Gen3A)
+int OMXCameraAdapter::getMultipleLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT, char * supported)
{
- LOG_FUNCTION_NAME;
-
- Gen3A.Effect = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EFFECT, EffLUT);
- Gen3A.FlashMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FLASH_MODE, FlashLUT);
- Gen3A.SceneMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_SCENE_MODE, SceneLUT);
- Gen3A.EVCompensation = atoi(OMXCameraAdapter::DEFAULT_EV_COMPENSATION);
- Gen3A.Focus = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FOCUS_MODE, FocusLUT);
- Gen3A.ISO = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ISO_MODE, IsoLUT);
- Gen3A.Flicker = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ANTIBANDING, FlickerLUT);
- Gen3A.Brightness = atoi(OMXCameraAdapter::DEFAULT_BRIGHTNESS);
- Gen3A.Saturation = atoi(OMXCameraAdapter::DEFAULT_SATURATION) - SATURATION_OFFSET;
- Gen3A.Sharpness = atoi(OMXCameraAdapter::DEFAULT_SHARPNESS) - SHARPNESS_OFFSET;
- Gen3A.Contrast = atoi(OMXCameraAdapter::DEFAULT_CONTRAST) - CONTRAST_OFFSET;
- Gen3A.WhiteBallance = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_WB, WBalLUT);
- Gen3A.Exposure = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EXPOSURE_MODE, ExpLUT);
- Gen3A.ExposureLock = OMX_FALSE;
- Gen3A.FocusLock = OMX_FALSE;
- Gen3A.WhiteBalanceLock = OMX_FALSE;
-
- LOG_FUNCTION_NAME_EXIT;
+ int num = 0;
+ int remaining_size;
+ int LUTsize = LUT.size;
+ for(int i = 0; i < LUTsize; i++)
+ if( LUT.Table[i].omxDefinition == OMXValue )
+ {
+ num++;
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ remaining_size = ((((int)MAX_PROP_VALUE_LENGTH - 1 - (int)strlen(supported)) < 0) ? 0 : (MAX_PROP_VALUE_LENGTH - 1 - strlen(supported)));
+ strncat(supported, LUT.Table[i].userDefinition, remaining_size);
+ }
- return NO_ERROR;
+ return num;
}
status_t OMXCameraAdapter::setExposureMode(Gen3A_settings& Gen3A)
@@ -453,7 +500,7 @@ status_t OMXCameraAdapter::setExposureMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
static bool isFlashDisabled() {
@@ -470,14 +517,80 @@ static bool isFlashDisabled() {
char value[PROPERTY_VALUE_MAX];
if (property_get("camera.flash_off", value, NULL) &&
- (!strcasecmp(value, "true") || !strcasecmp(value, "1"))) {
- ALOGW("flash is disabled for testing purpose");
+ (!strcasecmp(value, android::CameraParameters::TRUE) || !strcasecmp(value, "1"))) {
+ CAMHAL_LOGW("flash is disabled for testing purpose");
return true;
}
return false;
}
+status_t OMXCameraAdapter::setManualExposureVal(Gen3A_settings& Gen3A) {
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_EXPOSUREVALUETYPE expVal;
+ OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE expValRight;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState ) {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&expVal, OMX_CONFIG_EXPOSUREVALUETYPE);
+ OMX_INIT_STRUCT_PTR (&expValRight, OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE);
+ expVal.nPortIndex = OMX_ALL;
+ expValRight.nPortIndex = OMX_ALL;
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expVal);
+ if ( OMX_ErrorNone == eError ) {
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+ &expValRight);
+ }
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_GetConfig error 0x%x (manual exposure values)", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ if ( Gen3A.Exposure != OMX_ExposureControlOff ) {
+ expVal.bAutoShutterSpeed = OMX_TRUE;
+ expVal.bAutoSensitivity = OMX_TRUE;
+ } else {
+ expVal.bAutoShutterSpeed = OMX_FALSE;
+ expVal.nShutterSpeedMsec = Gen3A.ManualExposure;
+ expValRight.nShutterSpeedMsec = Gen3A.ManualExposureRight;
+ if ( Gen3A.ManualGain <= 0 || Gen3A.ManualGainRight <= 0 ) {
+ expVal.bAutoSensitivity = OMX_TRUE;
+ } else {
+ expVal.bAutoSensitivity = OMX_FALSE;
+ expVal.nSensitivity = Gen3A.ManualGain;
+ expValRight.nSensitivity = Gen3A.ManualGainRight;
+ }
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expVal);
+ if ( OMX_ErrorNone == eError ) {
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+ &expValRight);
+ }
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error 0x%x while configuring manual exposure values", eError);
+ } else {
+ CAMHAL_LOGDA("Camera manual exposure values configured successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
status_t OMXCameraAdapter::setFlashMode(Gen3A_settings& Gen3A)
{
status_t ret = NO_ERROR;
@@ -544,7 +657,7 @@ status_t OMXCameraAdapter::setFlashMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getFlashMode(Gen3A_settings& Gen3A)
@@ -576,7 +689,7 @@ status_t OMXCameraAdapter::getFlashMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setFocusMode(Gen3A_settings& Gen3A)
@@ -685,7 +798,7 @@ status_t OMXCameraAdapter::setFocusMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getFocusMode(Gen3A_settings& Gen3A)
@@ -716,7 +829,7 @@ status_t OMXCameraAdapter::getFocusMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setScene(Gen3A_settings& Gen3A)
@@ -760,7 +873,7 @@ status_t OMXCameraAdapter::setScene(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setEVCompensation(Gen3A_settings& Gen3A)
@@ -804,7 +917,7 @@ status_t OMXCameraAdapter::setEVCompensation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getEVCompensation(Gen3A_settings& Gen3A)
@@ -835,7 +948,7 @@ status_t OMXCameraAdapter::getEVCompensation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setWBMode(Gen3A_settings& Gen3A)
@@ -855,21 +968,9 @@ status_t OMXCameraAdapter::setWBMode(Gen3A_settings& Gen3A)
wb.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
wb.eWhiteBalControl = ( OMX_WHITEBALCONTROLTYPE ) Gen3A.WhiteBallance;
- if ( WB_FACE_PRIORITY == Gen3A.WhiteBallance )
- {
- //Disable Region priority and enable Face priority
- setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false);
- setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, true);
-
- //Then set the mode to auto
- wb.eWhiteBalControl = OMX_WhiteBalControlAuto;
- }
- else
- {
- //Disable Face and Region priority
- setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, false);
- setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false);
- }
+ // disable face and region priorities
+ setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, false);
+ setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false);
eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigCommonWhiteBalance,
@@ -956,7 +1057,7 @@ status_t OMXCameraAdapter::setFlicker(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setBrightness(Gen3A_settings& Gen3A)
@@ -993,7 +1094,7 @@ status_t OMXCameraAdapter::setBrightness(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setContrast(Gen3A_settings& Gen3A)
@@ -1076,7 +1177,7 @@ status_t OMXCameraAdapter::setSharpness(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getSharpness(Gen3A_settings& Gen3A)
@@ -1107,7 +1208,7 @@ status_t OMXCameraAdapter::getSharpness(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setSaturation(Gen3A_settings& Gen3A)
@@ -1144,7 +1245,7 @@ status_t OMXCameraAdapter::setSaturation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getSaturation(Gen3A_settings& Gen3A)
@@ -1175,13 +1276,14 @@ status_t OMXCameraAdapter::getSaturation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setISO(Gen3A_settings& Gen3A)
{
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_CONFIG_EXPOSUREVALUETYPE expValues;
+ OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE expValRight;
LOG_FUNCTION_NAME;
@@ -1191,41 +1293,60 @@ status_t OMXCameraAdapter::setISO(Gen3A_settings& Gen3A)
return NO_INIT;
}
+ // In case of manual exposure Gain is applied from setManualExposureVal
+ if ( Gen3A.Exposure == OMX_ExposureControlOff ) {
+ return NO_ERROR;
+ }
+
OMX_INIT_STRUCT_PTR (&expValues, OMX_CONFIG_EXPOSUREVALUETYPE);
+ OMX_INIT_STRUCT_PTR (&expValRight, OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE);
expValues.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ expValRight.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
- OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
- OMX_IndexConfigCommonExposureValue,
- &expValues);
+ eError = OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expValues);
- if( 0 == Gen3A.ISO )
- {
+ if ( OMX_ErrorNone == eError ) {
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+ &expValRight);
+ }
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_GetConfig error 0x%x (manual exposure values)", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ if( 0 == Gen3A.ISO ) {
expValues.bAutoSensitivity = OMX_TRUE;
- }
- else
- {
+ } else {
expValues.bAutoSensitivity = OMX_FALSE;
expValues.nSensitivity = Gen3A.ISO;
- }
+ expValRight.nSensitivity = expValues.nSensitivity;
+ }
eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
- OMX_IndexConfigCommonExposureValue,
- &expValues);
- if ( OMX_ErrorNone != eError )
- {
+ OMX_IndexConfigCommonExposureValue,
+ &expValues);
+
+ if ( OMX_ErrorNone == eError ) {
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+ &expValRight);
+ }
+ if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while configuring ISO 0x%x error = 0x%x",
( unsigned int ) expValues.nSensitivity,
eError);
- }
- else
- {
+ } else {
CAMHAL_LOGDB("ISO 0x%x configured successfully",
( unsigned int ) expValues.nSensitivity);
- }
+ }
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getISO(Gen3A_settings& Gen3A)
@@ -1256,7 +1377,7 @@ status_t OMXCameraAdapter::getISO(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setEffect(Gen3A_settings& Gen3A)
@@ -1293,7 +1414,7 @@ status_t OMXCameraAdapter::setEffect(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setWhiteBalanceLock(Gen3A_settings& Gen3A)
@@ -1325,7 +1446,7 @@ status_t OMXCameraAdapter::setWhiteBalanceLock(Gen3A_settings& Gen3A)
}
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setExposureLock(Gen3A_settings& Gen3A)
@@ -1357,7 +1478,7 @@ status_t OMXCameraAdapter::setExposureLock(Gen3A_settings& Gen3A)
}
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setFocusLock(Gen3A_settings& Gen3A)
@@ -1388,7 +1509,7 @@ status_t OMXCameraAdapter::setFocusLock(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_BOOL toggleFocus)
@@ -1422,7 +1543,6 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
}
else
{
- const char *lock_state_exp = toggleExp ? TRUE : FALSE;
CAMHAL_LOGDA("Exposure Lock GetConfig successfull");
/* Apply locks only when not applied already */
@@ -1431,7 +1551,6 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
setExposureLock(mParameters3A);
}
- mParams.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, lock_state_exp);
}
OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE);
@@ -1469,7 +1588,6 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
}
else
{
- const char *lock_state_wb = toggleWb ? TRUE : FALSE;
CAMHAL_LOGDA("WhiteBalance Lock GetConfig successfull");
/* Apply locks only when not applied already */
@@ -1478,10 +1596,9 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
setWhiteBalanceLock(mParameters3A);
}
- mParams.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, lock_state_wb);
}
EXIT:
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
@@ -1489,14 +1606,21 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_ALGOAREASTYPE **meteringAreas;
+ CameraBuffer *bufferlist;
+ OMX_ALGOAREASTYPE *meteringAreas;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
MemoryManager memMgr;
int areasSize = 0;
LOG_FUNCTION_NAME
- Mutex::Autolock lock(mMeteringAreasLock);
+ ret = memMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
+
+ android::AutoMutex lock(mMeteringAreasLock);
if ( OMX_StateInvalid == mComponentState )
{
@@ -1505,7 +1629,8 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
}
areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
- meteringAreas = (OMX_ALGOAREASTYPE**) memMgr.allocateBuffer(0, 0, NULL, areasSize, 1);
+ bufferlist = memMgr.allocateBufferList(0, 0, NULL, areasSize, 1);
+ meteringAreas = (OMX_ALGOAREASTYPE *)bufferlist[0].opaque;
OMXCameraPortParameters * mPreviewData = NULL;
mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
@@ -1516,37 +1641,47 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
return -ENOMEM;
}
- OMX_INIT_STRUCT_PTR (meteringAreas[0], OMX_ALGOAREASTYPE);
+ OMX_INIT_STRUCT_PTR (meteringAreas, OMX_ALGOAREASTYPE);
- meteringAreas[0]->nPortIndex = OMX_ALL;
- meteringAreas[0]->nNumAreas = mMeteringAreas.size();
- meteringAreas[0]->nAlgoAreaPurpose = OMX_AlgoAreaExposure;
+ meteringAreas->nPortIndex = OMX_ALL;
+ meteringAreas->nNumAreas = mMeteringAreas.size();
+ meteringAreas->nAlgoAreaPurpose = OMX_AlgoAreaExposure;
for ( unsigned int n = 0; n < mMeteringAreas.size(); n++)
{
+ int widthDivisor = 1;
+ int heightDivisor = 1;
+
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottom) {
+ heightDivisor = 2;
+ }
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutLeftRight) {
+ widthDivisor = 2;
+ }
+
// transform the coordinates to 3A-type coordinates
- mMeteringAreas.itemAt(n)->transfrom(mPreviewData->mWidth,
- mPreviewData->mHeight,
- meteringAreas[0]->tAlgoAreas[n].nTop,
- meteringAreas[0]->tAlgoAreas[n].nLeft,
- meteringAreas[0]->tAlgoAreas[n].nWidth,
- meteringAreas[0]->tAlgoAreas[n].nHeight);
-
- meteringAreas[0]->tAlgoAreas[n].nLeft =
- ( meteringAreas[0]->tAlgoAreas[n].nLeft * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
- meteringAreas[0]->tAlgoAreas[n].nTop =
- ( meteringAreas[0]->tAlgoAreas[n].nTop* METERING_AREAS_RANGE ) / mPreviewData->mHeight;
- meteringAreas[0]->tAlgoAreas[n].nWidth =
- ( meteringAreas[0]->tAlgoAreas[n].nWidth * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
- meteringAreas[0]->tAlgoAreas[n].nHeight =
- ( meteringAreas[0]->tAlgoAreas[n].nHeight * METERING_AREAS_RANGE ) / mPreviewData->mHeight;
-
- meteringAreas[0]->tAlgoAreas[n].nPriority = mMeteringAreas.itemAt(n)->getWeight();
+ mMeteringAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth/widthDivisor,
+ (size_t)mPreviewData->mHeight/heightDivisor,
+ (size_t&)meteringAreas->tAlgoAreas[n].nTop,
+ (size_t&)meteringAreas->tAlgoAreas[n].nLeft,
+ (size_t&)meteringAreas->tAlgoAreas[n].nWidth,
+ (size_t&)meteringAreas->tAlgoAreas[n].nHeight);
+
+ meteringAreas->tAlgoAreas[n].nLeft =
+ ( meteringAreas->tAlgoAreas[n].nLeft * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
+ meteringAreas->tAlgoAreas[n].nTop =
+ ( meteringAreas->tAlgoAreas[n].nTop* METERING_AREAS_RANGE ) / mPreviewData->mHeight;
+ meteringAreas->tAlgoAreas[n].nWidth =
+ ( meteringAreas->tAlgoAreas[n].nWidth * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
+ meteringAreas->tAlgoAreas[n].nHeight =
+ ( meteringAreas->tAlgoAreas[n].nHeight * METERING_AREAS_RANGE ) / mPreviewData->mHeight;
+
+ meteringAreas->tAlgoAreas[n].nPriority = mMeteringAreas.itemAt(n)->getWeight();
CAMHAL_LOGDB("Metering area %d : top = %d left = %d width = %d height = %d prio = %d",
- n, (int)meteringAreas[0]->tAlgoAreas[n].nTop, (int)meteringAreas[0]->tAlgoAreas[n].nLeft,
- (int)meteringAreas[0]->tAlgoAreas[n].nWidth, (int)meteringAreas[0]->tAlgoAreas[n].nHeight,
- (int)meteringAreas[0]->tAlgoAreas[n].nPriority);
+ n, (int)meteringAreas->tAlgoAreas[n].nTop, (int)meteringAreas->tAlgoAreas[n].nLeft,
+ (int)meteringAreas->tAlgoAreas[n].nWidth, (int)meteringAreas->tAlgoAreas[n].nHeight,
+ (int)meteringAreas->tAlgoAreas[n].nPriority);
}
@@ -1554,7 +1689,7 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
sharedBuffer.nPortIndex = OMX_ALL;
sharedBuffer.nSharedBuffSize = areasSize;
- sharedBuffer.pSharedBuff = (OMX_U8 *) meteringAreas[0];
+ sharedBuffer.pSharedBuff = (OMX_U8 *)camera_buffer_get_omx_ptr (&bufferlist[0]);
if ( NULL == sharedBuffer.pSharedBuff )
{
@@ -1577,15 +1712,100 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
}
EXIT:
- if (NULL != meteringAreas)
+ if (NULL != bufferlist)
{
- memMgr.freeBuffer((void*) meteringAreas);
- meteringAreas = NULL;
+ memMgr.freeBufferList(bufferlist);
}
return ret;
}
+//TI extensions for enable/disable algos
+status_t OMXCameraAdapter::setParameter3ABoolInvert(const OMX_INDEXTYPE omx_idx,
+ const OMX_BOOL data, const char *msg)
+{
+ OMX_BOOL inv_data;
+
+ if (OMX_TRUE == data)
+ {
+ inv_data = OMX_FALSE;
+ }
+ else if (OMX_FALSE == data)
+ {
+ inv_data = OMX_TRUE;
+ }
+ else
+ {
+ return BAD_VALUE;
+ }
+ return setParameter3ABool(omx_idx, inv_data, msg);
+}
+
+status_t OMXCameraAdapter::setParameter3ABool(const OMX_INDEXTYPE omx_idx,
+ const OMX_BOOL data, const char *msg)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_BOOLEANTYPE cfgdata;
+
+ LOG_FUNCTION_NAME
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&cfgdata, OMX_CONFIG_BOOLEANTYPE);
+ cfgdata.bEnabled = data;
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ omx_idx,
+ &cfgdata);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring %s error = 0x%x", msg, eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("%s configured successfully %d ", msg, cfgdata.bEnabled);
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+#ifndef OMAP_TUNA
+status_t OMXCameraAdapter::setAlgoFixedGamma(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABool((OMX_INDEXTYPE) OMX_TI_IndexConfigFixedGamma, Gen3A.AlgoFixedGamma, "Fixed Gamma");
+}
+
+status_t OMXCameraAdapter::setAlgoNSF1(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableNSF1, Gen3A.AlgoNSF1, "NSF1");
+}
+
+status_t OMXCameraAdapter::setAlgoNSF2(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableNSF2, Gen3A.AlgoNSF2, "NSF2");
+}
+
+status_t OMXCameraAdapter::setAlgoSharpening(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableSharpening, Gen3A.AlgoSharpening, "Sharpening");
+}
+
+status_t OMXCameraAdapter::setAlgoThreeLinColorMap(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableThreeLinColorMap, Gen3A.AlgoThreeLinColorMap, "Color Conversion");
+}
+
+status_t OMXCameraAdapter::setAlgoGIC(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableGIC, Gen3A.AlgoGIC, "Green Inballance Correction");
+}
+#endif
+
status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
{
status_t ret = NO_ERROR;
@@ -1594,7 +1814,7 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(m3ASettingsUpdateLock);
+ android::AutoMutex lock(m3ASettingsUpdateLock);
/*
* Scenes have a priority during the process
@@ -1692,6 +1912,11 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
break;
}
+ case SetManualExposure: {
+ ret |= setManualExposureVal(Gen3A);
+ break;
+ }
+
case SetFlash:
{
ret |= setFlashMode(Gen3A);
@@ -1714,6 +1939,46 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
ret |= setMeteringAreas(Gen3A);
}
break;
+
+#ifndef OMAP_TUNA
+ //TI extensions for enable/disable algos
+ case SetAlgoFixedGamma:
+ {
+ ret |= setAlgoFixedGamma(Gen3A);
+ }
+ break;
+
+ case SetAlgoNSF1:
+ {
+ ret |= setAlgoNSF1(Gen3A);
+ }
+ break;
+
+ case SetAlgoNSF2:
+ {
+ ret |= setAlgoNSF2(Gen3A);
+ }
+ break;
+
+ case SetAlgoSharpening:
+ {
+ ret |= setAlgoSharpening(Gen3A);
+ }
+ break;
+
+ case SetAlgoThreeLinColorMap:
+ {
+ ret |= setAlgoThreeLinColorMap(Gen3A);
+ }
+ break;
+
+ case SetAlgoGIC:
+ {
+ ret |= setAlgoGIC(Gen3A);
+ }
+ break;
+#endif
+
default:
CAMHAL_LOGEB("this setting (0x%x) is still not supported in CameraAdapter ",
currSett);
@@ -1728,4 +1993,5 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXAlgo.cpp b/camera/OMXCameraAdapter/OMXAlgo.cpp
index 12b9058..84090e9 100644
--- a/camera/OMXCameraAdapter/OMXAlgo.cpp
+++ b/camera/OMXCameraAdapter/OMXAlgo.cpp
@@ -21,70 +21,82 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include "ErrorUtils.h"
#undef TRUE
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersAlgo(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
const char *valstr = NULL;
+ const char *valManualStr = NULL;
const char *oldstr = NULL;
+ OMXCameraPortParameters *cap;
+ BrightnessMode gbce = BRIGHTNESS_OFF;
+ BrightnessMode glbce = BRIGHTNESS_OFF;
LOG_FUNCTION_NAME;
CaptureMode capMode;
CAMHAL_LOGDB("Capture mode %s", params.get(TICameraParameters::KEY_CAP_MODE));
- if ( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL )
- {
- if (strcmp(valstr, (const char *) TICameraParameters::HIGH_PERFORMANCE_MODE) == 0)
- {
+ if ( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL ) {
+ if (strcmp(valstr, (const char *) TICameraParameters::HIGH_PERFORMANCE_MODE) == 0) {
capMode = OMXCameraAdapter::HIGH_SPEED;
- }
- else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_MODE) == 0)
- {
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::EXPOSURE_BRACKETING) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::ZOOM_BRACKETING) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_MODE) == 0) {
capMode = OMXCameraAdapter::HIGH_QUALITY;
- }
- else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_ZSL_MODE) == 0)
- {
+ mCapabilitiesOpMode = MODE_HIGH_QUALITY;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_ZSL_MODE) == 0) {
capMode = OMXCameraAdapter::HIGH_QUALITY_ZSL;
- }
- else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0)
- {
+ mCapabilitiesOpMode = MODE_ZEROSHUTTERLAG;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0) {
capMode = OMXCameraAdapter::VIDEO_MODE;
- }
- else
- {
+ mCapabilitiesOpMode = MODE_VIDEO;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::CP_CAM_MODE) == 0) {
+ capMode = OMXCameraAdapter::CP_CAM;
+ mCapabilitiesOpMode = MODE_CPCAM;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::TEMP_BRACKETING) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else {
capMode = OMXCameraAdapter::HIGH_QUALITY;
- }
+ mCapabilitiesOpMode = MODE_HIGH_QUALITY;
}
- else
- {
- capMode = OMXCameraAdapter::HIGH_QUALITY_ZSL;
- }
+ } else {
+ capMode = OMXCameraAdapter::HIGH_QUALITY;
+ mCapabilitiesOpMode = MODE_HIGH_QUALITY;
+ }
- if ( mCapMode != capMode )
- {
+ if ( mSensorIndex == 2 ) {
+ mCapabilitiesOpMode = MODE_STEREO;
+ }
+
+ if ( mCapMode != capMode ) {
mCapMode = capMode;
mOMXStateSwitch = true;
- }
+ mPendingPreviewSettings |= SetCapMode;
+ }
CAMHAL_LOGDB("Capture Mode set %d", mCapMode);
/// Configure IPP, LDCNSF, GBCE and GLBCE only in HQ mode
IPPMode ipp;
if((mCapMode == OMXCameraAdapter::HIGH_QUALITY) || (mCapMode == OMXCameraAdapter::HIGH_QUALITY_ZSL)
- || (mCapMode == OMXCameraAdapter::VIDEO_MODE) )
+ || (mCapMode == OMXCameraAdapter::VIDEO_MODE)
+ || (mCapMode == OMXCameraAdapter::CP_CAM))
{
if ( (valstr = params.get(TICameraParameters::KEY_IPP)) != NULL )
{
@@ -116,95 +128,57 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
CAMHAL_LOGVB("IPP Mode set %d", ipp);
- if (((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) )
- {
- // Configure GBCE only if the setting has changed since last time
- oldstr = mParams.get(TICameraParameters::KEY_GBCE);
- bool cmpRes = true;
- if ( NULL != oldstr )
- {
- cmpRes = strcmp(valstr, oldstr) != 0;
- }
- else
- {
- cmpRes = true;
- }
-
+ if (((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) ) {
+ if (strcmp(valstr, android::CameraParameters::TRUE ) == 0) {
+ gbce = BRIGHTNESS_ON;
+ } else {
+ gbce = BRIGHTNESS_OFF;
+ }
- if( cmpRes )
- {
- if (strcmp(valstr, ( const char * ) TICameraParameters::GBCE_ENABLE ) == 0)
- {
- setGBCE(OMXCameraAdapter::BRIGHTNESS_ON);
- }
- else if (strcmp(valstr, ( const char * ) TICameraParameters::GBCE_DISABLE ) == 0)
- {
- setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
- else
- {
- setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
- }
+ if ( gbce != mGBCE ) {
+ mGBCE = gbce;
+ setGBCE(mGBCE);
}
- else if(mParams.get(TICameraParameters::KEY_GBCE) || mFirstTimeInit)
- {
+
+ } else if(mFirstTimeInit) {
//Disable GBCE by default
setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
+ }
- if ( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL )
- {
- // Configure GLBCE only if the setting has changed since last time
-
- oldstr = mParams.get(TICameraParameters::KEY_GLBCE);
- bool cmpRes = true;
- if ( NULL != oldstr )
- {
- cmpRes = strcmp(valstr, oldstr) != 0;
- }
- else
- {
- cmpRes = true;
- }
+ if ( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL ) {
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ glbce = BRIGHTNESS_ON;
+ } else {
+ glbce = BRIGHTNESS_OFF;
+ }
- if( cmpRes )
- {
- if (strcmp(valstr, ( const char * ) TICameraParameters::GLBCE_ENABLE ) == 0)
- {
- setGLBCE(OMXCameraAdapter::BRIGHTNESS_ON);
- }
- else if (strcmp(valstr, ( const char * ) TICameraParameters::GLBCE_DISABLE ) == 0)
- {
- setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
- else
- {
- setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
- }
+ if ( glbce != mGLBCE ) {
+ mGLBCE = glbce;
+ setGLBCE(mGLBCE);
}
- else if(mParams.get(TICameraParameters::KEY_GLBCE) || mFirstTimeInit)
- {
+
+ } else if(mFirstTimeInit) {
//Disable GLBCE by default
setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
}
- else
- {
+
+ } else {
ipp = OMXCameraAdapter::IPP_NONE;
- }
+ }
if ( mIPP != ipp )
{
mIPP = ipp;
mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetLDC;
+ mPendingPreviewSettings |= SetNSF;
}
///Set VNF Configuration
bool vnfEnabled = false;
- if ( params.getInt(TICameraParameters::KEY_VNF) > 0 )
+ valstr = params.get(TICameraParameters::KEY_VNF);
+ if (valstr && strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
CAMHAL_LOGDA("VNF Enabled");
vnfEnabled = true;
@@ -219,12 +193,13 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
{
mVnfEnabled = vnfEnabled;
mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetVNF;
}
///Set VSTAB Configuration
bool vstabEnabled = false;
- valstr = params.get(CameraParameters::KEY_VIDEO_STABILIZATION);
- if (valstr && strcmp(valstr, CameraParameters::TRUE) == 0) {
+ valstr = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ if (valstr && strcmp(valstr, android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGDA("VSTAB Enabled");
vstabEnabled = true;
}
@@ -238,6 +213,7 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
{
mVstabEnabled = vstabEnabled;
mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetVSTAB;
}
//A work-around for a failing call to OMX flush buffers
@@ -250,35 +226,30 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
#ifdef OMAP_ENHANCEMENT
//Set Auto Convergence Mode
- valstr = params.get((const char *) TICameraParameters::KEY_AUTOCONVERGENCE);
- if ( valstr != NULL )
- {
- // Set ManualConvergence default value
- OMX_S32 manualconvergence = -30;
- if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE) == 0 )
- {
- setAutoConvergence(OMX_TI_AutoConvergenceModeDisable, manualconvergence);
+ valstr = params.get((const char *) TICameraParameters::KEY_AUTOCONVERGENCE_MODE);
+ valManualStr = params.get(TICameraParameters::KEY_MANUAL_CONVERGENCE);
+
+ cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+ if (cap->mFrameLayoutType != OMX_TI_StereoFrameLayout2D) {
+ if ((valstr != NULL) || (valManualStr != NULL)) {
+ setAutoConvergence(valstr, valManualStr, params);
+ if (valstr != NULL) {
+ CAMHAL_LOGDB("AutoConvergenceMode %s", valstr);
+ }
+ if (valManualStr != NULL) {
+ CAMHAL_LOGDB("Manual Convergence %s", valManualStr);
}
- else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_FRAME) == 0 )
- {
- setAutoConvergence(OMX_TI_AutoConvergenceModeFrame, manualconvergence);
- }
- else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_CENTER) == 0 )
- {
- setAutoConvergence(OMX_TI_AutoConvergenceModeCenter, manualconvergence);
- }
- else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_FFT) == 0 )
- {
- setAutoConvergence(OMX_TI_AutoConvergenceModeFocusFaceTouch, manualconvergence);
- }
- else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL) == 0 )
- {
- manualconvergence = (OMX_S32)params.getInt(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES);
- setAutoConvergence(OMX_TI_AutoConvergenceModeManual, manualconvergence);
- }
- CAMHAL_LOGVB("AutoConvergenceMode %s, value = %d", valstr, (int) manualconvergence);
}
+ //Set Mechanical Misalignment Correction
+ valstr = params.get(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION);
+ if ( valstr != NULL ) {
+ setMechanicalMisalignmentCorrection(strcmp(valstr, android::CameraParameters::TRUE) == 0);
+ CAMHAL_LOGDB("Mechanical Misalignment Correction %s", valstr);
+ }
+ }
+
#endif
LOG_FUNCTION_NAME_EXIT;
@@ -286,67 +257,123 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
return ret;
}
-// Get AutoConvergence
-status_t OMXCameraAdapter::getAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE *pACMode,
- OMX_S32 *pManualConverence)
+// Set AutoConvergence
+status_t OMXCameraAdapter::setAutoConvergence(const char *pValstr, const char *pValManualstr, const android::CameraParameters &params)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_TI_CONFIG_CONVERGENCETYPE ACParams;
-
- ACParams.nSize = sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
- ACParams.nVersion = mLocalVersionParam;
- ACParams.nPortIndex = OMX_ALL;
+ const char *str = NULL;
+ android::Vector<android::sp<CameraArea> > tempAreas;
+ int mode;
+ int changed = 0;
LOG_FUNCTION_NAME;
- eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
- (OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
- &ACParams);
- if ( eError != OMX_ErrorNone )
- {
- CAMHAL_LOGEB("Error while getting AutoConvergence 0x%x", eError);
- ret = -EINVAL;
+ if ( pValManualstr != NULL ) {
+ OMX_S32 manualConvergence = (OMX_S32)strtol(pValManualstr ,0 ,0);
+
+ if (mManualConv != manualConvergence) {
+ mManualConv = manualConvergence;
+ changed = 1;
}
- else
- {
- *pManualConverence = ACParams.nManualConverence;
- *pACMode = ACParams.eACMode;
- CAMHAL_LOGDA("AutoConvergence got successfully");
+ }
+
+ if ( pValstr != NULL ) {
+ mode = getLUTvalue_HALtoOMX(pValstr, mAutoConvergenceLUT);
+
+ if ( NAME_NOT_FOUND == mode ) {
+ CAMHAL_LOGEB("Wrong convergence mode: %s", pValstr);
+ LOG_FUNCTION_NAME_EXIT;
+ return mode;
}
- LOG_FUNCTION_NAME_EXIT;
+ if ( mAutoConv != static_cast<OMX_TI_AUTOCONVERGENCEMODETYPE> (mode) ) {
+ mAutoConv = static_cast<OMX_TI_AUTOCONVERGENCEMODETYPE> (mode);
+ changed = 1;
+ }
+ }
- return ret;
-}
+ if ( OMX_TI_AutoConvergenceModeFocusFaceTouch == mAutoConv ) {
+ android::AutoMutex lock(mTouchAreasLock);
-// Set AutoConvergence
-status_t OMXCameraAdapter::setAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE pACMode,
- OMX_S32 pManualConverence)
-{
- status_t ret = NO_ERROR;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_TI_CONFIG_CONVERGENCETYPE ACParams;
+ str = params.get(android::CameraParameters::KEY_METERING_AREAS);
- LOG_FUNCTION_NAME;
+ if ( NULL != str ) {
+ ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
+ } else {
+ CAMHAL_LOGEB("Touch areas not received in %s",
+ android::CameraParameters::KEY_METERING_AREAS);
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ if ( CameraArea::areAreasDifferent(mTouchAreas, tempAreas) ) {
+ mTouchAreas.clear();
+ mTouchAreas = tempAreas;
+ changed = 1;
+ }
+ }
- ACParams.nSize = sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
+ if (!changed) {
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ OMXCameraPortParameters * mPreviewData;
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+ ACParams.nSize = (OMX_U32)sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
ACParams.nVersion = mLocalVersionParam;
ACParams.nPortIndex = OMX_ALL;
- ACParams.nManualConverence = pManualConverence;
- ACParams.eACMode = pACMode;
+
+ OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
+ &ACParams);
+
+ ACParams.eACMode = mAutoConv;
+ ACParams.nManualConverence = mManualConv;
+
+ if (1 == mTouchAreas.size()) {
+ int widthDivisor = 1;
+ int heightDivisor = 1;
+
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottom) {
+ heightDivisor = 2;
+ }
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutLeftRight) {
+ widthDivisor = 2;
+ }
+
+ // transform the coordinates to 3A-type coordinates
+ mTouchAreas.itemAt(0)->transfrom((size_t)mPreviewData->mWidth/widthDivisor,
+ (size_t)mPreviewData->mHeight/heightDivisor,
+ (size_t&) ACParams.nACProcWinStartY,
+ (size_t&) ACParams.nACProcWinStartX,
+ (size_t&) ACParams.nACProcWinWidth,
+ (size_t&) ACParams.nACProcWinHeight);
+ }
+
+ CAMHAL_LOGDB("nSize %d", (int)ACParams.nSize);
+ CAMHAL_LOGDB("nPortIndex %d", (int)ACParams.nPortIndex);
+ CAMHAL_LOGDB("nManualConverence %d", (int)ACParams.nManualConverence);
+ CAMHAL_LOGDB("eACMode %d", (int)ACParams.eACMode);
+ CAMHAL_LOGDB("nACProcWinStartX %d", (int)ACParams.nACProcWinStartX);
+ CAMHAL_LOGDB("nACProcWinStartY %d", (int)ACParams.nACProcWinStartY);
+ CAMHAL_LOGDB("nACProcWinWidth %d", (int)ACParams.nACProcWinWidth);
+ CAMHAL_LOGDB("nACProcWinHeight %d", (int)ACParams.nACProcWinHeight);
+ CAMHAL_LOGDB("bACStatus %d", (int)ACParams.bACStatus);
+
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
(OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
&ACParams);
- if ( eError != OMX_ErrorNone )
- {
+
+ if ( eError != OMX_ErrorNone ) {
CAMHAL_LOGEB("Error while setting AutoConvergence 0x%x", eError);
- ret = -EINVAL;
- }
- else
- {
+ ret = BAD_VALUE;
+ } else {
CAMHAL_LOGDA("AutoConvergence applied successfully");
- }
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -603,17 +630,18 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_CONFIG_CAMOPERATINGMODETYPE camMode;
- OMX_TI_PARAM_ZSLHISTORYLENTYPE zslHistoryLen;
OMX_CONFIG_BOOLEANTYPE bCAC;
+#ifndef OMAP_TUNA
+ OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE singlePrevMode;
+#endif
LOG_FUNCTION_NAME;
- //ZSL have 4 buffers history by default
- OMX_INIT_STRUCT_PTR (&zslHistoryLen, OMX_TI_PARAM_ZSLHISTORYLENTYPE);
- zslHistoryLen.nHistoryLen = 4;
-
//CAC is disabled by default
OMX_INIT_STRUCT_PTR (&bCAC, OMX_CONFIG_BOOLEANTYPE);
+#ifndef OMAP_TUNA
+ OMX_INIT_STRUCT_PTR (&singlePrevMode, OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE);
+#endif
bCAC.bEnabled = OMX_FALSE;
if ( NO_ERROR == ret )
@@ -622,14 +650,32 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
OMX_INIT_STRUCT_PTR (&camMode, OMX_CONFIG_CAMOPERATINGMODETYPE);
if ( mSensorIndex == OMX_TI_StereoSensor )
{
- CAMHAL_LOGDA("Camera mode: STEREO");
- camMode.eCamOperatingMode = OMX_CaptureStereoImageCapture;
+#ifndef OMAP_TUNA
+ if ( OMXCameraAdapter::VIDEO_MODE == mode ) {
+ CAMHAL_LOGDA("Camera mode: STEREO VIDEO");
+ camMode.eCamOperatingMode = OMX_TI_StereoVideo;
+ } else {
+#endif
+ CAMHAL_LOGDA("Camera mode: STEREO");
+ camMode.eCamOperatingMode = OMX_CaptureStereoImageCapture;
+#ifndef OMAP_TUNA
+ }
+#endif
}
else if ( OMXCameraAdapter::HIGH_SPEED == mode )
{
CAMHAL_LOGDA("Camera mode: HIGH SPEED");
camMode.eCamOperatingMode = OMX_CaptureImageHighSpeedTemporalBracketing;
}
+ else if ( OMXCameraAdapter::CP_CAM == mode )
+ {
+ CAMHAL_LOGDA("Camera mode: CP CAM");
+#ifndef OMAP_TUNA
+ camMode.eCamOperatingMode = OMX_TI_CPCam;
+ // TODO(XXX): Hardcode for now until we implement re-proc pipe
+ singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
+#endif
+ }
else if( OMXCameraAdapter::HIGH_QUALITY == mode )
{
CAMHAL_LOGDA("Camera mode: HIGH QUALITY");
@@ -641,9 +687,11 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
CAMHAL_LOGDA("Camera mode: HIGH QUALITY_ZSL");
camMode.eCamOperatingMode = OMX_TI_CaptureImageProfileZeroShutterLag;
+#ifdef CAMERAHAL_TUNA
if ( !mIternalRecordingHint ) {
zslHistoryLen.nHistoryLen = 5;
}
+#endif
}
else if( OMXCameraAdapter::VIDEO_MODE == mode )
@@ -660,31 +708,12 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
if( NO_ERROR == ret )
{
eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
- ( OMX_INDEXTYPE ) OMX_TI_IndexParamZslHistoryLen,
- &zslHistoryLen);
- if ( OMX_ErrorNone != eError )
- {
- CAMHAL_LOGEB("Error while configuring ZSL History len 0x%x", eError);
- // Don't return status for now
- // as high history values might lead
- // to errors on some platforms.
- // ret = ErrorUtils::omxToAndroidError(eError);
- }
- else
- {
- CAMHAL_LOGDA("ZSL History len configured successfully");
- }
- }
-
- if( NO_ERROR == ret )
- {
- eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
( OMX_INDEXTYPE ) OMX_IndexCameraOperatingMode,
&camMode);
if ( OMX_ErrorNone != eError )
{
CAMHAL_LOGEB("Error while configuring camera mode 0x%x", eError);
- ret = ErrorUtils::omxToAndroidError(eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
}
else
{
@@ -692,6 +721,21 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
}
}
+#ifndef OMAP_TUNA
+ if((NO_ERROR == ret) && (OMXCameraAdapter::CP_CAM == mode)) {
+ //Configure Single Preview Mode
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSinglePreviewMode,
+ &singlePrevMode);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring single preview mode 0x%x", eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
+ } else {
+ CAMHAL_LOGDA("single preview mode configured successfully");
+ }
+ }
+#endif
+
if( NO_ERROR == ret )
{
//Configure CAC
@@ -701,7 +745,7 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
if ( OMX_ErrorNone != eError )
{
CAMHAL_LOGEB("Error while configuring CAC 0x%x", eError);
- ret = ErrorUtils::omxToAndroidError(eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
}
else
{
@@ -1009,7 +1053,7 @@ status_t OMXCameraAdapter::setAlgoPriority(AlgoPriority priority,
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setPictureRotation(unsigned int degree)
@@ -1055,76 +1099,83 @@ status_t OMXCameraAdapter::setSensorOrientation(unsigned int degree)
OMXCameraPortParameters *mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
LOG_FUNCTION_NAME;
- if ( OMX_StateInvalid == mComponentState )
- {
+ if ( OMX_StateInvalid == mComponentState ) {
CAMHAL_LOGEA("OMX component is in invalid state");
ret = -1;
- }
+ }
/* Set Temproary Port resolution.
- * For resolution with height > 1008,resolution cannot be set without configuring orientation.
+ * For resolution with height >= 720,
+ * resolution cannot be set without configuring orientation.
* So we first set a temp resolution. We have used VGA
*/
- tmpHeight = mPreviewData->mHeight;
- tmpWidth = mPreviewData->mWidth;
- mPreviewData->mWidth = 640;
- mPreviewData->mHeight = 480;
- ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData);
- if ( ret != NO_ERROR )
- {
- CAMHAL_LOGEB("setFormat() failed %d", ret);
+ if ( mPreviewData->mHeight >= 720 ) {
+ tmpHeight = mPreviewData->mHeight;
+ tmpWidth = mPreviewData->mWidth;
+ mPreviewData->mWidth = 640;
+ mPreviewData->mHeight = 480;
+
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error while configuring format 0x%x", ret);
+ return ret;
}
+ mPreviewData->mWidth = tmpWidth;
+ mPreviewData->mHeight = tmpHeight;
+ mPreviewPortInitialized = true;
+ }
+ else if (!mPreviewPortInitialized) {
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error while configuring format 0x%x", ret);
+ return ret;
+ }
+ mPreviewPortInitialized = true;
+ }
+
/* Now set Required Orientation*/
- if ( NO_ERROR == ret )
- {
+ if ( NO_ERROR == ret ) {
OMX_INIT_STRUCT(sensorOrientation, OMX_CONFIG_ROTATIONTYPE);
sensorOrientation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
- eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
- OMX_IndexConfigCommonRotate,
- &sensorOrientation);
- if ( OMX_ErrorNone != eError )
- {
- CAMHAL_LOGEB("Error while Reading Sensor Orientation : 0x%x", eError);
- }
- CAMHAL_LOGVB(" Currently Sensor Orientation is set to : %d",
- ( unsigned int ) sensorOrientation.nRotation);
- sensorOrientation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
sensorOrientation.nRotation = degree;
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigCommonRotate,
&sensorOrientation);
- if ( OMX_ErrorNone != eError )
- {
+ if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while configuring rotation 0x%x", eError);
- }
- CAMHAL_LOGVA(" Read the Parameters that are set");
- eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
- OMX_IndexConfigCommonRotate,
- &sensorOrientation);
- if ( OMX_ErrorNone != eError )
- {
- CAMHAL_LOGEB("Error while Reading Sensor Orientation : 0x%x", eError);
- }
+ }
CAMHAL_LOGVB(" Currently Sensor Orientation is set to : %d",
( unsigned int ) sensorOrientation.nRotation);
CAMHAL_LOGVB(" Sensor Configured for Port : %d",
( unsigned int ) sensorOrientation.nPortIndex);
- }
+ }
/* Now set the required resolution as requested */
+ if ( NO_ERROR == ret ) {
+ bool portConfigured = false;
+ ret = setSensorQuirks(degree,
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex],
+ portConfigured);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error while configuring setSensorQuirks 0x%x", ret);
+ return ret;
+ }
- mPreviewData->mWidth = tmpWidth;
- mPreviewData->mHeight = tmpHeight;
- if ( NO_ERROR == ret )
- {
- ret = setFormat (mCameraAdapterParameters.mPrevPortIndex,
- mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setFormat() failed %d", ret);
+ if ( !portConfigured ) {
+ ret = setFormat (mCameraAdapterParameters.mPrevPortIndex,
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error while configuring format 0x%x", ret);
+ return ret;
}
+
+ // Another WA: Setting the port definition will reset the VFR
+ // configuration.
+ setVFramerate(mPreviewData->mMinFrameRate,
+ mPreviewData->mMaxFrameRate);
}
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -1146,9 +1197,7 @@ status_t OMXCameraAdapter::setVFramerate(OMX_U32 minFrameRate, OMX_U32 maxFrameR
ret = -EINVAL;
}
- // The port framerate should never be smaller
- // than max framerate.
- if ( mPreviewData->mFrameRate < maxFrameRate ) {
+ if ( !mSetFormatDone ) {
return NO_INIT;
}
@@ -1175,6 +1224,36 @@ status_t OMXCameraAdapter::setVFramerate(OMX_U32 minFrameRate, OMX_U32 maxFrameR
}
return ret;
- }
+}
+
+#ifndef OMAP_TUNA
+status_t OMXCameraAdapter::setMechanicalMisalignmentCorrection(const bool enable)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_MM mm;
+
+ LOG_FUNCTION_NAME;
+
+ mm.nVersion = mLocalVersionParam;
+ mm.nSize = sizeof(OMX_TI_CONFIG_MM);
+ mm.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ mm.bMM = enable ? OMX_TRUE : OMX_FALSE;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigMechanicalMisalignment,
+ &mm);
+
+ if(OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while enabling mechanical misalignment correction. error = 0x%x", eError);
+ ret = -1;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+#endif
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
index 1ae440d..ea7c890 100755..100644
--- a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
+++ b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
@@ -33,21 +33,21 @@
static int mDebugFps = 0;
static int mDebugFcs = 0;
-#undef TRUE
-#undef FALSE
-
#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);}
-namespace android {
+namespace Ti {
+namespace Camera {
+
+#ifdef CAMERAHAL_OMX_PROFILING
-#undef LOG_TAG
-///Maintain a separate tag for OMXCameraAdapter logs to isolate issues OMX specific
-#define LOG_TAG "CameraHAL"
+const char OMXCameraAdapter::DEFAULT_PROFILE_PATH[] = "/data/dbg/profile_data.bin";
+
+#endif
//frames skipped before recalculating the framerate
#define FPS_PERIOD 30
-Mutex gAdapterLock;
+android::Mutex gAdapterLock;
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
@@ -55,16 +55,24 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
LOG_FUNCTION_NAME;
char value[PROPERTY_VALUE_MAX];
+ const char *mountOrientationString = NULL;
+
property_get("debug.camera.showfps", value, "0");
mDebugFps = atoi(value);
property_get("debug.camera.framecounts", value, "0");
mDebugFcs = atoi(value);
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ property_get("debug.camera.profile", value, "0");
+ mDebugProfile = atoi(value);
+
+#endif
+
TIMM_OSAL_ERRORTYPE osalError = OMX_ErrorNone;
OMX_ERRORTYPE eError = OMX_ErrorNone;
status_t ret = NO_ERROR;
-
mLocalVersionParam.s.nVersionMajor = 0x1;
mLocalVersionParam.s.nVersionMinor = 0x1;
mLocalVersionParam.s.nRevision = 0x0 ;
@@ -72,6 +80,7 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mPending3Asettings = 0;//E3AsettingsAll;
mPendingCaptureSettings = 0;
+ mPendingPreviewSettings = 0;
if ( 0 != mInitSem.Count() )
{
@@ -86,17 +95,24 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mCameraAdapterParameters.mImagePortIndex = OMX_CAMERA_PORT_IMAGE_OUT_IMAGE;
mCameraAdapterParameters.mMeasurementPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT;
//currently not supported use preview port instead
- mCameraAdapterParameters.mVideoPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW;
+ mCameraAdapterParameters.mVideoPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_VIDEO;
+ mCameraAdapterParameters.mVideoInPortIndex = OMX_CAMERA_PORT_VIDEO_IN_VIDEO;
eError = OMX_Init();
if (eError != OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_Init() failed, error: 0x%x", eError);
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
mOmxInitialized = true;
+ // Initialize the callback handles
+ OMX_CALLBACKTYPE callbacks;
+ callbacks.EventHandler = Camera::OMXCameraAdapterEventHandler;
+ callbacks.EmptyBufferDone = Camera::OMXCameraAdapterEmptyBufferDone;
+ callbacks.FillBufferDone = Camera::OMXCameraAdapterFillBufferDone;
+
///Get the handle to the OMX Component
- eError = OMXCameraAdapter::OMXCameraGetHandle(&mCameraAdapterParameters.mHandleComp, (OMX_PTR)this);
+ eError = OMXCameraAdapter::OMXCameraGetHandle(&mCameraAdapterParameters.mHandleComp, this, callbacks);
if(eError != OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
}
@@ -105,6 +121,10 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mComponentState = OMX_StateLoaded;
CAMHAL_LOGVB("OMX_GetHandle -0x%x sensor_index = %lu", eError, mSensorIndex);
+#ifndef OMAP_TUNA
+ initDccFileDataSave(&mCameraAdapterParameters.mHandleComp, mCameraAdapterParameters.mPrevPortIndex);
+#endif
+
eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
OMX_CommandPortDisable,
OMX_ALL,
@@ -162,26 +182,41 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
CAMHAL_LOGDB("Sensor %d selected successfully", mSensorIndex);
}
+#ifdef CAMERAHAL_DEBUG
+
printComponentVersion(mCameraAdapterParameters.mHandleComp);
+#endif
+
mBracketingEnabled = false;
+ mZoomBracketingEnabled = false;
mBracketingBuffersQueuedCount = 0;
mBracketingRange = 1;
mLastBracetingBufferIdx = 0;
+ mBracketingBuffersQueued = NULL;
mOMXStateSwitch = false;
+ mBracketingSet = false;
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ mRawCapture = false;
+ mYuvCapture = false;
+#endif
mCaptureSignalled = false;
mCaptureConfigured = false;
+ mReprocConfigured = false;
mRecording = false;
mWaitingForSnapshot = false;
- mSnapshotCount = 0;
+ mPictureFormatFromClient = NULL;
- mCapMode = HIGH_QUALITY;
+ mCapabilitiesOpMode = MODE_MAX;
+ mCapMode = INITIAL_MODE;
mIPP = IPP_NULL;
mVstabEnabled = false;
mVnfEnabled = false;
mBurstFrames = 1;
+ mBurstFramesAccum = 0;
mCapturedFrames = 0;
+ mFlushShotConfigQueue = false;
mPictureQuality = 100;
mCurrentZoomIdx = 0;
mTargetZoomIdx = 0;
@@ -190,13 +225,24 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mZoomInc = 1;
mZoomParameterIdx = 0;
mExposureBracketingValidEntries = 0;
+ mZoomBracketingValidEntries = 0;
mSensorOverclock = false;
+ mAutoConv = OMX_TI_AutoConvergenceModeMax;
+ mManualConv = 0;
+
+#ifdef CAMERAHAL_TUNA
mIternalRecordingHint = false;
+#endif
mDeviceOrientation = 0;
+ mFaceOrientation = 0;
mCapabilities = caps;
mZoomUpdating = false;
mZoomUpdate = false;
+ mGBCE = BRIGHTNESS_OFF;
+ mGLBCE = BRIGHTNESS_OFF;
+ mParameters3A.ExposureLock = OMX_FALSE;
+ mParameters3A.WhiteBalanceLock = OMX_FALSE;
mEXIFData.mGPSData.mAltitudeValid = false;
mEXIFData.mGPSData.mDatestampValid = false;
@@ -209,6 +255,25 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mEXIFData.mModelValid = false;
mEXIFData.mMakeValid = false;
+ //update the mDeviceOrientation with the sensor mount orientation.
+ //So that the face detect will work before onOrientationEvent()
+ //get triggered.
+ CAMHAL_ASSERT(mCapabilities);
+ mountOrientationString = mCapabilities->get(CameraProperties::ORIENTATION_INDEX);
+ CAMHAL_ASSERT(mountOrientationString);
+ mDeviceOrientation = atoi(mountOrientationString);
+ mFaceOrientation = atoi(mountOrientationString);
+
+ if (mSensorIndex != 2) {
+ mCapabilities->setMode(MODE_HIGH_SPEED);
+ }
+
+ if (mCapabilities->get(CameraProperties::SUPPORTED_ZOOM_STAGES) != NULL) {
+ mMaxZoomSupported = mCapabilities->getInt(CameraProperties::SUPPORTED_ZOOM_STAGES) + 1;
+ } else {
+ mMaxZoomSupported = 1;
+ }
+
// initialize command handling thread
if(mCommandHandler.get() == NULL)
mCommandHandler = new CommandHandler(this);
@@ -219,14 +284,13 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
return NO_MEMORY;
}
- ret = mCommandHandler->run("CallbackThread", PRIORITY_URGENT_DISPLAY);
+ ret = mCommandHandler->run("CallbackThread", android::PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
if( ret == INVALID_OPERATION){
CAMHAL_LOGDA("command handler thread already runnning!!");
ret = NO_ERROR;
- } else
- {
+ } else {
CAMHAL_LOGEA("Couldn't run command handlerthread");
return ret;
}
@@ -242,31 +306,18 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
return NO_MEMORY;
}
- ret = mOMXCallbackHandler->run("OMXCallbackThread", PRIORITY_URGENT_DISPLAY);
+ ret = mOMXCallbackHandler->run("OMXCallbackThread", android::PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
if( ret == INVALID_OPERATION){
CAMHAL_LOGDA("omx callback handler thread already runnning!!");
ret = NO_ERROR;
- }else
- {
+ } else {
CAMHAL_LOGEA("Couldn't run omx callback handler thread");
return ret;
}
}
- //Remove any unhandled events
- if (!mEventSignalQ.isEmpty()) {
- for (unsigned int i = 0 ;i < mEventSignalQ.size(); i++ ) {
- TIUTILS::Message *msg = mEventSignalQ.itemAt(i);
- //remove from queue and free msg
- if ( NULL != msg ) {
- free(msg);
- }
- }
- mEventSignalQ.clear();
- }
-
OMX_INIT_STRUCT_PTR (&mRegionPriority, OMX_TI_CONFIG_3A_REGION_PRIORITY);
OMX_INIT_STRUCT_PTR (&mFacePriority, OMX_TI_CONFIG_3A_FACE_PRIORITY);
mRegionPriority.nPortIndex = OMX_ALL;
@@ -276,31 +327,64 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
//and will not conditionally apply based on current values.
mFirstTimeInit = true;
+ //Flag to avoid calling setVFramerate() before OMX_SetParameter(OMX_IndexParamPortDefinition)
+ //Ducati will return an error otherwise.
+ mSetFormatDone = false;
+
memset(mExposureBracketingValues, 0, EXP_BRACKET_RANGE*sizeof(int));
+ memset(mZoomBracketingValues, 0, ZOOM_BRACKET_RANGE*sizeof(int));
mMeasurementEnabled = false;
mFaceDetectionRunning = false;
mFaceDetectionPaused = false;
mFDSwitchAlgoPriority = false;
+ metadataLastAnalogGain = -1;
+ metadataLastExposureTime = -1;
+
memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex], 0, sizeof(OMXCameraPortParameters));
memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex], 0, sizeof(OMXCameraPortParameters));
-
- //Initialize 3A defaults
- ret = init3AParams(mParameters3A);
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEA("Couldn't init 3A params!");
- goto EXIT;
- }
+ memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex], 0, sizeof(OMXCameraPortParameters));
+ memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex], 0, sizeof(OMXCameraPortParameters));
+
+ // initialize 3A defaults
+ mParameters3A.Effect = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EFFECT, EffLUT);
+ mParameters3A.FlashMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FLASH_MODE, FlashLUT);
+ mParameters3A.SceneMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_SCENE_MODE, SceneLUT);
+ mParameters3A.EVCompensation = atoi(OMXCameraAdapter::DEFAULT_EV_COMPENSATION);
+ mParameters3A.Focus = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FOCUS_MODE, FocusLUT);
+ mParameters3A.ISO = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ISO_MODE, IsoLUT);
+ mParameters3A.Flicker = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ANTIBANDING, FlickerLUT);
+ mParameters3A.Brightness = atoi(OMXCameraAdapter::DEFAULT_BRIGHTNESS);
+ mParameters3A.Saturation = atoi(OMXCameraAdapter::DEFAULT_SATURATION) - SATURATION_OFFSET;
+ mParameters3A.Sharpness = atoi(OMXCameraAdapter::DEFAULT_SHARPNESS) - SHARPNESS_OFFSET;
+ mParameters3A.Contrast = atoi(OMXCameraAdapter::DEFAULT_CONTRAST) - CONTRAST_OFFSET;
+ mParameters3A.WhiteBallance = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_WB, WBalLUT);
+ mParameters3A.Exposure = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EXPOSURE_MODE, ExpLUT);
+ mParameters3A.ExposureLock = OMX_FALSE;
+ mParameters3A.FocusLock = OMX_FALSE;
+ mParameters3A.WhiteBalanceLock = OMX_FALSE;
+
+ mParameters3A.ManualExposure = 0;
+ mParameters3A.ManualExposureRight = 0;
+ mParameters3A.ManualGain = 0;
+ mParameters3A.ManualGainRight = 0;
+
+ mParameters3A.AlgoFixedGamma = OMX_TRUE;
+ mParameters3A.AlgoNSF1 = OMX_TRUE;
+ mParameters3A.AlgoNSF2 = OMX_TRUE;
+ mParameters3A.AlgoSharpening = OMX_TRUE;
+ mParameters3A.AlgoThreeLinColorMap = OMX_TRUE;
+ mParameters3A.AlgoGIC = OMX_TRUE;
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
EXIT:
CAMHAL_LOGDB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
void OMXCameraAdapter::performCleanupAfterError()
@@ -324,9 +408,15 @@ OMXCameraAdapter::OMXCameraPortParameters *OMXCameraAdapter::getPortParams(Camer
switch ( frameType )
{
case CameraFrame::IMAGE_FRAME:
- case CameraFrame::RAW_FRAME:
ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
break;
+ case CameraFrame::RAW_FRAME:
+ if (mRawCapture) {
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+ } else {
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ }
+ break;
case CameraFrame::PREVIEW_FRAME_SYNC:
case CameraFrame::SNAPSHOT_FRAME:
case CameraFrame::VIDEO_FRAME_SYNC:
@@ -342,13 +432,16 @@ OMXCameraAdapter::OMXCameraPortParameters *OMXCameraAdapter::getPortParams(Camer
return ret;
}
-status_t OMXCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+status_t OMXCameraAdapter::fillThisBuffer(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
{
+ LOG_FUNCTION_NAME;
+
status_t ret = NO_ERROR;
OMXCameraPortParameters *port = NULL;
OMX_ERRORTYPE eError = OMX_ErrorNone;
BaseCameraAdapter::AdapterState state;
BaseCameraAdapter::getState(state);
+ bool isCaptureFrame = false;
if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE )
{
@@ -360,16 +453,19 @@ status_t OMXCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType
return -EINVAL;
}
- if ( (NO_ERROR == ret) &&
- ((CameraFrame::IMAGE_FRAME == frameType) || (CameraFrame::RAW_FRAME == frameType)) &&
- (1 > mCapturedFrames) &&
- (!mBracketingEnabled)) {
- // Signal end of image capture
- if ( NULL != mEndImageCaptureCallback) {
- mEndImageCaptureCallback(mEndCaptureData);
+ isCaptureFrame = (CameraFrame::IMAGE_FRAME == frameType) ||
+ (CameraFrame::RAW_FRAME == frameType);
+
+ if ( isCaptureFrame && (NO_ERROR == ret) ) {
+ // In CP_CAM mode, end image capture will be signalled when application starts preview
+ if ((1 > mCapturedFrames) && !mBracketingEnabled && (mCapMode != CP_CAM)) {
+ // Signal end of image capture
+ if ( NULL != mEndImageCaptureCallback) {
+ mEndImageCaptureCallback(mEndCaptureData);
+ }
+ return NO_ERROR;
}
- return NO_ERROR;
- }
+ }
if ( NO_ERROR == ret )
{
@@ -381,25 +477,29 @@ status_t OMXCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType
}
}
- if ( NO_ERROR == ret )
- {
-
- for ( int i = 0 ; i < port->mNumBufs ; i++)
- {
- if ( port->mBufferHeader[i]->pBuffer == frameBuf )
- {
+ if ( NO_ERROR == ret ) {
+ for ( int i = 0 ; i < port->mNumBufs ; i++) {
+ if ((CameraBuffer *) port->mBufferHeader[i]->pAppPrivate == frameBuf) {
+ if ( isCaptureFrame && !mBracketingEnabled ) {
+ android::AutoMutex lock(mBurstLock);
+ if (mBurstFramesQueued >= mBurstFramesAccum) {
+ port->mStatus[i] = OMXCameraPortParameters::IDLE;
+ return NO_ERROR;
+ }
+ mBurstFramesQueued++;
+ }
+ port->mStatus[i] = OMXCameraPortParameters::FILL;
eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp, port->mBufferHeader[i]);
if ( eError != OMX_ErrorNone )
- {
+ {
CAMHAL_LOGEB("OMX_FillThisBuffer 0x%x", eError);
goto EXIT;
- }
+ }
mFramesWithDucati++;
break;
- }
- }
-
- }
+ }
+ }
+ }
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -410,86 +510,106 @@ EXIT:
//Since fillthisbuffer is called asynchronously, make sure to signal error to the app
mErrorNotifier->errorNotify(CAMERA_ERROR_HARD);
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+#ifndef OMAP_TUNA
+void OMXCameraAdapter::setParamS3D(OMX_U32 port, const char *valstr)
+{
+ OMXCameraPortParameters *cap;
+
+ LOG_FUNCTION_NAME;
+
+ cap = &mCameraAdapterParameters.mCameraPortParams[port];
+ if (valstr != NULL)
+ {
+ if (strcmp(valstr, TICameraParameters::S3D_TB_FULL) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutTopBottom;
+ }
+ else if (strcmp(valstr, TICameraParameters::S3D_SS_FULL) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutLeftRight;
+ }
+ else if (strcmp(valstr, TICameraParameters::S3D_TB_SUBSAMPLED) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutTopBottomSubsample;
+ }
+ else if (strcmp(valstr, TICameraParameters::S3D_SS_SUBSAMPLED) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutLeftRightSubsample;
+ }
+ else
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayout2D;
+ }
+ }
+ else
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayout2D;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
}
+#endif
-status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
+status_t OMXCameraAdapter::setParameters(const android::CameraParameters &params)
{
LOG_FUNCTION_NAME;
- const char * str = NULL;
int mode = 0;
status_t ret = NO_ERROR;
bool updateImagePortParams = false;
int minFramerate, maxFramerate, frameRate;
const char *valstr = NULL;
- const char *oldstr = NULL;
int w, h;
OMX_COLOR_FORMATTYPE pixFormat;
BaseCameraAdapter::AdapterState state;
BaseCameraAdapter::getState(state);
///@todo Include more camera parameters
- if ( (valstr = params.getPreviewFormat()) != NULL )
- {
- if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
- strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0 ||
- strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
- {
+ if ( (valstr = params.getPreviewFormat()) != NULL ) {
+ if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0 ||
+ strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
CAMHAL_LOGDA("YUV420SP format selected");
- pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
- }
- else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
- {
+ pixFormat = OMX_COLOR_FormatYUV420PackedSemiPlanar;
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
CAMHAL_LOGDA("RGB565 format selected");
pixFormat = OMX_COLOR_Format16bitRGB565;
- }
- else
- {
+ } else {
CAMHAL_LOGDA("Invalid format, CbYCrY format selected as default");
pixFormat = OMX_COLOR_FormatCbYCrY;
- }
}
- else
- {
+ } else {
CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY");
pixFormat = OMX_COLOR_FormatCbYCrY;
- }
+ }
OMXCameraPortParameters *cap;
cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
params.getPreviewSize(&w, &h);
frameRate = params.getPreviewFrameRate();
- minFramerate = params.getInt(TICameraParameters::KEY_MINFRAMERATE);
- maxFramerate = params.getInt(TICameraParameters::KEY_MAXFRAMERATE);
- if ( ( 0 < minFramerate ) &&
- ( 0 < maxFramerate ) )
- {
- if ( minFramerate > maxFramerate )
- {
- CAMHAL_LOGEA(" Min FPS set higher than MAX. So setting MIN and MAX to the higher value");
- maxFramerate = minFramerate;
- }
+ params.getPreviewFpsRange(&minFramerate, &maxFramerate);
+ minFramerate /= CameraHal::VFR_SCALE;
+ maxFramerate /= CameraHal::VFR_SCALE;
+ if ( ( 0 < minFramerate ) && ( 0 < maxFramerate ) ) {
+ if ( minFramerate > maxFramerate ) {
+ CAMHAL_LOGEA(" Min FPS set higher than MAX. So setting MIN and MAX to the higher value");
+ maxFramerate = minFramerate;
+ }
- if ( 0 >= frameRate )
- {
+ if ( 0 >= frameRate ) {
frameRate = maxFramerate;
- }
+ }
- if( ( cap->mMinFrameRate != minFramerate ) ||
- ( cap->mMaxFrameRate != maxFramerate ) )
- {
+ if ( ( cap->mMinFrameRate != (OMX_U32) minFramerate ) ||
+ ( cap->mMaxFrameRate != (OMX_U32) maxFramerate ) ) {
cap->mMinFrameRate = minFramerate;
cap->mMaxFrameRate = maxFramerate;
setVFramerate(cap->mMinFrameRate, cap->mMaxFrameRate);
- }
}
-
- // TODO(XXX): Limiting 1080p to (24,24) or (15,15) for now. Need to remove later.
- if ((w >= 1920) && (h >= 1080)) {
- cap->mMaxFrameRate = cap->mMinFrameRate;
- setVFramerate(cap->mMinFrameRate, cap->mMaxFrameRate);
}
if ( 0 < frameRate )
@@ -537,22 +657,23 @@ status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
mOMXStateSwitch = true;
}
+#ifdef CAMERAHAL_TUNA
valstr = params.get(TICameraParameters::KEY_RECORDING_HINT);
- if (!valstr || (valstr && (strcmp(valstr, CameraParameters::FALSE)))) {
+ if (!valstr || (valstr && (strcmp(valstr, android::CameraParameters::FALSE)))) {
mIternalRecordingHint = false;
} else {
mIternalRecordingHint = true;
}
+#endif
#ifdef OMAP_ENHANCEMENT
-
if ( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
{
- if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_ENABLE) == 0)
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
mMeasurementEnabled = true;
}
- else if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_DISABLE) == 0)
+ else if (strcmp(valstr, android::CameraParameters::FALSE) == 0)
{
mMeasurementEnabled = false;
}
@@ -566,7 +687,11 @@ status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
//Disable measurement data by default
mMeasurementEnabled = false;
}
+#endif
+#ifdef OMAP_ENHANCEMENT_S3D
+ setParamS3D(mCameraAdapterParameters.mPrevPortIndex,
+ params.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT));
#endif
ret |= setParametersCapture(params, state);
@@ -586,6 +711,10 @@ status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
mParams = params;
mFirstTimeInit = false;
+ if ( MODE_MAX != mCapabilitiesOpMode ) {
+ mCapabilities->setMode(mCapabilitiesOpMode);
+ }
+
LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -601,7 +730,7 @@ void saveFile(unsigned char *buff, int width, int height, int format) {
sprintf(fn, "/preview%03d.yuv", counter);
fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
if(fd < 0) {
- ALOGE("Unable to open file %s: %s", fn, strerror(fd));
+ CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
return;
}
@@ -630,7 +759,36 @@ void saveFile(unsigned char *buff, int width, int height, int format) {
LOG_FUNCTION_NAME_EXIT;
}
-void OMXCameraAdapter::getParameters(CameraParameters& params)
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+static status_t saveBufferToFile(const void *buf, int size, const char *filename)
+{
+ if (size < 0) {
+ CAMHAL_LOGE("Wrong buffer size: %d", size);
+ return BAD_VALUE;
+ }
+
+ const int fd = open(filename, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0644);
+ if (fd < 0) {
+ CAMHAL_LOGE("ERROR: %s, Unable to save raw file", strerror(fd));
+ return BAD_VALUE;
+ }
+
+ if (write(fd, buf, size) != (signed)size) {
+ CAMHAL_LOGE("ERROR: Unable to write to raw file: %s ", strerror(errno));
+ close(fd);
+ return NO_MEMORY;
+ }
+
+ CAMHAL_LOGD("buffer=%p, size=%d stored at %s", buf, size, filename);
+
+ close(fd);
+ return OK;
+}
+#endif
+
+
+void OMXCameraAdapter::getParameters(android::CameraParameters& params)
{
status_t ret = NO_ERROR;
OMX_CONFIG_EXPOSUREVALUETYPE exp;
@@ -643,9 +801,7 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
if( mParameters3A.SceneMode != OMX_Manual ) {
const char *valstr_supported = NULL;
- // if preview is not started...we still need to feedback the proper params
- // look up the settings in the LUT
- if (((state & PREVIEW_ACTIVE) == 0) && mCapabilities) {
+ if (mCapabilities) {
const SceneModesEntry* entry = NULL;
entry = getSceneModeEntry(mCapabilities->get(CameraProperties::CAMERA_NAME),
(OMX_SCENEMODETYPE) mParameters3A.SceneMode);
@@ -657,40 +813,39 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
}
valstr = getLUTvalue_OMXtoHAL(mParameters3A.WhiteBallance, WBalLUT);
- valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
if (valstr && valstr_supported && strstr(valstr_supported, valstr))
- params.set(CameraParameters::KEY_WHITE_BALANCE , valstr);
+ params.set(android::CameraParameters::KEY_WHITE_BALANCE , valstr);
valstr = getLUTvalue_OMXtoHAL(mParameters3A.FlashMode, FlashLUT);
- valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_FLASH_MODES);
if (valstr && valstr_supported && strstr(valstr_supported, valstr))
- params.set(CameraParameters::KEY_FLASH_MODE, valstr);
+ params.set(android::CameraParameters::KEY_FLASH_MODE, valstr);
if ((mParameters3A.Focus == OMX_IMAGE_FocusControlAuto) &&
(mCapMode != OMXCameraAdapter::VIDEO_MODE)) {
- valstr = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
+ valstr = android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
} else {
valstr = getLUTvalue_OMXtoHAL(mParameters3A.Focus, FocusLUT);
}
- valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
if (valstr && valstr_supported && strstr(valstr_supported, valstr))
- params.set(CameraParameters::KEY_FOCUS_MODE, valstr);
+ params.set(android::CameraParameters::KEY_FOCUS_MODE, valstr);
}
//Query focus distances only when focus is running
if ( ( AF_ACTIVE & state ) ||
- ( NULL == mParameters.get(CameraParameters::KEY_FOCUS_DISTANCES) ) )
+ ( NULL == mParameters.get(android::CameraParameters::KEY_FOCUS_DISTANCES) ) )
{
updateFocusDistances(params);
}
else
{
- params.set(CameraParameters::KEY_FOCUS_DISTANCES,
- mParameters.get(CameraParameters::KEY_FOCUS_DISTANCES));
+ params.set(android::CameraParameters::KEY_FOCUS_DISTANCES,
+ mParameters.get(android::CameraParameters::KEY_FOCUS_DISTANCES));
}
#ifdef OMAP_ENHANCEMENT
-
OMX_INIT_STRUCT_PTR (&exp, OMX_CONFIG_EXPOSUREVALUETYPE);
exp.nPortIndex = OMX_ALL;
@@ -705,11 +860,10 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
{
CAMHAL_LOGEB("OMX error 0x%x, while retrieving current ISO value", eError);
}
-
#endif
{
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
//Immediate zoom should not be avaialable while smooth zoom is running
if ( ZOOM_ACTIVE & state )
{
@@ -717,7 +871,7 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
{
mZoomParameterIdx += mZoomInc;
}
- params.set( CameraParameters::KEY_ZOOM, mZoomParameterIdx);
+ params.set(android::CameraParameters::KEY_ZOOM, mZoomParameterIdx);
if ( ( mCurrentZoomIdx == mTargetZoomIdx ) &&
( mZoomParameterIdx == mCurrentZoomIdx ) )
{
@@ -744,36 +898,240 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
}
else
{
- params.set( CameraParameters::KEY_ZOOM, mCurrentZoomIdx);
+ params.set(android::CameraParameters::KEY_ZOOM, mCurrentZoomIdx);
}
}
//Populate current lock status
- if ( mParameters3A.ExposureLock ) {
- params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
- CameraParameters::TRUE);
+ if ( mUserSetExpLock || mParameters3A.ExposureLock ) {
+ params.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
+ android::CameraParameters::TRUE);
} else {
- params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
- CameraParameters::FALSE);
+ params.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
+ android::CameraParameters::FALSE);
}
- if ( mParameters3A.WhiteBalanceLock ) {
- params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
- CameraParameters::TRUE);
+ if ( mUserSetWbLock || mParameters3A.WhiteBalanceLock ) {
+ params.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
+ android::CameraParameters::TRUE);
} else {
- params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
- CameraParameters::FALSE);
+ params.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
+ android::CameraParameters::FALSE);
}
+ // Update Picture size capabilities dynamically
+ params.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+ mCapabilities->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
+
+ // Update framerate capabilities dynamically
+ params.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+ mCapabilities->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+
+ params.set(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED,
+ mCapabilities->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT));
+
+ params.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+ mCapabilities->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+
+ params.set(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED,
+ mCapabilities->get(CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED));
+
LOG_FUNCTION_NAME_EXIT;
}
-status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &portParams)
+status_t OMXCameraAdapter::setupTunnel(uint32_t SliceHeight, uint32_t EncoderHandle, uint32_t width, uint32_t height) {
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_HANDLETYPE *encoderHandle = (OMX_HANDLETYPE *)EncoderHandle;
+
+ CAMHAL_LOGDB("\n %s: SliceHeight:%d, EncoderHandle:%d width:%d height:%d \n", __FUNCTION__, SliceHeight, EncoderHandle, width, height);
+
+ if (SliceHeight == 0){
+ CAMHAL_LOGEA("\n\n #### Encoder Slice Height Not received, Dont Setup Tunnel $$$$\n\n");
+ return BAD_VALUE;
+ }
+
+ if (encoderHandle == NULL) {
+ CAMHAL_LOGEA("Encoder Handle not set \n\n");
+ return BAD_VALUE;
+ }
+
+ if ( 0 != mInitSem.Count() ) {
+ CAMHAL_LOGEB("Error mInitSem semaphore count %d", mInitSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ // Register for port enable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ mInitSem);
+ if(ret != NO_ERROR) {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ // Enable VIDEO Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ // Wait for the port enable event to occur
+ ret = mInitSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret ) {
+ CAMHAL_LOGDA("-Port enable event arrived");
+ } else {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Timeout for enabling preview port expired!");
+ return UNKNOWN_ERROR;
+ }
+
+ //Set the Video Port Params
+ OMX_PARAM_PORTDEFINITIONTYPE portCheck;
+ OMX_INIT_STRUCT_PTR (&portCheck, OMX_PARAM_PORTDEFINITIONTYPE);
+ portCheck.nPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_VIDEO;
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetParameter OMX_IndexParamPortDefinition Error - %x", eError);
+ }
+
+ portCheck.format.video.nFrameWidth = width;
+ portCheck.format.video.nFrameHeight = height;
+ portCheck.format.video.eColorFormat = OMX_COLOR_FormatYUV420PackedSemiPlanar;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter OMX_IndexParamPortDefinition Error- %x", eError);
+ }
+
+#ifndef OMAP_TUNA
+ //Slice Configuration
+ OMX_TI_PARAM_VTCSLICE VTCSlice;
+ OMX_INIT_STRUCT_PTR(&VTCSlice, OMX_TI_PARAM_VTCSLICE);
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE)OMX_TI_IndexParamVtcSlice, &VTCSlice);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetParameter OMX_TI_IndexParamVtcSlice Error - %x", eError);
+ }
+
+ VTCSlice.nSliceHeight = SliceHeight;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE)OMX_TI_IndexParamVtcSlice, &VTCSlice);
+ if (OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_SetParameter on OMX_TI_IndexParamVtcSlice returned error: 0x%x", eError);
+ return BAD_VALUE;
+ }
+#endif
+
+ eError = OMX_SetupTunnel(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mVideoPortIndex, encoderHandle, 0);
+ if (OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_SetupTunnel returned error: 0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::setSensorQuirks(int orientation,
+ OMXCameraPortParameters &portParams,
+ bool &portConfigured)
{
- size_t bufferCount;
+ status_t overclockStatus = NO_ERROR;
+ int sensorID = -1;
+ size_t overclockWidth;
+ size_t overclockHeight;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_PORTDEFINITIONTYPE portCheck;
LOG_FUNCTION_NAME;
+ portConfigured = false;
+ OMX_INIT_STRUCT_PTR (&portCheck, OMX_PARAM_PORTDEFINITIONTYPE);
+
+ portCheck.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition,
+ &portCheck);
+
+ if ( eError != OMX_ErrorNone ) {
+ CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ if ( ( orientation == 90 ) || ( orientation == 270 ) ) {
+ overclockWidth = 1080;
+ overclockHeight = 1920;
+ } else {
+ overclockWidth = 1920;
+ overclockHeight = 1080;
+ }
+
+ sensorID = mCapabilities->getInt(CameraProperties::CAMERA_SENSOR_ID);
+ if( ( ( sensorID == SENSORID_IMX060 ) &&
+ ( portParams.mWidth >= overclockWidth ) &&
+ ( portParams.mHeight >= overclockHeight ) &&
+ ( portParams.mFrameRate >= FRAME_RATE_FULL_HD ) ) ||
+ (( sensorID == SENSORID_OV14825) &&
+ ( portParams.mFrameRate >= FRAME_RATE_HIGH_HD ))||
+ ( ( sensorID == SENSORID_OV5640 ) &&
+ ( portParams.mWidth >= overclockWidth ) &&
+ ( portParams.mHeight >= overclockHeight ) ) ) {
+ overclockStatus = setSensorOverclock(true);
+ } else {
+
+ //WA: If the next port resolution doesn't require
+ // sensor overclocking, but the previous resolution
+ // needed it, then we have to first set new port
+ // resolution and then disable sensor overclocking.
+ if( ( ( sensorID == SENSORID_IMX060 ) &&
+ ( portCheck.format.video.nFrameWidth >= overclockWidth ) &&
+ ( portCheck.format.video.nFrameHeight >= overclockHeight ) &&
+ ( ( portCheck.format.video.xFramerate >> 16 ) >= FRAME_RATE_FULL_HD ) ) ||
+ (( sensorID == SENSORID_OV14825) &&
+ (( portCheck.format.video.xFramerate >> 16) >= FRAME_RATE_HIGH_HD ))||
+ ( ( sensorID == SENSORID_OV5640 ) &&
+ ( portCheck.format.video.nFrameWidth >= overclockWidth ) &&
+ ( portCheck.format.video.nFrameHeight >= overclockHeight ) ) ) {
+ status_t ret = setFormat(mCameraAdapterParameters.mPrevPortIndex,
+ portParams);
+ if ( NO_ERROR != ret ) {
+ return ret;
+ }
+
+ // Another WA: Setting the port definition will reset the VFR
+ // configuration.
+ setVFramerate(portParams.mMinFrameRate, portParams.mMaxFrameRate);
+
+ portConfigured = true;
+ }
+
+ overclockStatus = setSensorOverclock(false);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return overclockStatus;
+}
+status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &portParams)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ size_t bufferCount;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_PARAM_PORTDEFINITIONTYPE portCheck;
@@ -783,152 +1141,155 @@ status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &port
eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
OMX_IndexParamPortDefinition, &portCheck);
- if(eError!=OMX_ErrorNone)
- {
+ if (eError!=OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
- }
+ }
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- if ( OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port )
- {
+ if (OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port) {
portCheck.format.video.nFrameWidth = portParams.mWidth;
portCheck.format.video.nFrameHeight = portParams.mHeight;
portCheck.format.video.eColorFormat = portParams.mColorFormat;
portCheck.format.video.nStride = portParams.mStride;
- if( ( portCheck.format.video.nFrameWidth >= 1920 ) &&
- ( portCheck.format.video.nFrameHeight >= 1080 ) &&
- ( portParams.mFrameRate >= FRAME_RATE_FULL_HD ) )
- {
- setSensorOverclock(true);
- }
- else
- {
- setSensorOverclock(false);
- }
portCheck.format.video.xFramerate = portParams.mFrameRate<<16;
portCheck.nBufferSize = portParams.mStride * portParams.mHeight;
portCheck.nBufferCountActual = portParams.mNumBufs;
mFocusThreshold = FOCUS_THRESHOLD * portParams.mFrameRate;
- }
- else if ( OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port )
- {
+ // Used for RAW capture
+ } else if (OMX_CAMERA_PORT_VIDEO_OUT_VIDEO == port) {
+ portCheck.format.video.nFrameWidth = portParams.mWidth;
+ portCheck.format.video.nFrameHeight = portParams.mHeight;
+ portCheck.format.video.eColorFormat = OMX_COLOR_FormatRawBayer10bit; // portParams.mColorFormat;
+ portCheck.nBufferCountActual = 1; // portParams.mNumBufs;
+ } else if (OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port) {
portCheck.format.image.nFrameWidth = portParams.mWidth;
portCheck.format.image.nFrameHeight = portParams.mHeight;
- if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingNone )
- {
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
+ if (OMX_COLOR_FormatUnused == portParams.mColorFormat) {
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ if (mCodingMode == CodingJPEG) {
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
+ } else if (mCodingMode == CodingJPS) {
+ portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingJPS;
+ } else if (mCodingMode == CodingMPO) {
+ portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingMPO;
+ } else {
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
}
- else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingJPS )
- {
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingJPS;
- }
- else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingMPO )
- {
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingMPO;
- }
- else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingRAWJPEG )
- {
- //TODO: OMX_IMAGE_CodingJPEG should be changed to OMX_IMAGE_CodingRAWJPEG when
- // RAW format is supported
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
- }
- else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingRAWMPO )
- {
- //TODO: OMX_IMAGE_CodingJPEG should be changed to OMX_IMAGE_CodingRAWMPO when
- // RAW format is supported
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
- }
- else
- {
- portCheck.format.image.eColorFormat = portParams.mColorFormat;
+ } else {
+ portCheck.format.image.eColorFormat = portParams.mColorFormat;
portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
- }
+ }
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ // RAW + YUV Capture
+ if (mYuvCapture) {
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
+ }
+#endif
//Stride for 1D tiler buffer is zero
portCheck.format.image.nStride = 0;
- portCheck.nBufferSize = portParams.mStride * portParams.mWidth * portParams.mHeight;
portCheck.nBufferCountActual = portParams.mNumBufs;
+ } else if (OMX_CAMERA_PORT_VIDEO_IN_VIDEO == port) {
+ portCheck.format.video.nFrameWidth = portParams.mWidth;
+ portCheck.format.video.nStride = portParams.mStride;
+ portCheck.format.video.nFrameHeight = portParams.mHeight;
+ portCheck.format.video.eColorFormat = portParams.mColorFormat;
+ portCheck.format.video.xFramerate = 30 << 16;
+ portCheck.nBufferCountActual = portParams.mNumBufs;
+ } else {
+ CAMHAL_LOGEB("Unsupported port index (%lu)", port);
+ }
+
+#ifndef OMAP_TUNA
+ if (( mSensorIndex == OMX_TI_StereoSensor ) && (OMX_CAMERA_PORT_VIDEO_OUT_VIDEO != port)) {
+ ret = setS3DFrameLayout(port);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEA("Error configuring stereo 3D frame layout");
+ return ret;
+ }
}
- else
- {
- CAMHAL_LOGEB("Unsupported port index 0x%x", (unsigned int)port);
- }
+#endif
eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
- OMX_IndexParamPortDefinition, &portCheck);
- if(eError!=OMX_ErrorNone)
- {
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
- }
+ }
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
/* check if parameters are set correctly by calling GetParameter() */
eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
- OMX_IndexParamPortDefinition, &portCheck);
- if(eError!=OMX_ErrorNone)
- {
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
- }
+ }
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
portParams.mBufSize = portCheck.nBufferSize;
portParams.mStride = portCheck.format.image.nStride;
- if ( OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port )
- {
+ if (OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port) {
CAMHAL_LOGDB("\n *** IMG Width = %ld", portCheck.format.image.nFrameWidth);
- CAMHAL_LOGDB("\n ***IMG Height = %ld", portCheck.format.image.nFrameHeight);
-
- CAMHAL_LOGDB("\n ***IMG IMG FMT = %x", portCheck.format.image.eColorFormat);
- CAMHAL_LOGDB("\n ***IMG portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
- CAMHAL_LOGDB("\n ***IMG portCheck.nBufferCountMin = %ld\n",
- portCheck.nBufferCountMin);
- CAMHAL_LOGDB("\n ***IMG portCheck.nBufferCountActual = %ld\n",
- portCheck.nBufferCountActual);
- CAMHAL_LOGDB("\n ***IMG portCheck.format.image.nStride = %ld\n",
- portCheck.format.image.nStride);
- }
- else
- {
+ CAMHAL_LOGDB("\n *** IMG Height = %ld", portCheck.format.image.nFrameHeight);
+
+ CAMHAL_LOGDB("\n *** IMG IMG FMT = %x", portCheck.format.image.eColorFormat);
+ CAMHAL_LOGDB("\n *** IMG portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n *** IMG portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n *** IMG portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n *** IMG portCheck.format.image.nStride = %ld\n",
+ portCheck.format.image.nStride);
+ } else if (OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port) {
CAMHAL_LOGDB("\n *** PRV Width = %ld", portCheck.format.video.nFrameWidth);
- CAMHAL_LOGDB("\n ***PRV Height = %ld", portCheck.format.video.nFrameHeight);
-
- CAMHAL_LOGDB("\n ***PRV IMG FMT = %x", portCheck.format.video.eColorFormat);
- CAMHAL_LOGDB("\n ***PRV portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
- CAMHAL_LOGDB("\n ***PRV portCheck.nBufferCountMin = %ld\n",
- portCheck.nBufferCountMin);
- CAMHAL_LOGDB("\n ***PRV portCheck.nBufferCountActual = %ld\n",
- portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n *** PRV Height = %ld", portCheck.format.video.nFrameHeight);
+
+ CAMHAL_LOGDB("\n *** PRV IMG FMT = %x", portCheck.format.video.eColorFormat);
+ CAMHAL_LOGDB("\n *** PRV portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n *** PRV portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n *** PRV portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
CAMHAL_LOGDB("\n ***PRV portCheck.format.video.nStride = %ld\n",
- portCheck.format.video.nStride);
- }
+ portCheck.format.video.nStride);
+ } else {
+ CAMHAL_LOGDB("\n *** VID Width = %ld", portCheck.format.video.nFrameWidth);
+ CAMHAL_LOGDB("\n *** VID Height = %ld", portCheck.format.video.nFrameHeight);
+
+ CAMHAL_LOGDB("\n *** VID IMG FMT = %x", portCheck.format.video.eColorFormat);
+ CAMHAL_LOGDB("\n *** VID portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n *** VID portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n *** VID portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n *** VID portCheck.format.video.nStride = %ld\n",
+ portCheck.format.video.nStride);
+ }
+
+ mSetFormatDone = true;
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
EXIT:
- CAMHAL_LOGEB("Exiting function %s because of eError=%x", __FUNCTION__, eError);
+ CAMHAL_LOGEB("Exiting function %s because of eError = 0x%x", __FUNCTION__, eError);
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
-status_t OMXCameraAdapter::flushBuffers()
+status_t OMXCameraAdapter::flushBuffers(OMX_U32 nPort)
{
+ LOG_FUNCTION_NAME;
+
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- TIMM_OSAL_ERRORTYPE err;
- TIMM_OSAL_U32 uRequestedEvents = OMXCameraAdapter::CAMERA_PORT_FLUSH;
- TIMM_OSAL_U32 pRetrievedEvents;
if ( 0 != mFlushSem.Count() )
{
@@ -937,10 +1298,8 @@ status_t OMXCameraAdapter::flushBuffers()
return NO_INIT;
}
- LOG_FUNCTION_NAME;
-
OMXCameraPortParameters * mPreviewData = NULL;
- mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[nPort];
///Register for the FLUSH event
///This method just inserts a message in Event Q, which is checked in the callback
@@ -948,7 +1307,7 @@ status_t OMXCameraAdapter::flushBuffers()
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
OMX_CommandFlush,
- OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW,
+ nPort,
mFlushSem);
if(ret!=NO_ERROR)
{
@@ -959,7 +1318,7 @@ status_t OMXCameraAdapter::flushBuffers()
///Send FLUSH command to preview port
eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp,
OMX_CommandFlush,
- mCameraAdapterParameters.mPrevPortIndex,
+ nPort,
NULL);
if(eError!=OMX_ErrorNone)
@@ -989,25 +1348,27 @@ status_t OMXCameraAdapter::flushBuffers()
ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
OMX_CommandFlush,
- OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW,
+ nPort,
NULL);
CAMHAL_LOGDA("Flush event timeout expired");
goto EXIT;
}
+ mOMXCallbackHandler->flush();
+
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
///API to give the buffers to Adapter
-status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+status_t OMXCameraAdapter::useBuffers(CameraMode mode, CameraBuffer * bufArr, int num, size_t length, unsigned int queueable)
{
OMX_ERRORTYPE eError = OMX_ErrorNone;
status_t ret = NO_ERROR;
@@ -1029,9 +1390,9 @@ status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
break;
case CAMERA_VIDEO:
- mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mNumBufs = num;
- mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mMaxQueueable = queueable;
- ret = UseBuffersPreview(bufArr, num);
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersRawCapture(bufArr, num);
break;
case CAMERA_MEASUREMENT:
@@ -1040,6 +1401,11 @@ status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
ret = UseBuffersPreviewData(bufArr, num);
break;
+ case CAMERA_REPROCESS:
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersReprocess(bufArr, num);
+ break;
}
LOG_FUNCTION_NAME_EXIT;
@@ -1047,13 +1413,12 @@ status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
return ret;
}
-status_t OMXCameraAdapter::UseBuffersPreviewData(void* bufArr, int num)
+status_t OMXCameraAdapter::UseBuffersPreviewData(CameraBuffer * bufArr, int num)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters * measurementData = NULL;
- uint32_t *buffers;
- Mutex::Autolock lock( mPreviewDataBufferLock);
+ android::AutoMutex lock(mPreviewDataBufferLock);
LOG_FUNCTION_NAME;
@@ -1080,7 +1445,6 @@ status_t OMXCameraAdapter::UseBuffersPreviewData(void* bufArr, int num)
{
measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
measurementData->mNumBufs = num ;
- buffers= (uint32_t*) bufArr;
}
if ( NO_ERROR == ret )
@@ -1158,13 +1522,13 @@ EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::switchToExecuting()
{
status_t ret = NO_ERROR;
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -1173,7 +1537,7 @@ status_t OMXCameraAdapter::switchToExecuting()
msg.arg1 = mErrorNotifier;
ret = mCommandHandler->put(&msg);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -1275,23 +1639,21 @@ status_t OMXCameraAdapter::doSwitchToExecuting()
performCleanupAfterError();
mStateSwitchLock.unlock();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-status_t OMXCameraAdapter::switchToLoaded()
-{
+status_t OMXCameraAdapter::switchToIdle() {
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mStateSwitchLock);
+ android::AutoMutex lock(mIdleStateSwitchLock);
- if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid)
- {
- CAMHAL_LOGDA("Already in OMX_Loaded state or OMX_StateInvalid state");
+ if ( mComponentState == OMX_StateIdle || mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGDA("Already in OMX_StateIdle, OMX_Loaded state or OMX_StateInvalid state");
return NO_ERROR;
- }
+ }
if ( 0 != mSwitchToLoadedSem.Count() )
{
@@ -1353,6 +1715,107 @@ status_t OMXCameraAdapter::switchToLoaded()
goto EXIT;
}
+ mComponentState = OMX_StateIdle;
+
+ return NO_ERROR;
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+
+
+status_t OMXCameraAdapter::prevPortEnable() {
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ ///Register for Preview port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mSwitchToLoadedSem);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Enable Preview Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+
+
+ CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandStateSet) 0x%x", eError);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ CAMHAL_LOGDA("Enabling Preview port");
+ ///Wait for state to switch to idle
+ ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+    //If something bad happened while we wait
+ if (mComponentState == OMX_StateInvalid)
+ {
+ CAMHAL_LOGEA("Invalid State after Enabling Preview port Exitting!!!");
+ goto EXIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Preview port enabled!");
+ }
+ else
+ {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Preview enable timedout");
+
+ goto EXIT;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::switchToLoaded(bool bPortEnableRequired) {
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mStateSwitchLock);
+ if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGDA("Already in OMX_Loaded state or OMX_StateInvalid state");
+ return NO_ERROR;
+ }
+
+ if ( mComponentState != OMX_StateIdle) {
+ ret = switchToIdle();
+ if (ret != NO_ERROR) return ret;
+ }
+
+ if ( 0 != mSwitchToLoadedSem.Count() ) {
+ CAMHAL_LOGEB("Error mSwitchToLoadedSem semaphore count %d", mSwitchToLoadedSem.Count());
+ goto EXIT;
+ }
+
///Register for LOADED state transition.
///This method just inserts a message in Event Q, which is checked in the callback
///The sempahore passed is signalled by the callback
@@ -1379,93 +1842,91 @@ status_t OMXCameraAdapter::switchToLoaded()
}
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- CAMHAL_LOGDA("Switching IDLE->LOADED state");
- ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
-
- //If somethiing bad happened while we wait
- if (mComponentState == OMX_StateInvalid)
- {
- CAMHAL_LOGEA("Invalid State after IDLE->LOADED Exitting!!!");
- goto EXIT;
- }
+ if ( !bPortEnableRequired ) {
+ OMXCameraPortParameters *mCaptureData , *mPreviewData, *measurementData;
+ mCaptureData = mPreviewData = measurementData = NULL;
- if ( NO_ERROR == ret )
- {
- CAMHAL_LOGDA("IDLE->LOADED state changed");
- }
- else
- {
- ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandStateSet,
- OMX_StateLoaded,
- NULL);
- CAMHAL_LOGEA("Timeout expired on IDLE->LOADED state change");
- goto EXIT;
- }
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ mCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
- mComponentState = OMX_StateLoaded;
+ ///Free the OMX Buffers
+ for ( int i = 0 ; i < mPreviewData->mNumBufs ; i++ ) {
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mPreviewData->mBufferHeader[i]);
- ///Register for Preview port ENABLE event
- ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mPrevPortIndex,
- mSwitchToLoadedSem);
-
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("Error in registering for event %d", ret);
- goto EXIT;
+ if(eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
}
- ///Enable Preview Port
- eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mPrevPortIndex,
- NULL);
+ if ( mMeasurementEnabled ) {
+ for ( int i = 0 ; i < measurementData->mNumBufs ; i++ ) {
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mMeasurementPortIndex,
+ measurementData->mBufferHeader[i]);
+ if(eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
- CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandStateSet) 0x%x", eError);
- GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ {
+ android::AutoMutex lock(mPreviewDataBufferLock);
+ mPreviewDataBuffersAvailable.clear();
+ }
- CAMHAL_LOGDA("Enabling Preview port");
- ///Wait for state to switch to idle
+ }
+ }
+
+ CAMHAL_LOGDA("Switching IDLE->LOADED state");
ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
//If somethiing bad happened while we wait
if (mComponentState == OMX_StateInvalid)
{
- CAMHAL_LOGEA("Invalid State after Enabling Preview port Exitting!!!");
+ CAMHAL_LOGEA("Invalid State after IDLE->LOADED Exitting!!!");
goto EXIT;
}
if ( NO_ERROR == ret )
{
- CAMHAL_LOGDA("Preview port enabled!");
+ CAMHAL_LOGDA("IDLE->LOADED state changed");
}
else
{
ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mPrevPortIndex,
+ OMX_CommandStateSet,
+ OMX_StateLoaded,
NULL);
- CAMHAL_LOGEA("Preview enable timedout");
-
+ CAMHAL_LOGEA("Timeout expired on IDLE->LOADED state change");
goto EXIT;
}
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ mComponentState = OMX_StateLoaded;
+ if (bPortEnableRequired == true) {
+ prevPortEnable();
+ }
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
+ ///Clear all the available preview buffers
+ mPreviewBuffersAvailable.clear();
+ }
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
+status_t OMXCameraAdapter::UseBuffersPreview(CameraBuffer * bufArr, int num)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
@@ -1485,7 +1946,6 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
mPreviewData->mNumBufs = num ;
- uint32_t *buffers = (uint32_t*)bufArr;
if ( 0 != mUsePreviewSem.Count() )
{
@@ -1503,70 +1963,52 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
mStateSwitchLock.lock();
- if ( mComponentState == OMX_StateLoaded )
- {
+ if ( mComponentState == OMX_StateLoaded ) {
- ret = setLDC(mIPP);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setLDC() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
+ if (mPendingPreviewSettings & SetLDC) {
+ mPendingPreviewSettings &= ~SetLDC;
+ ret = setLDC(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setLDC() failed %d", ret);
}
+ }
- ret = setNSF(mIPP);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setNSF() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
+ if (mPendingPreviewSettings & SetNSF) {
+ mPendingPreviewSettings &= ~SetNSF;
+ ret = setNSF(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setNSF() failed %d", ret);
}
+ }
- ret = setCaptureMode(mCapMode);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
+ if (mPendingPreviewSettings & SetCapMode) {
+ mPendingPreviewSettings &= ~SetCapMode;
+ ret = setCaptureMode(mCapMode);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
}
+ }
- CAMHAL_LOGDB("Camera Mode = %d", mCapMode);
-
- if( mCapMode == OMXCameraAdapter::VIDEO_MODE )
- {
- ///Enable/Disable Video Noise Filter
- ret = enableVideoNoiseFilter(mVnfEnabled);
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VNF %x", ret);
- return ret;
- }
+ if(mCapMode == OMXCameraAdapter::VIDEO_MODE) {
- ///Enable/Disable Video Stabilization
- ret = enableVideoStabilization(mVstabEnabled);
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
- return ret;
+ if (mPendingPreviewSettings & SetVNF) {
+ mPendingPreviewSettings &= ~SetVNF;
+ ret = enableVideoNoiseFilter(mVnfEnabled);
+ if ( NO_ERROR != ret){
+ CAMHAL_LOGEB("Error configuring VNF %x", ret);
}
}
- else
- {
- ret = enableVideoNoiseFilter(false);
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VNF %x", ret);
- return ret;
- }
- ///Enable/Disable Video Stabilization
- ret = enableVideoStabilization(false);
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
- return ret;
+
+ if (mPendingPreviewSettings & SetVSTAB) {
+ mPendingPreviewSettings &= ~SetVSTAB;
+ ret = enableVideoStabilization(mVstabEnabled);
+ if ( NO_ERROR != ret) {
+ CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
}
}
+
}
+ }
ret = setSensorOrientation(mSensorOrientation);
if ( NO_ERROR != ret )
@@ -1575,14 +2017,6 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
mSensorOrientation = 0;
}
- ret = setVFramerate(mPreviewData->mMinFrameRate, mPreviewData->mMaxFrameRate);
- if ( ret != NO_ERROR )
- {
- CAMHAL_LOGEB("VFR configuration failed 0x%x", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
- }
-
if ( mComponentState == OMX_StateLoaded )
{
///Register for IDLE state switch event
@@ -1650,21 +2084,22 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
OMX_BUFFERHEADERTYPE *pBufferHdr;
for(int index=0;index<num;index++) {
+ OMX_U8 *ptr;
- CAMHAL_LOGDB("OMX_UseBuffer(0x%x)", buffers[index]);
+ ptr = (OMX_U8 *)camera_buffer_get_omx_ptr (&bufArr[index]);
eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
&pBufferHdr,
mCameraAdapterParameters.mPrevPortIndex,
0,
mPreviewData->mBufSize,
- (OMX_U8*)buffers[index]);
+ ptr);
if(eError!=OMX_ErrorNone)
{
CAMHAL_LOGEB("OMX_UseBuffer-0x%x", eError);
}
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- //pBufferHdr->pAppPrivate = (OMX_PTR)pBufferHdr;
+ pBufferHdr->pAppPrivate = (OMX_PTR)&bufArr[index];
pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
pBufferHdr->nVersion.s.nVersionMajor = 1 ;
pBufferHdr->nVersion.s.nVersionMinor = 1 ;
@@ -1679,15 +2114,19 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
for( int i = 0; i < num; i++ )
{
OMX_BUFFERHEADERTYPE *pBufHdr;
+ OMX_U8 *ptr;
+
+ ptr = (OMX_U8 *)camera_buffer_get_omx_ptr (&mPreviewDataBuffers[i]);
eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
&pBufHdr,
mCameraAdapterParameters.mMeasurementPortIndex,
0,
measurementData->mBufSize,
- (OMX_U8*)(mPreviewDataBuffers[i]));
+ ptr);
if ( eError == OMX_ErrorNone )
{
+ pBufHdr->pAppPrivate = (OMX_PTR *)&mPreviewDataBuffers[i];
pBufHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
pBufHdr->nVersion.s.nVersionMajor = 1 ;
pBufHdr->nVersion.s.nVersionMinor = 1 ;
@@ -1744,7 +2183,7 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
///If there is any failure, we reach here.
///Here, we do any resource freeing and convert from OMX error code to Camera Hal error code
@@ -1757,7 +2196,7 @@ EXIT:
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::startPreview()
@@ -1776,6 +2215,14 @@ status_t OMXCameraAdapter::startPreview()
goto EXIT;
}
+ // Enable all preview mode extra data.
+ if ( OMX_ErrorNone == eError) {
+ ret |= setExtraData(true, mCameraAdapterParameters.mPrevPortIndex, OMX_AncillaryData);
+#ifndef OMAP_TUNA
+ ret |= setExtraData(true, OMX_ALL, OMX_TI_VectShotInfo);
+#endif
+ }
+
mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
@@ -1838,11 +2285,11 @@ status_t OMXCameraAdapter::startPreview()
mStateSwitchLock.unlock();
- apply3Asettings(mParameters3A);
//Queue all the buffers on preview port
for(int index=0;index< mPreviewData->mMaxQueueable;index++)
{
CAMHAL_LOGDB("Queuing buffer on Preview port - 0x%x", (uint32_t)mPreviewData->mBufferHeader[index]->pBuffer);
+ mPreviewData->mStatus[index] = OMXCameraPortParameters::FILL;
eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
(OMX_BUFFERHEADERTYPE*)mPreviewData->mBufferHeader[index]);
if(eError!=OMX_ErrorNone)
@@ -1850,8 +2297,8 @@ status_t OMXCameraAdapter::startPreview()
CAMHAL_LOGEB("OMX_FillThisBuffer-0x%x", eError);
}
mFramesWithDucati++;
-#ifdef DEGUG_LOG
- mBuffersWithDucati.add((uint32_t)mPreviewData->mBufferHeader[index]->pBuffer,1);
+#ifdef CAMERAHAL_DEBUG
+ mBuffersWithDucati.add((int)mPreviewData->mBufferHeader[index]->pAppPrivate,1);
#endif
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
}
@@ -1862,6 +2309,7 @@ status_t OMXCameraAdapter::startPreview()
for(int index=0;index< mPreviewData->mNumBufs;index++)
{
CAMHAL_LOGDB("Queuing buffer on Measurement port - 0x%x", (uint32_t) measurementData->mBufferHeader[index]->pBuffer);
+ measurementData->mStatus[index] = OMXCameraPortParameters::FILL;
eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
(OMX_BUFFERHEADERTYPE*) measurementData->mBufferHeader[index]);
if(eError!=OMX_ErrorNone)
@@ -1873,22 +2321,7 @@ status_t OMXCameraAdapter::startPreview()
}
- // Enable Ancillary data. The nDCCStatus field is used to signify
- // whether the preview frame is a snapshot
- if ( OMX_ErrorNone == eError)
- {
- ret = setExtraData(true, OMX_ALL, OMX_AncillaryData);
- }
-
-
- if ( mPending3Asettings )
- apply3Asettings(mParameters3A);
-
- // enable focus callbacks just once here
- // fixes an issue with slow callback registration in Ducati
- if ( NO_ERROR == ret ) {
- ret = setFocusCallback(true);
- }
+ setFocusCallback(true);
//reset frame rate estimates
mFPS = 0.0f;
@@ -1904,10 +2337,11 @@ status_t OMXCameraAdapter::startPreview()
mLastFrameCount = 0;
mIter = 1;
mLastFPSTime = systemTime();
+ mTunnelDestroyed = false;
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
@@ -1916,11 +2350,11 @@ status_t OMXCameraAdapter::startPreview()
mStateSwitchLock.unlock();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-status_t OMXCameraAdapter::stopPreview()
+status_t OMXCameraAdapter::destroyTunnel()
{
LOG_FUNCTION_NAME;
@@ -1934,13 +2368,13 @@ status_t OMXCameraAdapter::stopPreview()
mCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
- if (mAdapterState == LOADED_PREVIEW_STATE) {
- // Something happened in CameraHal between UseBuffers and startPreview
- // this means that state switch is still locked..so we need to unlock else
- // deadlock will occur on the next start preview
- mStateSwitchLock.unlock();
- return NO_ERROR;
- }
+ if (mAdapterState == LOADED_PREVIEW_STATE) {
+ // Something happened in CameraHal between UseBuffers and startPreview
+ // this means that state switch is still locked..so we need to unlock else
+ // deadlock will occur on the next start preview
+ mStateSwitchLock.unlock();
+ return ALREADY_EXISTS;
+ }
if ( mComponentState != OMX_StateExecuting )
{
@@ -1950,7 +2384,7 @@ status_t OMXCameraAdapter::stopPreview()
}
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
// we should wait for the first frame to come before trying to stopPreview...if not
// we might put OMXCamera in a bad state (IDLE->LOADED timeout). Seeing this a lot
// after a capture
@@ -1969,11 +2403,9 @@ status_t OMXCameraAdapter::stopPreview()
mFirstFrameCondition.broadcast();
}
- ret = cancelAutoFocus();
- if(ret!=NO_ERROR)
{
- CAMHAL_LOGEB("Error canceling autofocus %d", ret);
- // Error, but we probably still want to continue to stop preview
+ android::AutoMutex lock(mDoAFMutex);
+ mDoAFCond.broadcast();
}
OMX_CONFIG_FOCUSASSISTTYPE focusAssist;
@@ -2016,89 +2448,53 @@ status_t OMXCameraAdapter::stopPreview()
goto EXIT;
}
- ///Register for Preview port Disable event
- ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortDisable,
- mCameraAdapterParameters.mPrevPortIndex,
- mStopPreviewSem);
-
- ///Disable Preview Port
- eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
- OMX_CommandPortDisable,
- mCameraAdapterParameters.mPrevPortIndex,
- NULL);
-
- ///Free the OMX Buffers
- for ( int i = 0 ; i < mPreviewData->mNumBufs ; i++ )
- {
- eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
- mCameraAdapterParameters.mPrevPortIndex,
- mPreviewData->mBufferHeader[i]);
-
- if(eError!=OMX_ErrorNone)
- {
- CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
- }
- GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- }
-
- if ( mMeasurementEnabled )
- {
+ switchToIdle();
- for ( int i = 0 ; i < measurementData->mNumBufs ; i++ )
- {
- eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
- mCameraAdapterParameters.mMeasurementPortIndex,
- measurementData->mBufferHeader[i]);
- if(eError!=OMX_ErrorNone)
- {
- CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
- }
- GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- }
+ mTunnelDestroyed = true;
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
- {
- Mutex::Autolock lock(mPreviewDataBufferLock);
- mPreviewDataBuffersAvailable.clear();
- }
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
+ ///Clear all the available preview buffers
+ mPreviewBuffersAvailable.clear();
+ }
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
- }
+}
- CAMHAL_LOGDA("Disabling preview port");
- ret = mStopPreviewSem.WaitTimeout(OMX_CMD_TIMEOUT);
+status_t OMXCameraAdapter::stopPreview() {
+ LOG_FUNCTION_NAME;
- //If somethiing bad happened while we wait
- if (mComponentState == OMX_StateInvalid)
- {
- CAMHAL_LOGEA("Invalid State after Disabling preview port Exitting!!!");
- goto EXIT;
- }
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ status_t ret = NO_ERROR;
- if ( NO_ERROR == ret )
- {
- CAMHAL_LOGDA("Preview port disabled");
+ if (mTunnelDestroyed == false){
+ ret = destroyTunnel();
+ if (ret == ALREADY_EXISTS) {
+ // Special case to handle invalid stopping preview in LOADED_PREVIEW_STATE
+ return NO_ERROR;
}
- else
- {
- ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortDisable,
- mCameraAdapterParameters.mPrevPortIndex,
- NULL);
- CAMHAL_LOGEA("Timeout expired on preview port disable");
- goto EXIT;
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGEB(" destroyTunnel returned error ");
+ return ret;
}
+ }
- {
- Mutex::Autolock lock(mPreviewBufferLock);
+ mTunnelDestroyed = false;
+
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
///Clear all the available preview buffers
mPreviewBuffersAvailable.clear();
- }
+ }
switchToLoaded();
-
mFirstTimeInit = true;
mPendingCaptureSettings = 0;
mFramesWithDucati = 0;
@@ -2107,19 +2503,7 @@ status_t OMXCameraAdapter::stopPreview()
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
-
-EXIT:
- CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
- {
- Mutex::Autolock lock(mPreviewBufferLock);
- ///Clear all the available preview buffers
- mPreviewBuffersAvailable.clear();
- }
- performCleanupAfterError();
- LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
-
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::setSensorOverclock(bool enable)
@@ -2154,7 +2538,6 @@ status_t OMXCameraAdapter::setSensorOverclock(bool enable)
if ( OMX_ErrorNone != eError )
{
CAMHAL_LOGEB("Error while setting Sensor overclock 0x%x", eError);
- ret = BAD_VALUE;
}
else
{
@@ -2164,7 +2547,7 @@ status_t OMXCameraAdapter::setSensorOverclock(bool enable)
LOG_FUNCTION_NAME_EXIT;
- return ret;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::printComponentVersion(OMX_HANDLETYPE handle)
@@ -2259,15 +2642,72 @@ status_t OMXCameraAdapter::printComponentVersion(OMX_HANDLETYPE handle)
return ret;
}
+#ifndef OMAP_TUNA
+status_t OMXCameraAdapter::setS3DFrameLayout(OMX_U32 port) const
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_FRAMELAYOUTTYPE frameLayout;
+ const OMXCameraPortParameters *cap =
+ &mCameraAdapterParameters.mCameraPortParams[port];
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR (&frameLayout, OMX_TI_FRAMELAYOUTTYPE);
+ frameLayout.nPortIndex = port;
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamStereoFrmLayout, &frameLayout);
+ if (eError != OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("Error while getting S3D frame layout: 0x%x", eError);
+ return -EINVAL;
+ }
+
+ if (cap->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottomSubsample)
+ {
+ frameLayout.eFrameLayout = OMX_TI_StereoFrameLayoutTopBottom;
+ frameLayout.nSubsampleRatio = 2;
+ }
+ else if (cap->mFrameLayoutType ==
+ OMX_TI_StereoFrameLayoutLeftRightSubsample)
+ {
+ frameLayout.eFrameLayout = OMX_TI_StereoFrameLayoutLeftRight;
+ frameLayout.nSubsampleRatio = 2;
+ }
+ else
+ {
+ frameLayout.eFrameLayout = cap->mFrameLayoutType;
+ frameLayout.nSubsampleRatio = 1;
+ }
+ frameLayout.nSubsampleRatio = frameLayout.nSubsampleRatio << 7;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamStereoFrmLayout, &frameLayout);
+ if (eError != OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("Error while setting S3D frame layout: 0x%x", eError);
+ return -EINVAL;
+ }
+ else
+ {
+ CAMHAL_LOGDB("S3D frame layout %d applied successfully on port %lu",
+ frameLayout.eFrameLayout, port);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+#endif
+
status_t OMXCameraAdapter::autoFocus()
{
status_t ret = NO_ERROR;
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
if (mFrameCount < 1) {
// first frame may time some time to come...so wait for an adequate amount of time
// which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover.
@@ -2285,7 +2725,7 @@ status_t OMXCameraAdapter::autoFocus()
EXIT:
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -2293,12 +2733,12 @@ status_t OMXCameraAdapter::autoFocus()
status_t OMXCameraAdapter::takePicture()
{
status_t ret = NO_ERROR;
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
if (mFrameCount < 1) {
// first frame may time some time to come...so wait for an adequate amount of time
// which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover.
@@ -2310,8 +2750,16 @@ status_t OMXCameraAdapter::takePicture()
}
}
- msg.command = CommandHandler::CAMERA_START_IMAGE_CAPTURE;
+ // TODO(XXX): re-using take picture to kick off reprocessing pipe
+ // Need to rethink this approach during reimplementation
+ if (mNextState == REPROCESS_STATE) {
+ msg.command = CommandHandler::CAMERA_START_REPROCESS;
+ } else {
+ msg.command = CommandHandler::CAMERA_START_IMAGE_CAPTURE;
+ }
+
msg.arg1 = mErrorNotifier;
+ msg.arg2 = cacheCaptureParameters();
ret = mCommandHandler->put(&msg);
EXIT:
@@ -2345,7 +2793,7 @@ status_t OMXCameraAdapter::getFrameSize(size_t &width, size_t &height)
if ( mOMXStateSwitch )
{
- ret = switchToLoaded();
+ ret = switchToLoaded(true);
if ( NO_ERROR != ret )
{
CAMHAL_LOGEB("switchToLoaded() failed 0x%x", ret);
@@ -2358,78 +2806,54 @@ status_t OMXCameraAdapter::getFrameSize(size_t &width, size_t &height)
if ( OMX_StateLoaded == mComponentState )
{
- ret = setLDC(mIPP);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setLDC() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- goto exit;
+ if (mPendingPreviewSettings & SetLDC) {
+ mPendingPreviewSettings &= ~SetLDC;
+ ret = setLDC(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setLDC() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ goto exit;
}
+ }
- ret = setNSF(mIPP);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setNSF() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- goto exit;
+ if (mPendingPreviewSettings & SetNSF) {
+ mPendingPreviewSettings &= ~SetNSF;
+ ret = setNSF(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setNSF() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ goto exit;
}
+ }
- ret = setCaptureMode(mCapMode);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
+ if (mPendingPreviewSettings & SetCapMode) {
+ mPendingPreviewSettings &= ~SetCapMode;
+ ret = setCaptureMode(mCapMode);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
}
+ }
- if(mCapMode == OMXCameraAdapter::VIDEO_MODE)
- {
- if ( NO_ERROR == ret )
- {
- ///Enable/Disable Video Noise Filter
- ret = enableVideoNoiseFilter(mVnfEnabled);
- }
+ if(mCapMode == OMXCameraAdapter::VIDEO_MODE) {
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VNF %x", ret);
+ if (mPendingPreviewSettings & SetVNF) {
+ mPendingPreviewSettings &= ~SetVNF;
+ ret = enableVideoNoiseFilter(mVnfEnabled);
+ if ( NO_ERROR != ret){
+ CAMHAL_LOGEB("Error configuring VNF %x", ret);
}
+ }
- if ( NO_ERROR == ret )
- {
- ///Enable/Disable Video Stabilization
+ if (mPendingPreviewSettings & SetVSTAB) {
+ mPendingPreviewSettings &= ~SetVSTAB;
ret = enableVideoStabilization(mVstabEnabled);
- }
-
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
- }
- }
- else
- {
- if ( NO_ERROR == ret )
- {
- ///Enable/Disable Video Noise Filter
- ret = enableVideoNoiseFilter(false);
- }
-
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VNF %x", ret);
- }
-
- if ( NO_ERROR == ret )
- {
- ///Enable/Disable Video Stabilization
- ret = enableVideoStabilization(false);
- }
-
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
+ if ( NO_ERROR != ret) {
+ CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
}
}
}
+ }
ret = setSensorOrientation(mSensorOrientation);
if ( NO_ERROR != ret )
@@ -2522,9 +2946,6 @@ void OMXCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
LOG_FUNCTION_NAME;
static const unsigned int DEGREES_TILT_IGNORE = 45;
- int device_orientation = 0;
- int mount_orientation = 0;
- const char *facing_direction = NULL;
// if tilt angle is greater than DEGREES_TILT_IGNORE
// we are going to ignore the orientation returned from
@@ -2534,34 +2955,36 @@ void OMXCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
return;
}
+ int mountOrientation = 0;
+ bool isFront = false;
if (mCapabilities) {
- if (mCapabilities->get(CameraProperties::ORIENTATION_INDEX)) {
- mount_orientation = atoi(mCapabilities->get(CameraProperties::ORIENTATION_INDEX));
+ const char * const mountOrientationString =
+ mCapabilities->get(CameraProperties::ORIENTATION_INDEX);
+ if (mountOrientationString) {
+ mountOrientation = atoi(mountOrientationString);
}
- facing_direction = mCapabilities->get(CameraProperties::FACING_INDEX);
- }
- // calculate device orientation relative to the sensor orientation
- // front camera display is mirrored...needs to be accounted for when orientation
- // is 90 or 270...since this will result in a flip on orientation otherwise
- if (facing_direction && !strcmp(facing_direction, TICameraParameters::FACING_FRONT) &&
- (orientation == 90 || orientation == 270)) {
- device_orientation = (orientation - mount_orientation + 360) % 360;
- } else { // back-facing camera
- device_orientation = (orientation + mount_orientation) % 360;
+ const char * const facingString = mCapabilities->get(CameraProperties::FACING_INDEX);
+ if (facingString) {
+ isFront = strcmp(facingString, TICameraParameters::FACING_FRONT) == 0;
+ }
}
- if (device_orientation != mDeviceOrientation) {
- mDeviceOrientation = device_orientation;
+ // direction is a constant sign for facing, meaning the rotation direction relative to device
+ // +1 (clockwise) for back sensor and -1 (counter-clockwise) for front sensor
+ const int direction = isFront ? -1 : 1;
-#ifndef OMAP_TUNA
- mFaceDetectionLock.lock();
- if (mFaceDetectionRunning) {
- // restart face detection with new rotation
- setFaceDetection(true, mDeviceOrientation);
- }
- mFaceDetectionLock.unlock();
-#endif
+ int rotation = mountOrientation + direction*orientation;
+
+ // crop the calculated value to [0..360) range
+ while ( rotation < 0 ) rotation += 360;
+ rotation %= 360;
+
+ if (rotation != mDeviceOrientation) {
+ mDeviceOrientation = rotation;
+
+ // restart face detection with new rotation
+ setFaceDetectionOrientation(mDeviceOrientation);
}
CAMHAL_LOGVB("orientation = %d tilt = %d device_orientation = %d", orientation, tilt, mDeviceOrientation);
@@ -2652,10 +3075,10 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETY
{
CAMHAL_LOGEB("***Removing %d EVENTS***** \n", mEventSignalQ.size());
//remove from queue and free msg
- TIUTILS::Message *msg = mEventSignalQ.itemAt(i);
+ Utils::Message *msg = mEventSignalQ.itemAt(i);
if ( NULL != msg )
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
if ( sem )
{
sem->Signal();
@@ -2714,8 +3137,8 @@ OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_U32 nData2,
OMX_IN OMX_PTR pEventData)
{
- Mutex::Autolock lock(mEventLock);
- TIUTILS::Message *msg;
+ android::AutoMutex lock(mEventLock);
+ Utils::Message *msg;
bool eventSignalled = false;
LOG_FUNCTION_NAME;
@@ -2734,7 +3157,7 @@ OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
&& ( !msg->arg2 || ( OMX_U32 ) msg->arg2 == nData2 )
&& msg->arg3)
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
CAMHAL_LOGDA("Event matched, signalling sem");
mEventSignalQ.removeAt(i);
//Signal the semaphore provided
@@ -2756,7 +3179,7 @@ OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
// Handling for focus callback
if ((nData2 == OMX_IndexConfigCommonFocusStatus) &&
(eEvent == (OMX_EVENTTYPE) OMX_EventIndexSettingChanged)) {
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = OMXCallbackHandler::CAMERA_FOCUS_STATUS;
msg.arg1 = NULL;
msg.arg2 = NULL;
@@ -2775,8 +3198,8 @@ OMX_ERRORTYPE OMXCameraAdapter::RemoveEvent(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_U32 nData2,
OMX_IN OMX_PTR pEventData)
{
- Mutex::Autolock lock(mEventLock);
- TIUTILS::Message *msg;
+ android::AutoMutex lock(mEventLock);
+ Utils::Message *msg;
LOG_FUNCTION_NAME;
if ( !mEventSignalQ.isEmpty() )
@@ -2793,7 +3216,7 @@ OMX_ERRORTYPE OMXCameraAdapter::RemoveEvent(OMX_IN OMX_HANDLETYPE hComponent,
&& ( !msg->arg2 || ( OMX_U32 ) msg->arg2 == nData2 )
&& msg->arg3)
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
CAMHAL_LOGDA("Event matched, signalling sem");
mEventSignalQ.removeAt(i);
free(msg);
@@ -2816,14 +3239,14 @@ status_t OMXCameraAdapter::RegisterForEvent(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_EVENTTYPE eEvent,
OMX_IN OMX_U32 nData1,
OMX_IN OMX_U32 nData2,
- OMX_IN Semaphore &semaphore)
+ OMX_IN Utils::Semaphore &semaphore)
{
status_t ret = NO_ERROR;
ssize_t res;
- Mutex::Autolock lock(mEventLock);
+ android::AutoMutex lock(mEventLock);
LOG_FUNCTION_NAME;
- TIUTILS::Message * msg = ( struct TIUTILS::Message * ) malloc(sizeof(struct TIUTILS::Message));
+ Utils::Message * msg = ( struct Utils::Message * ) malloc(sizeof(struct Utils::Message));
if ( NULL != msg )
{
msg->command = ( unsigned int ) eEvent;
@@ -2871,11 +3294,36 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDL
OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
{
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME;
+ status_t stat = NO_ERROR;
+ status_t res1, res2;
+ OMXCameraPortParameters *pPortParam;
+ CameraFrame::FrameType typeOfFrame = CameraFrame::ALL_FRAMES;
+ unsigned int refCount = 0;
+ unsigned int mask = 0xFFFF;
+ CameraFrame cameraFrame;
+ OMX_TI_PLATFORMPRIVATE *platformPrivate;
- LOG_FUNCTION_NAME_EXIT;
+ res1 = res2 = NO_ERROR;
- return OMX_ErrorNone;
+ if (!pBuffHeader || !pBuffHeader->pBuffer) {
+ CAMHAL_LOGE("NULL Buffer from OMX");
+ return OMX_ErrorNone;
+ }
+
+ pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nInputPortIndex]);
+ platformPrivate = (OMX_TI_PLATFORMPRIVATE*) pBuffHeader->pPlatformPrivate;
+
+ if (pBuffHeader->nInputPortIndex == OMX_CAMERA_PORT_VIDEO_IN_VIDEO) {
+ typeOfFrame = CameraFrame::REPROCESS_INPUT_FRAME;
+ mask = (unsigned int)CameraFrame::REPROCESS_INPUT_FRAME;
+
+ stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return OMX_ErrorNone;
}
static void debugShowFPS()
@@ -2891,7 +3339,7 @@ static void debugShowFPS()
mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
mLastFpsTime = now;
mLastFrameCount = mFrameCount;
- ALOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ CAMHAL_LOGI("Camera %d Frames, %f FPS", mFrameCount, mFps);
}
// XXX: mFPS has the value we want
}
@@ -2903,7 +3351,7 @@ OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
{
- TIUTILS::Message msg;
+ Utils::Message msg;
OMX_ERRORTYPE eError = OMX_ErrorNone;
if (UNLIKELY(mDebugFps)) {
@@ -2922,6 +3370,48 @@ OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
return eError;
}
+#ifdef CAMERAHAL_OMX_PROFILING
+
+status_t OMXCameraAdapter::storeProfilingData(OMX_BUFFERHEADERTYPE* pBuffHeader) {
+ OMX_TI_PLATFORMPRIVATE *platformPrivate = NULL;
+ OMX_OTHER_EXTRADATATYPE *extraData = NULL;
+ FILE *fd = NULL;
+
+ LOG_FUNCTION_NAME
+
+ if ( UNLIKELY( mDebugProfile ) ) {
+
+ platformPrivate = static_cast<OMX_TI_PLATFORMPRIVATE *> (pBuffHeader->pPlatformPrivate);
+ extraData = getExtradata(static_cast<OMX_OTHER_EXTRADATATYPE *> (platformPrivate->pMetaDataBuffer),
+ platformPrivate->nMetaDataSize,
+ static_cast<OMX_EXTRADATATYPE> (OMX_TI_ProfilerData));
+
+ if ( NULL != extraData ) {
+ if( extraData->eType == static_cast<OMX_EXTRADATATYPE> (OMX_TI_ProfilerData) ) {
+
+ fd = fopen(DEFAULT_PROFILE_PATH, "ab");
+ if ( NULL != fd ) {
+ fwrite(extraData->data, 1, extraData->nDataSize, fd);
+ fclose(fd);
+ } else {
+ return -errno;
+ }
+
+ } else {
+ return NOT_ENOUGH_DATA;
+ }
+ } else {
+ return NOT_ENOUGH_DATA;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return NO_ERROR;
+}
+
+#endif
+
/*========================================================*/
/* @ fn SampleTest_FillBufferDone :: Application callback*/
/*========================================================*/
@@ -2938,22 +3428,39 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
BaseCameraAdapter::AdapterState state, nextState;
BaseCameraAdapter::getState(state);
BaseCameraAdapter::getNextState(nextState);
- sp<CameraFDResult> fdResult = NULL;
+ android::sp<CameraMetadataResult> metadataResult = NULL;
unsigned int mask = 0xFFFF;
CameraFrame cameraFrame;
- OMX_TI_PLATFORMPRIVATE *platformPrivate;
OMX_OTHER_EXTRADATATYPE *extraData;
OMX_TI_ANCILLARYDATATYPE *ancillaryData = NULL;
bool snapshotFrame = false;
+ if ( NULL == pBuffHeader ) {
+ return OMX_ErrorBadParameter;
+ }
+
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ storeProfilingData(pBuffHeader);
+
+#endif
+
res1 = res2 = NO_ERROR;
- pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nOutputPortIndex]);
if ( !pBuffHeader || !pBuffHeader->pBuffer ) {
CAMHAL_LOGEA("NULL Buffer from OMX");
return OMX_ErrorNone;
}
+ pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nOutputPortIndex]);
+
+ // Find buffer and mark it as filled
+ for (int i = 0; i < pPortParam->mNumBufs; i++) {
+ if (pPortParam->mBufferHeader[i] == pBuffHeader) {
+ pPortParam->mStatus[i] = OMXCameraPortParameters::DONE;
+ }
+ }
+
if (pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW)
{
@@ -2962,44 +3469,26 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
return OMX_ErrorNone;
}
- if ( mWaitingForSnapshot )
- {
- platformPrivate = (OMX_TI_PLATFORMPRIVATE*) pBuffHeader->pPlatformPrivate;
- extraData = getExtradata((OMX_OTHER_EXTRADATATYPE*) platformPrivate->pMetaDataBuffer,
- platformPrivate->nMetaDataSize, (OMX_EXTRADATATYPE) OMX_AncillaryData);
+ if ( mWaitingForSnapshot ) {
+ extraData = getExtradata(pBuffHeader->pPlatformPrivate,
+ (OMX_EXTRADATATYPE) OMX_AncillaryData);
- if ( NULL != extraData )
- {
+ if ( NULL != extraData ) {
ancillaryData = (OMX_TI_ANCILLARYDATATYPE*) extraData->data;
+#ifdef OMAP_TUNA
snapshotFrame = ancillaryData->nDCCStatus;
- mPending3Asettings |= SetFocus;
+#else
+ if ((OMX_2D_Snap == ancillaryData->eCameraView)
+ || (OMX_3D_Left_Snap == ancillaryData->eCameraView)
+ || (OMX_3D_Right_Snap == ancillaryData->eCameraView)) {
+ snapshotFrame = OMX_TRUE;
+ } else {
+ snapshotFrame = OMX_FALSE;
}
- }
-
- recalculateFPS();
-#ifndef OMAP_TUNA
- {
- Mutex::Autolock lock(mFaceDetectionLock);
- if ( mFaceDetectionRunning && !mFaceDetectionPaused ) {
- detectFaces(pBuffHeader, fdResult, pPortParam->mWidth, pPortParam->mHeight);
- if ( NULL != fdResult.get() ) {
- notifyFaceSubscribers(fdResult);
- fdResult.clear();
- }
- if ( mFDSwitchAlgoPriority ) {
-
- //Disable region priority and enable face priority for AF
- setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);
- setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , true);
-
- //Disable Region priority and enable Face priority
- setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, false);
- setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, true);
- mFDSwitchAlgoPriority = false;
- }
- }
- }
#endif
+ mPending3Asettings |= SetFocus;
+ }
+ }
///Prepare the frames to be sent - initialize CameraFrame object and reference count
// TODO(XXX): ancillary data for snapshot frame is not being sent for video snapshot
@@ -3014,8 +3503,8 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
// video snapshot gets ancillary data and wb info from last snapshot frame
mCaptureAncillaryData = ancillaryData;
mWhiteBalanceData = NULL;
- extraData = getExtradata((OMX_OTHER_EXTRADATATYPE*) platformPrivate->pMetaDataBuffer,
- platformPrivate->nMetaDataSize, (OMX_EXTRADATATYPE) OMX_WhiteBalance);
+ extraData = getExtradata(pBuffHeader->pPlatformPrivate,
+ (OMX_EXTRADATATYPE) OMX_WhiteBalance);
if ( NULL != extraData )
{
mWhiteBalanceData = (OMX_TI_WHITEBALANCERESULTTYPE*) extraData->data;
@@ -3033,13 +3522,11 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
mFramesWithEncoder++;
}
- //ALOGV("FBD pBuffer = 0x%x", pBuffHeader->pBuffer);
+ //CAMHAL_LOGV("FBD pBuffer = 0x%x", pBuffHeader->pBuffer);
if( mWaitingForSnapshot )
{
- mSnapshotCount++;
-
- if ( (mSnapshotCount == 1) &&
+ if (!mBracketingEnabled &&
((HIGH_SPEED == mCapMode) || (VIDEO_MODE == mCapMode)) )
{
notifyShutterSubscribers();
@@ -3051,11 +3538,11 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
mFramesWithDucati--;
-#ifdef DEBUG_LOG
- if(mBuffersWithDucati.indexOfKey((int)pBuffHeader->pBuffer)<0)
+#ifdef CAMERAHAL_DEBUG
+ if(mBuffersWithDucati.indexOfKey((uint32_t)pBuffHeader->pBuffer)<0)
{
- ALOGE("Buffer was never with Ducati!! 0x%x", pBuffHeader->pBuffer);
- for(int i=0;i<mBuffersWithDucati.size();i++) ALOGE("0x%x", mBuffersWithDucati.keyAt(i));
+ CAMHAL_LOGE("Buffer was never with Ducati!! %p", pBuffHeader->pBuffer);
+ for(unsigned int i=0;i<mBuffersWithDucati.size();i++) CAMHAL_LOGE("0x%x", mBuffersWithDucati.keyAt(i));
}
mBuffersWithDucati.removeItem((int)pBuffHeader->pBuffer);
#endif
@@ -3063,6 +3550,33 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
if(mDebugFcs)
CAMHAL_LOGEB("C[%d] D[%d] E[%d]", mFramesWithDucati, mFramesWithDisplay, mFramesWithEncoder);
+ recalculateFPS();
+
+ createPreviewMetadata(pBuffHeader, metadataResult, pPortParam->mWidth, pPortParam->mHeight);
+ if ( NULL != metadataResult.get() ) {
+ notifyMetadataSubscribers(metadataResult);
+ metadataResult.clear();
+ }
+
+ {
+ android::AutoMutex lock(mFaceDetectionLock);
+ if ( mFDSwitchAlgoPriority ) {
+
+ //Disable region priority and enable face priority for AF
+ setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);
+ setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , true);
+
+ //Disable Region priority and enable Face priority
+ setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, false);
+ setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, true);
+ mFDSwitchAlgoPriority = false;
+ }
+ }
+
+#ifndef OMAP_TUNA
+ sniffDccFileDataSave(pBuffHeader);
+#endif
+
stat |= advanceZoom();
// On the fly update to 3A settings not working
@@ -3070,10 +3584,9 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
// or in the middle of transitioning to it
if( mPending3Asettings &&
( (nextState & CAPTURE_ACTIVE) == 0 ) &&
- ( (state & CAPTURE_ACTIVE) == 0 ) )
- {
+ ( (state & CAPTURE_ACTIVE) == 0 ) ) {
apply3Asettings(mParameters3A);
- }
+ }
}
else if( pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT )
@@ -3084,11 +3597,11 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
}
else if( pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_IMAGE_OUT_IMAGE )
- {
+ {
OMX_COLOR_FORMATTYPE pixFormat;
const char *valstr = NULL;
- pixFormat = mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mColorFormat;
+ pixFormat = pPortParam->mColorFormat;
if ( OMX_COLOR_FormatUnused == pixFormat )
{
@@ -3096,13 +3609,15 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
mask = (unsigned int) CameraFrame::IMAGE_FRAME;
} else if ( pixFormat == OMX_COLOR_FormatCbYCrY &&
((mPictureFormatFromClient &&
- !strcmp(mPictureFormatFromClient, CameraParameters::PIXEL_FORMAT_JPEG)) ||
- !mPictureFormatFromClient) ) {
+ !strcmp(mPictureFormatFromClient,
+ android::CameraParameters::PIXEL_FORMAT_JPEG)) ||
+ !mPictureFormatFromClient) ) {
// signals to callbacks that this needs to be coverted to jpeg
// before returning to framework
typeOfFrame = CameraFrame::IMAGE_FRAME;
mask = (unsigned int) CameraFrame::IMAGE_FRAME;
cameraFrame.mQuirks |= CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG;
+ cameraFrame.mQuirks |= CameraFrame::FORMAT_YUV422I_UYVY;
// populate exif data and pass to subscribers via quirk
// subscriber is in charge of freeing exif data
@@ -3110,12 +3625,10 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
setupEXIF_libjpeg(exif, mCaptureAncillaryData, mWhiteBalanceData);
cameraFrame.mQuirks |= CameraFrame::HAS_EXIF_DATA;
cameraFrame.mCookie2 = (void*) exif;
- }
- else
- {
+ } else {
typeOfFrame = CameraFrame::RAW_FRAME;
mask = (unsigned int) CameraFrame::RAW_FRAME;
- }
+ }
pPortParam->mImageType = typeOfFrame;
@@ -3131,7 +3644,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
}
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( mBracketingEnabled )
{
doBracketing(pBuffHeader, typeOfFrame);
@@ -3139,28 +3652,131 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
}
}
+ if (mZoomBracketingEnabled) {
+ doZoom(mZoomBracketingValues[mCurrentZoomBracketing]);
+ CAMHAL_LOGDB("Current Zoom Bracketing: %d", mZoomBracketingValues[mCurrentZoomBracketing]);
+ mCurrentZoomBracketing++;
+ if (mCurrentZoomBracketing == ARRAY_SIZE(mZoomBracketingValues)) {
+ mZoomBracketingEnabled = false;
+ }
+ }
+
if ( 1 > mCapturedFrames )
{
goto EXIT;
}
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ if ( NULL != mSharedAllocator ) {
+ cameraFrame.mMetaData = new CameraMetadataResult(getMetaData(pBuffHeader->pPlatformPrivate, mSharedAllocator));
+ }
+#endif
+
CAMHAL_LOGDB("Captured Frames: %d", mCapturedFrames);
mCapturedFrames--;
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if (mYuvCapture) {
+ struct timeval timeStampUsec;
+ gettimeofday(&timeStampUsec, NULL);
+
+ time_t saveTime;
+ time(&saveTime);
+ const struct tm * const timeStamp = gmtime(&saveTime);
+
+ char filename[256];
+ snprintf(filename,256, "%s/yuv_%d_%d_%d_%lu.yuv",
+ kYuvImagesOutputDirPath,
+ timeStamp->tm_hour,
+ timeStamp->tm_min,
+ timeStamp->tm_sec,
+ timeStampUsec.tv_usec);
+
+ const status_t saveBufferStatus = saveBufferToFile(((CameraBuffer*)pBuffHeader->pAppPrivate)->mapped,
+ pBuffHeader->nFilledLen, filename);
+
+ if (saveBufferStatus != OK) {
+ CAMHAL_LOGE("ERROR: %d, while saving yuv!", saveBufferStatus);
+ } else {
+ CAMHAL_LOGD("yuv_%d_%d_%d_%lu.yuv successfully saved in %s",
+ timeStamp->tm_hour,
+ timeStamp->tm_min,
+ timeStamp->tm_sec,
+ timeStampUsec.tv_usec,
+ kYuvImagesOutputDirPath);
+ }
+ }
+#endif
+
stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ if ( NULL != cameraFrame.mMetaData.get() ) {
+ cameraFrame.mMetaData.clear();
+ }
+#endif
}
- else
- {
- CAMHAL_LOGEA("Frame received for non-(preview/capture/measure) port. This is yet to be supported");
- goto EXIT;
+ else if (pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_VIDEO) {
+ typeOfFrame = CameraFrame::RAW_FRAME;
+ pPortParam->mImageType = typeOfFrame;
+ {
+ android::AutoMutex lock(mLock);
+ if( ( CAPTURE_ACTIVE & state ) != CAPTURE_ACTIVE ) {
+ goto EXIT;
+ }
+ }
+
+ CAMHAL_LOGD("RAW buffer done on video port, length = %d", pBuffHeader->nFilledLen);
+
+ mask = (unsigned int) CameraFrame::RAW_FRAME;
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if ( mRawCapture ) {
+ struct timeval timeStampUsec;
+ gettimeofday(&timeStampUsec, NULL);
+
+ time_t saveTime;
+ time(&saveTime);
+ const struct tm * const timeStamp = gmtime(&saveTime);
+
+ char filename[256];
+ snprintf(filename,256, "%s/raw_%d_%d_%d_%lu.raw",
+ kRawImagesOutputDirPath,
+ timeStamp->tm_hour,
+ timeStamp->tm_min,
+ timeStamp->tm_sec,
+ timeStampUsec.tv_usec);
+
+ const status_t saveBufferStatus = saveBufferToFile( ((CameraBuffer*)pBuffHeader->pAppPrivate)->mapped,
+ pBuffHeader->nFilledLen, filename);
+
+ if (saveBufferStatus != OK) {
+ CAMHAL_LOGE("ERROR: %d , while saving raw!", saveBufferStatus);
+ } else {
+ CAMHAL_LOGD("raw_%d_%d_%d_%lu.raw successfully saved in %s",
+ timeStamp->tm_hour,
+ timeStamp->tm_min,
+ timeStamp->tm_sec,
+ timeStampUsec.tv_usec,
+ kRawImagesOutputDirPath);
+ stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
+ }
+ }
+#endif
+ } else {
+ CAMHAL_LOGEA("Frame received for non-(preview/capture/measure) port. This is yet to be supported");
+ goto EXIT;
}
if ( NO_ERROR != stat )
{
+ CameraBuffer *camera_buffer;
+
+ camera_buffer = (CameraBuffer *)pBuffHeader->pAppPrivate;
+
CAMHAL_LOGDB("sendFrameToSubscribers error: %d", stat);
- returnFrame(pBuffHeader->pBuffer, typeOfFrame);
+ returnFrame(camera_buffer, typeOfFrame);
}
return eError;
@@ -3185,7 +3801,7 @@ status_t OMXCameraAdapter::recalculateFPS()
float currentFPS;
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
mFrameCount++;
if (mFrameCount == 1) {
mFirstFrameCondition.broadcast();
@@ -3217,23 +3833,6 @@ status_t OMXCameraAdapter::recalculateFPS()
return NO_ERROR;
}
-status_t OMXCameraAdapter::sendFrame(CameraFrame &frame)
-{
- status_t ret = NO_ERROR;
-
- LOG_FUNCTION_NAME;
-
-
- if ( NO_ERROR == ret )
- {
- ret = sendFrameToSubscribers(&frame);
- }
-
- LOG_FUNCTION_NAME_EXIT;
-
- return ret;
-}
-
status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, unsigned int mask, OMXCameraPortParameters *port)
{
status_t ret = NO_ERROR;
@@ -3252,18 +3851,18 @@ status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEA
return -EINVAL;
}
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
//frame.mFrameType = typeOfFrame;
frame.mFrameMask = mask;
- frame.mBuffer = pBuffHeader->pBuffer;
+ frame.mBuffer = (CameraBuffer *)pBuffHeader->pAppPrivate;
frame.mLength = pBuffHeader->nFilledLen;
frame.mAlignment = port->mStride;
frame.mOffset = pBuffHeader->nOffset;
frame.mWidth = port->mWidth;
frame.mHeight = port->mHeight;
- frame.mYuv[0] = NULL;
- frame.mYuv[1] = NULL;
+ frame.mYuv[0] = 0; //NULL;
+ frame.mYuv[1] = 0; //NULL;
if ( onlyOnce && mRecording )
{
@@ -3288,60 +3887,9 @@ status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEA
return ret;
}
-status_t OMXCameraAdapter::initCameraFrame( CameraFrame &frame,
- OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader,
- int typeOfFrame,
- OMXCameraPortParameters *port)
-{
- status_t ret = NO_ERROR;
-
- LOG_FUNCTION_NAME;
-
- if ( NULL == port)
- {
- CAMHAL_LOGEA("Invalid portParam");
- return -EINVAL;
- }
-
- if ( NULL == pBuffHeader )
- {
- CAMHAL_LOGEA("Invalid Buffer header");
- return -EINVAL;
- }
-
- frame.mFrameType = typeOfFrame;
- frame.mBuffer = pBuffHeader->pBuffer;
- frame.mLength = pBuffHeader->nFilledLen;
- frame.mAlignment = port->mStride;
- frame.mOffset = pBuffHeader->nOffset;
- frame.mWidth = port->mWidth;
- frame.mHeight = port->mHeight;
-
- // Timestamp in pBuffHeader->nTimeStamp is derived on DUCATI side, which is
- // is not same time value as derived using systemTime. It would be ideal to use
- // exactly same time source across Android and Ducati, which is limited by
- // system now. So, workaround for now is to find the time offset between the two
- // time sources and compensate the difference, along with the latency involved
- // in camera buffer reaching CameraHal. Also, Do timeset offset calculation only
- // when recording is in progress, when nTimestamp will be populated by Camera
- if ( onlyOnce && mRecording )
- {
- mTimeSourceDelta = (pBuffHeader->nTimeStamp * 1000) - systemTime(SYSTEM_TIME_MONOTONIC);
- mTimeSourceDelta += kCameraBufferLatencyNs;
- onlyOnce = false;
- }
-
- // Calculating the new video timestamp based on offset from ducati source.
- frame.mTimestamp = (pBuffHeader->nTimeStamp * 1000) - mTimeSourceDelta;
-
- LOG_FUNCTION_NAME_EXIT;
-
- return ret;
-}
-
bool OMXCameraAdapter::CommandHandler::Handler()
{
- TIUTILS::Message msg;
+ Utils::Message msg;
volatile int forever = 1;
status_t stat;
ErrorNotifier *errorNotify = NULL;
@@ -3352,16 +3900,19 @@ bool OMXCameraAdapter::CommandHandler::Handler()
{
stat = NO_ERROR;
CAMHAL_LOGDA("Handler: waiting for messsage...");
- TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
+ Utils::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.get(&msg);
}
CAMHAL_LOGDB("msg.command = %d", msg.command);
switch ( msg.command ) {
case CommandHandler::CAMERA_START_IMAGE_CAPTURE:
{
- stat = mCameraAdapter->startImageCapture();
+ OMXCameraAdapter::CachedCaptureParameters* cap_params =
+ static_cast<OMXCameraAdapter::CachedCaptureParameters*>(msg.arg2);
+ stat = mCameraAdapter->startImageCapture(false, cap_params);
+ delete cap_params;
break;
}
case CommandHandler::CAMERA_PERFORM_AUTOFOCUS:
@@ -3377,8 +3928,17 @@ bool OMXCameraAdapter::CommandHandler::Handler()
}
case CommandHandler::CAMERA_SWITCH_TO_EXECUTING:
{
- stat = mCameraAdapter->doSwitchToExecuting();
- break;
+ stat = mCameraAdapter->doSwitchToExecuting();
+ break;
+ }
+ case CommandHandler::CAMERA_START_REPROCESS:
+ {
+ OMXCameraAdapter::CachedCaptureParameters* cap_params =
+ static_cast<OMXCameraAdapter::CachedCaptureParameters*>(msg.arg2);
+ stat = mCameraAdapter->startReprocess();
+ stat = mCameraAdapter->startImageCapture(false, cap_params);
+ delete cap_params;
+ break;
}
}
@@ -3391,17 +3951,18 @@ bool OMXCameraAdapter::CommandHandler::Handler()
bool OMXCameraAdapter::OMXCallbackHandler::Handler()
{
- TIUTILS::Message msg;
+ Utils::Message msg;
volatile int forever = 1;
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
while(forever){
- TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
+ Utils::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.get(&msg);
+ mIsProcessed = false;
}
switch ( msg.command ) {
@@ -3423,12 +3984,43 @@ bool OMXCameraAdapter::OMXCallbackHandler::Handler()
break;
}
}
+
+ {
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ mIsProcessed = mCommandMsgQ.isEmpty();
+ if ( mIsProcessed )
+ mCondition.signal();
+ }
+ }
+
+ // force the condition to wake
+ {
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ mIsProcessed = true;
+ mCondition.signal();
}
LOG_FUNCTION_NAME_EXIT;
return false;
}
+void OMXCameraAdapter::OMXCallbackHandler::flush()
+{
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ if ( mIsProcessed )
+ return;
+
+ mCondition.wait(mLock);
+}
+
status_t OMXCameraAdapter::setExtraData(bool enable, OMX_U32 nPortIndex, OMX_EXT_EXTRADATATYPE eType) {
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
@@ -3446,7 +4038,9 @@ status_t OMXCameraAdapter::setExtraData(bool enable, OMX_U32 nPortIndex, OMX_EXT
extraDataControl.nPortIndex = nPortIndex;
extraDataControl.eExtraDataType = eType;
+#ifdef CAMERAHAL_TUNA
extraDataControl.eCameraView = OMX_2D;
+#endif
if (enable) {
extraDataControl.bEnable = OMX_TRUE;
@@ -3460,28 +4054,76 @@ status_t OMXCameraAdapter::setExtraData(bool enable, OMX_U32 nPortIndex, OMX_EXT
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-
-OMX_OTHER_EXTRADATATYPE *OMXCameraAdapter::getExtradata(OMX_OTHER_EXTRADATATYPE *extraData, OMX_U32 extraDataSize, OMX_EXTRADATATYPE type) {
- OMX_U32 remainingSize = extraDataSize;
-
- if ( NULL != extraData ) {
- while ( extraData->eType && extraData->nDataSize && extraData->data &&
- (remainingSize >= extraData->nSize)) {
- if ( type == extraData->eType ) {
- return extraData;
+OMX_OTHER_EXTRADATATYPE *OMXCameraAdapter::getExtradata(const OMX_PTR ptrPrivate, OMX_EXTRADATATYPE type) const
+{
+ if ( NULL != ptrPrivate ) {
+ const OMX_TI_PLATFORMPRIVATE *platformPrivate = (const OMX_TI_PLATFORMPRIVATE *) ptrPrivate;
+
+ CAMHAL_LOGVB("Size = %d, sizeof = %d, pAuxBuf = 0x%x, pAuxBufSize= %d, pMetaDataBufer = 0x%x, nMetaDataSize = %d",
+ platformPrivate->nSize,
+ sizeof(OMX_TI_PLATFORMPRIVATE),
+ platformPrivate->pAuxBuf1,
+ platformPrivate->pAuxBufSize1,
+ platformPrivate->pMetaDataBuffer,
+ platformPrivate->nMetaDataSize);
+ if ( sizeof(OMX_TI_PLATFORMPRIVATE) == platformPrivate->nSize ) {
+ if ( 0 < platformPrivate->nMetaDataSize ) {
+ OMX_U32 remainingSize = platformPrivate->nMetaDataSize;
+ OMX_OTHER_EXTRADATATYPE *extraData = (OMX_OTHER_EXTRADATATYPE *) platformPrivate->pMetaDataBuffer;
+ if ( NULL != extraData ) {
+ while ( extraData->eType && extraData->nDataSize && extraData->data &&
+ (remainingSize >= extraData->nSize)) {
+ if ( type == extraData->eType ) {
+ return extraData;
+ }
+ remainingSize -= extraData->nSize;
+ extraData = (OMX_OTHER_EXTRADATATYPE*) ((char*)extraData + extraData->nSize);
+ }
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE pMetaDataBuffer is NULL");
+ }
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE nMetaDataSize is size is %d",
+ ( unsigned int ) platformPrivate->nMetaDataSize);
}
- extraData = (OMX_OTHER_EXTRADATATYPE*) ((char*)extraData + extraData->nSize);
- remainingSize -= extraData->nSize;
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE size mismatch: expected = %d, received = %d",
+ ( unsigned int ) sizeof(OMX_TI_PLATFORMPRIVATE),
+ ( unsigned int ) platformPrivate->nSize);
}
+ } else {
+ CAMHAL_LOGEA("Invalid OMX_TI_PLATFORMPRIVATE");
}
// Required extradata type wasn't found
return NULL;
}
+OMXCameraAdapter::CachedCaptureParameters* OMXCameraAdapter::cacheCaptureParameters() {
+ CachedCaptureParameters* params = new CachedCaptureParameters();
+
+ params->mPendingCaptureSettings = mPendingCaptureSettings;
+ params->mPictureRotation = mPictureRotation;
+ memcpy(params->mExposureBracketingValues,
+ mExposureBracketingValues,
+ sizeof(mExposureBracketingValues));
+ memcpy(params->mExposureGainBracketingValues,
+ mExposureGainBracketingValues,
+ sizeof(mExposureGainBracketingValues));
+ memcpy(params->mExposureGainBracketingModes,
+ mExposureGainBracketingModes,
+ sizeof(mExposureGainBracketingModes));
+ params->mExposureBracketingValidEntries = mExposureBracketingValidEntries;
+ params->mExposureBracketMode = mExposureBracketMode;
+ params->mBurstFrames = mBurstFrames;
+ params->mFlushShotConfigQueue = mFlushShotConfigQueue;
+
+ return params;
+}
+
OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index)
{
LOG_FUNCTION_NAME;
@@ -3493,16 +4135,21 @@ OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index)
// Initial values
mTimeSourceDelta = 0;
onlyOnce = true;
+#ifndef OMAP_TUNA
+ mDccData.pData = NULL;
+#endif
mInitSem.Create(0);
mFlushSem.Create(0);
mUsePreviewDataSem.Create(0);
mUsePreviewSem.Create(0);
mUseCaptureSem.Create(0);
+ mUseReprocessSem.Create(0);
mStartPreviewSem.Create(0);
mStopPreviewSem.Create(0);
mStartCaptureSem.Create(0);
mStopCaptureSem.Create(0);
+ mStopReprocSem.Create(0);
mSwitchToLoadedSem.Create(0);
mCaptureSem.Create(0);
@@ -3517,6 +4164,14 @@ OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index)
mFramesWithDisplay = 0;
mFramesWithEncoder = 0;
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ mDebugProfile = 0;
+
+#endif
+
+ mPreviewPortInitialized = false;
+
LOG_FUNCTION_NAME_EXIT;
}
@@ -3524,12 +4179,17 @@ OMXCameraAdapter::~OMXCameraAdapter()
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(gAdapterLock);
+ android::AutoMutex lock(gAdapterLock);
if ( mOmxInitialized ) {
// return to OMX Loaded state
switchToLoaded();
+#ifndef OMAP_TUNA
+ saveDccFileDataSave();
+
+ closeDccFileDataSave();
+#endif
// deinit the OMX
if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid ) {
// free the handle for the Camera component
@@ -3548,11 +4208,11 @@ OMXCameraAdapter::~OMXCameraAdapter()
{
for (unsigned int i = 0 ; i < mEventSignalQ.size() ; i++ )
{
- TIUTILS::Message *msg = mEventSignalQ.itemAt(i);
+ Utils::Message *msg = mEventSignalQ.itemAt(i);
//remove from queue and free msg
if ( NULL != msg )
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
sem->Signal();
free(msg);
@@ -3564,7 +4224,7 @@ OMXCameraAdapter::~OMXCameraAdapter()
//Exit and free ref to command handling thread
if ( NULL != mCommandHandler.get() )
{
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = CommandHandler::COMMAND_EXIT;
msg.arg1 = mErrorNotifier;
mCommandHandler->clearCommandQ();
@@ -3576,7 +4236,7 @@ OMXCameraAdapter::~OMXCameraAdapter()
//Exit and free ref to callback handling thread
if ( NULL != mOMXCallbackHandler.get() )
{
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = OMXCallbackHandler::COMMAND_EXIT;
//Clear all messages pending first
mOMXCallbackHandler->clearCommandQ();
@@ -3588,10 +4248,10 @@ OMXCameraAdapter::~OMXCameraAdapter()
LOG_FUNCTION_NAME_EXIT;
}
-extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
+extern "C" CameraAdapter* OMXCameraAdapter_Factory(size_t sensor_index)
{
CameraAdapter *adapter = NULL;
- Mutex::Autolock lock(gAdapterLock);
+ android::AutoMutex lock(gAdapterLock);
LOG_FUNCTION_NAME;
@@ -3599,7 +4259,7 @@ extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
if ( adapter ) {
CAMHAL_LOGDB("New OMX Camera adapter instance created for sensor %d",sensor_index);
} else {
- CAMHAL_LOGEA("Camera adapter create failed!");
+ CAMHAL_LOGEA("OMX Camera adapter create failed for sensor index = %d!",sensor_index);
}
LOG_FUNCTION_NAME_EXIT;
@@ -3607,7 +4267,8 @@ extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
return adapter;
}
-OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData )
+OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData,
+ const OMX_CALLBACKTYPE & callbacks)
{
OMX_ERRORTYPE eError = OMX_ErrorUndefined;
@@ -3618,12 +4279,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_P
}
// setup key parameters to send to Ducati during init
- OMX_CALLBACKTYPE oCallbacks;
-
- // initialize the callback handles
- oCallbacks.EventHandler = android::OMXCameraAdapterEventHandler;
- oCallbacks.EmptyBufferDone = android::OMXCameraAdapterEmptyBufferDone;
- oCallbacks.FillBufferDone = android::OMXCameraAdapterFillBufferDone;
+ OMX_CALLBACKTYPE oCallbacks = callbacks;
// get handle
eError = OMX_GetHandle(handle, (OMX_STRING)"OMX.TI.DUCATI1.VIDEO.CAMERA", pAppData, &oCallbacks);
@@ -3638,80 +4294,422 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_P
return eError;
}
-extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
- const unsigned int starting_camera,
- const unsigned int max_camera) {
- int num_cameras_supported = 0;
- CameraProperties::Properties* properties = NULL;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_HANDLETYPE handle = NULL;
- OMX_TI_CAPTYPE caps;
+class CapabilitiesHandler
+{
+public:
+ CapabilitiesHandler()
+ {
+ mComponent = 0;
+ mIsAborted = true;
+ }
+
+ const OMX_HANDLETYPE & component() const
+ {
+ return mComponent;
+ }
+
+ OMX_HANDLETYPE & componentRef()
+ {
+ return mComponent;
+ }
+
+ status_t disableAllPorts()
+ {
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ mPortsLeftToDisable = OMX_CAMERA_NUM_PORTS;
+ mIsOk = false;
+ mIsAborted = false;
+
+ CAMHAL_LOGD("Disabling ports...");
+ const OMX_ERRORTYPE sendCommandError = OMX_SendCommand(component(),
+ OMX_CommandPortDisable, OMX_ALL, 0);
+ CAMHAL_LOGD("Disabling ports... DONE");
+
+ if ( sendCommandError != OMX_ErrorNone )
+ {
+ CAMHAL_LOGE("Failed disabling all ports, error: 0x%x", sendCommandError);
+ return Utils::ErrorUtils::omxToAndroidError(sendCommandError);
+ }
+
+ CAMHAL_LOGD("Waiting for disabling all ports will be finished...");
+ const status_t waitStatus = mCondition.waitRelative(mLock, seconds_to_nanoseconds(3));
+ CAMHAL_LOGD("Waiting for disabling all ports will be finished... DONE");
+
+ if ( waitStatus != NO_ERROR )
+ {
+ CAMHAL_LOGE("Timeout triggered while waiting for all ports to be disabled");
+ return TIMED_OUT;
+ }
+
+ if ( !mIsOk )
+ {
+ CAMHAL_LOGE("Failed to disable all ports");
+ return UNKNOWN_ERROR;
+ }
+
+ // all ports have been disabled
+ mIsAborted = true;
+
+ return NO_ERROR;
+ }
+
+ status_t switchToState(OMX_STATETYPE state)
+ {
+ CAMHAL_LOGD(".");
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+ CAMHAL_LOGD(".");
+
+ mState = state;
+ mIsOk = false;
+ mIsAborted = false;
+
+ CAMHAL_LOGD("Switching to state 0x%x...", mState);
+ const OMX_ERRORTYPE switchError = OMX_SendCommand(mComponent,
+ OMX_CommandStateSet, mState, 0);
+ CAMHAL_LOGD("Switching to state 0x%x... DONE", mState);
+
+ if ( switchError != OMX_ErrorNone )
+ {
+ CAMHAL_LOGE("Failed switching to state 0x%x, error: 0x%x", mState, switchError);
+ return Utils::ErrorUtils::omxToAndroidError(switchError);
+ }
+
+ // wait for the event for 3 seconds
+ CAMHAL_LOGD("Waiting...");
+ const status_t waitStatus = mCondition.waitRelative(mLock, seconds_to_nanoseconds(3));
+ CAMHAL_LOGD("Waiting... DONE");
+
+ // disable following events
+ mIsAborted = true;
+
+ if ( waitStatus != NO_ERROR )
+ {
+ CAMHAL_LOGE("Timeout triggered while switching to state 0x%x", mState);
+ return TIMED_OUT;
+ }
+
+ // state has been switched, check whether is was Idle
+ if ( !mIsOk )
+ {
+ CAMHAL_LOGE("Switching to state 0x%x has failed", mState);
+ return UNKNOWN_ERROR;
+ }
+
+ return NO_ERROR;
+ }
+
+ status_t fetchCapabiltiesForMode(OMX_CAMOPERATINGMODETYPE mode,
+ int sensorId,
+ CameraProperties::Properties * properties)
+ {
+ OMX_CONFIG_CAMOPERATINGMODETYPE camMode;
+
+ OMX_INIT_STRUCT_PTR (&camMode, OMX_CONFIG_CAMOPERATINGMODETYPE);
+ camMode.eCamOperatingMode = mode;
+
+ OMX_ERRORTYPE eError = OMX_SetParameter(component(),
+ ( OMX_INDEXTYPE ) OMX_IndexCameraOperatingMode,
+ &camMode);
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGE("Error while configuring camera mode in CameraAdapter_Capabilities 0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ const status_t idleSwitchError = switchToState(OMX_StateIdle);
+ if ( idleSwitchError != NO_ERROR ) {
+ CAMHAL_LOGE("Failed to switch to Idle state, error: %d", idleSwitchError);
+ return UNKNOWN_ERROR;
+ }
+
+ // get and fill capabilities
+ OMXCameraAdapter::getCaps(sensorId, properties, component());
+
+ const status_t loadedSwitchError = switchToState(OMX_StateLoaded);
+ if ( loadedSwitchError != NO_ERROR ) {
+ CAMHAL_LOGE("Failed to switch to Loaded state, error: %d", loadedSwitchError);
+ return UNKNOWN_ERROR;
+ }
+
+ return NO_ERROR;
+ }
+
+ status_t fetchCapabilitiesForSensor(int sensorId,
+ CameraProperties::Properties * properties)
+ {
+ CAMHAL_LOGD("Disabling all ports...");
+ const status_t disableAllPortsError = disableAllPorts();
+ CAMHAL_LOGD("Disabling all ports... DONE");
+
+ if ( disableAllPortsError != NO_ERROR ) {
+ CAMHAL_LOGE("Failed to disable all ports, error: %d",
+ disableAllPortsError);
+ return UNKNOWN_ERROR;
+ }
+
+ // sensor select
+ OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
+ OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
+ sensorSelect.eSensor = (OMX_SENSORSELECT)sensorId;
+
+ CAMHAL_LOGD("Selecting sensor %d...", sensorId);
+ const OMX_ERRORTYPE sensorSelectError = OMX_SetConfig(component(),
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigSensorSelect, &sensorSelect);
+ CAMHAL_LOGD("Selecting sensor %d... DONE", sensorId);
+
+ if ( sensorSelectError != OMX_ErrorNone ) {
+ CAMHAL_LOGD("Max supported sensor number reached: %d", sensorId);
+ return BAD_VALUE;
+ }
+
+ status_t err = NO_ERROR;
+ if ( sensorId == 2 ) {
+ CAMHAL_LOGD("Camera mode: STEREO");
+ properties->setMode(MODE_STEREO);
+ err = fetchCapabiltiesForMode(OMX_CaptureStereoImageCapture,
+ sensorId,
+ properties);
+ } else {
+ CAMHAL_LOGD("Camera MONO");
+
+ CAMHAL_LOGD("Camera mode: HQ ");
+ properties->setMode(MODE_HIGH_QUALITY);
+ err = fetchCapabiltiesForMode(OMX_CaptureImageProfileBase,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: VIDEO ");
+ properties->setMode(MODE_VIDEO);
+ err = fetchCapabiltiesForMode(OMX_CaptureVideo,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: ZSL ");
+ properties->setMode(MODE_ZEROSHUTTERLAG);
+ err = fetchCapabiltiesForMode(OMX_TI_CaptureImageProfileZeroShutterLag,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: HS ");
+ properties->setMode(MODE_HIGH_SPEED);
+ err = fetchCapabiltiesForMode(OMX_CaptureImageHighSpeedTemporalBracketing,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("Camera mode: CPCAM ");
+ properties->setMode(MODE_CPCAM);
+ err = fetchCapabiltiesForMode(OMX_TI_CPCam,
+ sensorId,
+ properties);
+#endif
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ }
+
+ return err;
+ }
+
+public:
+ static OMX_ERRORTYPE eventCallback(const OMX_HANDLETYPE component,
+ const OMX_PTR cookie, const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData)
+ {
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_UNUSED(pEventData);
+
+ CAMHAL_LOGD("event = 0x%x", event);
+ CAMHAL_LOGD("data1 = 0x%x", data1);
+ CAMHAL_LOGD("data2 = 0x%x", data2);
+
+ CapabilitiesHandler * handler = reinterpret_cast<CapabilitiesHandler*>(cookie);
+
+ // ensure this is out component
+ if ( handler->component() != component )
+ {
+ CAMHAL_LOGE("Wrong component handle received: %p, expecting: %p",
+ component, handler->component());
+ return OMX_ErrorBadParameter;
+ }
+
+ return handler->processEvent(event, data1, data2);
+ }
+
+ OMX_ERRORTYPE processEvent(const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2)
+ {
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ if ( mIsAborted )
+ {
+ CAMHAL_LOGE("Waiting for state switch has been aborted");
+ return OMX_ErrorNone;
+ }
+
+ switch ( event )
+ {
+ case OMX_EventCmdComplete:
+ switch ( data1 )
+ {
+ case OMX_CommandStateSet:
+ // this is our state switch command we are waiting for
+ mIsOk = static_cast<OMX_STATETYPE>(data2) == mState;
+
+ // wake up the caller
+ CAMHAL_LOGD("Waking the condition...");
+ mCondition.signal();
+ CAMHAL_LOGD("Waking the condition... DONE");
+ break;
+
+ case OMX_CommandPortDisable:
+ CAMHAL_LOGD("Decreasing disabled port count: %d", mPortsLeftToDisable);
+ mPortsLeftToDisable--;
+ if ( mPortsLeftToDisable == 0 )
+ {
+ CAMHAL_LOGD("All ports have been disabled, waking the caller...");
+ mIsOk = true;
+ mCondition.signal();
+ CAMHAL_LOGD("All ports have been disabled, waking the caller... DONE");
+ }
+ break;
+
+ default:
+ // ignore rest of the commands
+ break;
+ }
+ break;
+
+ case OMX_EventError:
+ CAMHAL_LOGE("Error event received, data1 = 0x%8x, data2 = 0x%8x", data1, data2);
+
+ // keep mIsOk in false state, indicating that request has failed
+
+ CAMHAL_LOGD("Waking the condition...");
+ mCondition.signal();
+ CAMHAL_LOGD("Waking the condition... DONE");
+ break;
+
+ default:
+ // ignore rest of the event types
+ break;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+private:
+ android::Mutex mLock;
+ android::Condition mCondition;
+ OMX_HANDLETYPE mComponent;
+ OMX_STATETYPE mState;
+ bool mIsAborted;
+ bool mIsOk;
+ int mPortsLeftToDisable;
+};
+
+extern "C" status_t OMXCameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras)
+{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(gAdapterLock);
+ supportedCameras = 0;
+
+ int num_cameras_supported = 0;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ android::AutoMutex lock(gAdapterLock);
if (!properties_array) {
CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
LOG_FUNCTION_NAME_EXIT;
- return -EINVAL;
+ return BAD_VALUE;
}
eError = OMX_Init();
if (eError != OMX_ErrorNone) {
CAMHAL_LOGEB("Error OMX_Init -0x%x", eError);
- return eError;
- }
-
- eError = OMXCameraAdapter::OMXCameraGetHandle(&handle);
- if (eError != OMX_ErrorNone) {
- CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
- goto EXIT;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
// Continue selecting sensor and then querying OMX Camera for it's capabilities
// When sensor select returns an error, we know to break and stop
while (eError == OMX_ErrorNone &&
(starting_camera + num_cameras_supported) < max_camera) {
- // sensor select
- OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
- OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
- sensorSelect.eSensor = (OMX_SENSORSELECT) num_cameras_supported;
- eError = OMX_SetConfig(handle, ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSensorSelect, &sensorSelect);
- if ( OMX_ErrorNone != eError ) {
- break;
+ CapabilitiesHandler handler;
+
+ OMX_CALLBACKTYPE callbacks;
+ callbacks.EventHandler = CapabilitiesHandler::eventCallback;
+ callbacks.EmptyBufferDone = 0;
+ callbacks.FillBufferDone = 0;
+
+ eError = OMXCameraAdapter::OMXCameraGetHandle(&handler.componentRef(), &handler, callbacks);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
+ goto EXIT;
}
- // get and fill capabilities
- properties = properties_array + starting_camera + num_cameras_supported;
- OMXCameraAdapter::getCaps(properties, handle);
+ const int sensorId = num_cameras_supported;
+ CameraProperties::Properties * properties = properties_array + starting_camera + sensorId;
+ const status_t err = handler.fetchCapabilitiesForSensor(sensorId, properties);
- // need to fill facing information
- // assume that only sensor 0 is back facing
- if (num_cameras_supported == 0) {
- properties->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_BACK);
- } else {
- properties->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_FRONT);
+ // clean up
+ if(handler.component()) {
+ CAMHAL_LOGD("Freeing the component...");
+ OMX_FreeHandle(handler.component());
+ CAMHAL_LOGD("Freeing the component... DONE");
+ handler.componentRef() = NULL;
}
+ if ( err != NO_ERROR )
+ break;
+
num_cameras_supported++;
+ CAMHAL_LOGDB("Number of OMX Cameras detected = %d \n",num_cameras_supported);
}
EXIT:
- // clean up
- if(handle) {
- OMX_FreeHandle(handle);
- handle=NULL;
- }
+ CAMHAL_LOGD("Deinit...");
OMX_Deinit();
+ CAMHAL_LOGD("Deinit... DONE");
+
+ if ( eError != OMX_ErrorNone )
+ {
+ CAMHAL_LOGE("Error: 0x%x", eError);
+ LOG_FUNCTION_NAME_EXIT;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ supportedCameras = num_cameras_supported;
LOG_FUNCTION_NAME_EXIT;
- return num_cameras_supported;
+ return NO_ERROR;
}
-};
+} // namespace Camera
+} // namespace Ti
/*--------------------Camera Adapter Class ENDS here-----------------------------*/
+
diff --git a/camera/OMXCameraAdapter/OMXCapabilities.cpp b/camera/OMXCameraAdapter/OMXCapabilities.cpp
index e1323ee..646e964 100644
--- a/camera/OMXCameraAdapter/OMXCapabilities.cpp
+++ b/camera/OMXCameraAdapter/OMXCapabilities.cpp
@@ -26,46 +26,90 @@
#include "ErrorUtils.h"
#include "TICameraParameters.h"
-namespace android {
-
-#undef LOG_TAG
-
-// Maintain a separate tag for OMXCameraAdapter logs to isolate issues OMX specific
-#define LOG_TAG "CameraHAL"
+namespace Ti {
+namespace Camera {
/************************************
* global constants and variables
*************************************/
#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
-#define FPS_MIN 5
-#define FPS_STEP 5
-#define FPS_RANGE_STEP 5
static const char PARAM_SEP[] = ",";
-static const int PARAM_SEP_CHAR = ',';
static const uint32_t VFR_OFFSET = 8;
-static const char VFR_BACKET_START[] = "(";
-static const char VFR_BRACKET_END[] = ")";
-static const char FRAMERATE_COUNT = 10;
+static const char FPS_STR_MAX_LEN = 10;
+
+static const unsigned int MANUAL_EXPOSURE_STEP = 1;
+static const unsigned int MANUAL_GAIN_ISO_MIN = 100;
+static const unsigned int MANUAL_GAIN_ISO_STEP = 100;
+
+const int OMXCameraAdapter::SENSORID_IMX060 = 300;
+const int OMXCameraAdapter::SENSORID_OV5650 = 301;
+const int OMXCameraAdapter::SENSORID_OV5640 = 302;
+const int OMXCameraAdapter::SENSORID_OV14825 = 304;
+const int OMXCameraAdapter::SENSORID_S5K4E1GA = 305;
+const int OMXCameraAdapter::SENSORID_S5K6A1GX03 = 306;
+
+const int OMXCameraAdapter::FPS_MIN = 5;
+const int OMXCameraAdapter::FPS_MAX = 30;
+const int OMXCameraAdapter::FPS_MAX_EXTENDED = 60;
+
+inline static int androidFromDucatiFrameRate(OMX_U32 frameRate) {
+ return (frameRate >> VFR_OFFSET) * CameraHal::VFR_SCALE;
+}
/**** look up tables to translate OMX Caps to Parameter ****/
const CapResolution OMXCameraAdapter::mImageCapRes [] = {
+ { 4416, 3312, "4416x3312" },
{ 4032, 3024, "4032x3024" },
{ 4000, 3000, "4000x3000" },
{ 3648, 2736, "3648x2736" },
{ 3264, 2448, "3264x2448" },
+ { 2608, 1960, "2608x1960" },
{ 2592, 1944, "2592x1944" },
{ 2592, 1728, "2592x1728" },
{ 2592, 1458, "2592x1458" },
+ { 2304, 1296, "2304x1296" },
+ { 2240, 1344, "2240x1344" },
+ { 2160, 1440, "2160x1440" },
+ { 2112, 1728, "2112x1728" },
{ 2048, 1536, "2048x1536" },
+ { 2016, 1512, "2016x1512" },
+ { 2000, 1600, "2000x1600" },
{ 1600, 1200, "1600x1200" },
{ 1280, 1024, "1280x1024" },
- { 1152, 864, "1152x864" },
- { 1280, 960, "1280x960" },
- { 640, 480, "640x480" },
- { 320, 240, "320x240" },
+ { 1152, 864, "1152x864" },
+ { 1280, 960, "1280x960" },
+ { 1024, 768, "1024x768" },
+ { 640, 480, "640x480" },
+ { 320, 240, "320x240" },
+};
+
+const CapResolution OMXCameraAdapter::mImageCapResSS [] = {
+ { 4032*2, 3024, "8064x3024" },
+ { 3648*2, 2736, "7296x2736" },
+ { 3264*2, 2448, "6528x2448" },
+ { 2592*2, 1944, "5184x1944" },
+ { 2048*2, 1536, "4096x1536" },
+ { 1600*2, 1200, "3200x1200" },
+ { 1280*2, 960, "2560x960" },
+ { 1024*2, 768, "2048x768" },
+ { 640*2, 480, "1280x480" },
+ { 320*2, 240, "640x240" },
+};
+
+const CapResolution OMXCameraAdapter::mImageCapResTB [] = {
+ { 4032, 3024*2, "4032x6048" },
+ { 3648, 2736*2, "3648x5472" },
+ { 3264, 2448*2, "3264x4896" },
+ { 2592, 1944*2, "2592x3888" },
+ { 2048, 1536*2, "2048x3072" },
+ { 1600, 1200*2, "1600x2400" },
+ { 1280, 960*2, "1280x1920" },
+ { 1024, 768*2, "1024x1536" },
+ { 640, 480*2, "640x960" },
+ { 320, 240*2, "320x480" },
};
const CapResolution OMXCameraAdapter::mPreviewRes [] = {
@@ -81,9 +125,57 @@ const CapResolution OMXCameraAdapter::mPreviewRes [] = {
{ 352, 288, "352x288" },
{ 240, 160, "240x160" },
{ 176, 144, "176x144" },
+ { 160, 120, "160x120" },
{ 128, 96, "128x96" },
};
+const CapResolution OMXCameraAdapter::mPreviewPortraitRes [] = {
+ //Portrait resolutions
+ { 1088, 1920, "1088x1920" },
+ { 720, 1280, "720x1280" },
+ { 480, 800, "480x800" },
+ { 576, 720, "576x720" },
+ { 576, 768, "576x768" },
+ { 480, 720, "480x720" },
+ { 480, 640, "480x640" },
+ { 288, 352, "288x352" },
+ { 240, 320, "240x320" },
+ { 160, 240, "160x240" },
+ { 144, 176, "144x176" },
+ { 120, 160, "120x160"},
+ { 96, 128, "96x128" }
+};
+
+const CapResolution OMXCameraAdapter::mPreviewResSS [] = {
+ { 1920*2, 1080, "3840x1080" },
+ { 1280*2, 720, "2560x720" },
+ { 800*2, 480, "1600x480" },
+ { 720*2, 576, "1440x576" },
+ { 720*2, 480, "1440x480" },
+ { 768*2, 576, "1536x576" },
+ { 640*2, 480, "1280x480" },
+ { 320*2, 240, "640x240" },
+ { 352*2, 288, "704x288" },
+ { 240*2, 160, "480x160" },
+ { 176*2, 144, "352x144" },
+ { 128*2, 96, "256x96" }
+};
+
+const CapResolution OMXCameraAdapter::mPreviewResTB [] = {
+ { 1920, 1080*2, "1920x2160" },
+ { 1280, 720*2, "1280x1440" },
+ { 800, 480*2, "800x960" },
+ { 720, 576*2, "720x1152" },
+ { 720, 480*2, "720x960" },
+ { 768, 576*2, "768x1152" },
+ { 640, 480*2, "640x960" },
+ { 320, 240*2, "320x480" },
+ { 352, 288*2, "352x576" },
+ { 240, 160*2, "240x320" },
+ { 176, 144*2, "176x288" },
+ { 128, 96*2, "128x192" },
+};
+
const CapResolution OMXCameraAdapter::mThumbRes [] = {
{ 640, 480, "640x480" },
{ 160, 120, "160x120" },
@@ -96,16 +188,42 @@ const CapResolution OMXCameraAdapter::mThumbRes [] = {
};
const CapPixelformat OMXCameraAdapter::mPixelformats [] = {
- { OMX_COLOR_FormatCbYCrY, CameraParameters::PIXEL_FORMAT_YUV422I },
- { OMX_COLOR_FormatYUV420SemiPlanar, CameraParameters::PIXEL_FORMAT_YUV420SP },
- { OMX_COLOR_Format16bitRGB565, CameraParameters::PIXEL_FORMAT_RGB565 },
- { OMX_COLOR_FormatRawBayer10bit, TICameraParameters::PIXEL_FORMAT_RAW },
- { OMX_COLOR_FormatYUV420SemiPlanar, CameraParameters::PIXEL_FORMAT_YUV420P },
+ { OMX_COLOR_FormatCbYCrY, android::CameraParameters::PIXEL_FORMAT_YUV422I },
+ { OMX_COLOR_FormatYUV420SemiPlanar, android::CameraParameters::PIXEL_FORMAT_YUV420SP },
+ { OMX_COLOR_Format16bitRGB565, android::CameraParameters::PIXEL_FORMAT_RGB565 },
+ { OMX_COLOR_FormatYUV420SemiPlanar, android::CameraParameters::PIXEL_FORMAT_YUV420P },
+ { OMX_COLOR_FormatUnused, TICameraParameters::PIXEL_FORMAT_UNUSED },
+ { OMX_COLOR_FormatRawBayer10bit, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB },
+};
+
+const userToOMX_LUT OMXCameraAdapter::mFrameLayout [] = {
+ { TICameraParameters::S3D_NONE, OMX_TI_StereoFrameLayout2D },
+ { TICameraParameters::S3D_TB_FULL, OMX_TI_StereoFrameLayoutTopBottom },
+ { TICameraParameters::S3D_SS_FULL, OMX_TI_StereoFrameLayoutLeftRight },
+#ifndef OMAP_TUNA
+ { TICameraParameters::S3D_TB_SUBSAMPLED, OMX_TI_StereoFrameLayoutTopBottomSubsample },
+ { TICameraParameters::S3D_SS_SUBSAMPLED, OMX_TI_StereoFrameLayoutLeftRightSubsample },
+#endif
+};
+
+const LUTtype OMXCameraAdapter::mLayoutLUT = {
+ ARRAY_SIZE(mFrameLayout),
+ mFrameLayout
+};
+
+const CapCodingFormat OMXCameraAdapter::mImageCodingFormat [] = {
+ { OMX_IMAGE_CodingJPEG, android::CameraParameters::PIXEL_FORMAT_JPEG },
+ { (OMX_IMAGE_CODINGTYPE)OMX_TI_IMAGE_CodingJPS, TICameraParameters::PIXEL_FORMAT_JPS },
+ { (OMX_IMAGE_CODINGTYPE)OMX_TI_IMAGE_CodingMPO, TICameraParameters::PIXEL_FORMAT_MPO },
};
const CapFramerate OMXCameraAdapter::mFramerates [] = {
+ { 60, "60" },
{ 30, "30" },
+ { 24, "24" },
+ { 20, "20" },
{ 15, "15" },
+ { 10, "10" },
};
const CapZoom OMXCameraAdapter::mZoomStages [] = {
@@ -185,37 +303,45 @@ const CapISO OMXCameraAdapter::mISOStages [] = {
// mapped values have to match with new_sensor_MSP.h
const CapU32 OMXCameraAdapter::mSensorNames [] = {
- { 300, "IMX060" },
- { 301, "OV5650" },
- { 305, "S5K4E1GA"},
- { 306, "S5K6A1GX03" }
+ { SENSORID_IMX060, "IMX060" },
+ { SENSORID_OV5650, "OV5650" },
+ { SENSORID_OV5640, "OV5640" },
+ { SENSORID_OV14825, "OV14825"},
+ { SENSORID_S5K4E1GA, "S5K4E1GA"},
+ { SENSORID_S5K6A1GX03, "S5K6A1GX03" }
// TODO(XXX): need to account for S3D camera later
};
-// values for supported variable framerates sorted in ascending order
-// CapU32Pair = (max fps, min fps, string representation)
-const CapU32Pair OMXCameraAdapter::mVarFramerates [] = {
- { 15, 15, "(15000,15000)"},
- { 30, 15, "(15000,30000)" },
- { 30, 24, "(24000,30000)" },
-// TODO(XXX): Removing 30,30 range to limit 1080p at 24fps. Will put back soon.
-#if 0
- { 30, 30, "(30000,30000)" },
-#endif
+const userToOMX_LUT OMXCameraAdapter::mAutoConvergence [] = {
+ { TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE, OMX_TI_AutoConvergenceModeDisable },
+ { TICameraParameters::AUTOCONVERGENCE_MODE_FRAME, OMX_TI_AutoConvergenceModeFrame },
+ { TICameraParameters::AUTOCONVERGENCE_MODE_CENTER, OMX_TI_AutoConvergenceModeCenter },
+ { TICameraParameters::AUTOCONVERGENCE_MODE_TOUCH, OMX_TI_AutoConvergenceModeFocusFaceTouch },
+ { TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL, OMX_TI_AutoConvergenceModeManual }
};
-/************************************
- * static helper functions
- *************************************/
-// utility function to remove last seperator
-void remove_last_sep(char* buffer) {
- char* last_sep = NULL;
- last_sep = strrchr(buffer, PARAM_SEP_CHAR);
- if (last_sep != NULL) {
- last_sep[0] = '\0';
- }
-}
+const LUTtype OMXCameraAdapter::mAutoConvergenceLUT = {
+ ARRAY_SIZE(mAutoConvergence),
+ mAutoConvergence
+};
+const userToOMX_LUT OMXCameraAdapter::mBracketingModes [] = {
+ { TICameraParameters::TEMP_BRACKETING , OMX_BracketTemporal },
+ { TICameraParameters::EXPOSURE_BRACKETING , OMX_BracketExposureRelativeInEV }
+};
+
+const LUTtype OMXCameraAdapter::mBracketingModesLUT = {
+ ARRAY_SIZE(mBracketingModes),
+ mBracketingModes
+};
+
+// values for supported camera facing direction
+const CapU32 OMXCameraAdapter::mFacing [] = {
+#ifndef OMAP_TUNA
+ { OMX_TI_SENFACING_BACK , TICameraParameters::FACING_BACK },
+ { OMX_TI_SENFACING_FRONT, TICameraParameters::FACING_FRONT},
+#endif
+};
/*****************************************
* internal static function declarations
@@ -223,24 +349,28 @@ void remove_last_sep(char* buffer) {
/**** Utility functions to help translate OMX Caps to Parameter ****/
-status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
- const CapPixelformat *cap,
- size_t capCount,
- char * buffer,
- size_t bufferSize) {
+status_t OMXCameraAdapter::encodeImageCodingFormatCap(OMX_IMAGE_CODINGTYPE format,
+ const CapCodingFormat *cap,
+ size_t capCount,
+ char * buffer) {
+
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
if ( ( NULL == buffer ) || ( NULL == cap ) ) {
CAMHAL_LOGEA("Invalid input arguments");
- return -EINVAL;
+ ret = -EINVAL;
}
- for ( unsigned int i = 0; i < capCount; i++ ) {
- if ( format == cap[i].pixelformat ) {
- strncat(buffer, cap[i].param, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
+ if ( NO_ERROR == ret ) {
+ for ( unsigned int i = 0 ; i < capCount ; i++ ) {
+ if ( format == cap[i].imageCodingFormat ) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, ((((int)MAX_PROP_VALUE_LENGTH - 1 - (int)strlen(buffer)) < 0) ? 0 : (MAX_PROP_VALUE_LENGTH - 1 - strlen(buffer))));
+ }
+ strncat(buffer, cap[i].param, ((((int)MAX_PROP_VALUE_LENGTH - 1 - (int)strlen(buffer)) < 0) ? 0 : (MAX_PROP_VALUE_LENGTH - 1 - strlen(buffer))));
+ }
}
}
@@ -249,16 +379,13 @@ status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
return ret;
}
-status_t OMXCameraAdapter::encodeFramerateCap(OMX_U32 framerateMax,
- OMX_U32 framerateMin,
- const CapFramerate *cap,
- size_t capCount,
- char * buffer,
- size_t bufferSize) {
+status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
+ const CapPixelformat *cap,
+ size_t capCount,
+ char * buffer,
+ size_t bufferSize)
+{
status_t ret = NO_ERROR;
- bool minInserted = false;
- bool maxInserted = false;
- char tmpBuffer[FRAMERATE_COUNT];
LOG_FUNCTION_NAME;
@@ -267,113 +394,86 @@ status_t OMXCameraAdapter::encodeFramerateCap(OMX_U32 framerateMax,
return -EINVAL;
}
- for ( unsigned int i = 0; i < capCount; i++ ) {
- if ( (framerateMax >= cap[i].num) && (framerateMin <= cap[i].num) ) {
- strncat(buffer, cap[i].param, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
- if ( cap[i].num == framerateMin ) {
- minInserted = true;
+ for ( unsigned int i = 0 ; i < capCount ; i++ )
+ {
+ if ( format == cap[i].pixelformat )
+ {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
+ strncat(buffer, cap[i].param, bufferSize - 1);
}
}
- if ( cap[i].num == framerateMax ) {
- maxInserted = true;
- }
- }
-
- if ( !maxInserted ) {
- memset(tmpBuffer, 0, FRAMERATE_COUNT);
- snprintf(tmpBuffer, FRAMERATE_COUNT - 1, "%u,", ( unsigned int ) framerateMax);
- strncat(buffer, tmpBuffer, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
- }
-
- if ( !minInserted ) {
- memset(tmpBuffer, 0, FRAMERATE_COUNT);
- snprintf(tmpBuffer, FRAMERATE_COUNT - 1, "%u,", ( unsigned int ) framerateMin);
- strncat(buffer, tmpBuffer, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
- }
-
- remove_last_sep(buffer);
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::encodeVFramerateCap(OMX_TI_CAPTYPE &caps,
- const CapU32Pair *cap,
- size_t capCount,
- char *buffer,
- char *defaultRange,
- size_t bufferSize) {
- status_t ret = NO_ERROR;
- uint32_t minVFR, maxVFR;
- int default_index = -1;
-
+void OMXCameraAdapter::encodeFrameRates(const int minFrameRate, const int maxFrameRate,
+ const OMX_TI_CAPTYPE & caps, const CapFramerate * const fixedFrameRates,
+ const int frameRateCount, android::Vector<FpsRange> & fpsRanges) {
LOG_FUNCTION_NAME;
- if ( (NULL == buffer) || (NULL == cap) ) {
- CAMHAL_LOGEA("Invalid input arguments");
- return -EINVAL;
+ if ( minFrameRate == maxFrameRate ) {
+ // single fixed frame rate supported
+ fpsRanges.add(FpsRange(minFrameRate, maxFrameRate));
+ return;
}
- if(caps.ulPrvVarFPSModesCount < 1) {
- return NO_ERROR;
- }
+ // insert min and max frame rates
+ fpsRanges.add(FpsRange(minFrameRate, minFrameRate));
+ fpsRanges.add(FpsRange(maxFrameRate, maxFrameRate));
- // Assumption: last range in tPrvVarFPSModes will be for S30FPSHD mode
- minVFR = caps.tPrvVarFPSModes[caps.ulPrvVarFPSModesCount-1].nVarFPSMin >> VFR_OFFSET;
- maxVFR = caps.tPrvVarFPSModes[caps.ulPrvVarFPSModesCount-1].nVarFPSMax >> VFR_OFFSET;
+ // insert variable frame rates
+ for ( int i = 0; i < static_cast<int>(caps.ulPrvVarFPSModesCount); ++i ) {
+ const FpsRange fpsRange = FpsRange(
+ max(androidFromDucatiFrameRate(caps.tPrvVarFPSModes[i].nVarFPSMin), minFrameRate),
+ min(androidFromDucatiFrameRate(caps.tPrvVarFPSModes[i].nVarFPSMax), maxFrameRate));
- if (minVFR < FPS_MIN) {
- minVFR = FPS_MIN;
+ if ( fpsRange.isFixed() ) {
+ // this range is either min or max fixed frame rate, already added above
+ continue;
+ }
+
+ fpsRanges.add(fpsRange);
}
- for (unsigned int i = 0; i < capCount; i++) {
- // add cap[i] if it is in range and maxVFR != minVFR
- if ((maxVFR >= cap[i].num1) && (minVFR <= cap[i].num2)) {
- if (buffer[0] != '\0') {
- strncat(buffer, PARAM_SEP, bufferSize - 1);
- }
- strncat(buffer, cap[i].param, bufferSize - 1);
+ // insert fixed frame rates
+ for ( int i = 0; i < frameRateCount; ++i ) {
+ const int fixedFrameRate = fixedFrameRates[i].num * CameraHal::VFR_SCALE;
- // choose the max variable framerate as default
- if (cap[i].num1 != cap[i].num2) {
- default_index = i;
- }
+ if ( fixedFrameRate < minFrameRate || fixedFrameRate > maxFrameRate ) {
+ // not supported by hardware
+ continue;
}
- }
- // if we haven't found any caps in the list to populate
- // just use the min and max
- if (buffer[0] == '\0') {
- snprintf(buffer, bufferSize - 1,
- "(%u,%u)",
- minVFR * CameraHal::VFR_SCALE,
- maxVFR * CameraHal::VFR_SCALE);
+ const FpsRange fpsRange = FpsRange(fixedFrameRate, fixedFrameRate);
+ fpsRanges.add(fpsRange);
}
- if (default_index != -1) {
- snprintf(defaultRange, (MAX_PROP_VALUE_LENGTH - 1), "%lu,%lu",
- cap[default_index].num2 * CameraHal::VFR_SCALE,
- cap[default_index].num1 * CameraHal::VFR_SCALE);
- } else {
- snprintf(defaultRange, (MAX_PROP_VALUE_LENGTH - 1), "%u,%u",
- minVFR * CameraHal::VFR_SCALE, maxVFR * CameraHal::VFR_SCALE);
- }
-
- LOG_FUNCTION_NAME_EXIT;
+ // sort first by max, then by min, according to Android API requirements
+ fpsRanges.sort(FpsRange::compare);
- return ret;
+ // remove duplicated frame rates
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()) - 1; ) {
+ const FpsRange & current = fpsRanges.itemAt(i);
+ const FpsRange & next = fpsRanges.itemAt(i + 1);
+ if ( current == next ) {
+ fpsRanges.removeAt(i + 1);
+ } else {
+ i++;
+ }
+ }
}
size_t OMXCameraAdapter::encodeZoomCap(OMX_S32 maxZoom,
const CapZoom *cap,
size_t capCount,
char * buffer,
- size_t bufferSize) {
+ size_t bufferSize)
+{
status_t res = NO_ERROR;
size_t ret = 0;
@@ -387,12 +487,13 @@ size_t OMXCameraAdapter::encodeZoomCap(OMX_S32 maxZoom,
for ( unsigned int i = 0; i < capCount; i++ ) {
if ( cap[i].num <= maxZoom ) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
strncat(buffer, cap[i].param, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
ret++;
}
}
- remove_last_sep(buffer);
LOG_FUNCTION_NAME_EXIT;
@@ -403,7 +504,8 @@ status_t OMXCameraAdapter::encodeISOCap(OMX_U32 maxISO,
const CapISO *cap,
size_t capCount,
char * buffer,
- size_t bufferSize) {
+ size_t bufferSize)
+{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
@@ -415,11 +517,12 @@ status_t OMXCameraAdapter::encodeISOCap(OMX_U32 maxISO,
for ( unsigned int i = 0; i < capCount; i++ ) {
if ( cap[i].num <= maxISO) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
strncat(buffer, cap[i].param, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
}
}
- remove_last_sep(buffer);
LOG_FUNCTION_NAME_EXIT;
@@ -430,7 +533,8 @@ status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res,
const CapResolution *cap,
size_t capCount,
char * buffer,
- size_t bufferSize) {
+ size_t bufferSize)
+{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
@@ -445,8 +549,10 @@ status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res,
(cap[i].height <= res.nHeightMax) &&
(cap[i].width >= res.nWidthMin) &&
(cap[i].height >= res.nHeightMin) ) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
strncat(buffer, cap[i].param, bufferSize -1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
}
}
@@ -455,59 +561,287 @@ status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res,
return ret;
}
-status_t OMXCameraAdapter::insertImageSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::encodeSizeCap3D(OMX_TI_CAPRESTYPE &res,
+ const CapResolution *cap,
+ size_t capCount,
+ char * buffer,
+ size_t bufferSize)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( (NULL == buffer) || (NULL == cap) ) {
+ CAMHAL_LOGEA("Invalid input arguments");
+ return -EINVAL;
+ }
+
+ for ( unsigned int i = 0 ; i < capCount ; i++ ) {
+ if ( (cap[i].width <= res.nWidthMax) &&
+ (cap[i].height <= res.nHeightMax) &&
+ (cap[i].width >= res.nWidthMin) &&
+ (cap[i].height >= res.nHeightMin)
+#ifndef OMAP_TUNA
+ && (cap[i].width * cap[i].height <= res.nMaxResInPixels)
+#endif
+ ) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
+ strncat(buffer, cap[i].param, bufferSize -1);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertImageSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
+ int s3d_detected = 0;
+ int s3d_ss_detected = 0;
+ int s3d_tb_detected = 0;
LOG_FUNCTION_NAME;
+#ifndef OMAP_TUNA
+ for ( unsigned int i = 0 ; i < caps.ulCapFrameLayoutCount; i++ ) {
+ if (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottom)
+ {
+ s3d_tb_detected = 1;
+ }
+ else if (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRight)
+ {
+ s3d_ss_detected = 1;
+ }
+ else if ( (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottomSubsample)
+ || (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRightSubsample) )
+ {
+ s3d_detected = 1;
+ }
+ }
+#endif
+
memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
- ret = encodeSizeCap(caps.tImageResRange,
+ // Check if we are in 2d mode
+ if (!s3d_ss_detected && !s3d_tb_detected && !s3d_detected)
+ {
+ ret = encodeSizeCap(caps.tImageResRange,
mImageCapRes,
ARRAY_SIZE(mImageCapRes),
supported,
MAX_PROP_VALUE_LENGTH);
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
- } else {
- remove_last_sep(supported);
- params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported);
+ }
+ params->set(CameraProperties::MAX_PICTURE_WIDTH, caps.tImageResRange.nWidthMax);
+ params->set(CameraProperties::MAX_PICTURE_HEIGHT, caps.tImageResRange.nHeightMax);
}
+ else // 3d mode
+ {
+ if (s3d_tb_detected)
+ {
+ ret = encodeSizeCap3D(caps.tImageResRange,
+ mImageCapResTB,
+ ARRAY_SIZE(mImageCapResTB),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES, supported);
+ }
- LOG_FUNCTION_NAME;
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ if (s3d_ss_detected)
+ {
+ ret = encodeSizeCap3D(caps.tImageResRange,
+ mImageCapResSS,
+ ARRAY_SIZE(mImageCapResSS),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES, supported);
+ }
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ if (s3d_detected)
+ {
+ ret = encodeSizeCap3D(caps.tImageResRange,
+ mImageCapRes,
+ ARRAY_SIZE(mImageCapRes),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES, supported);
+ }
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES, supported);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
+ int s3d_detected = 0;
+ int s3d_ss_detected = 0;
+ int s3d_tb_detected = 0;
LOG_FUNCTION_NAME;
+#ifndef OMAP_TUNA
+ for ( unsigned int i = 0 ; i < caps.ulPrvFrameLayoutCount; i++ ) {
+ if (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottom)
+ {
+ s3d_tb_detected = 1;
+ }
+ else if (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRight)
+ {
+ s3d_ss_detected = 1;
+ }
+ else if ( (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottomSubsample)
+ || (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRightSubsample) )
+ {
+ s3d_detected = 1;
+ }
+ }
+#endif
+
memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
- ret = encodeSizeCap(caps.tPreviewResRange,
+ // Check if we are in 2d mode
+ if (!s3d_ss_detected && !s3d_tb_detected && !s3d_detected)
+ {
+ ret = encodeSizeCap(caps.tPreviewResRange,
mPreviewRes,
ARRAY_SIZE(mPreviewRes),
supported,
MAX_PROP_VALUE_LENGTH);
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("Error inserting supported preview sizes 0x%x", ret);
- } else {
- remove_last_sep(supported);
- params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported Landscape preview sizes 0x%x", ret);
+ return ret;
+ }
+
+#ifndef OMAP_TUNA
+ /* Insert Portait Resolutions by verifying Potrait Capability Support */
+ ret = encodeSizeCap(caps.tRotatedPreviewResRange,
+ mPreviewPortraitRes,
+ ARRAY_SIZE(mPreviewPortraitRes),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported Potrait preview sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported);
+ }
+#endif
}
+ else // 3d mode
+ {
+ if (s3d_tb_detected)
+ {
+ ret = encodeSizeCap3D(caps.tPreviewResRange,
+ mPreviewResTB,
+ ARRAY_SIZE(mPreviewResTB),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported 3D TB preview sizes 0x%x", ret);
+ return ret;
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES, supported);
+ }
- LOG_FUNCTION_NAME;
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ if (s3d_ss_detected)
+ {
+ ret = encodeSizeCap3D(caps.tPreviewResRange,
+ mPreviewResSS,
+ ARRAY_SIZE(mPreviewResSS),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported 3D SS preview sizes 0x%x", ret);
+ return ret;
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES, supported);
+ }
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ if (s3d_detected)
+ {
+ ret = encodeSizeCap3D(caps.tPreviewResRange,
+ mPreviewRes,
+ ARRAY_SIZE(mPreviewRes),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported preview sizes 0x%x", ret);
+ return ret;
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, supported);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertVideoSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertVideoSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -524,16 +858,16 @@ status_t OMXCameraAdapter::insertVideoSizes(CameraProperties::Properties* params
if ( NO_ERROR != ret ) {
CAMHAL_LOGEB("Error inserting supported video sizes 0x%x", ret);
} else {
- remove_last_sep(supported);
params->set(CameraProperties::SUPPORTED_VIDEO_SIZES, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -551,11 +885,14 @@ status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params
CAMHAL_LOGEB("Error inserting supported thumbnail sizes 0x%x", ret);
} else {
//CTS Requirement: 0x0 should always be supported
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, "0x0", MAX_PROP_NAME_LENGTH);
params->set(CameraProperties::SUPPORTED_THUMBNAIL_SIZES, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -580,25 +917,26 @@ status_t OMXCameraAdapter::insertZoomStages(CameraProperties::Properties* params
params->set(CameraProperties::SUPPORTED_ZOOM_STAGES, zoomStageCount - 1); //As per CTS requirement
if ( 0 == zoomStageCount ) {
- params->set(CameraProperties::ZOOM_SUPPORTED, TICameraParameters::ZOOM_UNSUPPORTED);
- params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, TICameraParameters::ZOOM_UNSUPPORTED);
+ params->set(CameraProperties::ZOOM_SUPPORTED, android::CameraParameters::FALSE);
+ params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, android::CameraParameters::FALSE);
} else {
- params->set(CameraProperties::ZOOM_SUPPORTED, TICameraParameters::ZOOM_SUPPORTED);
- params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, TICameraParameters::ZOOM_SUPPORTED);
+ params->set(CameraProperties::ZOOM_SUPPORTED, android::CameraParameters::TRUE);
+ params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, android::CameraParameters::TRUE);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
LOG_FUNCTION_NAME;
- memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ memset(supported, '\0', sizeof(supported));
for ( int i = 0 ; i < caps.ulImageFormatCount ; i++ ) {
ret = encodePixelformatCap(caps.eImageFormats[i],
@@ -606,24 +944,38 @@ status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* para
ARRAY_SIZE(mPixelformats),
supported,
MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture formats 0x%x", ret);
+ break;
+ }
+ }
+
+#ifndef OMAP_TUNA
+ for (int i = 0; i < caps.ulImageCodingFormatCount ; i++) {
+ ret = encodeImageCodingFormatCap(caps.eImageCodingFormat[i],
+ mImageCodingFormat,
+ ARRAY_SIZE(mImageCodingFormat),
+ supported);
+
if ( NO_ERROR != ret ) {
CAMHAL_LOGEB("Error inserting supported picture formats 0x%x", ret);
break;
}
}
+#endif
if ( NO_ERROR == ret ) {
- //jpeg is not supported in OMX capabilies yet
- strncat(supported, CameraParameters::PIXEL_FORMAT_JPEG, MAX_PROP_VALUE_LENGTH - 1);
params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -646,74 +998,132 @@ status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* pa
if ( NO_ERROR == ret ) {
// need to advertise we support YV12 format
// We will program preview port with NV21 when we see application set YV12
- strncat(supported, CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat(supported, android::CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
- status_t ret = NO_ERROR;
- char supported[MAX_PROP_VALUE_LENGTH];
+status_t OMXCameraAdapter::insertFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ // collect supported normal frame rates
+ {
+ android::Vector<FpsRange> fpsRanges;
+
+ const int minFrameRate = max<int>(FPS_MIN * CameraHal::VFR_SCALE,
+ androidFromDucatiFrameRate(caps.xFramerateMin));
+ const int maxFrameRate = min<int>(FPS_MAX * CameraHal::VFR_SCALE,
+ androidFromDucatiFrameRate(caps.xFramerateMax));
+
+ if ( minFrameRate > maxFrameRate ) {
+ CAMHAL_LOGE("Invalid frame rate range: [%d .. %d]", caps.xFramerateMin, caps.xFramerateMax);
+ return BAD_VALUE;
+ }
- LOG_FUNCTION_NAME;
+ encodeFrameRates(minFrameRate, maxFrameRate, caps, mFramerates, ARRAY_SIZE(mFramerates), fpsRanges);
- memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ // populate variable frame rates
+ char supported[MAX_PROP_VALUE_LENGTH];
+ char defaultRange[MAX_PROP_VALUE_LENGTH];
- ret = encodeFramerateCap(caps.xFramerateMax >> VFR_OFFSET,
- caps.xFramerateMin >> VFR_OFFSET,
- mFramerates,
- ARRAY_SIZE(mFramerates),
- supported,
- MAX_PROP_VALUE_LENGTH);
+ memset(supported, 0, sizeof(supported));
+ memset(defaultRange, 0, sizeof(defaultRange));
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("Error inserting supported preview framerates 0x%x", ret);
- } else {
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+ const FpsRange & fpsRange = fpsRanges.itemAt(i);
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ char tmp[MAX_PROP_VALUE_LENGTH];
+ snprintf(tmp, sizeof(tmp) - 1, "(%d,%d)", fpsRange.min(), fpsRange.max());
+ strcat(supported, tmp);
+ }
+
+ const FpsRange & defaultFpsRange = fpsRanges.itemAt(fpsRanges.size() - 1);
+ snprintf(defaultRange, sizeof(defaultRange) - 1, "%d,%d", defaultFpsRange.min(), defaultFpsRange.max());
+
+ CAMHAL_LOGD("Supported framerate ranges: %s", supported);
+ CAMHAL_LOGD("Default framerate range: [%s]", defaultRange);
+
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported);
+ params->set(CameraProperties::FRAMERATE_RANGE, defaultRange);
+
+ // populate fixed frame rates
+ memset(supported, 0, sizeof(supported));
+ memset(defaultRange, 0, sizeof(defaultRange));
+
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+ const FpsRange & fpsRange = fpsRanges.itemAt(i);
+ if ( fpsRange.isFixed() && (fpsRange.min()%CameraHal::VFR_SCALE) == 0 ) {
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ char tmp[MAX_PROP_VALUE_LENGTH];
+ snprintf(tmp, sizeof(tmp) - 1, "%d", fpsRange.min()/CameraHal::VFR_SCALE);
+ strcat(supported, tmp);
+ }
+ }
+
+ CAMHAL_LOGD("Supported preview framerates: %s", supported);
params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, supported);
+
+ // insert default frame rate only if it is fixed
+ if ( defaultFpsRange.isFixed() && (defaultFpsRange.min()%CameraHal::VFR_SCALE) == 0 ) {
+ snprintf(defaultRange, sizeof(defaultRange) - 1, "%d", defaultFpsRange.min()/CameraHal::VFR_SCALE);
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, defaultRange);
+ }
}
- LOG_FUNCTION_NAME;
+ // collect supported extended frame rates
+ {
+ android::Vector<FpsRange> fpsRanges;
- return ret;
-}
+ const int minFrameRate = max<int>(FPS_MIN * CameraHal::VFR_SCALE,
+ androidFromDucatiFrameRate(caps.xFramerateMin));
+ const int maxFrameRate = min<int>(FPS_MAX_EXTENDED * CameraHal::VFR_SCALE,
+ androidFromDucatiFrameRate(caps.xFramerateMax));
-status_t OMXCameraAdapter::insertVFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
- status_t ret = NO_ERROR;
- char supported[MAX_PROP_VALUE_LENGTH];
- char defaultRange[MAX_PROP_VALUE_LENGTH];
+ encodeFrameRates(minFrameRate, maxFrameRate, caps, mFramerates, ARRAY_SIZE(mFramerates), fpsRanges);
- LOG_FUNCTION_NAME;
+ // populate variable frame rates
+ char supported[MAX_PROP_VALUE_LENGTH];
+ memset(supported, 0, sizeof(supported) - 1);
- memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+ const FpsRange & fpsRange = fpsRanges.itemAt(i);
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ char tmp[MAX_PROP_VALUE_LENGTH];
+ snprintf(tmp, sizeof(tmp) - 1, "(%d,%d)", fpsRange.min(), fpsRange.max());
+ strcat(supported, tmp);
+ }
- ret = encodeVFramerateCap(caps,
- mVarFramerates,
- ARRAY_SIZE(mVarFramerates),
- supported,
- defaultRange,
- MAX_PROP_VALUE_LENGTH);
+ CAMHAL_LOGD("Supported framerate ranges extended: %s", supported);
+ params->set(CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED, supported);
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("Error inserting supported preview framerate ranges 0x%x", ret);
- } else {
- params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported);
- CAMHAL_LOGDB("framerate ranges %s", supported);
- params->set(CameraProperties::FRAMERATE_RANGE, DEFAULT_FRAMERATE_RANGE_IMAGE);
- params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, DEFAULT_FRAMERATE_RANGE_VIDEO);
- params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, DEFAULT_FRAMERATE_RANGE_IMAGE);
- CAMHAL_LOGDB("Default framerate range: [%s]", DEFAULT_FRAMERATE_RANGE_IMAGE);
- }
+ // populate fixed frame rates
+ memset(supported, 0, sizeof(supported) - 1);
- LOG_FUNCTION_NAME;
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+ const FpsRange & fpsRange = fpsRanges.itemAt(i);
+ if ( fpsRange.isFixed() && (fpsRange.min()%CameraHal::VFR_SCALE) == 0 ) {
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ char tmp[MAX_PROP_VALUE_LENGTH];
+ snprintf(tmp, sizeof(tmp) - 1, "%d", fpsRange.min()/CameraHal::VFR_SCALE);
+ strcat(supported, tmp);
+ }
+ }
- return ret;
+ CAMHAL_LOGD("Supported extended preview framerates: %s", supported);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT, supported);
+ }
+
+ return OK;
}
-status_t OMXCameraAdapter::insertEVs(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertEVs(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -727,12 +1137,13 @@ status_t OMXCameraAdapter::insertEVs(CameraProperties::Properties* params, OMX_T
snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.xEVCompensationMax * 10 ));
params->set(CameraProperties::SUPPORTED_EV_MAX, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -751,12 +1162,13 @@ status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params,
params->set(CameraProperties::SUPPORTED_ISO_VALUES, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertIPPModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertIPPModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -766,32 +1178,31 @@ status_t OMXCameraAdapter::insertIPPModes(CameraProperties::Properties* params,
//Off is always supported
strncat(supported, TICameraParameters::IPP_NONE, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
if ( caps.bLensDistortionCorrectionSupported ) {
- strncat(supported, TICameraParameters::IPP_LDC, MAX_PROP_NAME_LENGTH);
strncat(supported, PARAM_SEP, 1);
+ strncat(supported, TICameraParameters::IPP_LDC, MAX_PROP_NAME_LENGTH);
}
if ( caps.bISONoiseFilterSupported ) {
- strncat(supported, TICameraParameters::IPP_NSF, MAX_PROP_NAME_LENGTH);
strncat(supported, PARAM_SEP, 1);
+ strncat(supported, TICameraParameters::IPP_NSF, MAX_PROP_NAME_LENGTH);
}
if ( caps.bISONoiseFilterSupported && caps.bLensDistortionCorrectionSupported ) {
- strncat(supported, TICameraParameters::IPP_LDCNSF, MAX_PROP_NAME_LENGTH);
strncat(supported, PARAM_SEP, 1);
+ strncat(supported, TICameraParameters::IPP_LDCNSF, MAX_PROP_NAME_LENGTH);
}
- remove_last_sep(supported);
params->set(CameraProperties::SUPPORTED_IPP_MODES, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertWBModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertWBModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -803,19 +1214,22 @@ status_t OMXCameraAdapter::insertWBModes(CameraProperties::Properties* params, O
for ( unsigned int i = 0 ; i < caps.ulWhiteBalanceCount ; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eWhiteBalanceModes[i], WBalLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertEffects(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertEffects(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -827,19 +1241,22 @@ status_t OMXCameraAdapter::insertEffects(CameraProperties::Properties* params, O
for ( unsigned int i = 0 ; i < caps.ulColorEffectCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eColorEffects[i], EffLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
- remove_last_sep(supported);
+
params->set(CameraProperties::SUPPORTED_EFFECTS, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertExpModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertExpModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -851,19 +1268,77 @@ status_t OMXCameraAdapter::insertExpModes(CameraProperties::Properties* params,
for ( unsigned int i = 0 ; i < caps.ulExposureModeCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eExposureModes[i], ExpLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, supported);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertManualExpRanges(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
LOG_FUNCTION_NAME;
+#ifndef OMAP_TUNA
+ if (caps.nManualExpMin > caps.nManualExpMax) {
+#endif
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP, supported);
+#ifndef OMAP_TUNA
+ } else {
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.nManualExpMin);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.nManualExpMax);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) MANUAL_EXPOSURE_STEP);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP, supported);
+ }
+#endif
+
+ if (MANUAL_GAIN_ISO_MIN > caps.nSensitivityMax) {
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP, supported); }
+ else {
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) MANUAL_GAIN_ISO_MIN);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.nSensitivityMax);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) MANUAL_GAIN_ISO_STEP);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP, supported);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
return ret;
}
-status_t OMXCameraAdapter::insertFlashModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertFlashModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -875,20 +1350,26 @@ status_t OMXCameraAdapter::insertFlashModes(CameraProperties::Properties* params
for ( unsigned int i = 0 ; i < caps.ulFlashCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eFlashModes[i], FlashLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
- remove_last_sep(supported);
+ if ( strlen(supported) == 0 ) {
+ strncpy(supported, DEFAULT_FLASH_MODE, MAX_PROP_NAME_LENGTH);
+ }
+
params->set(CameraProperties::SUPPORTED_FLASH_MODES, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertSceneModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertSceneModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -900,34 +1381,31 @@ status_t OMXCameraAdapter::insertSceneModes(CameraProperties::Properties* params
for ( unsigned int i = 0 ; i < caps.ulSceneCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eSceneModes[i], SceneLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
- remove_last_sep(supported);
params->set(CameraProperties::SUPPORTED_SCENE_MODES, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
- const char *p;
LOG_FUNCTION_NAME;
memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
for ( unsigned int i = 0 ; i < caps.ulFocusModeCount; i++ ) {
- p = getLUTvalue_OMXtoHAL(caps.eFocusModes[i], FocusLUT);
- if ( NULL != p ) {
- strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
- }
+ getMultipleLUTvalue_OMXtoHAL(caps.eFocusModes[i], FocusLUT, supported);
}
// Check if focus is supported by camera
@@ -935,21 +1413,21 @@ status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params
caps.eFocusModes[0] == OMX_IMAGE_FocusControlOff) {
// Focus is not supported by camera
// Advertise this to app as infinitiy focus mode
- strncat(supported, CameraParameters::FOCUS_MODE_INFINITY, MAX_PROP_NAME_LENGTH);
- } else {
- // Focus is supported but these modes are not supported by the
- // capability feature. Apply manually
- strncat(supported, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, MAX_PROP_NAME_LENGTH);
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat(supported, android::CameraParameters::FOCUS_MODE_INFINITY, MAX_PROP_NAME_LENGTH);
}
params->set(CameraProperties::SUPPORTED_FOCUS_MODES, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertFlickerModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertFlickerModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -961,19 +1439,22 @@ status_t OMXCameraAdapter::insertFlickerModes(CameraProperties::Properties* para
for ( unsigned int i = 0 ; i < caps.ulFlickerCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eFlicker[i], FlickerLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
- remove_last_sep(supported);
+
params->set(CameraProperties::SUPPORTED_ANTIBANDING, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -991,31 +1472,490 @@ status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX
params->set(CameraProperties::MAX_NUM_METERING_AREAS, supported);
CAMHAL_LOGDB("Maximum supported exposure areas %s", supported);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertVNFSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+
LOG_FUNCTION_NAME;
+#ifndef OMAP_TUNA
+ if ( OMX_TRUE == caps.bVideoNoiseFilterSupported ) {
+ params->set(CameraProperties::VNF_SUPPORTED, android::CameraParameters::TRUE);
+ } else {
+#endif
+ params->set(CameraProperties::VNF_SUPPORTED, android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
return ret;
}
-status_t OMXCameraAdapter::insertLocks(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertVSTABSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
status_t ret = NO_ERROR;
- LOG_FUNCTION_NAME
+ LOG_FUNCTION_NAME;
+
+#ifndef OMAP_TUNA
+ if ( OMX_TRUE == caps.bVideoStabilizationSupported ) {
+ params->set(CameraProperties::VSTAB_SUPPORTED, android::CameraParameters::TRUE);
+ } else {
+#endif
+ params->set(CameraProperties::VSTAB_SUPPORTED, android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
- params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, DEFAULT_LOCK_SUPPORTED);
- params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, DEFAULT_LOCK_SUPPORTED);
+status_t OMXCameraAdapter::insertLocks(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
LOG_FUNCTION_NAME
+#ifndef OMAP_TUNA
+ if ( caps.bAELockSupported ) {
+ params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, android::CameraParameters::TRUE);
+ } else {
+#endif
+ params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+#ifndef OMAP_TUNA
+ if ( caps.bAWBLockSupported ) {
+ params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, android::CameraParameters::TRUE);
+ } else {
+#endif
+ params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT
+
return ret;
}
-status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertSenMount(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
+ unsigned int i = 0;
LOG_FUNCTION_NAME;
+ memset(supported, '\0', sizeof(supported));
+
+ // 1) Look up and assign sensor name
+ for (i = 0; i < ARRAY_SIZE(mSensorNames); i++) {
+ if(mSensorNames[i].num == caps.tSenMounting.nSenId) {
+ // sensor found
+ break;
+ }
+ }
+ if ( i == ARRAY_SIZE(mSensorNames) ) {
+ p = "UNKNOWN_SENSOR";
+ } else {
+ p = mSensorNames[i].param;
+ }
+ strncat(supported, p, REMAINING_BYTES(supported));
+ params->set(CameraProperties::CAMERA_NAME, supported);
+ params->set(CameraProperties::CAMERA_SENSOR_ID, caps.tSenMounting.nSenId);
+
+ // 2) Assign mounting rotation
+ params->set(CameraProperties::ORIENTATION_INDEX, caps.tSenMounting.nRotation);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertRaw(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ unsigned int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+#ifndef OMAP_TUNA
+ sprintf(supported,"%d",int(caps.uSenNativeResWidth));
+#endif
+ params->set(CameraProperties::RAW_WIDTH, supported);
+
+ memset(supported, '\0', sizeof(supported));
+#ifndef OMAP_TUNA
+ if (caps.bMechanicalMisalignmentSupported) {
+ sprintf(supported,"%d",int(caps.uSenNativeResHeight) * 2);
+ } else {
+ sprintf(supported,"%d",int(caps.uSenNativeResHeight));
+ }
+#endif
+ params->set(CameraProperties::RAW_HEIGHT, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertFacing(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+ unsigned int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+
+#ifndef OMAP_TUNA
+ for (i = 0; i < ARRAY_SIZE(mFacing); i++) {
+ if((OMX_TI_SENFACING_TYPE)mFacing[i].num == caps.tSenMounting.eFacing) {
+ break;
+ }
+ }
+#endif
+ if ( i == ARRAY_SIZE(mFacing) ) {
+ p = "UNKNOWN_FACING";
+ } else {
+ p = mFacing[i].param;
+ }
+ strncat(supported, p, REMAINING_BYTES(supported));
+ params->set(CameraProperties::FACING_INDEX, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertFocalLength(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+
+#ifndef OMAP_TUNA
+ sprintf(supported, "%d", caps.nFocalLength / 100);
+#endif
+ strncat(supported, ".", REMAINING_BYTES(supported));
+#ifndef OMAP_TUNA
+ sprintf(supported+(strlen(supported)*sizeof(char)), "%d", caps.nFocalLength % 100);
+#endif
+
+ params->set(CameraProperties::FOCAL_LENGTH, supported);
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertAutoConvergenceModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+ unsigned int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+#ifndef OMAP_TUNA
+ for ( unsigned int i = 0 ; i < caps.ulAutoConvModesCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eAutoConvModes[i], mAutoConvergenceLUT);
+ if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ }
+ strncat(supported, p, REMAINING_BYTES(supported));
+ }
+ }
+#endif
+ params->set(CameraProperties::AUTOCONVERGENCE_MODE_VALUES, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertManualConvergenceRange(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+#ifndef OMAP_TUNA
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.nManualConvMin ));
+#endif
+ params->set(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN, supported);
+
+#ifndef OMAP_TUNA
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.nManualConvMax ));
+#endif
+ params->set(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX, supported);
+
+#ifndef OMAP_TUNA
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.nManualConvMax != caps.nManualConvMin ));
+#endif
+ params->set(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_STEP, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+#ifndef OMAP_TUNA
+status_t OMXCameraAdapter::insertMechanicalMisalignmentCorrection(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ LOG_FUNCTION_NAME;
+
+ params->set(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED,
+ caps.bMechanicalMisalignmentSupported == OMX_TRUE ?
+ android::CameraParameters::TRUE : android::CameraParameters::FALSE);
+
+ return OK;
+}
+#endif
+
+status_t OMXCameraAdapter::insertCaptureModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+
+#ifndef OMAP_TUNA
+ // 3D mode detect: Misalignment is present only in 3d mode
+ if (caps.bMechanicalMisalignmentSupported)
+ {
+ strncat(supported, TICameraParameters::HIGH_QUALITY_MODE, REMAINING_BYTES(supported));
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::VIDEO_MODE, REMAINING_BYTES(supported));
+ }
+ else // 2D mode detect: Misalignment is present only in 3d mode
+ {
+#endif
+ strncat(supported, TICameraParameters::HIGH_QUALITY_MODE, REMAINING_BYTES(supported));
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::VIDEO_MODE, REMAINING_BYTES(supported));
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::HIGH_PERFORMANCE_MODE, REMAINING_BYTES(supported));
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::HIGH_QUALITY_ZSL_MODE, REMAINING_BYTES(supported));
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::CP_CAM_MODE, REMAINING_BYTES(supported));
+#endif
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::ZOOM_BRACKETING, REMAINING_BYTES(supported));
+#ifndef OMAP_TUNA
+ }
+#endif
+
+#ifndef OMAP_TUNA
+ for ( unsigned int i = 0 ; i < caps.ulBracketingModesCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eBracketingModes[i], mBracketingModesLUT);
+ if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ }
+ strncat(supported, p, REMAINING_BYTES(supported));
+ }
+ }
+#endif
+
+ params->set(CameraProperties::CAP_MODE_VALUES, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertLayout(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+ unsigned int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+#ifndef OMAP_TUNA
+ for ( unsigned int i = 0 ; i < caps.ulPrvFrameLayoutCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.ePrvFrameLayout[i], mLayoutLUT);
+ if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ }
+ }
+#endif
+ params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES, supported);
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+#ifndef OMAP_TUNA
+ for ( unsigned int i = 0 ; i < caps.ulCapFrameLayoutCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eCapFrameLayout[i], mLayoutLUT);
+ if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ }
+ }
+#endif
+ params->set(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertVideoSnapshotSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+#ifndef OMAP_TUNA
+ if (caps.bStillCapDuringVideoSupported)
+ {
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, android::CameraParameters::TRUE);
+ }
+ else
+ {
+#endif
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertGBCESupported(CameraProperties::Properties* params,
+ const OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+#ifndef OMAP_TUNA
+ if (caps.bGbceSupported) {
+ params->set(CameraProperties::SUPPORTED_GBCE,
+ android::CameraParameters::TRUE);
+ } else {
+#endif
+ params->set(CameraProperties::SUPPORTED_GBCE,
+ android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertGLBCESupported(CameraProperties::Properties* params,
+ const OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+#ifndef OMAP_TUNA
+ if (caps.bGlbceSupported) {
+ params->set(CameraProperties::SUPPORTED_GLBCE,
+ android::CameraParameters::TRUE);
+ } else {
+#endif
+ params->set(CameraProperties::SUPPORTED_GLBCE,
+ android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char *pos, *str, *def;
+ char temp[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ /* If default is supported - set it, else - set first supported */
+ if (strstr(params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES), DEFAULT_S3D_PREVIEW_LAYOUT)) {
+ strncpy(temp, DEFAULT_S3D_PREVIEW_LAYOUT, MAX_PROP_VALUE_LENGTH - 1);
+ } else {
+ strncpy(temp, params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES),
+ MAX_PROP_VALUE_LENGTH - 1);
+ if ((pos = strstr(temp, PARAM_SEP))) {
+ *pos = '\0';
+ }
+ }
+ params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT, temp);
+
+ if (!strcmp(TICameraParameters::S3D_TB_FULL, temp)) {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, params->get(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+ } else if (!strcmp(TICameraParameters::S3D_SS_FULL, temp)) {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, params->get(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+ } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, temp))
+ || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, temp))) {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, params->get(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+ }
+
+ /* If default is supported - set it, else - set first supported */
+ if (strstr(params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES), DEFAULT_S3D_PICTURE_LAYOUT)) {
+ strncpy(temp, DEFAULT_S3D_PICTURE_LAYOUT, MAX_PROP_VALUE_LENGTH - 1);
+ } else {
+ strncpy(temp, params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES),
+ MAX_PROP_VALUE_LENGTH - 1);
+ if ((pos = strstr(temp, PARAM_SEP))) {
+ *pos = '\0';
+ }
+ }
+ params->set(CameraProperties::S3D_CAP_FRAME_LAYOUT, temp);
+
+ if (!strcmp(TICameraParameters::S3D_TB_FULL, temp)) {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, params->get(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
+ } else if (!strcmp(TICameraParameters::S3D_SS_FULL, temp)) {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, params->get(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
+ } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, temp))
+ || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, temp))) {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, params->get(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
+ }
+
params->set(CameraProperties::ANTIBANDING, DEFAULT_ANTIBANDING);
params->set(CameraProperties::BRIGHTNESS, DEFAULT_BRIGHTNESS);
params->set(CameraProperties::CONTRAST, DEFAULT_CONTRAST);
@@ -1024,7 +1964,7 @@ status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params,
params->set(CameraProperties::SUPPORTED_EV_STEP, DEFAULT_EV_STEP);
params->set(CameraProperties::EXPOSURE_MODE, DEFAULT_EXPOSURE_MODE);
params->set(CameraProperties::FLASH_MODE, DEFAULT_FLASH_MODE);
- char *pos = strstr(params->get(CameraProperties::SUPPORTED_FOCUS_MODES), DEFAULT_FOCUS_MODE_PREFERRED);
+ pos = strstr(params->get(CameraProperties::SUPPORTED_FOCUS_MODES), DEFAULT_FOCUS_MODE_PREFERRED);
if ( NULL != pos )
{
params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE_PREFERRED);
@@ -1034,81 +1974,99 @@ status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params,
params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE);
}
params->set(CameraProperties::IPP, DEFAULT_IPP);
- params->set(CameraProperties::GBCE, DEFAULT_GBCE);
+ params->set(CameraProperties::GBCE, android::CameraParameters::FALSE);
+ params->set(CameraProperties::GLBCE, android::CameraParameters::FALSE);
params->set(CameraProperties::ISO_MODE, DEFAULT_ISO_MODE);
params->set(CameraProperties::JPEG_QUALITY, DEFAULT_JPEG_QUALITY);
params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, DEFAULT_THUMBNAIL_QUALITY);
params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, DEFAULT_THUMBNAIL_SIZE);
params->set(CameraProperties::PICTURE_FORMAT, DEFAULT_PICTURE_FORMAT);
- params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+
+ if (!strcmp(params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT),
+ TICameraParameters::S3D_TB_FULL)) {
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_TB_SIZE);
+ } else if (!strcmp(params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT),
+ TICameraParameters::S3D_SS_FULL)) {
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SS_SIZE);
+ } else {
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+ }
+
+ if (!strcmp(params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT),
+ TICameraParameters::S3D_TB_FULL)) {
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_TB_SIZE);
+ } else if (!strcmp(params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT),
+ TICameraParameters::S3D_SS_FULL)) {
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SS_SIZE);
+ } else {
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+ }
+
params->set(CameraProperties::PREVIEW_FORMAT, DEFAULT_PREVIEW_FORMAT);
- params->set(CameraProperties::PREVIEW_FRAME_RATE, DEFAULT_FRAMERATE);
- params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+
+ /* Set default value if supported, otherwise set max supported value */
+ strncpy(temp, params->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES),
+ MAX_PROP_VALUE_LENGTH - 1);
+ def = str = temp;
+ while (1) {
+ if ((pos = strstr(str, PARAM_SEP))) {
+ *pos = '\0';
+ }
+ if (!strcmp(str, DEFAULT_FRAMERATE)) {
+ def = str;
+ break;
+ }
+ if (atoi(str) > atoi(def)) {
+ def = str;
+ }
+ if (pos == NULL) {
+ break;
+ }
+ str = pos + strlen(PARAM_SEP);
+ }
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, def);
+
params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS);
params->set(CameraProperties::REQUIRED_IMAGE_BUFS, DEFAULT_NUM_PIC_BUFS);
params->set(CameraProperties::SATURATION, DEFAULT_SATURATION);
params->set(CameraProperties::SCENE_MODE, DEFAULT_SCENE_MODE);
params->set(CameraProperties::SHARPNESS, DEFAULT_SHARPNESS);
params->set(CameraProperties::VSTAB, DEFAULT_VSTAB);
- params->set(CameraProperties::VSTAB_SUPPORTED, DEFAULT_VSTAB_SUPPORTED);
+ params->set(CameraProperties::VNF, DEFAULT_VNF);
params->set(CameraProperties::WHITEBALANCE, DEFAULT_WB);
params->set(CameraProperties::ZOOM, DEFAULT_ZOOM);
params->set(CameraProperties::MAX_FD_HW_FACES, DEFAULT_MAX_FD_HW_FACES);
params->set(CameraProperties::MAX_FD_SW_FACES, DEFAULT_MAX_FD_SW_FACES);
params->set(CameraProperties::AUTO_EXPOSURE_LOCK, DEFAULT_AE_LOCK);
params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK, DEFAULT_AWB_LOCK);
- if(caps.tSenMounting.nSenId == 305) {
- params->set(CameraProperties::FOCAL_LENGTH, DEFAULT_FOCAL_LENGTH_PRIMARY);
- } else {
- params->set(CameraProperties::FOCAL_LENGTH, DEFAULT_FOCAL_LENGTH_SECONDARY);
- }
params->set(CameraProperties::HOR_ANGLE, DEFAULT_HOR_ANGLE);
params->set(CameraProperties::VER_ANGLE, DEFAULT_VER_ANGLE);
- params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, DEFAULT_VIDEO_SNAPSHOT_SUPPORTED);
params->set(CameraProperties::VIDEO_SIZE, DEFAULT_VIDEO_SIZE);
- params->set(CameraProperties::PREFERRED_PREVIEW_SIZE_FOR_VIDEO, DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO);
+ params->set(CameraProperties::SENSOR_ORIENTATION, DEFAULT_SENSOR_ORIENTATION);
+ params->set(CameraProperties::AUTOCONVERGENCE_MODE, DEFAULT_AUTOCONVERGENCE_MODE);
+ params->set(CameraProperties::MANUAL_CONVERGENCE, DEFAULT_MANUAL_CONVERGENCE);
+ params->set(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION, DEFAULT_MECHANICAL_MISALIGNMENT_CORRECTION_MODE);
+
+ char property[PROPERTY_VALUE_MAX];
+ property_get("ro.product.manufacturer",
+ property,
+ DEFAULT_EXIF_MAKE);
+ property[0] = toupper(property[0]);
+ params->set(CameraProperties::EXIF_MAKE, property);
+ property_get("ro.product.model",
+ property,
+ DEFAULT_EXIF_MODEL);
+ property[0] = toupper(property[0]);
+ params->set(CameraProperties::EXIF_MODEL, property);
- LOG_FUNCTION_NAME;
-
- return ret;
-}
-
-status_t OMXCameraAdapter::insertSenMount(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
- status_t ret = NO_ERROR;
- char supported[MAX_PROP_VALUE_LENGTH];
- const char *p;
- unsigned int i = 0;
-
- LOG_FUNCTION_NAME;
-
- memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
-
- // 1) Look up and assign sensor name
- for (i = 0; i < ARRAY_SIZE(mSensorNames); i++) {
- if(mSensorNames[i].num == caps.tSenMounting.nSenId) {
- // sensor found
- break;
- }
- }
- if ( i == ARRAY_SIZE(mSensorNames) ) {
- p = "UNKNOWN_SENSOR";
- } else {
- p = mSensorNames[i].param;
- }
- strncat(supported, p, MAX_PROP_NAME_LENGTH);
- params->set(CameraProperties::CAMERA_NAME, supported);
-
- // 2) Assign mounting rotation
- params->set(CameraProperties::ORIENTATION_INDEX, caps.tSenMounting.nRotation);
-
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
- char supported[MAX_PROP_VALUE_LENGTH];
LOG_FUNCTION_NAME;
@@ -1141,10 +2099,6 @@ status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* para
}
if ( NO_ERROR == ret ) {
- ret = insertVFramerates(params, caps);
- }
-
- if ( NO_ERROR == ret ) {
ret = insertEVs(params, caps);
}
@@ -1169,6 +2123,10 @@ status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* para
}
if ( NO_ERROR == ret ) {
+ ret = insertManualExpRanges(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
ret = insertFlashModes(params, caps);
}
@@ -1191,46 +2149,368 @@ status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* para
if ( NO_ERROR == ret ) {
ret = insertLocks(params, caps);
}
+
if ( NO_ERROR == ret) {
ret = insertAreas(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertFacing(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertFocalLength(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertAutoConvergenceModes(params, caps);
+ }
+ if ( NO_ERROR == ret) {
+ ret = insertManualConvergenceRange(params, caps);
+ }
+
+#ifndef OMAP_TUNA
+ if ( NO_ERROR == ret) {
+ ret = insertMechanicalMisalignmentCorrection(params, caps);
+ }
+#endif
+
+ if ( NO_ERROR == ret) {
+ ret = insertRaw(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertCaptureModes(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertLayout(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertVideoSnapshotSupported(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertVSTABSupported(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertVNFSupported(params, caps);
}
//NOTE: Ensure that we always call insertDefaults after inserting the supported capabilities
//as there are checks inside insertDefaults to make sure a certain default is supported
// or not
if ( NO_ERROR == ret ) {
- ret = insertVideoSizes(params, caps);
+ ret = insertVideoSizes(params, caps);
}
- if ( NO_ERROR == ret ) {
- ret = insertDefaults(params, caps);
+ if ( NO_ERROR == ret) {
+ ret = insertGBCESupported(params, caps);
}
+ if ( NO_ERROR == ret) {
+ ret = insertGLBCESupported(params, caps);
+ }
+ if ( NO_ERROR == ret ) {
+ ret = insertDefaults(params, caps);
+ }
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+
+bool OMXCameraAdapter::_checkOmxTiCap(const OMX_TI_CAPTYPE & caps)
+{
+#define CAMHAL_CHECK_OMX_TI_CAP(countVar, arrayVar) \
+ do { \
+ const int count = static_cast<int>(caps.countVar); \
+ const int maxSize = CAMHAL_SIZE_OF_ARRAY(caps.arrayVar); \
+ if ( count < 0 || count > maxSize ) \
+ { \
+ CAMHAL_LOGE("OMX_TI_CAPTYPE verification failed"); \
+ CAMHAL_LOGE(" variable: OMX_TI_CAPTYPE::" #countVar \
+ ", value: %d, max allowed: %d", \
+ count, maxSize); \
+ return false; \
+ } \
+ } while (0)
+
+ CAMHAL_CHECK_OMX_TI_CAP(ulPreviewFormatCount, ePreviewFormats);
+ CAMHAL_CHECK_OMX_TI_CAP(ulImageFormatCount, eImageFormats);
+ CAMHAL_CHECK_OMX_TI_CAP(ulWhiteBalanceCount, eWhiteBalanceModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulColorEffectCount, eColorEffects);
+ CAMHAL_CHECK_OMX_TI_CAP(ulFlickerCount, eFlicker);
+ CAMHAL_CHECK_OMX_TI_CAP(ulExposureModeCount, eExposureModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulFocusModeCount, eFocusModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulSceneCount, eSceneModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulFlashCount, eFlashModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulPrvVarFPSModesCount, tPrvVarFPSModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulCapVarFPSModesCount, tCapVarFPSModes);
+#ifndef OMAP_TUNA
+ CAMHAL_CHECK_OMX_TI_CAP(ulAutoConvModesCount, eAutoConvModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulBracketingModesCount, eBracketingModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulImageCodingFormatCount, eImageCodingFormat);
+ CAMHAL_CHECK_OMX_TI_CAP(ulPrvFrameLayoutCount, ePrvFrameLayout);
+ CAMHAL_CHECK_OMX_TI_CAP(ulCapFrameLayoutCount, eCapFrameLayout);
+#endif
+
+#undef CAMHAL_CHECK_OMX_TI_CAP
+
+ return true;
+}
+
+
+bool OMXCameraAdapter::_dumpOmxTiCap(const int sensorId, const OMX_TI_CAPTYPE & caps)
+{
+ if ( !_checkOmxTiCap(caps) )
+ {
+ CAMHAL_LOGE("OMX_TI_CAPTYPE structure is invalid");
+ return false;
+ }
+
+ CAMHAL_LOGD("===================================================");
+ CAMHAL_LOGD("---- Dumping OMX capabilities for sensor id: %d ----", sensorId);
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulPreviewFormatCount = %d", int(caps.ulPreviewFormatCount));
+ for ( int i = 0; i < int(caps.ulPreviewFormatCount); ++i )
+ CAMHAL_LOGD(" ePreviewFormats[%2d] = %d", i, int(caps.ePreviewFormats[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulImageFormatCount = %d", int(caps.ulImageFormatCount));
+ for ( int i = 0; i < int(caps.ulImageFormatCount); ++i )
+ CAMHAL_LOGD(" eImageFormats[%2d] = %d", i, int(caps.eImageFormats[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tPreviewResRange.nWidthMin = %d", int(caps.tPreviewResRange.nWidthMin));
+ CAMHAL_LOGD("tPreviewResRange.nHeightMin = %d", int(caps.tPreviewResRange.nHeightMin));
+ CAMHAL_LOGD("tPreviewResRange.nWidthMax = %d", int(caps.tPreviewResRange.nWidthMax));
+ CAMHAL_LOGD("tPreviewResRange.nHeightMax = %d", int(caps.tPreviewResRange.nHeightMax));
+ CAMHAL_LOGD("tPreviewResRange.nMaxResInPixels = %d", int(caps.tPreviewResRange.nMaxResInPixels));
+
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tRotatedPreviewResRange.nWidthMin = %d", int(caps.tRotatedPreviewResRange.nWidthMin));
+ CAMHAL_LOGD("tRotatedPreviewResRange.nHeightMin = %d", int(caps.tRotatedPreviewResRange.nHeightMin));
+ CAMHAL_LOGD("tRotatedPreviewResRange.nWidthMax = %d", int(caps.tRotatedPreviewResRange.nWidthMax));
+ CAMHAL_LOGD("tRotatedPreviewResRange.nHeightMax = %d", int(caps.tRotatedPreviewResRange.nHeightMax));
+ CAMHAL_LOGD("tRotatedPreviewResRange.nMaxResInPixels = %d", int(caps.tRotatedPreviewResRange.nMaxResInPixels));
+#endif
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tImageResRange.nWidthMin = %d", int(caps.tImageResRange.nWidthMin));
+ CAMHAL_LOGD("tImageResRange.nHeightMin = %d", int(caps.tImageResRange.nHeightMin));
+ CAMHAL_LOGD("tImageResRange.nWidthMax = %d", int(caps.tImageResRange.nWidthMax));
+ CAMHAL_LOGD("tImageResRange.nHeightMax = %d", int(caps.tImageResRange.nHeightMax));
+ CAMHAL_LOGD("tImageResRange.nMaxResInPixels = %d", int(caps.tImageResRange.nMaxResInPixels));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tThumbResRange.nWidthMin = %d", int(caps.tThumbResRange.nWidthMin));
+ CAMHAL_LOGD("tThumbResRange.nHeightMin = %d", int(caps.tThumbResRange.nHeightMin));
+ CAMHAL_LOGD("tThumbResRange.nWidthMax = %d", int(caps.tThumbResRange.nWidthMax));
+ CAMHAL_LOGD("tThumbResRange.nHeightMax = %d", int(caps.tThumbResRange.nHeightMax));
+ CAMHAL_LOGD("tThumbResRange.nMaxResInPixels = %d", int(caps.tThumbResRange.nMaxResInPixels));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulWhiteBalanceCount = %d", int(caps.ulWhiteBalanceCount));
+ for ( int i = 0; i < int(caps.ulWhiteBalanceCount); ++i )
+ CAMHAL_LOGD(" eWhiteBalanceModes[%2d] = 0x%08x", i, int(caps.eWhiteBalanceModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulColorEffectCount = %d", int(caps.ulColorEffectCount));
+ for ( int i = 0; i < int(caps.ulColorEffectCount); ++i )
+ CAMHAL_LOGD(" eColorEffects[%2d] = 0x%08x", i, int(caps.eColorEffects[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("xMaxWidthZoom = %d", int(caps.xMaxWidthZoom));
+ CAMHAL_LOGD("xMaxHeightZoom = %d", int(caps.xMaxHeightZoom));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulFlickerCount = %d", int(caps.ulFlickerCount));
+ for ( int i = 0; i < int(caps.ulFlickerCount); ++i )
+ CAMHAL_LOGD(" eFlicker[%2d] = %d", i, int(caps.eFlicker[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulExposureModeCount = %d", int(caps.ulExposureModeCount));
+ for ( int i = 0; i < int(caps.ulExposureModeCount); ++i )
+ CAMHAL_LOGD(" eExposureModes[%2d] = 0x%08x", i, int(caps.eExposureModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("bLensDistortionCorrectionSupported = %d", int(caps.bLensDistortionCorrectionSupported));
+ CAMHAL_LOGD("bISONoiseFilterSupported = %d", int(caps.bISONoiseFilterSupported));
+ CAMHAL_LOGD("xEVCompensationMin = %d", int(caps.xEVCompensationMin));
+ CAMHAL_LOGD("xEVCompensationMax = %d", int(caps.xEVCompensationMax));
+ CAMHAL_LOGD("nSensitivityMax = %d", int(caps.nSensitivityMax));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulFocusModeCount = %d", int(caps.ulFocusModeCount));
+ for ( int i = 0; i < int(caps.ulFocusModeCount); ++i )
+ CAMHAL_LOGD(" eFocusModes[%2d] = 0x%08x", i, int(caps.eFocusModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulSceneCount = %d", int(caps.ulSceneCount));
+ for ( int i = 0; i < int(caps.ulSceneCount); ++i )
+ CAMHAL_LOGD(" eSceneModes[%2d] = %d", i, int(caps.eSceneModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulFlashCount = %d", int(caps.ulFlashCount));
+ for ( int i = 0; i < int(caps.ulFlashCount); ++i )
+ CAMHAL_LOGD(" eFlashModes[%2d] = %d", i, int(caps.eFlashModes[i]));
+
+ CAMHAL_LOGD("xFramerateMin = %d", int(caps.xFramerateMin));
+ CAMHAL_LOGD("xFramerateMax = %d", int(caps.xFramerateMax));
+ CAMHAL_LOGD("bContrastSupported = %d", int(caps.bContrastSupported));
+ CAMHAL_LOGD("bSaturationSupported = %d", int(caps.bSaturationSupported));
+ CAMHAL_LOGD("bBrightnessSupported = %d", int(caps.bBrightnessSupported));
+ CAMHAL_LOGD("bProcessingLevelSupported = %d", int(caps.bProcessingLevelSupported));
+ CAMHAL_LOGD("bQFactorSupported = %d", int(caps.bQFactorSupported));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulPrvVarFPSModesCount = %d", int(caps.ulPrvVarFPSModesCount));
+ for ( int i = 0; i < int(caps.ulPrvVarFPSModesCount); ++i )
+ {
+ CAMHAL_LOGD(" tPrvVarFPSModes[%d].nVarFPSMin = %d", i, int(caps.tPrvVarFPSModes[i].nVarFPSMin));
+ CAMHAL_LOGD(" tPrvVarFPSModes[%d].nVarFPSMax = %d", i, int(caps.tPrvVarFPSModes[i].nVarFPSMax));
+ }
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulCapVarFPSModesCount = %d", int(caps.ulCapVarFPSModesCount));
+ for ( int i = 0; i < int(caps.ulCapVarFPSModesCount); ++i )
+ {
+ CAMHAL_LOGD(" tCapVarFPSModes[%d].nVarFPSMin = %d", i, int(caps.tCapVarFPSModes[i].nVarFPSMin));
+ CAMHAL_LOGD(" tCapVarFPSModes[%d].nVarFPSMax = %d", i, int(caps.tCapVarFPSModes[i].nVarFPSMax));
+ }
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tSenMounting.nSenId = %d", int(caps.tSenMounting.nSenId));
+ CAMHAL_LOGD("tSenMounting.nRotation = %d", int(caps.tSenMounting.nRotation));
+ CAMHAL_LOGD("tSenMounting.bMirror = %d", int(caps.tSenMounting.bMirror));
+ CAMHAL_LOGD("tSenMounting.bFlip = %d", int(caps.tSenMounting.bFlip));
+ CAMHAL_LOGD("tSenMounting.eFacing = %d", int(caps.tSenMounting.eFacing));
+
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulAutoConvModesCount = %d", int(caps.ulAutoConvModesCount));
+ for ( int i = 0; i < int(caps.ulAutoConvModesCount); ++i )
+ CAMHAL_LOGD(" eAutoConvModes[%2d] = %d", i, int(caps.eAutoConvModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulBracketingModesCount = %d", int(caps.ulBracketingModesCount));
+ for ( int i = 0; i < int(caps.ulBracketingModesCount); ++i )
+ CAMHAL_LOGD(" eBracketingModes[%2d] = %d", i, int(caps.eBracketingModes[i]));
+#endif
+
+ CAMHAL_LOGD("");
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("bGbceSupported = %d", int(caps.bGbceSupported));
+#endif
+ CAMHAL_LOGD("bRawJpegSupported = %d", int(caps.bRawJpegSupported));
+
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulImageCodingFormatCount = %d", int(caps.ulImageCodingFormatCount));
+ for ( int i = 0; i < int(caps.ulImageCodingFormatCount); ++i )
+ CAMHAL_LOGD(" eImageCodingFormat[%2d] = %d", i, int(caps.eImageCodingFormat[i]));
+#endif
+
+ CAMHAL_LOGD("");
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("uSenNativeResWidth = %d", int(caps.uSenNativeResWidth));
+ CAMHAL_LOGD("uSenNativeResHeight = %d", int(caps.uSenNativeResHeight));
+#endif
+ CAMHAL_LOGD("ulAlgoAreasFocusCount = %d", int(caps.ulAlgoAreasFocusCount));
+ CAMHAL_LOGD("ulAlgoAreasExposureCount = %d", int(caps.ulAlgoAreasExposureCount));
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("bAELockSupported = %d", int(caps.bAELockSupported));
+ CAMHAL_LOGD("bAWBLockSupported = %d", int(caps.bAWBLockSupported));
+#endif
+ CAMHAL_LOGD("bAFLockSupported = %d", int(caps.bAFLockSupported));
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("nFocalLength = %d", int(caps.nFocalLength));
+#endif
+
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulPrvFrameLayoutCount = %d", int(caps.ulPrvFrameLayoutCount));
+ for ( int i = 0; i < int(caps.ulPrvFrameLayoutCount); ++i )
+ CAMHAL_LOGD(" ePrvFrameLayout[%2d] = %d", i, int(caps.ePrvFrameLayout[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulCapFrameLayoutCount = %d", int(caps.ulCapFrameLayoutCount));
+ for ( int i = 0; i < int(caps.ulCapFrameLayoutCount); ++i )
+ CAMHAL_LOGD(" eCapFrameLayout[%2d] = %d", i, int(caps.eCapFrameLayout[i]));
+#endif
+
+ CAMHAL_LOGD("");
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("bVideoNoiseFilterSupported = %d", int(caps.bVideoNoiseFilterSupported ));
+ CAMHAL_LOGD("bVideoStabilizationSupported = %d", int(caps.bVideoStabilizationSupported ));
+#endif
+ CAMHAL_LOGD("bStillCapDuringVideoSupported = %d", int(caps.bStillCapDuringVideoSupported ));
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("bMechanicalMisalignmentSupported = %d", int(caps.bMechanicalMisalignmentSupported));
+#endif
+ CAMHAL_LOGD("bFacePrioritySupported = %d", int(caps.bFacePrioritySupported ));
+ CAMHAL_LOGD("bRegionPrioritySupported = %d", int(caps.bRegionPrioritySupported ));
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("bGlbceSupported = %d", int(caps.bGlbceSupported));
+#endif
+
+ CAMHAL_LOGD("");
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("nManualConvMin = %d", int(caps.nManualConvMin ));
+ CAMHAL_LOGD("nManualConvMax = %d", int(caps.nManualConvMax ));
+ CAMHAL_LOGD("nManualExpMin = %d", int(caps.nManualExpMin ));
+ CAMHAL_LOGD("nManualExpMax = %d", int(caps.nManualExpMax ));
+#endif
+ CAMHAL_LOGD("nBrightnessMin = %d", int(caps.nBrightnessMin ));
+ CAMHAL_LOGD("nBrightnessMax = %d", int(caps.nBrightnessMax ));
+ CAMHAL_LOGD("nContrastMin = %d", int(caps.nContrastMin ));
+ CAMHAL_LOGD("nContrastMax = %d", int(caps.nContrastMax ));
+ CAMHAL_LOGD("nSharpnessMin = %d", int(caps.nSharpnessMin ));
+ CAMHAL_LOGD("nSharpnessMax = %d", int(caps.nSharpnessMax ));
+ CAMHAL_LOGD("nSaturationMin = %d", int(caps.nSaturationMin ));
+ CAMHAL_LOGD("nSaturationMax = %d", int(caps.nSaturationMax ));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("------------------- end of dump -------------------");
+ CAMHAL_LOGD("===================================================");
+
+ return true;
+}
+
/*****************************************
* public exposed function declarations
*****************************************/
-status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HANDLETYPE handle) {
+status_t OMXCameraAdapter::getCaps(const int sensorId, CameraProperties::Properties* params, OMX_HANDLETYPE handle)
+{
status_t ret = NO_ERROR;
int caps_size = 0;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_TI_CAPTYPE** caps = NULL;;
+ CameraBuffer *bufferlist;
+ OMX_TI_CAPTYPE* caps;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
MemoryManager memMgr;
LOG_FUNCTION_NAME;
+ ret = memMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
+
// allocate tiler (or ion) buffer for caps (size is always a multiple of 4K)
caps_size = ((sizeof(OMX_TI_CAPTYPE)+4095)/4096)*4096;
- caps = (OMX_TI_CAPTYPE**) memMgr.allocateBuffer(0, 0, NULL, caps_size, 1);
+ bufferlist = memMgr.allocateBufferList(0, 0, NULL, caps_size, 1);
+ caps = (OMX_TI_CAPTYPE*) bufferlist[0].opaque;
if (!caps) {
CAMHAL_LOGEB("Error allocating buffer for caps %d", eError);
@@ -1239,13 +2519,13 @@ status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HAN
}
// initialize structures to be passed to OMX Camera
- OMX_INIT_STRUCT_PTR (caps[0], OMX_TI_CAPTYPE);
- caps[0]->nPortIndex = OMX_ALL;
+ OMX_INIT_STRUCT_PTR (caps, OMX_TI_CAPTYPE);
+ caps->nPortIndex = OMX_ALL;
OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
sharedBuffer.nPortIndex = OMX_ALL;
sharedBuffer.nSharedBuffSize = caps_size;
- sharedBuffer.pSharedBuff = (OMX_U8 *) caps[0];
+ sharedBuffer.pSharedBuff = (OMX_U8 *) camera_buffer_get_omx_ptr (&bufferlist[0]);
// Get capabilities from OMX Camera
eError = OMX_GetConfig(handle, (OMX_INDEXTYPE) OMX_TI_IndexConfigCamCapabilities, &sharedBuffer);
@@ -1257,23 +2537,26 @@ status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HAN
CAMHAL_LOGDA("OMX capability query success");
}
+#ifdef CAMERAHAL_DEBUG
+ _dumpOmxTiCap(sensorId, *caps);
+#endif
+
// Translate and insert Ducati capabilities to CameraProperties
if ( NO_ERROR == ret ) {
- ret = insertCapabilities(params, *caps[0]);
+ ret = insertCapabilities(params, *caps);
}
- CAMHAL_LOGDB("sen mount id=%u", (unsigned int)caps[0]->tSenMounting.nSenId);
-
+ CAMHAL_LOGDB("sen mount id=%u", (unsigned int)caps->tSenMounting.nSenId);
+ CAMHAL_LOGDB("facing id=%u", (unsigned int)caps->tSenMounting.eFacing);
EXIT:
- if (caps) {
- memMgr.freeBuffer((void*) caps);
- caps = NULL;
+ if (bufferlist) {
+ memMgr.freeBufferList(bufferlist);
}
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-};
-
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXCapture.cpp b/camera/OMXCameraAdapter/OMXCapture.cpp
index 34a0357..28a0ab1 100644
--- a/camera/OMXCameraAdapter/OMXCapture.cpp
+++ b/camera/OMXCameraAdapter/OMXCapture.cpp
@@ -21,32 +21,41 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include "ErrorUtils.h"
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersCapture(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
const char *str = NULL;
int w, h;
OMX_COLOR_FORMATTYPE pixFormat;
+ CodingMode codingMode = mCodingMode;
const char *valstr = NULL;
int varint = 0;
+ OMX_TI_STEREOFRAMELAYOUTTYPE capFrmLayout;
+ bool inCaptureState = false;
LOG_FUNCTION_NAME;
OMXCameraPortParameters *cap;
cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ capFrmLayout = cap->mFrameLayoutType;
+#ifndef OMAP_TUNA
+ setParamS3D(mCameraAdapterParameters.mImagePortIndex,
+ params.get(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT));
+#endif
+ if (capFrmLayout != cap->mFrameLayoutType) {
+ mPendingCaptureSettings |= SetFormat;
+ }
+
params.getPictureSize(&w, &h);
if ( ( w != ( int ) cap->mWidth ) ||
@@ -64,84 +73,170 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
CAMHAL_LOGVB("Image: cap.mHeight = %d", (int)cap->mHeight);
if ((valstr = params.getPictureFormat()) != NULL) {
- if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
CAMHAL_LOGDA("CbYCrY format selected");
pixFormat = OMX_COLOR_FormatCbYCrY;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_YUV422I;
- } else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
CAMHAL_LOGDA("YUV420SP format selected");
pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_YUV420SP;
- } else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
CAMHAL_LOGDA("RGB565 format selected");
pixFormat = OMX_COLOR_Format16bitRGB565;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_RGB565;
- } else if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_JPEG) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_RGB565;
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_JPEG) == 0) {
CAMHAL_LOGDA("JPEG format selected");
pixFormat = OMX_COLOR_FormatUnused;
- mCodingMode = CodingNone;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_JPEG;
- } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_JPS) == 0) {
+ codingMode = CodingJPEG;
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_JPEG;
+ } else if (strcmp(valstr, TICameraParameters::PIXEL_FORMAT_JPS) == 0) {
CAMHAL_LOGDA("JPS format selected");
pixFormat = OMX_COLOR_FormatUnused;
- mCodingMode = CodingJPS;
+ codingMode = CodingJPS;
mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_JPS;
- } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_MPO) == 0) {
+ } else if (strcmp(valstr, TICameraParameters::PIXEL_FORMAT_MPO) == 0) {
CAMHAL_LOGDA("MPO format selected");
pixFormat = OMX_COLOR_FormatUnused;
- mCodingMode = CodingMPO;
+ codingMode = CodingMPO;
mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_MPO;
- } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_RAW) == 0) {
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
CAMHAL_LOGDA("RAW Picture format selected");
pixFormat = OMX_COLOR_FormatRawBayer10bit;
- mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_RAW;
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
} else {
CAMHAL_LOGEA("Invalid format, JPEG format selected as default");
pixFormat = OMX_COLOR_FormatUnused;
+ codingMode = CodingJPEG;
mPictureFormatFromClient = NULL;
}
} else {
CAMHAL_LOGEA("Picture format is NULL, defaulting to JPEG");
pixFormat = OMX_COLOR_FormatUnused;
+ codingMode = CodingJPEG;
mPictureFormatFromClient = NULL;
}
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ mRawCapture = false;
+ mYuvCapture = false;
+
+ valstr = params.get(TICameraParameters::KEY_CAP_MODE);
+ if ( (!valstr || strcmp(valstr, TICameraParameters::HIGH_QUALITY_MODE) == 0) &&
+ access(kRawImagesOutputDirPath, F_OK) != -1 ) {
+ mRawCapture = true;
+ }
+
+ if (mRawCapture && (access(kYuvImagesOutputDirPath, F_OK) != -1)) {
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ mYuvCapture = true;
+ }
+#endif
// JPEG capture is not supported in video mode by OMX Camera
// Set capture format to yuv422i...jpeg encode will
// be done on A9
valstr = params.get(TICameraParameters::KEY_CAP_MODE);
if ( (valstr && !strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE)) &&
- (pixFormat == OMX_COLOR_FormatUnused) ) {
+ (pixFormat == OMX_COLOR_FormatUnused) ) {
CAMHAL_LOGDA("Capturing in video mode...selecting yuv422i");
pixFormat = OMX_COLOR_FormatCbYCrY;
}
- if ( pixFormat != cap->mColorFormat )
- {
+ if (pixFormat != cap->mColorFormat || codingMode != mCodingMode) {
mPendingCaptureSettings |= SetFormat;
cap->mColorFormat = pixFormat;
- }
+ mCodingMode = codingMode;
+ }
#ifdef OMAP_ENHANCEMENT
+ str = params.get(TICameraParameters::KEY_TEMP_BRACKETING);
+ if ( ( str != NULL ) &&
+ ( strcmp(str, android::CameraParameters::TRUE) == 0 ) ) {
- str = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
- if ( NULL != str ) {
- parseExpRange(str, mExposureBracketingValues, EXP_BRACKET_RANGE, mExposureBracketingValidEntries);
+ if ( !mBracketingSet ) {
+ mPendingCaptureSettings |= SetExpBracket;
+ }
+
+ mBracketingSet = true;
+ } else {
+
+ if ( mBracketingSet ) {
+ mPendingCaptureSettings |= SetExpBracket;
+ }
+
+ mBracketingSet = false;
+ }
+
+ if ( (str = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE)) != NULL ) {
+ parseExpRange(str, mExposureBracketingValues, NULL,
+ mExposureGainBracketingModes,
+ EXP_BRACKET_RANGE, mExposureBracketingValidEntries);
+#ifndef OMAP_TUNA
+ if (mCapMode == OMXCameraAdapter::CP_CAM) {
+ mExposureBracketMode = OMX_BracketVectorShot;
+ } else {
+#endif
+ mExposureBracketMode = OMX_BracketExposureRelativeInEV;
+#ifndef OMAP_TUNA
+ }
+#endif
+ mPendingCaptureSettings |= SetExpBracket;
+ } else if ( (str = params.get(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE)) != NULL) {
+ parseExpRange(str, mExposureBracketingValues, mExposureGainBracketingValues,
+ mExposureGainBracketingModes,
+ EXP_BRACKET_RANGE, mExposureBracketingValidEntries);
+#ifndef OMAP_TUNA
+ if (mCapMode == OMXCameraAdapter::CP_CAM) {
+ mExposureBracketMode = OMX_BracketVectorShot;
+ } else {
+#endif
+ mExposureBracketMode = OMX_BracketExposureGainAbsolute;
+#ifndef OMAP_TUNA
+ }
+#endif
+ mPendingCaptureSettings |= SetExpBracket;
} else {
// if bracketing was previously set...we set again before capturing to clear
- if (mExposureBracketingValidEntries) mPendingCaptureSettings |= SetExpBracket;
- mExposureBracketingValidEntries = 0;
+ if (mExposureBracketingValidEntries) {
+ mPendingCaptureSettings |= SetExpBracket;
+ mExposureBracketingValidEntries = 0;
+ }
}
+ str = params.get(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE);
+ if ( NULL != str ) {
+ parseExpRange(str, mZoomBracketingValues, NULL, NULL,
+ ZOOM_BRACKET_RANGE, mZoomBracketingValidEntries);
+ mCurrentZoomBracketing = 0;
+ mZoomBracketingEnabled = true;
+ } else {
+ if (mZoomBracketingValidEntries) {
+ mZoomBracketingValidEntries = 0;
+ }
+ mZoomBracketingEnabled = false;
+ }
#endif
- varint = params.getInt(CameraParameters::KEY_ROTATION);
- if ( varint != -1 )
+ // Flush config queue
+ // If TRUE: Flush queue and abort processing before enqueing
+ valstr = params.get(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE);
+ if ( NULL != valstr ) {
+ if ( 0 == strcmp(valstr, android::CameraParameters::TRUE) ) {
+ mFlushShotConfigQueue = true;
+ } else if ( 0 == strcmp(valstr, android::CameraParameters::FALSE) ) {
+ mFlushShotConfigQueue = false;
+ } else {
+ CAMHAL_LOGE("Missing flush shot config parameter. Will use current (%s)",
+ mFlushShotConfigQueue ? "true" : "false");
+ }
+ }
+
+ if ( params.getInt(android::CameraParameters::KEY_ROTATION) != -1 )
{
- if ( ( unsigned int ) varint != mPictureRotation) {
+ if (params.getInt(android::CameraParameters::KEY_ROTATION) != (int) mPictureRotation) {
mPendingCaptureSettings |= SetRotation;
}
- mPictureRotation = varint;
+ mPictureRotation = params.getInt(android::CameraParameters::KEY_ROTATION);
}
else
{
@@ -152,110 +247,101 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
CAMHAL_LOGVB("Picture Rotation set %d", mPictureRotation);
#ifdef OMAP_ENHANCEMENT
-
// Read Sensor Orientation and set it based on perating mode
-
- varint = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
- if (( varint != -1 ) && (mCapMode == OMXCameraAdapter::VIDEO_MODE))
+ varint = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
+ if ( varint != -1 )
{
- mSensorOrientation = varint;
- if (mSensorOrientation == 270 ||mSensorOrientation==90)
- {
- CAMHAL_LOGEA(" Orientation is 270/90. So setting counter rotation to Ducati");
- mSensorOrientation +=180;
- mSensorOrientation%=360;
- }
- }
- else
+ mSensorOrientation = varint;
+ if (mSensorOrientation == 270 ||mSensorOrientation==90)
+ {
+ CAMHAL_LOGEA(" Orientation is 270/90. So setting counter rotation to Ducati");
+ mSensorOrientation +=180;
+ mSensorOrientation%=360;
+ }
+ }
+ else
{
- mSensorOrientation = 0;
+ mSensorOrientation = 0;
}
- CAMHAL_LOGVB("Sensor Orientation set : %d", mSensorOrientation);
+ CAMHAL_LOGVB("Sensor Orientation set : %d", mSensorOrientation);
+#endif
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
varint = params.getInt(TICameraParameters::KEY_BURST);
if ( varint >= 1 )
{
- if (varint != mBurstFrames) {
- mPendingCaptureSettings |= SetExpBracket;
+ if (varint != (int) mBurstFrames) {
+ mPendingCaptureSettings |= SetBurst;
}
mBurstFrames = varint;
}
else
{
- if (mBurstFrames != 1) mPendingCaptureSettings |= SetExpBracket;
+ if (mBurstFrames != 1) mPendingCaptureSettings |= SetBurst;
mBurstFrames = 1;
}
CAMHAL_LOGVB("Burst Frames set %d", mBurstFrames);
-
#endif
- varint = params.getInt(CameraParameters::KEY_JPEG_QUALITY);
- if ( ( varint >= MIN_JPEG_QUALITY ) &&
- ( varint <= MAX_JPEG_QUALITY ) )
- {
- if ( ( unsigned int ) varint != mPictureQuality) {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_QUALITY);
+ if ( varint >= MIN_JPEG_QUALITY && varint <= MAX_JPEG_QUALITY ) {
+ if (varint != mPictureQuality) {
mPendingCaptureSettings |= SetQuality;
+ mPictureQuality = varint;
}
- mPictureQuality = varint;
- }
- else
- {
- if (mPictureQuality != MAX_JPEG_QUALITY) mPendingCaptureSettings |= SetQuality;
- mPictureQuality = MAX_JPEG_QUALITY;
+ } else {
+ if (mPictureQuality != MAX_JPEG_QUALITY) {
+ mPendingCaptureSettings |= SetQuality;
+ mPictureQuality = MAX_JPEG_QUALITY;
}
+ }
CAMHAL_LOGVB("Picture Quality set %d", mPictureQuality);
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
- if ( varint >= 0 )
- {
- if ( ( unsigned int ) varint != mThumbWidth) {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ if ( varint >= 0 ) {
+ if (varint != mThumbWidth) {
mPendingCaptureSettings |= SetThumb;
+ mThumbWidth = varint;
}
- mThumbWidth = varint;
- }
- else
- {
- if (mThumbWidth != DEFAULT_THUMB_WIDTH) mPendingCaptureSettings |= SetThumb;
- mThumbWidth = DEFAULT_THUMB_WIDTH;
+ } else {
+ if (mThumbWidth != DEFAULT_THUMB_WIDTH) {
+ mPendingCaptureSettings |= SetThumb;
+ mThumbWidth = DEFAULT_THUMB_WIDTH;
}
-
+ }
CAMHAL_LOGVB("Picture Thumb width set %d", mThumbWidth);
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
- if ( varint >= 0 )
- {
- if ( ( unsigned int ) varint != mThumbHeight) {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ if ( varint >= 0 ) {
+ if (varint != mThumbHeight) {
mPendingCaptureSettings |= SetThumb;
+ mThumbHeight = varint;
}
- mThumbHeight = varint;
- }
- else
- {
- if (mThumbHeight != DEFAULT_THUMB_HEIGHT) mPendingCaptureSettings |= SetThumb;
- mThumbHeight = DEFAULT_THUMB_HEIGHT;
+ } else {
+ if (mThumbHeight != DEFAULT_THUMB_HEIGHT) {
+ mPendingCaptureSettings |= SetThumb;
+ mThumbHeight = DEFAULT_THUMB_HEIGHT;
}
-
+ }
CAMHAL_LOGVB("Picture Thumb height set %d", mThumbHeight);
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
- if ( ( varint >= MIN_JPEG_QUALITY ) &&
- ( varint <= MAX_JPEG_QUALITY ) )
- {
- if ( ( unsigned int ) varint != mThumbQuality) {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ if ( varint >= MIN_JPEG_QUALITY && varint <= MAX_JPEG_QUALITY ) {
+ if (varint != mThumbQuality) {
mPendingCaptureSettings |= SetThumb;
+ mThumbQuality = varint;
}
- mThumbQuality = varint;
- }
- else
- {
- if (mThumbQuality != MAX_JPEG_QUALITY) mPendingCaptureSettings |= SetThumb;
- mThumbQuality = MAX_JPEG_QUALITY;
+ } else {
+ if (mThumbQuality != MAX_JPEG_QUALITY) {
+ mPendingCaptureSettings |= SetThumb;
+ mThumbQuality = MAX_JPEG_QUALITY;
}
+ }
CAMHAL_LOGDB("Thumbnail Quality set %d", mThumbQuality);
@@ -263,19 +349,36 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
mPendingCaptureSettings = ECapturesettingsAll;
}
- if (mPendingCaptureSettings) {
+ // we are already capturing and in cpcam mode...just need to enqueue
+ // shots
+ inCaptureState = (CAPTURE_ACTIVE & mAdapterState) && (CAPTURE_ACTIVE & mNextState);
+ if ((mPendingCaptureSettings & ~SetExpBracket) && !inCaptureState) {
+ disableReprocess();
disableImagePort();
if ( NULL != mReleaseImageBuffersCallback ) {
mReleaseImageBuffersCallback(mReleaseData);
}
+ if (mPendingCaptureSettings & SetFormat) {
+ mPendingCaptureSettings &= ~SetFormat;
+ ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *cap);
+ if ( ret != NO_ERROR ) {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ }
}
+ cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+ cap->mWidth = params.getInt(TICameraParameters::RAW_WIDTH);
+ cap->mHeight = params.getInt(TICameraParameters::RAW_HEIGHT);
+
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+status_t OMXCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t bufferCount)
{
status_t ret = NO_ERROR;
OMXCameraPortParameters *imgCaptureData = NULL;
@@ -297,85 +400,196 @@ status_t OMXCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCou
if ( ret == NO_ERROR )
{
- length = imgCaptureData->mBufSize;
+ frame.mLength = imgCaptureData->mBufSize;
+ frame.mWidth = imgCaptureData->mWidth;
+ frame.mHeight = imgCaptureData->mHeight;
+ frame.mAlignment = imgCaptureData->mStride;
+ CAMHAL_LOGDB("getPictureBufferSize: width:%u height:%u alignment:%u length:%u",
+ frame.mWidth, frame.mHeight, frame.mAlignment, frame.mLength);
}
else
{
CAMHAL_LOGEB("setFormat() failed 0x%x", ret);
- length = 0;
}
}
- CAMHAL_LOGDB("getPictureBufferSize %d", length);
-
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+int OMXCameraAdapter::getBracketingValueMode(const char *a, const char *b) const
+{
+ BracketingValueMode bvm = BracketingValueAbsolute;
+
+ if ( (NULL != b) &&
+ (NULL != a) &&
+ (a < b) &&
+ ( (NULL != memchr(a, '+', b - a)) ||
+ (NULL != memchr(a, '-', b - a)) ) ) {
+ bvm = BracketingValueRelative;
+ }
+ return bvm;
+}
+
status_t OMXCameraAdapter::parseExpRange(const char *rangeStr,
- int * expRange,
+ int *expRange,
+ int *gainRange,
+ int *expGainModes,
size_t count,
size_t &validEntries)
{
status_t ret = NO_ERROR;
- char *ctx, *expVal;
- char *tmp = NULL;
+ char *end = NULL;
+ const char *startPtr = NULL;
size_t i = 0;
LOG_FUNCTION_NAME;
- if ( NULL == rangeStr )
- {
+ if ( NULL == rangeStr ){
return -EINVAL;
- }
+ }
- if ( NULL == expRange )
- {
+ if ( NULL == expRange ){
return -EINVAL;
- }
-
- if ( NO_ERROR == ret )
- {
- tmp = ( char * ) malloc( strlen(rangeStr) + 1 );
+ }
- if ( NULL == tmp )
- {
- CAMHAL_LOGEA("No resources for temporary buffer");
- return -1;
+ if ( NO_ERROR == ret ) {
+ startPtr = rangeStr;
+ do {
+ // Relative Exposure example: "-30,-10, 0, 10, 30"
+ // Absolute Gain ex. (exposure,gain) pairs: "(100,300),(200,300),(400,300),(800,300),(1600,300)"
+ // Relative Gain ex. (exposure,gain) pairs: "(-30,+0),(-10, +0),(+0,+0),(+10,+0),(+30,+0)"
+ // Forced relative Exposure example: "-30F,-10F, 0F, 10F, 30F"
+ // Forced absolute Gain ex. (exposure,gain) pairs: "(100,300)F,(200,300)F,(400,300)F,(800,300)F,(1600,300)F"
+ // Forced relative Gain ex. (exposure,gain) pairs: "(-30,+0)F,(-10, +0)F,(+0,+0)F,(+10,+0)F,(+30,+0)F"
+
+ // skip '(' and ','
+ while ((*startPtr == '(') || (*startPtr == ',')) startPtr++;
+
+ expRange[i] = (int)strtol(startPtr, &end, 10);
+
+ if (expGainModes) {
+ // if gainRange is given rangeStr should be (exposure, gain) pair
+ if (gainRange) {
+ int bvm_exp = getBracketingValueMode(startPtr, end);
+ startPtr = end + 1; // for the ','
+ gainRange[i] = (int)strtol(startPtr, &end, 10);
+
+ if (BracketingValueAbsolute == bvm_exp) {
+ expGainModes[i] = getBracketingValueMode(startPtr, end);
+ } else {
+ expGainModes[i] = bvm_exp;
+ }
+ } else {
+ expGainModes[i] = BracketingValueCompensation;
+ }
+ }
+ startPtr = end;
+
+ // skip ')'
+ while (*startPtr == ')') startPtr++;
+
+ // Check for "forced" key
+ if (expGainModes) {
+ while ((*startPtr == 'F') || (*startPtr == 'f')) {
+ if ( BracketingValueAbsolute == expGainModes[i] ) {
+ expGainModes[i] = BracketingValueAbsoluteForced;
+ } else if ( BracketingValueRelative == expGainModes[i] ) {
+ expGainModes[i] = BracketingValueRelativeForced;
+ } else if ( BracketingValueCompensation == expGainModes[i] ) {
+ expGainModes[i] = BracketingValueCompensationForced;
+ } else {
+ CAMHAL_LOGE("Unexpected old mode 0x%x", expGainModes[i]);
+ }
+ startPtr++;
+ }
}
- memset(tmp, '\0', strlen(rangeStr) + 1);
-
- }
-
- if ( NO_ERROR == ret )
- {
- strncpy(tmp, rangeStr, strlen(rangeStr) );
- expVal = strtok_r( (char *) tmp, CameraHal::PARAMS_DELIMITER, &ctx);
- i = 0;
- while ( ( NULL != expVal ) && ( i < count ) )
- {
- expRange[i] = atoi(expVal);
- expVal = strtok_r(NULL, CameraHal::PARAMS_DELIMITER, &ctx);
i++;
- }
+
+ } while ((startPtr[0] != '\0') && (i < count));
validEntries = i;
- }
+ }
- if ( NULL != tmp )
- {
- free(tmp);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::doExposureBracketing(int *evValues,
+ int *evValues2,
+ int *evModes2,
+ size_t evCount,
+ size_t frameCount,
+ bool flush,
+ OMX_BRACKETMODETYPE bracketMode)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState ) {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NULL == evValues ) {
+ CAMHAL_LOGEA("Exposure compensation values pointer is invalid");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret ) {
+#ifndef OMAP_TUNA
+ if (bracketMode == OMX_BracketVectorShot) {
+ ret = setVectorShot(evValues, evValues2, evModes2, evCount, frameCount, flush, bracketMode);
+ } else {
+#endif
+ ret = setExposureBracketing(evValues, evValues2, evCount, frameCount, bracketMode);
+#ifndef OMAP_TUNA
}
+#endif
+ }
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
- size_t evCount,
- size_t frameCount)
+#ifndef OMAP_TUNA
+status_t OMXCameraAdapter::setVectorStop(bool toPreview)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_VECTSHOTSTOPMETHODTYPE vecShotStop;
+
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR(&vecShotStop, OMX_TI_CONFIG_VECTSHOTSTOPMETHODTYPE);
+
+ vecShotStop.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ if (toPreview) {
+ vecShotStop.eStopMethod = OMX_TI_VECTSHOTSTOPMETHOD_GOTO_PREVIEW;
+ } else {
+ vecShotStop.eStopMethod = OMX_TI_VECTSHOTSTOPMETHOD_WAIT_IN_CAPTURE;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigVectShotStopMethod,
+ &vecShotStop);
+ if (OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while configuring bracket shot 0x%x", eError);
+ } else {
+ CAMHAL_LOGDA("Bracket shot configured successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::initVectorShot()
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
@@ -384,17 +598,171 @@ status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
LOG_FUNCTION_NAME;
- if ( OMX_StateInvalid == mComponentState )
- {
- CAMHAL_LOGEA("OMX component is in invalid state");
- ret = -EINVAL;
+ if (NO_ERROR == ret) {
+ OMX_INIT_STRUCT_PTR (&expCapMode, OMX_CONFIG_CAPTUREMODETYPE);
+ expCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ expCapMode.bFrameLimited = OMX_FALSE;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCaptureMode,
+ &expCapMode);
+ if (OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while configuring capture mode 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGDA("Camera capture mode configured successfully");
}
+ }
- if ( NULL == evValues )
- {
- CAMHAL_LOGEA("Exposure compensation values pointer is invalid");
- ret = -EINVAL;
+ if (NO_ERROR == ret) {
+ OMX_INIT_STRUCT_PTR (&extExpCapMode, OMX_CONFIG_EXTCAPTUREMODETYPE);
+ extExpCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ extExpCapMode.bEnableBracketing = OMX_TRUE;
+ extExpCapMode.tBracketConfigType.eBracketMode = OMX_BracketVectorShot;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigExtCaptureMode,
+ &extExpCapMode);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring extended capture mode 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGDA("Extended camera capture mode configured successfully");
+ }
+ }
+
+#ifndef OMAP_TUNA
+ if (NO_ERROR == ret) {
+ // set vector stop method to stop in capture
+ ret = setVectorStop(false);
+ }
+#endif
+
+ exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::setVectorShot(int *evValues,
+ int *evValues2,
+ int *evModes2,
+ size_t evCount,
+ size_t frameCount,
+ bool flush,
+ OMX_BRACKETMODETYPE bracketMode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_ENQUEUESHOTCONFIGS enqueueShotConfigs;
+ OMX_TI_CONFIG_QUERYAVAILABLESHOTS queryAvailableShots;
+ bool doFlush = flush;
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR(&enqueueShotConfigs, OMX_TI_CONFIG_ENQUEUESHOTCONFIGS);
+ OMX_INIT_STRUCT_PTR(&queryAvailableShots, OMX_TI_CONFIG_QUERYAVAILABLESHOTS);
+
+ queryAvailableShots.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigQueryAvailableShots,
+ &queryAvailableShots);
+ if (OMX_ErrorNone != eError) {
+ CAMHAL_LOGE("Error getting available shots 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGD("AVAILABLE SHOTS: %d", queryAvailableShots.nAvailableShots);
+ if (queryAvailableShots.nAvailableShots < evCount) {
+ // TODO(XXX): Need to implement some logic to handle this error
+ CAMHAL_LOGE("Not enough available shots to fulfill this queue request");
+ ret = -ENOSPC;
+ goto exit;
+ }
+ }
+
+ for ( unsigned int confID = 0; confID < evCount; ) {
+ unsigned int i;
+ for ( i = 0 ; (i < ARRAY_SIZE(enqueueShotConfigs.nShotConfig)) && (confID < evCount); i++, confID++ ) {
+ CAMHAL_LOGD("%2u: (%7d,%4d) mode: %d", confID, evValues[confID], evValues2[confID], evModes2[confID]);
+ enqueueShotConfigs.nShotConfig[i].nConfigId = confID;
+ enqueueShotConfigs.nShotConfig[i].nFrames = 1;
+ if ( (BracketingValueCompensation == evModes2[confID]) ||
+ (BracketingValueCompensationForced == evModes2[confID]) ) {
+ // EV compensation
+ enqueueShotConfigs.nShotConfig[i].nEC = evValues[confID];
+ enqueueShotConfigs.nShotConfig[i].nExp = 0;
+ enqueueShotConfigs.nShotConfig[i].nGain = 0;
+ } else {
+ // exposure,gain pair
+ enqueueShotConfigs.nShotConfig[i].nEC = 0;
+ enqueueShotConfigs.nShotConfig[i].nExp = evValues[confID];
+ enqueueShotConfigs.nShotConfig[i].nGain = evValues2[confID];
+ }
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_ABSOLUTE;
+ switch (evModes2[confID]) {
+ case BracketingValueAbsolute: // (exp,gain) pairs directly program sensor values
+ default :
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_ABSOLUTE;
+ break;
+ case BracketingValueRelative: // (exp,gain) pairs relative to AE settings and constraints
+ case BracketingValueCompensation: // EV compensation relative to AE settings and constraints
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_RELATIVE;
+ break;
+ case BracketingValueAbsoluteForced: // (exp,gain) pairs directly program sensor values
+ // are forced over constraints due to flicker, etc.
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_FORCE_ABSOLUTE;
+ break;
+ case BracketingValueRelativeForced: // (exp, gain) pairs relative to AE settings AND settings
+ case BracketingValueCompensationForced: // EV compensation relative to AE settings and constraints
+ // are forced over constraints due to flicker, etc.
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_FORCE_RELATIVE;
+ break;
+ }
+ enqueueShotConfigs.nShotConfig[i].bNoSnapshot = OMX_FALSE; // TODO: Make this configurable
+ }
+
+ // Repeat last exposure and again
+ if ((confID == evCount) && (evCount > 0) && (frameCount > evCount) && (0 != i)) {
+ enqueueShotConfigs.nShotConfig[i-1].nFrames = frameCount - evCount;
+ }
+
+ enqueueShotConfigs.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ enqueueShotConfigs.bFlushQueue = doFlush ? OMX_TRUE : OMX_FALSE;
+ enqueueShotConfigs.nNumConfigs = i;
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigEnqueueShotConfigs,
+ &enqueueShotConfigs);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring bracket shot 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGDA("Bracket shot configured successfully");
}
+ // Flush only first time
+ doFlush = false;
+ }
+
+ exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+#endif
+
+status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
+ int *evValues2,
+ size_t evCount,
+ size_t frameCount,
+ OMX_BRACKETMODETYPE bracketMode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_CAPTUREMODETYPE expCapMode;
+ OMX_CONFIG_EXTCAPTUREMODETYPE extExpCapMode;
+
+ LOG_FUNCTION_NAME;
if ( NO_ERROR == ret )
{
@@ -439,13 +807,23 @@ status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
else
{
extExpCapMode.bEnableBracketing = OMX_TRUE;
- extExpCapMode.tBracketConfigType.eBracketMode = OMX_BracketExposureRelativeInEV;
+ extExpCapMode.tBracketConfigType.eBracketMode = bracketMode;
extExpCapMode.tBracketConfigType.nNbrBracketingValues = evCount - 1;
}
for ( unsigned int i = 0 ; i < evCount ; i++ )
{
- extExpCapMode.tBracketConfigType.nBracketValues[i] = ( evValues[i] * ( 1 << Q16_OFFSET ) ) / 10;
+#ifndef OMAP_TUNA
+ if (bracketMode == OMX_BracketExposureGainAbsolute) {
+ extExpCapMode.tBracketConfigType.nBracketValues[i] = evValues[i];
+ extExpCapMode.tBracketConfigType.nBracketValues2[i] = evValues2[i];
+ } else {
+#endif
+ // assuming OMX_BracketExposureRelativeInEV
+ extExpCapMode.tBracketConfigType.nBracketValues[i] = ( evValues[i] * ( 1 << Q16_OFFSET ) ) / 10;
+#ifndef OMAP_TUNA
+ }
+#endif
}
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
@@ -538,7 +916,8 @@ status_t OMXCameraAdapter::doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader,
if ( NO_ERROR == ret )
{
- currentBufferIdx = ( unsigned int ) pBuffHeader->pAppPrivate;
+ CameraBuffer *buffer = (CameraBuffer *)pBuffHeader->pAppPrivate;
+ currentBufferIdx = buffer->index;
if ( currentBufferIdx >= imgCaptureData->mNumBufs)
{
@@ -558,8 +937,8 @@ status_t OMXCameraAdapter::doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader,
mBracketingBuffersQueued[nextBufferIdx] = true;
mBracketingBuffersQueuedCount++;
mLastBracetingBufferIdx = nextBufferIdx;
- setFrameRefCount(imgCaptureData->mBufferHeader[nextBufferIdx]->pBuffer, typeOfFrame, 1);
- returnFrame(imgCaptureData->mBufferHeader[nextBufferIdx]->pBuffer, typeOfFrame);
+ setFrameRefCount((CameraBuffer *)imgCaptureData->mBufferHeader[nextBufferIdx]->pAppPrivate, typeOfFrame, 1);
+ returnFrame((CameraBuffer *)imgCaptureData->mBufferHeader[nextBufferIdx]->pAppPrivate, typeOfFrame);
}
}
@@ -568,7 +947,7 @@ status_t OMXCameraAdapter::doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader,
return ret;
}
-status_t OMXCameraAdapter::sendBracketFrames()
+status_t OMXCameraAdapter::sendBracketFrames(size_t &framesSent)
{
status_t ret = NO_ERROR;
int currentBufferIdx;
@@ -577,6 +956,7 @@ status_t OMXCameraAdapter::sendBracketFrames()
LOG_FUNCTION_NAME;
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ framesSent = 0;
if ( OMX_StateExecuting != mComponentState )
{
@@ -599,6 +979,7 @@ status_t OMXCameraAdapter::sendBracketFrames()
imgCaptureData->mBufferHeader[currentBufferIdx],
imgCaptureData->mImageType,
imgCaptureData);
+ framesSent++;
}
} while ( currentBufferIdx != mLastBracetingBufferIdx );
@@ -625,7 +1006,7 @@ status_t OMXCameraAdapter::startBracketing(int range)
}
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( mBracketingEnabled )
{
@@ -644,7 +1025,7 @@ status_t OMXCameraAdapter::startBracketing(int range)
if ( NO_ERROR == ret )
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
mBracketingRange = range;
mBracketingBuffersQueued = new bool[imgCaptureData->mNumBufs];
@@ -657,6 +1038,7 @@ status_t OMXCameraAdapter::startBracketing(int range)
if ( NO_ERROR == ret )
{
mBracketingBuffersQueuedCount = imgCaptureData->mNumBufs;
+ mBurstFramesAccum = imgCaptureData->mNumBufs;
mLastBracetingBufferIdx = mBracketingBuffersQueuedCount - 1;
for ( int i = 0 ; i < imgCaptureData->mNumBufs ; i++ )
@@ -669,10 +1051,11 @@ status_t OMXCameraAdapter::startBracketing(int range)
if ( NO_ERROR == ret )
{
-
- ret = startImageCapture();
+ CachedCaptureParameters* cap_params = cacheCaptureParameters();
+ ret = startImageCapture(true, cap_params);
+ delete cap_params;
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( NO_ERROR == ret )
{
@@ -696,15 +1079,15 @@ status_t OMXCameraAdapter::stopBracketing()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mBracketingLock);
+ ret = stopImageCapture();
+
+ android::AutoMutex lock(mBracketingLock);
if ( NULL != mBracketingBuffersQueued )
{
delete [] mBracketingBuffersQueued;
}
- ret = stopImageCapture();
-
mBracketingBuffersQueued = NULL;
mBracketingEnabled = false;
mBracketingBuffersQueuedCount = 0;
@@ -715,16 +1098,17 @@ status_t OMXCameraAdapter::stopBracketing()
return ret;
}
-status_t OMXCameraAdapter::startImageCapture()
+status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParameters* capParams)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters * capData = NULL;
OMX_CONFIG_BOOLEANTYPE bOMX;
+ size_t bracketingSent = 0;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mImageCaptureLock);
+ android::AutoMutex lock(mImageCaptureLock);
if(!mCaptureConfigured)
{
@@ -738,25 +1122,42 @@ status_t OMXCameraAdapter::startImageCapture()
return NO_INIT;
}
- if ((getNextState() & (CAPTURE_ACTIVE|BRACKETING_ACTIVE)) == 0) {
- CAMHAL_LOGDA("trying starting capture when already canceled");
- return NO_ERROR;
+ if ( !bracketing ) {
+ if ((getNextState() & (CAPTURE_ACTIVE|BRACKETING_ACTIVE)) == 0) {
+ CAMHAL_LOGDA("trying starting capture when already canceled");
+ return NO_ERROR;
+ }
+ }
+
+ if (!capParams) {
+ CAMHAL_LOGE("Invalid cached parameters sent!");
+ return BAD_VALUE;
}
-#ifndef OMAP_TUNA
// Camera framework doesn't expect face callbacks once capture is triggered
pauseFaceDetection(true);
-#endif
//During bracketing image capture is already active
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( mBracketingEnabled )
{
//Stop bracketing, activate normal burst for the remaining images
mBracketingEnabled = false;
- mCapturedFrames = mBracketingRange;
- ret = sendBracketFrames();
+ ret = sendBracketFrames(bracketingSent);
+
+ // Check if we accumulated enough buffers
+ if ( bracketingSent < ( mBracketingRange - 1 ) )
+ {
+ mCapturedFrames = mBracketingRange + ( ( mBracketingRange - 1 ) - bracketingSent );
+ }
+ else
+ {
+ mCapturedFrames = mBracketingRange;
+ }
+ mBurstFramesQueued = 0;
+ mBurstFramesAccum = mCapturedFrames;
+
if(ret != NO_ERROR)
goto EXIT;
else
@@ -765,25 +1166,45 @@ status_t OMXCameraAdapter::startImageCapture()
}
if ( NO_ERROR == ret ) {
- if (mPendingCaptureSettings & SetRotation) {
+ if (capParams->mPendingCaptureSettings & SetRotation) {
mPendingCaptureSettings &= ~SetRotation;
ret = setPictureRotation(mPictureRotation);
if ( NO_ERROR != ret ) {
CAMHAL_LOGEB("Error configuring image rotation %x", ret);
}
}
- }
- // need to enable wb data for video snapshot to fill in exif data
- if ((ret == NO_ERROR) && (mCapMode == VIDEO_MODE)) {
- // video snapshot uses wb data from snapshot frame
- ret = setExtraData(true, mCameraAdapterParameters.mPrevPortIndex, OMX_WhiteBalance);
+ if (capParams->mPendingCaptureSettings & (SetBurst|SetExpBracket)) {
+ mPendingCaptureSettings &= ~(SetExpBracket|SetBurst);
+ if ( mBracketingSet ) {
+ ret = doExposureBracketing(capParams->mExposureBracketingValues,
+ capParams->mExposureGainBracketingValues,
+ capParams->mExposureGainBracketingModes,
+ 0,
+ 0,
+ capParams->mFlushShotConfigQueue,
+ capParams->mExposureBracketMode);
+ } else {
+ ret = doExposureBracketing(capParams->mExposureBracketingValues,
+ capParams->mExposureGainBracketingValues,
+ capParams->mExposureGainBracketingModes,
+ capParams->mExposureBracketingValidEntries,
+ capParams->mBurstFrames,
+ capParams->mFlushShotConfigQueue,
+ capParams->mExposureBracketMode);
+ }
+
+ if ( ret != NO_ERROR ) {
+ CAMHAL_LOGEB("setExposureBracketing() failed %d", ret);
+ goto EXIT;
+ }
+ }
}
- //OMX shutter callback events are only available in hq mode
- if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode))
- {
+ capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ //OMX shutter callback events are only available in hq mode
+ if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode)) {
if ( NO_ERROR == ret )
{
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
@@ -798,23 +1219,86 @@ status_t OMXCameraAdapter::startImageCapture()
ret = setShutterCallback(true);
}
- }
+ }
- if ( NO_ERROR == ret ) {
- capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ if (mPending3Asettings) {
+ apply3Asettings(mParameters3A);
+ }
+ // check is we are already in capture state...which means we are
+ // accumulating shots
+ if ((ret == NO_ERROR) && (mBurstFramesQueued > 0)) {
+ int index = 0;
+ int queued = 0;
+ android::AutoMutex lock(mBurstLock);
+
+ if (capParams->mFlushShotConfigQueue) {
+ // reset shot queue
+ mCapturedFrames = mBurstFrames;
+ mBurstFramesAccum = mBurstFrames;
+ mBurstFramesQueued = 0;
+ for ( int index = 0 ; index < capData->mNumBufs ; index++ ) {
+ if (OMXCameraPortParameters::FILL == capData->mStatus[index]) {
+ mBurstFramesQueued++;
+ }
+ }
+ } else {
+ mCapturedFrames += mBurstFrames;
+ mBurstFramesAccum += mBurstFrames;
+ }
+
+ while ((mBurstFramesQueued < mBurstFramesAccum) &&
+ (index < capData->mNumBufs) &&
+ (queued < capData->mMaxQueueable)) {
+ if (capData->mStatus[index] == OMXCameraPortParameters::IDLE) {
+ CAMHAL_LOGDB("Queuing buffer on Capture port - %p",
+ capData->mBufferHeader[index]->pBuffer);
+ capData->mStatus[index] = OMXCameraPortParameters::FILL;
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+ (OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ mBurstFramesQueued++;
+ queued++;
+ } else if (OMXCameraPortParameters::FILL == capData->mStatus[index]) {
+ queued++;
+ }
+ index++;
+ }
+ } else if ( NO_ERROR == ret ) {
///Queue all the buffers on capture port
- for ( int index = 0 ; index < capData->mNumBufs ; index++ ) {
- CAMHAL_LOGDB("Queuing buffer on Capture port - 0x%x",
- ( unsigned int ) capData->mBufferHeader[index]->pBuffer);
- eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+ for ( int index = 0 ; index < capData->mMaxQueueable ; index++ ) {
+ if (mBurstFramesQueued < mBurstFramesAccum) {
+ CAMHAL_LOGDB("Queuing buffer on Capture port - %p",
+ capData->mBufferHeader[index]->pBuffer);
+ capData->mStatus[index] = OMXCameraPortParameters::FILL;
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
(OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]);
-
+ mBurstFramesQueued++;
+ } else {
+ capData->mStatus[index] = OMXCameraPortParameters::IDLE;
+ }
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
}
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if (mRawCapture) {
+ capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+
+ ///Queue all the buffers on capture port
+ for ( int index = 0 ; index < capData->mNumBufs ; index++ ) {
+ CAMHAL_LOGDB("Queuing buffer on Video port (for RAW capture) - 0x%x", ( unsigned int ) capData->mBufferHeader[index]->pBuffer);
+ capData->mStatus[index] = OMXCameraPortParameters::FILL;
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+ (OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+ }
+#endif
+
mWaitingForSnapshot = true;
mCaptureSignalled = false;
+ mPendingCaptureSettings &= ~SetBurst;
// Capturing command is not needed when capturing in video mode
// Only need to queue buffers on image ports
@@ -834,9 +1318,9 @@ status_t OMXCameraAdapter::startImageCapture()
}
//OMX shutter callback events are only available in hq mode
+
if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode))
{
-
if ( NO_ERROR == ret )
{
ret = mStartCaptureSem.WaitTimeout(OMX_CAPTURE_TIMEOUT);
@@ -867,16 +1351,15 @@ status_t OMXCameraAdapter::startImageCapture()
}
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
- setExtraData(false, mCameraAdapterParameters.mPrevPortIndex, OMX_WhiteBalance);
mWaitingForSnapshot = false;
mCaptureSignalled = false;
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::stopImageCapture()
@@ -888,7 +1371,7 @@ status_t OMXCameraAdapter::stopImageCapture()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mImageCaptureLock);
+ android::AutoMutex lock(mImageCaptureLock);
if (!mCaptureConfigured) {
//Capture is not ongoing, return from here
@@ -900,9 +1383,13 @@ status_t OMXCameraAdapter::stopImageCapture()
goto EXIT;
}
+ // TODO(XXX): Reprocessing is currently piggy-backing capture commands
+ if (mAdapterState == REPROCESS_STATE) {
+ ret = stopReprocess();
+ }
+
//Disable the callback first
mWaitingForSnapshot = false;
- mSnapshotCount = 0;
// OMX shutter callback events are only available in hq mode
if ((HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode)) {
@@ -923,11 +1410,25 @@ status_t OMXCameraAdapter::stopImageCapture()
mStartCaptureSem.Create(0);
}
}
-
#ifndef OMAP_TUNA
- // After capture, face detection should be disabled
- // and application needs to restart face detection
- stopFaceDetection();
+ else if (CP_CAM == mCapMode) {
+ // Reset shot config queue
+ OMX_TI_CONFIG_ENQUEUESHOTCONFIGS resetShotConfigs;
+ OMX_INIT_STRUCT_PTR(&resetShotConfigs, OMX_TI_CONFIG_ENQUEUESHOTCONFIGS);
+
+ resetShotConfigs.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ resetShotConfigs.bFlushQueue = OMX_TRUE;
+ resetShotConfigs.nNumConfigs = 0;
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigEnqueueShotConfigs,
+ &resetShotConfigs);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while reset shot config 0x%x", eError);
+ goto EXIT;
+ } else {
+ CAMHAL_LOGDA("Shot config reset successfully");
+ }
+ }
#endif
//Wait here for the capture to be done, in worst case timeout and proceed with cleanup
@@ -956,23 +1457,42 @@ status_t OMXCameraAdapter::stopImageCapture()
}
}
- // had to enable wb data for video snapshot to fill in exif data
- // now that we are done...disable
- if ((ret == NO_ERROR) && (mCapMode == VIDEO_MODE)) {
- ret = setExtraData(false, mCameraAdapterParameters.mPrevPortIndex, OMX_WhiteBalance);
- }
+ // Disable WB and vector shot extra data for metadata
+ setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
+ // TODO: WA: if domx client disables VectShotInfo metadata on the image port, this causes
+ // VectShotInfo to be disabled internally on preview port also. Remove setting in OMXCapture
+ // setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_VectShotInfo);
+#ifndef OMAP_TUNA
+ setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
+#endif
CAMHAL_LOGDB("Capture set - 0x%x", eError);
mCaptureSignalled = true; //set this to true if we exited because of timeout
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
mFrameCount = 0;
mFirstFrameCondition.broadcast();
}
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ // Stop is always signalled externally in CPCAM mode
+ // We need to make sure we really stop
+ if ((mCapMode == CP_CAM)) {
+ disableReprocess();
+ disableImagePort();
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ }
+
+ // Moving code for below commit here as an optimization for continuous capture,
+ // so focus settings don't have to reapplied after each capture
+ // c78fa2a CameraHAL: Always reset focus mode after capture
+ // Workaround when doing many consecutive shots, CAF wasn't getting restarted.
+ mPending3Asettings |= SetFocus;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
@@ -982,20 +1502,21 @@ EXIT:
}
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
mFrameCount = 0;
mFirstFrameCondition.broadcast();
}
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::disableImagePort(){
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters *imgCaptureData = NULL;
+ OMXCameraPortParameters *imgRawCaptureData = NULL;
if (!mCaptureConfigured) {
return NO_ERROR;
@@ -1003,6 +1524,7 @@ status_t OMXCameraAdapter::disableImagePort(){
mCaptureConfigured = false;
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ imgRawCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex]; // for RAW capture
///Register for Image port Disable event
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
@@ -1052,19 +1574,165 @@ status_t OMXCameraAdapter::disableImagePort(){
goto EXIT;
}
- EXIT:
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ deinitInternalBuffers(mCameraAdapterParameters.mImagePortIndex);
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if (mRawCapture) {
+ ///Register for Video port Disable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ mStopCaptureSem);
+ ///Disable RawCapture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ ///Free all the buffers on RawCapture port
+ if (imgRawCaptureData) {
+ CAMHAL_LOGDB("Freeing buffer on Capture port - %d", imgRawCaptureData->mNumBufs);
+ for ( int index = 0 ; index < imgRawCaptureData->mNumBufs ; index++) {
+ CAMHAL_LOGDB("Freeing buffer on Capture port - 0x%x", ( unsigned int ) imgRawCaptureData->mBufferHeader[index]->pBuffer);
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mVideoPortIndex,
+ (OMX_BUFFERHEADERTYPE*)imgRawCaptureData->mBufferHeader[index]);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+ }
+ CAMHAL_LOGDA("Waiting for Video port disable");
+ //Wait for the image port enable event
+ mStopCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ CAMHAL_LOGDA("Video Port disabled");
+ }
+#endif
+
+EXIT:
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
+status_t OMXCameraAdapter::initInternalBuffers(OMX_U32 portIndex)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ int index = 0;
+#ifndef OMAP_TUNA
+ OMX_TI_PARAM_USEBUFFERDESCRIPTOR bufferdesc;
+
+ /* Indicate to Ducati that we're planning to use dynamically-mapped buffers */
+ OMX_INIT_STRUCT_PTR (&bufferdesc, OMX_TI_PARAM_USEBUFFERDESCRIPTOR);
+ bufferdesc.nPortIndex = portIndex;
+ bufferdesc.bEnabled = OMX_FALSE;
+ bufferdesc.eBufferType = OMX_TI_BufferTypePhysicalPageList;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexUseBufferDescriptor,
+ &bufferdesc);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ return -EINVAL;
+ }
+
+ CAMHAL_LOGDA("Initializing internal buffers");
+ do {
+ OMX_TI_PARAM_COMPONENTBUFALLOCTYPE bufferalloc;
+ OMX_TI_PARAM_COMPONENTBUFALLOCTYPE bufferallocset;
+ OMX_INIT_STRUCT_PTR (&bufferalloc, OMX_TI_PARAM_COMPONENTBUFALLOCTYPE);
+ bufferalloc.nPortIndex = portIndex;
+ bufferalloc.nIndex = index;
-status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
+ eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamComponentBufferAllocation,
+ &bufferalloc);
+ if (eError == OMX_ErrorNoMore) {
+ return NO_ERROR;
+ }
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("GetParameter failed error = 0x%x", eError);
+ break;
+ }
+
+ CAMHAL_LOGDB("Requesting buftype %d of size %dx%d",
+ (int)bufferalloc.eBufType, (int)bufferalloc.nAllocWidth,
+ (int)bufferalloc.nAllocLines);
+
+ bufferalloc.eBufType = OMX_TI_BufferTypeHardwareReserved1D;
+
+ OMX_INIT_STRUCT_PTR (&bufferallocset, OMX_TI_PARAM_COMPONENTBUFALLOCTYPE);
+ bufferallocset.nPortIndex = portIndex;
+ bufferallocset.nIndex = index;
+ bufferallocset.eBufType = OMX_TI_BufferTypeHardwareReserved1D;
+ bufferallocset.nAllocWidth = bufferalloc.nAllocWidth;
+ bufferallocset.nAllocLines = bufferalloc.nAllocLines;
+
+ eError = OMX_SetParameter (mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamComponentBufferAllocation,
+ &bufferallocset);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("SetParameter failed, error=%08x", eError);
+ if (eError == OMX_ErrorNoMore) return NO_ERROR;
+ break;
+ }
+
+ index++;
+
+ /* 1 is an arbitrary limit */
+ } while (index < 1);
+
+ CAMHAL_LOGV("Ducati requested too many (>1) internal buffers");
+
+ return -EINVAL;
+#endif
+ return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::deinitInternalBuffers(OMX_U32 portIndex)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+#ifndef OMAP_TUNA
+ OMX_TI_PARAM_USEBUFFERDESCRIPTOR bufferdesc;
+
+ OMX_INIT_STRUCT_PTR (&bufferdesc, OMX_TI_PARAM_USEBUFFERDESCRIPTOR);
+ bufferdesc.nPortIndex = portIndex;
+ bufferdesc.bEnabled = OMX_FALSE;
+ bufferdesc.eBufferType = OMX_TI_BufferTypeDefault;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexUseBufferDescriptor,
+ &bufferdesc);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ return -EINVAL;
+ }
+
+ OMX_TI_PARAM_COMPONENTBUFALLOCTYPE bufferalloc;
+ OMX_INIT_STRUCT_PTR (&bufferalloc, OMX_TI_PARAM_COMPONENTBUFALLOCTYPE);
+ bufferalloc.nPortIndex = portIndex;
+ bufferalloc.eBufType = OMX_TI_BufferTypeDefault;
+ bufferalloc.nAllocWidth = 1;
+ bufferalloc.nAllocLines = 1;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexParamComponentBufferAllocation,
+ &bufferalloc);
+#endif
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ return -EINVAL;
+ }
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::UseBuffersCapture(CameraBuffer * bufArr, int num)
{
LOG_FUNCTION_NAME;
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters * imgCaptureData = NULL;
- uint32_t *buffers = (uint32_t*)bufArr;
OMXCameraPortParameters cap;
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
@@ -1077,7 +1745,6 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
// capture is already configured...we can skip this step
if (mCaptureConfigured) {
-
if ( NO_ERROR == ret )
{
ret = setupEXIF();
@@ -1088,6 +1755,7 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
}
mCapturedFrames = mBurstFrames;
+ mBurstFramesQueued = 0;
return NO_ERROR;
}
@@ -1117,16 +1785,6 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
}
}
- if (mPendingCaptureSettings & SetExpBracket) {
- mPendingCaptureSettings &= ~SetExpBracket;
- ret = setExposureBracketing( mExposureBracketingValues,
- mExposureBracketingValidEntries, mBurstFrames);
- if ( ret != NO_ERROR ) {
- CAMHAL_LOGEB("setExposureBracketing() failed %d", ret);
- goto EXIT;
- }
- }
-
if (mPendingCaptureSettings & SetQuality) {
mPendingCaptureSettings &= ~SetQuality;
ret = setImageQuality(mPictureQuality);
@@ -1136,6 +1794,12 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
}
}
+ // assuming if buffers are from ANW that they will be pagelist buffers
+ // and need a tiler reservation
+ if (bufArr[0].type == CAMERA_BUFFER_ANW) {
+ initInternalBuffers(mCameraAdapterParameters.mImagePortIndex);
+ }
+
///Register for Image port ENABLE event
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
@@ -1152,24 +1816,47 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+ // Configure DOMX to use either gralloc handles or vptrs
+ if ((imgCaptureData->mNumBufs > 0)) {
+ OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+ OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
+
+ domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ if (bufArr[0].type == CAMERA_BUFFER_ANW) {
+ CAMHAL_LOGD ("Using ANW Buffers");
+ domxUseGrallocHandles.bEnable = OMX_TRUE;
+ } else {
+ CAMHAL_LOGD ("Using ION Buffers");
+ domxUseGrallocHandles.bEnable = OMX_FALSE;
+ }
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+
for ( int index = 0 ; index < imgCaptureData->mNumBufs ; index++ )
{
OMX_BUFFERHEADERTYPE *pBufferHdr;
CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d",
- (unsigned int)buffers[index],
+ (unsigned int)bufArr[index].opaque,
(int)imgCaptureData->mBufSize);
eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp,
&pBufferHdr,
mCameraAdapterParameters.mImagePortIndex,
0,
- mCaptureBuffersLength,
- (OMX_U8*)buffers[index]);
+ imgCaptureData->mBufSize,
+ (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
- pBufferHdr->pAppPrivate = (OMX_PTR) index;
+ pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+ bufArr[index].index = index;
pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
pBufferHdr->nVersion.s.nVersionMajor = 1 ;
pBufferHdr->nVersion.s.nVersionMinor = 1 ;
@@ -1213,21 +1900,197 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
}
}
+ // Enable WB and vector shot extra data for metadata
+ setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
+ // TODO: WA: if domx client disables VectShotInfo metadata on the image port, this causes
+ // VectShotInfo to be disabled internally on preview port also. Remove setting in OMXCapture
+ // setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_TI_VectShotInfo);
+#ifndef OMAP_TUNA
+ setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
+
+ // CPCam mode only supports vector shot
+ // Regular capture is not supported
+ if (mCapMode == CP_CAM) initVectorShot();
+
+ // Choose proper single preview mode for cpcapture capture (reproc or hs)
+ if (( NO_ERROR == ret) && (OMXCameraAdapter::CP_CAM == mCapMode)) {
+ OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE singlePrevMode;
+ OMX_INIT_STRUCT_PTR (&singlePrevMode, OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE);
+ if (mNextState == LOADED_CAPTURE_STATE) {
+ singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
+ } else if (mNextState == LOADED_REPROCESS_CAPTURE_STATE) {
+ singlePrevMode.eMode = OMX_TI_SinglePreviewMode_Reprocess;
+ } else {
+ CAMHAL_LOGE("Wrong state trying to start a capture in CPCAM mode?");
+ singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
+ }
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigSinglePreviewMode,
+ &singlePrevMode);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring single preview mode 0x%x", eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
+ } else {
+ CAMHAL_LOGDA("single preview mode configured successfully");
+ }
+ }
+#endif
+
mCapturedFrames = mBurstFrames;
+ mBurstFramesAccum = mBurstFrames;
+ mBurstFramesQueued = 0;
+
mCaptureConfigured = true;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if (mRawCapture) {
+ mCaptureConfigured = false;
+ }
+#endif
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
+ // TODO: WA: if domx client disables VectShotInfo metadata on the image port, this causes
+ // VectShotInfo to be disabled internally on preview port also. Remove setting in OMXCapture
+ // setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_VectShotInfo);
+#ifndef OMAP_TUNA
+ setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
+#endif
//Release image buffers
if ( NULL != mReleaseImageBuffersCallback ) {
mReleaseImageBuffersCallback(mReleaseData);
}
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+}
+status_t OMXCameraAdapter::UseBuffersRawCapture(CameraBuffer *bufArr, int num)
+{
+ LOG_FUNCTION_NAME
+ status_t ret;
+ OMX_ERRORTYPE eError;
+ OMXCameraPortParameters * imgRawCaptureData = NULL;
+ Utils::Semaphore camSem;
+ OMXCameraPortParameters cap;
+
+ imgRawCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+
+ if (mCaptureConfigured) {
+ return NO_ERROR;
+ }
+
+ camSem.Create();
+
+ // mWaitingForSnapshot is true only when we're in the process of capturing
+ if (mWaitingForSnapshot) {
+ ///Register for Video port Disable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ (OMX_EVENTTYPE) OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ camSem);
+
+ ///Disable Capture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+
+ CAMHAL_LOGDA("Waiting for port disable");
+ //Wait for the image port enable event
+ camSem.Wait();
+ CAMHAL_LOGDA("Port disabled");
+ }
+
+ imgRawCaptureData->mNumBufs = num;
+ CAMHAL_LOGDB("RAW Max sensor width = %d", (int)imgRawCaptureData->mWidth);
+ CAMHAL_LOGDB("RAW Max sensor height = %d", (int)imgRawCaptureData->mHeight);
+
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_VIDEO, *imgRawCaptureData);
+
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+ }
+
+ ///Register for Video port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ (OMX_EVENTTYPE) OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ camSem);
+
+ ///Enable Video Capture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+
+ mCaptureBuffersLength = (int)imgRawCaptureData->mBufSize;
+ for ( int index = 0 ; index < imgRawCaptureData->mNumBufs ; index++ ) {
+ OMX_BUFFERHEADERTYPE *pBufferHdr;
+ CAMHAL_LOGDB("OMX_UseBuffer rawCapture address: 0x%x, size = %d ",
+ (unsigned int)bufArr[index].opaque,
+ (int)imgRawCaptureData->mBufSize );
+
+ eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
+ &pBufferHdr,
+ mCameraAdapterParameters.mVideoPortIndex,
+ 0,
+ mCaptureBuffersLength,
+ (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_UseBuffer = 0x%x", eError);
+ }
+
+ GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+ pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+ bufArr[index].index = index;
+ pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufferHdr->nVersion.s.nRevision = 0;
+ pBufferHdr->nVersion.s.nStep = 0;
+ imgRawCaptureData->mBufferHeader[index] = pBufferHdr;
+
+ }
+
+ //Wait for the image port enable event
+ CAMHAL_LOGDA("Waiting for port enable");
+ camSem.Wait();
+ CAMHAL_LOGDA("Port enabled");
+
+ if (NO_ERROR == ret) {
+ ret = setupEXIF();
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error configuring EXIF Buffer %x", ret);
+ }
+ }
+
+ mCapturedFrames = mBurstFrames;
+ mBurstFramesQueued = 0;
+ mCaptureConfigured = true;
+
+ EXIT:
+
+ if (eError != OMX_ErrorNone) {
+ if ( NULL != mErrorNotifier )
+ {
+ mErrorNotifier->errorNotify(eError);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXDccDataSave.cpp b/camera/OMXCameraAdapter/OMXDccDataSave.cpp
new file mode 100644
index 0000000..7547743
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXDccDataSave.cpp
@@ -0,0 +1,361 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXDccDataSave.cpp
+*
+* This file contains functionality for handling DCC data save
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+
+namespace Ti {
+namespace Camera {
+
+status_t OMXCameraAdapter::initDccFileDataSave(OMX_HANDLETYPE* omxHandle, int portIndex)
+{
+ OMX_CONFIG_EXTRADATATYPE extraDataControl;
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR (&extraDataControl, OMX_CONFIG_EXTRADATATYPE);
+ extraDataControl.nPortIndex = portIndex;
+ extraDataControl.eExtraDataType = OMX_TI_DccData;
+ extraDataControl.bEnable = OMX_TRUE;
+
+ eError = OMX_SetConfig(*omxHandle,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigOtherExtraDataControl,
+ &extraDataControl);
+
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring dcc data overwrite extra data 0x%x",
+ eError);
+
+ ret = NO_INIT;
+ }
+
+ if (mDccData.pData) {
+ free(mDccData.pData);
+ mDccData.pData = NULL;
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::sniffDccFileDataSave(OMX_BUFFERHEADERTYPE* pBuffHeader)
+{
+ OMX_OTHER_EXTRADATATYPE *extraData;
+ OMX_TI_DCCDATATYPE* dccData;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mDccDataLock);
+
+ if ( NULL == pBuffHeader ) {
+ CAMHAL_LOGEA("Invalid Buffer header");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ extraData = getExtradata(pBuffHeader->pPlatformPrivate,
+ (OMX_EXTRADATATYPE)OMX_TI_DccData);
+
+ if ( NULL != extraData ) {
+ CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
+ extraData->nSize,
+ sizeof(OMX_OTHER_EXTRADATATYPE),
+ extraData->eType,
+ extraData->nDataSize,
+ extraData->nPortIndex,
+ extraData->nVersion);
+ } else {
+ CAMHAL_LOGVA("Invalid OMX_TI_DCCDATATYPE");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ dccData = ( OMX_TI_DCCDATATYPE * ) extraData->data;
+
+ if (NULL == dccData) {
+ CAMHAL_LOGVA("OMX_TI_DCCDATATYPE is not found in extra data");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ if (mDccData.pData) {
+ free(mDccData.pData);
+ }
+
+ memcpy(&mDccData, dccData, sizeof(mDccData));
+
+ int dccDataSize = (int)dccData->nSize - (int)(&(((OMX_TI_DCCDATATYPE*)0)->pData));
+
+ mDccData.pData = (OMX_PTR)malloc(dccDataSize);
+
+ if (NULL == mDccData.pData) {
+ CAMHAL_LOGVA("not enough memory for DCC data");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ memcpy(mDccData.pData, &(dccData->pData), dccDataSize);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+// Recursively searches given directory contents for the correct DCC file.
+// The directory must be opened and its stream pointer + path passed
+// as arguments. As this function is called recursively, to avoid excessive
+// stack usage the path param is reused -> this MUST be char array with
+// enough length!!! (260 should suffice). Path must end with "/".
+// The directory must also be closed in the caller function.
+// If the correct camera DCC file is found (based on the OMX measurement data)
+// its file stream pointer is returned. NULL is returned otherwise
+FILE * OMXCameraAdapter::parseDCCsubDir(DIR *pDir, char *path)
+{
+ FILE *pFile;
+ DIR *pSubDir;
+ struct dirent *dirEntry;
+ int initialPathLength = strlen(path);
+
+ LOG_FUNCTION_NAME;
+
+ /* check each directory entry */
+ while ((dirEntry = readdir(pDir)) != NULL)
+ {
+ if (dirEntry->d_name[0] == '.')
+ continue;
+
+ strcat(path, dirEntry->d_name);
+ // dirEntry might be sub directory -> check it
+ pSubDir = opendir(path);
+ if (pSubDir) {
+ // dirEntry is sub directory -> parse it
+ strcat(path, "/");
+ pFile = parseDCCsubDir(pSubDir, path);
+ closedir(pSubDir);
+ if (pFile) {
+ // the correct DCC file found!
+ LOG_FUNCTION_NAME_EXIT;
+ return pFile;
+ }
+ } else {
+ // dirEntry is file -> open it
+ pFile = fopen(path, "rb");
+ if (pFile) {
+ // now check if this is the correct DCC file for that camera
+ OMX_U32 dccFileIDword;
+ OMX_U32 *dccFileDesc = (OMX_U32 *) &mDccData.nCameraModuleId;
+ int i;
+
+ // DCC file ID is 3 4-byte words
+ for (i = 0; i < 3; i++) {
+ if (fread(&dccFileIDword, sizeof(OMX_U32), 1, pFile) != 1) {
+ // file too short
+ break;
+ }
+ if (dccFileIDword != dccFileDesc[i]) {
+ // DCC file ID word i does not match
+ break;
+ }
+ }
+
+ fclose(pFile);
+ if (i == 3) {
+ // the correct DCC file found!
+ CAMHAL_LOGDB("DCC file to be updated: %s", path);
+ // reopen it for modification
+ pFile = fopen(path, "rb+");
+ if (!pFile)
+ CAMHAL_LOGEB("ERROR: DCC file %s failed to open for modification", path);
+ LOG_FUNCTION_NAME_EXIT;
+ return pFile;
+ }
+ } else {
+ CAMHAL_LOGEB("ERROR: Failed to open file %s for reading", path);
+ }
+ }
+ // restore original path
+ path[initialPathLength] = '\0';
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ // DCC file not found in this directory tree
+ return NULL;
+}
+
+// Finds the DCC file corresponding to the current camera based on the
+// OMX measurement data, opens it and returns the file stream pointer
+// (NULL on error or if file not found).
+// The folder string dccFolderPath must end with "/"
+FILE * OMXCameraAdapter::fopenCameraDCC(const char *dccFolderPath)
+{
+ FILE *pFile;
+ DIR *pDir;
+ char dccPath[260];
+
+ LOG_FUNCTION_NAME;
+
+ strcpy(dccPath, dccFolderPath);
+
+ pDir = opendir(dccPath);
+ if (!pDir) {
+ CAMHAL_LOGEB("ERROR: Opening DCC directory %s failed", dccPath);
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+ }
+
+ pFile = parseDCCsubDir(pDir, dccPath);
+ closedir(pDir);
+ if (pFile) {
+ CAMHAL_LOGDB("DCC file %s opened for modification", dccPath);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return pFile;
+}
+
+// Positions the DCC file stream pointer to the correct offset within the
+// correct usecase based on the OMX mesurement data. Returns 0 on success
+status_t OMXCameraAdapter::fseekDCCuseCasePos(FILE *pFile)
+{
+ OMX_U32 dccNumUseCases = 0;
+ OMX_U32 dccUseCaseData[3];
+ OMX_U32 i;
+
+ LOG_FUNCTION_NAME;
+
+ // position the file pointer to the DCC use cases section
+ if (fseek(pFile, 80, SEEK_SET)) {
+ CAMHAL_LOGEA("ERROR: Unexpected end of DCC file");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ if (fread(&dccNumUseCases, sizeof(OMX_U32), 1, pFile) != 1 ||
+ dccNumUseCases == 0) {
+ CAMHAL_LOGEA("ERROR: DCC file contains 0 use cases");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ for (i = 0; i < dccNumUseCases; i++) {
+ if (fread(dccUseCaseData, sizeof(OMX_U32), 3, pFile) != 3) {
+ CAMHAL_LOGEA("ERROR: Unexpected end of DCC file");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ if (dccUseCaseData[0] == mDccData.nUseCaseId) {
+ // DCC use case match!
+ break;
+ }
+ }
+
+ if (i == dccNumUseCases) {
+ CAMHAL_LOGEB("ERROR: Use case ID %lu not found in DCC file", mDccData.nUseCaseId);
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ // dccUseCaseData[1] is the offset to the beginning of the actual use case
+ // from the beginning of the file
+ // mDccData.nOffset is the offset within the actual use case (from the
+ // beginning of the use case to the data to be modified)
+
+ if (fseek(pFile,dccUseCaseData[1] + mDccData.nOffset, SEEK_SET ))
+ {
+ CAMHAL_LOGEA("ERROR: Error setting the correct offset");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::saveDccFileDataSave()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mDccDataLock);
+
+ if (mDccData.pData)
+ {
+ FILE *fd = fopenCameraDCC(DCC_PATH);
+
+ if (fd)
+ {
+ if (!fseekDCCuseCasePos(fd))
+ {
+ int dccDataSize = (int)mDccData.nSize - (int)(&(((OMX_TI_DCCDATATYPE*)0)->pData));
+
+ if (fwrite(mDccData.pData, dccDataSize, 1, fd) != 1)
+ {
+ CAMHAL_LOGEA("ERROR: Writing to DCC file failed");
+ }
+ else
+ {
+ CAMHAL_LOGDA("DCC file successfully updated");
+ }
+ }
+ fclose(fd);
+ }
+ else
+ {
+ CAMHAL_LOGEA("ERROR: Correct DCC file not found or failed to open for modification");
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::closeDccFileDataSave()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mDccDataLock);
+
+ if (mDccData.pData) {
+ free(mDccData.pData);
+ mDccData.pData = NULL;
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXDefaults.cpp b/camera/OMXCameraAdapter/OMXDefaults.cpp
index aff38d1..2928573 100644
--- a/camera/OMXCameraAdapter/OMXDefaults.cpp
+++ b/camera/OMXCameraAdapter/OMXDefaults.cpp
@@ -24,10 +24,8 @@
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
-namespace android {
-
-#undef LOG_TAG
-#define LOG_TAG "CameraHAL"
+namespace Ti {
+namespace Camera {
#define __STRINGIFY(s) __STRING(s)
@@ -42,42 +40,44 @@ const char OMXCameraAdapter::DEFAULT_EXPOSURE_MODE[] = "auto";
const char OMXCameraAdapter::DEFAULT_FLASH_MODE[] = "off";
const char OMXCameraAdapter::DEFAULT_FOCUS_MODE_PREFERRED[] = "auto";
const char OMXCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity";
-const char OMXCameraAdapter::DEFAULT_FRAMERATE_RANGE_IMAGE[] = "15000,30000";
-const char OMXCameraAdapter::DEFAULT_FRAMERATE_RANGE_VIDEO[]="24000,30000";
const char OMXCameraAdapter::DEFAULT_IPP[] = "ldc-nsf";
-const char OMXCameraAdapter::DEFAULT_GBCE[] = "disable";
const char OMXCameraAdapter::DEFAULT_ISO_MODE[] = "auto";
const char OMXCameraAdapter::DEFAULT_JPEG_QUALITY[] = "95";
const char OMXCameraAdapter::DEFAULT_THUMBNAIL_QUALITY[] = "60";
const char OMXCameraAdapter::DEFAULT_THUMBNAIL_SIZE[] = "160x120";
const char OMXCameraAdapter::DEFAULT_PICTURE_FORMAT[] = "jpeg";
+const char OMXCameraAdapter::DEFAULT_S3D_PICTURE_LAYOUT[] = "tb-full";
const char OMXCameraAdapter::DEFAULT_PICTURE_SIZE[] = "320x240";
+const char OMXCameraAdapter::DEFAULT_PICTURE_SS_SIZE[] = "640x240";
+const char OMXCameraAdapter::DEFAULT_PICTURE_TB_SIZE[] = "320x480";
const char OMXCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv420sp";
const char OMXCameraAdapter::DEFAULT_FRAMERATE[] = "30";
+const char OMXCameraAdapter::DEFAULT_S3D_PREVIEW_LAYOUT[] = "tb-subsampled";
const char OMXCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "640x480";
+const char OMXCameraAdapter::DEFAULT_PREVIEW_SS_SIZE[] = "1280x480";
+const char OMXCameraAdapter::DEFAULT_PREVIEW_TB_SIZE[] = "640x960";
const char OMXCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6";
const char OMXCameraAdapter::DEFAULT_NUM_PIC_BUFS[] = "1";
-const char OMXCameraAdapter::DEFAULT_MAX_FOCUS_AREAS[] = "1";
const char OMXCameraAdapter::DEFAULT_SATURATION[] = "100";
const char OMXCameraAdapter::DEFAULT_SCENE_MODE[] = "auto";
const char OMXCameraAdapter::DEFAULT_SHARPNESS[] = "100";
-const char OMXCameraAdapter::DEFAULT_VSTAB[] = "false";
-const char OMXCameraAdapter::DEFAULT_VSTAB_SUPPORTED[] = "true";
+const char * OMXCameraAdapter::DEFAULT_VSTAB = android::CameraParameters::FALSE;
+const char * OMXCameraAdapter::DEFAULT_VNF = android::CameraParameters::FALSE;
const char OMXCameraAdapter::DEFAULT_WB[] = "auto";
const char OMXCameraAdapter::DEFAULT_ZOOM[] = "0";
const char OMXCameraAdapter::DEFAULT_MAX_FD_HW_FACES[] = __STRINGIFY(MAX_NUM_FACES_SUPPORTED);
const char OMXCameraAdapter::DEFAULT_MAX_FD_SW_FACES[] = "0";
-const char OMXCameraAdapter::DEFAULT_FOCAL_LENGTH_PRIMARY[] = "3.43";
-const char OMXCameraAdapter::DEFAULT_FOCAL_LENGTH_SECONDARY[] = "1.95";
const char OMXCameraAdapter::DEFAULT_HOR_ANGLE[] = "54.8";
const char OMXCameraAdapter::DEFAULT_VER_ANGLE[] = "42.5";
-const char OMXCameraAdapter::DEFAULT_AE_LOCK[] = "false";
-const char OMXCameraAdapter::DEFAULT_AWB_LOCK[] = "false";
-const char OMXCameraAdapter::DEFAULT_MAX_NUM_METERING_AREAS[] = "0";
-const char OMXCameraAdapter::DEFAULT_LOCK_SUPPORTED[] = "true";
-const char OMXCameraAdapter::DEFAULT_LOCK_UNSUPPORTED[] = "false";
-const char OMXCameraAdapter::DEFAULT_VIDEO_SNAPSHOT_SUPPORTED[] = "true";
+const char * OMXCameraAdapter::DEFAULT_AE_LOCK = android::CameraParameters::FALSE;
+const char * OMXCameraAdapter::DEFAULT_AWB_LOCK = android::CameraParameters::FALSE;
const char OMXCameraAdapter::DEFAULT_VIDEO_SIZE[] = "1920x1080";
-const char OMXCameraAdapter::DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "1920x1080";
-};
+const char OMXCameraAdapter::DEFAULT_SENSOR_ORIENTATION[] = "0";
+const char OMXCameraAdapter::DEFAULT_AUTOCONVERGENCE_MODE[] = "frame";
+const char OMXCameraAdapter::DEFAULT_MANUAL_CONVERGENCE[] = "0";
+const char * OMXCameraAdapter::DEFAULT_MECHANICAL_MISALIGNMENT_CORRECTION_MODE = android::CameraParameters::TRUE;
+const char OMXCameraAdapter::DEFAULT_EXIF_MAKE[] = "default_make";
+const char OMXCameraAdapter::DEFAULT_EXIF_MODEL[] = "default_model";
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXExif.cpp b/camera/OMXCameraAdapter/OMXExif.cpp
index 76d94bd..cbf7c1c 100644
--- a/camera/OMXCameraAdapter/OMXExif.cpp
+++ b/camera/OMXCameraAdapter/OMXExif.cpp
@@ -21,17 +21,14 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include <math.h>
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersEXIF(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -40,7 +37,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
LOG_FUNCTION_NAME;
- if( ( valstr = params.get(CameraParameters::KEY_GPS_LATITUDE) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_LATITUDE) ) != NULL )
{
gpsPos = strtod(valstr, NULL);
@@ -72,7 +69,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mLatValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_LONGITUDE) ) != NULL )
{
gpsPos = strtod(valstr, NULL);
@@ -104,7 +101,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mLongValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_ALTITUDE) ) != NULL )
{
gpsPos = strtod(valstr, NULL);
mEXIFData.mGPSData.mAltitude = floor(fabs(gpsPos));
@@ -120,7 +117,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mAltitudeValid= false;
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
{
long gpsTimestamp = strtol(valstr, NULL, 10);
struct tm *timeinfo = gmtime( ( time_t * ) & (gpsTimestamp) );
@@ -141,7 +138,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mTimeStampValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP) ) != NULL )
{
long gpsDatestamp = strtol(valstr, NULL, 10);
struct tm *timeinfo = gmtime( ( time_t * ) & (gpsDatestamp) );
@@ -160,7 +157,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mDatestampValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD) ) != NULL )
{
strncpy(mEXIFData.mGPSData.mProcMethod, valstr, GPS_PROCESSING_SIZE-1);
mEXIFData.mGPSData.mProcMethodValid = true;
@@ -213,7 +210,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
}
- if( ( valstr = params.get(CameraParameters::KEY_FOCAL_LENGTH) ) != NULL ) {
+ if( ( valstr = params.get(android::CameraParameters::KEY_FOCAL_LENGTH) ) != NULL ) {
CAMHAL_LOGVB("EXIF Focal length: %s", valstr);
ExifElementsTable::stringToRational(valstr,
&mEXIFData.mFocalNum,
@@ -235,16 +232,23 @@ status_t OMXCameraAdapter::setupEXIF()
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
OMX_TI_CONFIG_EXIF_TAGS *exifTags;
+ unsigned char *startPtr = NULL;
unsigned char *sharedPtr = NULL;
struct timeval sTv;
struct tm *pTime;
OMXCameraPortParameters * capData = NULL;
MemoryManager memMgr;
- OMX_U8** memmgr_buf_array = NULL;
+ CameraBuffer *memmgr_buf_array;
int buf_size = 0;
LOG_FUNCTION_NAME;
+ ret = memMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
+
sharedBuffer.pSharedBuff = NULL;
capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
@@ -272,22 +276,23 @@ status_t OMXCameraAdapter::setupEXIF()
buf_size = ((buf_size+4095)/4096)*4096;
sharedBuffer.nSharedBuffSize = buf_size;
- memmgr_buf_array = (OMX_U8 **)memMgr.allocateBuffer(0, 0, NULL, buf_size, 1);
- sharedBuffer.pSharedBuff = ( OMX_U8 * ) memmgr_buf_array[0];
+ memmgr_buf_array = memMgr.allocateBufferList(0, 0, NULL, buf_size, 1);
+ sharedBuffer.pSharedBuff = (OMX_U8*)camera_buffer_get_omx_ptr(&memmgr_buf_array[0]);
+ startPtr = ( OMX_U8 * ) memmgr_buf_array[0].opaque;
- if ( NULL == sharedBuffer.pSharedBuff )
+ if ( NULL == startPtr)
{
CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
ret = -1;
}
//Extra data begins right after the EXIF configuration structure.
- sharedPtr = sharedBuffer.pSharedBuff + sizeof(OMX_TI_CONFIG_EXIF_TAGS);
+ sharedPtr = startPtr + sizeof(OMX_TI_CONFIG_EXIF_TAGS);
}
if ( NO_ERROR == ret )
{
- exifTags = ( OMX_TI_CONFIG_EXIF_TAGS * ) sharedBuffer.pSharedBuff;
+ exifTags = ( OMX_TI_CONFIG_EXIF_TAGS * ) startPtr;
OMX_INIT_STRUCT_PTR (exifTags, OMX_TI_CONFIG_EXIF_TAGS);
exifTags->nPortIndex = mCameraAdapterParameters.mImagePortIndex;
@@ -310,7 +315,7 @@ status_t OMXCameraAdapter::setupEXIF()
mEXIFData.mModel,
EXIF_MODEL_SIZE - 1);
- exifTags->pModelBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pModelBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
exifTags->ulModelBuffSizeBytes = strlen((char*)sharedPtr) + 1;
sharedPtr += EXIF_MODEL_SIZE;
exifTags->eStatusModel = OMX_TI_TagUpdated;
@@ -323,7 +328,7 @@ status_t OMXCameraAdapter::setupEXIF()
mEXIFData.mMake,
EXIF_MAKE_SIZE - 1);
- exifTags->pMakeBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pMakeBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
exifTags->ulMakeBuffSizeBytes = strlen((char*)sharedPtr) + 1;
sharedPtr += EXIF_MAKE_SIZE;
exifTags->eStatusMake = OMX_TI_TagUpdated;
@@ -357,7 +362,7 @@ status_t OMXCameraAdapter::setupEXIF()
pTime->tm_sec );
}
- exifTags->pDateTimeBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pDateTimeBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
sharedPtr += EXIF_DATE_TIME_SIZE;
exifTags->ulDateTimeBuffSizeBytes = EXIF_DATE_TIME_SIZE;
exifTags->eStatusDateTime = OMX_TI_TagUpdated;
@@ -435,7 +440,7 @@ status_t OMXCameraAdapter::setupEXIF()
{
memcpy(sharedPtr, mEXIFData.mGPSData.mMapDatum, GPS_MAPDATUM_SIZE);
- exifTags->pGpsMapDatumBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pGpsMapDatumBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
exifTags->ulGpsMapDatumBuffSizeBytes = GPS_MAPDATUM_SIZE;
exifTags->eStatusGpsMapDatum = OMX_TI_TagUpdated;
sharedPtr += GPS_MAPDATUM_SIZE;
@@ -444,7 +449,7 @@ status_t OMXCameraAdapter::setupEXIF()
if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsProcessingMethod ) &&
( mEXIFData.mGPSData.mProcMethodValid ) )
{
- exifTags->pGpsProcessingMethodBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pGpsProcessingMethodBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
memcpy(sharedPtr, ExifAsciiPrefix, sizeof(ExifAsciiPrefix));
sharedPtr += sizeof(ExifAsciiPrefix);
@@ -500,7 +505,7 @@ status_t OMXCameraAdapter::setupEXIF()
if ( NULL != memmgr_buf_array )
{
- memMgr.freeBuffer(memmgr_buf_array);
+ memMgr.freeBufferList(memmgr_buf_array);
}
LOG_FUNCTION_NAME_EXIT;
@@ -809,7 +814,7 @@ status_t OMXCameraAdapter::convertGPSCoord(double coord,
if ( coord == 0 ) {
- ALOGE("Invalid GPS coordinate");
+ CAMHAL_LOGE("Invalid GPS coordinate");
return -EINVAL;
}
@@ -836,4 +841,5 @@ status_t OMXCameraAdapter::convertGPSCoord(double coord,
return NO_ERROR;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXFD.cpp b/camera/OMXCameraAdapter/OMXFD.cpp
index 15f8d05..1a482b2 100644
--- a/camera/OMXCameraAdapter/OMXFD.cpp
+++ b/camera/OMXCameraAdapter/OMXFD.cpp
@@ -21,25 +21,15 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
-#define FACE_DETECTION_THRESHOLD 80
-
-// constants used for face smooth filtering
-static const int HorizontalFilterThreshold = 40;
-static const int VerticalFilterThreshold = 40;
-static const int HorizontalFaceSizeThreshold = 30;
-static const int VerticalFaceSizeThreshold = 30;
+namespace Ti {
+namespace Camera {
+const uint32_t OMXCameraAdapter::FACE_DETECTION_THRESHOLD = 80;
-namespace android {
-
-status_t OMXCameraAdapter::setParametersFD(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersFD(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -55,9 +45,9 @@ status_t OMXCameraAdapter::startFaceDetection()
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
- ret = setFaceDetection(true, mDeviceOrientation);
+ ret = setFaceDetection(true, mFaceOrientation);
if (ret != NO_ERROR) {
goto out;
}
@@ -82,17 +72,21 @@ status_t OMXCameraAdapter::stopFaceDetection()
BaseCameraAdapter::AdapterState state;
BaseCameraAdapter::getState(state);
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
- ret = setFaceDetection(false, mDeviceOrientation);
+ ret = setFaceDetection(false, mFaceOrientation);
if (ret != NO_ERROR) {
goto out;
}
- // Reset 3A settings
- ret = setParameters3A(mParams, state);
- if (ret != NO_ERROR) {
- goto out;
+ if ( mFaceDetectionRunning ) {
+ //Enable region priority and disable face priority for AF
+ setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, true);
+ setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , false);
+
+ //Enable Region priority and disable Face priority
+ setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, true);
+ setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, false);
}
if (mPending3Asettings) {
@@ -106,7 +100,7 @@ status_t OMXCameraAdapter::stopFaceDetection()
void OMXCameraAdapter::pauseFaceDetection(bool pause)
{
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
// pausing will only take affect if fd is already running
if (mFaceDetectionRunning) {
mFaceDetectionPaused = pause;
@@ -114,6 +108,22 @@ void OMXCameraAdapter::pauseFaceDetection(bool pause)
}
}
+status_t OMXCameraAdapter::setFaceDetectionOrientation(OMX_U32 orientation)
+{
+ status_t ret = NO_ERROR;
+
+ android::AutoMutex lock(mFaceDetectionLock);
+
+ mFaceOrientation = orientation;
+
+ if (mFaceDetectionRunning) {
+ // restart face detection with new rotation
+ setFaceDetection(true, orientation);
+ }
+
+ return ret;
+}
+
status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
{
status_t ret = NO_ERROR;
@@ -162,7 +172,9 @@ status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
if ( NO_ERROR == ret )
{
- ret = setExtraData(enable, mCameraAdapterParameters.mPrevPortIndex, OMX_FaceDetection);
+ // TODO(XXX): Should enable/disable FD extra data separately
+ // on each port.
+ ret = setExtraData(enable, OMX_ALL, OMX_FaceDetection);
if ( NO_ERROR != ret )
{
@@ -185,18 +197,15 @@ status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
return ret;
}
-status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
- sp<CameraFDResult> &result,
- size_t previewWidth,
- size_t previewHeight)
+status_t OMXCameraAdapter::createPreviewMetadata(OMX_BUFFERHEADERTYPE* pBuffHeader,
+ android::sp<CameraMetadataResult> &result,
+ size_t previewWidth,
+ size_t previewHeight)
{
status_t ret = NO_ERROR;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_TI_FACERESULT *faceResult;
- OMX_OTHER_EXTRADATATYPE *extraData;
- OMX_FACEDETECTIONTYPE *faceData;
- OMX_TI_PLATFORMPRIVATE *platformPrivate;
- camera_frame_metadata_t *faces;
+ status_t faceRet = NO_ERROR;
+ status_t metaRet = NO_ERROR;
+ OMX_FACEDETECTIONTYPE *faceData = NULL;
LOG_FUNCTION_NAME;
@@ -210,78 +219,78 @@ status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
return-EINVAL;
}
- platformPrivate = (OMX_TI_PLATFORMPRIVATE *) (pBuffHeader->pPlatformPrivate);
- if ( NULL != platformPrivate ) {
- if ( sizeof(OMX_TI_PLATFORMPRIVATE) == platformPrivate->nSize ) {
- CAMHAL_LOGVB("Size = %d, sizeof = %d, pAuxBuf = 0x%x, pAuxBufSize= %d, pMetaDataBufer = 0x%x, nMetaDataSize = %d",
- platformPrivate->nSize,
- sizeof(OMX_TI_PLATFORMPRIVATE),
- platformPrivate->pAuxBuf1,
- platformPrivate->pAuxBufSize1,
- platformPrivate->pMetaDataBuffer,
- platformPrivate->nMetaDataSize);
+ if ( mFaceDetectionRunning && !mFaceDetectionPaused ) {
+ OMX_OTHER_EXTRADATATYPE *extraData;
+
+ extraData = getExtradata(pBuffHeader->pPlatformPrivate,
+ (OMX_EXTRADATATYPE)OMX_FaceDetection);
+
+ if ( NULL != extraData ) {
+ CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
+ extraData->nSize,
+ sizeof(OMX_OTHER_EXTRADATATYPE),
+ extraData->eType,
+ extraData->nDataSize,
+ extraData->nPortIndex,
+ extraData->nVersion);
} else {
- CAMHAL_LOGDB("OMX_TI_PLATFORMPRIVATE size mismatch: expected = %d, received = %d",
- ( unsigned int ) sizeof(OMX_TI_PLATFORMPRIVATE),
- ( unsigned int ) platformPrivate->nSize);
+ CAMHAL_LOGD("FD extra data not found!");
return -EINVAL;
}
- } else {
- CAMHAL_LOGDA("Invalid OMX_TI_PLATFORMPRIVATE");
- return-EINVAL;
- }
-
- if ( 0 >= platformPrivate->nMetaDataSize ) {
- CAMHAL_LOGDB("OMX_TI_PLATFORMPRIVATE nMetaDataSize is size is %d",
- ( unsigned int ) platformPrivate->nMetaDataSize);
- return -EINVAL;
+ faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
+ if ( NULL != faceData ) {
+ if ( sizeof(OMX_FACEDETECTIONTYPE) == faceData->nSize ) {
+ CAMHAL_LOGVB("Faces detected %d",
+ faceData->ulFaceCount,
+ faceData->nSize,
+ sizeof(OMX_FACEDETECTIONTYPE),
+ faceData->eCameraView,
+ faceData->nPortIndex,
+ faceData->nVersion);
+ } else {
+ CAMHAL_LOGEB("OMX_FACEDETECTIONTYPE size mismatch: expected = %d, received = %d",
+ ( unsigned int ) sizeof(OMX_FACEDETECTIONTYPE),
+ ( unsigned int ) faceData->nSize);
+ return -EINVAL;
+ }
+ } else {
+ CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE");
+ return -EINVAL;
+ }
}
- extraData = getExtradata((OMX_OTHER_EXTRADATATYPE *) (platformPrivate->pMetaDataBuffer),
- platformPrivate->nMetaDataSize, (OMX_EXTRADATATYPE)OMX_FaceDetection);
-
- if ( NULL != extraData ) {
- CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
- extraData->nSize,
- sizeof(OMX_OTHER_EXTRADATATYPE),
- extraData->eType,
- extraData->nDataSize,
- extraData->nPortIndex,
- extraData->nVersion);
- } else {
- CAMHAL_LOGDA("Invalid OMX_OTHER_EXTRADATATYPE");
- return -EINVAL;
+ result = new (std::nothrow) CameraMetadataResult;
+ if(NULL == result.get()) {
+ ret = NO_MEMORY;
+ return ret;
}
- faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
- if ( NULL != faceData ) {
- if ( sizeof(OMX_FACEDETECTIONTYPE) == faceData->nSize ) {
- CAMHAL_LOGVB("Faces detected %d",
- faceData->ulFaceCount,
- faceData->nSize,
- sizeof(OMX_FACEDETECTIONTYPE),
- faceData->eCameraView,
- faceData->nPortIndex,
- faceData->nVersion);
- } else {
- CAMHAL_LOGDB("OMX_FACEDETECTIONTYPE size mismatch: expected = %d, received = %d",
- ( unsigned int ) sizeof(OMX_FACEDETECTIONTYPE),
- ( unsigned int ) faceData->nSize);
- return -EINVAL;
+ //Encode face coordinates
+ faceRet = encodeFaceCoordinates(faceData, result->getMetadataResult()
+ , previewWidth, previewHeight);
+ if ((NO_ERROR == faceRet) || (NOT_ENOUGH_DATA == faceRet)) {
+ // Ignore harmless errors (no error and no update) and go ahead and encode
+ // the preview meta data
+ metaRet = encodePreviewMetadata(result->getMetadataResult()
+ , pBuffHeader->pPlatformPrivate);
+ if ( (NO_ERROR != metaRet) && (NOT_ENOUGH_DATA != metaRet) ) {
+ // Some 'real' error occurred during preview meta data encod, clear metadata
+ // result and return correct error code
+ result.clear();
+ ret = metaRet;
}
} else {
- CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE");
- return -EINVAL;
+ //Some real error occurred during face encoding, clear metadata result
+ // and return correct error code
+ result.clear();
+ ret = faceRet;
}
- ret = encodeFaceCoordinates(faceData, &faces, previewWidth, previewHeight);
-
- if ( NO_ERROR == ret ) {
- result = new CameraFDResult(faces);
- } else {
+ if((NOT_ENOUGH_DATA == faceRet) && (NOT_ENOUGH_DATA == metaRet)) {
+ //No point sending the callback if nothing is changed
result.clear();
- result = NULL;
+ ret = faceRet;
}
LOG_FUNCTION_NAME_EXIT;
@@ -290,40 +299,38 @@ status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
}
status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData,
- camera_frame_metadata_t **pFaces,
+ camera_frame_metadata_t *metadataResult,
size_t previewWidth,
size_t previewHeight)
{
status_t ret = NO_ERROR;
camera_face_t *faces;
- camera_frame_metadata_t *faceResult;
size_t hRange, vRange;
double tmp;
+ bool faceArrayChanged = false;
LOG_FUNCTION_NAME;
- if ( NULL == faceData ) {
- CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE parameter");
- return EINVAL;
- }
-
- LOG_FUNCTION_NAME
+ hRange = CameraMetadataResult::RIGHT - CameraMetadataResult::LEFT;
+ vRange = CameraMetadataResult::BOTTOM - CameraMetadataResult::TOP;
- hRange = CameraFDResult::RIGHT - CameraFDResult::LEFT;
- vRange = CameraFDResult::BOTTOM - CameraFDResult::TOP;
+ android::AutoMutex lock(mFaceDetectionLock);
- faceResult = ( camera_frame_metadata_t * ) malloc(sizeof(camera_frame_metadata_t));
- if ( NULL == faceResult ) {
- return -ENOMEM;
+ // Avoid memory leak if called twice on same CameraMetadataResult
+ if ( (0 < metadataResult->number_of_faces) && (NULL != metadataResult->faces) ) {
+ free(metadataResult->faces);
+ metadataResult->number_of_faces = 0;
+ metadataResult->faces = NULL;
}
- if ( 0 < faceData->ulFaceCount ) {
+ if ( (NULL != faceData) && (0 < faceData->ulFaceCount) ) {
int orient_mult;
int trans_left, trans_top, trans_right, trans_bot;
faces = ( camera_face_t * ) malloc(sizeof(camera_face_t)*faceData->ulFaceCount);
if ( NULL == faces ) {
- return -ENOMEM;
+ ret = NO_MEMORY;
+ goto out;
}
/**
@@ -359,7 +366,7 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
/ * (r, b)
*/
- if (mDeviceOrientation == 180) {
+ if (mFaceOrientation == 180) {
orient_mult = -1;
trans_left = 2; // right is now left
trans_top = 3; // bottom is now top
@@ -386,7 +393,7 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
if(faceData->tFacePosition[j].nScore <= FACE_DETECTION_THRESHOLD)
continue;
- if (mDeviceOrientation == 180) {
+ if (mFaceOrientation == 180) {
// from sensor pov, the left pos is the right corner of the face in pov of frame
nLeft = faceData->tFacePosition[j].nLeft + faceData->tFacePosition[j].nWidth;
nTop = faceData->tFacePosition[j].nTop + faceData->tFacePosition[j].nHeight;
@@ -417,20 +424,21 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
faces[i].score = faceData->tFacePosition[j].nScore;
faces[i].id = 0;
- faces[i].left_eye[0] = CameraFDResult::INVALID_DATA;
- faces[i].left_eye[1] = CameraFDResult::INVALID_DATA;
- faces[i].right_eye[0] = CameraFDResult::INVALID_DATA;
- faces[i].right_eye[1] = CameraFDResult::INVALID_DATA;
- faces[i].mouth[0] = CameraFDResult::INVALID_DATA;
- faces[i].mouth[1] = CameraFDResult::INVALID_DATA;
+ faces[i].left_eye[0] = CameraMetadataResult::INVALID_DATA;
+ faces[i].left_eye[1] = CameraMetadataResult::INVALID_DATA;
+ faces[i].right_eye[0] = CameraMetadataResult::INVALID_DATA;
+ faces[i].right_eye[1] = CameraMetadataResult::INVALID_DATA;
+ faces[i].mouth[0] = CameraMetadataResult::INVALID_DATA;
+ faces[i].mouth[1] = CameraMetadataResult::INVALID_DATA;
i++;
}
- faceResult->number_of_faces = i;
- faceResult->faces = faces;
+ metadataResult->number_of_faces = i;
+ metadataResult->faces = faces;
- for (int i = 0; i < faceResult->number_of_faces; i++)
+ for (int i = 0; i < metadataResult->number_of_faces; i++)
{
+ bool faceChanged = true;
int centerX = (faces[i].rect[trans_left] + faces[i].rect[trans_right] ) / 2;
int centerY = (faces[i].rect[trans_top] + faces[i].rect[trans_bot] ) / 2;
@@ -448,43 +456,49 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
int tempSizeY = (faceDetectionLastOutput[j].rect[trans_bot] -
faceDetectionLastOutput[j].rect[trans_top] ) ;
- if ( (abs(tempCenterX - centerX) < HorizontalFilterThreshold) &&
- (abs(tempCenterY - centerY) < VerticalFilterThreshold) )
- {
- // Found Face. It did not move too far.
- // Now check size of rectangle compare to last output
- if ( (abs (tempSizeX -sizeX) < HorizontalFaceSizeThreshold) &&
- (abs (tempSizeY -sizeY) < VerticalFaceSizeThreshold) )
- {
- // Rectangle is almost same as last time
- // Output exactly what was done for this face last time.
- faces[i] = faceDetectionLastOutput[j];
- }
- else
- {
- // TODO(XXX): Rectangle size changed but position is same.
- // Possibly we can apply just positional correctness.
+ if ( ( tempCenterX == centerX) &&
+ ( tempCenterY == centerY) ) {
+ // Found Face.
+ // Now check size of rectangle
+ // compare to last output.
+ if ( ( tempSizeX == sizeX ) &&
+ ( tempSizeY == sizeY ) ) {
+ faceChanged = false;
}
}
}
+ // Send face detection data after some face coordinate changes
+ if (faceChanged) {
+ faceArrayChanged = true;
+ }
}
// Save this output for next iteration
- for (int i = 0; i < faceResult->number_of_faces; i++)
+ for (int i = 0; i < metadataResult->number_of_faces; i++)
{
faceDetectionLastOutput[i] = faces[i];
}
- faceDetectionNumFacesLastOutput = faceResult->number_of_faces;
} else {
- faceResult->number_of_faces = 0;
- faceResult->faces = NULL;
+ metadataResult->number_of_faces = 0;
+ metadataResult->faces = NULL;
}
- *pFaces = faceResult;
+ // Send face detection data after face count changes
+ if (faceDetectionNumFacesLastOutput != metadataResult->number_of_faces) {
+ faceArrayChanged = true;
+ }
+ faceDetectionNumFacesLastOutput = metadataResult->number_of_faces;
+
+ if ( !faceArrayChanged ) {
+ ret = NOT_ENOUGH_DATA;
+ }
LOG_FUNCTION_NAME_EXIT;
+out:
+
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXFocus.cpp b/camera/OMXCameraAdapter/OMXFocus.cpp
index f584184..65577a5 100644
--- a/camera/OMXCameraAdapter/OMXFocus.cpp
+++ b/camera/OMXCameraAdapter/OMXFocus.cpp
@@ -22,10 +22,6 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include "ErrorUtils.h"
@@ -34,23 +30,26 @@
#define AF_IMAGE_CALLBACK_TIMEOUT 5000000 //5 seconds timeout
#define AF_VIDEO_CALLBACK_TIMEOUT 2800000 //2.8 seconds timeout
-namespace android {
+namespace Ti {
+namespace Camera {
+
+const nsecs_t OMXCameraAdapter::CANCEL_AF_TIMEOUT = seconds_to_nanoseconds(1);
-status_t OMXCameraAdapter::setParametersFocus(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersFocus(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
const char *str = NULL;
- Vector< sp<CameraArea> > tempAreas;
+ android::Vector<android::sp<CameraArea> > tempAreas;
size_t MAX_FOCUS_AREAS;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mFocusAreasLock);
+ android::AutoMutex lock(mFocusAreasLock);
- str = params.get(CameraParameters::KEY_FOCUS_AREAS);
+ str = params.get(android::CameraParameters::KEY_FOCUS_AREAS);
- MAX_FOCUS_AREAS = atoi(params.get(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS));
+ MAX_FOCUS_AREAS = atoi(params.get(android::CameraParameters::KEY_MAX_NUM_FOCUS_AREAS));
if ( NULL != str ) {
ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
@@ -72,7 +71,7 @@ status_t OMXCameraAdapter::setParametersFocus(const CameraParameters &params,
}
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -84,6 +83,7 @@ status_t OMXCameraAdapter::doAutoFocus()
OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl;
OMX_PARAM_FOCUSSTATUSTYPE focusStatus;
OMX_CONFIG_BOOLEANTYPE bOMX;
+ CameraAdapter::AdapterState state;
nsecs_t timeout = 0;
LOG_FUNCTION_NAME;
@@ -102,18 +102,23 @@ status_t OMXCameraAdapter::doAutoFocus()
return NO_ERROR;
}
-
if( ((AF_ACTIVE & getState()) != AF_ACTIVE) && ((AF_ACTIVE & getNextState()) != AF_ACTIVE) ) {
CAMHAL_LOGDA("Auto focus got canceled before doAutoFocus could be called");
return NO_ERROR;
}
+ // AF when fixed focus modes are set should be a no-op.
+ if ( ( mParameters3A.Focus == OMX_IMAGE_FocusControlOff ) ||
+ ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) ||
+ ( mParameters3A.Focus == OMX_IMAGE_FocusControlHyperfocal ) ) {
+ returnFocusStatus(true);
+ return NO_ERROR;
+ }
+
OMX_INIT_STRUCT_PTR (&focusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
-#ifndef OMAP_TUNA
// If the app calls autoFocus, the camera will stop sending face callbacks.
pauseFaceDetection(true);
-#endif
// This is needed for applying FOCUS_REGION correctly
if ( (!mFocusAreas.isEmpty()) && (!mFocusAreas.itemAt(0)->isZeroArea()))
@@ -154,8 +159,7 @@ status_t OMXCameraAdapter::doAutoFocus()
( focusStatus.eFocusStatus == OMX_FocusStatusRequest ||
focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach ||
focusStatus.eFocusStatus == OMX_FocusStatusLost ) ) ||
- (mParameters3A.Focus != (OMX_IMAGE_FOCUSCONTROLTYPE)OMX_IMAGE_FocusControlAuto) )
- {
+ (mParameters3A.Focus != (OMX_IMAGE_FOCUSCONTROLTYPE)OMX_IMAGE_FocusControlAuto) ) {
OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
bOMX.bEnabled = OMX_TRUE;
@@ -163,6 +167,12 @@ status_t OMXCameraAdapter::doAutoFocus()
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
(OMX_INDEXTYPE)OMX_TI_IndexConfigAutofocusEnable,
&bOMX);
+ if ( OMX_ErrorNone != eError ) {
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ {
+ android::AutoMutex lock(mDoAFMutex);
// force AF, Ducati will take care of whether CAF
// or AF will be performed, depending on light conditions
@@ -172,29 +182,31 @@ status_t OMXCameraAdapter::doAutoFocus()
focusControl.eFocusControl = OMX_IMAGE_FocusControlAutoLock;
}
- if ( focusControl.eFocusControl != OMX_IMAGE_FocusControlAuto )
- {
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigFocusControl,
&focusControl);
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while starting focus 0x%x", eError);
+ return INVALID_OPERATION;
+ } else {
+ CAMHAL_LOGDA("Autofocus started successfully");
}
- if ( OMX_ErrorNone != eError ) {
- CAMHAL_LOGEB("Error while starting focus 0x%x", eError);
- return INVALID_OPERATION;
- } else {
- CAMHAL_LOGDA("Autofocus started successfully");
- }
+ // No need to wait if preview is about to stop
+ getNextState(state);
+ if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE ) {
+ return NO_ERROR;
+ }
+
+ // configure focus timeout based on capture mode
+ timeout = (mCapMode == VIDEO_MODE) ?
+ ( ( nsecs_t ) AF_VIDEO_CALLBACK_TIMEOUT * 1000 ) :
+ ( ( nsecs_t ) AF_IMAGE_CALLBACK_TIMEOUT * 1000 );
- // configure focus timeout based on capture mode
- timeout = (mCapMode == VIDEO_MODE) ?
- ( ( nsecs_t ) AF_VIDEO_CALLBACK_TIMEOUT * 1000 ) :
- ( ( nsecs_t ) AF_IMAGE_CALLBACK_TIMEOUT * 1000 );
- {
- Mutex::Autolock lock(mDoAFMutex);
ret = mDoAFCond.waitRelative(mDoAFMutex, timeout);
- }
+ }
//If somethiing bad happened while we wait
if (mComponentState == OMX_StateInvalid) {
@@ -206,6 +218,7 @@ status_t OMXCameraAdapter::doAutoFocus()
CAMHAL_LOGEA("Autofocus callback timeout expired");
ret = returnFocusStatus(true);
} else {
+ CAMHAL_LOGDA("Autofocus callback received");
ret = returnFocusStatus(false);
}
} else { // Focus mode in continuous
@@ -227,18 +240,16 @@ status_t OMXCameraAdapter::stopAutoFocus()
LOG_FUNCTION_NAME;
- if ( OMX_StateInvalid == mComponentState )
- {
+ if ( OMX_StateInvalid == mComponentState ) {
CAMHAL_LOGEA("OMX component in Invalid state");
returnFocusStatus(false);
return -EINVAL;
- }
+ }
- if ( OMX_StateExecuting != mComponentState )
- {
+ if ( OMX_StateExecuting != mComponentState ) {
CAMHAL_LOGEA("OMX component not in executing state");
return NO_ERROR;
- }
+ }
if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) {
// No need to stop focus if we are in infinity mode. Nothing to stop.
@@ -251,19 +262,20 @@ status_t OMXCameraAdapter::stopAutoFocus()
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigFocusControl,
&focusControl);
- if ( OMX_ErrorNone != eError )
- {
+ if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while stopping focus 0x%x", eError);
- return ErrorUtils::omxToAndroidError(eError);
- } else {
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+#ifdef CAMERAHAL_TUNA
+ else {
// This is a WA. Usually the OMX Camera component should
// generate AF status change OMX event fairly quickly
// ( after one preview frame ) and this notification should
// actually come from 'handleFocusCallback()'.
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
-
+#endif
LOG_FUNCTION_NAME_EXIT;
@@ -294,7 +306,7 @@ status_t OMXCameraAdapter::getFocusMode(OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE &focus
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::cancelAutoFocus()
@@ -310,21 +322,32 @@ status_t OMXCameraAdapter::cancelAutoFocus()
return ret;
}
- //Stop the AF only for modes other than CAF or Inifinity
+ //Stop the AF only for modes other than CAF, Inifinity or Off
if ( ( focusMode.eFocusControl != OMX_IMAGE_FocusControlAuto ) &&
( focusMode.eFocusControl != ( OMX_IMAGE_FOCUSCONTROLTYPE )
- OMX_IMAGE_FocusControlAutoInfinity ) ) {
+ OMX_IMAGE_FocusControlAutoInfinity ) &&
+ ( focusMode.eFocusControl != OMX_IMAGE_FocusControlOff ) ) {
+ android::AutoMutex lock(mCancelAFMutex);
stopAutoFocus();
+ ret = mCancelAFCond.waitRelative(mCancelAFMutex, CANCEL_AF_TIMEOUT);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGE("Cancel AF timeout!");
+ }
} else if (focusMode.eFocusControl == OMX_IMAGE_FocusControlAuto) {
// This re-enabling of CAF doesn't seem to
// be needed any more.
// re-apply CAF after unlocking and canceling
// mPending3Asettings |= SetFocus;
}
-#ifndef OMAP_TUNA
+
+ {
+ // Signal to 'doAutoFocus()'
+ android::AutoMutex lock(mDoAFMutex);
+ mDoAFCond.broadcast();
+ }
+
// If the apps call #cancelAutoFocus()}, the face callbacks will also resume.
pauseFaceDetection(false);
-#endif
LOG_FUNCTION_NAME_EXIT;
@@ -349,7 +372,7 @@ status_t OMXCameraAdapter::setFocusCallback(bool enabled)
if ( OMX_StateExecuting != mComponentState )
{
CAMHAL_LOGEA("OMX component not in executing state");
- ret = NO_ERROR;
+ return NO_ERROR;
}
if ( NO_ERROR == ret )
@@ -454,9 +477,9 @@ status_t OMXCameraAdapter::returnFocusStatus(bool timeoutReached)
} else {
CAMHAL_LOGDA("Focus locked. Applied focus locks successfully");
}
+
stopAutoFocus();
}
-
//Query current focus distance after AF is complete
updateFocusDistances(mParameters);
}
@@ -476,10 +499,8 @@ status_t OMXCameraAdapter::returnFocusStatus(bool timeoutReached)
notifyFocusSubscribers(focusStatus);
}
-#ifndef OMAP_TUNA
// After focus, face detection will resume sending face callbacks
pauseFaceDetection(false);
-#endif
LOG_FUNCTION_NAME_EXIT;
@@ -514,6 +535,7 @@ status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus)
if ( NO_ERROR == ret )
{
OMX_INIT_STRUCT_PTR (eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
+
eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigCommonFocusStatus,
eFocusStatus);
@@ -534,7 +556,7 @@ status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus)
return ret;
}
-status_t OMXCameraAdapter::updateFocusDistances(CameraParameters &params)
+status_t OMXCameraAdapter::updateFocusDistances(android::CameraParameters &params)
{
OMX_U32 focusNear, focusOptimal, focusFar;
status_t ret = NO_ERROR;
@@ -620,7 +642,7 @@ status_t OMXCameraAdapter::encodeFocusDistance(OMX_U32 dist, char *buffer, size_
{
if ( 0 == dist )
{
- strncpy(buffer, CameraParameters::FOCUS_DISTANCE_INFINITY, ( length - 1 ));
+ strncpy(buffer, android::CameraParameters::FOCUS_DISTANCE_INFINITY, ( length - 1 ));
}
else
{
@@ -638,7 +660,7 @@ status_t OMXCameraAdapter::encodeFocusDistance(OMX_U32 dist, char *buffer, size_
status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
OMX_U32 &optimal,
OMX_U32 &far,
- CameraParameters& params)
+ android::CameraParameters& params)
{
status_t ret = NO_ERROR;
@@ -677,7 +699,7 @@ status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
mFocusDistOptimal,
mFocusDistFar);
- params.set(CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
+ params.set(android::CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
}
LOG_FUNCTION_NAME_EXIT;
@@ -690,13 +712,20 @@ status_t OMXCameraAdapter::setTouchFocus()
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_ALGOAREASTYPE **focusAreas;
+ OMX_ALGOAREASTYPE *focusAreas;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
MemoryManager memMgr;
+ CameraBuffer *bufferlist;
int areasSize = 0;
LOG_FUNCTION_NAME;
+ ret = memMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
+
if ( OMX_StateInvalid == mComponentState )
{
CAMHAL_LOGEA("OMX component is in invalid state");
@@ -707,7 +736,8 @@ status_t OMXCameraAdapter::setTouchFocus()
{
areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
- focusAreas = (OMX_ALGOAREASTYPE**) memMgr.allocateBuffer(0, 0, NULL, areasSize, 1);
+ bufferlist = memMgr.allocateBufferList(0, 0, NULL, areasSize, 1);
+ focusAreas = (OMX_ALGOAREASTYPE*) bufferlist[0].opaque;
OMXCameraPortParameters * mPreviewData = NULL;
mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
@@ -718,51 +748,60 @@ status_t OMXCameraAdapter::setTouchFocus()
return -ENOMEM;
}
- OMX_INIT_STRUCT_PTR (focusAreas[0], OMX_ALGOAREASTYPE);
+ OMX_INIT_STRUCT_PTR (focusAreas, OMX_ALGOAREASTYPE);
- focusAreas[0]->nPortIndex = OMX_ALL;
- focusAreas[0]->nNumAreas = mFocusAreas.size();
- focusAreas[0]->nAlgoAreaPurpose = OMX_AlgoAreaFocus;
+ focusAreas->nPortIndex = OMX_ALL;
+ focusAreas->nNumAreas = mFocusAreas.size();
+ focusAreas->nAlgoAreaPurpose = OMX_AlgoAreaFocus;
// If the area is the special case of (0, 0, 0, 0, 0), then
// the algorithm needs nNumAreas to be set to 0,
// in order to automatically choose the best fitting areas.
if ( mFocusAreas.itemAt(0)->isZeroArea() )
{
- focusAreas[0]->nNumAreas = 0;
+ focusAreas->nNumAreas = 0;
+ }
+
+ for ( unsigned int n = 0; n < mFocusAreas.size(); n++) {
+ int widthDivisor = 1;
+ int heightDivisor = 1;
+
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottom) {
+ heightDivisor = 2;
+ }
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutLeftRight) {
+ widthDivisor = 2;
}
- for ( unsigned int n = 0; n < mFocusAreas.size(); n++)
- {
// transform the coordinates to 3A-type coordinates
- mFocusAreas.itemAt(n)->transfrom(mPreviewData->mWidth,
- mPreviewData->mHeight,
- focusAreas[0]->tAlgoAreas[n].nTop,
- focusAreas[0]->tAlgoAreas[n].nLeft,
- focusAreas[0]->tAlgoAreas[n].nWidth,
- focusAreas[0]->tAlgoAreas[n].nHeight);
-
- focusAreas[0]->tAlgoAreas[n].nLeft =
- ( focusAreas[0]->tAlgoAreas[n].nLeft * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
- focusAreas[0]->tAlgoAreas[n].nTop =
- ( focusAreas[0]->tAlgoAreas[n].nTop* TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
- focusAreas[0]->tAlgoAreas[n].nWidth =
- ( focusAreas[0]->tAlgoAreas[n].nWidth * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
- focusAreas[0]->tAlgoAreas[n].nHeight =
- ( focusAreas[0]->tAlgoAreas[n].nHeight * TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
- focusAreas[0]->tAlgoAreas[n].nPriority = mFocusAreas.itemAt(n)->getWeight();
+ mFocusAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth/widthDivisor,
+ (size_t)mPreviewData->mHeight/heightDivisor,
+ (size_t&)focusAreas->tAlgoAreas[n].nTop,
+ (size_t&)focusAreas->tAlgoAreas[n].nLeft,
+ (size_t&)focusAreas->tAlgoAreas[n].nWidth,
+ (size_t&)focusAreas->tAlgoAreas[n].nHeight);
+
+ focusAreas->tAlgoAreas[n].nLeft =
+ ( focusAreas->tAlgoAreas[n].nLeft * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
+ focusAreas->tAlgoAreas[n].nTop =
+ ( focusAreas->tAlgoAreas[n].nTop* TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
+ focusAreas->tAlgoAreas[n].nWidth =
+ ( focusAreas->tAlgoAreas[n].nWidth * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
+ focusAreas->tAlgoAreas[n].nHeight =
+ ( focusAreas->tAlgoAreas[n].nHeight * TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
+ focusAreas->tAlgoAreas[n].nPriority = mFocusAreas.itemAt(n)->getWeight();
CAMHAL_LOGDB("Focus area %d : top = %d left = %d width = %d height = %d prio = %d",
- n, (int)focusAreas[0]->tAlgoAreas[n].nTop, (int)focusAreas[0]->tAlgoAreas[n].nLeft,
- (int)focusAreas[0]->tAlgoAreas[n].nWidth, (int)focusAreas[0]->tAlgoAreas[n].nHeight,
- (int)focusAreas[0]->tAlgoAreas[n].nPriority);
- }
+ n, (int)focusAreas->tAlgoAreas[n].nTop, (int)focusAreas->tAlgoAreas[n].nLeft,
+ (int)focusAreas->tAlgoAreas[n].nWidth, (int)focusAreas->tAlgoAreas[n].nHeight,
+ (int)focusAreas->tAlgoAreas[n].nPriority);
+ }
OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
sharedBuffer.nPortIndex = OMX_ALL;
sharedBuffer.nSharedBuffSize = areasSize;
- sharedBuffer.pSharedBuff = (OMX_U8 *) focusAreas[0];
+ sharedBuffer.pSharedBuff = (OMX_U8 *) camera_buffer_get_omx_ptr (&bufferlist[0]);
if ( NULL == sharedBuffer.pSharedBuff )
{
@@ -781,10 +820,9 @@ status_t OMXCameraAdapter::setTouchFocus()
}
EXIT:
- if (NULL != focusAreas)
+ if (NULL != bufferlist)
{
- memMgr.freeBuffer((void*) focusAreas);
- focusAreas = NULL;
+ memMgr.freeBufferList (bufferlist);
}
}
@@ -808,17 +846,22 @@ void OMXCameraAdapter::handleFocusCallback() {
CAMHAL_LOGEA("Focus status check failed!");
// signal and unblock doAutoFocus
if (AF_ACTIVE & nextState) {
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
return;
}
- if ( ( eFocusStatus.eFocusStatus != OMX_FocusStatusRequest ) &&
- ( eFocusStatus.eFocusStatus != OMX_FocusStatusOff ) ) {
+ if ( eFocusStatus.eFocusStatus == OMX_FocusStatusOff ) {
+ android::AutoMutex lock(mCancelAFMutex);
+ mCancelAFCond.signal();
+ return;
+ }
+
+ if (eFocusStatus.eFocusStatus != OMX_FocusStatusRequest) {
// signal doAutoFocus when a end of scan message comes
// ignore start of scan
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
@@ -843,4 +886,5 @@ void OMXCameraAdapter::handleFocusCallback() {
notifyFocusSubscribers(focusStatus);
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXMetadata.cpp b/camera/OMXCameraAdapter/OMXMetadata.cpp
new file mode 100644
index 0000000..af8c49c
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXMetadata.cpp
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXMetadata.cpp
+*
+* This file contains functionality for handling capture metadata.
+*
+*/
+
+#undef LOG_TAG
+
+#define LOG_TAG "OMXMetaData"
+
+#include "OMXCameraAdapter.h"
+#include <camera/CameraMetadata.h>
+
+namespace Ti {
+namespace Camera {
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+camera_memory_t * OMXCameraAdapter::getMetaData(const OMX_PTR plat_pvt,
+ camera_request_memory allocator) const
+{
+ camera_memory_t * ret = NULL;
+
+ OMX_OTHER_EXTRADATATYPE *extraData;
+ OMX_FACEDETECTIONTYPE *faceData = NULL;
+ OMX_TI_WHITEBALANCERESULTTYPE * WBdata = NULL;
+ OMX_TI_VECTSHOTINFOTYPE *shotInfo = NULL;
+ OMX_TI_LSCTABLETYPE *lscTbl = NULL;
+ camera_metadata_t *metaData;
+ size_t offset = 0;
+
+ size_t metaDataSize = sizeof(camera_metadata_t);
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_FaceDetection);
+ if ( NULL != extraData ) {
+ faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
+ metaDataSize += faceData->ulFaceCount * sizeof(camera_metadata_face_t);
+ }
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_WhiteBalance);
+ if ( NULL != extraData ) {
+ WBdata = ( OMX_TI_WHITEBALANCERESULTTYPE * ) extraData->data;
+ }
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_TI_VectShotInfo);
+ if ( NULL != extraData ) {
+ shotInfo = ( OMX_TI_VECTSHOTINFOTYPE * ) extraData->data;
+ }
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_TI_LSCTable);
+ if ( NULL != extraData ) {
+ lscTbl = ( OMX_TI_LSCTABLETYPE * ) extraData->data;
+ metaDataSize += OMX_TI_LSC_GAIN_TABLE_SIZE;
+ }
+
+ ret = allocator(-1, metaDataSize, 1, NULL);
+ if ( NULL == ret ) {
+ return NULL;
+ } else {
+ metaData = static_cast<camera_metadata_t *> (ret->data);
+ offset += sizeof(camera_metadata_t);
+ }
+
+ if ( NULL != faceData ) {
+ metaData->number_of_faces = 0;
+ int idx = 0;
+ metaData->faces_offset = offset;
+ struct camera_metadata_face *faces = reinterpret_cast<struct camera_metadata_face *> (static_cast<char*>(ret->data) + offset);
+ for ( int j = 0; j < faceData->ulFaceCount ; j++ ) {
+ if(faceData->tFacePosition[j].nScore <= FACE_DETECTION_THRESHOLD) {
+ continue;
+ }
+ idx = metaData->number_of_faces;
+ metaData->number_of_faces++;
+ // TODO: Rework and re-use encodeFaceCoordinates()
+ faces[idx].left = faceData->tFacePosition[j].nLeft;
+ faces[idx].top = faceData->tFacePosition[j].nTop;
+ faces[idx].bottom = faceData->tFacePosition[j].nWidth;
+ faces[idx].right = faceData->tFacePosition[j].nHeight;
+ }
+ offset += sizeof(camera_metadata_face_t) * metaData->number_of_faces;
+ }
+
+ if ( NULL != WBdata ) {
+ metaData->awb_temp = WBdata->nColorTemperature;
+ metaData->gain_b = WBdata->nGainB;
+ metaData->gain_gb = WBdata->nGainGB;
+ metaData->gain_gr = WBdata->nGainGR;
+ metaData->gain_r = WBdata->nGainR;
+ metaData->offset_b = WBdata->nOffsetB;
+ metaData->offset_gb = WBdata->nOffsetGB;
+ metaData->offset_gr = WBdata->nOffsetGR;
+ metaData->offset_r = WBdata->nOffsetR;
+ }
+
+ if ( NULL != lscTbl ) {
+ metaData->lsc_table_applied = lscTbl->bApplied;
+ metaData->lsc_table_size = OMX_TI_LSC_GAIN_TABLE_SIZE;
+ metaData->lsc_table_offset = offset;
+ uint8_t *lsc_table = reinterpret_cast<uint8_t *> (static_cast<char*>(ret->data) + offset);
+ memcpy(lsc_table, lscTbl->pGainTable, OMX_TI_LSC_GAIN_TABLE_SIZE);
+ offset += metaData->lsc_table_size;
+ }
+
+ if ( NULL != shotInfo ) {
+ metaData->frame_number = shotInfo->nFrameNum;
+ metaData->shot_number = shotInfo->nConfigId;
+ metaData->analog_gain = shotInfo->nAGain;
+ metaData->analog_gain_req = shotInfo->nReqGain;
+ metaData->analog_gain_min = shotInfo->nGainMin;
+ metaData->analog_gain_max = shotInfo->nGainMax;
+ metaData->analog_gain_error = shotInfo->nSenAGainErr;
+ metaData->analog_gain_dev = shotInfo->nDevAGain;
+ metaData->exposure_time = shotInfo->nExpTime;
+ metaData->exposure_time_req = shotInfo->nReqExpTime;
+ metaData->exposure_time_min = shotInfo->nExpMin;
+ metaData->exposure_time_max = shotInfo->nExpMax;
+ metaData->exposure_time_dev = shotInfo->nDevExpTime;
+ metaData->exposure_time_error = shotInfo->nSenExpTimeErr;
+ metaData->exposure_compensation_req = shotInfo->nReqEC;
+ metaData->exposure_dev = shotInfo->nDevEV;
+ }
+
+ return ret;
+}
+#endif
+
+status_t OMXCameraAdapter::encodePreviewMetadata(camera_frame_metadata_t *meta, const OMX_PTR plat_pvt)
+{
+ status_t ret = NO_ERROR;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ OMX_OTHER_EXTRADATATYPE *extraData = NULL;
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_TI_VectShotInfo);
+
+ if ( (NULL != extraData) && (NULL != extraData->data) ) {
+ OMX_TI_VECTSHOTINFOTYPE *shotInfo;
+ shotInfo = (OMX_TI_VECTSHOTINFOTYPE*) extraData->data;
+
+ meta->analog_gain = shotInfo->nAGain;
+ meta->exposure_time = shotInfo->nExpTime;
+ } else {
+ meta->analog_gain = -1;
+ meta->exposure_time = -1;
+ }
+
+ // Send metadata event only after any value has been changed
+ if ((metadataLastAnalogGain == meta->analog_gain) &&
+ (metadataLastExposureTime == meta->exposure_time)) {
+ ret = NOT_ENOUGH_DATA;
+ } else {
+ metadataLastAnalogGain = meta->analog_gain;
+ metadataLastExposureTime = meta->exposure_time;
+ }
+#else
+ // no-op in non enhancement mode
+ CAMHAL_UNUSED(meta);
+ CAMHAL_UNUSED(plat_pvt);
+#endif
+
+ return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXReprocess.cpp b/camera/OMXCameraAdapter/OMXReprocess.cpp
new file mode 100644
index 0000000..28f1744
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXReprocess.cpp
@@ -0,0 +1,340 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXReprocess.cpp
+*
+* This file contains functionality for handling reprocessing operations.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+
+
+namespace Ti {
+namespace Camera {
+
+status_t OMXCameraAdapter::setParametersReprocess(const android::CameraParameters &params,
+ CameraBuffer* buffers,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+ int w, h, s;
+ OMX_COLOR_FORMATTYPE pixFormat;
+ OMXCameraPortParameters *portData;
+ const char* valstr;
+
+ LOG_FUNCTION_NAME;
+
+ if (!buffers) {
+ CAMHAL_LOGE("invalid buffer array");
+ return BAD_VALUE;
+ }
+
+ portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+ w = buffers[0].width;
+ h = buffers[0].height;
+ s = buffers[0].stride;
+
+ valstr = buffers[0].format;
+ if (valstr != NULL) {
+ if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ CAMHAL_LOGDA("YUV420SP format selected");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ CAMHAL_LOGDA("RAW Picture format selected");
+ pixFormat = OMX_COLOR_FormatRawBayer10bit;
+ } else {
+ CAMHAL_LOGDA("Format not supported, selecting YUV420SP by default");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+ } else {
+ CAMHAL_LOGDA("Format not supported, selecting YUV420SP by default");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+
+ if ( (w != (int)portData->mWidth) || (h != (int)portData->mHeight) ||
+ (s != (int) portData->mStride) || (pixFormat != portData->mColorFormat)) {
+ portData->mWidth = w;
+ portData->mHeight = h;
+
+ if ( OMX_COLOR_FormatRawBayer10bit == pixFormat ) {
+ portData->mStride = w * 2;
+ } else {
+ portData->mStride = s;
+ }
+
+ portData->mColorFormat = pixFormat;
+
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_IN_VIDEO, *portData);
+ if ( ret != NO_ERROR ) {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::startReprocess()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters * portData = NULL;
+
+ LOG_FUNCTION_NAME;
+ CAMHAL_LOGD ("mReprocConfigured = %d", mReprocConfigured);
+ if (!mReprocConfigured) {
+ return NO_ERROR;
+ }
+
+ portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+ CAMHAL_LOGD ("mBurstFramesQueued = %d", mBurstFramesQueued);
+ if (NO_ERROR == ret) {
+ android::AutoMutex lock(mBurstLock);
+
+ for ( int index = 0 ; index < portData->mMaxQueueable ; index++ ) {
+ CAMHAL_LOGDB("Queuing buffer on video input port - %p",
+ portData->mBufferHeader[index]->pBuffer);
+ portData->mStatus[index] = OMXCameraPortParameters::FILL;
+ eError = OMX_EmptyThisBuffer(mCameraAdapterParameters.mHandleComp,
+ (OMX_BUFFERHEADERTYPE*)portData->mBufferHeader[index]);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+ }
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::stopReprocess()
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters *portData = NULL;
+
+ if (!mReprocConfigured) {
+ return NO_ERROR;
+ }
+
+ portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+ // Disable port - send command and then free all buffers
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ mStopReprocSem);
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ NULL);
+ if (portData) {
+ CAMHAL_LOGDB("Freeing buffers on reproc port - num: %d", portData->mNumBufs);
+ for (int index = 0 ; index < portData->mNumBufs ; index++) {
+ CAMHAL_LOGDB("Freeing buffer on reproc port - 0x%x",
+ ( unsigned int ) portData->mBufferHeader[index]->pBuffer);
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ (OMX_BUFFERHEADERTYPE*)portData->mBufferHeader[index]);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+ }
+ CAMHAL_LOGDA("Waiting for port disable");
+ ret = mStopReprocSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if (mComponentState == OMX_StateInvalid) {
CAMHAL_LOGEA("Invalid State after Disable Image Port Exiting!!!");
+ goto EXIT;
+ }
+ if (NO_ERROR == ret) {
+ CAMHAL_LOGDA("Port disabled");
+ } else {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ NULL);
+ CAMHAL_LOGDA("Timeout expired on port disable");
+ goto EXIT;
+ }
+
+ deinitInternalBuffers(mCameraAdapterParameters.mVideoInPortIndex);
+
+ mReprocConfigured = false;
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::disableReprocess(){
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ // no-op..for now
+
+EXIT:
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::UseBuffersReprocess(CameraBuffer *bufArr, int num)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters *portData = NULL;
+
+ portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+ if ( 0 != mUseReprocessSem.Count() ) {
+ CAMHAL_LOGEB("Error mUseReprocessSem semaphore count %d", mUseReprocessSem.Count());
+ return BAD_VALUE;
+ }
+
+ if (mAdapterState == REPROCESS_STATE) {
+ stopReprocess();
+ } else if (mAdapterState == CAPTURE_STATE) {
+ stopImageCapture();
+ disableImagePort();
+ }
+
+ if (mReprocConfigured) {
+ return NO_ERROR;
+ }
+
+ portData->mNumBufs = num;
+
+ // Configure
+ ret = setParametersReprocess(mParams, bufArr, mAdapterState);
+
+ // Configure DOMX to use either gralloc handles or vptrs
+ OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+ OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
+
+ domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mVideoInPortIndex;
+ if (bufArr[0].type == CAMERA_BUFFER_ANW) {
+ CAMHAL_LOGD("Using ANW");
+ domxUseGrallocHandles.bEnable = OMX_TRUE;
+
+ // Need to allocate tiler reservation and state we are going to be using
+ // pagelist buffers. Assuming this happens when buffers if from anw
+ initInternalBuffers(mCameraAdapterParameters.mVideoInPortIndex);
+ } else {
+ CAMHAL_LOGD("Using ION");
+ domxUseGrallocHandles.bEnable = OMX_FALSE;
+ }
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ // Enable Port
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ mUseReprocessSem);
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ NULL);
+ GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+ for (int index = 0 ; index < portData->mNumBufs ; index++)
+ {
+ OMX_BUFFERHEADERTYPE *pBufferHdr;
+ CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d",
+ (unsigned int)bufArr[index].opaque,
+ (int)portData->mBufSize);
+
+ eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp,
+ &pBufferHdr,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ 0,
+ portData->mBufSize,
+ (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
+
+ CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
+ GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+ pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+ bufArr[index].index = index;
+ pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufferHdr->nVersion.s.nRevision = 0;
+ pBufferHdr->nVersion.s.nStep = 0;
+ portData->mBufferHeader[index] = pBufferHdr;
+ }
+
+ // Wait for port enable event
+ CAMHAL_LOGDA("Waiting for port enable");
+ ret = mUseReprocessSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+ // Error out if something bad happened while we wait
+ if (mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGEA("Invalid State while trying to enable port for reprocessing");
+ goto EXIT;
+ }
+
+ if (ret == NO_ERROR) {
+ CAMHAL_LOGDA("Port enabled");
+ } else {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ NULL);
+ CAMHAL_LOGDA("Timeout expired on port enable");
+ goto EXIT;
+ }
+
+ mReprocConfigured = true;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ // Release image buffers
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXZoom.cpp b/camera/OMXCameraAdapter/OMXZoom.cpp
index eec7691..e39a3b0 100644
--- a/camera/OMXCameraAdapter/OMXZoom.cpp
+++ b/camera/OMXCameraAdapter/OMXZoom.cpp
@@ -21,14 +21,11 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const int32_t OMXCameraAdapter::ZOOM_STEPS [ZOOM_STAGES] = {
65536, 68157, 70124, 72745,
@@ -49,20 +46,19 @@ const int32_t OMXCameraAdapter::ZOOM_STEPS [ZOOM_STAGES] = {
524288 };
-status_t OMXCameraAdapter::setParametersZoom(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersZoom(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
LOG_FUNCTION_NAME;
//Immediate zoom should not be avaialable while smooth zoom is running
if ( ( ZOOM_ACTIVE & state ) != ZOOM_ACTIVE )
{
- int zoom = params.getInt(CameraParameters::KEY_ZOOM);
- if( ( zoom >= 0 ) && ( zoom < ZOOM_STAGES ) )
- {
+ int zoom = params.getInt(android::CameraParameters::KEY_ZOOM);
+ if (( zoom >= 0 ) && ( zoom < mMaxZoomSupported )) {
mTargetZoomIdx = zoom;
//Immediate zoom should be applied instantly ( CTS requirement )
@@ -97,8 +93,7 @@ status_t OMXCameraAdapter::doZoom(int index)
ret = -1;
}
- if ( ( 0 > index) || ( ( ZOOM_STAGES - 1 ) < index ) )
- {
+ if (( 0 > index) || ((mMaxZoomSupported - 1 ) < index )) {
CAMHAL_LOGEB("Zoom index %d out of range", index);
ret = -EINVAL;
}
@@ -139,7 +134,7 @@ status_t OMXCameraAdapter::advanceZoom()
{
status_t ret = NO_ERROR;
AdapterState state;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
BaseCameraAdapter::getState(state);
@@ -241,23 +236,20 @@ status_t OMXCameraAdapter::startSmoothZoom(int targetIdx)
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
CAMHAL_LOGDB("Start smooth zoom target = %d, mCurrentIdx = %d",
targetIdx,
mCurrentZoomIdx);
- if ( ( targetIdx >= 0 ) && ( targetIdx < ZOOM_STAGES ) )
- {
+ if (( targetIdx >= 0 ) && ( targetIdx < mMaxZoomSupported )) {
mTargetZoomIdx = targetIdx;
mZoomParameterIdx = mCurrentZoomIdx;
mReturnZoomStatus = false;
- }
- else
- {
+ } else {
CAMHAL_LOGEB("Smooth value out of range %d!", targetIdx);
ret = -EINVAL;
- }
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -267,7 +259,7 @@ status_t OMXCameraAdapter::startSmoothZoom(int targetIdx)
status_t OMXCameraAdapter::stopSmoothZoom()
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
LOG_FUNCTION_NAME;
@@ -293,4 +285,5 @@ status_t OMXCameraAdapter::stopSmoothZoom()
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti