summaryrefslogtreecommitdiffstats
path: root/camera
diff options
context:
space:
mode:
authorKyle Repinski <repinski23@gmail.com>2015-01-07 08:39:28 -0600
committerZiyan <jaraidaniel@gmail.com>2015-04-11 20:24:52 +0200
commitc5f4358c48d6e50b2affb3ad6c2c0f59546f5b04 (patch)
tree74070cb6d50ec822d3ef7acdaa390cf2f94c4097 /camera
parentc52c76fa1593f374173a818b4de5bd7c51903fbc (diff)
downloaddevice_samsung_tuna-c5f4358c48d6e50b2affb3ad6c2c0f59546f5b04.zip
device_samsung_tuna-c5f4358c48d6e50b2affb3ad6c2c0f59546f5b04.tar.gz
device_samsung_tuna-c5f4358c48d6e50b2affb3ad6c2c0f59546f5b04.tar.bz2
camera: Update camera HAL.
OmapZoom p-jb-release branch with 'CameraHal: Camera Capabilities query update' reverted, as well as a bunch of stuff ifdef'd out. Needs a lot of work still. At this point it's a regression, but it has to be done.
Diffstat (limited to 'camera')
-rw-r--r--camera/ANativeWindowDisplayAdapter.cpp454
-rw-r--r--camera/Android.mk310
-rw-r--r--camera/AppCallbackNotifier.cpp480
-rw-r--r--camera/BaseCameraAdapter.cpp934
-rw-r--r--camera/BufferSourceAdapter.cpp816
-rw-r--r--camera/CameraHal.cpp2542
-rw-r--r--camera/CameraHalCommon.cpp14
-rw-r--r--camera/CameraHalUtilClasses.cpp63
-rw-r--r--camera/CameraHal_Module.cpp283
-rw-r--r--camera/CameraParameters.cpp138
-rw-r--r--camera/CameraProperties.cpp44
-rw-r--r--camera/Encoder_libjpeg.cpp103
-rw-r--r--camera/MemoryManager.cpp167
-rw-r--r--camera/NV12_resize.cpp (renamed from camera/NV12_resize.c)221
-rw-r--r--camera/OMXCameraAdapter/OMX3A.cpp604
-rw-r--r--camera/OMXCameraAdapter/OMXAlgo.cpp571
-rw-r--r--[-rwxr-xr-x]camera/OMXCameraAdapter/OMXCameraAdapter.cpp2642
-rw-r--r--camera/OMXCameraAdapter/OMXCapabilities.cpp1977
-rw-r--r--camera/OMXCameraAdapter/OMXCapture.cpp1325
-rw-r--r--camera/OMXCameraAdapter/OMXDccDataSave.cpp361
-rw-r--r--camera/OMXCameraAdapter/OMXDefaults.cpp40
-rw-r--r--camera/OMXCameraAdapter/OMXExif.cpp60
-rw-r--r--camera/OMXCameraAdapter/OMXFD.cpp296
-rw-r--r--camera/OMXCameraAdapter/OMXFocus.cpp232
-rw-r--r--camera/OMXCameraAdapter/OMXMetadata.cpp181
-rw-r--r--camera/OMXCameraAdapter/OMXReprocess.cpp340
-rw-r--r--camera/OMXCameraAdapter/OMXZoom.cpp37
-rw-r--r--camera/SensorListener.cpp39
-rw-r--r--camera/TICameraParameters.cpp137
-rw-r--r--camera/V4LCameraAdapter/V4LCameraAdapter.cpp1165
-rw-r--r--camera/V4LCameraAdapter/V4LCapabilities.cpp346
-rw-r--r--camera/inc/ANativeWindowDisplayAdapter.h47
-rw-r--r--camera/inc/BaseCameraAdapter.h122
-rw-r--r--camera/inc/BufferSourceAdapter.h190
-rw-r--r--camera/inc/CameraHal.h687
-rw-r--r--camera/inc/CameraProperties.h105
-rw-r--r--camera/inc/Common.h65
-rw-r--r--camera/inc/Encoder_libjpeg.h43
-rw-r--r--camera/inc/General3A_Settings.h138
-rw-r--r--camera/inc/NV12_resize.h161
-rw-r--r--camera/inc/OMXCameraAdapter/OMXCameraAdapter.h559
-rw-r--r--camera/inc/OMXCameraAdapter/OMXSceneModeTables.h306
-rw-r--r--camera/inc/SensorListener.h26
-rw-r--r--camera/inc/TICameraParameters.h133
-rw-r--r--camera/inc/V4LCameraAdapter/V4LCameraAdapter.h130
45 files changed, 14368 insertions, 5266 deletions
diff --git a/camera/ANativeWindowDisplayAdapter.cpp b/camera/ANativeWindowDisplayAdapter.cpp
index e4a70ae..45f7ba0 100644
--- a/camera/ANativeWindowDisplayAdapter.cpp
+++ b/camera/ANativeWindowDisplayAdapter.cpp
@@ -14,18 +14,14 @@
* limitations under the License.
*/
-
-
-
-#define LOG_TAG "CameraHAL"
-
#include "ANativeWindowDisplayAdapter.h"
#include <OMX_IVCommon.h>
#include <ui/GraphicBuffer.h>
#include <ui/GraphicBufferMapper.h>
#include <hal_public.h>
-namespace android {
+namespace Ti {
+namespace Camera {
///Constant declarations
///@todo Check the time units
@@ -41,88 +37,97 @@ OMX_COLOR_FORMATTYPE toOMXPixFormat(const char* parameters_format)
if ( parameters_format != NULL )
{
- if (strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
{
CAMHAL_LOGDA("CbYCrY format selected");
pixFormat = OMX_COLOR_FormatCbYCrY;
}
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
{
CAMHAL_LOGDA("YUV420SP format selected");
pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
}
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0)
{
CAMHAL_LOGDA("RGB565 format selected");
pixFormat = OMX_COLOR_Format16bitRGB565;
}
else
{
- CAMHAL_LOGDA("Invalid format, CbYCrY format selected as default");
- pixFormat = OMX_COLOR_FormatCbYCrY;
+ CAMHAL_LOGDA("Invalid format, NV12 format selected as default");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
}
}
else {
- CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY");
- pixFormat = OMX_COLOR_FormatCbYCrY;
+ CAMHAL_LOGEA("Preview format is NULL, defaulting to NV12");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
}
return pixFormat;
}
-const char* getPixFormatConstant(const char* parameters_format)
+const char* DisplayAdapter::getPixFormatConstant(const char* parameters_format) const
{
const char* pixFormat;
if ( parameters_format != NULL )
{
- if (strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
{
CAMHAL_LOGVA("CbYCrY format selected");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
+ pixFormat = android::CameraParameters::PIXEL_FORMAT_YUV422I;
}
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
- strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
{
// TODO(XXX): We are treating YV12 the same as YUV420SP
CAMHAL_LOGVA("YUV420SP format selected");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP;
+ pixFormat = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
}
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0)
{
CAMHAL_LOGVA("RGB565 format selected");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_RGB565;
+ pixFormat = android::CameraParameters::PIXEL_FORMAT_RGB565;
+ }
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0)
+ {
+ CAMHAL_LOGVA("BAYER format selected");
+ pixFormat = android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
}
else
{
- CAMHAL_LOGEA("Invalid format, CbYCrY format selected as default");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
+ CAMHAL_LOGEA("Invalid format, NV12 format selected as default");
+ pixFormat = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
}
}
else
{
- CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
+ CAMHAL_LOGEA("Preview format is NULL, defaulting to NV12");
+ pixFormat = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
}
return pixFormat;
}
-const size_t getBufSize(const char* parameters_format, int width, int height)
+size_t DisplayAdapter::getBufSize(const char* parameters_format, int width, int height) const
{
int buf_size;
if ( parameters_format != NULL ) {
if (strcmp(parameters_format,
- (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
buf_size = width * height * 2;
}
- else if((strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) ||
- (strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420P) == 0)) {
+ else if((strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) ||
+ (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0)) {
buf_size = width * height * 3 / 2;
}
else if(strcmp(parameters_format,
- (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ buf_size = width * height * 2;
+ }
+ else if (strcmp(parameters_format,
+ android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
buf_size = width * height * 2;
} else {
CAMHAL_LOGEA("Invalid format");
@@ -162,8 +167,7 @@ ANativeWindowDisplayAdapter::ANativeWindowDisplayAdapter():mDisplayThread(NULL),
#endif
mPixelFormat = NULL;
- mBufferHandleMap = NULL;
- mGrallocHandleMap = NULL;
+ mBuffers = NULL;
mOffsetsMap = NULL;
mFrameProvider = NULL;
mANativeWindow = NULL;
@@ -188,8 +192,8 @@ ANativeWindowDisplayAdapter::ANativeWindowDisplayAdapter():mDisplayThread(NULL),
ANativeWindowDisplayAdapter::~ANativeWindowDisplayAdapter()
{
- Semaphore sem;
- TIUTILS::Message msg;
+ Utils::Semaphore sem;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -245,7 +249,7 @@ status_t ANativeWindowDisplayAdapter::initialize()
}
///Start the display thread
- status_t ret = mDisplayThread->run("DisplayThread", PRIORITY_URGENT_DISPLAY);
+ status_t ret = mDisplayThread->run("DisplayThread", android::PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
CAMHAL_LOGEA("Couldn't run display thread");
@@ -316,10 +320,9 @@ int ANativeWindowDisplayAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
LOG_FUNCTION_NAME;
- if ( NULL == errorNotifier )
- {
+ if ( NULL == errorNotifier ) {
CAMHAL_LOGEA("Invalid Error Notifier reference");
- ret = -EINVAL;
+ ret = BAD_VALUE;
}
if ( NO_ERROR == ret )
@@ -342,7 +345,7 @@ status_t ANativeWindowDisplayAdapter::setSnapshotTimeRef(struct timeval *refTime
if ( NULL != refTime )
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
memcpy(&mStartCapture, refTime, sizeof(struct timeval));
}
@@ -354,10 +357,10 @@ status_t ANativeWindowDisplayAdapter::setSnapshotTimeRef(struct timeval *refTime
#endif
-int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct timeval *refTime, S3DParameters *s3dParams)
+int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct timeval *refTime)
{
- Semaphore sem;
- TIUTILS::Message msg;
+ Utils::Semaphore sem;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -369,17 +372,11 @@ int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct tim
return NO_ERROR;
}
-#if 0 //TODO: s3d is not part of bringup...will reenable
- if (s3dParams)
- mOverlay->set_s3d_params(s3dParams->mode, s3dParams->framePacking,
- s3dParams->order, s3dParams->subSampling);
-#endif
-
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
if ( NULL != refTime )
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
memcpy(&mStandbyToShot, refTime, sizeof(struct timeval));
mMeasureStandby = true;
}
@@ -401,6 +398,7 @@ int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct tim
// Register with the frame provider for frames
mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ mFrameProvider->enableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
mDisplayEnabled = true;
mPreviewWidth = width;
@@ -416,7 +414,7 @@ int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct tim
int ANativeWindowDisplayAdapter::disableDisplay(bool cancel_buffer)
{
status_t ret = NO_ERROR;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
LOG_FUNCTION_NAME;
@@ -429,15 +427,16 @@ int ANativeWindowDisplayAdapter::disableDisplay(bool cancel_buffer)
// Unregister with the frame provider here
mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ mFrameProvider->disableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
mFrameProvider->removeFramePointers();
if ( NULL != mDisplayThread.get() )
{
//Send STOP_DISPLAY COMMAND to display thread. Display thread will stop and dequeue all messages
// and then wait for message
- Semaphore sem;
+ Utils::Semaphore sem;
sem.Create();
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = DisplayThread::DISPLAY_STOP;
// Send the semaphore to signal once the command is completed
@@ -452,11 +451,14 @@ int ANativeWindowDisplayAdapter::disableDisplay(bool cancel_buffer)
}
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
{
///Reset the display enabled flag
mDisplayEnabled = false;
+ // Reset pause flag since display is being disabled
+ mPaused = false;
+
///Reset the offset values
mXOff = -1;
mYOff = -1;
@@ -493,7 +495,7 @@ status_t ANativeWindowDisplayAdapter::pauseDisplay(bool pause)
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mPaused = pause;
}
@@ -520,18 +522,20 @@ void ANativeWindowDisplayAdapter::destroy()
}
// Implementation of inherited interfaces
-void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs)
+CameraBuffer* ANativeWindowDisplayAdapter::allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs)
{
LOG_FUNCTION_NAME;
status_t err;
int i = -1;
const int lnumBufs = numBufs;
- mBufferHandleMap = new buffer_handle_t*[lnumBufs];
- mGrallocHandleMap = new IMG_native_handle_t*[lnumBufs];
int undequeued = 0;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
- Rect bounds;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds;
+ mBuffers = new CameraBuffer [lnumBufs];
+ memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
+
+ mFramesType.clear();
if ( NULL == mANativeWindow ) {
return NULL;
@@ -539,10 +543,10 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
// Set gralloc usage bits for window.
err = mANativeWindow->set_usage(mANativeWindow, CAMHAL_GRALLOC_USAGE);
- if (err != 0) {
- ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::setUsage failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
@@ -553,10 +557,10 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
CAMHAL_LOGDB("Number of buffers set to ANativeWindow %d", numBufs);
///Set the number of buffers needed for camera preview
err = mANativeWindow->set_buffer_count(mANativeWindow, numBufs);
- if (err != 0) {
- ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), -err);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::setBufferCount failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
@@ -574,10 +578,10 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
height,
/*toOMXPixFormat(format)*/HAL_PIXEL_FORMAT_TI_NV12); // Gralloc only supports NV12 alloc!
- if (err != 0) {
- ALOGE("native_window_set_buffers_geometry failed: %s (%d)", strerror(-err), -err);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("native_window_set_buffers_geometry failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
@@ -588,7 +592,7 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
///We just return the buffers from ANativeWindow, if the width and height are same, else (vstab, vnf case)
///re-allocate buffers using ANativeWindow and then get them
///@todo - Re-allocate buffers for vnf and vstab using the width, height, format, numBufs etc
- if ( mBufferHandleMap == NULL )
+ if ( mBuffers == NULL )
{
CAMHAL_LOGEA("Couldn't create array for ANativeWindow buffers");
LOG_FUNCTION_NAME_EXIT;
@@ -599,17 +603,16 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
for ( i=0; i < mBufferCount; i++ )
{
- IMG_native_handle_t** hndl2hndl;
- IMG_native_handle_t* handle;
+ buffer_handle_t *handle;
int stride; // dummy variable to get stride
// TODO(XXX): Do we need to keep stride information in camera hal?
- err = mANativeWindow->dequeue_buffer(mANativeWindow, (buffer_handle_t**) &hndl2hndl, &stride);
+ err = mANativeWindow->dequeue_buffer(mANativeWindow, &handle, &stride);
- if (err != 0) {
- CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::dequeueBuffer failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
@@ -617,11 +620,16 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
goto fail;
}
- handle = *hndl2hndl;
+ CAMHAL_LOGDB("got handle %p", handle);
+ mBuffers[i].opaque = (void *)handle;
+ mBuffers[i].type = CAMERA_BUFFER_ANW;
+ mFramesWithCameraAdapterMap.add(handle, i);
- mBufferHandleMap[i] = (buffer_handle_t*) hndl2hndl;
- mGrallocHandleMap[i] = handle;
- mFramesWithCameraAdapterMap.add((int) mGrallocHandleMap[i], i);
+ // Tag remaining preview buffers as preview frames
+ if ( i >= ( mBufferCount - undequeued ) ) {
+ mFramesType.add( (int) mBuffers[i].opaque,
+ CameraFrame::PREVIEW_FRAME_SYNC);
+ }
bytes = getBufSize(format, width, height);
@@ -636,33 +644,37 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
for( i = 0; i < mBufferCount-undequeued; i++ )
{
void *y_uv[2];
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
- mANativeWindow->lock_buffer(mANativeWindow, mBufferHandleMap[i]);
+ mANativeWindow->lock_buffer(mANativeWindow, handle);
- mapper.lock((buffer_handle_t) mGrallocHandleMap[i], CAMHAL_GRALLOC_USAGE, bounds, y_uv);
- mFrameProvider->addFramePointers(mGrallocHandleMap[i] , y_uv);
+ mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ mBuffers[i].mapped = y_uv[0];
+ mFrameProvider->addFramePointers(&mBuffers[i], y_uv);
}
// return the rest of the buffers back to ANativeWindow
for(i = (mBufferCount-undequeued); i >= 0 && i < mBufferCount; i++)
{
- err = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[i]);
- if (err != 0) {
- CAMHAL_LOGEB("cancel_buffer failed: %s (%d)", strerror(-err), -err);
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
+ err = mANativeWindow->cancel_buffer(mANativeWindow, handle);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::cancelBuffer failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
goto fail;
}
- mFramesWithCameraAdapterMap.removeItem((int) mGrallocHandleMap[i]);
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[i].opaque);
//LOCK UNLOCK TO GET YUV POINTERS
void *y_uv[2];
- mapper.lock((buffer_handle_t) mGrallocHandleMap[i], CAMHAL_GRALLOC_USAGE, bounds, y_uv);
- mFrameProvider->addFramePointers(mGrallocHandleMap[i] , y_uv);
- mapper.unlock((buffer_handle_t) mGrallocHandleMap[i]);
+ mapper.lock(*(buffer_handle_t *) mBuffers[i].opaque, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ mBuffers[i].mapped = y_uv[0];
+ mFrameProvider->addFramePointers(&mBuffers[i], y_uv);
+ mapper.unlock(*(buffer_handle_t *) mBuffers[i].opaque);
}
mFirstInit = true;
@@ -670,26 +682,26 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
mFrameWidth = width;
mFrameHeight = height;
- return mGrallocHandleMap;
+ return mBuffers;
fail:
// need to cancel buffers if any were dequeued
for (int start = 0; start < i && i > 0; start++) {
- int err = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[start]);
- if (err != 0) {
- CAMHAL_LOGEB("cancelBuffer failed w/ error 0x%08x", err);
+ status_t err = mANativeWindow->cancel_buffer(mANativeWindow,
+ (buffer_handle_t *) mBuffers[start].opaque);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::cancelBuffer failed w/ error 0x%08x", err);
break;
}
- mFramesWithCameraAdapterMap.removeItem((int) mGrallocHandleMap[start]);
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[start].opaque);
}
- freeBuffer(mGrallocHandleMap);
+ freeBufferList(mBuffers);
CAMHAL_LOGEA("Error occurred, performing cleanup");
- if ( NULL != mErrorNotifier.get() )
- {
- mErrorNotifier->errorNotify(-ENOMEM);
+ if ( NULL != mErrorNotifier.get() ) {
+ mErrorNotifier->errorNotify(NO_MEMORY);
}
LOG_FUNCTION_NAME_EXIT;
@@ -697,6 +709,13 @@ void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const c
}
+CameraBuffer* ANativeWindowDisplayAdapter::getBufferList(int *numBufs) {
+ LOG_FUNCTION_NAME;
+ if (numBufs) *numBufs = -1;
+
+ return NULL;
+}
+
uint32_t * ANativeWindowDisplayAdapter::getOffsets()
{
const int lnumBufs = mBufferCount;
@@ -711,7 +730,7 @@ uint32_t * ANativeWindowDisplayAdapter::getOffsets()
goto fail;
}
- if( mBufferHandleMap == NULL)
+ if( mBuffers == NULL)
{
CAMHAL_LOGEA("Buffers not allocated yet!!");
goto fail;
@@ -722,7 +741,6 @@ uint32_t * ANativeWindowDisplayAdapter::getOffsets()
mOffsetsMap = new uint32_t[lnumBufs];
for(int i = 0; i < mBufferCount; i++)
{
- IMG_native_handle_t* handle = (IMG_native_handle_t*) *(mBufferHandleMap[i]);
mOffsetsMap[i] = 0;
}
}
@@ -739,9 +757,8 @@ uint32_t * ANativeWindowDisplayAdapter::getOffsets()
mOffsetsMap = NULL;
}
- if ( NULL != mErrorNotifier.get() )
- {
- mErrorNotifier->errorNotify(-ENOSYS);
+ if ( NULL != mErrorNotifier.get() ) {
+ mErrorNotifier->errorNotify(INVALID_OPERATION);
}
LOG_FUNCTION_NAME_EXIT;
@@ -749,34 +766,48 @@ uint32_t * ANativeWindowDisplayAdapter::getOffsets()
return NULL;
}
-int ANativeWindowDisplayAdapter::maxQueueableBuffers(unsigned int& queueable)
-{
+status_t ANativeWindowDisplayAdapter::minUndequeueableBuffers(int& undequeueable) {
LOG_FUNCTION_NAME;
- int ret = NO_ERROR;
- int undequeued = 0;
-
- if(mBufferCount == 0)
- {
- ret = -ENOSYS;
- goto end;
- }
+ status_t ret = NO_ERROR;
- if(!mANativeWindow)
- {
- ret = -ENOSYS;
+ if(!mANativeWindow) {
+ ret = INVALID_OPERATION;
goto end;
}
- ret = mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeued);
+ ret = mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeueable);
if ( NO_ERROR != ret ) {
CAMHAL_LOGEB("get_min_undequeued_buffer_count failed: %s (%d)", strerror(-ret), -ret);
- if ( ENODEV == ret ) {
+ if ( NO_INIT == ret ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
- return -ret;
+ return ret;
+ }
+
+ end:
+ return ret;
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+status_t ANativeWindowDisplayAdapter::maxQueueableBuffers(unsigned int& queueable)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+ int undequeued = 0;
+
+ if(mBufferCount == 0)
+ {
+ ret = INVALID_OPERATION;
+ goto end;
+ }
+
+ ret = minUndequeueableBuffers(undequeued);
+ if (ret != NO_ERROR) {
+ goto end;
}
queueable = mBufferCount - undequeued;
@@ -792,10 +823,12 @@ int ANativeWindowDisplayAdapter::getFd()
if(mFD == -1)
{
- IMG_native_handle_t* handle = (IMG_native_handle_t*) *(mBufferHandleMap[0]);
+ buffer_handle_t *handle = (buffer_handle_t *)mBuffers[0].opaque;
+ IMG_native_handle_t *img = (IMG_native_handle_t *)handle;
// TODO: should we dup the fd? not really necessary and another thing for ANativeWindow
// to manage and close...
- mFD = dup(handle->fd[0]);
+
+ mFD = dup(img->fd[0]);
}
LOG_FUNCTION_NAME_EXIT;
@@ -808,29 +841,36 @@ status_t ANativeWindowDisplayAdapter::returnBuffersToWindow()
{
status_t ret = NO_ERROR;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
//Give the buffers back to display here - sort of free it
if (mANativeWindow)
for(unsigned int i = 0; i < mFramesWithCameraAdapterMap.size(); i++) {
int value = mFramesWithCameraAdapterMap.valueAt(i);
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[value].opaque;
+
+ // if buffer index is out of bounds skip
+ if ((value < 0) || (value >= mBufferCount)) {
+ CAMHAL_LOGEA("Potential out bounds access to handle...skipping");
+ continue;
+ }
// unlock buffer before giving it up
- mapper.unlock((buffer_handle_t) mGrallocHandleMap[value]);
+ mapper.unlock(*handle);
- ret = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[value]);
- if ( ENODEV == ret ) {
+ ret = mANativeWindow->cancel_buffer(mANativeWindow, handle);
+ if ( NO_INIT == ret ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
- return -ret;
+ return ret;
} else if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("cancel_buffer() failed: %s (%d)",
+ CAMHAL_LOGE("Surface::cancelBuffer() failed: %s (%d)",
strerror(-ret),
-ret);
- return -ret;
+ return ret;
}
}
else
- ALOGE("mANativeWindow is NULL");
+ CAMHAL_LOGE("mANativeWindow is NULL");
///Clear the frames with camera adapter map
mFramesWithCameraAdapterMap.clear();
@@ -839,36 +879,35 @@ status_t ANativeWindowDisplayAdapter::returnBuffersToWindow()
}
-int ANativeWindowDisplayAdapter::freeBuffer(void* buf)
+int ANativeWindowDisplayAdapter::freeBufferList(CameraBuffer * buflist)
{
LOG_FUNCTION_NAME;
- int *buffers = (int *) buf;
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
- if((int *)mGrallocHandleMap != buffers)
+ if(mBuffers != buflist)
{
CAMHAL_LOGEA("CameraHal passed wrong set of buffers to free!!!");
- if (mGrallocHandleMap != NULL)
- delete []mGrallocHandleMap;
- mGrallocHandleMap = NULL;
+ if (mBuffers != NULL)
+ delete []mBuffers;
+ mBuffers = NULL;
}
-
+ /* FIXME this will probably want the list that was just deleted */
returnBuffersToWindow();
- if ( NULL != buf )
+ if ( NULL != buflist )
{
- delete [] buffers;
- mGrallocHandleMap = NULL;
+ delete [] buflist;
+ mBuffers = NULL;
}
- if( mBufferHandleMap != NULL)
+ if( mBuffers != NULL)
{
- delete [] mBufferHandleMap;
- mBufferHandleMap = NULL;
+ delete [] mBuffers;
+ mBuffers = NULL;
}
if ( NULL != mOffsetsMap )
@@ -883,6 +922,8 @@ int ANativeWindowDisplayAdapter::freeBuffer(void* buf)
mFD = -1;
}
+ mFramesType.clear();
+
return NO_ERROR;
}
@@ -892,11 +933,6 @@ bool ANativeWindowDisplayAdapter::supportsExternalBuffering()
return false;
}
-int ANativeWindowDisplayAdapter::useBuffers(void *bufArr, int num)
-{
- return NO_ERROR;
-}
-
void ANativeWindowDisplayAdapter::displayThread()
{
bool shouldLive = true;
@@ -907,7 +943,7 @@ void ANativeWindowDisplayAdapter::displayThread()
while(shouldLive)
{
- ret = TIUTILS::MessageQueue::waitForMsg(&mDisplayThread->msgQ()
+ ret = Utils::MessageQueue::waitForMsg(&mDisplayThread->msgQ()
, &mDisplayQ
, NULL
, ANativeWindowDisplayAdapter::DISPLAY_TIMEOUT);
@@ -929,7 +965,7 @@ void ANativeWindowDisplayAdapter::displayThread()
}
else
{
- TIUTILS::Message msg;
+ Utils::Message msg;
///Get the dummy msg from the displayQ
if(mDisplayQ.get(&msg)!=NO_ERROR)
{
@@ -960,7 +996,7 @@ void ANativeWindowDisplayAdapter::displayThread()
bool ANativeWindowDisplayAdapter::processHalMsg()
{
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -987,6 +1023,12 @@ bool ANativeWindowDisplayAdapter::processHalMsg()
CAMHAL_LOGDA("Display thread received DISPLAY_STOP command from Camera HAL");
mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_STOPPED;
+ // flush frame message queue
+ while ( !mDisplayQ.isEmpty() ) {
+ Utils::Message message;
+ mDisplayQ.get(&message);
+ }
+
break;
case DisplayThread::DISPLAY_EXIT:
@@ -1013,7 +1055,7 @@ bool ANativeWindowDisplayAdapter::processHalMsg()
{
CAMHAL_LOGDA("+Signalling display semaphore");
- Semaphore &sem = *((Semaphore*)msg.arg1);
+ Utils::Semaphore &sem = *((Utils::Semaphore*)msg.arg1);
sem.Signal();
@@ -1031,7 +1073,7 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
status_t ret = NO_ERROR;
uint32_t actualFramesWithDisplay = 0;
android_native_buffer_t *buffer = NULL;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
int i;
///@todo Do cropping based on the stabilized frame coordinates
@@ -1039,24 +1081,32 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
///display or rendering rate whichever is lower
///Queue the buffer to overlay
- if (!mGrallocHandleMap || !dispFrame.mBuffer) {
+ if ( NULL == mANativeWindow ) {
+ return NO_INIT;
+ }
+
+ if (!mBuffers || !dispFrame.mBuffer) {
CAMHAL_LOGEA("NULL sent to PostFrame");
- return -EINVAL;
+ return BAD_VALUE;
}
for ( i = 0; i < mBufferCount; i++ )
{
- if ( ((int) dispFrame.mBuffer ) == (int)mGrallocHandleMap[i] )
+ if ( dispFrame.mBuffer == &mBuffers[i] )
{
break;
}
}
+
+ android::AutoMutex lock(mLock);
+
+ mFramesType.add( (int)mBuffers[i].opaque, dispFrame.mType);
+
if ( mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_STARTED &&
(!mPaused || CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType) &&
!mSuspend)
{
- Mutex::Autolock lock(mLock);
uint32_t xOff = (dispFrame.mOffset% PAGE_SIZE);
uint32_t yOff = (dispFrame.mOffset / PAGE_SIZE);
@@ -1066,15 +1116,15 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
CAMHAL_LOGDB("Offset %d xOff = %d, yOff = %d", dispFrame.mOffset, xOff, yOff);
uint8_t bytesPerPixel;
///Calculate bytes per pixel based on the pixel format
- if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ if(strcmp(mPixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
{
bytesPerPixel = 2;
}
- else if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ else if(strcmp(mPixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0)
{
bytesPerPixel = 2;
}
- else if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
+ else if(strcmp(mPixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
{
bytesPerPixel = 1;
}
@@ -1095,19 +1145,22 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
mYOff = yOff;
}
- // unlock buffer before sending to display
- mapper.unlock((buffer_handle_t) mGrallocHandleMap[i]);
- ret = mANativeWindow->enqueue_buffer(mANativeWindow, mBufferHandleMap[i]);
- if (ret != 0) {
- ALOGE("Surface::queueBuffer returned error %d", ret);
+ {
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
+ // unlock buffer before sending to display
+ mapper.unlock(*handle);
+ ret = mANativeWindow->enqueue_buffer(mANativeWindow, handle);
+ }
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGE("Surface::queueBuffer returned error %d", ret);
}
- mFramesWithCameraAdapterMap.removeItem((int) dispFrame.mBuffer);
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) dispFrame.mBuffer->opaque);
// HWComposer has not minimum buffer requirement. We should be able to dequeue
// the buffer immediately
- TIUTILS::Message msg;
+ Utils::Message msg;
mDisplayQ.put(&msg);
@@ -1133,20 +1186,20 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
}
else
{
- Mutex::Autolock lock(mLock);
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
// unlock buffer before giving it up
- mapper.unlock((buffer_handle_t) mGrallocHandleMap[i]);
+ mapper.unlock(*handle);
// cancel buffer and dequeue another one
- ret = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[i]);
- if (ret != 0) {
- ALOGE("Surface::queueBuffer returned error %d", ret);
+ ret = mANativeWindow->cancel_buffer(mANativeWindow, handle);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGE("Surface::cancelBuffer returned error %d", ret);
}
- mFramesWithCameraAdapterMap.removeItem((int) dispFrame.mBuffer);
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) dispFrame.mBuffer->opaque);
- TIUTILS::Message msg;
+ Utils::Message msg;
mDisplayQ.put(&msg);
ret = NO_ERROR;
}
@@ -1158,11 +1211,14 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
bool ANativeWindowDisplayAdapter::handleFrameReturn()
{
status_t err;
- buffer_handle_t* buf;
+ buffer_handle_t *buf;
int i = 0;
+ unsigned int k;
int stride; // dummy variable to get stride
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
- Rect bounds;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds;
+ CameraFrame::FrameType frameType = CameraFrame::PREVIEW_FRAME_SYNC;
+
void *y_uv[2];
// TODO(XXX): Do we need to keep stride information in camera hal?
@@ -1173,9 +1229,9 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
err = mANativeWindow->dequeue_buffer(mANativeWindow, &buf, &stride);
if (err != 0) {
- CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ CAMHAL_LOGE("Surface::dequeueBuffer failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
@@ -1184,10 +1240,10 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
}
err = mANativeWindow->lock_buffer(mANativeWindow, buf);
- if (err != 0) {
- CAMHAL_LOGEB("lockbuffer failed: %s (%d)", strerror(-err), -err);
+ if ( NO_ERROR != err ) {
+ CAMHAL_LOGE("Surface::lockBuffer failed: %s (%d)", strerror(-err), -err);
- if ( ENODEV == err ) {
+ if ( NO_INIT == err ) {
CAMHAL_LOGEA("Preview surface abandoned!");
mANativeWindow = NULL;
}
@@ -1197,9 +1253,12 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
for(i = 0; i < mBufferCount; i++)
{
- if (mBufferHandleMap[i] == buf)
+ if (mBuffers[i].opaque == buf)
break;
}
+ if (i == mBufferCount) {
+ CAMHAL_LOGEB("Failed to find handle %p", buf);
+ }
// lock buffer before sending to FrameProvider for filling
bounds.left = 0;
@@ -1208,7 +1267,7 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
bounds.bottom = mFrameHeight;
int lock_try_count = 0;
- while (mapper.lock((buffer_handle_t) mGrallocHandleMap[i], CAMHAL_GRALLOC_USAGE, bounds, y_uv) < 0){
+ while (mapper.lock(*(buffer_handle_t *) mBuffers[i].opaque, CAMHAL_GRALLOC_USAGE, bounds, y_uv) < 0){
if (++lock_try_count > LOCK_BUFFER_TRIES){
if ( NULL != mErrorNotifier.get() ){
mErrorNotifier->errorNotify(CAMERA_ERROR_UNKNOWN);
@@ -1219,10 +1278,27 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
usleep(15000);
}
- mFramesWithCameraAdapterMap.add((int) mGrallocHandleMap[i], i);
+ {
+ android::AutoMutex lock(mLock);
+ mFramesWithCameraAdapterMap.add((buffer_handle_t *) mBuffers[i].opaque, i);
+
+ for( k = 0; k < mFramesType.size() ; k++) {
+ if(mFramesType.keyAt(k) == (int)mBuffers[i].opaque)
+ break;
+ }
+
+ if ( k == mFramesType.size() ) {
+ CAMHAL_LOGE("Frame type for preview buffer 0%x not found!!", mBuffers[i].opaque);
+ return false;
+ }
+
+ frameType = (CameraFrame::FrameType) mFramesType.valueAt(k);
+ mFramesType.removeItem((int) mBuffers[i].opaque);
+ }
CAMHAL_LOGVB("handleFrameReturn: found graphic buffer %d of %d", i, mBufferCount-1);
- mFrameProvider->returnFrame( (void*)mGrallocHandleMap[i], CameraFrame::PREVIEW_FRAME_SYNC);
+ mFrameProvider->returnFrame(&mBuffers[i], frameType);
+
return true;
}
@@ -1265,5 +1341,5 @@ void ANativeWindowDisplayAdapter::frameCallback(CameraFrame* caFrame)
/*--------------------ANativeWindowDisplayAdapter Class ENDS here-----------------------------*/
-};
-
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/Android.mk b/camera/Android.mk
index 23571f0..08e0e1b 100644
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -1,137 +1,223 @@
-LOCAL_PATH := $(call my-dir)
-
-OMAP4_CAMERA_HAL_USES := OMX
-# OMAP4_CAMERA_HAL_USES := USB
-
-OMAP4_CAMERA_HAL_SRC := \
- CameraHal_Module.cpp \
- CameraHal.cpp \
- CameraHalUtilClasses.cpp \
- AppCallbackNotifier.cpp \
- ANativeWindowDisplayAdapter.cpp \
- CameraProperties.cpp \
- MemoryManager.cpp \
- Encoder_libjpeg.cpp \
- SensorListener.cpp \
- NV12_resize.c
-
-OMAP4_CAMERA_COMMON_SRC := \
- CameraParameters.cpp \
- TICameraParameters.cpp \
- CameraHalCommon.cpp
-
-OMAP4_CAMERA_OMX_SRC := \
- BaseCameraAdapter.cpp \
- OMXCameraAdapter/OMX3A.cpp \
- OMXCameraAdapter/OMXAlgo.cpp \
- OMXCameraAdapter/OMXCameraAdapter.cpp \
- OMXCameraAdapter/OMXCapabilities.cpp \
- OMXCameraAdapter/OMXCapture.cpp \
- OMXCameraAdapter/OMXDefaults.cpp \
- OMXCameraAdapter/OMXExif.cpp \
- OMXCameraAdapter/OMXFD.cpp \
- OMXCameraAdapter/OMXFocus.cpp \
- OMXCameraAdapter/OMXZoom.cpp
-
-OMAP4_CAMERA_USB_SRC := \
- BaseCameraAdapter.cpp \
- V4LCameraAdapter/V4LCameraAdapter.cpp
-
-#
-# OMX Camera HAL
-#
+LOCAL_PATH:= $(call my-dir)
+
+OMAP4_CAMERA_HAL_USES:= OMX
+#OMAP4_CAMERA_HAL_USES:= USB
+#OMAP4_CAMERA_HAL_USES:= ALL
+
+CAMERAHAL_CFLAGS += $(ANDROID_API_CFLAGS) -DANDROID_API_JB_OR_LATER
+
+ifdef TI_CAMERAHAL_DEBUG_ENABLED
+ # Enable CameraHAL debug logs
+ CAMERAHAL_CFLAGS += -DCAMERAHAL_DEBUG
+endif
+
+ifdef TI_CAMERAHAL_VERBOSE_DEBUG_ENABLED
+ # Enable CameraHAL verbose debug logs
+ CAMERAHAL_CFLAGS += -DCAMERAHAL_DEBUG_VERBOSE
+endif
+
+ifdef TI_CAMERAHAL_DEBUG_FUNCTION_NAMES
+ # Enable CameraHAL function enter/exit logging
+ CAMERAHAL_CFLAGS += -DTI_UTILS_FUNCTION_LOGGER_ENABLE
+endif
+
+ifdef TI_CAMERAHAL_DEBUG_TIMESTAMPS
+ # Enable timestamp logging
+ CAMERAHAL_CFLAGS += -DTI_UTILS_DEBUG_USE_TIMESTAMPS
+endif
+
+ifndef TI_CAMERAHAL_DONT_USE_RAW_IMAGE_SAVING
+ # Enabled saving RAW images to file
+ CAMERAHAL_CFLAGS += -DCAMERAHAL_USE_RAW_IMAGE_SAVING
+endif
+
+ifdef TI_CAMERAHAL_PROFILING
+ # Enable OMX Camera component profiling
+ CAMERAHAL_CFLAGS += -DCAMERAHAL_OMX_PROFILING
+endif
+
+ifeq ($(ENHANCED_DOMX),true)
+ CAMERAHAL_CFLAGS += -DENHANCED_DOMX
+endif
+
+CAMERAHAL_CFLAGS += -DLOG_TAG=\"CameraHal\"
+
+TI_CAMERAHAL_COMMON_INCLUDES := \
+ $(DEVICE_FOLDER)/hwc \
+ external/jpeg \
+ external/jhead \
+ $(LOCAL_PATH)/../libtiutils \
+ $(LOCAL_PATH)/inc \
+ frameworks/native/include/media/hardware \
+ system/media/camera/include
+
+TI_CAMERAHAL_COMMON_SRC := \
+ CameraHal_Module.cpp \
+ CameraHal.cpp \
+ CameraHalUtilClasses.cpp \
+ AppCallbackNotifier.cpp \
+ ANativeWindowDisplayAdapter.cpp \
+ BufferSourceAdapter.cpp \
+ CameraProperties.cpp \
+ BaseCameraAdapter.cpp \
+ MemoryManager.cpp \
+ Encoder_libjpeg.cpp \
+ SensorListener.cpp \
+ NV12_resize.cpp \
+ CameraParameters.cpp \
+ TICameraParameters.cpp \
+ CameraHalCommon.cpp
+
+TI_CAMERAHAL_OMX_SRC := \
+ OMXCameraAdapter/OMX3A.cpp \
+ OMXCameraAdapter/OMXAlgo.cpp \
+ OMXCameraAdapter/OMXCameraAdapter.cpp \
+ OMXCameraAdapter/OMXCapabilities.cpp \
+ OMXCameraAdapter/OMXCapture.cpp \
+ OMXCameraAdapter/OMXReprocess.cpp \
+ OMXCameraAdapter/OMXDefaults.cpp \
+ OMXCameraAdapter/OMXExif.cpp \
+ OMXCameraAdapter/OMXFD.cpp \
+ OMXCameraAdapter/OMXFocus.cpp \
+ OMXCameraAdapter/OMXMetadata.cpp \
+ OMXCameraAdapter/OMXZoom.cpp
+
+ifndef OMAP_TUNA
+TI_CAMERAHAL_OMX_SRC += \
+ OMXCameraAdapter/OMXDccDataSave.cpp
+endif
+
+TI_CAMERAHAL_USB_SRC := \
+ V4LCameraAdapter/V4LCameraAdapter.cpp \
+ V4LCameraAdapter/V4LCapabilities.cpp
+
+TI_CAMERAHAL_COMMON_SHARED_LIBRARIES := \
+ libui \
+ libbinder \
+ libutils \
+ libcutils \
+ libtiutils_custom \
+ libcamera_client \
+ libgui \
+ libion_ti \
+ libjpeg \
+ libjhead
+
+ifdef OMAP_ENHANCEMENT_CPCAM
+TI_CAMERAHAL_COMMON_STATIC_LIBRARIES += \
+ libcpcamcamera_client
+endif
+
+
+# ====================
+# OMX Camera Adapter
+# --------------------
ifeq ($(OMAP4_CAMERA_HAL_USES),OMX)
include $(CLEAR_VARS)
-LOCAL_SRC_FILES := \
- $(OMAP4_CAMERA_HAL_SRC) \
- $(OMAP4_CAMERA_OMX_SRC) \
- $(OMAP4_CAMERA_COMMON_SRC)
+CAMERAHAL_CFLAGS += -DOMX_CAMERA_ADAPTER
+
+LOCAL_SRC_FILES:= \
+ $(TI_CAMERAHAL_COMMON_SRC) \
+ $(TI_CAMERAHAL_OMX_SRC)
LOCAL_C_INCLUDES += \
- $(LOCAL_PATH)/inc/ \
- $(DEVICE_FOLDER)/hwc \
- $(DEVICE_FOLDER)/include \
- $(LOCAL_PATH)/inc/OMXCameraAdapter \
- $(DEVICE_FOLDER)/libtiutils \
- hardware/ti/omap4xxx/tiler \
- $(DEVICE_FOLDER)/libion_ti \
- $(DOMX_PATH)/omx_core/inc \
- $(DOMX_PATH)/mm_osal/inc \
- frameworks/base/include/media/stagefright \
- frameworks/native/include/media/hardware \
- frameworks/native/include/media/openmax \
- external/jpeg \
- external/jhead
-
-LOCAL_SHARED_LIBRARIES := \
- libui \
- libbinder \
- libutils \
- libcutils \
- liblog \
- libtiutils_custom \
- libmm_osal \
- libOMX_Core \
- libcamera_client \
- libgui \
- libdomx \
- libion_ti \
- libjpeg \
- libjhead
-
-LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER
+ $(TI_CAMERAHAL_COMMON_INCLUDES) \
+ $(DOMX_PATH)/omx_core/inc \
+ $(DOMX_PATH)/mm_osal/inc \
+ $(LOCAL_PATH)/inc/OMXCameraAdapter
+
+LOCAL_SHARED_LIBRARIES:= \
+ $(TI_CAMERAHAL_COMMON_SHARED_LIBRARIES) \
+ libmm_osal \
+ libOMX_Core \
+ libdomx
+
+LOCAL_STATIC_LIBRARIES := $(TI_CAMERAHAL_COMMON_STATIC_LIBRARIES)
+
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER $(CAMERAHAL_CFLAGS)
LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
-LOCAL_MODULE := camera.$(TARGET_BOOTLOADER_BOARD_NAME)
-LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE:= camera.$(TARGET_BOOTLOADER_BOARD_NAME)
+LOCAL_MODULE_TAGS:= optional
include $(BUILD_SHARED_LIBRARY)
else
ifeq ($(OMAP4_CAMERA_HAL_USES),USB)
-#
-# USB Camera Adapter
-#
+
+# ====================
+# USB Camera Adapter
+# --------------------
include $(CLEAR_VARS)
-LOCAL_SRC_FILES := \
- $(OMAP4_CAMERA_HAL_SRC) \
- $(OMAP4_CAMERA_USB_SRC) \
- $(OMAP4_CAMERA_COMMON_SRC)
+CAMERAHAL_CFLAGS += -DV4L_CAMERA_ADAPTER
+
+LOCAL_SRC_FILES:= \
+ $(TI_CAMERAHAL_COMMON_SRC) \
+ $(TI_CAMERAHAL_USB_SRC)
LOCAL_C_INCLUDES += \
- $(LOCAL_PATH)/inc/ \
- $(DEVICE_FOLDER)/hwc \
- $(DEVICE_FOLDER)/include \
- $(LOCAL_PATH)/inc/V4LCameraAdapter \
- $(DEVICE_FOLDER)/libtiutils \
- hardware/ti/omap4xxx/tiler \
- $(DEVICE_FOLDER)/libion_ti \
- frameworks/base/include/ui \
- frameworks/base/include/utils \
- frameworks/base/include/media/stagefright/openmax
-
-LOCAL_SHARED_LIBRARIES := \
- libui \
- libbinder \
- libutils \
- libcutils \
- liblog \
- libtiutils_custom \
- libcamera_client \
- libion_ti
-
-LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER
+ $(TI_CAMERAHAL_COMMON_INCLUDES) \
+ $(LOCAL_PATH)/inc/V4LCameraAdapter
+
+LOCAL_SHARED_LIBRARIES:= \
+ $(TI_CAMERAHAL_COMMON_SHARED_LIBRARIES)
+
+LOCAL_STATIC_LIBRARIES := $(TI_CAMERAHAL_COMMON_STATIC_LIBRARIES)
+
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER $(CAMERAHAL_CFLAGS)
LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
-LOCAL_MODULE := camera.$(TARGET_BOOTLOADER_BOARD_NAME)
-LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE:= camera.$(TARGET_BOOTLOADER_BOARD_NAME)
+LOCAL_MODULE_TAGS:= optional
include $(BUILD_SHARED_LIBRARY)
+
+else
+ifeq ($(OMAP4_CAMERA_HAL_USES),ALL)
+
+
+# =====================
+# ALL Camera Adapters
+# ---------------------
+
+include $(CLEAR_VARS)
+
+CAMERAHAL_CFLAGS += -DOMX_CAMERA_ADAPTER -DV4L_CAMERA_ADAPTER
+
+LOCAL_SRC_FILES:= \
+ $(TI_CAMERAHAL_COMMON_SRC) \
+ $(TI_CAMERAHAL_OMX_SRC) \
+ $(TI_CAMERAHAL_USB_SRC)
+
+LOCAL_C_INCLUDES += \
+ $(TI_CAMERAHAL_COMMON_INCLUDES) \
+ $(DOMX_PATH)/omx_core/inc \
+ $(DOMX_PATH)/mm_osal/inc \
+ $(LOCAL_PATH)/inc/OMXCameraAdapter \
+ $(LOCAL_PATH)/inc/V4LCameraAdapter
+
+LOCAL_SHARED_LIBRARIES:= \
+ $(TI_CAMERAHAL_COMMON_SHARED_LIBRARIES) \
+ libmm_osal \
+ libOMX_Core \
+ libdomx
+
+LOCAL_STATIC_LIBRARIES := $(TI_CAMERAHAL_COMMON_STATIC_LIBRARIES)
+
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER $(CAMERAHAL_CFLAGS)
+
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+LOCAL_MODULE:= camera.$(TARGET_BOOTLOADER_BOARD_NAME)
+LOCAL_MODULE_TAGS:= optional
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
+endif
endif
-endif
diff --git a/camera/AppCallbackNotifier.cpp b/camera/AppCallbackNotifier.cpp
index 4103699..a4ac224 100644
--- a/camera/AppCallbackNotifier.cpp
+++ b/camera/AppCallbackNotifier.cpp
@@ -14,12 +14,6 @@
* limitations under the License.
*/
-
-
-
-#define LOG_TAG "CameraHAL"
-
-
#include "CameraHal.h"
#include "VideoMetadata.h"
#include "Encoder_libjpeg.h"
@@ -27,11 +21,13 @@
#include <ui/GraphicBuffer.h>
#include <ui/GraphicBufferMapper.h>
#include "NV12_resize.h"
+#include "TICameraParameters.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const int AppCallbackNotifier::NOTIFIER_TIMEOUT = -1;
-KeyedVector<void*, sp<Encoder_libjpeg> > gEncoderQueue;
+android::KeyedVector<void*, android::sp<Encoder_libjpeg> > gEncoderQueue;
void AppCallbackNotifierEncoderCallback(void* main_jpeg,
void* thumb_jpeg,
@@ -39,11 +35,12 @@ void AppCallbackNotifierEncoderCallback(void* main_jpeg,
void* cookie1,
void* cookie2,
void* cookie3,
+ void* cookie4,
bool canceled)
{
if (cookie1 && !canceled) {
AppCallbackNotifier* cb = (AppCallbackNotifier*) cookie1;
- cb->EncoderDoneCb(main_jpeg, thumb_jpeg, type, cookie2, cookie3);
+ cb->EncoderDoneCb(main_jpeg, thumb_jpeg, type, cookie2, cookie3, cookie4);
}
if (main_jpeg) {
@@ -60,20 +57,21 @@ void AppCallbackNotifierEncoderCallback(void* main_jpeg,
/*--------------------NotificationHandler Class STARTS here-----------------------------*/
-void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, CameraFrame::FrameType type, void* cookie1, void* cookie2)
+void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, CameraFrame::FrameType type, void* cookie1, void* cookie2, void *cookie3)
{
camera_memory_t* encoded_mem = NULL;
Encoder_libjpeg::params *main_param = NULL, *thumb_param = NULL;
size_t jpeg_size;
uint8_t* src = NULL;
- sp<Encoder_libjpeg> encoder = NULL;
+ CameraBuffer *camera_buffer;
+ android::sp<Encoder_libjpeg> encoder = NULL;
LOG_FUNCTION_NAME;
camera_memory_t* picture = NULL;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if (!main_jpeg) {
goto exit;
@@ -82,6 +80,7 @@ void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, Camer
encoded_mem = (camera_memory_t*) cookie1;
main_param = (Encoder_libjpeg::params *) main_jpeg;
jpeg_size = main_param->jpeg_size;
+ camera_buffer = (CameraBuffer *)cookie3;
src = main_param->src;
if(encoded_mem && encoded_mem->data && (jpeg_size > 0)) {
@@ -126,11 +125,13 @@ void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, Camer
if(picture && (mNotifierState==AppCallbackNotifier::NOTIFIER_STARTED) &&
(mCameraHal->msgTypeEnabled(CAMERA_MSG_COMPRESSED_IMAGE)))
{
- Mutex::Autolock lock(mBurstLock);
-#if 0 //TODO: enable burst mode later
+ android::AutoMutex lock(mBurstLock);
+
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
if ( mBurst )
{
- `(CAMERA_MSG_BURST_IMAGE, JPEGPictureMemBase, mCallbackCookie);
+ mDataCb(CAMERA_MSG_COMPRESSED_BURST_IMAGE, picture, 0, NULL, mCallbackCookie);
+
}
else
#endif
@@ -157,7 +158,7 @@ void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, Camer
gEncoderQueue.removeItem(src);
encoder.clear();
}
- mFrameProvider->returnFrame(src, type);
+ mFrameProvider->returnFrame(camera_buffer, type);
}
LOG_FUNCTION_NAME_EXIT;
@@ -172,8 +173,12 @@ status_t AppCallbackNotifier::initialize()
{
LOG_FUNCTION_NAME;
+ mPreviewMemory = 0;
+
mMeasurementEnabled = false;
+ mNotifierState = NOTIFIER_STOPPED;
+
///Create the app notifier thread
mNotificationThread = new NotificationThread(this);
if(!mNotificationThread.get())
@@ -183,7 +188,7 @@ status_t AppCallbackNotifier::initialize()
}
///Start the display thread
- status_t ret = mNotificationThread->run("NotificationThread", PRIORITY_URGENT_DISPLAY);
+ status_t ret = mNotificationThread->run("NotificationThread", android::PRIORITY_URGENT_DISPLAY);
if(ret!=NO_ERROR)
{
CAMHAL_LOGEA("Couldn't run NotificationThread");
@@ -194,6 +199,9 @@ status_t AppCallbackNotifier::initialize()
mUseMetaDataBufferMode = true;
mRawAvailable = false;
+ mRecording = false;
+ mPreviewing = false;
+
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -206,7 +214,7 @@ void AppCallbackNotifier::setCallbacks(CameraHal* cameraHal,
camera_request_memory get_memory,
void *user)
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME;
@@ -222,7 +230,7 @@ void AppCallbackNotifier::setCallbacks(CameraHal* cameraHal,
void AppCallbackNotifier::setMeasurements(bool enable)
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME;
@@ -245,7 +253,9 @@ void AppCallbackNotifier::errorNotify(int error)
CAMHAL_LOGEB("AppCallbackNotifier received error %d", error);
// If it is a fatal error abort here!
- if((error == CAMERA_ERROR_FATAL) || (error == CAMERA_ERROR_HARD)) {
+ // If TILER is Out of memory we notify Mediaserver so that Memory is cleared and we can restart usecase
+ if((error == CAMERA_ERROR_FATAL) || (error == CAMERA_ERROR_HARD) || (error == -ENOMEM))
+ {
//We kill media server if we encounter these errors as there is
//no point continuing and apps also don't handle errors other
//than media server death always.
@@ -272,7 +282,7 @@ bool AppCallbackNotifier::notificationThread()
LOG_FUNCTION_NAME;
//CAMHAL_LOGDA("Notification Thread waiting for message");
- ret = TIUTILS::MessageQueue::waitForMsg(&mNotificationThread->msgQ(),
+ ret = Utils::MessageQueue::waitForMsg(&mNotificationThread->msgQ(),
&mEventQ,
&mFrameQ,
AppCallbackNotifier::NOTIFIER_TIMEOUT);
@@ -297,7 +307,7 @@ bool AppCallbackNotifier::notificationThread()
if(mFrameQ.hasMsg()) {
///Received a frame from one of the frame providers
- CAMHAL_LOGDA("Notification Thread received a frame from frame provider (CameraAdapter)");
+ //CAMHAL_LOGDA("Notification Thread received a frame from frame provider (CameraAdapter)");
notifyFrame();
}
@@ -308,21 +318,21 @@ bool AppCallbackNotifier::notificationThread()
void AppCallbackNotifier::notifyEvent()
{
///Receive and send the event notifications to app
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mLock);
- if(!mEventQ.isEmpty()) {
- mEventQ.get(&msg);
- } else {
+ android::AutoMutex lock(mLock);
+ if ( !mEventQ.hasMsg() ) {
return;
+ } else {
+ mEventQ.get(&msg);
}
}
bool ret = true;
CameraHalEvent *evt = NULL;
CameraHalEvent::FocusEventData *focusEvtData;
CameraHalEvent::ZoomEventData *zoomEvtData;
- CameraHalEvent::FaceEventData faceEvtData;
+ CameraHalEvent::MetaEventData metaEvtData;
if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED)
{
@@ -357,37 +367,39 @@ void AppCallbackNotifier::notifyEvent()
case CameraHalEvent::EVENT_FOCUS_LOCKED:
case CameraHalEvent::EVENT_FOCUS_ERROR:
- focusEvtData = &evt->mEventData->focusEvent;
- if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_SUCCESS ) &&
- ( NULL != mCameraHal ) &&
- ( NULL != mNotifyCb ) &&
- ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) )
- {
- mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
- mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie);
- }
- else if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_FAIL ) &&
- ( NULL != mCameraHal ) &&
- ( NULL != mNotifyCb ) &&
- ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) )
- {
- mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
- mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie);
- }
- else if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_PENDING ) &&
- ( NULL != mCameraHal ) &&
- ( NULL != mNotifyCb ) &&
- ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS_MOVE) ) )
- {
- mNotifyCb(CAMERA_MSG_FOCUS_MOVE, true, 0, mCallbackCookie);
- }
- else if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_DONE ) &&
- ( NULL != mCameraHal ) &&
- ( NULL != mNotifyCb ) &&
- ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS_MOVE) ) )
- {
- mNotifyCb(CAMERA_MSG_FOCUS_MOVE, false, 0, mCallbackCookie);
+ if ( mCameraHal && mNotifyCb ) {
+ focusEvtData = &evt->mEventData->focusEvent;
+
+ switch ( focusEvtData->focusStatus ) {
+ case CameraHalEvent::FOCUS_STATUS_SUCCESS:
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) {
+ mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
+ mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie);
+ }
+ break;
+
+ case CameraHalEvent::FOCUS_STATUS_FAIL:
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) {
+ mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
+ mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie);
+ }
+ break;
+
+#ifdef ANDROID_API_JB_OR_LATER
+ case CameraHalEvent::FOCUS_STATUS_PENDING:
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS_MOVE) ) {
+ mNotifyCb(CAMERA_MSG_FOCUS_MOVE, true, 0, mCallbackCookie);
+ }
+ break;
+
+ case CameraHalEvent::FOCUS_STATUS_DONE:
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS_MOVE) ) {
+ mNotifyCb(CAMERA_MSG_FOCUS_MOVE, false, 0, mCallbackCookie);
+ }
+ break;
+#endif
}
+ }
break;
@@ -404,10 +416,9 @@ void AppCallbackNotifier::notifyEvent()
break;
-#ifndef OMAP_TUNA
- case CameraHalEvent::EVENT_FACE:
+ case CameraHalEvent::EVENT_METADATA:
- faceEvtData = evt->mEventData->faceEvent;
+ metaEvtData = evt->mEventData->metadataEvent;
if ( ( NULL != mCameraHal ) &&
( NULL != mNotifyCb) &&
@@ -419,10 +430,10 @@ void AppCallbackNotifier::notifyEvent()
mDataCb(CAMERA_MSG_PREVIEW_METADATA,
tmpBuffer,
0,
- faceEvtData->getFaceResult(),
+ metaEvtData->getMetadataResult(),
mCallbackCookie);
- faceEvtData.clear();
+ metaEvtData.clear();
if ( NULL != tmpBuffer ) {
tmpBuffer->release(tmpBuffer);
@@ -431,7 +442,6 @@ void AppCallbackNotifier::notifyEvent()
}
break;
-#endif
case CameraHalEvent::ALL_EVENTS:
break;
@@ -454,11 +464,11 @@ void AppCallbackNotifier::notifyEvent()
static void alignYV12(int width,
int height,
- int &yStride,
- int &uvStride,
- int &ySize,
- int &uvSize,
- int &size)
+ size_t &yStride,
+ size_t &uvStride,
+ size_t &ySize,
+ size_t &uvSize,
+ size_t &size)
{
yStride = ( width + 0xF ) & ~0xF;
uvStride = ( yStride / 2 + 0xF ) & ~0xF;
@@ -485,10 +495,10 @@ static void copy2Dto1D(void *dst,
unsigned int *y_uv = (unsigned int *)src;
CAMHAL_LOGVB("copy2Dto1D() y= %p ; uv=%p.",y_uv[0], y_uv[1]);
- CAMHAL_LOGVB("pixelFormat= %s; offset=%d", pixelFormat,offset);
+ CAMHAL_LOGVB("pixelFormat = %s; offset=%d",pixelFormat,offset);
if (pixelFormat!=NULL) {
- if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
bytesPerPixel = 2;
bufferSrc = ( unsigned char * ) y_uv[0] + offset;
uint32_t xOff = offset % stride;
@@ -533,8 +543,8 @@ static void copy2Dto1D(void *dst,
}
return;
- } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
- strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ } else if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
bytesPerPixel = 1;
bufferDst = ( unsigned char * ) dst;
bufferDstEnd = ( unsigned char * ) dst + width*height*bytesPerPixel;
@@ -559,7 +569,7 @@ static void copy2Dto1D(void *dst,
bufferSrc_UV = ( uint16_t * ) ((uint8_t*)y_uv[1] + (stride/2)*yOff + xOff);
- if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
uint16_t *bufferDst_UV;
// Step 2: UV plane: convert NV12 to NV21 by swapping U & V
@@ -607,7 +617,7 @@ static void copy2Dto1D(void *dst,
: "cc", "memory", "q0", "q1"
);
}
- } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ } else if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
uint16_t *bufferDst_U;
uint16_t *bufferDst_V;
@@ -616,7 +626,7 @@ static void copy2Dto1D(void *dst,
// camera adapter to support YV12. Need to address for
// USBCamera
- int yStride, uvStride, ySize, uvSize, size;
+ size_t yStride, uvStride, ySize, uvSize, size;
alignYV12(width, height, yStride, uvStride, ySize, uvSize, size);
bufferDst_V = (uint16_t *) (((uint8_t*)dst) + ySize);
@@ -673,7 +683,7 @@ static void copy2Dto1D(void *dst,
}
return ;
- } else if(strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
bytesPerPixel = 2;
}
}
@@ -695,8 +705,8 @@ void AppCallbackNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t ms
void *dest = NULL, *src = NULL;
// scope for lock
- {
- Mutex::Autolock lock(mLock);
+ if (mCameraHal->msgTypeEnabled(msgType)) {
+ android::AutoMutex lock(mLock);
if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) {
goto exit;
@@ -707,7 +717,7 @@ void AppCallbackNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t ms
if (NULL != picture) {
dest = picture->data;
if (NULL != dest) {
- src = (void *) ((unsigned int) frame->mBuffer + frame->mOffset);
+ src = (void *) ((unsigned int) frame->mBuffer->mapped + frame->mOffset);
memcpy(dest, src, frame->mLength);
}
}
@@ -728,11 +738,11 @@ void AppCallbackNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t ms
void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType)
{
camera_memory_t* picture = NULL;
- void* dest = NULL;
+ CameraBuffer * dest = NULL;
// scope for lock
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) {
goto exit;
@@ -743,39 +753,39 @@ void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t ms
goto exit;
}
-
- dest = (void*) mPreviewBufs[mPreviewBufCount];
+ dest = &mPreviewBuffers[mPreviewBufCount];
CAMHAL_LOGVB("%d:copy2Dto1D(%p, %p, %d, %d, %d, %d, %d,%s)",
__LINE__,
dest,
frame->mBuffer,
- frame->mWidth,
- frame->mHeight,
- frame->mAlignment,
+ mPreviewWidth,
+ mPreviewHeight,
+ mPreviewStride,
2,
frame->mLength,
mPreviewPixelFormat);
- if ( NULL != dest ) {
+ /* FIXME map dest */
+ if ( NULL != dest && dest->mapped != NULL ) {
// data sync frames don't need conversion
if (CameraFrame::FRAME_DATA_SYNC == frame->mFrameType) {
if ( (mPreviewMemory->size / MAX_BUFFERS) >= frame->mLength ) {
- memcpy(dest, (void*) frame->mBuffer, frame->mLength);
+ memcpy(dest->mapped, (void*) frame->mBuffer->mapped, frame->mLength);
} else {
- memset(dest, 0, (mPreviewMemory->size / MAX_BUFFERS));
+ memset(dest->mapped, 0, (mPreviewMemory->size / MAX_BUFFERS));
}
} else {
- if ((0 == frame->mYuv[0]) || (0 == frame->mYuv[1])){ //NULL ==
+ if ((0 == frame->mYuv[0]) || (0 == frame->mYuv[1])){ //NULL == frame->mYuv
CAMHAL_LOGEA("Error! One of the YUV Pointer is 0"); //is NULL
goto exit;
}
else{
- copy2Dto1D(dest,
+ copy2Dto1D(dest->mapped,
frame->mYuv,
- frame->mWidth,
- frame->mHeight,
- frame->mAlignment,
+ mPreviewWidth,
+ mPreviewHeight,
+ mPreviewStride,
frame->mOffset,
2,
frame->mLength,
@@ -790,8 +800,10 @@ void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t ms
if((mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED) &&
mCameraHal->msgTypeEnabled(msgType) &&
- (dest != NULL)) {
- mDataCb(msgType, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie);
+ (dest != NULL) && (dest->mapped != NULL)) {
+ android::AutoMutex locker(mLock);
+ if ( mPreviewMemory )
+ mDataCb(msgType, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie);
}
// increment for next buffer
@@ -835,17 +847,17 @@ status_t AppCallbackNotifier::dummyRaw()
void AppCallbackNotifier::notifyFrame()
{
///Receive and send the frame notifications to app
- TIUTILS::Message msg;
+ Utils::Message msg;
CameraFrame *frame;
- MemoryHeapBase *heap;
- MemoryBase *buffer = NULL;
- sp<MemoryBase> memBase;
+ android::MemoryHeapBase *heap;
+ android::MemoryBase *buffer = NULL;
+ android::sp<android::MemoryBase> memBase;
void *buf = NULL;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if(!mFrameQ.isEmpty()) {
mFrameQ.get(&msg);
} else {
@@ -902,23 +914,24 @@ void AppCallbackNotifier::notifyFrame()
unsigned int current_snapshot = 0;
Encoder_libjpeg::params *main_jpeg = NULL, *tn_jpeg = NULL;
void* exif_data = NULL;
+ const char *previewFormat = NULL;
camera_memory_t* raw_picture = mRequestMemory(-1, frame->mLength, 1, NULL);
if(raw_picture) {
buf = raw_picture->data;
}
- CameraParameters parameters;
+ android::CameraParameters parameters;
char *params = mCameraHal->getParameters();
- const String8 strParams(params);
+ const android::String8 strParams(params);
parameters.unflatten(strParams);
- encode_quality = parameters.getInt(CameraParameters::KEY_JPEG_QUALITY);
+ encode_quality = parameters.getInt(android::CameraParameters::KEY_JPEG_QUALITY);
if (encode_quality < 0 || encode_quality > 100) {
encode_quality = 100;
}
- tn_quality = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ tn_quality = parameters.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
if (tn_quality < 0 || tn_quality > 100) {
tn_quality = 100;
}
@@ -938,7 +951,7 @@ void AppCallbackNotifier::notifyFrame()
CAMHAL_LOGDB("Video snapshot offset = %d", frame->mOffset);
if (main_jpeg) {
- main_jpeg->src = (uint8_t*) frame->mBuffer;
+ main_jpeg->src = (uint8_t *)frame->mBuffer->mapped;
main_jpeg->src_size = frame->mLength;
main_jpeg->dst = (uint8_t*) buf;
main_jpeg->dst_size = frame->mLength;
@@ -949,13 +962,19 @@ void AppCallbackNotifier::notifyFrame()
main_jpeg->out_height = frame->mHeight;
main_jpeg->right_crop = rightCrop;
main_jpeg->start_offset = frame->mOffset;
- main_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV422I;
+ if ( CameraFrame::FORMAT_YUV422I_UYVY & frame->mQuirks) {
+ main_jpeg->format = TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY;
+ }
+ else { //if ( CameraFrame::FORMAT_YUV422I_YUYV & frame->mQuirks)
+ main_jpeg->format = android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
}
- tn_width = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
- tn_height = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ tn_width = parameters.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ tn_height = parameters.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ previewFormat = parameters.getPreviewFormat();
- if ((tn_width > 0) && (tn_height > 0)) {
+ if ((tn_width > 0) && (tn_height > 0) && ( NULL != previewFormat )) {
tn_jpeg = (Encoder_libjpeg::params*)
malloc(sizeof(Encoder_libjpeg::params));
// if malloc fails just keep going and encode main jpeg
@@ -968,10 +987,12 @@ void AppCallbackNotifier::notifyFrame()
int width, height;
parameters.getPreviewSize(&width,&height);
current_snapshot = (mPreviewBufCount + MAX_BUFFERS - 1) % MAX_BUFFERS;
- tn_jpeg->src = (uint8_t*) mPreviewBufs[current_snapshot];
+ tn_jpeg->src = (uint8_t *)mPreviewBuffers[current_snapshot].mapped;
tn_jpeg->src_size = mPreviewMemory->size / MAX_BUFFERS;
- tn_jpeg->dst = (uint8_t*) malloc(tn_jpeg->src_size);
- tn_jpeg->dst_size = tn_jpeg->src_size;
+ tn_jpeg->dst_size = calculateBufferSize(tn_width,
+ tn_height,
+ previewFormat);
+ tn_jpeg->dst = (uint8_t*) malloc(tn_jpeg->dst_size);
tn_jpeg->quality = tn_quality;
tn_jpeg->in_width = width;
tn_jpeg->in_height = height;
@@ -979,17 +1000,17 @@ void AppCallbackNotifier::notifyFrame()
tn_jpeg->out_height = tn_height;
tn_jpeg->right_crop = 0;
tn_jpeg->start_offset = 0;
- tn_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV420SP;;
+ tn_jpeg->format = android::CameraParameters::PIXEL_FORMAT_YUV420SP;;
}
- sp<Encoder_libjpeg> encoder = new Encoder_libjpeg(main_jpeg,
+ android::sp<Encoder_libjpeg> encoder = new Encoder_libjpeg(main_jpeg,
tn_jpeg,
AppCallbackNotifierEncoderCallback,
(CameraFrame::FrameType)frame->mFrameType,
this,
raw_picture,
- exif_data);
- gEncoderQueue.add(frame->mBuffer, encoder);
+ exif_data, frame->mBuffer);
+ gEncoderQueue.add(frame->mBuffer->mapped, encoder);
encoder->run();
encoder.clear();
if (params != NULL)
@@ -1006,7 +1027,7 @@ void AppCallbackNotifier::notifyFrame()
// who registers a raw callback should receive one
// as well. This is not always the case with
// CameraAdapters though.
- if (!mRawAvailable) {
+ if (!mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE)) {
dummyRaw();
} else {
mRawAvailable = false;
@@ -1014,11 +1035,11 @@ void AppCallbackNotifier::notifyFrame()
#ifdef COPY_IMAGE_BUFFER
{
- Mutex::Autolock lock(mBurstLock);
-#if 0 //TODO: enable burst mode later
+ android::AutoMutex lock(mBurstLock);
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
if ( mBurst )
{
- `(CAMERA_MSG_BURST_IMAGE, JPEGPictureMemBase, mCallbackCookie);
+ copyAndSendPictureFrame(frame, CAMERA_MSG_COMPRESSED_BURST_IMAGE);
}
else
#endif
@@ -1035,13 +1056,13 @@ void AppCallbackNotifier::notifyFrame()
( NULL != mDataCb) &&
( mCameraHal->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME) ) )
{
- mRecordingLock.lock();
+ android::AutoMutex locker(mRecordingLock);
if(mRecording)
{
if(mUseMetaDataBufferMode)
{
camera_memory_t *videoMedatadaBufferMemory =
- (camera_memory_t *) mVideoMetadataBufferMemoryMap.valueFor((uint32_t) frame->mBuffer);
+ mVideoMetadataBufferMemoryMap.valueFor(frame->mBuffer->opaque);
video_metadata_t *videoMetadataBuffer = (video_metadata_t *) videoMedatadaBufferMemory->data;
if( (NULL == videoMedatadaBufferMemory) || (NULL == videoMetadataBuffer) || (NULL == frame->mBuffer) )
@@ -1052,9 +1073,9 @@ void AppCallbackNotifier::notifyFrame()
if ( mUseVideoBuffers )
{
- int vBuf = mVideoMap.valueFor((uint32_t) frame->mBuffer);
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
- Rect bounds;
+ CameraBuffer *vBuf = mVideoMap.valueFor(frame->mBuffer->opaque);
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds;
bounds.left = 0;
bounds.top = 0;
bounds.right = mVideoWidth;
@@ -1062,14 +1083,15 @@ void AppCallbackNotifier::notifyFrame()
void *y_uv[2];
mapper.lock((buffer_handle_t)vBuf, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ y_uv[1] = y_uv[0] + mVideoHeight*4096;
- structConvImage input = {(int)frame->mWidth,
- (int)frame->mHeight,
+ structConvImage input = {frame->mWidth,
+ frame->mHeight,
4096,
IC_FORMAT_YCbCr420_lp,
(mmByte *)frame->mYuv[0],
(mmByte *)frame->mYuv[1],
- (int)frame->mOffset};
+ frame->mOffset};
structConvImage output = {mVideoWidth,
mVideoHeight,
@@ -1080,20 +1102,21 @@ void AppCallbackNotifier::notifyFrame()
0};
VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0);
- mapper.unlock((buffer_handle_t)vBuf);
- videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
- videoMetadataBuffer->handle = (void *)vBuf;
+ mapper.unlock((buffer_handle_t)vBuf->opaque);
+ videoMetadataBuffer->metadataBufferType = (int) android::kMetadataBufferTypeCameraSource;
+ /* FIXME remove cast */
+ videoMetadataBuffer->handle = (void *)vBuf->opaque;
videoMetadataBuffer->offset = 0;
}
else
{
- videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
- videoMetadataBuffer->handle = frame->mBuffer;
+ videoMetadataBuffer->metadataBufferType = (int) android::kMetadataBufferTypeCameraSource;
+ videoMetadataBuffer->handle = camera_buffer_get_omx_ptr(frame->mBuffer);
videoMetadataBuffer->offset = frame->mOffset;
}
CAMHAL_LOGVB("mDataCbTimestamp : frame->mBuffer=0x%x, videoMetadataBuffer=0x%x, videoMedatadaBufferMemory=0x%x",
- frame->mBuffer, videoMetadataBuffer, videoMedatadaBufferMemory);
+ frame->mBuffer->opaque, videoMetadataBuffer, videoMedatadaBufferMemory);
mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME,
videoMedatadaBufferMemory, 0, mCallbackCookie);
@@ -1108,13 +1131,11 @@ void AppCallbackNotifier::notifyFrame()
break;
}
- *reinterpret_cast<buffer_handle_t*>(fakebuf->data) = reinterpret_cast<buffer_handle_t>(frame->mBuffer);
+ *reinterpret_cast<buffer_handle_t*>(fakebuf->data) = reinterpret_cast<buffer_handle_t>(frame->mBuffer->mapped);
mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME, fakebuf, 0, mCallbackCookie);
fakebuf->release(fakebuf);
}
}
- mRecordingLock.unlock();
-
}
else if(( CameraFrame::SNAPSHOT_FRAME == frame->mFrameType ) &&
( NULL != mCameraHal ) &&
@@ -1180,7 +1201,7 @@ void AppCallbackNotifier::frameCallbackRelay(CameraFrame* caFrame)
void AppCallbackNotifier::frameCallback(CameraFrame* caFrame)
{
///Post the event to the event queue of AppCallbackNotifier
- TIUTILS::Message msg;
+ Utils::Message msg;
CameraFrame *frame;
LOG_FUNCTION_NAME;
@@ -1207,10 +1228,12 @@ void AppCallbackNotifier::frameCallback(CameraFrame* caFrame)
void AppCallbackNotifier::flushAndReturnFrames()
{
- TIUTILS::Message msg;
+ LOG_FUNCTION_NAME;
+
+ Utils::Message msg;
CameraFrame *frame;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
while (!mFrameQ.isEmpty()) {
mFrameQ.get(&msg);
frame = (CameraFrame*) msg.arg1;
@@ -1235,7 +1258,7 @@ void AppCallbackNotifier::eventCallback(CameraHalEvent* chEvt)
{
///Post the event to the event queue of AppCallbackNotifier
- TIUTILS::Message msg;
+ Utils::Message msg;
CameraHalEvent *event;
@@ -1250,7 +1273,7 @@ void AppCallbackNotifier::eventCallback(CameraHalEvent* chEvt)
msg.command = AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT;
msg.arg1 = event;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mEventQ.put(&msg);
}
}
@@ -1269,7 +1292,7 @@ void AppCallbackNotifier::flushEventQueue()
{
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mEventQ.clear();
}
}
@@ -1278,7 +1301,7 @@ void AppCallbackNotifier::flushEventQueue()
bool AppCallbackNotifier::processMessage()
{
///Retrieve the command from the command queue and process it
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -1291,7 +1314,7 @@ bool AppCallbackNotifier::processMessage()
{
case NotificationThread::NOTIFIER_EXIT:
{
- CAMHAL_LOGDA("Received NOTIFIER_EXIT command from Camera HAL");
+ CAMHAL_LOGD("Received NOTIFIER_EXIT command from Camera HAL");
mNotifierState = AppCallbackNotifier::NOTIFIER_EXITED;
ret = false;
break;
@@ -1329,7 +1352,7 @@ AppCallbackNotifier::~AppCallbackNotifier()
mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
}
- TIUTILS::Message msg = {0,0,0,0,0,0};
+ Utils::Message msg = {0,0,0,0,0,0};
msg.command = NotificationThread::NOTIFIER_EXIT;
///Post the message to display thread
@@ -1375,11 +1398,11 @@ void AppCallbackNotifier::releaseSharedVideoBuffers()
camera_memory_t* videoMedatadaBufferMemory;
for (unsigned int i = 0; i < mVideoMetadataBufferMemoryMap.size(); i++)
{
- videoMedatadaBufferMemory = (camera_memory_t*) mVideoMetadataBufferMemoryMap.valueAt(i);
+ videoMedatadaBufferMemory = mVideoMetadataBufferMemoryMap.valueAt(i);
if(NULL != videoMedatadaBufferMemory)
{
videoMedatadaBufferMemory->release(videoMedatadaBufferMemory);
- CAMHAL_LOGDB("Released videoMedatadaBufferMemory=0x%x", videoMedatadaBufferMemory);
+ CAMHAL_LOGDB("Released videoMedatadaBufferMemory=%p", videoMedatadaBufferMemory);
}
}
@@ -1436,16 +1459,57 @@ void AppCallbackNotifier::setFrameProvider(FrameNotifier *frameNotifier)
LOG_FUNCTION_NAME_EXIT;
}
-status_t AppCallbackNotifier::startPreviewCallbacks(CameraParameters &params, void *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
+size_t AppCallbackNotifier::calculateBufferSize(size_t width, size_t height, const char *pixelFormat)
+{
+ size_t res = 0;
+
+ LOG_FUNCTION_NAME
+
+ if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ res = width*height*2;
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ res = (width*height*3)/2;
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ res = width*height*2;
+ } else if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ size_t yStride, uvStride, ySize, uvSize;
+ alignYV12(width, height, yStride, uvStride, ySize, uvSize, res);
+ mPreviewPixelFormat = android::CameraParameters::PIXEL_FORMAT_YUV420P;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return res;
+}
+
+const char* AppCallbackNotifier::getContstantForPixelFormat(const char *pixelFormat) {
+ if (!pixelFormat) {
+ // returning NV12 as default
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+
+ if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ return android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ) {
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ return android::CameraParameters::PIXEL_FORMAT_RGB565;
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ return android::CameraParameters::PIXEL_FORMAT_YUV420P;
+ } else {
+ // returning NV12 as default
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+}
+
+status_t AppCallbackNotifier::startPreviewCallbacks(android::CameraParameters &params, CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
{
- sp<MemoryHeapBase> heap;
- sp<MemoryBase> buffer;
unsigned int *bufArr;
int size = 0;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if ( NULL == mFrameProvider )
{
@@ -1463,30 +1527,12 @@ status_t AppCallbackNotifier::startPreviewCallbacks(CameraParameters &params, vo
///Get preview size
params.getPreviewSize(&w, &h);
- //Get the preview pixel format
- mPreviewPixelFormat = params.getPreviewFormat();
-
- if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
- {
- size = w*h*2;
- mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV422I;
- }
- else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 )
- {
- size = (w*h*3)/2;
- mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV420SP;
- }
- else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
- {
- size = w*h*2;
- mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_RGB565;
- }
- else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
- {
- int yStride, uvStride, ySize, uvSize;
- alignYV12(w, h, yStride, uvStride, ySize, uvSize, size);
- mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV420P;
- }
+ // save preview pixel format, size and stride
+ mPreviewWidth = w;
+ mPreviewHeight = h;
+ mPreviewStride = 4096;
+ mPreviewPixelFormat = getContstantForPixelFormat(params.getPreviewFormat());
+ size = calculateBufferSize(w, h, mPreviewPixelFormat);
mPreviewMemory = mRequestMemory(-1, size, AppCallbackNotifier::MAX_BUFFERS, NULL);
if (!mPreviewMemory) {
@@ -1494,18 +1540,24 @@ status_t AppCallbackNotifier::startPreviewCallbacks(CameraParameters &params, vo
}
for (int i=0; i < AppCallbackNotifier::MAX_BUFFERS; i++) {
- mPreviewBufs[i] = (unsigned char*) mPreviewMemory->data + (i*size);
+ mPreviewBuffers[i].type = CAMERA_BUFFER_MEMORY;
+ mPreviewBuffers[i].opaque = (unsigned char*) mPreviewMemory->data + (i*size);
+ mPreviewBuffers[i].mapped = mPreviewBuffers[i].opaque;
}
if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME ) ) {
mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
}
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_POSTVIEW_FRAME) ) {
+ mFrameProvider->enableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+ }
+
mPreviewBufCount = 0;
mPreviewing = true;
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return NO_ERROR;
}
@@ -1514,7 +1566,7 @@ void AppCallbackNotifier::setBurst(bool burst)
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mBurstLock);
+ android::AutoMutex lock(mBurstLock);
mBurst = burst;
@@ -1547,9 +1599,6 @@ void AppCallbackNotifier::setVideoRes(int width, int height)
status_t AppCallbackNotifier::stopPreviewCallbacks()
{
- sp<MemoryHeapBase> heap;
- sp<MemoryBase> buffer;
-
LOG_FUNCTION_NAME;
if ( NULL == mFrameProvider )
@@ -1564,10 +1613,12 @@ status_t AppCallbackNotifier::stopPreviewCallbacks()
}
mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ mFrameProvider->disableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mPreviewMemory->release(mPreviewMemory);
+ mPreviewMemory = 0;
}
mPreviewing = false;
@@ -1592,7 +1643,7 @@ status_t AppCallbackNotifier::startRecording()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mRecordingLock);
+ android::AutoMutex lock(mRecordingLock);
if ( NULL == mFrameProvider )
{
@@ -1618,14 +1669,13 @@ status_t AppCallbackNotifier::startRecording()
}
//Allocate metadata buffers for video recording
-status_t AppCallbackNotifier::initSharedVideoBuffers(void *buffers, uint32_t *offsets, int fd, size_t length, size_t count, void *vidBufs)
+status_t AppCallbackNotifier::initSharedVideoBuffers(CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count, CameraBuffer *vidBufs)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
if(mUseMetaDataBufferMode)
{
- uint32_t *bufArr = NULL;
camera_memory_t* videoMedatadaBufferMemory = NULL;
if(NULL == buffers)
@@ -1633,7 +1683,6 @@ status_t AppCallbackNotifier::initSharedVideoBuffers(void *buffers, uint32_t *of
CAMHAL_LOGEA("Error! Video buffers are NULL");
return BAD_VALUE;
}
- bufArr = (uint32_t *) buffers;
for (uint32_t i = 0; i < count; i++)
{
@@ -1644,16 +1693,18 @@ status_t AppCallbackNotifier::initSharedVideoBuffers(void *buffers, uint32_t *of
return NO_MEMORY;
}
- mVideoMetadataBufferMemoryMap.add(bufArr[i], (uint32_t)(videoMedatadaBufferMemory));
- mVideoMetadataBufferReverseMap.add((uint32_t)(videoMedatadaBufferMemory->data), bufArr[i]);
- CAMHAL_LOGDB("bufArr[%d]=0x%x, videoMedatadaBufferMemory=0x%x, videoMedatadaBufferMemory->data=0x%x",
- i, bufArr[i], videoMedatadaBufferMemory, videoMedatadaBufferMemory->data);
+ // FIXME remove cast
+ mVideoMetadataBufferMemoryMap.add((void *)buffers[i].opaque, videoMedatadaBufferMemory);
+ mVideoMetadataBufferReverseMap.add(videoMedatadaBufferMemory->data, &buffers[i]);
+ CAMHAL_LOGDB("buffers[%d]=%p, videoMedatadaBufferMemory=%p, videoMedatadaBufferMemory->data=%p",
+ i, &buffers[i], videoMedatadaBufferMemory, videoMedatadaBufferMemory->data);
if (vidBufs != NULL)
{
- uint32_t *vBufArr = (uint32_t *) vidBufs;
- mVideoMap.add(bufArr[i], vBufArr[i]);
- CAMHAL_LOGVB("bufArr[%d]=0x%x, vBuffArr[%d]=0x%x", i, bufArr[i], i, vBufArr[i]);
+ //ASSERT(buffers[i].type == CAMERA_BUFFER_GRALLOC);
+ // FIXME remove cast
+ mVideoMap.add((void *)buffers[i].opaque, &vidBufs[i]);
+ CAMHAL_LOGVB("buffers[%d]=%p, vBuffArr[%d]=%p", i, &buffers[i], i, &vidBufs[i]);
}
}
}
@@ -1670,7 +1721,7 @@ status_t AppCallbackNotifier::stopRecording()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mRecordingLock);
+ android::AutoMutex lock(mRecordingLock);
if ( NULL == mFrameProvider )
{
@@ -1701,7 +1752,7 @@ status_t AppCallbackNotifier::stopRecording()
status_t AppCallbackNotifier::releaseRecordingFrame(const void* mem)
{
status_t ret = NO_ERROR;
- void *frame = NULL;
+ CameraBuffer *frame = NULL;
LOG_FUNCTION_NAME;
if ( NULL == mFrameProvider )
@@ -1724,13 +1775,15 @@ status_t AppCallbackNotifier::releaseRecordingFrame(const void* mem)
if(mUseMetaDataBufferMode)
{
video_metadata_t *videoMetadataBuffer = (video_metadata_t *) mem ;
- frame = (void*) mVideoMetadataBufferReverseMap.valueFor((uint32_t) videoMetadataBuffer);
+ /* FIXME remove cast */
+ frame = mVideoMetadataBufferReverseMap.valueFor(videoMetadataBuffer);
CAMHAL_LOGVB("Releasing frame with videoMetadataBuffer=0x%x, videoMetadataBuffer->handle=0x%x & frame handle=0x%x\n",
videoMetadataBuffer, videoMetadataBuffer->handle, frame);
}
else
{
- frame = (void*)(*((uint32_t *)mem));
+ /* FIXME this won't work */
+ frame = (CameraBuffer *)(void*)(*((uint32_t *)mem));
}
if ( NO_ERROR == ret )
@@ -1745,19 +1798,35 @@ status_t AppCallbackNotifier::releaseRecordingFrame(const void* mem)
status_t AppCallbackNotifier::enableMsgType(int32_t msgType)
{
- if( msgType & (CAMERA_MSG_POSTVIEW_FRAME | CAMERA_MSG_PREVIEW_FRAME) ) {
+ if( msgType & CAMERA_MSG_PREVIEW_FRAME ) {
mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
}
+ if( msgType & CAMERA_MSG_POSTVIEW_FRAME ) {
+ mFrameProvider->enableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+ }
+
+ if(msgType & CAMERA_MSG_RAW_IMAGE) {
+ mFrameProvider->enableFrameNotification(CameraFrame::RAW_FRAME);
+ }
+
return NO_ERROR;
}
status_t AppCallbackNotifier::disableMsgType(int32_t msgType)
{
- if( msgType & (CAMERA_MSG_PREVIEW_FRAME | CAMERA_MSG_POSTVIEW_FRAME) ) {
+ if( msgType & CAMERA_MSG_PREVIEW_FRAME ) {
mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
}
+ if( msgType & CAMERA_MSG_POSTVIEW_FRAME ) {
+ mFrameProvider->disableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+ }
+
+ if(msgType & CAMERA_MSG_RAW_IMAGE) {
+ mFrameProvider->disableFrameNotification(CameraFrame::RAW_FRAME);
+ }
+
return NO_ERROR;
}
@@ -1814,14 +1883,14 @@ status_t AppCallbackNotifier::stop()
return ALREADY_EXISTS;
}
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mNotifierState = AppCallbackNotifier::NOTIFIER_STOPPED;
CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STOPPED \n");
}
while(!gEncoderQueue.isEmpty()) {
- sp<Encoder_libjpeg> encoder = gEncoderQueue.valueAt(0);
+ android::sp<Encoder_libjpeg> encoder = gEncoderQueue.valueAt(0);
camera_memory_t* encoded_mem = NULL;
ExifElementsTable* exif = NULL;
@@ -1850,4 +1919,5 @@ status_t AppCallbackNotifier::stop()
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/BaseCameraAdapter.cpp b/camera/BaseCameraAdapter.cpp
index 458c79e..4e315b6 100644
--- a/camera/BaseCameraAdapter.cpp
+++ b/camera/BaseCameraAdapter.cpp
@@ -14,13 +14,50 @@
* limitations under the License.
*/
-
-
-#define LOG_TAG "CameraHAL"
-
#include "BaseCameraAdapter.h"
-namespace android {
+const int EVENT_MASK = 0xffff;
+
+namespace Ti {
+namespace Camera {
+
+const LUT cameraCommandsUserToHAL[] = {
+ { "CAMERA_START_PREVIEW", CameraAdapter::CAMERA_START_PREVIEW },
+ { "CAMERA_STOP_PREVIEW", CameraAdapter::CAMERA_STOP_PREVIEW },
+ { "CAMERA_START_VIDEO", CameraAdapter::CAMERA_START_VIDEO },
+ { "CAMERA_STOP_VIDEO", CameraAdapter::CAMERA_STOP_VIDEO },
+ { "CAMERA_START_IMAGE_CAPTURE", CameraAdapter::CAMERA_START_IMAGE_CAPTURE },
+ { "CAMERA_STOP_IMAGE_CAPTURE", CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE },
+ { "CAMERA_PERFORM_AUTOFOCUS", CameraAdapter::CAMERA_PERFORM_AUTOFOCUS },
+ { "CAMERA_CANCEL_AUTOFOCUS", CameraAdapter::CAMERA_CANCEL_AUTOFOCUS },
+ { "CAMERA_PREVIEW_FLUSH_BUFFERS", CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS },
+ { "CAMERA_START_SMOOTH_ZOOM", CameraAdapter::CAMERA_START_SMOOTH_ZOOM },
+ { "CAMERA_STOP_SMOOTH_ZOOM", CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM },
+ { "CAMERA_USE_BUFFERS_PREVIEW", CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW },
+ { "CAMERA_SET_TIMEOUT", CameraAdapter::CAMERA_SET_TIMEOUT },
+ { "CAMERA_CANCEL_TIMEOUT", CameraAdapter::CAMERA_CANCEL_TIMEOUT },
+ { "CAMERA_START_BRACKET_CAPTURE", CameraAdapter::CAMERA_START_BRACKET_CAPTURE },
+ { "CAMERA_STOP_BRACKET_CAPTURE", CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE },
+ { "CAMERA_QUERY_RESOLUTION_PREVIEW", CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW },
+ { "CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE", CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE },
+ { "CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA", CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA },
+ { "CAMERA_USE_BUFFERS_IMAGE_CAPTURE", CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE },
+ { "CAMERA_USE_BUFFERS_PREVIEW_DATA", CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA },
+ { "CAMERA_TIMEOUT_EXPIRED", CameraAdapter::CAMERA_TIMEOUT_EXPIRED },
+ { "CAMERA_START_FD", CameraAdapter::CAMERA_START_FD },
+ { "CAMERA_STOP_FD", CameraAdapter::CAMERA_STOP_FD },
+ { "CAMERA_SWITCH_TO_EXECUTING", CameraAdapter::CAMERA_SWITCH_TO_EXECUTING },
+ { "CAMERA_USE_BUFFERS_VIDEO_CAPTURE", CameraAdapter::CAMERA_USE_BUFFERS_VIDEO_CAPTURE },
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ { "CAMERA_USE_BUFFERS_REPROCESS", CameraAdapter::CAMERA_USE_BUFFERS_REPROCESS },
+ { "CAMERA_START_REPROCESS", CameraAdapter::CAMERA_START_REPROCESS },
+#endif
+};
+
+const LUTtypeHAL CamCommandsLUT = {
+ sizeof(cameraCommandsUserToHAL)/sizeof(cameraCommandsUserToHAL[0]),
+ cameraCommandsUserToHAL
+};
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
@@ -51,6 +88,8 @@ BaseCameraAdapter::BaseCameraAdapter()
mAdapterState = INTIALIZED_STATE;
+ mSharedAllocator = NULL;
+
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
mStartFocus.tv_sec = 0;
mStartFocus.tv_usec = 0;
@@ -64,16 +103,18 @@ BaseCameraAdapter::~BaseCameraAdapter()
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
mFrameSubscribers.clear();
mImageSubscribers.clear();
mRawSubscribers.clear();
mVideoSubscribers.clear();
+ mVideoInSubscribers.clear();
mFocusSubscribers.clear();
mShutterSubscribers.clear();
mZoomSubscribers.clear();
- mFaceSubscribers.clear();
+ mSnapshotSubscribers.clear();
+ mMetadataSubscribers.clear();
LOG_FUNCTION_NAME_EXIT;
}
@@ -130,40 +171,59 @@ status_t BaseCameraAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
void BaseCameraAdapter::enableMsgType(int32_t msgs, frame_callback callback, event_callback eventCb, void* cookie)
{
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
LOG_FUNCTION_NAME;
- if ( CameraFrame::PREVIEW_FRAME_SYNC == msgs )
- {
- mFrameSubscribers.add((int) cookie, callback);
- }
- else if ( CameraFrame::FRAME_DATA_SYNC == msgs )
- {
- mFrameDataSubscribers.add((int) cookie, callback);
- }
- else if ( CameraFrame::IMAGE_FRAME == msgs)
- {
- mImageSubscribers.add((int) cookie, callback);
- }
- else if ( CameraFrame::RAW_FRAME == msgs)
- {
- mRawSubscribers.add((int) cookie, callback);
- }
- else if ( CameraFrame::VIDEO_FRAME_SYNC == msgs)
- {
- mVideoSubscribers.add((int) cookie, callback);
- }
- else if ( CameraHalEvent::ALL_EVENTS == msgs)
+ int32_t frameMsg = ((msgs >> MessageNotifier::FRAME_BIT_FIELD_POSITION) & EVENT_MASK);
+ int32_t eventMsg = ((msgs >> MessageNotifier::EVENT_BIT_FIELD_POSITION) & EVENT_MASK);
+
+ if ( frameMsg != 0 )
{
- mFocusSubscribers.add((int) cookie, eventCb);
- mShutterSubscribers.add((int) cookie, eventCb);
- mZoomSubscribers.add((int) cookie, eventCb);
- mFaceSubscribers.add((int) cookie, eventCb);
+ CAMHAL_LOGVB("Frame message type id=0x%x subscription request", frameMsg);
+ switch ( frameMsg )
+ {
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ mFrameSubscribers.add((int) cookie, callback);
+ break;
+ case CameraFrame::FRAME_DATA_SYNC:
+ mFrameDataSubscribers.add((int) cookie, callback);
+ break;
+ case CameraFrame::SNAPSHOT_FRAME:
+ mSnapshotSubscribers.add((int) cookie, callback);
+ break;
+ case CameraFrame::IMAGE_FRAME:
+ mImageSubscribers.add((int) cookie, callback);
+ break;
+ case CameraFrame::RAW_FRAME:
+ mRawSubscribers.add((int) cookie, callback);
+ break;
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ mVideoSubscribers.add((int) cookie, callback);
+ break;
+ case CameraFrame::REPROCESS_INPUT_FRAME:
+ mVideoInSubscribers.add((int) cookie, callback);
+ break;
+ default:
+ CAMHAL_LOGEA("Frame message type id=0x%x subscription no supported yet!", frameMsg);
+ break;
+ }
}
- else
+
+ if ( eventMsg != 0)
{
- CAMHAL_LOGEA("Message type subscription no supported yet!");
+ CAMHAL_LOGVB("Event message type id=0x%x subscription request", eventMsg);
+ if ( CameraHalEvent::ALL_EVENTS == eventMsg )
+ {
+ mFocusSubscribers.add((int) cookie, eventCb);
+ mShutterSubscribers.add((int) cookie, eventCb);
+ mZoomSubscribers.add((int) cookie, eventCb);
+ mMetadataSubscribers.add((int) cookie, eventCb);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Event message type id=0x%x subscription no supported yet!", eventMsg);
+ }
}
LOG_FUNCTION_NAME_EXIT;
@@ -171,59 +231,78 @@ void BaseCameraAdapter::enableMsgType(int32_t msgs, frame_callback callback, eve
void BaseCameraAdapter::disableMsgType(int32_t msgs, void* cookie)
{
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
LOG_FUNCTION_NAME;
- if ( CameraFrame::PREVIEW_FRAME_SYNC == msgs )
- {
- mFrameSubscribers.removeItem((int) cookie);
- }
- else if ( CameraFrame::FRAME_DATA_SYNC == msgs )
- {
- mFrameDataSubscribers.removeItem((int) cookie);
- }
- else if ( CameraFrame::IMAGE_FRAME == msgs)
- {
- mImageSubscribers.removeItem((int) cookie);
- }
- else if ( CameraFrame::RAW_FRAME == msgs)
- {
- mRawSubscribers.removeItem((int) cookie);
- }
- else if ( CameraFrame::VIDEO_FRAME_SYNC == msgs)
- {
- mVideoSubscribers.removeItem((int) cookie);
- }
- else if ( CameraFrame::ALL_FRAMES == msgs )
- {
- mFrameSubscribers.removeItem((int) cookie);
- mFrameDataSubscribers.removeItem((int) cookie);
- mImageSubscribers.removeItem((int) cookie);
- mRawSubscribers.removeItem((int) cookie);
- mVideoSubscribers.removeItem((int) cookie);
- }
- else if ( CameraHalEvent::ALL_EVENTS == msgs)
+ int32_t frameMsg = ((msgs >> MessageNotifier::FRAME_BIT_FIELD_POSITION) & EVENT_MASK);
+ int32_t eventMsg = ((msgs >> MessageNotifier::EVENT_BIT_FIELD_POSITION) & EVENT_MASK);
+
+ if ( frameMsg != 0 )
{
- //Subscribe only for focus
- //TODO: Process case by case
- mFocusSubscribers.removeItem((int) cookie);
- mShutterSubscribers.removeItem((int) cookie);
- mZoomSubscribers.removeItem((int) cookie);
- mFaceSubscribers.removeItem((int) cookie);
+ CAMHAL_LOGVB("Frame message type id=0x%x remove subscription request", frameMsg);
+ switch ( frameMsg )
+ {
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ mFrameSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::FRAME_DATA_SYNC:
+ mFrameDataSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::SNAPSHOT_FRAME:
+ mSnapshotSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::IMAGE_FRAME:
+ mImageSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::RAW_FRAME:
+ mRawSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ mVideoSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::REPROCESS_INPUT_FRAME:
+ mVideoInSubscribers.removeItem((int) cookie);
+ break;
+ case CameraFrame::ALL_FRAMES:
+ mFrameSubscribers.removeItem((int) cookie);
+ mFrameDataSubscribers.removeItem((int) cookie);
+ mSnapshotSubscribers.removeItem((int) cookie);
+ mImageSubscribers.removeItem((int) cookie);
+ mRawSubscribers.removeItem((int) cookie);
+ mVideoSubscribers.removeItem((int) cookie);
+ mVideoInSubscribers.removeItem((int) cookie);
+ break;
+ default:
+ CAMHAL_LOGEA("Frame message type id=0x%x subscription remove not supported yet!", frameMsg);
+ break;
+ }
}
- else
+
+ if ( eventMsg != 0 )
{
- CAMHAL_LOGEB("Message type 0x%x subscription no supported yet!", msgs);
+ CAMHAL_LOGVB("Event message type id=0x%x remove subscription request", eventMsg);
+ if ( CameraHalEvent::ALL_EVENTS == eventMsg)
+ {
+ //TODO: Process case by case
+ mFocusSubscribers.removeItem((int) cookie);
+ mShutterSubscribers.removeItem((int) cookie);
+ mZoomSubscribers.removeItem((int) cookie);
+ mMetadataSubscribers.removeItem((int) cookie);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Event message type id=0x%x subscription remove not supported yet!", eventMsg);
+ }
}
LOG_FUNCTION_NAME_EXIT;
}
-void BaseCameraAdapter::addFramePointers(void *frameBuf, void *buf)
+void BaseCameraAdapter::addFramePointers(CameraBuffer *frameBuf, void *buf)
{
unsigned int *pBuf = (unsigned int *)buf;
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
if ((frameBuf != NULL) && ( pBuf != NULL) )
{
@@ -239,7 +318,7 @@ void BaseCameraAdapter::addFramePointers(void *frameBuf, void *buf)
void BaseCameraAdapter::removeFramePointers()
{
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
int size = mFrameQueue.size();
CAMHAL_LOGVB("Removing %d Frames = ", size);
@@ -252,7 +331,7 @@ void BaseCameraAdapter::removeFramePointers()
mFrameQueue.clear();
}
-void BaseCameraAdapter::returnFrame(void* frameBuf, CameraFrame::FrameType frameType)
+void BaseCameraAdapter::returnFrame(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
{
status_t res = NO_ERROR;
size_t subscriberCount = 0;
@@ -266,7 +345,7 @@ void BaseCameraAdapter::returnFrame(void* frameBuf, CameraFrame::FrameType frame
if ( NO_ERROR == res)
{
- Mutex::Autolock lock(mReturnFrameLock);
+ android::AutoMutex lock(mReturnFrameLock);
refCount = getFrameRefCount(frameBuf, frameType);
@@ -310,13 +389,15 @@ void BaseCameraAdapter::returnFrame(void* frameBuf, CameraFrame::FrameType frame
//check if someone is holding this buffer
if ( 0 == refCount )
{
-#ifdef DEBUG_LOG
- if(mBuffersWithDucati.indexOfKey((int)frameBuf)>=0)
+#ifdef CAMERAHAL_DEBUG
+ if((mBuffersWithDucati.indexOfKey((int)camera_buffer_get_omx_ptr(frameBuf)) >= 0) &&
+ ((CameraFrame::PREVIEW_FRAME_SYNC == frameType) ||
+ (CameraFrame::SNAPSHOT_FRAME == frameType)))
{
- ALOGE("Buffer already with Ducati!! 0x%x", frameBuf);
- for(int i=0;i<mBuffersWithDucati.size();i++) ALOGE("0x%x", mBuffersWithDucati.keyAt(i));
+ CAMHAL_LOGE("Buffer already with Ducati!! 0x%x", frameBuf);
+ for(int i=0;i<mBuffersWithDucati.size();i++) CAMHAL_LOGE("0x%x", mBuffersWithDucati.keyAt(i));
}
- mBuffersWithDucati.add((int)frameBuf,1);
+ mBuffersWithDucati.add((int)camera_buffer_get_omx_ptr(frameBuf),1);
#endif
res = fillThisBuffer(frameBuf, frameType);
}
@@ -324,8 +405,7 @@ void BaseCameraAdapter::returnFrame(void* frameBuf, CameraFrame::FrameType frame
}
-status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, int value2, int value3)
-{
+status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, int value2, int value3, int value4) {
status_t ret = NO_ERROR;
struct timeval *refTimestamp;
BuffersDescriptor *desc = NULL;
@@ -351,19 +431,20 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
if ( ret == NO_ERROR )
{
- Mutex::Autolock lock(mPreviewBufferLock);
- mPreviewBuffers = (int *) desc->mBuffers;
+ android::AutoMutex lock(mPreviewBufferLock);
+ mPreviewBuffers = desc->mBuffers;
mPreviewBuffersLength = desc->mLength;
mPreviewBuffersAvailable.clear();
+ mSnapshotBuffersAvailable.clear();
for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
{
- mPreviewBuffersAvailable.add(mPreviewBuffers[i], 0);
+ mPreviewBuffersAvailable.add(&mPreviewBuffers[i], 0);
}
// initial ref count for undeqeueued buffers is 1 since buffer provider
// is still holding on to it
for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
{
- mPreviewBuffersAvailable.add(mPreviewBuffers[i], 1);
+ mPreviewBuffersAvailable.add(&mPreviewBuffers[i], 1);
}
}
@@ -404,19 +485,19 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
if ( ret == NO_ERROR )
{
- Mutex::Autolock lock(mPreviewDataBufferLock);
- mPreviewDataBuffers = (int *) desc->mBuffers;
+ android::AutoMutex lock(mPreviewDataBufferLock);
+ mPreviewDataBuffers = desc->mBuffers;
mPreviewDataBuffersLength = desc->mLength;
mPreviewDataBuffersAvailable.clear();
for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
{
- mPreviewDataBuffersAvailable.add(mPreviewDataBuffers[i], 0);
+ mPreviewDataBuffersAvailable.add(&mPreviewDataBuffers[i], 0);
}
// initial ref count for undeqeueued buffers is 1 since buffer provider
// is still holding on to it
for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
{
- mPreviewDataBuffersAvailable.add(mPreviewDataBuffers[i], 1);
+ mPreviewDataBuffersAvailable.add(&mPreviewDataBuffers[i], 1);
}
}
@@ -457,19 +538,19 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
if ( ret == NO_ERROR )
{
- Mutex::Autolock lock(mCaptureBufferLock);
- mCaptureBuffers = (int *) desc->mBuffers;
+ android::AutoMutex lock(mCaptureBufferLock);
+ mCaptureBuffers = desc->mBuffers;
mCaptureBuffersLength = desc->mLength;
mCaptureBuffersAvailable.clear();
for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
{
- mCaptureBuffersAvailable.add(mCaptureBuffers[i], 0);
+ mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 0);
}
// initial ref count for undeqeueued buffers is 1 since buffer provider
// is still holding on to it
for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
{
- mCaptureBuffersAvailable.add(mCaptureBuffers[i], 1);
+ mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 1);
}
}
@@ -493,6 +574,48 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
break;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case CameraAdapter::CAMERA_USE_BUFFERS_REPROCESS:
+ CAMHAL_LOGDA("Use buffers for reprocessing");
+ desc = (BuffersDescriptor *) value1;
+
+ if (NULL == desc) {
+ CAMHAL_LOGEA("Invalid capture buffers!");
+ return -EINVAL;
+ }
+
+ if (ret == NO_ERROR) {
+ ret = setState(operation);
+ }
+
+ if (ret == NO_ERROR) {
+ android::AutoMutex lock(mVideoInBufferLock);
+ mVideoInBuffers = desc->mBuffers;
+ mVideoInBuffersAvailable.clear();
+ for (uint32_t i = 0 ; i < desc->mMaxQueueable ; i++) {
+ mVideoInBuffersAvailable.add(&mVideoInBuffers[i], 0);
+ }
+ // initial ref count for undeqeueued buffers is 1 since buffer provider
+ // is still holding on to it
+ for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ ) {
+ mVideoInBuffersAvailable.add(&mVideoInBuffers[i], 1);
+ }
+ ret = useBuffers(CameraAdapter::CAMERA_REPROCESS,
+ desc->mBuffers,
+ desc->mCount,
+ desc->mLength,
+ desc->mMaxQueueable);
+ }
+
+ if ( ret == NO_ERROR ) {
+ ret = commitState();
+ } else {
+ ret |= rollbackState();
+ }
+
+ break;
+#endif
+
case CameraAdapter::CAMERA_START_SMOOTH_ZOOM:
{
@@ -657,32 +780,6 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
}
- case CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS:
- {
-
- if ( ret == NO_ERROR )
- {
- ret = setState(operation);
- }
-
- if ( ret == NO_ERROR )
- {
- ret = flushBuffers();
- }
-
- if ( ret == NO_ERROR )
- {
- ret = commitState();
- }
- else
- {
- ret |= rollbackState();
- }
-
- break;
-
- }
-
case CameraAdapter::CAMERA_START_IMAGE_CAPTURE:
{
@@ -908,7 +1005,7 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
if ( NULL != frame )
{
- ret = getPictureBufferSize(frame->mLength, value2);
+ ret = getPictureBufferSize(*frame, value2);
}
else
{
@@ -959,7 +1056,6 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
break;
-#ifndef OMAP_TUNA
case CameraAdapter::CAMERA_START_FD:
ret = startFaceDetection();
@@ -971,11 +1067,69 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
ret = stopFaceDetection();
break;
+
+ case CameraAdapter::CAMERA_USE_BUFFERS_VIDEO_CAPTURE:
+
+ CAMHAL_LOGDA("Use buffers for video (RAW + JPEG) capture");
+ desc = ( BuffersDescriptor * ) value1;
+
+ if ( NULL == desc ) {
+ CAMHAL_LOGEA("Invalid capture buffers!");
+ return -EINVAL;
+ }
+
+ if ( ret == NO_ERROR ) {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR ) {
+ android::AutoMutex lock(mVideoBufferLock);
+ mVideoBuffers = desc->mBuffers;
+ mVideoBuffersLength = desc->mLength;
+ mVideoBuffersAvailable.clear();
+ for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ ) {
+ mVideoBuffersAvailable.add(&mVideoBuffers[i], 1);
+ }
+ // initial ref count for undeqeueued buffers is 1 since buffer provider
+ // is still holding on to it
+ for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ ) {
+ mVideoBuffersAvailable.add(&mVideoBuffers[i], 1);
+ }
+ }
+
+ if ( NULL != desc ) {
+ ret = useBuffers(CameraAdapter::CAMERA_VIDEO,
+ desc->mBuffers,
+ desc->mCount,
+ desc->mLength,
+ desc->mMaxQueueable);
+ }
+
+ if ( ret == NO_ERROR ) {
+ ret = commitState();
+ } else {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_SWITCH_TO_EXECUTING:
+ ret = switchToExecuting();
+ break;
+
+#ifdef OMAP_ENHANCEMENT_VTC
+ case CameraAdapter::CAMERA_SETUP_TUNNEL:
+ ret = setupTunnel(value1, value2, value3, value4);
+ break;
+
+ case CameraAdapter::CAMERA_DESTROY_TUNNEL:
+ ret = destroyTunnel();
+ break;
#endif
- case CameraAdapter::CAMERA_SWITCH_TO_EXECUTING:
- ret = switchToExecuting();
- break;
+ case CameraAdapter::CAMERA_PREVIEW_INITIALIZATION:
+ ret = cameraPreviewInitialization();
+ break;
default:
CAMHAL_LOGEB("Command 0x%x unsupported!", operation);
@@ -1000,9 +1154,9 @@ status_t BaseCameraAdapter::notifyFocusSubscribers(CameraHalEvent::FocusStatus s
}
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
- if (status == CameraHalEvent::FOCUS_STATUS_PENDING) {
+ if (status == CameraHalEvent::FOCUS_STATUS_PENDING) {
gettimeofday(&mStartFocus, NULL);
- } else {
+ } else {
//dump the AF latency
CameraHal::PPM("Focus finished in: ", &mStartFocus);
}
@@ -1056,14 +1210,14 @@ status_t BaseCameraAdapter::notifyShutterSubscribers()
shutterEvent.mCookie = ( void * ) mShutterSubscribers.keyAt(i);
eventCb = ( event_callback ) mShutterSubscribers.valueAt(i);
- CAMHAL_LOGDA("Sending shutter callback");
+ CAMHAL_LOGD("Sending shutter callback");
eventCb ( &shutterEvent );
}
shutterEvent.mEventData.clear();
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -1104,35 +1258,35 @@ status_t BaseCameraAdapter::notifyZoomSubscribers(int zoomIdx, bool targetReache
return ret;
}
-status_t BaseCameraAdapter::notifyFaceSubscribers(sp<CameraFDResult> &faces)
+status_t BaseCameraAdapter::notifyMetadataSubscribers(android::sp<CameraMetadataResult> &meta)
{
event_callback eventCb;
- CameraHalEvent faceEvent;
+ CameraHalEvent metaEvent;
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- if ( mFaceSubscribers.size() == 0 ) {
- CAMHAL_LOGDA("No face detection subscribers!");
+ if ( mMetadataSubscribers.size() == 0 ) {
+ CAMHAL_LOGDA("No preview metadata subscribers!");
return NO_INIT;
}
- faceEvent.mEventData = new CameraHalEvent::CameraHalEventData();
- if ( NULL == faceEvent.mEventData.get() ) {
+ metaEvent.mEventData = new CameraHalEvent::CameraHalEventData();
+ if ( NULL == metaEvent.mEventData.get() ) {
return -ENOMEM;
}
- faceEvent.mEventType = CameraHalEvent::EVENT_FACE;
- faceEvent.mEventData->faceEvent = faces;
+ metaEvent.mEventType = CameraHalEvent::EVENT_METADATA;
+ metaEvent.mEventData->metadataEvent = meta;
- for (unsigned int i = 0 ; i < mFaceSubscribers.size(); i++ ) {
- faceEvent.mCookie = (void *) mFaceSubscribers.keyAt(i);
- eventCb = (event_callback) mFaceSubscribers.valueAt(i);
+ for (unsigned int i = 0 ; i < mMetadataSubscribers.size(); i++ ) {
+ metaEvent.mCookie = (void *) mMetadataSubscribers.keyAt(i);
+ eventCb = (event_callback) mMetadataSubscribers.valueAt(i);
- eventCb ( &faceEvent );
+ eventCb ( &metaEvent );
}
- faceEvent.mEventData.clear();
+ metaEvent.mEventData.clear();
LOG_FUNCTION_NAME_EXIT;
@@ -1174,7 +1328,7 @@ status_t BaseCameraAdapter::sendFrameToSubscribers(CameraFrame *frame)
break;
case CameraFrame::SNAPSHOT_FRAME:
{
- ret = __sendFrameToSubscribers(frame, &mFrameSubscribers, CameraFrame::SNAPSHOT_FRAME);
+ ret = __sendFrameToSubscribers(frame, &mSnapshotSubscribers, CameraFrame::SNAPSHOT_FRAME);
}
break;
case CameraFrame::VIDEO_FRAME_SYNC:
@@ -1187,6 +1341,11 @@ status_t BaseCameraAdapter::sendFrameToSubscribers(CameraFrame *frame)
ret = __sendFrameToSubscribers(frame, &mFrameDataSubscribers, CameraFrame::FRAME_DATA_SYNC);
}
break;
+ case CameraFrame::REPROCESS_INPUT_FRAME:
+ {
+ ret = __sendFrameToSubscribers(frame, &mVideoInSubscribers, CameraFrame::REPROCESS_INPUT_FRAME);
+ }
+ break;
default:
CAMHAL_LOGEB("FRAMETYPE NOT SUPPORTED 0x%x", mask);
break;
@@ -1204,7 +1363,7 @@ status_t BaseCameraAdapter::sendFrameToSubscribers(CameraFrame *frame)
}
status_t BaseCameraAdapter::__sendFrameToSubscribers(CameraFrame* frame,
- KeyedVector<int, frame_callback> *subscribers,
+ android::KeyedVector<int, frame_callback> *subscribers,
CameraFrame::FrameType frameType)
{
size_t refCount = 0;
@@ -1219,7 +1378,7 @@ status_t BaseCameraAdapter::__sendFrameToSubscribers(CameraFrame* frame,
if (mFrameQueue.size() > 0){
CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(frame->mBuffer);
frame->mYuv[0] = lframe->mYuv[0];
- frame->mYuv[1] = lframe->mYuv[1];
+ frame->mYuv[1] = frame->mYuv[0] + (frame->mLength + frame->mOffset)*2/3;
}
else{
CAMHAL_LOGDA("Empty Frame Queue");
@@ -1264,7 +1423,7 @@ status_t BaseCameraAdapter::__sendFrameToSubscribers(CameraFrame* frame,
return ret;
}
-int BaseCameraAdapter::setInitFrameRefCount(void* buf, unsigned int mask)
+int BaseCameraAdapter::setInitFrameRefCount(CameraBuffer * buf, unsigned int mask)
{
int ret = NO_ERROR;
unsigned int lmask;
@@ -1297,7 +1456,7 @@ int BaseCameraAdapter::setInitFrameRefCount(void* buf, unsigned int mask)
break;
case CameraFrame::SNAPSHOT_FRAME:
{
- setFrameRefCount(buf, CameraFrame::SNAPSHOT_FRAME, mFrameSubscribers.size());
+ setFrameRefCount(buf, CameraFrame::SNAPSHOT_FRAME, mSnapshotSubscribers.size());
}
break;
case CameraFrame::VIDEO_FRAME_SYNC:
@@ -1310,6 +1469,11 @@ int BaseCameraAdapter::setInitFrameRefCount(void* buf, unsigned int mask)
setFrameRefCount(buf, CameraFrame::FRAME_DATA_SYNC, mFrameDataSubscribers.size());
}
break;
+ case CameraFrame::REPROCESS_INPUT_FRAME:
+ {
+ setFrameRefCount(buf,CameraFrame::REPROCESS_INPUT_FRAME, mVideoInSubscribers.size());
+ }
+ break;
default:
CAMHAL_LOGEB("FRAMETYPE NOT SUPPORTED 0x%x", lmask);
break;
@@ -1321,7 +1485,7 @@ int BaseCameraAdapter::setInitFrameRefCount(void* buf, unsigned int mask)
return ret;
}
-int BaseCameraAdapter::getFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType)
+int BaseCameraAdapter::getFrameRefCount(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
{
int res = -1;
@@ -1332,29 +1496,39 @@ int BaseCameraAdapter::getFrameRefCount(void* frameBuf, CameraFrame::FrameType f
case CameraFrame::IMAGE_FRAME:
case CameraFrame::RAW_FRAME:
{
- Mutex::Autolock lock(mCaptureBufferLock);
- res = mCaptureBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ android::AutoMutex lock(mCaptureBufferLock);
+ res = mCaptureBuffersAvailable.valueFor(frameBuf );
}
break;
- case CameraFrame::PREVIEW_FRAME_SYNC:
case CameraFrame::SNAPSHOT_FRAME:
{
- Mutex::Autolock lock(mPreviewBufferLock);
- res = mPreviewBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ android::AutoMutex lock(mSnapshotBufferLock);
+ res = mSnapshotBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ }
+ break;
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
+ res = mPreviewBuffersAvailable.valueFor(frameBuf );
}
break;
case CameraFrame::FRAME_DATA_SYNC:
{
- Mutex::Autolock lock(mPreviewDataBufferLock);
- res = mPreviewDataBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ android::AutoMutex lock(mPreviewDataBufferLock);
+ res = mPreviewDataBuffersAvailable.valueFor(frameBuf );
}
break;
case CameraFrame::VIDEO_FRAME_SYNC:
{
- Mutex::Autolock lock(mVideoBufferLock);
- res = mVideoBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ android::AutoMutex lock(mVideoBufferLock);
+ res = mVideoBuffersAvailable.valueFor(frameBuf );
}
break;
+ case CameraFrame::REPROCESS_INPUT_FRAME: {
+ android::AutoMutex lock(mVideoInBufferLock);
+ res = mVideoInBuffersAvailable.valueFor(frameBuf );
+ }
+ break;
default:
break;
};
@@ -1364,7 +1538,7 @@ int BaseCameraAdapter::getFrameRefCount(void* frameBuf, CameraFrame::FrameType f
return res;
}
-void BaseCameraAdapter::setFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType, int refCount)
+void BaseCameraAdapter::setFrameRefCount(CameraBuffer * frameBuf, CameraFrame::FrameType frameType, int refCount)
{
LOG_FUNCTION_NAME;
@@ -1374,29 +1548,39 @@ void BaseCameraAdapter::setFrameRefCount(void* frameBuf, CameraFrame::FrameType
case CameraFrame::IMAGE_FRAME:
case CameraFrame::RAW_FRAME:
{
- Mutex::Autolock lock(mCaptureBufferLock);
- mCaptureBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ android::AutoMutex lock(mCaptureBufferLock);
+ mCaptureBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
- case CameraFrame::PREVIEW_FRAME_SYNC:
case CameraFrame::SNAPSHOT_FRAME:
{
- Mutex::Autolock lock(mPreviewBufferLock);
- mPreviewBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ android::AutoMutex lock(mSnapshotBufferLock);
+ mSnapshotBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ }
+ break;
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
+ mPreviewBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
case CameraFrame::FRAME_DATA_SYNC:
{
- Mutex::Autolock lock(mPreviewDataBufferLock);
- mPreviewDataBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ android::AutoMutex lock(mPreviewDataBufferLock);
+ mPreviewDataBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
case CameraFrame::VIDEO_FRAME_SYNC:
{
- Mutex::Autolock lock(mVideoBufferLock);
- mVideoBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ android::AutoMutex lock(mVideoBufferLock);
+ mVideoBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
+ case CameraFrame::REPROCESS_INPUT_FRAME: {
+ android::AutoMutex lock(mVideoInBufferLock);
+ mVideoInBuffersAvailable.replaceValueFor(frameBuf, refCount);
+ }
+ break;
default:
break;
};
@@ -1411,7 +1595,7 @@ status_t BaseCameraAdapter::startVideoCapture()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mVideoBufferLock);
+ android::AutoMutex lock(mVideoBufferLock);
//If the capture is already ongoing, return from here.
if ( mRecording )
@@ -1423,8 +1607,6 @@ status_t BaseCameraAdapter::startVideoCapture()
if ( NO_ERROR == ret )
{
- mVideoBuffersAvailable.clear();
-
for ( unsigned int i = 0 ; i < mPreviewBuffersAvailable.size() ; i++ )
{
mVideoBuffersAvailable.add(mPreviewBuffersAvailable.keyAt(i), 0);
@@ -1453,13 +1635,15 @@ status_t BaseCameraAdapter::stopVideoCapture()
{
for ( unsigned int i = 0 ; i < mVideoBuffersAvailable.size() ; i++ )
{
- void *frameBuf = ( void * ) mVideoBuffersAvailable.keyAt(i);
+ CameraBuffer *frameBuf = mVideoBuffersAvailable.keyAt(i);
if( getFrameRefCount(frameBuf, CameraFrame::VIDEO_FRAME_SYNC) > 0)
{
returnFrame(frameBuf, CameraFrame::VIDEO_FRAME_SYNC);
}
}
+ mVideoBuffersAvailable.clear();
+
mRecording = false;
}
@@ -1582,7 +1766,7 @@ status_t BaseCameraAdapter::stopPreview()
return ret;
}
-status_t BaseCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+status_t BaseCameraAdapter::useBuffers(CameraMode mode, CameraBuffer* bufArr, int num, size_t length, unsigned int queueable)
{
status_t ret = NO_ERROR;
@@ -1593,7 +1777,7 @@ status_t BaseCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, s
return ret;
}
-status_t BaseCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+status_t BaseCameraAdapter::fillThisBuffer(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
{
status_t ret = NO_ERROR;
@@ -1626,7 +1810,7 @@ status_t BaseCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t buffe
return ret;
}
-status_t BaseCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+status_t BaseCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t bufferCount)
{
status_t ret = NO_ERROR;
@@ -1667,12 +1851,44 @@ status_t BaseCameraAdapter::switchToExecuting()
return ret;
}
+const char* BaseCameraAdapter::getLUTvalue_translateHAL(int Value, LUTtypeHAL LUT) {
+ int LUTsize = LUT.size;
+ for(int i = 0; i < LUTsize; i++)
+ if( LUT.Table[i].halDefinition == Value )
+ return LUT.Table[i].userDefinition;
+
+ return NULL;
+}
+
+status_t BaseCameraAdapter::setupTunnel(uint32_t SliceHeight, uint32_t EncoderHandle, uint32_t width, uint32_t height) {
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t BaseCameraAdapter::destroyTunnel() {
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t BaseCameraAdapter::cameraPreviewInitialization() {
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
status_t BaseCameraAdapter::setState(CameraCommands operation)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
+ const char *printState = getLUTvalue_translateHAL(operation, CamCommandsLUT);
+
mLock.lock();
switch ( mAdapterState )
@@ -1684,8 +1900,8 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_USE_BUFFERS_PREVIEW:
- CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->LOADED_PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->LOADED_PREVIEW_STATE event = %s",
+ printState);
mNextState = LOADED_PREVIEW_STATE;
break;
@@ -1693,20 +1909,21 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
case CAMERA_QUERY_RESOLUTION_PREVIEW:
case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
- CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->INTIALIZED_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->INTIALIZED_STATE event = %s",
+ printState);
mNextState = INTIALIZED_STATE;
break;
-
- case CAMERA_CANCEL_AUTOFOCUS:
case CAMERA_STOP_BRACKET_CAPTURE:
case CAMERA_STOP_IMAGE_CAPTURE:
ret = INVALID_OPERATION;
break;
+ case CAMERA_CANCEL_AUTOFOCUS:
+ ret = INVALID_OPERATION;
+ break;
default:
- CAMHAL_LOGEB("Adapter state switch INTIALIZED_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch INTIALIZED_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1720,8 +1937,8 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_START_PREVIEW:
- CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
@@ -1735,14 +1952,14 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
case CAMERA_USE_BUFFERS_PREVIEW_DATA:
- CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->LOADED_PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->LOADED_PREVIEW_STATE event = %s",
+ printState);
mNextState = LOADED_PREVIEW_STATE;
break;
default:
- CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1756,46 +1973,61 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_STOP_PREVIEW:
- CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->INTIALIZED_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->INTIALIZED_STATE event = %s",
+ printState);
mNextState = INTIALIZED_STATE;
break;
case CAMERA_PERFORM_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->AF_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->AF_STATE event = %s",
+ printState);
mNextState = AF_STATE;
break;
case CAMERA_START_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->ZOOM_STATE event = %s",
+ printState);
mNextState = ZOOM_STATE;
break;
case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->LOADED_CAPTURE_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->LOADED_CAPTURE_STATE event = %s",
+ printState);
mNextState = LOADED_CAPTURE_STATE;
break;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case CAMERA_USE_BUFFERS_REPROCESS:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->LOADED_REPROCESS_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_STATE;
+ break;
+#endif
+
case CAMERA_START_VIDEO:
- CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->VIDEO_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->VIDEO_STATE event = %s",
+ printState);
mNextState = VIDEO_STATE;
break;
case CAMERA_CANCEL_AUTOFOCUS:
case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
case CAMERA_STOP_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch PREVIEW_ACTIVE->PREVIEW_ACTIVE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_ACTIVE->PREVIEW_ACTIVE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ case CAMERA_STOP_BRACKET_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_ACTIVE->PREVIEW_ACTIVE event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
default:
- CAMHAL_LOGEB("Adapter state switch PREVIEW_ACTIVE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch PREVIEW_ACTIVE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1803,26 +2035,72 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
break;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case LOADED_REPROCESS_STATE:
+ switch (operation) {
+ case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_REPROCESS_STATE->LOADED_REPROCESS_CAPTURE_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_CAPTURE_STATE;
+ break;
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_REPROCESS_STATE->LOADED_REPROCESS_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_STATE;
+ break;
+ default:
+ CAMHAL_LOGEB("Adapter state switch LOADED_REPROCESS_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+ }
+
+ break;
+
+ case LOADED_REPROCESS_CAPTURE_STATE:
+ switch (operation) {
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_REPROCESS_CAPTURE_STATE->REPROCESS_STATE event = %s",
+ printState);
+ mNextState = REPROCESS_STATE;
+ break;
+ default:
+ CAMHAL_LOGEB("Adapter state switch LOADED_REPROCESS_CAPTURE_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+ }
+ break;
+#endif
+
case LOADED_CAPTURE_STATE:
switch ( operation )
{
case CAMERA_START_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = %s",
+ printState);
mNextState = CAPTURE_STATE;
break;
case CAMERA_START_BRACKET_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->BRACKETING_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->BRACKETING_STATE event = %s",
+ printState);
mNextState = BRACKETING_STATE;
break;
+ case CAMERA_USE_BUFFERS_VIDEO_CAPTURE:
+ //Hadnle this state for raw capture path.
+ //Just need to keep the same state.
+ //The next CAMERA_START_IMAGE_CAPTURE command will assign the mNextState.
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->LOADED_CAPTURE_STATE event = %s",
+ printState);
+ break;
+
default:
- CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1836,14 +2114,28 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_STOP_IMAGE_CAPTURE:
case CAMERA_STOP_BRACKET_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->CAPTURE_STATE event = %s",
+ printState);
+ mNextState = CAPTURE_STATE;
+ break;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case CAMERA_USE_BUFFERS_REPROCESS:
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->->LOADED_REPROCESS_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_STATE;
+ break;
+#endif
+
default:
- CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1858,20 +2150,20 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
case CAMERA_STOP_IMAGE_CAPTURE:
case CAMERA_STOP_BRACKET_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
case CAMERA_START_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->CAPTURE_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->CAPTURE_STATE event = %s",
+ printState);
mNextState = CAPTURE_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch BRACKETING_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch BRACKETING_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1885,26 +2177,20 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_CANCEL_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
- case CAMERA_START_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch AF_STATE->CAPTURE_STATE event = 0x%x",
- operation);
- mNextState = CAPTURE_STATE;
- break;
-
case CAMERA_START_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch AF_STATE->AF_ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->AF_ZOOM_STATE event = %s",
+ printState);
mNextState = AF_ZOOM_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch AF_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch AF_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1918,32 +2204,32 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_CANCEL_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = ZOOM_STATE;
break;
case CAMERA_STOP_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
case CAMERA_PERFORM_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->AF_ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->AF_ZOOM_STATE event = %s",
+ printState);
mNextState = AF_ZOOM_STATE;
break;
case CAMERA_START_VIDEO:
- CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->VIDEO_ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->VIDEO_ZOOM_STATE event = %s",
+ printState);
mNextState = VIDEO_ZOOM_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch ZOOM_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch ZOOM_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -1957,38 +2243,38 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_STOP_VIDEO:
- CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = PREVIEW_STATE;
break;
case CAMERA_PERFORM_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_AF_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_AF_STATE event = %s",
+ printState);
mNextState = VIDEO_AF_STATE;
break;
case CAMERA_START_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_ZOOM_STATE event = %s",
+ printState);
mNextState = VIDEO_ZOOM_STATE;
break;
case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_LOADED_CAPTURE_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_LOADED_CAPTURE_STATE event = %s",
+ printState);
mNextState = VIDEO_LOADED_CAPTURE_STATE;
break;
case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_STATE event = %s",
+ printState);
mNextState = VIDEO_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch VIDEO_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch VIDEO_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -2002,14 +2288,14 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_CANCEL_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch VIDEO_AF_STATE->VIDEO_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_AF_STATE->VIDEO_STATE event = %s",
+ printState);
mNextState = VIDEO_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch VIDEO_AF_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch VIDEO_AF_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -2023,14 +2309,14 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_START_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = %s",
+ printState);
mNextState = VIDEO_CAPTURE_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -2043,14 +2329,14 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
switch ( operation )
{
case CAMERA_STOP_IMAGE_CAPTURE:
- CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = %s",
+ printState);
mNextState = VIDEO_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -2064,20 +2350,20 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_STOP_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->AF_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->AF_STATE event = %s",
+ printState);
mNextState = AF_STATE;
break;
case CAMERA_CANCEL_AUTOFOCUS:
- CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->ZOOM_STATE event = %s",
+ printState);
mNextState = ZOOM_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch AF_ZOOM_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch AF_ZOOM_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -2091,20 +2377,20 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_STOP_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->VIDEO_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->VIDEO_STATE event = %s",
+ printState);
mNextState = VIDEO_STATE;
break;
case CAMERA_STOP_VIDEO:
- CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->ZOOM_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->ZOOM_STATE event = %s",
+ printState);
mNextState = ZOOM_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch VIDEO_ZOOM_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch VIDEO_ZOOM_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -2118,14 +2404,14 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
{
case CAMERA_STOP_SMOOTH_ZOOM:
- CAMHAL_LOGDB("Adapter state switch BRACKETING_ZOOM_STATE->BRACKETING_STATE event = 0x%x",
- operation);
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_ZOOM_STATE->BRACKETING_STATE event = %s",
+ printState);
mNextState = BRACKETING_STATE;
break;
default:
- CAMHAL_LOGEB("Adapter state switch BRACKETING_ZOOM_STATE Invalid Op! event = 0x%x",
- operation);
+ CAMHAL_LOGEB("Adapter state switch BRACKETING_ZOOM_STATE Invalid Op! event = %s",
+ printState);
ret = INVALID_OPERATION;
break;
@@ -2133,6 +2419,33 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
break;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case REPROCESS_STATE:
+ switch (operation) {
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->PREVIEW_STATE event = %s",
+ printState);
+ mNextState = PREVIEW_STATE;
+ break;
+ case CAMERA_START_IMAGE_CAPTURE:
+ case CAMERA_USE_BUFFERS_REPROCESS:
+ CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->REPROCESS_STATE event = %s",
+ printState);
+ mNextState = REPROCESS_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch REPROCESS_STATE Invalid Op! event = %s",
+ printState);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+#endif
+
+
default:
CAMHAL_LOGEA("Invalid Adapter state!");
ret = INVALID_OPERATION;
@@ -2175,6 +2488,9 @@ status_t BaseCameraAdapter::rollbackToPreviousState()
break;
case CAPTURE_STATE:
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ case REPROCESS_STATE:
+#endif
ret = sendCommand(CAMERA_STOP_IMAGE_CAPTURE);
break;
@@ -2265,7 +2581,7 @@ CameraAdapter::AdapterState BaseCameraAdapter::getState()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME_EXIT;
@@ -2278,7 +2594,7 @@ CameraAdapter::AdapterState BaseCameraAdapter::getNextState()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME_EXIT;
@@ -2321,11 +2637,57 @@ void BaseCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
LOG_FUNCTION_NAME;
LOG_FUNCTION_NAME_EXIT;
}
+
//-----------------------------------------------------------------------------
+extern "C" status_t OMXCameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras);
+extern "C" status_t V4LCameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras);
+extern "C" status_t CameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras)
+{
-};
+ status_t ret = NO_ERROR;
+ status_t err = NO_ERROR;
+ int num_cameras_supported = 0;
+
+ LOG_FUNCTION_NAME;
+
+ supportedCameras = 0;
+#ifdef OMX_CAMERA_ADAPTER
+ //Query OMX cameras
+ err = OMXCameraAdapter_Capabilities( properties_array, starting_camera,
+ max_camera, supportedCameras);
+ if(err != NO_ERROR) {
+ CAMHAL_LOGEA("error while getting OMXCameraAdapter capabilities");
+ ret = UNKNOWN_ERROR;
+ }
+#endif
+#ifdef V4L_CAMERA_ADAPTER
+ //Query V4L cameras
+ err = V4LCameraAdapter_Capabilities( properties_array, (const int) supportedCameras,
+ max_camera, num_cameras_supported);
+ if(err != NO_ERROR) {
+ CAMHAL_LOGEA("error while getting V4LCameraAdapter capabilities");
+ ret = UNKNOWN_ERROR;
+ }
+#endif
+
+ supportedCameras += num_cameras_supported;
+ CAMHAL_LOGEB("supportedCameras= %d\n", supportedCameras);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+//-----------------------------------------------------------------------------
+
+} // namespace Camera
+} // namespace Ti
/*--------------------Camera Adapter Class ENDS here-----------------------------*/
diff --git a/camera/BufferSourceAdapter.cpp b/camera/BufferSourceAdapter.cpp
new file mode 100644
index 0000000..d63b117
--- /dev/null
+++ b/camera/BufferSourceAdapter.cpp
@@ -0,0 +1,816 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+
+#include "BufferSourceAdapter.h"
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+namespace Ti {
+namespace Camera {
+
+static int getANWFormat(const char* parameters_format)
+{
+ int format = HAL_PIXEL_FORMAT_TI_NV12;
+
+ if (parameters_format != NULL) {
+ if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ CAMHAL_LOGDA("CbYCrY format selected");
+ // TODO(XXX): not defined yet
+ format = -1;
+ } else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ CAMHAL_LOGDA("YUV420SP format selected");
+ format = HAL_PIXEL_FORMAT_TI_NV12;
+ } else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ CAMHAL_LOGDA("RGB565 format selected");
+ // TODO(XXX): not defined yet
+ format = -1;
+ } else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ format = HAL_PIXEL_FORMAT_TI_Y16;
+ } else {
+ CAMHAL_LOGDA("Invalid format, NV12 format selected as default");
+ format = HAL_PIXEL_FORMAT_TI_NV12;
+ }
+ }
+
+ return format;
+}
+
+static int getUsageFromANW(int format)
+{
+ int usage = GRALLOC_USAGE_SW_READ_RARELY |
+ GRALLOC_USAGE_SW_WRITE_NEVER;
+
+ switch (format) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ // This usage flag indicates to gralloc we want the
+ // buffers to come from system heap
+ usage |= GRALLOC_USAGE_PRIVATE_0;
+ break;
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ default:
+ // No special flags needed
+ break;
+ }
+ return usage;
+}
+
+static const char* getFormatFromANW(int format)
+{
+ switch (format) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ // Assuming NV12 1D is RAW or Image frame
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ return android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
+ default:
+ break;
+ }
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+}
+
+static CameraFrame::FrameType formatToOutputFrameType(const char* format) {
+ switch (getANWFormat(format)) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ // Assuming NV12 1D is RAW or Image frame
+ return CameraFrame::RAW_FRAME;
+ default:
+ break;
+ }
+ return CameraFrame::RAW_FRAME;
+}
+
+static int getHeightFromFormat(const char* format, int stride, int size) {
+ CAMHAL_ASSERT((NULL != format) && (0 <= stride) && (0 <= size));
+ switch (getANWFormat(format)) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ return (size / (3 * stride)) * 2;
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ return (size / stride) / 2;
+ default:
+ break;
+ }
+ return 0;
+}
+
+/*--------------------BufferSourceAdapter Class STARTS here-----------------------------*/
+
+
+/**
+ * Display Adapter class STARTS here..
+ */
+BufferSourceAdapter::BufferSourceAdapter() : mBufferCount(0)
+{
+ LOG_FUNCTION_NAME;
+
+ mPixelFormat = NULL;
+ mBuffers = NULL;
+ mFrameProvider = NULL;
+ mBufferSource = NULL;
+
+ mFrameWidth = 0;
+ mFrameHeight = 0;
+ mPreviewWidth = 0;
+ mPreviewHeight = 0;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+BufferSourceAdapter::~BufferSourceAdapter()
+{
+ LOG_FUNCTION_NAME;
+
+ destroy();
+
+ if (mFrameProvider) {
+ // Unregister with the frame provider
+ mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
+ delete mFrameProvider;
+ mFrameProvider = NULL;
+ }
+
+ if (mQueueFrame.get()) {
+ mQueueFrame->requestExit();
+ mQueueFrame.clear();
+ }
+
+ if (mReturnFrame.get()) {
+ mReturnFrame->requestExit();
+ mReturnFrame.clear();
+ }
+
+ if( mBuffers != NULL)
+ {
+ delete [] mBuffers;
+ mBuffers = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t BufferSourceAdapter::initialize()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mReturnFrame.clear();
+ mReturnFrame = new ReturnFrame(this);
+ mReturnFrame->run();
+
+ mQueueFrame.clear();
+ mQueueFrame = new QueueFrame(this);
+ mQueueFrame->run();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+int BufferSourceAdapter::setPreviewWindow(preview_stream_ops_t *source)
+{
+ LOG_FUNCTION_NAME;
+
+ if (!source) {
+ CAMHAL_LOGEA("NULL window object passed to DisplayAdapter");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ if ( source == mBufferSource ) {
+ return ALREADY_EXISTS;
+ }
+
+ // Destroy the existing source, if it exists
+ destroy();
+
+ // Move to new source obj
+ mBufferSource = source;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int BufferSourceAdapter::setFrameProvider(FrameNotifier *frameProvider)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( !frameProvider ) {
+ CAMHAL_LOGEA("NULL passed for frame provider");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ if ( NULL != mFrameProvider ) {
+ delete mFrameProvider;
+ }
+
+ mFrameProvider = new FrameProvider(frameProvider, this, frameCallback);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int BufferSourceAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == errorNotifier ) {
+ CAMHAL_LOGEA("Invalid Error Notifier reference");
+ return -EINVAL;
+ }
+
+ mErrorNotifier = errorNotifier;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+int BufferSourceAdapter::enableDisplay(int width, int height,
+ struct timeval *refTime)
+{
+ LOG_FUNCTION_NAME;
+ CameraFrame::FrameType frameType;
+
+ if (mFrameProvider == NULL) {
+ // no-op frame provider not set yet
+ return NO_ERROR;
+ }
+
+ if (mBufferSourceDirection == BUFFER_SOURCE_TAP_IN) {
+ // only supporting one type of input frame
+ frameType = CameraFrame::REPROCESS_INPUT_FRAME;
+ } else {
+ frameType = formatToOutputFrameType(mPixelFormat);
+ }
+
+ mFrameProvider->enableFrameNotification(frameType);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int BufferSourceAdapter::disableDisplay(bool cancel_buffer)
+{
+ LOG_FUNCTION_NAME;
+
+ if (mFrameProvider) mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+status_t BufferSourceAdapter::pauseDisplay(bool pause)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ // no-op for BufferSourceAdapter
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+void BufferSourceAdapter::destroy()
+{
+ LOG_FUNCTION_NAME;
+
+ mBufferCount = 0;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+CameraBuffer* BufferSourceAdapter::allocateBufferList(int width, int dummyHeight, const char* format,
+ int &bytes, int numBufs)
+{
+ LOG_FUNCTION_NAME;
+ status_t err;
+ int i = -1;
+ const int lnumBufs = numBufs;
+ int undequeued = 0;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+
+ mBuffers = new CameraBuffer [lnumBufs];
+ memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
+
+ if ( NULL == mBufferSource ) {
+ return NULL;
+ }
+
+ int pixFormat = getANWFormat(format);
+ int usage = getUsageFromANW(pixFormat);
+
+ // Set gralloc usage bits for window.
+ err = mBufferSource->set_usage(mBufferSource, usage);
+ if (err != 0) {
+ CAMHAL_LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+
+ return NULL;
+ }
+
+ CAMHAL_LOGDB("Number of buffers set to ANativeWindow %d", numBufs);
+ // Set the number of buffers needed for this buffer source
+ err = mBufferSource->set_buffer_count(mBufferSource, numBufs);
+ if (err != 0) {
+ CAMHAL_LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+
+ return NULL;
+ }
+
+ CAMHAL_LOGDB("Configuring %d buffers for ANativeWindow", numBufs);
+ mBufferCount = numBufs;
+
+ // re-calculate height depending on stride and size
+ int height = getHeightFromFormat(format, width, bytes);
+
+ // Set window geometry
+ err = mBufferSource->set_buffers_geometry(mBufferSource,
+ width, height,
+ pixFormat);
+
+ if (err != 0) {
+ CAMHAL_LOGE("native_window_set_buffers_geometry failed: %s (%d)", strerror(-err), -err);
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+ return NULL;
+ }
+
+ if ( mBuffers == NULL ) {
+ CAMHAL_LOGEA("Couldn't create array for ANativeWindow buffers");
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+ }
+
+ mBufferSource->get_min_undequeued_buffer_count(mBufferSource, &undequeued);
+
+ for (i = 0; i < mBufferCount; i++ ) {
+ buffer_handle_t *handle;
+ int stride; // dummy variable to get stride
+ // TODO(XXX): Do we need to keep stride information in camera hal?
+
+ err = mBufferSource->dequeue_buffer(mBufferSource, &handle, &stride);
+
+ if (err != 0) {
+ CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+ goto fail;
+ }
+
+ CAMHAL_LOGDB("got handle %p", handle);
+ mBuffers[i].opaque = (void *)handle;
+ mBuffers[i].type = CAMERA_BUFFER_ANW;
+ mFramesWithCameraAdapterMap.add(handle, i);
+
+ bytes = getBufSize(format, width, height);
+ }
+
+ for( i = 0; i < mBufferCount-undequeued; i++ ) {
+ void *y_uv[2];
+ android::Rect bounds(width, height);
+
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
+ mBufferSource->lock_buffer(mBufferSource, handle);
+ mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ mBuffers[i].mapped = y_uv[0];
+ }
+
+ // return the rest of the buffers back to ANativeWindow
+ for(i = (mBufferCount-undequeued); i >= 0 && i < mBufferCount; i++) {
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
+ void *y_uv[2];
+ android::Rect bounds(width, height);
+
+ mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ mBuffers[i].mapped = y_uv[0];
+ mapper.unlock(*handle);
+
+ err = mBufferSource->cancel_buffer(mBufferSource, handle);
+ if (err != 0) {
+ CAMHAL_LOGEB("cancel_buffer failed: %s (%d)", strerror(-err), -err);
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+ goto fail;
+ }
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[i].opaque);
+ }
+
+ mPixelFormat = getPixFormatConstant(format);
+ mFrameWidth = width;
+ mFrameHeight = height;
+ mBufferSourceDirection = BUFFER_SOURCE_TAP_OUT;
+
+ return mBuffers;
+
+ fail:
+ // need to cancel buffers if any were dequeued
+ for (int start = 0; start < i && i > 0; start++) {
+ int err = mBufferSource->cancel_buffer(mBufferSource,
+ (buffer_handle_t *) mBuffers[start].opaque);
+ if (err != 0) {
+ CAMHAL_LOGEB("cancelBuffer failed w/ error 0x%08x", err);
+ break;
+ }
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[start].opaque);
+ }
+
+ freeBufferList(mBuffers);
+
+ CAMHAL_LOGEA("Error occurred, performing cleanup");
+
+ if (NULL != mErrorNotifier.get()) {
+ mErrorNotifier->errorNotify(-ENOMEM);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+
+}
+
+CameraBuffer* BufferSourceAdapter::getBufferList(int *num) {
+ LOG_FUNCTION_NAME;
+ status_t err;
+ const int lnumBufs = 1;
+ int formatSource;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ buffer_handle_t *handle;
+
+ // TODO(XXX): Only supporting one input buffer at a time right now
+ *num = 1;
+ mBuffers = new CameraBuffer [lnumBufs];
+ memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
+
+ if ( NULL == mBufferSource ) {
+ return NULL;
+ }
+
+ err = extendedOps()->update_and_get_buffer(mBufferSource, &handle, &mBuffers[0].stride);
+ if (err != 0) {
+ CAMHAL_LOGEB("update and get buffer failed: %s (%d)", strerror(-err), -err);
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+ goto fail;
+ }
+
+ CAMHAL_LOGD("got handle %p", handle);
+ mBuffers[0].opaque = (void *)handle;
+ mBuffers[0].type = CAMERA_BUFFER_ANW;
+ mFramesWithCameraAdapterMap.add(handle, 0);
+
+ err = extendedOps()->get_buffer_dimension(mBufferSource, &mBuffers[0].width, &mBuffers[0].height);
+ err = extendedOps()->get_buffer_format(mBufferSource, &formatSource);
+
+ // lock buffer
+ {
+ void *y_uv[2];
+ android::Rect bounds(mBuffers[0].width, mBuffers[0].height);
+ mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ mBuffers[0].mapped = y_uv[0];
+ }
+
+ mFrameWidth = mBuffers[0].width;
+ mFrameHeight = mBuffers[0].height;
+ mPixelFormat = getFormatFromANW(formatSource);
+
+ mBuffers[0].format = mPixelFormat;
+ mBufferSourceDirection = BUFFER_SOURCE_TAP_IN;
+
+ return mBuffers;
+
+ fail:
+ // need to cancel buffers if any were dequeued
+ freeBufferList(mBuffers);
+
+ if (NULL != mErrorNotifier.get()) {
+ mErrorNotifier->errorNotify(-ENOMEM);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+}
+
+uint32_t * BufferSourceAdapter::getOffsets()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NULL;
+}
+
+int BufferSourceAdapter::minUndequeueableBuffers(int& undequeueable) {
+ LOG_FUNCTION_NAME;
+ int ret = NO_ERROR;
+
+ if(!mBufferSource)
+ {
+ ret = INVALID_OPERATION;
+ goto end;
+ }
+
+ ret = mBufferSource->get_min_undequeued_buffer_count(mBufferSource, &undequeueable);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("get_min_undequeued_buffer_count failed: %s (%d)", strerror(-ret), -ret);
+ if ( ENODEV == ret ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+ return -ret;
+ }
+
+ end:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+
+}
+
+int BufferSourceAdapter::maxQueueableBuffers(unsigned int& queueable)
+{
+ LOG_FUNCTION_NAME;
+ int ret = NO_ERROR;
+ int undequeued = 0;
+
+ if(mBufferCount == 0) {
+ ret = INVALID_OPERATION;
+ goto end;
+ }
+
+ ret = minUndequeueableBuffers(undequeued);
+ if (ret != NO_ERROR) {
+ goto end;
+ }
+
+ queueable = mBufferCount - undequeued;
+
+ end:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+int BufferSourceAdapter::getFd()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return -1;
+
+}
+
+status_t BufferSourceAdapter::returnBuffersToWindow()
+{
+ status_t ret = NO_ERROR;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+
+ //Give the buffers back to display here - sort of free it
+ if (mBufferSource) {
+ for(unsigned int i = 0; i < mFramesWithCameraAdapterMap.size(); i++) {
+ int value = mFramesWithCameraAdapterMap.valueAt(i);
+ buffer_handle_t *handle = (buffer_handle_t *) mBuffers[value].opaque;
+
+ // if buffer index is out of bounds skip
+ if ((value < 0) || (value >= mBufferCount)) {
+ CAMHAL_LOGEA("Potential out bounds access to handle...skipping");
+ continue;
+ }
+
+ // unlock buffer before giving it up
+ mapper.unlock(*handle);
+
+ ret = mBufferSource->cancel_buffer(mBufferSource, handle);
+ if ( ENODEV == ret ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ return -ret;
+ } else if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("cancel_buffer() failed: %s (%d)",
+ strerror(-ret),
+ -ret);
+ return -ret;
+ }
+ }
+ } else {
+ CAMHAL_LOGE("mBufferSource is NULL");
+ }
+
+ ///Clear the frames with camera adapter map
+ mFramesWithCameraAdapterMap.clear();
+
+ return ret;
+
+}
+
+int BufferSourceAdapter::freeBufferList(CameraBuffer * buflist)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+
+ android::AutoMutex lock(mLock);
+
+ if (mBufferSourceDirection == BUFFER_SOURCE_TAP_OUT) returnBuffersToWindow();
+
+    if ( (NULL != buflist) && (buflist != mBuffers) )
+        {
+        // caller passed a list distinct from ours; free it separately
+        delete [] buflist;
+        }
+
+    if( mBuffers != NULL)
+        {
+        delete [] mBuffers;
+        mBuffers = NULL;
+        }
+
+ return NO_ERROR;
+}
+
+
+bool BufferSourceAdapter::supportsExternalBuffering()
+{
+ return false;
+}
+
+void BufferSourceAdapter::addFrame(CameraFrame* frame)
+{
+ if (mQueueFrame.get()) {
+ mQueueFrame->addFrame(frame);
+ }
+}
+
+void BufferSourceAdapter::handleFrameCallback(CameraFrame* frame)
+{
+ status_t ret = NO_ERROR;
+ buffer_handle_t *handle = NULL;
+ int i;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+
+ if (!mBuffers || !frame->mBuffer) {
+ CAMHAL_LOGEA("Adapter sent BufferSourceAdapter a NULL frame?");
+ return;
+ }
+
+ android::AutoMutex lock(mLock);
+
+    for ( i = 0; i < mBufferCount; i++ ) {
+        if (frame->mBuffer == &mBuffers[i]) {
+            break;
+        }
+    }
+    if (i >= mBufferCount) { CAMHAL_LOGEA("Frame not found in buffer list"); return; }
+    handle = (buffer_handle_t *) mBuffers[i].opaque;
+
+ // Handle input buffers
+ // TODO(XXX): Move handling of input buffers out of here if
+ // it becomes more complex
+ if (frame->mFrameType == CameraFrame::REPROCESS_INPUT_FRAME) {
+ CAMHAL_LOGD("Unlock %p (buffer #%d)", handle, i);
+ mapper.unlock(*handle);
+ return;
+ }
+
+ if ( NULL != frame->mMetaData.get() ) {
+ camera_memory_t *extMeta = frame->mMetaData->getExtendedMetadata();
+ if ( NULL != extMeta ) {
+ camera_metadata_t *metaData = static_cast<camera_metadata_t *> (extMeta->data);
+ metaData->timestamp = frame->mTimestamp;
+ ret = extendedOps()->set_metadata(mBufferSource, extMeta);
+ if (ret != 0) {
+ CAMHAL_LOGE("Surface::set_metadata returned error %d", ret);
+ }
+ }
+ }
+
+ // unlock buffer before enqueueing
+ mapper.unlock(*handle);
+
+ ret = mBufferSource->enqueue_buffer(mBufferSource, handle);
+ if (ret != 0) {
+ CAMHAL_LOGE("Surface::queueBuffer returned error %d", ret);
+ }
+
+ mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) frame->mBuffer->opaque);
+
+ // signal return frame thread that it can dequeue a buffer now
+ mReturnFrame->signal();
+}
+
+
+bool BufferSourceAdapter::handleFrameReturn()
+{
+ status_t err;
+ buffer_handle_t *buf;
+ int i = 0;
+ int stride; // dummy variable to get stride
+ CameraFrame::FrameType type;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ void *y_uv[2];
+ android::Rect bounds(mFrameWidth, mFrameHeight);
+
+ if ( NULL == mBufferSource ) {
+ return false;
+ }
+
+ err = mBufferSource->dequeue_buffer(mBufferSource, &buf, &stride);
+ if (err != 0) {
+ CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+
+ return false;
+ }
+
+ err = mBufferSource->lock_buffer(mBufferSource, buf);
+ if (err != 0) {
+ CAMHAL_LOGEB("lockbuffer failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+
+ return false;
+ }
+
+ mapper.lock(*buf, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+
+ for(i = 0; i < mBufferCount; i++) {
+ if (mBuffers[i].opaque == buf)
+ break;
+ }
+
+    if (i >= mBufferCount) {
+        CAMHAL_LOGEB("Failed to find handle %p", buf);
+        return false;
+    }
+    mFramesWithCameraAdapterMap.add((buffer_handle_t *) mBuffers[i].opaque, i);
+
+ CAMHAL_LOGVB("handleFrameReturn: found graphic buffer %d of %d", i, mBufferCount - 1);
+
+ mFrameProvider->returnFrame(&mBuffers[i], formatToOutputFrameType(mPixelFormat));
+ return true;
+}
+
+void BufferSourceAdapter::frameCallback(CameraFrame* caFrame)
+{
+ if ((NULL != caFrame) && (NULL != caFrame->mCookie)) {
+ BufferSourceAdapter *da = (BufferSourceAdapter*) caFrame->mCookie;
+ da->addFrame(caFrame);
+ } else {
+ CAMHAL_LOGEB("Invalid Cookie in Camera Frame = %p, Cookie = %p",
+ caFrame, caFrame ? caFrame->mCookie : NULL);
+ }
+}
+
+/*--------------------BufferSourceAdapter Class ENDS here-----------------------------*/
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
diff --git a/camera/CameraHal.cpp b/camera/CameraHal.cpp
index 7a5fdc0..71ba1bb 100644
--- a/camera/CameraHal.cpp
+++ b/camera/CameraHal.cpp
@@ -21,10 +21,9 @@
*
*/
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "ANativeWindowDisplayAdapter.h"
+#include "BufferSourceAdapter.h"
#include "TICameraParameters.h"
#include "CameraProperties.h"
#include <cutils/properties.h>
@@ -32,9 +31,11 @@
#include <poll.h>
#include <math.h>
-namespace android {
+namespace Ti {
+namespace Camera {
-extern "C" CameraAdapter* CameraAdapter_Factory(size_t);
+extern "C" CameraAdapter* OMXCameraAdapter_Factory(size_t);
+extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t);
/*****************************************************************************/
@@ -43,13 +44,74 @@ extern "C" CameraAdapter* CameraAdapter_Factory(size_t);
//// Currently, they are hard-coded
const int CameraHal::NO_BUFFERS_PREVIEW = MAX_CAMERA_BUFFERS;
-const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE = 2;
+const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE = 5;
+const int CameraHal::SW_SCALING_FPS_LIMIT = 15;
+
+const uint32_t MessageNotifier::EVENT_BIT_FIELD_POSITION = 16;
-const uint32_t MessageNotifier::EVENT_BIT_FIELD_POSITION = 0;
const uint32_t MessageNotifier::FRAME_BIT_FIELD_POSITION = 0;
+// TODO(XXX): Temporarily increase number of buffers we can allocate from ANW
+// until faux-NPA mode is implemented
+const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP = 15;
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+// HACK: Default path to directory where RAW images coming from video port will be saved to.
+// If directory not exists the saving is skipped and video port frame is ignored.
+// The directory name is choosed in so weird way to enable RAW images saving only when
+// directory has been created explicitly by user.
+extern const char * const kRawImagesOutputDirPath = "/data/misc/camera/RaW_PiCtUrEs";
+extern const char * const kYuvImagesOutputDirPath = "/data/misc/camera/YuV_PiCtUrEs";
+#endif
+
/******************************************************************************/
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+static int dummy_update_and_get_buffer(preview_stream_ops_t*, buffer_handle_t**, int*) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_get_buffer_dimension(preview_stream_ops_t*, int*, int*) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_get_buffer_format(preview_stream_ops_t*, int*) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_set_metadata(preview_stream_ops_t*, const camera_memory_t*) {
+ return INVALID_OPERATION;
+}
+#endif
+
+#ifdef OMAP_ENHANCEMENT
+static preview_stream_extended_ops_t dummyPreviewStreamExtendedOps = {
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ dummy_update_and_get_buffer,
+ dummy_get_buffer_dimension,
+ dummy_get_buffer_format,
+ dummy_set_metadata,
+#endif
+};
+#endif
+
+
+DisplayAdapter::DisplayAdapter()
+{
+#ifdef OMAP_ENHANCEMENT
+ mExtendedOps = &dummyPreviewStreamExtendedOps;
+#endif
+}
+
+#ifdef OMAP_ENHANCEMENT
+void DisplayAdapter::setExtendedOps(preview_stream_extended_ops_t * extendedOps) {
+ mExtendedOps = extendedOps ? extendedOps : &dummyPreviewStreamExtendedOps;
+}
+#endif
+
+
+
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
struct timeval CameraHal::mStartPreview;
@@ -110,6 +172,10 @@ void CameraHal::setCallbacks(camera_notify_callback notify_cb,
user);
}
+ if ( NULL != mCameraAdapter ) {
+ mCameraAdapter->setSharedAllocator(get_memory);
+ }
+
LOG_FUNCTION_NAME_EXIT;
}
@@ -131,10 +197,13 @@ void CameraHal::enableMsgType(int32_t msgType)
// ignoring enable focus message from camera service
// we will enable internally in autoFocus call
- msgType &= ~(CAMERA_MSG_FOCUS | CAMERA_MSG_FOCUS_MOVE);
+ msgType &= ~CAMERA_MSG_FOCUS;
+#ifdef ANDROID_API_JB_OR_LATER
+ msgType &= ~CAMERA_MSG_FOCUS_MOVE;
+#endif
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mMsgEnabled |= msgType;
}
@@ -173,7 +242,7 @@ void CameraHal::disableMsgType(int32_t msgType)
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mMsgEnabled &= ~msgType;
}
@@ -201,10 +270,18 @@ void CameraHal::disableMsgType(int32_t msgType)
*/
int CameraHal::msgTypeEnabled(int32_t msgType)
{
+ int32_t msgEnabled = 0;
+
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
+
+ msgEnabled = mMsgEnabled;
+ if (!previewEnabled() && !mPreviewInitializationDone) {
+ msgEnabled &= ~(CAMERA_MSG_PREVIEW_FRAME | CAMERA_MSG_PREVIEW_METADATA);
+ }
+
LOG_FUNCTION_NAME_EXIT;
- return (mMsgEnabled & msgType);
+ return (msgEnabled & msgType);
}
/**
@@ -218,11 +295,11 @@ int CameraHal::msgTypeEnabled(int32_t msgType)
int CameraHal::setParameters(const char* parameters)
{
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME;
- CameraParameters params;
+ android::CameraParameters params;
- String8 str_params(parameters);
+ android::String8 str_params(parameters);
params.unflatten(str_params);
LOG_FUNCTION_NAME_EXIT;
@@ -238,77 +315,99 @@ int CameraHal::setParameters(const char* parameters)
@todo Define error codes
*/
-int CameraHal::setParameters(const CameraParameters& params)
+int CameraHal::setParameters(const android::CameraParameters& params)
{
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME;
int w, h;
- int w_orig, h_orig;
- int framerate,minframerate;
+ int framerate;
int maxFPS, minFPS;
const char *valstr = NULL;
int varint = 0;
status_t ret = NO_ERROR;
- CameraParameters oldParams = mParameters;
// Needed for KEY_RECORDING_HINT
bool restartPreviewRequired = false;
bool updateRequired = false;
- bool videoMode = false;
+ android::CameraParameters oldParams = mParameters;
+
+#ifdef V4L_CAMERA_ADAPTER
+ if (strcmp (V4L_CAMERA_NAME_USB, mCameraProperties->get(CameraProperties::CAMERA_NAME)) == 0 ) {
+ updateRequired = true;
+ }
+#endif
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
///Ensure that preview is not enabled when the below parameters are changed.
if(!previewEnabled())
{
-
- CAMHAL_LOGDB("PreviewFormat %s", params.getPreviewFormat());
-
if ((valstr = params.getPreviewFormat()) != NULL) {
if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS))) {
mParameters.setPreviewFormat(valstr);
+ CAMHAL_LOGDB("PreviewFormat set %s", valstr);
} else {
- CAMHAL_LOGEB("Invalid preview format.Supported: %s", mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+ CAMHAL_LOGEB("Invalid preview format: %s. Supported: %s", valstr,
+ mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
return BAD_VALUE;
}
}
- varint = params.getInt(TICameraParameters::KEY_VNF);
- valstr = params.get(TICameraParameters::KEY_VNF);
- if ( valstr != NULL ) {
- if ( ( varint == 0 ) || ( varint == 1 ) ) {
- CAMHAL_LOGDB("VNF set %s", valstr);
- mParameters.set(TICameraParameters::KEY_VNF, varint);
- } else {
+ if ((valstr = params.get(TICameraParameters::KEY_VNF)) != NULL) {
+ if (strcmp(mCameraProperties->get(CameraProperties::VNF_SUPPORTED),
+ android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("VNF %s", valstr);
+ mParameters.set(TICameraParameters::KEY_VNF, valstr);
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGEB("ERROR: Invalid VNF: %s", valstr);
return BAD_VALUE;
+ } else {
+ mParameters.set(TICameraParameters::KEY_VNF,
+ android::CameraParameters::FALSE);
}
}
- if ((valstr = params.get(CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL) {
// make sure we support vstab...if we don't and application is trying to set
// vstab then return an error
if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
- CameraParameters::TRUE) == 0) {
- CAMHAL_LOGDB("VSTAB %s",valstr);
- mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION, valstr);
- } else if (strcmp(valstr, CameraParameters::TRUE) == 0) {
+ android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("VSTAB %s", valstr);
+ mParameters.set(android::CameraParameters::KEY_VIDEO_STABILIZATION, valstr);
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGEB("ERROR: Invalid VSTAB: %s", valstr);
return BAD_VALUE;
} else {
- mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION,
- CameraParameters::FALSE);
+ mParameters.set(android::CameraParameters::KEY_VIDEO_STABILIZATION,
+ android::CameraParameters::FALSE);
}
}
+ if( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL) {
+ if (strcmp(TICameraParameters::VIDEO_MODE, valstr)) {
+ mCapModeBackup = valstr;
+ }
- if( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL)
- {
- CAMHAL_LOGDB("Capture mode set %s", valstr);
- mParameters.set(TICameraParameters::KEY_CAP_MODE, valstr);
- }
+ CAMHAL_LOGDB("Capture mode set %s", valstr);
+
+ const char *currentMode = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ if ( NULL != currentMode ) {
+ if ( strcmp(currentMode, valstr) != 0 ) {
+ updateRequired = true;
+ }
+ } else {
+ updateRequired = true;
+ }
+
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, valstr);
+ } else if (!mCapModeBackup.isEmpty()) {
+ // Restore previous capture mode after stopPreview()
+ mParameters.set(TICameraParameters::KEY_CAP_MODE,
+ mCapModeBackup.string());
+ updateRequired = true;
+ }
if ((valstr = params.get(TICameraParameters::KEY_IPP)) != NULL) {
if (isParameterValid(valstr,mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES))) {
@@ -320,122 +419,94 @@ int CameraHal::setParameters(const CameraParameters& params)
}
}
-#ifdef OMAP_ENHANCEMENT
+#ifdef OMAP_ENHANCEMENT_VTC
+ if ((valstr = params.get(TICameraParameters::KEY_VTC_HINT)) != NULL ) {
+ mParameters.set(TICameraParameters::KEY_VTC_HINT, valstr);
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ mVTCUseCase = true;
+ } else {
+ mVTCUseCase = false;
+ }
+ CAMHAL_LOGDB("VTC Hint = %d", mVTCUseCase);
+ }
- if((valstr = params.get(TICameraParameters::KEY_S3D2D_PREVIEW)) != NULL)
- {
- CAMHAL_LOGDB("Stereo 3D->2D Preview mode is %s", params.get(TICameraParameters::KEY_S3D2D_PREVIEW));
- mParameters.set(TICameraParameters::KEY_S3D2D_PREVIEW, valstr);
+ if (mVTCUseCase) {
+ if ((valstr = params.get(TICameraParameters::KEY_VIDEO_ENCODER_HANDLE)) != NULL ) {
+ mParameters.set(TICameraParameters::KEY_VIDEO_ENCODER_HANDLE, valstr);
}
- if((valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE)) != NULL)
- {
- CAMHAL_LOGDB("AutoConvergence mode is %s", params.get(TICameraParameters::KEY_AUTOCONVERGENCE));
- mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE, valstr);
+ if ((valstr = params.get(TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT)) != NULL ) {
+ mParameters.set(TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT, valstr);
}
+ }
#endif
-
}
- params.getPreviewSize(&w, &h);
- if (w == -1 && h == -1) {
- CAMHAL_LOGEA("Unable to get preview size");
- return BAD_VALUE;
- }
-
- int oldWidth, oldHeight;
- mParameters.getPreviewSize(&oldWidth, &oldHeight);
-
-#ifdef OMAP_ENHANCEMENT
-
- int orientation =0;
- if((valstr = params.get(TICameraParameters::KEY_SENSOR_ORIENTATION)) != NULL)
- {
- CAMHAL_LOGDB("Sensor Orientation is set to %s", params.get(TICameraParameters::KEY_SENSOR_ORIENTATION));
- mParameters.set(TICameraParameters::KEY_SENSOR_ORIENTATION, valstr);
- orientation = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
- }
-
- if(orientation ==90 || orientation ==270)
- {
- if ( !isResolutionValid(h,w, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
- {
- CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h);
- return BAD_VALUE;
- }
- else
- {
- mParameters.setPreviewSize(w, h);
- mVideoWidth = w;
- mVideoHeight = h;
- }
- }
- else
- {
- if ( !isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
- {
- CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h);
- return BAD_VALUE;
- }
- else
+ if ( (valstr = params.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT)) != NULL )
+ {
+ if (strcmp(valstr, mParameters.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT)))
{
- mParameters.setPreviewSize(w, h);
+ CAMHAL_LOGDB("Stereo 3D preview image layout is %s", valstr);
+ mParameters.set(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT, valstr);
+ restartPreviewRequired = true;
}
- }
-
+ }
-#else
+#ifdef OMAP_ENHANCEMENT
+ int orientation =0;
+ if((valstr = params.get(TICameraParameters::KEY_SENSOR_ORIENTATION)) != NULL)
+ {
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(TICameraParameters::KEY_SENSOR_ORIENTATION),
+ updateRequired);
- if ( !isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES))) {
- CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h);
- return BAD_VALUE;
- } else {
- mParameters.setPreviewSize(w, h);
- }
+ orientation = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
+ if ( orientation < 0 || orientation >= 360 || (orientation%90) != 0 ) {
+ CAMHAL_LOGE("Invalid sensor orientation: %s. Value must be one of: [0, 90, 180, 270]", valstr);
+ return BAD_VALUE;
+ }
+ CAMHAL_LOGD("Sensor Orientation is set to %d", orientation);
+ mParameters.set(TICameraParameters::KEY_SENSOR_ORIENTATION, valstr);
+ }
#endif
- if ( ( oldWidth != w ) || ( oldHeight != h ) ) {
- restartPreviewRequired |= true;
+ params.getPreviewSize(&w, &h);
+ if (w == -1 && h == -1) {
+ CAMHAL_LOGEA("Unable to get preview size");
+ return BAD_VALUE;
}
- CAMHAL_LOGDB("PreviewResolution by App %d x %d", w, h);
+ mVideoWidth = w;
+ mVideoHeight = h;
// Handle RECORDING_HINT to Set/Reset Video Mode Parameters
- valstr = params.get(CameraParameters::KEY_RECORDING_HINT);
+ valstr = params.get(android::CameraParameters::KEY_RECORDING_HINT);
if(valstr != NULL)
{
- if(strcmp(valstr, CameraParameters::TRUE) == 0)
+ CAMHAL_LOGDB("Recording Hint is set to %s", valstr);
+ if(strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
- CAMHAL_LOGDB("Recording Hint is set to %s", valstr);
- mParameters.set(CameraParameters::KEY_RECORDING_HINT, valstr);
- videoMode = true;
- int w, h;
-
- params.getPreviewSize(&w, &h);
- CAMHAL_LOGVB("%s Preview Width=%d Height=%d\n", __FUNCTION__, w, h);
- //HACK FOR MMS
- mVideoWidth = w;
- mVideoHeight = h;
- CAMHAL_LOGVB("%s Video Width=%d Height=%d\n", __FUNCTION__, mVideoWidth, mVideoHeight);
-
- setPreferredPreviewRes(w, h);
- mParameters.getPreviewSize(&w, &h);
- CAMHAL_LOGVB("%s Preview Width=%d Height=%d\n", __FUNCTION__, w, h);
- //Avoid restarting preview for MMS HACK
- if ((w != mVideoWidth) && (h != mVideoHeight))
- {
- restartPreviewRequired = false;
- }
+ CAMHAL_LOGVB("Video Resolution: %d x %d", mVideoWidth, mVideoHeight);
+#ifdef OMAP_ENHANCEMENT_VTC
+ if (!mVTCUseCase)
+#endif
+ {
+ int maxFPS, minFPS;
+ params.getPreviewFpsRange(&minFPS, &maxFPS);
+ maxFPS /= CameraHal::VFR_SCALE;
+ if ( ( maxFPS <= SW_SCALING_FPS_LIMIT ) ) {
+ getPreferredPreviewRes(&w, &h);
+ }
+ }
+ mParameters.set(android::CameraParameters::KEY_RECORDING_HINT, valstr);
restartPreviewRequired |= setVideoModeParameters(params);
}
- else if(strcmp(valstr, CameraParameters::FALSE) == 0)
+ else if(strcmp(valstr, android::CameraParameters::FALSE) == 0)
{
- CAMHAL_LOGDB("Recording Hint is set to %s", valstr);
- mParameters.set(CameraParameters::KEY_RECORDING_HINT, valstr);
+ mParameters.set(android::CameraParameters::KEY_RECORDING_HINT, valstr);
restartPreviewRequired |= resetVideoModeParameters();
- params.getPreviewSize(&mVideoWidth, &mVideoHeight);
}
else
{
@@ -450,52 +521,95 @@ int CameraHal::setParameters(const CameraParameters& params)
// ImageCapture activity doesnot set KEY_RECORDING_HINT to FALSE (i.e. simply NULL),
// then Video Mode parameters may remain present in ImageCapture activity as well.
CAMHAL_LOGDA("Recording Hint is set to NULL");
- mParameters.set(CameraParameters::KEY_RECORDING_HINT, "");
+ mParameters.set(android::CameraParameters::KEY_RECORDING_HINT, "");
restartPreviewRequired |= resetVideoModeParameters();
- params.getPreviewSize(&mVideoWidth, &mVideoHeight);
}
- if ((valstr = params.get(CameraParameters::KEY_FOCUS_MODE)) != NULL) {
+ if ( (!isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
+ && (!isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES)))
+ && (!isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES)))
+ && (!isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES))) ) {
+ CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h);
+ return BAD_VALUE;
+ }
+
+ int oldWidth, oldHeight;
+ mParameters.getPreviewSize(&oldWidth, &oldHeight);
+ if ( ( oldWidth != w ) || ( oldHeight != h ) )
+ {
+ mParameters.setPreviewSize(w, h);
+ restartPreviewRequired = true;
+ }
+
+ CAMHAL_LOGDB("Preview Resolution: %d x %d", w, h);
+
+ if ((valstr = params.get(android::CameraParameters::KEY_FOCUS_MODE)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES))) {
CAMHAL_LOGDB("Focus mode set %s", valstr);
// we need to take a decision on the capture mode based on whether CAF picture or
// video is chosen so the behavior of each is consistent to the application
- if(strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0){
+ if(strcmp(valstr, android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0){
restartPreviewRequired |= resetVideoModeParameters();
- } else if (strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) == 0){
+ } else if (strcmp(valstr, android::CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) == 0){
restartPreviewRequired |= setVideoModeParameters(params);
}
- mParameters.set(CameraParameters::KEY_FOCUS_MODE, valstr);
+ mParameters.set(android::CameraParameters::KEY_FOCUS_MODE, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid FOCUS mode = %s", valstr);
return BAD_VALUE;
}
}
- ///Below parameters can be changed when the preview is running
- if ( (valstr = params.getPictureFormat()) != NULL ) {
- if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS))) {
- mParameters.setPictureFormat(valstr);
- } else {
- CAMHAL_LOGEB("ERROR: Invalid picture format: %s",valstr);
- return BAD_VALUE;
- }
+ mRawCapture = false;
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ valstr = params.get(TICameraParameters::KEY_CAP_MODE);
+ if ( (!valstr || strcmp(valstr, TICameraParameters::HIGH_QUALITY_MODE) == 0) &&
+ access(kRawImagesOutputDirPath, F_OK) != -1 ) {
+ mRawCapture = true;
}
+#endif
+
+ if ( (valstr = params.get(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT)) != NULL )
+ {
+ CAMHAL_LOGDB("Stereo 3D capture image layout is %s", valstr);
+ mParameters.set(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT, valstr);
+ }
params.getPictureSize(&w, &h);
- if ( isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES))) {
+ if ( (isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES)))
+ || (isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES)))
+ || (isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES)))
+ || (isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES))) ) {
mParameters.setPictureSize(w, h);
} else {
- CAMHAL_LOGEB("ERROR: Invalid picture resolution %dx%d", w, h);
+ CAMHAL_LOGEB("ERROR: Invalid picture resolution %d x %d", w, h);
return BAD_VALUE;
}
CAMHAL_LOGDB("Picture Size by App %d x %d", w, h);
-#ifdef OMAP_ENHANCEMENT
+ if ( (valstr = params.getPictureFormat()) != NULL ) {
+ if (isParameterValid(valstr,mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS))) {
+ if ((strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) &&
+ mCameraProperties->get(CameraProperties::MAX_PICTURE_WIDTH) &&
+ mCameraProperties->get(CameraProperties::MAX_PICTURE_HEIGHT)) {
+ unsigned int width = 0, height = 0;
+ // Set picture size to full frame for raw bayer capture
+ width = atoi(mCameraProperties->get(CameraProperties::MAX_PICTURE_WIDTH));
+ height = atoi(mCameraProperties->get(CameraProperties::MAX_PICTURE_HEIGHT));
+ mParameters.setPictureSize(width,height);
+ }
+ mParameters.setPictureFormat(valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid picture format: %s",valstr);
+ ret = BAD_VALUE;
+ }
+ }
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
if ((valstr = params.get(TICameraParameters::KEY_BURST)) != NULL) {
if (params.getInt(TICameraParameters::KEY_BURST) >=0) {
CAMHAL_LOGDB("Burst set %s", valstr);
@@ -505,145 +619,181 @@ int CameraHal::setParameters(const CameraParameters& params)
return BAD_VALUE;
}
}
-
#endif
- framerate = params.getPreviewFrameRate();
- valstr = params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE);
- CAMHAL_LOGDB("FRAMERATE %d", framerate);
-
- CAMHAL_LOGVB("Passed FRR: %s, Supported FRR %s", valstr
- , mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
- CAMHAL_LOGVB("Passed FR: %d, Supported FR %s", framerate
- , mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
-
-
- //Perform parameter validation
- if(!isParameterValid(valstr
- , mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED))
- || !isParameterValid(framerate,
- mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES)))
- {
- CAMHAL_LOGEA("Invalid frame rate range or frame rate");
- return BAD_VALUE;
- }
-
// Variable framerate ranges have higher priority over
// deprecated constant FPS. "KEY_PREVIEW_FPS_RANGE" should
// be cleared by the client in order for constant FPS to get
// applied.
- if ( strcmp(valstr, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE)) != 0)
- {
+ // If Port FPS needs to be used for configuring, then FPS RANGE should not be set by the APP.
+ valstr = params.get(android::CameraParameters::KEY_PREVIEW_FPS_RANGE);
+ if (valstr != NULL && strlen(valstr)) {
+ int curMaxFPS = 0;
+ int curMinFPS = 0;
+
// APP wants to set FPS range
- //Set framerate = MAXFPS
+ // Set framerate = MAXFPS
CAMHAL_LOGDA("APP IS CHANGING FRAME RATE RANGE");
- params.getPreviewFpsRange(&minFPS, &maxFPS);
- if ( ( 0 > minFPS ) || ( 0 > maxFPS ) )
- {
- CAMHAL_LOGEA("ERROR: FPS Range is negative!");
- return BAD_VALUE;
- }
+ mParameters.getPreviewFpsRange(&curMinFPS, &curMaxFPS);
+ CAMHAL_LOGDB("## current minFPS = %d; maxFPS=%d",curMinFPS, curMaxFPS);
- framerate = maxFPS /CameraHal::VFR_SCALE;
-
- }
- else
- {
- if ( framerate != atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE)) )
- {
-
- selectFPSRange(framerate, &minFPS, &maxFPS);
- CAMHAL_LOGDB("Select FPS Range %d %d", minFPS, maxFPS);
- }
- else
- {
- if (videoMode) {
- valstr = mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_VIDEO);
- CameraParameters temp;
- temp.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
- temp.getPreviewFpsRange(&minFPS, &maxFPS);
- }
- else {
- valstr = mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_IMAGE);
- CameraParameters temp;
- temp.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
- temp.getPreviewFpsRange(&minFPS, &maxFPS);
- }
-
- framerate = maxFPS / CameraHal::VFR_SCALE;
+ params.getPreviewFpsRange(&minFPS, &maxFPS);
+ CAMHAL_LOGDB("## requested minFPS = %d; maxFPS=%d",minFPS, maxFPS);
+ // Validate VFR
+ if (!isFpsRangeValid(minFPS, maxFPS, params.get(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE)) &&
+ !isFpsRangeValid(minFPS, maxFPS, params.get(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED))) {
+ CAMHAL_LOGEA("Invalid FPS Range");
+ return BAD_VALUE;
+ } else {
+ framerate = maxFPS / CameraHal::VFR_SCALE;
+ mParameters.setPreviewFrameRate(framerate);
+ CAMHAL_LOGDB("SET FRAMERATE %d", framerate);
+ mParameters.set(android::CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
+ CAMHAL_LOGDB("FPS Range = %s", valstr);
+ if ( curMaxFPS == (FRAME_RATE_HIGH_HD * CameraHal::VFR_SCALE) &&
+ maxFPS < (FRAME_RATE_HIGH_HD * CameraHal::VFR_SCALE) ) {
+ restartPreviewRequired = true;
}
+ }
+ } else {
+ framerate = params.getPreviewFrameRate();
+ if (!isParameterValid(framerate, params.get(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES)) &&
+ !isParameterValid(framerate, params.get(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED))) {
+ CAMHAL_LOGEA("Invalid frame rate");
+ return BAD_VALUE;
+ }
+ char tmpBuffer[MAX_PROP_VALUE_LENGTH];
- }
-
- CAMHAL_LOGDB("FPS Range = %s", valstr);
- CAMHAL_LOGDB("DEFAULT FPS Range = %s", mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
-
- minFPS /= CameraHal::VFR_SCALE;
- maxFPS /= CameraHal::VFR_SCALE;
-
- if ( ( 0 == minFPS ) || ( 0 == maxFPS ) )
- {
- CAMHAL_LOGEA("ERROR: FPS Range is invalid!");
- return BAD_VALUE;
- }
-
- if ( maxFPS < minFPS )
- {
- CAMHAL_LOGEA("ERROR: Max FPS is smaller than Min FPS!");
- return BAD_VALUE;
- }
- CAMHAL_LOGDB("SET FRAMERATE %d", framerate);
- mParameters.setPreviewFrameRate(framerate);
- mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE));
-
- CAMHAL_LOGDB("FPS Range [%d, %d]", minFPS, maxFPS);
- mParameters.set(TICameraParameters::KEY_MINFRAMERATE, minFPS);
- mParameters.set(TICameraParameters::KEY_MAXFRAMERATE, maxFPS);
+ sprintf(tmpBuffer, "%d,%d", framerate * CameraHal::VFR_SCALE, framerate * CameraHal::VFR_SCALE);
+ mParameters.setPreviewFrameRate(framerate);
+ CAMHAL_LOGDB("SET FRAMERATE %d", framerate);
+ mParameters.set(android::CameraParameters::KEY_PREVIEW_FPS_RANGE, tmpBuffer);
+ CAMHAL_LOGDB("FPS Range = %s", tmpBuffer);
+ }
- if( ( valstr = params.get(TICameraParameters::KEY_GBCE) ) != NULL )
- {
- CAMHAL_LOGDB("GBCE Value = %s", valstr);
- mParameters.set(TICameraParameters::KEY_GBCE, valstr);
+ if ((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) {
+ if (strcmp(mCameraProperties->get(CameraProperties::SUPPORTED_GBCE),
+ android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("GBCE %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GBCE, valstr);
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGEB("ERROR: Invalid GBCE: %s", valstr);
+ return BAD_VALUE;
+ } else {
+ mParameters.set(TICameraParameters::KEY_GBCE, android::CameraParameters::FALSE);
}
+ } else {
+ mParameters.set(TICameraParameters::KEY_GBCE, android::CameraParameters::FALSE);
+ }
- if( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL )
- {
- CAMHAL_LOGDB("GLBCE Value = %s", valstr);
- mParameters.set(TICameraParameters::KEY_GLBCE, valstr);
+ if ((valstr = params.get(TICameraParameters::KEY_GLBCE)) != NULL) {
+ if (strcmp(mCameraProperties->get(CameraProperties::SUPPORTED_GLBCE),
+ android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("GLBCE %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GLBCE, valstr);
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGEB("ERROR: Invalid GLBCE: %s", valstr);
+ return BAD_VALUE;
+ } else {
+ mParameters.set(TICameraParameters::KEY_GLBCE, android::CameraParameters::FALSE);
}
+ } else {
+ mParameters.set(TICameraParameters::KEY_GLBCE, android::CameraParameters::FALSE);
+ }
-#ifdef OMAP_ENHANCEMENT
-
+#ifdef OMAP_ENHANCEMENT_S3D
///Update the current parameter set
- if( (valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE)) != NULL)
- {
- CAMHAL_LOGDB("AutoConvergence Mode is set = %s", params.get(TICameraParameters::KEY_AUTOCONVERGENCE));
- mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE, valstr);
+ if ( (valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE_MODE)) != NULL ) {
+ CAMHAL_LOGDB("AutoConvergence mode set = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, valstr);
+ }
+
+ if ( (valstr = params.get(TICameraParameters::KEY_MANUAL_CONVERGENCE)) != NULL ) {
+ int manualConvergence = (int)strtol(valstr, 0, 0);
+
+ if ( ( manualConvergence < strtol(mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN), 0, 0) ) ||
+ ( manualConvergence > strtol(mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX), 0, 0) ) ) {
+ CAMHAL_LOGEB("ERROR: Invalid Manual Convergence = %d", manualConvergence);
+ return BAD_VALUE;
+ } else {
+ CAMHAL_LOGDB("ManualConvergence Value = %d", manualConvergence);
+ mParameters.set(TICameraParameters::KEY_MANUAL_CONVERGENCE, valstr);
}
+ }
- if( (valstr = params.get(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES)) !=NULL )
- {
- CAMHAL_LOGDB("ManualConvergence Value = %s", params.get(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES));
- mParameters.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, valstr);
+ if((valstr = params.get(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION)) != NULL) {
+ if ( strcmp(mCameraProperties->get(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED),
+ android::CameraParameters::TRUE) == 0 ) {
+            CAMHAL_LOGDB("Mechanical Misalignment Correction is %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION, valstr);
+ } else {
+ mParameters.remove(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION);
}
+ }
if ((valstr = params.get(TICameraParameters::KEY_EXPOSURE_MODE)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES))) {
- CAMHAL_LOGDB("Exposure set = %s", valstr);
+ CAMHAL_LOGDB("Exposure mode set = %s", valstr);
mParameters.set(TICameraParameters::KEY_EXPOSURE_MODE, valstr);
+ if (!strcmp(valstr, TICameraParameters::EXPOSURE_MODE_MANUAL)) {
+ int manualVal;
+ if ((valstr = params.get(TICameraParameters::KEY_MANUAL_EXPOSURE)) != NULL) {
+ manualVal = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE);
+ if (manualVal < mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN) ||
+ manualVal > mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX)) {
+ CAMHAL_LOGEB("ERROR: Manual Exposure = %s is out of range - "
+ "setting minimum supported value", valstr);
+ valstr = mParameters.get(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN);
+ }
+ CAMHAL_LOGDB("Manual Exposure = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MANUAL_EXPOSURE, valstr);
+ }
+ if ((valstr = params.get(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT)) != NULL) {
+ manualVal = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT);
+ if (manualVal < mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN) ||
+ manualVal > mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX)) {
+ CAMHAL_LOGEB("ERROR: Manual Exposure right = %s is out of range - "
+ "setting minimum supported value", valstr);
+ valstr = mParameters.get(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN);
+ }
+ CAMHAL_LOGDB("Manual Exposure right = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT, valstr);
+ }
+ if ((valstr = params.get(TICameraParameters::KEY_MANUAL_GAIN_ISO)) != NULL) {
+ manualVal = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO);
+ if (manualVal < mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN) ||
+ manualVal > mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX)) {
+ CAMHAL_LOGEB("ERROR: Manual Gain = %s is out of range - "
+ "setting minimum supported value", valstr);
+ valstr = mParameters.get(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN);
+ }
+ CAMHAL_LOGDB("Manual Gain = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MANUAL_GAIN_ISO, valstr);
+ }
+ if ((valstr = params.get(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT)) != NULL) {
+ manualVal = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT);
+ if (manualVal < mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN) ||
+ manualVal > mParameters.getInt(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX)) {
+ CAMHAL_LOGEB("ERROR: Manual Gain right = %s is out of range - "
+ "setting minimum supported value", valstr);
+ valstr = mParameters.get(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN);
+ }
+ CAMHAL_LOGDB("Manual Gain right = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT, valstr);
+ }
+ }
} else {
- CAMHAL_LOGEB("ERROR: Invalid Exposure = %s", valstr);
+ CAMHAL_LOGEB("ERROR: Invalid Exposure mode = %s", valstr);
return BAD_VALUE;
}
}
-
#endif
- if ((valstr = params.get(CameraParameters::KEY_WHITE_BALANCE)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_WHITE_BALANCE)) != NULL) {
if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE))) {
CAMHAL_LOGDB("White balance set %s", valstr);
- mParameters.set(CameraParameters::KEY_WHITE_BALANCE, valstr);
+ mParameters.set(android::CameraParameters::KEY_WHITE_BALANCE, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid white balance = %s", valstr);
return BAD_VALUE;
@@ -651,7 +801,6 @@ int CameraHal::setParameters(const CameraParameters& params)
}
#ifdef OMAP_ENHANCEMENT
-
if ((valstr = params.get(TICameraParameters::KEY_CONTRAST)) != NULL) {
if (params.getInt(TICameraParameters::KEY_CONTRAST) >= 0 ) {
CAMHAL_LOGDB("Contrast set %s", valstr);
@@ -691,13 +840,12 @@ int CameraHal::setParameters(const CameraParameters& params)
return BAD_VALUE;
}
}
-
#endif
- if ((valstr = params.get(CameraParameters::KEY_ANTIBANDING)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_ANTIBANDING)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING))) {
CAMHAL_LOGDB("Antibanding set %s", valstr);
- mParameters.set(CameraParameters::KEY_ANTIBANDING, valstr);
+ mParameters.set(android::CameraParameters::KEY_ANTIBANDING, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Antibanding = %s", valstr);
return BAD_VALUE;
@@ -705,7 +853,6 @@ int CameraHal::setParameters(const CameraParameters& params)
}
#ifdef OMAP_ENHANCEMENT
-
if ((valstr = params.get(TICameraParameters::KEY_ISO)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES))) {
CAMHAL_LOGDB("ISO set %s", valstr);
@@ -715,27 +862,25 @@ int CameraHal::setParameters(const CameraParameters& params)
return BAD_VALUE;
}
}
-
#endif
- if( (valstr = params.get(CameraParameters::KEY_FOCUS_AREAS)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_FOCUS_AREAS)) != NULL )
{
- CAMHAL_LOGDB("Focus areas position set %s",valstr);
- mParameters.set(CameraParameters::KEY_FOCUS_AREAS, valstr);
+ CAMHAL_LOGDB("Focus areas position set %s", params.get(android::CameraParameters::KEY_FOCUS_AREAS));
+ mParameters.set(android::CameraParameters::KEY_FOCUS_AREAS, valstr);
}
#ifdef OMAP_ENHANCEMENT
-
if( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
{
- CAMHAL_LOGDB("Measurements set to %s", params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE));
+ CAMHAL_LOGDB("Measurements set to %s", valstr);
mParameters.set(TICameraParameters::KEY_MEASUREMENT_ENABLE, valstr);
- if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_ENABLE) == 0)
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
mMeasurementEnabled = true;
}
- else if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_DISABLE) == 0)
+ else if (strcmp(valstr, android::CameraParameters::FALSE) == 0)
{
mMeasurementEnabled = false;
}
@@ -745,113 +890,107 @@ int CameraHal::setParameters(const CameraParameters& params)
}
}
-
#endif
- if( (valstr = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION)) != NULL)
+ if( (valstr = params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION)) != NULL)
{
- CAMHAL_LOGDB("Exposure compensation set %s", valstr);
- mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, valstr);
+ CAMHAL_LOGDB("Exposure compensation set %s", params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION));
+ mParameters.set(android::CameraParameters::KEY_EXPOSURE_COMPENSATION, valstr);
}
- if ((valstr = params.get(CameraParameters::KEY_SCENE_MODE)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_SCENE_MODE)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES))) {
CAMHAL_LOGDB("Scene mode set %s", valstr);
doesSetParameterNeedUpdate(valstr,
- mParameters.get(CameraParameters::KEY_SCENE_MODE),
+ mParameters.get(android::CameraParameters::KEY_SCENE_MODE),
updateRequired);
- mParameters.set(CameraParameters::KEY_SCENE_MODE, valstr);
+ mParameters.set(android::CameraParameters::KEY_SCENE_MODE, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Scene mode = %s", valstr);
return BAD_VALUE;
}
}
- if ((valstr = params.get(CameraParameters::KEY_FLASH_MODE)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_FLASH_MODE)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES))) {
CAMHAL_LOGDB("Flash mode set %s", valstr);
- mParameters.set(CameraParameters::KEY_FLASH_MODE, valstr);
+ mParameters.set(android::CameraParameters::KEY_FLASH_MODE, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Flash mode = %s", valstr);
return BAD_VALUE;
}
}
- if ((valstr = params.get(CameraParameters::KEY_EFFECT)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_EFFECT)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS))) {
CAMHAL_LOGDB("Effect set %s", valstr);
- mParameters.set(CameraParameters::KEY_EFFECT, valstr);
+ mParameters.set(android::CameraParameters::KEY_EFFECT, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Effect = %s", valstr);
return BAD_VALUE;
}
}
- varint = params.getInt(CameraParameters::KEY_ROTATION);
- if( varint >=0 )
- {
+ varint = params.getInt(android::CameraParameters::KEY_ROTATION);
+ if ( varint >= 0 ) {
CAMHAL_LOGDB("Rotation set %d", varint);
- mParameters.set(CameraParameters::KEY_ROTATION, varint);
- }
+ mParameters.set(android::CameraParameters::KEY_ROTATION, varint);
+ }
- varint = params.getInt(CameraParameters::KEY_JPEG_QUALITY);
- if( varint >= 0 )
- {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_QUALITY);
+ if ( varint >= 0 ) {
CAMHAL_LOGDB("Jpeg quality set %d", varint);
- mParameters.set(CameraParameters::KEY_JPEG_QUALITY, varint);
- }
+ mParameters.set(android::CameraParameters::KEY_JPEG_QUALITY, varint);
+ }
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
- if( varint >=0 )
- {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ if ( varint >= 0 ) {
CAMHAL_LOGDB("Thumbnail width set %d", varint);
- mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, varint);
- }
+ mParameters.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, varint);
+ }
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
- if( varint >=0 )
- {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ if ( varint >= 0 ) {
CAMHAL_LOGDB("Thumbnail width set %d", varint);
- mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, varint);
- }
+ mParameters.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, varint);
+ }
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
- if( varint >=0 )
- {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ if ( varint >= 0 ) {
CAMHAL_LOGDB("Thumbnail quality set %d", varint);
- mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, varint);
- }
+ mParameters.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, varint);
+ }
- if( (valstr = params.get(CameraParameters::KEY_GPS_LATITUDE)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_LATITUDE)) != NULL )
{
- CAMHAL_LOGDB("GPS latitude set %s", valstr);
- mParameters.set(CameraParameters::KEY_GPS_LATITUDE, valstr);
+ CAMHAL_LOGDB("GPS latitude set %s", params.get(android::CameraParameters::KEY_GPS_LATITUDE));
+ mParameters.set(android::CameraParameters::KEY_GPS_LATITUDE, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_LATITUDE);
+ mParameters.remove(android::CameraParameters::KEY_GPS_LATITUDE);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_LONGITUDE)) != NULL )
{
- CAMHAL_LOGDB("GPS longitude set %s", valstr);
- mParameters.set(CameraParameters::KEY_GPS_LONGITUDE, valstr);
+ CAMHAL_LOGDB("GPS longitude set %s", params.get(android::CameraParameters::KEY_GPS_LONGITUDE));
+ mParameters.set(android::CameraParameters::KEY_GPS_LONGITUDE, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_LONGITUDE);
+ mParameters.remove(android::CameraParameters::KEY_GPS_LONGITUDE);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_ALTITUDE)) != NULL )
{
- CAMHAL_LOGDB("GPS altitude set %s", valstr);
- mParameters.set(CameraParameters::KEY_GPS_ALTITUDE, valstr);
+ CAMHAL_LOGDB("GPS altitude set %s", params.get(android::CameraParameters::KEY_GPS_ALTITUDE));
+ mParameters.set(android::CameraParameters::KEY_GPS_ALTITUDE, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_ALTITUDE);
+ mParameters.remove(android::CameraParameters::KEY_GPS_ALTITUDE);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
{
- CAMHAL_LOGDB("GPS timestamp set %s", valstr);
- mParameters.set(CameraParameters::KEY_GPS_TIMESTAMP, valstr);
+ CAMHAL_LOGDB("GPS timestamp set %s", params.get(android::CameraParameters::KEY_GPS_TIMESTAMP));
+ mParameters.set(android::CameraParameters::KEY_GPS_TIMESTAMP, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_TIMESTAMP);
+ mParameters.remove(android::CameraParameters::KEY_GPS_TIMESTAMP);
}
if( (valstr = params.get(TICameraParameters::KEY_GPS_DATESTAMP)) != NULL )
@@ -862,12 +1001,12 @@ int CameraHal::setParameters(const CameraParameters& params)
mParameters.remove(TICameraParameters::KEY_GPS_DATESTAMP);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD)) != NULL )
{
- CAMHAL_LOGDB("GPS processing method set %s", valstr);
- mParameters.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, valstr);
+ CAMHAL_LOGDB("GPS processing method set %s", params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD));
+ mParameters.set(android::CameraParameters::KEY_GPS_PROCESSING_METHOD, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD);
+ mParameters.remove(android::CameraParameters::KEY_GPS_PROCESSING_METHOD);
}
if( (valstr = params.get(TICameraParameters::KEY_GPS_MAPDATUM )) != NULL )
@@ -899,67 +1038,116 @@ int CameraHal::setParameters(const CameraParameters& params)
}
#ifdef OMAP_ENHANCEMENT
-
if( (valstr = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE)) != NULL )
{
CAMHAL_LOGDB("Exposure Bracketing set %s", params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE));
mParameters.set(TICameraParameters::KEY_EXP_BRACKETING_RANGE, valstr);
+ mParameters.remove(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE);
}
- else
+ else if ((valstr = params.get(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE)) != NULL) {
+ CAMHAL_LOGDB("ABS Exposure+Gain Bracketing set %s", params.get(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE));
+ mParameters.set(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE, valstr);
+ mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
+ } else
{
mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
}
+ if( (valstr = params.get(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE)) != NULL ) {
+ CAMHAL_LOGDB("Zoom Bracketing range %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE, valstr);
+ } else {
+ mParameters.remove(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE);
+ }
#endif
- valstr = params.get(CameraParameters::KEY_ZOOM);
- varint = params.getInt(CameraParameters::KEY_ZOOM);
- if ( valstr != NULL ) {
- if ( ( varint >= 0 ) && ( varint <= mMaxZoomSupported ) ) {
- CAMHAL_LOGDB("Zoom set %s", valstr);
+ if ((valstr = params.get(android::CameraParameters::KEY_ZOOM)) != NULL ) {
+ varint = atoi(valstr);
+ if ( varint >= 0 && varint <= mMaxZoomSupported ) {
+ CAMHAL_LOGDB("Zoom set %d", varint);
doesSetParameterNeedUpdate(valstr,
- mParameters.get(CameraParameters::KEY_ZOOM),
+ mParameters.get(android::CameraParameters::KEY_ZOOM),
updateRequired);
- mParameters.set(CameraParameters::KEY_ZOOM, valstr);
+ mParameters.set(android::CameraParameters::KEY_ZOOM, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Zoom: %s", valstr);
return BAD_VALUE;
}
}
- if( (valstr = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK)) != NULL )
{
CAMHAL_LOGDB("Auto Exposure Lock set %s", valstr);
doesSetParameterNeedUpdate(valstr,
- mParameters.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK),
+ mParameters.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK),
updateRequired);
- mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, valstr);
+ mParameters.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK, valstr);
}
- if( (valstr = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)) != NULL )
{
CAMHAL_LOGDB("Auto WhiteBalance Lock set %s", valstr);
doesSetParameterNeedUpdate(valstr,
- mParameters.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK),
+ mParameters.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK),
updateRequired);
- mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, valstr);
+ mParameters.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, valstr);
}
- if( (valstr = params.get(CameraParameters::KEY_METERING_AREAS)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_METERING_AREAS)) != NULL )
{
- CAMHAL_LOGDB("Metering areas position set %s", valstr);
- mParameters.set(CameraParameters::KEY_METERING_AREAS, valstr);
+ CAMHAL_LOGDB("Metering areas position set %s", params.get(android::CameraParameters::KEY_METERING_AREAS));
+ mParameters.set(android::CameraParameters::KEY_METERING_AREAS, valstr);
}
- // Only send parameters to adapter if preview is already
- // enabled or doesSetParameterNeedUpdate says so. Initial setParameters to camera adapter,
- // will be called in startPreview()
- // TODO(XXX): Need to identify other parameters that need update from camera adapter
- if ( (NULL != mCameraAdapter) && (mPreviewEnabled || updateRequired) ) {
- ret |= mCameraAdapter->setParameters(mParameters);
+ if( (valstr = params.get(TICameraParameters::RAW_WIDTH)) != NULL ) {
+ CAMHAL_LOGDB("Raw image width set %s", params.get(TICameraParameters::RAW_WIDTH));
+ mParameters.set(TICameraParameters::RAW_WIDTH, valstr);
}
-#ifdef OMAP_ENHANCEMENT
+ if( (valstr = params.get(TICameraParameters::RAW_HEIGHT)) != NULL ) {
+ CAMHAL_LOGDB("Raw image height set %s", params.get(TICameraParameters::RAW_HEIGHT));
+ mParameters.set(TICameraParameters::RAW_HEIGHT, valstr);
+ }
+
+ //TI extensions for enable/disable algos
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_FIXED_GAMMA)) != NULL )
+ {
+ CAMHAL_LOGDB("Fixed Gamma set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_FIXED_GAMMA, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_NSF1)) != NULL )
+ {
+ CAMHAL_LOGDB("NSF1 set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_NSF1, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_NSF2)) != NULL )
+ {
+ CAMHAL_LOGDB("NSF2 set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_NSF2, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_SHARPENING)) != NULL )
+ {
+ CAMHAL_LOGDB("Sharpening set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_SHARPENING, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_THREELINCOLORMAP)) != NULL )
+ {
+ CAMHAL_LOGDB("Color Conversion set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_THREELINCOLORMAP, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_GIC)) != NULL )
+ {
+ CAMHAL_LOGDB("Green Inballance Correction set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_GIC, valstr);
+ }
+ android::CameraParameters adapterParams = mParameters;
+
+#ifdef OMAP_ENHANCEMENT
if( NULL != params.get(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS) )
{
int posBracketRange = params.getInt(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS);
@@ -982,44 +1170,58 @@ int CameraHal::setParameters(const CameraParameters& params)
CAMHAL_LOGDB("Negative bracketing range %d", mBracketRangeNegative);
if( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL) &&
- ( strcmp(valstr, TICameraParameters::BRACKET_ENABLE) == 0 ))
- {
- if ( !mBracketingEnabled )
- {
+ ( strcmp(valstr, android::CameraParameters::TRUE) == 0 )) {
+ if ( !mBracketingEnabled ) {
CAMHAL_LOGDA("Enabling bracketing");
mBracketingEnabled = true;
-
- //Wait for AF events to enable bracketing
- if ( NULL != mCameraAdapter )
- {
- setEventProvider( CameraHalEvent::ALL_EVENTS, mCameraAdapter );
- }
- }
- else
- {
+ } else {
CAMHAL_LOGDA("Bracketing already enabled");
- }
}
- else if ( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL ) &&
- ( strcmp(valstr, TICameraParameters::BRACKET_DISABLE) == 0 ))
- {
+ adapterParams.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
+ mParameters.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
+ } else if ( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL ) &&
+ ( strcmp(valstr, android::CameraParameters::FALSE) == 0 )) {
CAMHAL_LOGDA("Disabling bracketing");
+ adapterParams.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
+ mParameters.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
mBracketingEnabled = false;
- stopImageBracketing();
+ if ( mBracketingRunning ) {
+ stopImageBracketing();
+ }
- //Remove AF events subscription
- if ( NULL != mEventProvider )
- {
- mEventProvider->disableEventNotification( CameraHalEvent::ALL_EVENTS );
- delete mEventProvider;
- mEventProvider = NULL;
- }
+ } else {
+ adapterParams.remove(TICameraParameters::KEY_TEMP_BRACKETING);
+ mParameters.remove(TICameraParameters::KEY_TEMP_BRACKETING);
+ }
+#endif
- }
+#ifdef OMAP_ENHANCEMENT_VTC
+ if (mVTCUseCase && !mTunnelSetup && (mCameraAdapter != NULL) &&
+ ((mParameters.get(TICameraParameters::KEY_VIDEO_ENCODER_HANDLE)) != NULL )&&
+ ((mParameters.get(TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT)) != NULL )) {
+
+ uint32_t sliceHeight = mParameters.getInt(TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT);
+ uint32_t encoderHandle = mParameters.getInt(TICameraParameters::KEY_VIDEO_ENCODER_HANDLE);
+ int w, h;
+ mParameters.getPreviewSize(&w, &h);
+ status_t done = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_SETUP_TUNNEL, sliceHeight, encoderHandle, w, h);
+ if (done == NO_ERROR) mTunnelSetup = true;
+ ret |= done;
+ }
+#endif
+
+ // Only send parameters to adapter if preview is already
+ // enabled or doesSetParameterNeedUpdate says so. Initial setParameters to camera adapter,
+ // will be called in startPreview()
+ // TODO(XXX): Need to identify other parameters that need update from camera adapter
+ if ( (NULL != mCameraAdapter) && (mPreviewEnabled || updateRequired) && !restartPreviewRequired ) {
+ ret |= mCameraAdapter->setParameters(adapterParams);
+ }
+#ifdef OMAP_ENHANCEMENT
if( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
- ( strcmp(valstr, TICameraParameters::SHUTTER_ENABLE) == 0 ))
+ ( strcmp(valstr, android::CameraParameters::TRUE) == 0 ))
{
CAMHAL_LOGDA("Enabling shutter sound");
@@ -1028,7 +1230,7 @@ int CameraHal::setParameters(const CameraParameters& params)
mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr);
}
else if ( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
- ( strcmp(valstr, TICameraParameters::SHUTTER_DISABLE) == 0 ))
+ ( strcmp(valstr, android::CameraParameters::FALSE) == 0 ))
{
CAMHAL_LOGDA("Disabling shutter sound");
@@ -1036,9 +1238,7 @@ int CameraHal::setParameters(const CameraParameters& params)
mMsgEnabled &= ~CAMERA_MSG_SHUTTER;
mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr);
}
-
#endif
-
}
//On fail restore old parameters
@@ -1057,6 +1257,10 @@ int CameraHal::setParameters(const CameraParameters& params)
forceStopPreview();
}
+ if ( !mBracketingRunning && mBracketingEnabled ) {
+ startImageBracketing();
+ }
+
if (ret != NO_ERROR)
{
CAMHAL_LOGEA("Failed to restart Preview");
@@ -1082,45 +1286,35 @@ status_t CameraHal::allocPreviewBufs(int width, int height, const char* previewF
return NO_MEMORY;
}
- if(!mPreviewBufs)
+ if(!mPreviewBuffers)
{
- ///@todo Pluralise the name of this method to allocateBuffers
mPreviewLength = 0;
- mPreviewBufs = (int32_t *) mDisplayAdapter->allocateBuffer(width, height,
+ mPreviewBuffers = mDisplayAdapter->allocateBufferList(width, height,
previewFormat,
mPreviewLength,
buffercount);
-
- if (NULL == mPreviewBufs ) {
+ if (NULL == mPreviewBuffers ) {
CAMHAL_LOGEA("Couldn't allocate preview buffers");
return NO_MEMORY;
- }
+ }
mPreviewOffsets = (uint32_t *) mDisplayAdapter->getOffsets();
if ( NULL == mPreviewOffsets ) {
CAMHAL_LOGEA("Buffer mapping failed");
return BAD_VALUE;
- }
-
- mPreviewFd = mDisplayAdapter->getFd();
- if ( -1 == mPreviewFd ) {
- CAMHAL_LOGEA("Invalid handle");
- return BAD_VALUE;
- }
+ }
mBufProvider = (BufferProvider*) mDisplayAdapter.get();
ret = mDisplayAdapter->maxQueueableBuffers(max_queueable);
if (ret != NO_ERROR) {
return ret;
- }
-
+ }
}
LOG_FUNCTION_NAME_EXIT;
return ret;
-
}
status_t CameraHal::freePreviewBufs()
@@ -1128,12 +1322,11 @@ status_t CameraHal::freePreviewBufs()
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- CAMHAL_LOGDB("mPreviewBufs = 0x%x", (unsigned int)mPreviewBufs);
- if(mPreviewBufs)
+ CAMHAL_LOGDB("mPreviewBuffers = %p", mPreviewBuffers);
+ if(mPreviewBuffers)
{
- ///@todo Pluralise the name of this method to freeBuffers
- ret = mBufProvider->freeBuffer(mPreviewBufs);
- mPreviewBufs = NULL;
+ ret = mBufProvider->freeBufferList(mPreviewBuffers);
+ mPreviewBuffers = NULL;
LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -1153,7 +1346,7 @@ status_t CameraHal::allocPreviewDataBufs(size_t size, size_t bufferCount)
if ( NO_ERROR == ret )
{
- if( NULL != mPreviewDataBufs )
+ if( NULL != mPreviewDataBuffers )
{
ret = freePreviewDataBufs();
}
@@ -1162,10 +1355,10 @@ status_t CameraHal::allocPreviewDataBufs(size_t size, size_t bufferCount)
if ( NO_ERROR == ret )
{
bytes = ((bytes+4095)/4096)*4096;
- mPreviewDataBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, NULL, bytes, bufferCount);
+ mPreviewDataBuffers = mMemoryManager->allocateBufferList(0, 0, NULL, bytes, bufferCount);
CAMHAL_LOGDB("Size of Preview data buffer = %d", bytes);
- if( NULL == mPreviewDataBufs )
+ if( NULL == mPreviewDataBuffers )
{
CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
ret = -NO_MEMORY;
@@ -1189,7 +1382,7 @@ status_t CameraHal::allocPreviewDataBufs(size_t size, size_t bufferCount)
mPreviewDataOffsets = NULL;
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -1203,12 +1396,11 @@ status_t CameraHal::freePreviewDataBufs()
if ( NO_ERROR == ret )
{
- if( NULL != mPreviewDataBufs )
+ if( NULL != mPreviewDataBuffers )
{
- ///@todo Pluralise the name of this method to freeBuffers
- ret = mMemoryManager->freeBuffer(mPreviewDataBufs);
- mPreviewDataBufs = NULL;
+ ret = mMemoryManager->freeBufferList(mPreviewDataBuffers);
+ mPreviewDataBuffers = NULL;
}
}
@@ -1218,7 +1410,9 @@ status_t CameraHal::freePreviewDataBufs()
return ret;
}
-status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size_t size, const char* previewFormat, unsigned int bufferCount)
+status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size_t size,
+ const char* previewFormat, unsigned int bufferCount,
+ unsigned int *max_queueable)
{
status_t ret = NO_ERROR;
int bytes;
@@ -1228,41 +1422,44 @@ status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size
bytes = size;
// allocate image buffers only if not already allocated
- if(NULL != mImageBufs) {
+ if(NULL != mImageBuffers) {
+ if (mBufferSourceAdapter_Out.get()) {
+ mBufferSourceAdapter_Out->maxQueueableBuffers(*max_queueable);
+ } else {
+ *max_queueable = bufferCount;
+ }
return NO_ERROR;
}
- if ( NO_ERROR == ret )
- {
- bytes = ((bytes+4095)/4096)*4096;
- mImageBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, previewFormat, bytes, bufferCount);
+ if (mBufferSourceAdapter_Out.get()) {
+ mImageBuffers = mBufferSourceAdapter_Out->allocateBufferList(width, height, previewFormat,
+ bytes, bufferCount);
+ mBufferSourceAdapter_Out->maxQueueableBuffers(*max_queueable);
+ } else {
+ bytes = ((bytes + 4095) / 4096) * 4096;
+ mImageBuffers = mMemoryManager->allocateBufferList(0, 0, previewFormat, bytes, bufferCount);
+ *max_queueable = bufferCount;
+ }
- CAMHAL_LOGDB("Size of Image cap buffer = %d", bytes);
- if( NULL == mImageBufs )
- {
- CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
- ret = -NO_MEMORY;
- }
- else
- {
- bytes = size;
- }
- }
+ CAMHAL_LOGDB("Size of Image cap buffer = %d", bytes);
+ if ( NULL == mImageBuffers ) {
+ CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
+ ret = -NO_MEMORY;
+ } else {
+ bytes = size;
+ }
- if ( NO_ERROR == ret )
- {
+ if ( NO_ERROR == ret ) {
mImageFd = mMemoryManager->getFd();
mImageLength = bytes;
mImageOffsets = mMemoryManager->getOffsets();
- }
- else
- {
+ } else {
mImageFd = -1;
mImageLength = 0;
mImageOffsets = NULL;
- }
+ }
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -1272,36 +1469,38 @@ status_t CameraHal::allocVideoBufs(uint32_t width, uint32_t height, uint32_t buf
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- if( NULL != mVideoBufs ){
- ret = freeVideoBufs(mVideoBufs);
- mVideoBufs = NULL;
+ if( NULL != mVideoBuffers ){
+ ret = freeVideoBufs(mVideoBuffers);
+ mVideoBuffers = NULL;
}
if ( NO_ERROR == ret ){
int32_t stride;
- buffer_handle_t *bufsArr = new buffer_handle_t [bufferCount];
+ CameraBuffer *buffers = new CameraBuffer [bufferCount];
- if (bufsArr != NULL){
- for (int i = 0; i< bufferCount; i++){
- GraphicBufferAllocator &GrallocAlloc = GraphicBufferAllocator::get();
- buffer_handle_t buf;
- ret = GrallocAlloc.alloc(width, height, HAL_PIXEL_FORMAT_NV12, CAMHAL_GRALLOC_USAGE, &buf, &stride);
+ memset (buffers, 0, sizeof(CameraBuffer) * bufferCount);
+
+ if (buffers != NULL){
+ for (unsigned int i = 0; i< bufferCount; i++){
+ android::GraphicBufferAllocator &GrallocAlloc = android::GraphicBufferAllocator::get();
+ buffer_handle_t handle;
+ ret = GrallocAlloc.alloc(width, height, HAL_PIXEL_FORMAT_NV12, CAMHAL_GRALLOC_USAGE, &handle, &stride);
if (ret != NO_ERROR){
CAMHAL_LOGEA("Couldn't allocate video buffers using Gralloc");
ret = -NO_MEMORY;
- for (int j=0; j< i; j++){
- buf = (buffer_handle_t)bufsArr[j];
- CAMHAL_LOGEB("Freeing Gralloc Buffer 0x%x", buf);
- GrallocAlloc.free(buf);
+ for (unsigned int j=0; j< i; j++){
+ CAMHAL_LOGEB("Freeing Gralloc Buffer %p", buffers[i].opaque);
+ GrallocAlloc.free((buffer_handle_t)buffers[i].opaque);
}
- delete [] bufsArr;
+ delete [] buffers;
goto exit;
}
- bufsArr[i] = buf;
- CAMHAL_LOGVB("*** Gralloc Handle =0x%x ***", buf);
+ buffers[i].type = CAMERA_BUFFER_GRALLOC;
+ buffers[i].opaque = (void *)handle;
+ CAMHAL_LOGVB("*** Gralloc Handle =0x%x ***", handle);
}
- mVideoBufs = (int32_t *)bufsArr;
+ mVideoBuffers = buffers;
}
else{
CAMHAL_LOGEA("Couldn't allocate video buffers ");
@@ -1310,11 +1509,53 @@ status_t CameraHal::allocVideoBufs(uint32_t width, uint32_t height, uint32_t buf
}
exit:
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
+status_t CameraHal::allocRawBufs(int width, int height, const char* previewFormat, int bufferCount)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME
+
+
+ ///@todo Enhance this method allocImageBufs() to take in a flag for burst capture
+ ///Always allocate the buffers for image capture using MemoryManager
+ if (NO_ERROR == ret) {
+ if(( NULL != mVideoBuffers )) {
+ // Re-use the buffer for raw capture.
+ return ret;
+ }
+ }
+
+ if ( NO_ERROR == ret ) {
+ mVideoLength = 0;
+ mVideoLength = (((width * height * 2) + 4095)/4096)*4096;
+ mVideoBuffers = mMemoryManager->allocateBufferList(width, height, previewFormat,
+ mVideoLength, bufferCount);
+
+ CAMHAL_LOGDB("Size of Video cap buffer (used for RAW capture) %d", mVideoLength);
+ if( NULL == mVideoBuffers ) {
+ CAMHAL_LOGEA("Couldn't allocate Video buffers using memory manager");
+ ret = -NO_MEMORY;
+ }
+ }
+
+ if ( NO_ERROR == ret ) {
+ mVideoFd = mMemoryManager->getFd();
+ mVideoOffsets = mMemoryManager->getOffsets();
+ } else {
+ mVideoFd = -1;
+ mVideoOffsets = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
void endImageCapture( void *userData)
{
LOG_FUNCTION_NAME;
@@ -1344,11 +1585,18 @@ status_t CameraHal::signalEndImageCapture()
{
status_t ret = NO_ERROR;
int w,h;
- CameraParameters adapterParams = mParameters;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME;
+ if (mBufferSourceAdapter_Out.get()) {
+ mBufferSourceAdapter_Out->disableDisplay();
+ }
+
+ if (mBufferSourceAdapter_In.get()) {
+ mBufferSourceAdapter_In->disableDisplay();
+ }
+
if ( mBracketingRunning ) {
stopImageBracketing();
} else {
@@ -1366,50 +1614,44 @@ status_t CameraHal::freeImageBufs()
LOG_FUNCTION_NAME;
- if ( NO_ERROR == ret )
- {
-
- if( NULL != mImageBufs )
- {
-
- ///@todo Pluralise the name of this method to freeBuffers
- ret = mMemoryManager->freeBuffer(mImageBufs);
- mImageBufs = NULL;
+ if (NULL == mImageBuffers) {
+ return -EINVAL;
+ }
- }
- else
- {
- ret = -EINVAL;
- }
+ if (mBufferSourceAdapter_Out.get()) {
+ ret = mBufferSourceAdapter_Out->freeBufferList(mImageBuffers);
+ } else {
+ ret = mMemoryManager->freeBufferList(mImageBuffers);
+ }
- }
+ if (ret == NO_ERROR) {
+ mImageBuffers = NULL;
+ }
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t CameraHal::freeVideoBufs(void *bufs)
+status_t CameraHal::freeVideoBufs(CameraBuffer *bufs)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- buffer_handle_t *pBuf = (buffer_handle_t*)bufs;
int count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
- if(pBuf == NULL)
+ if(bufs == NULL)
{
CAMHAL_LOGEA("NULL pointer passed to freeVideoBuffer");
LOG_FUNCTION_NAME_EXIT;
return BAD_VALUE;
}
- GraphicBufferAllocator &GrallocAlloc = GraphicBufferAllocator::get();
+ android::GraphicBufferAllocator &GrallocAlloc = android::GraphicBufferAllocator::get();
for(int i = 0; i < count; i++){
- buffer_handle_t ptr = *pBuf++;
- CAMHAL_LOGVB("Free Video Gralloc Handle 0x%x", ptr);
- GrallocAlloc.free(ptr);
+ CAMHAL_LOGVB("Free Video Gralloc Handle 0x%x", bufs[i].opaque);
+ GrallocAlloc.free((buffer_handle_t)bufs[i].opaque);
}
LOG_FUNCTION_NAME_EXIT;
@@ -1417,6 +1659,27 @@ status_t CameraHal::freeVideoBufs(void *bufs)
return ret;
}
+status_t CameraHal::freeRawBufs()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME
+
+ if ( NO_ERROR == ret ) {
+ if( NULL != mVideoBuffers ) {
+ ///@todo Pluralise the name of this method to freeBuffers
+ ret = mMemoryManager->freeBufferList(mVideoBuffers);
+ mVideoBuffers = NULL;
+ } else {
+ ret = -EINVAL;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
+
/**
@brief Start preview mode.
@@ -1425,13 +1688,117 @@ status_t CameraHal::freeVideoBufs(void *bufs)
@todo Update function header with the different errors that are possible
*/
-status_t CameraHal::startPreview()
+status_t CameraHal::startPreview() {
+ LOG_FUNCTION_NAME;
+
+ // When tunneling is enabled during VTC, startPreview happens in 2 steps:
+ // When the application sends the command CAMERA_CMD_PREVIEW_INITIALIZATION,
+ // cameraPreviewInitialization() is called, which in turn causes the CameraAdapter
+ // to move from loaded to idle state. And when the application calls startPreview,
+ // the CameraAdapter moves from idle to executing state.
+ //
+ // If the application calls startPreview() without sending the command
+ // CAMERA_CMD_PREVIEW_INITIALIZATION, then the function cameraPreviewInitialization()
+ // AND startPreview() are executed. In other words, if the application calls
+ // startPreview() without sending the command CAMERA_CMD_PREVIEW_INITIALIZATION,
+ // then the CameraAdapter moves from loaded to idle to executing state in one shot.
+ status_t ret = cameraPreviewInitialization();
+
+ // The flag mPreviewInitializationDone is set to true at the end of the function
+ // cameraPreviewInitialization(). Therefore, if everything goes alright, then the
+ // flag will be set. Sometimes, the function cameraPreviewInitialization() may
+ // return prematurely if all the resources are not available for starting preview.
+ // For example, if the preview window is not set, then it would return NO_ERROR.
+ // Under such circumstances, one should return from startPreview as well and should
+ // not continue execution. That is why, we check the flag and not the return value.
+ if (!mPreviewInitializationDone) return ret;
+
+ // Once startPreview is called, there is no need to continue to remember whether
+ // the function cameraPreviewInitialization() was called earlier or not. And so
+ // the flag mPreviewInitializationDone is reset here. Plus, this preserves the
+ // current behavior of startPreview under the circumstances where the application
+ // calls startPreview twice or more.
+ mPreviewInitializationDone = false;
+
+ ///Enable the display adapter if present, actual overlay enable happens when we post the buffer
+ if(mDisplayAdapter.get() != NULL) {
+ CAMHAL_LOGDA("Enabling display");
+ int width, height;
+ mParameters.getPreviewSize(&width, &height);
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ ret = mDisplayAdapter->enableDisplay(width, height, &mStartPreview);
+#else
+ ret = mDisplayAdapter->enableDisplay(width, height, NULL);
+#endif
+
+ if ( ret != NO_ERROR ) {
+ CAMHAL_LOGEA("Couldn't enable display");
+
+ // FIXME: At this stage mStateSwitchLock is locked and unlock is supposed to be called
+ // only from mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW)
+ // below. But this will never happen because of goto error. Thus at next
+ // startPreview() call CameraHAL will be deadlocked.
+ // Need to revisit mStateSwitch lock, for now just abort the process.
+ CAMHAL_ASSERT_X(false,
+ "At this stage mCameraAdapter->mStateSwitchLock is still locked, "
+ "deadlock is guaranteed");
+
+ goto error;
+ }
+
+ }
+
+ ///Send START_PREVIEW command to adapter
+ CAMHAL_LOGDA("Starting CameraAdapter preview mode");
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW);
+
+ if(ret!=NO_ERROR) {
+ CAMHAL_LOGEA("Couldn't start preview w/ CameraAdapter");
+ goto error;
+ }
+ CAMHAL_LOGDA("Started preview");
+
+ mPreviewEnabled = true;
+ mPreviewStartInProgress = false;
+ return ret;
+
+ error:
+
+ CAMHAL_LOGEA("Performing cleanup after error");
+
+ //Do all the cleanup
+ freePreviewBufs();
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_PREVIEW);
+ if(mDisplayAdapter.get() != NULL) {
+ mDisplayAdapter->disableDisplay(false);
+ }
+ mAppCallbackNotifier->stop();
+ mPreviewStartInProgress = false;
+ mPreviewEnabled = false;
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+////////////
+/**
+ @brief Set preview mode related initialization
+ -> Camera Adapter set params
+ -> Allocate buffers
+ -> Set use buffers for preview
+ @param none
+ @return NO_ERROR
+ @todo Update function header with the different errors that are possible
+
+ */
+status_t CameraHal::cameraPreviewInitialization()
{
status_t ret = NO_ERROR;
CameraAdapter::BuffersDescriptor desc;
CameraFrame frame;
- const char *valstr = NULL;
unsigned int required_buffer_count;
unsigned int max_queueble_buffers;
@@ -1441,6 +1808,10 @@ status_t CameraHal::startPreview()
LOG_FUNCTION_NAME;
+ if (mPreviewInitializationDone) {
+ return NO_ERROR;
+ }
+
if ( mPreviewEnabled ){
CAMHAL_LOGDA("Preview already running");
LOG_FUNCTION_NAME_EXIT;
@@ -1461,15 +1832,11 @@ status_t CameraHal::startPreview()
///Update the current preview width and height
mPreviewWidth = frame.mWidth;
mPreviewHeight = frame.mHeight;
- //Update the padded width and height - required for VNF and VSTAB
- mParameters.set(TICameraParameters::KEY_PADDED_WIDTH, mPreviewWidth);
- mParameters.set(TICameraParameters::KEY_PADDED_HEIGHT, mPreviewHeight);
-
}
///If we don't have the preview callback enabled and display adapter,
if(!mSetPreviewWindowCalled || (mDisplayAdapter.get() == NULL)){
- CAMHAL_LOGDA("Preview not started. Preview in progress flag set");
+ CAMHAL_LOGD("Preview not started. Preview in progress flag set");
mPreviewStartInProgress = true;
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_SWITCH_TO_EXECUTING);
if ( NO_ERROR != ret ){
@@ -1499,10 +1866,11 @@ status_t CameraHal::startPreview()
{
mAppCallbackNotifier->enableMsgType (CAMERA_MSG_PREVIEW_FRAME);
}
+
+ signalEndImageCapture();
return ret;
}
-
required_buffer_count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
///Allocate the preview buffers
@@ -1534,7 +1902,7 @@ status_t CameraHal::startPreview()
if ( NO_ERROR == ret )
{
- desc.mBuffers = mPreviewDataBufs;
+ desc.mBuffers = mPreviewDataBuffers;
desc.mOffsets = mPreviewDataOffsets;
desc.mFd = mPreviewDataFd;
desc.mLength = mPreviewDataLength;
@@ -1548,7 +1916,7 @@ status_t CameraHal::startPreview()
}
///Pass the buffers to Camera Adapter
- desc.mBuffers = mPreviewBufs;
+ desc.mBuffers = mPreviewBuffers;
desc.mOffsets = mPreviewOffsets;
desc.mFd = mPreviewFd;
desc.mLength = mPreviewLength;
@@ -1565,8 +1933,6 @@ status_t CameraHal::startPreview()
return ret;
}
- mAppCallbackNotifier->startPreviewCallbacks(mParameters, mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, required_buffer_count);
-
///Start the callback notifier
ret = mAppCallbackNotifier->start();
@@ -1586,72 +1952,10 @@ status_t CameraHal::startPreview()
goto error;
}
- ///Enable the display adapter if present, actual overlay enable happens when we post the buffer
- if(mDisplayAdapter.get() != NULL)
- {
- CAMHAL_LOGDA("Enabling display");
- bool isS3d = false;
- DisplayAdapter::S3DParameters s3dParams;
- int width, height;
- mParameters.getPreviewSize(&width, &height);
-#if 0 //TODO: s3d is not part of bringup...will reenable
- if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_SUPPORTED)) != NULL) {
- isS3d = (strcmp(valstr, "true") == 0);
- }
- if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D2D_PREVIEW)) != NULL) {
- if (strcmp(valstr, "off") == 0)
- {
- CAMHAL_LOGEA("STEREO 3D->2D PREVIEW MODE IS OFF");
- //TODO: obtain the frame packing configuration from camera or user settings
- //once side by side configuration is supported
- s3dParams.mode = OVERLAY_S3D_MODE_ON;
- s3dParams.framePacking = OVERLAY_S3D_FORMAT_OVERUNDER;
- s3dParams.order = OVERLAY_S3D_ORDER_LF;
- s3dParams.subSampling = OVERLAY_S3D_SS_NONE;
- }
- else
- {
- CAMHAL_LOGEA("STEREO 3D->2D PREVIEW MODE IS ON");
- s3dParams.mode = OVERLAY_S3D_MODE_OFF;
- s3dParams.framePacking = OVERLAY_S3D_FORMAT_OVERUNDER;
- s3dParams.order = OVERLAY_S3D_ORDER_LF;
- s3dParams.subSampling = OVERLAY_S3D_SS_NONE;
- }
- }
-#endif //if 0
-
-#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ if (ret == NO_ERROR) mPreviewInitializationDone = true;
- ret = mDisplayAdapter->enableDisplay(width, height, &mStartPreview, isS3d ? &s3dParams : NULL);
+ mAppCallbackNotifier->startPreviewCallbacks(mParameters, mPreviewBuffers, mPreviewOffsets, mPreviewFd, mPreviewLength, required_buffer_count);
-#else
-
- ret = mDisplayAdapter->enableDisplay(width, height, NULL, isS3d ? &s3dParams : NULL);
-
-#endif
-
- if ( ret != NO_ERROR )
- {
- CAMHAL_LOGEA("Couldn't enable display");
- goto error;
- }
-
- }
-
- ///Send START_PREVIEW command to adapter
- CAMHAL_LOGDA("Starting CameraAdapter preview mode");
-
- ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW);
-
- if(ret!=NO_ERROR)
- {
- CAMHAL_LOGEA("Couldn't start preview w/ CameraAdapter");
- goto error;
- }
- CAMHAL_LOGDA("Started preview");
-
- mPreviewEnabled = true;
- mPreviewStartInProgress = false;
return ret;
error:
@@ -1698,19 +2002,22 @@ status_t CameraHal::setPreviewWindow(struct preview_stream_ops *window)
if(mDisplayAdapter.get() != NULL)
{
///NULL window passed, destroy the display adapter if present
- CAMHAL_LOGDA("NULL window passed, destroying display adapter");
+ CAMHAL_LOGD("NULL window passed, destroying display adapter");
mDisplayAdapter.clear();
///@remarks If there was a window previously existing, we usually expect another valid window to be passed by the client
///@remarks so, we will wait until it passes a valid window to begin the preview again
mSetPreviewWindowCalled = false;
}
- CAMHAL_LOGDA("NULL ANativeWindow passed to setPreviewWindow");
+ CAMHAL_LOGD("NULL ANativeWindow passed to setPreviewWindow");
return NO_ERROR;
}else if(mDisplayAdapter.get() == NULL)
{
// Need to create the display adapter since it has not been created
// Create display adapter
mDisplayAdapter = new ANativeWindowDisplayAdapter();
+#ifdef OMAP_ENHANCEMENT
+ mDisplayAdapter->setExtendedOps(mExtendedPreviewStreamOps);
+#endif
ret = NO_ERROR;
if(!mDisplayAdapter.get() || ((ret=mDisplayAdapter->initialize())!=NO_ERROR))
{
@@ -1768,6 +2075,148 @@ status_t CameraHal::setPreviewWindow(struct preview_stream_ops *window)
}
+#ifdef OMAP_ENHANCEMENT_CPCAM
+void CameraHal::setExtendedPreviewStreamOps(preview_stream_extended_ops_t *ops)
+{
+ mExtendedPreviewStreamOps = ops;
+}
+
+/**
+  @brief Sets the buffer source (tap-in/tap-out) windows.
+
+  Buffers are provided to CameraHal via these objects for tap-in/tap-out
+  functionality.
+
+  TODO(XXX): this is just going to use preview_stream_ops for now, but we
+  most likely need to extend it when we want more functionality
+
+  @param[in] tapin,tapout The ANativeWindow objects created by Surface flinger
+  @return NO_ERROR If the ANativeWindow objects pass validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::setBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ // If either a tapin or tapout was previously set
+ // we need to clean up and clear capturing
+ if ((!tapout && mBufferSourceAdapter_Out.get()) ||
+ (!tapin && mBufferSourceAdapter_In.get())) {
+ signalEndImageCapture();
+ }
+
+ // Set tapout point
+ // destroy current buffer tapout if NULL tapout is passed
+ if (!tapout) {
+ if (mBufferSourceAdapter_Out.get() != NULL) {
+ CAMHAL_LOGD("NULL tapout passed, destroying buffer tapout adapter");
+ mBufferSourceAdapter_Out.clear();
+ mBufferSourceAdapter_Out = 0;
+ }
+ ret = NO_ERROR;
+ } else if (mBufferSourceAdapter_Out.get() == NULL) {
+ mBufferSourceAdapter_Out = new BufferSourceAdapter();
+ mBufferSourceAdapter_Out->setExtendedOps(mExtendedPreviewStreamOps);
+ if(!mBufferSourceAdapter_Out.get()) {
+ CAMHAL_LOGEA("Couldn't create DisplayAdapter");
+ ret = NO_MEMORY;
+ goto exit;
+ }
+
+ ret = mBufferSourceAdapter_Out->initialize();
+ if (ret != NO_ERROR)
+ {
+ mBufferSourceAdapter_Out.clear();
+ mBufferSourceAdapter_Out = 0;
+ CAMHAL_LOGEA("DisplayAdapter initialize failed");
+ goto exit;
+ }
+
+ // CameraAdapter will be the frame provider for BufferSourceAdapter
+ mBufferSourceAdapter_Out->setFrameProvider(mCameraAdapter);
+
+ // BufferSourceAdapter will use ErrorHandler to send errors back to
+ // the application
+ mBufferSourceAdapter_Out->setErrorHandler(mAppCallbackNotifier.get());
+
+ // Update the display adapter with the new window that is passed from CameraService
+ ret = mBufferSourceAdapter_Out->setPreviewWindow(tapout);
+ if(ret != NO_ERROR) {
+ CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret);
+ goto exit;
+ }
+ } else {
+ // Update the display adapter with the new window that is passed from CameraService
+ freeImageBufs();
+ ret = mBufferSourceAdapter_Out->setPreviewWindow(tapout);
+ if (ret == ALREADY_EXISTS) {
+ // ALREADY_EXISTS should be treated as a noop in this case
+ ret = NO_ERROR;
+ }
+ }
+
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Error while trying to set tapout point");
+ goto exit;
+ }
+
+    // Set tapin point
+ if (!tapin) {
+ if (mBufferSourceAdapter_In.get() != NULL) {
+ CAMHAL_LOGD("NULL tapin passed, destroying buffer tapin adapter");
+ mBufferSourceAdapter_In.clear();
+ mBufferSourceAdapter_In = 0;
+ }
+ ret = NO_ERROR;
+ } else if (mBufferSourceAdapter_In.get() == NULL) {
+ mBufferSourceAdapter_In = new BufferSourceAdapter();
+ mBufferSourceAdapter_In->setExtendedOps(mExtendedPreviewStreamOps);
+ if(!mBufferSourceAdapter_In.get()) {
+ CAMHAL_LOGEA("Couldn't create DisplayAdapter");
+ ret = NO_MEMORY;
+ goto exit;
+ }
+
+ ret = mBufferSourceAdapter_In->initialize();
+ if (ret != NO_ERROR)
+ {
+ mBufferSourceAdapter_In.clear();
+ mBufferSourceAdapter_In = 0;
+ CAMHAL_LOGEA("DisplayAdapter initialize failed");
+ goto exit;
+ }
+
+ // We need to set a frame provider so camera adapter can return the frame back to us
+ mBufferSourceAdapter_In->setFrameProvider(mCameraAdapter);
+
+ // BufferSourceAdapter will use ErrorHandler to send errors back to
+ // the application
+ mBufferSourceAdapter_In->setErrorHandler(mAppCallbackNotifier.get());
+
+ // Update the display adapter with the new window that is passed from CameraService
+ ret = mBufferSourceAdapter_In->setPreviewWindow(tapin);
+ if(ret != NO_ERROR) {
+ CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret);
+ goto exit;
+ }
+ } else {
+ // Update the display adapter with the new window that is passed from CameraService
+ ret = mBufferSourceAdapter_In->setPreviewWindow(tapin);
+ if (ret == ALREADY_EXISTS) {
+ // ALREADY_EXISTS should be treated as a noop in this case
+ ret = NO_ERROR;
+ }
+ }
+
+ exit:
+ return ret;
+}
+#endif
+
+
/**
@brief Stop a previously started preview.
@@ -1785,7 +2234,7 @@ void CameraHal::stopPreview()
return;
}
- bool imageCaptureRunning = (mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE) &&
+ bool imageCaptureRunning = (mCameraAdapter->getState() & CameraAdapter::CAPTURE_STATE) &&
(mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE);
if(mDisplayPaused && !imageCaptureRunning)
{
@@ -1857,23 +2306,27 @@ status_t CameraHal::startRecording( )
// set internal recording hint in case camera adapter needs to make some
// decisions....(will only be sent to camera adapter if camera restart is required)
- mParameters.set(TICameraParameters::KEY_RECORDING_HINT, CameraParameters::TRUE);
+ mParameters.set(TICameraParameters::KEY_RECORDING_HINT, android::CameraParameters::TRUE);
// if application starts recording in continuous focus picture mode...
// then we need to force default capture mode (as opposed to video mode)
- if ( ((valstr = mParameters.get(CameraParameters::KEY_FOCUS_MODE)) != NULL) &&
- (strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0) ){
+ if ( ((valstr = mParameters.get(android::CameraParameters::KEY_FOCUS_MODE)) != NULL) &&
+ (strcmp(valstr, android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0) ){
restartPreviewRequired = resetVideoModeParameters();
}
// only need to check recording hint if preview restart is not already needed
- valstr = mParameters.get(CameraParameters::KEY_RECORDING_HINT);
+ valstr = mParameters.get(android::CameraParameters::KEY_RECORDING_HINT);
if ( !restartPreviewRequired &&
- (!valstr || (valstr && (strcmp(valstr, CameraParameters::TRUE) != 0))) ) {
+ (!valstr || (valstr && (strcmp(valstr, android::CameraParameters::TRUE) != 0))) ) {
restartPreviewRequired = setVideoModeParameters(mParameters);
}
if (restartPreviewRequired) {
+ {
+ android::AutoMutex lock(mLock);
+ mCapModeBackup = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ }
ret = restartPreview();
}
@@ -1895,13 +2348,13 @@ status_t CameraHal::startRecording( )
mAppCallbackNotifier->useVideoBuffers(true);
mAppCallbackNotifier->setVideoRes(mVideoWidth, mVideoHeight);
- ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, count, mVideoBufs);
+ ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBuffers, mPreviewOffsets, mPreviewFd, mPreviewLength, count, mVideoBuffers);
}
else
{
mAppCallbackNotifier->useVideoBuffers(false);
mAppCallbackNotifier->setVideoRes(mPreviewWidth, mPreviewHeight);
- ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, count, NULL);
+ ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBuffers, mPreviewOffsets, mPreviewFd, mPreviewLength, count, NULL);
}
}
@@ -1938,12 +2391,11 @@ status_t CameraHal::startRecording( )
@todo Modify the policies for enabling VSTAB & VNF usecase based later.
*/
-bool CameraHal::setVideoModeParameters(const CameraParameters& params)
+bool CameraHal::setVideoModeParameters(const android::CameraParameters& params)
{
const char *valstr = NULL;
const char *valstrRemote = NULL;
bool restartPreviewRequired = false;
- status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
@@ -1957,53 +2409,56 @@ bool CameraHal::setVideoModeParameters(const CameraParameters& params)
restartPreviewRequired = true;
}
- // Check if CAPTURE_MODE is VIDEO_MODE, since VSTAB & VNF work only in VIDEO_MODE.
- valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
- if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0) {
- valstrRemote = params.get(CameraParameters::KEY_VIDEO_STABILIZATION);
- // set VSTAB. restart is required if vstab value has changed
- if ( valstrRemote != NULL) {
- // make sure we support vstab
- if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
- CameraParameters::TRUE) == 0) {
- valstr = mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION);
- // vstab value has changed
- if ((valstr != NULL) &&
- strcmp(valstr, valstrRemote) != 0) {
- restartPreviewRequired = true;
- }
- mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION, valstrRemote);
+ // set VSTAB. restart is required if vstab value has changed
+ if ( (valstrRemote = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL ) {
+ // make sure we support vstab
+ if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
+ android::CameraParameters::TRUE) == 0) {
+ valstr = mParameters.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ // vstab value has changed
+ if ((valstr != NULL) &&
+ strcmp(valstr, valstrRemote) != 0) {
+ restartPreviewRequired = true;
}
- } else if (mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION)) {
- // vstab was configured but now unset
- restartPreviewRequired = true;
- mParameters.remove(CameraParameters::KEY_VIDEO_STABILIZATION);
+ mParameters.set(android::CameraParameters::KEY_VIDEO_STABILIZATION,
+ valstrRemote);
}
+ } else if (mParameters.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) {
+ // vstab was configured but now unset
+ restartPreviewRequired = true;
+ mParameters.remove(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ }
- // Set VNF
- valstrRemote = params.get(TICameraParameters::KEY_VNF);
- if ( valstrRemote == NULL) {
- CAMHAL_LOGDA("Enable VNF");
- mParameters.set(TICameraParameters::KEY_VNF, "1");
+ // Set VNF
+ if ((valstrRemote = params.get(TICameraParameters::KEY_VNF)) == NULL) {
+ CAMHAL_LOGDA("Enable VNF");
+ mParameters.set(TICameraParameters::KEY_VNF, android::CameraParameters::TRUE);
+ restartPreviewRequired = true;
+ } else {
+ valstr = mParameters.get(TICameraParameters::KEY_VNF);
+ if (valstr && strcmp(valstr, valstrRemote) != 0) {
restartPreviewRequired = true;
- } else {
- valstr = mParameters.get(TICameraParameters::KEY_VNF);
- if (valstr && strcmp(valstr, valstrRemote) != 0) {
- restartPreviewRequired = true;
- }
- mParameters.set(TICameraParameters::KEY_VNF, valstrRemote);
}
+ mParameters.set(TICameraParameters::KEY_VNF, valstrRemote);
+ }
+#if !defined(OMAP_ENHANCEMENT) && !defined(ENHANCED_DOMX)
// For VSTAB alone for 1080p resolution, padded width goes > 2048, which cannot be rendered by GPU.
// In such case, there is support in Ducati for combination of VSTAB & VNF requiring padded width < 2048.
// So we are forcefully enabling VNF, if VSTAB is enabled for 1080p resolution.
- valstr = mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION);
- if (valstr && (strcmp(valstr, CameraParameters::TRUE) == 0) && (mPreviewWidth == 1920)) {
+ int w, h;
+ params.getPreviewSize(&w, &h);
+ valstr = mParameters.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ if (valstr && (strcmp(valstr, android::CameraParameters::TRUE) == 0) && (w == 1920)) {
CAMHAL_LOGDA("Force Enable VNF for 1080p");
- mParameters.set(TICameraParameters::KEY_VNF, "1");
- restartPreviewRequired = true;
+ const char *valKeyVnf = mParameters.get(TICameraParameters::KEY_VNF);
+ if(!valKeyVnf || (strcmp(valKeyVnf, android::CameraParameters::TRUE) != 0)) {
+ mParameters.set(TICameraParameters::KEY_VNF, android::CameraParameters::TRUE);
+ restartPreviewRequired = true;
+ }
}
- }
+#endif
+
LOG_FUNCTION_NAME_EXIT;
return restartPreviewRequired;
@@ -2055,31 +2510,22 @@ bool CameraHal::resetVideoModeParameters()
*/
status_t CameraHal::restartPreview()
{
- const char *valstr = NULL;
- char tmpvalstr[30];
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
// Retain CAPTURE_MODE before calling stopPreview(), since it is reset in stopPreview().
- tmpvalstr[0] = 0;
- valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
- if(valstr != NULL)
- {
- if(sizeof(tmpvalstr) < (strlen(valstr)+1))
- {
- return -EINVAL;
- }
-
- strncpy(tmpvalstr, valstr, sizeof(tmpvalstr));
- tmpvalstr[sizeof(tmpvalstr)-1] = 0;
- }
forceStopPreview();
{
- Mutex::Autolock lock(mLock);
- mParameters.set(TICameraParameters::KEY_CAP_MODE, tmpvalstr);
+ android::AutoMutex lock(mLock);
+ if (!mCapModeBackup.isEmpty()) {
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, mCapModeBackup.string());
+ mCapModeBackup = "";
+ } else {
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, "");
+ }
mCameraAdapter->setParameters(mParameters);
}
@@ -2103,7 +2549,7 @@ void CameraHal::stopRecording()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if (!mRecordingEnabled )
{
@@ -2122,12 +2568,12 @@ void CameraHal::stopRecording()
mRecordingEnabled = false;
if ( mAppCallbackNotifier->getUesVideoBuffers() ){
- freeVideoBufs(mVideoBufs);
- if (mVideoBufs){
- CAMHAL_LOGVB(" FREEING mVideoBufs 0x%x", mVideoBufs);
- delete [] mVideoBufs;
+ freeVideoBufs(mVideoBuffers);
+ if (mVideoBuffers){
+ CAMHAL_LOGVB(" FREEING mVideoBuffers %p", mVideoBuffers);
+ delete [] mVideoBuffers;
}
- mVideoBufs = NULL;
+ mVideoBuffers = NULL;
}
// reset internal recording hint in case camera adapter needs to make some
@@ -2203,7 +2649,7 @@ status_t CameraHal::autoFocus()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mMsgEnabled |= CAMERA_MSG_FOCUS;
@@ -2260,13 +2706,13 @@ status_t CameraHal::cancelAutoFocus()
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
- CameraParameters adapterParams = mParameters;
+ android::AutoMutex lock(mLock);
+ android::CameraParameters adapterParams = mParameters;
mMsgEnabled &= ~CAMERA_MSG_FOCUS;
if( NULL != mCameraAdapter )
{
- adapterParams.set(TICameraParameters::KEY_AUTO_FOCUS_LOCK, CameraParameters::FALSE);
+ adapterParams.set(TICameraParameters::KEY_AUTO_FOCUS_LOCK, android::CameraParameters::FALSE);
mCameraAdapter->setParameters(adapterParams);
mCameraAdapter->sendCommand(CameraAdapter::CAMERA_CANCEL_AUTOFOCUS);
mAppCallbackNotifier->flushEventQueue();
@@ -2315,34 +2761,17 @@ void CameraHal::eventCallback(CameraHalEvent* event)
{
LOG_FUNCTION_NAME;
- if ( NULL != event )
- {
- switch( event->mEventType )
- {
- case CameraHalEvent::EVENT_FOCUS_LOCKED:
- case CameraHalEvent::EVENT_FOCUS_ERROR:
- {
- if ( mBracketingEnabled )
- {
- startImageBracketing();
- }
- break;
- }
- default:
- {
- break;
- }
- };
- }
-
LOG_FUNCTION_NAME_EXIT;
}
status_t CameraHal::startImageBracketing()
{
- status_t ret = NO_ERROR;
- CameraFrame frame;
- CameraAdapter::BuffersDescriptor desc;
+ status_t ret = NO_ERROR;
+ CameraFrame frame;
+ CameraAdapter::BuffersDescriptor desc;
+ unsigned int max_queueable = 0;
+
+
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
@@ -2390,6 +2819,7 @@ status_t CameraHal::startImageBracketing()
if ( NO_ERROR == ret )
{
+ unsigned int bufferCount = mBracketRangeNegative + 1;
mParameters.getPictureSize(( int * ) &frame.mWidth,
( int * ) &frame.mHeight);
@@ -2397,7 +2827,9 @@ status_t CameraHal::startImageBracketing()
frame.mHeight,
frame.mLength,
mParameters.getPictureFormat(),
- ( mBracketRangeNegative + 1 ));
+ bufferCount,
+ &max_queueable);
+ mBracketRangeNegative = bufferCount - 1;
if ( NO_ERROR != ret )
{
CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
@@ -2407,12 +2839,12 @@ status_t CameraHal::startImageBracketing()
if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
{
- desc.mBuffers = mImageBufs;
+ desc.mBuffers = mImageBuffers;
desc.mOffsets = mImageOffsets;
desc.mFd = mImageFd;
desc.mLength = mImageLength;
desc.mCount = ( size_t ) ( mBracketRangeNegative + 1 );
- desc.mMaxQueueable = ( size_t ) ( mBracketRangeNegative + 1 );
+ desc.mMaxQueueable = ( size_t) max_queueable;
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE,
( int ) &desc);
@@ -2465,16 +2897,36 @@ status_t CameraHal::stopImageBracketing()
@todo Define error codes if unable to switch to image capture
*/
-status_t CameraHal::takePicture( )
+status_t CameraHal::takePicture(const char *params)
{
+ android::AutoMutex lock(mLock);
+ return __takePicture(params);
+}
+
+/**
+ @brief Internal function for getting a captured image.
+ shared by takePicture and reprocess.
+ @param none
+ @return NO_ERROR If able to switch to image capture
+ @todo Define error codes if unable to switch to image capture
+
+ */
+status_t CameraHal::__takePicture(const char *params)
+{
+ // cancel AF state if needed (before any operation and mutex lock)
+ if (mCameraAdapter->getState() == CameraAdapter::AF_STATE) {
+ cancelAutoFocus();
+ }
+
status_t ret = NO_ERROR;
CameraFrame frame;
CameraAdapter::BuffersDescriptor desc;
- int burst;
+ int burst = -1;
const char *valstr = NULL;
unsigned int bufferCount = 1;
-
- Mutex::Autolock lock(mLock);
+ unsigned int max_queueable = 0;
+ unsigned int rawBufferCount = 1;
+ bool isCPCamMode = false;
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
@@ -2491,53 +2943,132 @@ status_t CameraHal::takePicture( )
return NO_INIT;
}
+ valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+
+ isCPCamMode = valstr && !strcmp(valstr, TICameraParameters::CP_CAM_MODE);
+
// return error if we are already capturing
- if ( (mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE &&
+ // however, we can queue a capture when in cpcam mode
+ if ( ((mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE &&
mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE) ||
(mCameraAdapter->getState() == CameraAdapter::VIDEO_CAPTURE_STATE &&
- mCameraAdapter->getNextState() != CameraAdapter::VIDEO_STATE) ) {
+ mCameraAdapter->getNextState() != CameraAdapter::VIDEO_STATE)) &&
+ !isCPCamMode) {
CAMHAL_LOGEA("Already capturing an image...");
return NO_INIT;
}
// we only support video snapshot if we are in video mode (recording hint is set)
- valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
if ( (mCameraAdapter->getState() == CameraAdapter::VIDEO_STATE) &&
(valstr && strcmp(valstr, TICameraParameters::VIDEO_MODE)) ) {
CAMHAL_LOGEA("Trying to capture while recording without recording hint set...");
return INVALID_OPERATION;
}
- if ( !mBracketingRunning )
- {
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ // check if camera application is using shots parameters
+ // api. parameters set here override anything set using setParameters
+ // TODO(XXX): Just going to use legacy TI parameters for now. Need
+ // add new APIs in CameraHal to utilize ShotParameters later, so
+ // we don't have to parse through the whole set of parameters
+ // in camera adapter
+ if (strlen(params) > 0) {
+ android::ShotParameters shotParams;
+ const char *valStr;
+ const char *valExpComp, *valExpGain;
+ int valNum;
+
+ android::String8 shotParams8(params);
+
+ shotParams.unflatten(shotParams8);
+ mParameters.remove(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE);
+ mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
+
+ valExpGain = shotParams.get(android::ShotParameters::KEY_EXP_GAIN_PAIRS);
+ valExpComp = shotParams.get(android::ShotParameters::KEY_EXP_COMPENSATION);
+ if (NULL != valExpComp) {
+ mParameters.set(TICameraParameters::KEY_EXP_BRACKETING_RANGE, valExpComp);
+ } else if (NULL != valExpGain) {
+ mParameters.set(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE, valExpGain);
+ }
- if ( NO_ERROR == ret )
- {
- burst = mParameters.getInt(TICameraParameters::KEY_BURST);
+ valNum = shotParams.getInt(android::ShotParameters::KEY_BURST);
+ if (valNum >= 0) {
+ mParameters.set(TICameraParameters::KEY_BURST, valNum);
+ burst = valNum;
+ }
+
+ valStr = shotParams.get(android::ShotParameters::KEY_FLUSH_CONFIG);
+ if (valStr!= NULL) {
+ if ( 0 == strcmp(valStr, android::ShotParameters::TRUE) ) {
+ mParameters.set(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE,
+ android::CameraParameters::TRUE);
+ } else if ( 0 == strcmp(valStr, android::ShotParameters::FALSE) ) {
+ mParameters.set(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE,
+ android::CameraParameters::FALSE);
}
+ }
+
+ mCameraAdapter->setParameters(mParameters);
+ } else
+#endif
+ {
+ // TODO(XXX): Should probably reset burst and bracketing params
+ // when we remove legacy TI parameters implementation
+ }
+
+ // if we are already in the middle of a capture...then we just need
+ // setParameters and start image capture to queue more shots
+ if (((mCameraAdapter->getState() & CameraAdapter::CAPTURE_STATE) ==
+ CameraAdapter::CAPTURE_STATE) &&
+ (mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE)) {
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ //pass capture timestamp along with the camera adapter command
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE,
+ (int) &mStartCapture);
+#else
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE);
+#endif
+ return ret;
+ }
+
+ if ( !mBracketingRunning )
+ {
+ // if application didn't set burst through ShotParameters
+ // then query from TICameraParameters
+ if ((burst == -1) && (NO_ERROR == ret)) {
+ burst = mParameters.getInt(TICameraParameters::KEY_BURST);
+ }
//Allocate all buffers only in burst capture case
- if ( burst > 1 )
- {
- bufferCount = CameraHal::NO_BUFFERS_IMAGE_CAPTURE;
- if ( NULL != mAppCallbackNotifier.get() )
- {
+ if ( burst > 0 ) {
+ // For CPCam mode...allocate for worst case burst
+ bufferCount = isCPCamMode || (burst > CameraHal::NO_BUFFERS_IMAGE_CAPTURE) ?
+ CameraHal::NO_BUFFERS_IMAGE_CAPTURE : burst;
+
+ if (mBufferSourceAdapter_Out.get()) {
+ // TODO(XXX): Temporarily increase number of buffers we can allocate from ANW
+ // until faux-NPA mode is implemented
+ bufferCount = NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
+ }
+
+ if ( NULL != mAppCallbackNotifier.get() ) {
mAppCallbackNotifier->setBurst(true);
- }
}
- else
- {
- if ( NULL != mAppCallbackNotifier.get() )
- {
+ } else if ( mBracketingEnabled ) {
+ bufferCount = mBracketRangeNegative + 1;
+ if ( NULL != mAppCallbackNotifier.get() ) {
+ mAppCallbackNotifier->setBurst(false);
+ }
+ } else {
+ if ( NULL != mAppCallbackNotifier.get() ) {
mAppCallbackNotifier->setBurst(false);
- }
}
+ }
// pause preview during normal image capture
// do not pause preview if recording (video state)
- if (NO_ERROR == ret &&
- NULL != mDisplayAdapter.get() &&
- burst < 1) {
+ if ( (NO_ERROR == ret) && (NULL != mDisplayAdapter.get()) ) {
if (mCameraAdapter->getState() != CameraAdapter::VIDEO_STATE) {
mDisplayPaused = true;
mPreviewEnabled = false;
@@ -2577,14 +3108,12 @@ status_t CameraHal::takePicture( )
if ( NO_ERROR == ret )
{
- mParameters.getPictureSize(( int * ) &frame.mWidth,
- ( int * ) &frame.mHeight);
-
- ret = allocImageBufs(frame.mWidth,
+ ret = allocImageBufs(frame.mAlignment,
frame.mHeight,
frame.mLength,
mParameters.getPictureFormat(),
- bufferCount);
+ bufferCount,
+ &max_queueable);
if ( NO_ERROR != ret )
{
CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
@@ -2593,20 +3122,48 @@ status_t CameraHal::takePicture( )
if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
{
- desc.mBuffers = mImageBufs;
+ desc.mBuffers = mImageBuffers;
desc.mOffsets = mImageOffsets;
desc.mFd = mImageFd;
desc.mLength = mImageLength;
desc.mCount = ( size_t ) bufferCount;
- desc.mMaxQueueable = ( size_t ) bufferCount;
+ desc.mMaxQueueable = ( size_t ) max_queueable;
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE,
( int ) &desc);
}
+ if (mRawCapture) {
+ if ( NO_ERROR == ret ) {
+ CAMHAL_LOGDB("Raw capture buffers setup - %s", mParameters.getPictureFormat());
+ ret = allocRawBufs(mParameters.getInt(TICameraParameters::RAW_WIDTH),
+ mParameters.getInt(TICameraParameters::RAW_HEIGHT),
+ android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB,
+ rawBufferCount);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("allocRawBufs (for RAW capture) returned error 0x%x", ret);
+ }
+ }
+
+ if ((NO_ERROR == ret) && ( NULL != mCameraAdapter )) {
+ desc.mBuffers = mVideoBuffers;
+ desc.mOffsets = mVideoOffsets;
+ desc.mFd = mVideoFd;
+ desc.mLength = mVideoLength;
+ desc.mCount = ( size_t ) rawBufferCount;
+ desc.mMaxQueueable = ( size_t ) rawBufferCount;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_VIDEO_CAPTURE,
+ ( int ) &desc);
+ }
}
+ }
- if ( ( NO_ERROR == ret ) && ( NULL != mCameraAdapter ) )
- {
+ if ((ret == NO_ERROR) && mBufferSourceAdapter_Out.get()) {
+ mBufferSourceAdapter_Out->enableDisplay(0, 0, NULL);
+ }
+
+ if ((NO_ERROR == ret) && (NULL != mCameraAdapter)) {
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
@@ -2619,7 +3176,7 @@ status_t CameraHal::takePicture( )
#endif
- }
+ }
return ret;
}
@@ -2637,11 +3194,9 @@ status_t CameraHal::takePicture( )
status_t CameraHal::cancelPicture( )
{
LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
- Mutex::Autolock lock(mLock);
-
- mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
-
+ ret = signalEndImageCapture();
return NO_ERROR;
}
@@ -2654,7 +3209,7 @@ status_t CameraHal::cancelPicture( )
*/
char* CameraHal::getParameters()
{
- String8 params_str8;
+ android::String8 params_str8;
char* params_string;
const char * valstr = NULL;
@@ -2665,16 +3220,38 @@ char* CameraHal::getParameters()
mCameraAdapter->getParameters(mParameters);
}
- CameraParameters mParams = mParameters;
+ if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT)) != NULL ) {
+ if (!strcmp(TICameraParameters::S3D_TB_FULL, valstr)) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
+ } else if (!strcmp(TICameraParameters::S3D_SS_FULL, valstr)) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
+ } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, valstr))
+ || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, valstr))) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
+ }
+ }
+
+ if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT)) != NULL ) {
+ if (!strcmp(TICameraParameters::S3D_TB_FULL, valstr)) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+ } else if (!strcmp(TICameraParameters::S3D_SS_FULL, valstr)) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+ } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, valstr))
+ || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, valstr))) {
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+ }
+ }
+
+ android::CameraParameters mParams = mParameters;
// Handle RECORDING_HINT to Set/Reset Video Mode Parameters
- valstr = mParameters.get(CameraParameters::KEY_RECORDING_HINT);
+ valstr = mParameters.get(android::CameraParameters::KEY_RECORDING_HINT);
if(valstr != NULL)
{
- if(strcmp(valstr, CameraParameters::TRUE) == 0)
+ if(strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
//HACK FOR MMS MODE
- resetPreviewRes(&mParams, mVideoWidth, mVideoHeight);
+ resetPreviewRes(&mParams);
}
}
@@ -2695,6 +3272,80 @@ char* CameraHal::getParameters()
return params_string;
}
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+/**
+ @brief Starts reprocessing operation.
+ */
+status_t CameraHal::reprocess(const char *params)
+{
+ status_t ret = NO_ERROR;
+ int bufferCount = 0;
+ CameraAdapter::BuffersDescriptor desc;
+ CameraBuffer *reprocBuffers = NULL;
+ android::ShotParameters shotParams;
+
+ android::AutoMutex lock(mLock);
+
+ LOG_FUNCTION_NAME;
+
+ // 1. Get buffers
+ if (mBufferSourceAdapter_In.get()) {
+ reprocBuffers = mBufferSourceAdapter_In->getBufferList(&bufferCount);
+ }
+
+ if (!reprocBuffers) {
+ CAMHAL_LOGE("Error: couldn't get input buffers for reprocess()");
+ goto exit;
+ }
+
+ // 2. Get buffer information and parse parameters
+ {
+ shotParams.setBurst(bufferCount);
+ }
+
+ // 3. Give buffer to camera adapter
+ desc.mBuffers = reprocBuffers;
+ desc.mOffsets = 0;
+ desc.mFd = 0;
+ desc.mLength = 0;
+ desc.mCount = (size_t) bufferCount;
+ desc.mMaxQueueable = (size_t) bufferCount;
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_REPROCESS, (int) &desc);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Error calling camera use buffers");
+ goto exit;
+ }
+
+ // 4. Start reprocessing
+ ret = mBufferSourceAdapter_In->enableDisplay(0, 0, NULL);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Error enabling tap in point");
+ goto exit;
+ }
+
+ // 5. Start capturing
+ ret = __takePicture(shotParams.flatten().string());
+
+ exit:
+ return ret;
+}
+
+/**
+ @brief Cancels current reprocessing operation
+
+ */
+status_t CameraHal::cancel_reprocess( )
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ ret = signalEndImageCapture();
+ return NO_ERROR;
+}
+#endif
+
+
void CameraHal::putParameters(char *parms)
{
free(parms);
@@ -2714,7 +3365,6 @@ status_t CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
LOG_FUNCTION_NAME;
-
if ( ( NO_ERROR == ret ) && ( NULL == mCameraAdapter ) )
{
CAMHAL_LOGEA("No CameraAdapter instance");
@@ -2724,24 +3374,31 @@ status_t CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
///////////////////////////////////////////////////////
// Following commands do NOT need preview to be started
///////////////////////////////////////////////////////
- switch(cmd) {
- case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
- bool enable = static_cast<bool>(arg1);
- Mutex::Autolock lock(mLock);
- if (enable) {
- mMsgEnabled |= CAMERA_MSG_FOCUS_MOVE;
- } else {
- mMsgEnabled &= ~CAMERA_MSG_FOCUS_MOVE;
- }
- return NO_ERROR;
- break;
+
+ switch ( cmd ) {
+#ifdef ANDROID_API_JB_OR_LATER
+ case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
+ {
+ const bool enable = static_cast<bool>(arg1);
+ android::AutoMutex lock(mLock);
+ if ( enable ) {
+ mMsgEnabled |= CAMERA_MSG_FOCUS_MOVE;
+ } else {
+ mMsgEnabled &= ~CAMERA_MSG_FOCUS_MOVE;
+ }
+ }
+ return OK;
+#endif
}
- if ( ( NO_ERROR == ret ) && ( !previewEnabled() ))
- {
+ if ( ret == OK && !previewEnabled()
+#ifdef OMAP_ENHANCEMENT_VTC
+ && (cmd != CAMERA_CMD_PREVIEW_INITIALIZATION)
+#endif
+ ) {
CAMHAL_LOGEA("Preview is not running");
ret = -EINVAL;
- }
+ }
///////////////////////////////////////////////////////
// Following commands NEED preview to be started
@@ -2760,7 +3417,7 @@ status_t CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM);
break;
-#ifndef OMAP_TUNA
+
case CAMERA_CMD_START_FACE_DETECTION:
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_FD);
@@ -2772,7 +3429,32 @@ status_t CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_FD);
break;
+
+#ifdef OMAP_ENHANCEMENT_VTC
+ case CAMERA_CMD_PREVIEW_DEINITIALIZATION:
+ if(mDisplayAdapter.get() != NULL) {
+ ///Stop the buffer display first
+ mDisplayAdapter->disableDisplay();
+ }
+
+ if(mAppCallbackNotifier.get() != NULL) {
+ //Stop the callback sending
+ mAppCallbackNotifier->stop();
+ mAppCallbackNotifier->flushAndReturnFrames();
+ mAppCallbackNotifier->stopPreviewCallbacks();
+ }
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_DESTROY_TUNNEL);
+ mTunnelSetup = false;
+
+ break;
+
+ case CAMERA_CMD_PREVIEW_INITIALIZATION:
+ ret = cameraPreviewInitialization();
+
+ break;
#endif
+
default:
break;
};
@@ -2838,11 +3520,11 @@ CameraHal::CameraHal(int cameraId)
///Initialize all the member variables to their defaults
mPreviewEnabled = false;
- mPreviewBufs = NULL;
- mImageBufs = NULL;
+ mPreviewBuffers = NULL;
+ mImageBuffers = NULL;
mBufProvider = NULL;
mPreviewStartInProgress = false;
- mVideoBufs = NULL;
+ mVideoBuffers = NULL;
mVideoBufProvider = NULL;
mRecordingEnabled = false;
mDisplayPaused = false;
@@ -2859,7 +3541,7 @@ CameraHal::CameraHal(int cameraId)
mMaxZoomSupported = 0;
mShutterEnabled = true;
mMeasurementEnabled = false;
- mPreviewDataBufs = NULL;
+ mPreviewDataBuffers = NULL;
mCameraProperties = NULL;
mCurrentTime = 0;
mFalsePreview = 0;
@@ -2884,6 +3566,19 @@ CameraHal::CameraHal(int cameraId)
mSensorListener = NULL;
mVideoWidth = 0;
mVideoHeight = 0;
+#ifdef OMAP_ENHANCEMENT_VTC
+ mVTCUseCase = false;
+ mTunnelSetup = false;
+#endif
+ mPreviewInitializationDone = false;
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ mExtendedPreviewStreamOps = 0;
+#endif
+
+ //These values depends on the sensor characteristics
+
+ mRawCapture = false;
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
@@ -2933,6 +3628,7 @@ CameraHal::~CameraHal()
}
freeImageBufs();
+ freeRawBufs();
/// Free the memory manager
mMemoryManager.clear();
@@ -2957,6 +3653,7 @@ status_t CameraHal::initialize(CameraProperties::Properties* properties)
LOG_FUNCTION_NAME;
int sensor_index = 0;
+ const char* sensor_name = NULL;
///Initialize the event mask used for registering an event provider for AppCallbackNotifier
///Currently, registering all events as to be coming from CameraAdapter
@@ -2979,9 +3676,22 @@ status_t CameraHal::initialize(CameraProperties::Properties* properties)
sensor_index = atoi(mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX));
}
- CAMHAL_LOGDB("Sensor index %d", sensor_index);
+ if (strcmp(CameraProperties::DEFAULT_VALUE, mCameraProperties->get(CameraProperties::CAMERA_NAME)) != 0 ) {
+ sensor_name = mCameraProperties->get(CameraProperties::CAMERA_NAME);
+ }
+ CAMHAL_LOGDB("Sensor index= %d; Sensor name= %s", sensor_index, sensor_name);
+
+ if (strcmp(sensor_name, V4L_CAMERA_NAME_USB) == 0) {
+#ifdef V4L_CAMERA_ADAPTER
+ mCameraAdapter = V4LCameraAdapter_Factory(sensor_index);
+#endif
+ }
+ else {
+#ifdef OMX_CAMERA_ADAPTER
+ mCameraAdapter = OMXCameraAdapter_Factory(sensor_index);
+#endif
+ }
- mCameraAdapter = CameraAdapter_Factory(sensor_index);
if ( ( NULL == mCameraAdapter ) || (mCameraAdapter->initialize(properties)!=NO_ERROR))
{
CAMHAL_LOGEA("Unable to create or initialize CameraAdapter");
@@ -3079,39 +3789,64 @@ status_t CameraHal::initialize(CameraProperties::Properties* properties)
bool CameraHal::isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions)
{
- bool ret = true;
+ bool ret = false;
status_t status = NO_ERROR;
- char tmpBuffer[PARAM_BUFFER + 1];
+ char tmpBuffer[MAX_PROP_VALUE_LENGTH];
char *pos = NULL;
LOG_FUNCTION_NAME;
- if ( NULL == supportedResolutions )
- {
+ if (NULL == supportedResolutions) {
CAMHAL_LOGEA("Invalid supported resolutions string");
- ret = false;
goto exit;
- }
+ }
- status = snprintf(tmpBuffer, PARAM_BUFFER, "%dx%d", width, height);
- if ( 0 > status )
- {
+ status = snprintf(tmpBuffer, MAX_PROP_VALUE_LENGTH - 1, "%dx%d", width, height);
+ if (0 > status) {
CAMHAL_LOGEA("Error encountered while generating validation string");
- ret = false;
goto exit;
- }
+ }
- pos = strstr(supportedResolutions, tmpBuffer);
- if ( NULL == pos )
- {
- ret = false;
- }
- else
- {
- ret = true;
- }
+ ret = isParameterValid(tmpBuffer, supportedResolutions);
exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+bool CameraHal::isFpsRangeValid(int fpsMin, int fpsMax, const char *supportedFpsRanges)
+{
+ bool ret = false;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ char *pos;
+ int suppFpsRangeArray[2];
+ int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedFpsRanges ) {
+ CAMHAL_LOGEA("Invalid supported FPS ranges string");
+ return false;
+ }
+
+ if (fpsMin <= 0 || fpsMax <= 0 || fpsMin > fpsMax) {
+ return false;
+ }
+
+ strncpy(supported, supportedFpsRanges, MAX_PROP_VALUE_LENGTH);
+ pos = strtok(supported, " (,)");
+ while (pos != NULL) {
+ suppFpsRangeArray[i] = atoi(pos);
+ if (i++) {
+ if (fpsMin >= suppFpsRangeArray[0] && fpsMax <= suppFpsRangeArray[1]) {
+ ret = true;
+ break;
+ }
+ i = 0;
+ }
+ pos = strtok(NULL, " (,)");
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -3120,37 +3855,34 @@ exit:
bool CameraHal::isParameterValid(const char *param, const char *supportedParams)
{
- bool ret = true;
- char *pos = NULL;
+ bool ret = false;
+ char *pos;
+ char supported[MAX_PROP_VALUE_LENGTH];
LOG_FUNCTION_NAME;
- if ( NULL == supportedParams )
- {
+ if (NULL == supportedParams) {
CAMHAL_LOGEA("Invalid supported parameters string");
- ret = false;
goto exit;
- }
+ }
- if ( NULL == param )
- {
+ if (NULL == param) {
CAMHAL_LOGEA("Invalid parameter string");
- ret = false;
goto exit;
- }
+ }
- pos = strstr(supportedParams, param);
- if ( NULL == pos )
- {
- ret = false;
- }
- else
- {
- ret = true;
+ strncpy(supported, supportedParams, MAX_PROP_VALUE_LENGTH - 1);
+
+ pos = strtok(supported, ",");
+ while (pos != NULL) {
+ if (!strcmp(pos, param)) {
+ ret = true;
+ break;
}
+ pos = strtok(NULL, ",");
+ }
exit:
-
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -3158,40 +3890,26 @@ exit:
bool CameraHal::isParameterValid(int param, const char *supportedParams)
{
- bool ret = true;
- char *pos = NULL;
+ bool ret = false;
status_t status;
- char tmpBuffer[PARAM_BUFFER + 1];
+ char tmpBuffer[MAX_PROP_VALUE_LENGTH];
LOG_FUNCTION_NAME;
- if ( NULL == supportedParams )
- {
+ if (NULL == supportedParams) {
CAMHAL_LOGEA("Invalid supported parameters string");
- ret = false;
goto exit;
- }
+ }
- status = snprintf(tmpBuffer, PARAM_BUFFER, "%d", param);
- if ( 0 > status )
- {
+ status = snprintf(tmpBuffer, MAX_PROP_VALUE_LENGTH - 1, "%d", param);
+ if (0 > status) {
CAMHAL_LOGEA("Error encountered while generating validation string");
- ret = false;
goto exit;
- }
+ }
- pos = strstr(supportedParams, tmpBuffer);
- if ( NULL == pos )
- {
- ret = false;
- }
- else
- {
- ret = true;
- }
+ ret = isParameterValid(tmpBuffer, supportedParams);
exit:
-
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -3215,7 +3933,6 @@ status_t CameraHal::parseResolution(const char *resStr, int &width, int &height)
status_t ret = NO_ERROR;
char *ctx, *pWidth, *pHeight;
const char *sep = "x";
- char *tmp = NULL;
LOG_FUNCTION_NAME;
@@ -3226,11 +3943,10 @@ status_t CameraHal::parseResolution(const char *resStr, int &width, int &height)
//This fixes "Invalid input resolution"
char *resStr_copy = (char *)malloc(strlen(resStr) + 1);
- if ( NULL!=resStr_copy ) {
- if ( NO_ERROR == ret )
+ if ( NULL != resStr_copy )
{
strcpy(resStr_copy, resStr);
- pWidth = strtok_r( (char *) resStr_copy, sep, &ctx);
+ pWidth = strtok_r(resStr_copy, sep, &ctx);
if ( NULL != pWidth )
{
@@ -3258,9 +3974,9 @@ status_t CameraHal::parseResolution(const char *resStr, int &width, int &height)
}
}
- free(resStr_copy);
- resStr_copy = NULL;
- }
+ free(resStr_copy);
+ resStr_copy = NULL;
+
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -3268,51 +3984,65 @@ status_t CameraHal::parseResolution(const char *resStr, int &width, int &height)
void CameraHal::insertSupportedParams()
{
- char tmpBuffer[PARAM_BUFFER + 1];
-
LOG_FUNCTION_NAME;
- CameraParameters &p = mParameters;
+ android::CameraParameters &p = mParameters;
///Set the name of the camera
p.set(TICameraParameters::KEY_CAMERA_NAME, mCameraProperties->get(CameraProperties::CAMERA_NAME));
mMaxZoomSupported = atoi(mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
- p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
- p.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS));
- p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
- p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
- p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
- p.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_THUMBNAIL_SIZES));
- p.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE));
- p.set(CameraParameters::KEY_SUPPORTED_EFFECTS, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS));
- p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
- p.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES));
- p.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES));
- p.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING));
- p.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MAX));
- p.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MIN));
- p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_EV_STEP));
- p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+ p.set(TICameraParameters::KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
+ p.set(TICameraParameters::KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
+ p.set(TICameraParameters::KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
+ p.set(TICameraParameters::KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+ p.set(TICameraParameters::KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+ p.set(TICameraParameters::KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+ p.set(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+ p.set(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED));
+ p.set(android::CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_THUMBNAIL_SIZES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE));
+ p.set(android::CameraParameters::KEY_SUPPORTED_EFFECTS, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS));
+ p.set(android::CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_FLASH_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_ANTIBANDING, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING));
+ p.set(android::CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MAX));
+ p.set(android::CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MIN));
+ p.set(android::CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_EV_STEP));
p.set(TICameraParameters::KEY_SUPPORTED_EXPOSURE, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP));
p.set(TICameraParameters::KEY_SUPPORTED_ISO_VALUES, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES));
- p.set(CameraParameters::KEY_ZOOM_RATIOS, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_RATIOS));
- p.set(CameraParameters::KEY_MAX_ZOOM, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
- p.set(CameraParameters::KEY_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::ZOOM_SUPPORTED));
- p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::SMOOTH_ZOOM_SUPPORTED));
+ p.set(android::CameraParameters::KEY_ZOOM_RATIOS, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_RATIOS));
+ p.set(android::CameraParameters::KEY_MAX_ZOOM, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
+ p.set(android::CameraParameters::KEY_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::ZOOM_SUPPORTED));
+ p.set(android::CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::SMOOTH_ZOOM_SUPPORTED));
p.set(TICameraParameters::KEY_SUPPORTED_IPP, mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES));
- p.set(TICameraParameters::KEY_S3D_SUPPORTED,mCameraProperties->get(CameraProperties::S3D_SUPPORTED));
- p.set(TICameraParameters::KEY_S3D2D_PREVIEW_MODE,mCameraProperties->get(CameraProperties::S3D2D_PREVIEW_MODES));
- p.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE));
- p.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, mCameraProperties->get(CameraProperties::MANUALCONVERGENCE_VALUES));
- p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED));
- p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
- p.set(TICameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION));
- p.set(TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION_VALUES));
- p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED));
- p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED));
- p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, mCameraProperties->get(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED));
+ p.set(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT_VALUES, mCameraProperties->get(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES));
+ p.set(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT_VALUES, mCameraProperties->get(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES));
+ p.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE_VALUES, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE_VALUES));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX));
+ p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_STEP));
+ p.set(android::CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED));
+ p.set(TICameraParameters::KEY_VNF_SUPPORTED, mCameraProperties->get(CameraProperties::VNF_SUPPORTED));
+ p.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED));
+ p.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED));
+ p.set(android::CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, mCameraProperties->get(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED));
+ p.set(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED, mCameraProperties->get(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED));
+ p.set(TICameraParameters::KEY_CAP_MODE_VALUES, mCameraProperties->get(CameraProperties::CAP_MODE_VALUES));
LOG_FUNCTION_NAME_EXIT;
@@ -3323,13 +4053,16 @@ void CameraHal::initDefaultParameters()
//Purpose of this function is to initialize the default current and supported parameters for the currently
//selected camera.
- CameraParameters &p = mParameters;
+ android::CameraParameters &p = mParameters;
int currentRevision, adapterRevision;
status_t ret = NO_ERROR;
int width, height;
+ const char *valstr;
LOG_FUNCTION_NAME;
+ insertSupportedParams();
+
ret = parseResolution(mCameraProperties->get(CameraProperties::PREVIEW_SIZE), width, height);
if ( NO_ERROR == ret )
@@ -3356,62 +4089,78 @@ void CameraHal::initDefaultParameters()
if ( NO_ERROR == ret )
{
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
}
else
{
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, MIN_WIDTH);
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, MIN_HEIGHT);
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, MIN_WIDTH);
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, MIN_HEIGHT);
}
- insertSupportedParams();
-
//Insert default values
p.setPreviewFrameRate(atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE)));
+ p.set(android::CameraParameters::KEY_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
p.setPreviewFormat(mCameraProperties->get(CameraProperties::PREVIEW_FORMAT));
p.setPictureFormat(mCameraProperties->get(CameraProperties::PICTURE_FORMAT));
- p.set(CameraParameters::KEY_JPEG_QUALITY, mCameraProperties->get(CameraProperties::JPEG_QUALITY));
- p.set(CameraParameters::KEY_WHITE_BALANCE, mCameraProperties->get(CameraProperties::WHITEBALANCE));
- p.set(CameraParameters::KEY_EFFECT, mCameraProperties->get(CameraProperties::EFFECT));
- p.set(CameraParameters::KEY_ANTIBANDING, mCameraProperties->get(CameraProperties::ANTIBANDING));
- p.set(CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
- p.set(CameraParameters::KEY_FOCUS_MODE, mCameraProperties->get(CameraProperties::FOCUS_MODE));
- p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::EV_COMPENSATION));
- p.set(CameraParameters::KEY_SCENE_MODE, mCameraProperties->get(CameraProperties::SCENE_MODE));
- p.set(CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
- p.set(CameraParameters::KEY_ZOOM, mCameraProperties->get(CameraProperties::ZOOM));
+ p.set(android::CameraParameters::KEY_JPEG_QUALITY, mCameraProperties->get(CameraProperties::JPEG_QUALITY));
+ p.set(android::CameraParameters::KEY_WHITE_BALANCE, mCameraProperties->get(CameraProperties::WHITEBALANCE));
+ p.set(android::CameraParameters::KEY_EFFECT, mCameraProperties->get(CameraProperties::EFFECT));
+ p.set(android::CameraParameters::KEY_ANTIBANDING, mCameraProperties->get(CameraProperties::ANTIBANDING));
+ p.set(android::CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
+ p.set(android::CameraParameters::KEY_FOCUS_MODE, mCameraProperties->get(CameraProperties::FOCUS_MODE));
+ p.set(android::CameraParameters::KEY_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::EV_COMPENSATION));
+ p.set(android::CameraParameters::KEY_SCENE_MODE, mCameraProperties->get(CameraProperties::SCENE_MODE));
+ p.set(android::CameraParameters::KEY_ZOOM, mCameraProperties->get(CameraProperties::ZOOM));
p.set(TICameraParameters::KEY_CONTRAST, mCameraProperties->get(CameraProperties::CONTRAST));
p.set(TICameraParameters::KEY_SATURATION, mCameraProperties->get(CameraProperties::SATURATION));
p.set(TICameraParameters::KEY_BRIGHTNESS, mCameraProperties->get(CameraProperties::BRIGHTNESS));
p.set(TICameraParameters::KEY_SHARPNESS, mCameraProperties->get(CameraProperties::SHARPNESS));
p.set(TICameraParameters::KEY_EXPOSURE_MODE, mCameraProperties->get(CameraProperties::EXPOSURE_MODE));
+ p.set(TICameraParameters::KEY_MANUAL_EXPOSURE, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN));
+ p.set(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN));
+ p.set(TICameraParameters::KEY_MANUAL_GAIN_ISO, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN));
+ p.set(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN));
p.set(TICameraParameters::KEY_ISO, mCameraProperties->get(CameraProperties::ISO_MODE));
p.set(TICameraParameters::KEY_IPP, mCameraProperties->get(CameraProperties::IPP));
p.set(TICameraParameters::KEY_GBCE, mCameraProperties->get(CameraProperties::GBCE));
- p.set(TICameraParameters::KEY_S3D2D_PREVIEW, mCameraProperties->get(CameraProperties::S3D2D_PREVIEW));
- p.set(TICameraParameters::KEY_AUTOCONVERGENCE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE));
- p.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, mCameraProperties->get(CameraProperties::MANUALCONVERGENCE_VALUES));
- p.set(CameraParameters::KEY_VIDEO_STABILIZATION, mCameraProperties->get(CameraProperties::VSTAB));
- p.set(CameraParameters::KEY_FOCAL_LENGTH, mCameraProperties->get(CameraProperties::FOCAL_LENGTH));
- p.set(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::HOR_ANGLE));
- p.set(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::VER_ANGLE));
- p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
+ p.set(TICameraParameters::KEY_GBCE_SUPPORTED, mCameraProperties->get(CameraProperties::SUPPORTED_GBCE));
+ p.set(TICameraParameters::KEY_GLBCE, mCameraProperties->get(CameraProperties::GLBCE));
+ p.set(TICameraParameters::KEY_GLBCE_SUPPORTED, mCameraProperties->get(CameraProperties::SUPPORTED_GLBCE));
+ p.set(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT, mCameraProperties->get(CameraProperties::S3D_PRV_FRAME_LAYOUT));
+ p.set(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT, mCameraProperties->get(CameraProperties::S3D_CAP_FRAME_LAYOUT));
+ p.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE));
+ p.set(TICameraParameters::KEY_MANUAL_CONVERGENCE, mCameraProperties->get(CameraProperties::MANUAL_CONVERGENCE));
+ p.set(android::CameraParameters::KEY_VIDEO_STABILIZATION, mCameraProperties->get(CameraProperties::VSTAB));
+ p.set(TICameraParameters::KEY_VNF, mCameraProperties->get(CameraProperties::VNF));
+ p.set(android::CameraParameters::KEY_FOCAL_LENGTH, mCameraProperties->get(CameraProperties::FOCAL_LENGTH));
+ p.set(android::CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::HOR_ANGLE));
+ p.set(android::CameraParameters::KEY_VERTICAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::VER_ANGLE));
p.set(TICameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION));
- p.set(TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION_VALUES));
p.set(TICameraParameters::KEY_EXIF_MAKE, mCameraProperties->get(CameraProperties::EXIF_MAKE));
p.set(TICameraParameters::KEY_EXIF_MODEL, mCameraProperties->get(CameraProperties::EXIF_MODEL));
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_QUALITY));
- p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar");
- p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, mCameraProperties->get(CameraProperties::MAX_FD_HW_FACES));
- p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, mCameraProperties->get(CameraProperties::MAX_FD_SW_FACES));
-
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_QUALITY));
+ p.set(android::CameraParameters::KEY_VIDEO_FRAME_FORMAT, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar");
+ p.set(android::CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, mCameraProperties->get(CameraProperties::MAX_FD_HW_FACES));
+ p.set(android::CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, mCameraProperties->get(CameraProperties::MAX_FD_SW_FACES));
+ p.set(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION, mCameraProperties->get(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION));
// Only one area a.k.a Touch AF for now.
// TODO: Add support for multiple focus areas.
- p.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, mCameraProperties->get(CameraProperties::MAX_FOCUS_AREAS));
- p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK));
- p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK));
- p.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, mCameraProperties->get(CameraProperties::MAX_NUM_METERING_AREAS));
+ p.set(android::CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, mCameraProperties->get(CameraProperties::MAX_FOCUS_AREAS));
+ p.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK));
+ p.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK));
+ p.set(android::CameraParameters::KEY_MAX_NUM_METERING_AREAS, mCameraProperties->get(CameraProperties::MAX_NUM_METERING_AREAS));
+ p.set(TICameraParameters::RAW_WIDTH, mCameraProperties->get(CameraProperties::RAW_WIDTH));
+ p.set(TICameraParameters::RAW_HEIGHT,mCameraProperties->get(CameraProperties::RAW_HEIGHT));
+
+ // TI extensions for enable/disable algos
+ // Hadcoded for now
+ p.set(TICameraParameters::KEY_ALGO_FIXED_GAMMA, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_NSF1, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_NSF2, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_SHARPENING, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_THREELINCOLORMAP, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_GIC, android::CameraParameters::TRUE);
LOG_FUNCTION_NAME_EXIT;
}
@@ -3427,7 +4176,9 @@ void CameraHal::forceStopPreview()
LOG_FUNCTION_NAME;
// stop bracketing if it is running
- stopImageBracketing();
+ if ( mBracketingRunning ) {
+ stopImageBracketing();
+ }
if(mDisplayAdapter.get() != NULL) {
///Stop the buffer display first
@@ -3504,66 +4255,51 @@ status_t CameraHal::storeMetaDataInBuffers(bool enable)
LOG_FUNCTION_NAME_EXIT;
}
-void CameraHal::selectFPSRange(int framerate, int *min_fps, int *max_fps)
+void CameraHal::getPreferredPreviewRes(int *width, int *height)
{
- char * ptr;
- char supported[MAX_PROP_VALUE_LENGTH];
- int fpsrangeArray[2];
- int i = 0;
-
- LOG_FUNCTION_NAME;
- size_t size = strlen(mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED))+1;
- strncpy(supported, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED), size);
-
- ptr = strtok (supported," (,)");
+ LOG_FUNCTION_NAME;
- while (ptr != NULL)
- {
- fpsrangeArray[i]= atoi(ptr)/CameraHal::VFR_SCALE;
- if (i == 1)
- {
- if (framerate == fpsrangeArray[i])
- {
- CAMHAL_LOGDB("SETTING FPS RANGE min = %d max = %d \n", fpsrangeArray[0], fpsrangeArray[1]);
- *min_fps = fpsrangeArray[0]*CameraHal::VFR_SCALE;
- *max_fps = fpsrangeArray[1]*CameraHal::VFR_SCALE;
- break;
- }
- }
- ptr = strtok (NULL, " (,)");
- i++;
- i%=2;
+ // We request Ducati for a higher resolution so preview looks better and then downscale the frame before the callback.
+ // TODO: This should be moved to configuration constants and boolean flag whether to provide such optimization
+ // Also consider providing this configurability of the desired display resolution from the application
+ if ( ( *width == 320 ) && ( *height == 240 ) ) {
+ *width = 640;
+ *height = 480;
+ } else if ( ( *width == 176 ) && ( *height == 144 ) ) {
+ *width = 704;
+ *height = 576;
}
- LOG_FUNCTION_NAME_EXIT;
-
+ LOG_FUNCTION_NAME_EXIT;
}
-void CameraHal::setPreferredPreviewRes(int width, int height)
+void CameraHal::resetPreviewRes(android::CameraParameters *params)
{
LOG_FUNCTION_NAME;
- if ( (width == 320) && (height == 240)){
- mParameters.setPreviewSize(640,480);
- }
- if ( (width == 176) && (height == 144)){
- mParameters.setPreviewSize(704,576);
+ if ( (mVideoWidth <= 320) && (mVideoHeight <= 240)){
+ params->setPreviewSize(mVideoWidth, mVideoHeight);
}
LOG_FUNCTION_NAME_EXIT;
}
-void CameraHal::resetPreviewRes(CameraParameters *mParams, int width, int height)
+void *
+camera_buffer_get_omx_ptr (CameraBuffer *buffer)
{
- LOG_FUNCTION_NAME;
-
- if ( (width <= 320) && (height <= 240)){
- mParams->setPreviewSize(mVideoWidth, mVideoHeight);
- }
-
- LOG_FUNCTION_NAME_EXIT;
+ CAMHAL_LOGV("buffer_type %d opaque %p", buffer->type, buffer->opaque);
+
+ if (buffer->type == CAMERA_BUFFER_ANW) {
+ buffer_handle_t *handle = (buffer_handle_t *)buffer->opaque;
+ CAMHAL_LOGV("anw %08x", *handle);
+ return (void *)*handle;
+ } else if (buffer->type == CAMERA_BUFFER_ION) {
+ return (void *)buffer->fd;
+ } else {
+ CAMHAL_LOGV("other %08x", buffer->opaque);
+ return (void *)buffer->opaque;
+ }
}
-};
-
-
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraHalCommon.cpp b/camera/CameraHalCommon.cpp
index 7e81a09..1cad4cb 100644
--- a/camera/CameraHalCommon.cpp
+++ b/camera/CameraHalCommon.cpp
@@ -16,7 +16,8 @@
#include "CameraHal.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const char CameraHal::PARAMS_DELIMITER []= ",";
@@ -46,7 +47,7 @@ void CameraHal::PPM(const char* str){
ppm.tv_sec = ppm.tv_sec * 1000000;
ppm.tv_sec = ppm.tv_sec + ppm.tv_usec - ppm_start.tv_usec;
- ALOGD("PPM: %s :%ld.%ld ms", str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ));
+ CAMHAL_LOGI("PPM: %s :%ld.%ld ms", str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ));
}
#elif PPM_INSTRUMENTATION_ABS
@@ -76,7 +77,7 @@ void CameraHal::PPM(const char* str){
absolute *= 1000;
absolute += ppm.tv_usec /1000;
- ALOGD("PPM: %s :%llu.%llu ms : %llu ms", str, ( elapsed /1000 ), ( elapsed % 1000 ), absolute);
+ CAMHAL_LOGI("PPM: %s :%llu.%llu ms : %llu ms", str, ( elapsed /1000 ), ( elapsed % 1000 ), absolute);
}
#endif
@@ -109,13 +110,12 @@ void CameraHal::PPM(const char* str, struct timeval* ppm_first, ...){
ppm.tv_sec = ppm.tv_sec * 1000000;
ppm.tv_sec = ppm.tv_sec + ppm.tv_usec - ppm_first->tv_usec;
- ALOGD("PPM: %s :%ld.%ld ms : %llu ms", temp_str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ), absolute);
+ CAMHAL_LOGI("PPM: %s :%ld.%ld ms : %llu ms", temp_str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ), absolute);
va_end(args);
}
#endif
-};
-
-
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraHalUtilClasses.cpp b/camera/CameraHalUtilClasses.cpp
index c74ca60..53c9a55 100644
--- a/camera/CameraHalUtilClasses.cpp
+++ b/camera/CameraHalUtilClasses.cpp
@@ -21,12 +21,10 @@
*
*/
-#define LOG_TAG "CameraHAL"
-
-
#include "CameraHal.h"
-namespace android {
+namespace Ti {
+namespace Camera {
/*--------------------FrameProvider Class STARTS here-----------------------------*/
@@ -59,7 +57,7 @@ int FrameProvider::disableFrameNotification(int32_t frameTypes)
return ret;
}
-int FrameProvider::returnFrame(void *frameBuf, CameraFrame::FrameType frameType)
+int FrameProvider::returnFrame(CameraBuffer *frameBuf, CameraFrame::FrameType frameType)
{
status_t ret = NO_ERROR;
@@ -68,7 +66,7 @@ int FrameProvider::returnFrame(void *frameBuf, CameraFrame::FrameType frameType)
return ret;
}
-void FrameProvider::addFramePointers(void *frameBuf, void *buf)
+void FrameProvider::addFramePointers(CameraBuffer *frameBuf, void *buf)
{
mFrameNotifier->addFramePointers(frameBuf, buf);
return;
@@ -105,7 +103,7 @@ int EventProvider::disableEventNotification(int32_t frameTypes)
LOG_FUNCTION_NAME;
status_t ret = NO_ERROR;
- mEventNotifier->disableMsgType(frameTypes<<MessageNotifier::FRAME_BIT_FIELD_POSITION
+ mEventNotifier->disableMsgType(frameTypes<<MessageNotifier::EVENT_BIT_FIELD_POSITION
, mCookie
);
@@ -117,15 +115,15 @@ int EventProvider::disableEventNotification(int32_t frameTypes)
/*--------------------CameraArea Class STARTS here-----------------------------*/
-status_t CameraArea::transfrom(uint32_t width,
- uint32_t height,
- int32_t &top,
- int32_t &left,
- uint32_t &areaWidth,
- uint32_t &areaHeight)
+status_t CameraArea::transfrom(size_t width,
+ size_t height,
+ size_t &top,
+ size_t &left,
+ size_t &areaWidth,
+ size_t &areaHeight)
{
status_t ret = NO_ERROR;
- uint32_t hRange, vRange;
+ size_t hRange, vRange;
double hScale, vScale;
LOG_FUNCTION_NAME
@@ -147,11 +145,11 @@ status_t CameraArea::transfrom(uint32_t width,
return ret;
}
-status_t CameraArea::checkArea(int32_t top,
- int32_t left,
- int32_t bottom,
- int32_t right,
- int32_t weight)
+status_t CameraArea::checkArea(ssize_t top,
+ ssize_t left,
+ ssize_t bottom,
+ ssize_t right,
+ ssize_t weight)
{
//Handles the invalid regin corner case.
@@ -198,8 +196,8 @@ status_t CameraArea::checkArea(int32_t top,
}
status_t CameraArea::parseAreas(const char *area,
- uint32_t areaLength,
- Vector< sp<CameraArea> > &areas)
+ size_t areaLength,
+ android::Vector<android::sp<CameraArea> > &areas)
{
status_t ret = NO_ERROR;
char *ctx;
@@ -209,9 +207,9 @@ status_t CameraArea::parseAreas(const char *area,
const char *startToken = "(";
const char endToken = ')';
const char sep = ',';
- int32_t top, left, bottom, right, weight;
+ ssize_t top, left, bottom, right, weight;
char *tmpBuffer = NULL;
- sp<CameraArea> currentArea;
+ android::sp<CameraArea> currentArea;
LOG_FUNCTION_NAME
@@ -243,7 +241,7 @@ status_t CameraArea::parseAreas(const char *area,
}
else
{
- left = static_cast<int32_t>(strtol(pStart, &pEnd, 10));
+ left = static_cast<ssize_t>(strtol(pStart, &pEnd, 10));
}
if ( sep != *pEnd )
@@ -254,7 +252,7 @@ status_t CameraArea::parseAreas(const char *area,
}
else
{
- top = static_cast<int32_t>(strtol(pEnd+1, &pEnd, 10));
+ top = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
}
if ( sep != *pEnd )
@@ -265,7 +263,7 @@ status_t CameraArea::parseAreas(const char *area,
}
else
{
- right = static_cast<int32_t>(strtol(pEnd+1, &pEnd, 10));
+ right = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
}
if ( sep != *pEnd )
@@ -276,7 +274,7 @@ status_t CameraArea::parseAreas(const char *area,
}
else
{
- bottom = static_cast<int32_t>(strtol(pEnd+1, &pEnd, 10));
+ bottom = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
}
if ( sep != *pEnd )
@@ -287,7 +285,7 @@ status_t CameraArea::parseAreas(const char *area,
}
else
{
- weight = static_cast<int32_t>(strtol(pEnd+1, &pEnd, 10));
+ weight = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
}
if ( endToken != *pEnd )
@@ -334,8 +332,8 @@ status_t CameraArea::parseAreas(const char *area,
return ret;
}
-bool CameraArea::areAreasDifferent(Vector< sp<CameraArea> > &area1,
- Vector< sp<CameraArea> > &area2) {
+bool CameraArea::areAreasDifferent(android::Vector< android::sp<CameraArea> > &area1,
+ android::Vector< android::sp<CameraArea> > &area2) {
if (area1.size() != area2.size()) {
return true;
}
@@ -350,7 +348,7 @@ bool CameraArea::areAreasDifferent(Vector< sp<CameraArea> > &area1,
return false;
}
-bool CameraArea::compare(const sp<CameraArea> &area) {
+bool CameraArea::compare(const android::sp<CameraArea> &area) {
return ((mTop == area->mTop) && (mLeft == area->mLeft) &&
(mBottom == area->mBottom) && (mRight == area->mRight) &&
(mWeight == area->mWeight));
@@ -359,4 +357,5 @@ bool CameraArea::compare(const sp<CameraArea> &area) {
/*--------------------CameraArea Class ENDS here-----------------------------*/
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraHal_Module.cpp b/camera/CameraHal_Module.cpp
index cb4e804..313c7fc 100644
--- a/camera/CameraHal_Module.cpp
+++ b/camera/CameraHal_Module.cpp
@@ -21,8 +21,6 @@
*
*/
-#define LOG_TAG "CameraHAL"
-
#include <utils/threads.h>
#include "CameraHal.h"
@@ -30,8 +28,18 @@
#include "TICameraParameters.h"
-static android::CameraProperties gCameraProperties;
-static android::CameraHal* gCameraHals[MAX_CAMERAS_SUPPORTED];
+#ifdef CAMERAHAL_DEBUG_VERBOSE
+# define CAMHAL_LOG_MODULE_FUNCTION_NAME LOG_FUNCTION_NAME
+#else
+# define CAMHAL_LOG_MODULE_FUNCTION_NAME
+#endif
+
+
+namespace Ti {
+namespace Camera {
+
+static CameraProperties gCameraProperties;
+static CameraHal* gCameraHals[MAX_CAMERAS_SUPPORTED];
static unsigned int gCamerasOpen = 0;
static android::Mutex gCameraHalDeviceLock;
@@ -45,6 +53,10 @@ static struct hw_module_methods_t camera_module_methods = {
open: camera_device_open
};
+} // namespace Camera
+} // namespace Ti
+
+
camera_module_t HAL_MODULE_INFO_SYM = {
common: {
tag: HARDWARE_MODULE_TAG,
@@ -53,14 +65,18 @@ camera_module_t HAL_MODULE_INFO_SYM = {
id: CAMERA_HARDWARE_MODULE_ID,
name: "TI OMAP CameraHal Module",
author: "TI",
- methods: &camera_module_methods,
+ methods: &Ti::Camera::camera_module_methods,
dso: NULL, /* remove compilation warnings */
reserved: {0}, /* remove compilation warnings */
},
- get_number_of_cameras: camera_get_number_of_cameras,
- get_camera_info: camera_get_camera_info,
+ get_number_of_cameras: Ti::Camera::camera_get_number_of_cameras,
+ get_camera_info: Ti::Camera::camera_get_camera_info,
};
+
+namespace Ti {
+namespace Camera {
+
typedef struct ti_camera_device {
camera_device_t base;
/* TI specific "private" data can go here (base.priv) */
@@ -75,11 +91,11 @@ typedef struct ti_camera_device {
int camera_set_preview_window(struct camera_device * device,
struct preview_stream_ops *window)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -90,6 +106,42 @@ int camera_set_preview_window(struct camera_device * device,
return rv;
}
+#ifdef OMAP_ENHANCEMENT_CPCAM
+int camera_set_extended_preview_ops(struct camera_device * device,
+ preview_stream_extended_ops_t * extendedOps)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ if (!device) {
+ return BAD_VALUE;
+ }
+
+ ti_camera_device_t * const tiDevice = reinterpret_cast<ti_camera_device_t*>(device);
+ gCameraHals[tiDevice->cameraid]->setExtendedPreviewStreamOps(extendedOps);
+
+ return OK;
+}
+
+int camera_set_buffer_source(struct camera_device * device,
+ struct preview_stream_ops *tapin,
+ struct preview_stream_ops *tapout)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->setBufferSource(tapin, tapout);
+
+ return rv;
+}
+#endif
+
void camera_set_callbacks(struct camera_device * device,
camera_notify_callback notify_cb,
camera_data_callback data_cb,
@@ -97,9 +149,9 @@ void camera_set_callbacks(struct camera_device * device,
camera_request_memory get_memory,
void *user)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -111,9 +163,9 @@ void camera_set_callbacks(struct camera_device * device,
void camera_enable_msg_type(struct camera_device * device, int32_t msg_type)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -125,9 +177,9 @@ void camera_enable_msg_type(struct camera_device * device, int32_t msg_type)
void camera_disable_msg_type(struct camera_device * device, int32_t msg_type)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -139,9 +191,9 @@ void camera_disable_msg_type(struct camera_device * device, int32_t msg_type)
int camera_msg_type_enabled(struct camera_device * device, int32_t msg_type)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return 0;
@@ -153,11 +205,11 @@ int camera_msg_type_enabled(struct camera_device * device, int32_t msg_type)
int camera_start_preview(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -170,9 +222,9 @@ int camera_start_preview(struct camera_device * device)
void camera_stop_preview(struct camera_device * device)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -184,11 +236,11 @@ void camera_stop_preview(struct camera_device * device)
int camera_preview_enabled(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -200,11 +252,11 @@ int camera_preview_enabled(struct camera_device * device)
int camera_store_meta_data_in_buffers(struct camera_device * device, int enable)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -218,11 +270,11 @@ int camera_store_meta_data_in_buffers(struct camera_device * device, int enable)
int camera_start_recording(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -234,9 +286,9 @@ int camera_start_recording(struct camera_device * device)
void camera_stop_recording(struct camera_device * device)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -248,11 +300,11 @@ void camera_stop_recording(struct camera_device * device)
int camera_recording_enabled(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -265,9 +317,9 @@ int camera_recording_enabled(struct camera_device * device)
void camera_release_recording_frame(struct camera_device * device,
const void *opaque)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -279,11 +331,11 @@ void camera_release_recording_frame(struct camera_device * device,
int camera_auto_focus(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -295,11 +347,11 @@ int camera_auto_focus(struct camera_device * device)
int camera_cancel_auto_focus(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -311,27 +363,45 @@ int camera_cancel_auto_focus(struct camera_device * device)
int camera_take_picture(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->takePicture(0);
+ return rv;
+}
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+int camera_take_picture_with_parameters(struct camera_device * device, const char *params)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return rv;
ti_dev = (ti_camera_device_t*) device;
- rv = gCameraHals[ti_dev->cameraid]->takePicture();
+ rv = gCameraHals[ti_dev->cameraid]->takePicture(params);
return rv;
}
+#endif
int camera_cancel_picture(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
@@ -341,12 +411,46 @@ int camera_cancel_picture(struct camera_device * device)
return rv;
}
-int camera_set_parameters(struct camera_device * device, const char *params)
+#ifdef OMAP_ENHANCEMENT_CPCAM
+int camera_reprocess(struct camera_device * device, const char *params)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->reprocess(params);
+ return rv;
+}
+
+int camera_cancel_reprocess(struct camera_device * device)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->cancel_reprocess();
+ return rv;
+}
+#endif
+
+int camera_set_parameters(struct camera_device * device, const char *params)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return rv;
@@ -359,11 +463,11 @@ int camera_set_parameters(struct camera_device * device, const char *params)
char* camera_get_parameters(struct camera_device * device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
char* param = NULL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return NULL;
@@ -376,9 +480,9 @@ char* camera_get_parameters(struct camera_device * device)
static void camera_put_parameters(struct camera_device *device, char *parms)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -391,25 +495,42 @@ static void camera_put_parameters(struct camera_device *device, char *parms)
int camera_send_command(struct camera_device * device,
int32_t cmd, int32_t arg1, int32_t arg2)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
if(!device)
return rv;
ti_dev = (ti_camera_device_t*) device;
+#ifdef OMAP_ENHANCEMENT
+ if ( cmd == CAMERA_CMD_SETUP_EXTENDED_OPERATIONS ) {
+ camera_device_extended_ops_t * const ops = static_cast<camera_device_extended_ops_t*>(
+ camera_cmd_send_command_args_to_pointer(arg1, arg2));
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ ops->set_extended_preview_ops = camera_set_extended_preview_ops;
+ ops->set_buffer_source = camera_set_buffer_source;
+ ops->take_picture_with_parameters = camera_take_picture_with_parameters;
+ ops->reprocess = camera_reprocess;
+ ops->cancel_reprocess = camera_cancel_reprocess;
+#endif
+
+ return OK;
+ }
+#endif
+
rv = gCameraHals[ti_dev->cameraid]->sendCommand(cmd, arg1, arg2);
return rv;
}
void camera_release(struct camera_device * device)
{
- ti_camera_device_t* ti_dev = NULL;
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
- ALOGV("%s", __FUNCTION__);
+ ti_camera_device_t* ti_dev = NULL;
if(!device)
return;
@@ -421,6 +542,8 @@ void camera_release(struct camera_device * device)
int camera_dump(struct camera_device * device, int fd)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int rv = -EINVAL;
ti_camera_device_t* ti_dev = NULL;
@@ -437,12 +560,12 @@ extern "C" void heaptracker_free_leaked_memory(void);
int camera_device_close(hw_device_t* device)
{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
int ret = 0;
ti_camera_device_t* ti_dev = NULL;
- ALOGV("%s", __FUNCTION__);
-
- android::Mutex::Autolock lock(gCameraHalDeviceLock);
+ android::AutoMutex lock(gCameraHalDeviceLock);
if (!device) {
ret = -EINVAL;
@@ -488,10 +611,10 @@ int camera_device_open(const hw_module_t* module, const char* name,
int cameraid;
ti_camera_device_t* camera_device = NULL;
camera_device_ops_t* camera_ops = NULL;
- android::CameraHal* camera = NULL;
- android::CameraProperties::Properties* properties = NULL;
+ CameraHal* camera = NULL;
+ CameraProperties::Properties* properties = NULL;
- android::Mutex::Autolock lock(gCameraHalDeviceLock);
+ android::AutoMutex lock(gCameraHalDeviceLock);
CAMHAL_LOGI("camera_device open");
@@ -501,7 +624,7 @@ int camera_device_open(const hw_module_t* module, const char* name,
if(cameraid > num_cameras)
{
- ALOGE("camera service provided cameraid out of bounds, "
+ CAMHAL_LOGE("camera service provided cameraid out of bounds, "
"cameraid = %d, num supported = %d",
cameraid, num_cameras);
rv = -EINVAL;
@@ -510,7 +633,7 @@ int camera_device_open(const hw_module_t* module, const char* name,
if(gCamerasOpen >= MAX_SIMUL_CAMERAS_SUPPORTED)
{
- ALOGE("maximum number of cameras already open");
+ CAMHAL_LOGE("maximum number of cameras already open");
rv = -ENOMEM;
goto fail;
}
@@ -518,7 +641,7 @@ int camera_device_open(const hw_module_t* module, const char* name,
camera_device = (ti_camera_device_t*)malloc(sizeof(*camera_device));
if(!camera_device)
{
- ALOGE("camera_device allocation fail");
+ CAMHAL_LOGE("camera_device allocation fail");
rv = -ENOMEM;
goto fail;
}
@@ -526,7 +649,7 @@ int camera_device_open(const hw_module_t* module, const char* name,
camera_ops = (camera_device_ops_t*)malloc(sizeof(*camera_ops));
if(!camera_ops)
{
- ALOGE("camera_ops allocation fail");
+ CAMHAL_LOGE("camera_ops allocation fail");
rv = -ENOMEM;
goto fail;
}
@@ -572,23 +695,23 @@ int camera_device_open(const hw_module_t* module, const char* name,
if(gCameraProperties.getProperties(cameraid, &properties) < 0)
{
- ALOGE("Couldn't get camera properties");
+ CAMHAL_LOGE("Couldn't get camera properties");
rv = -ENOMEM;
goto fail;
}
- camera = new android::CameraHal(cameraid);
+ camera = new CameraHal(cameraid);
if(!camera)
{
- ALOGE("Couldn't create instance of CameraHal class");
+ CAMHAL_LOGE("Couldn't create instance of CameraHal class");
rv = -ENOMEM;
goto fail;
}
- if(properties && (camera->initialize(properties) != android::NO_ERROR))
+ if(properties && (camera->initialize(properties) != NO_ERROR))
{
- ALOGE("Couldn't initialize camera instance");
+ CAMHAL_LOGE("Couldn't initialize camera instance");
rv = -ENODEV;
goto fail;
}
@@ -620,19 +743,15 @@ int camera_get_number_of_cameras(void)
{
int num_cameras = MAX_CAMERAS_SUPPORTED;
- // TODO(XXX): Ducati is not loaded yet when camera service gets here
- // Lets revisit this later to see if we can somehow get this working
-#if 0
// this going to be the first call from camera service
// initialize camera properties here...
- if(gCameraProperties.initialize() != android::NO_ERROR)
+ if(gCameraProperties.initialize() != NO_ERROR)
{
CAMHAL_LOGEA("Unable to create or initialize CameraProperties");
return NULL;
}
num_cameras = gCameraProperties.camerasSupported();
-#endif
return num_cameras;
}
@@ -643,40 +762,41 @@ int camera_get_camera_info(int camera_id, struct camera_info *info)
int face_value = CAMERA_FACING_BACK;
int orientation = 0;
const char *valstr = NULL;
- android::CameraProperties::Properties* properties = NULL;
+ CameraProperties::Properties* properties = NULL;
// this going to be the first call from camera service
// initialize camera properties here...
- if(gCameraProperties.initialize() != android::NO_ERROR)
+ if(gCameraProperties.initialize() != NO_ERROR)
{
CAMHAL_LOGEA("Unable to create or initialize CameraProperties");
- return NULL;
+ rv = -EINVAL;
+ goto end;
}
//Get camera properties for camera index
if(gCameraProperties.getProperties(camera_id, &properties) < 0)
{
- ALOGE("Couldn't get camera properties");
+ CAMHAL_LOGE("Couldn't get camera properties");
rv = -EINVAL;
goto end;
}
if(properties)
{
- valstr = properties->get(android::CameraProperties::FACING_INDEX);
+ valstr = properties->get(CameraProperties::FACING_INDEX);
if(valstr != NULL)
{
- if (strcmp(valstr, (const char *) android::TICameraParameters::FACING_FRONT) == 0)
+ if (strcmp(valstr, TICameraParameters::FACING_FRONT) == 0)
{
face_value = CAMERA_FACING_FRONT;
}
- else if (strcmp(valstr, (const char *) android::TICameraParameters::FACING_BACK) == 0)
+ else if (strcmp(valstr, TICameraParameters::FACING_BACK) == 0)
{
face_value = CAMERA_FACING_BACK;
}
}
- valstr = properties->get(android::CameraProperties::ORIENTATION_INDEX);
+ valstr = properties->get(CameraProperties::ORIENTATION_INDEX);
if(valstr != NULL)
{
orientation = atoi(valstr);
@@ -695,6 +815,5 @@ end:
}
-
-
-
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
index 527b7c2..82b1da4 100644
--- a/camera/CameraParameters.cpp
+++ b/camera/CameraParameters.cpp
@@ -21,27 +21,40 @@
*
*/
-#include "CameraHal.h"
#include "CameraProperties.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const char CameraProperties::INVALID[]="prop-invalid-key";
const char CameraProperties::CAMERA_NAME[]="prop-camera-name";
const char CameraProperties::CAMERA_SENSOR_INDEX[]="prop-sensor-index";
+const char CameraProperties::CAMERA_SENSOR_ID[] = "prop-sensor-id";
const char CameraProperties::ORIENTATION_INDEX[]="prop-orientation";
const char CameraProperties::FACING_INDEX[]="prop-facing";
-const char CameraProperties::S3D_SUPPORTED[]="prop-s3d-supported";
const char CameraProperties::SUPPORTED_PREVIEW_SIZES[] = "prop-preview-size-values";
+const char CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[] = "prop-preview-subsampled-size-values";
+const char CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[] = "prop-preview-topbottom-size-values";
+const char CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[] = "prop-preview-sidebyside-size-values";
const char CameraProperties::SUPPORTED_PREVIEW_FORMATS[] = "prop-preview-format-values";
const char CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES[] = "prop-preview-frame-rate-values";
+const char CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT[] = "prop-preview-frame-rate-ext-values";
const char CameraProperties::SUPPORTED_PICTURE_SIZES[] = "prop-picture-size-values";
+const char CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES[] = "prop-picture-subsampled-size-values";
+const char CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES[] = "prop-picture-topbottom-size-values";
+const char CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[] = "prop-picture-sidebyside-size-values";
const char CameraProperties::SUPPORTED_PICTURE_FORMATS[] = "prop-picture-format-values";
const char CameraProperties::SUPPORTED_THUMBNAIL_SIZES[] = "prop-jpeg-thumbnail-size-values";
const char CameraProperties::SUPPORTED_WHITE_BALANCE[] = "prop-whitebalance-values";
const char CameraProperties::SUPPORTED_EFFECTS[] = "prop-effect-values";
const char CameraProperties::SUPPORTED_ANTIBANDING[] = "prop-antibanding-values";
const char CameraProperties::SUPPORTED_EXPOSURE_MODES[] = "prop-exposure-mode-values";
+const char CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN[] = "prop-manual-exposure-min";
+const char CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX[] = "prop-manual-exposure-max";
+const char CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP[] = "prop-manual-exposure-step";
+const char CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN[] = "prop-manual-gain-iso-min";
+const char CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX[] = "prop-manual-gain-iso-max";
+const char CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP[] = "prop-manual-gain-iso-step";
const char CameraProperties::SUPPORTED_EV_MAX[] = "prop-ev-compensation-max";
const char CameraProperties::SUPPORTED_EV_MIN[] = "prop-ev-compensation-min";
const char CameraProperties::SUPPORTED_EV_STEP[] = "prop-ev-compensation-step";
@@ -79,21 +92,30 @@ const char CameraProperties::SATURATION[] = "prop-saturation-default";
const char CameraProperties::SHARPNESS[] = "prop-sharpness-default";
const char CameraProperties::IPP[] = "prop-ipp-default";
const char CameraProperties::GBCE[] = "prop-gbce-default";
-const char CameraProperties::S3D2D_PREVIEW[] = "prop-s3d2d-preview";
-const char CameraProperties::S3D2D_PREVIEW_MODES[] = "prop-s3d2d-preview-values";
-const char CameraProperties::AUTOCONVERGENCE[] = "prop-auto-convergence";
+const char CameraProperties::SUPPORTED_GBCE[] = "prop-gbce-supported";
+const char CameraProperties::GLBCE[] = "prop-glbce-default";
+const char CameraProperties::SUPPORTED_GLBCE[] = "prop-glbce-supported";
+const char CameraProperties::S3D_PRV_FRAME_LAYOUT[] = "prop-s3d-prv-frame-layout";
+const char CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES[] = "prop-s3d-prv-frame-layout-values";
+const char CameraProperties::S3D_CAP_FRAME_LAYOUT[] = "prop-s3d-cap-frame-layout";
+const char CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES[] = "prop-s3d-cap-frame-layout-values";
const char CameraProperties::AUTOCONVERGENCE_MODE[] = "prop-auto-convergence-mode";
-const char CameraProperties::MANUALCONVERGENCE_VALUES[] = "prop-manual-convergence-values";
+const char CameraProperties::AUTOCONVERGENCE_MODE_VALUES[] = "prop-auto-convergence-mode-values";
+const char CameraProperties::MANUAL_CONVERGENCE[] = "prop-manual-convergence";
+const char CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN[] = "prop-supported-manual-convergence-min";
+const char CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX[] = "prop-supported-manual-convergence-max";
+const char CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_STEP[] = "prop-supported-manual-convergence-step";
const char CameraProperties::VSTAB[] = "prop-vstab-default";
const char CameraProperties::VSTAB_SUPPORTED[] = "prop-vstab-supported";
+const char CameraProperties::VNF[] = "prop-vnf-default";
+const char CameraProperties::VNF_SUPPORTED[] = "prop-vnf-supported";
const char CameraProperties::REVISION[] = "prop-revision";
const char CameraProperties::FOCAL_LENGTH[] = "prop-focal-length";
const char CameraProperties::HOR_ANGLE[] = "prop-horizontal-angle";
const char CameraProperties::VER_ANGLE[] = "prop-vertical-angle";
const char CameraProperties::FRAMERATE_RANGE[] = "prop-framerate-range-default";
-const char CameraProperties::FRAMERATE_RANGE_IMAGE[] = "prop-framerate-range-image-default";
-const char CameraProperties::FRAMERATE_RANGE_VIDEO[]="prop-framerate-range-video-default";
const char CameraProperties::FRAMERATE_RANGE_SUPPORTED[]="prop-framerate-range-values";
+const char CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED[]="prop-framerate-range-ext-values";
const char CameraProperties::SENSOR_ORIENTATION[]= "sensor-orientation";
const char CameraProperties::SENSOR_ORIENTATION_VALUES[]= "sensor-orientation-values";
const char CameraProperties::EXIF_MAKE[] = "prop-exif-make";
@@ -111,8 +133,13 @@ const char CameraProperties::METERING_AREAS[] = "prop-metering-areas";
const char CameraProperties::VIDEO_SNAPSHOT_SUPPORTED[] = "prop-video-snapshot-supported";
const char CameraProperties::VIDEO_SIZE[] = "video-size";
const char CameraProperties::SUPPORTED_VIDEO_SIZES[] = "video-size-values";
-const char CameraProperties::PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "preferred-preview-size-for-video";
-
+const char CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[] = "prop-mechanical-misalignment-correction-supported";
+const char CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION[] = "prop-mechanical-misalignment-correction";
+const char CameraProperties::CAP_MODE_VALUES[] = "prop-mode-values";
+const char CameraProperties::RAW_WIDTH[] = "prop-raw-width-values";
+const char CameraProperties::RAW_HEIGHT[] = "prop-raw-height-values";
+const char CameraProperties::MAX_PICTURE_WIDTH[] = "prop-max-picture-width";
+const char CameraProperties::MAX_PICTURE_HEIGHT[] = "prop-max-picture-height";
const char CameraProperties::DEFAULT_VALUE[] = "";
@@ -124,7 +151,7 @@ int CameraProperties::getProperties(int cameraIndex, CameraProperties::Propertie
{
LOG_FUNCTION_NAME;
- if((unsigned int)cameraIndex >= mCamerasSupported)
+ if(cameraIndex >= mCamerasSupported)
{
LOG_FUNCTION_NAME_EXIT;
return -EINVAL;
@@ -136,57 +163,78 @@ int CameraProperties::getProperties(int cameraIndex, CameraProperties::Propertie
return 0;
}
-ssize_t CameraProperties::Properties::set(const char *prop, const char *value)
-{
- if(!prop)
- return -EINVAL;
- if(!value)
- value = DEFAULT_VALUE;
+void CameraProperties::Properties::set(const char * const prop, const char * const value) {
+ CAMHAL_ASSERT(prop);
- return mProperties->replaceValueFor(String8(prop), String8(value));
+ if ( !value ) {
+ mProperties[mCurrentMode].removeItem(android::String8(prop));
+ } else {
+ mProperties[mCurrentMode].replaceValueFor(android::String8(prop), android::String8(value));
+ }
}
-ssize_t CameraProperties::Properties::set(const char *prop, int value)
-{
+void CameraProperties::Properties::set(const char * const prop, const int value) {
char s_val[30];
-
sprintf(s_val, "%d", value);
+ set(prop, s_val);
+}
- return set(prop, s_val);
+const char* CameraProperties::Properties::get(const char * prop) const {
+ return mProperties[mCurrentMode].valueFor(android::String8(prop)).string();
}
-const char* CameraProperties::Properties::get(const char * prop)
-{
- String8 value = mProperties->valueFor(String8(prop));
- return value.string();
+int CameraProperties::Properties::getInt(const char * prop) const {
+ android::String8 value = mProperties[mCurrentMode].valueFor(android::String8(prop));
+ if (value.isEmpty()) {
+ return -1;
+ }
+ return strtol(value, 0, 0);
}
-void CameraProperties::Properties::dump()
-{
- for (size_t i = 0; i < mProperties->size(); i++)
- {
- CAMHAL_LOGDB("%s = %s\n",
- mProperties->keyAt(i).string(),
- mProperties->valueAt(i).string());
+void CameraProperties::Properties::setSensorIndex(int idx) {
+ OperatingMode originalMode = getMode();
+ for ( int i = 0 ; i < MODE_MAX ; i++ ) {
+ setMode(static_cast<OperatingMode>(i));
+ set(CAMERA_SENSOR_INDEX, idx);
}
+ setMode(originalMode);
}
-const char* CameraProperties::Properties::keyAt(unsigned int index)
-{
- if(index < mProperties->size())
- {
- return mProperties->keyAt(index).string();
+void CameraProperties::Properties::setMode(OperatingMode mode) {
+ CAMHAL_ASSERT(mode >= 0 && mode < MODE_MAX);
+ mCurrentMode = mode;
+}
+
+OperatingMode CameraProperties::Properties::getMode() const {
+ return mCurrentMode;
+}
+
+void CameraProperties::Properties::dump() {
+ CAMHAL_LOGD("================================");
+ CAMHAL_LOGD("Dumping properties for camera: %d", getInt("prop-sensor-index"));
+
+ for (size_t i = 0; i < mProperties[mCurrentMode].size(); i++) {
+ CAMHAL_LOGD("%s = %s",
+ mProperties[mCurrentMode].keyAt(i).string(),
+ mProperties[mCurrentMode].valueAt(i).string());
+ }
+
+ CAMHAL_LOGD("--------------------------------");
+}
+
+const char* CameraProperties::Properties::keyAt(const unsigned int index) const {
+ if (index < mProperties[mCurrentMode].size()) {
+ return mProperties[mCurrentMode].keyAt(index).string();
}
return NULL;
}
-const char* CameraProperties::Properties::valueAt(unsigned int index)
-{
- if(index < mProperties->size())
- {
- return mProperties->valueAt(index).string();
+const char* CameraProperties::Properties::valueAt(const unsigned int index) const {
+ if (index < mProperties[mCurrentMode].size()) {
+ return mProperties[mCurrentMode].valueAt(index).string();
}
return NULL;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraProperties.cpp b/camera/CameraProperties.cpp
index 5d3ff20..93bc953 100644
--- a/camera/CameraProperties.cpp
+++ b/camera/CameraProperties.cpp
@@ -21,16 +21,13 @@
*
*/
-//#include "CameraHal.h"
-#include <utils/threads.h>
-
-#include "DebugUtils.h"
#include "CameraProperties.h"
#define CAMERA_ROOT "CameraRoot"
#define CAMERA_INSTANCE "CameraInstance"
-namespace android {
+namespace Ti {
+namespace Camera {
// lower entries have higher priority
static const char* g_camera_adapters[] = {
@@ -71,23 +68,24 @@ status_t CameraProperties::initialize()
status_t ret;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if(mInitialized)
return NO_ERROR;
ret = loadProperties();
- mInitialized = 1;
+ if (ret == NO_ERROR) {
+ mInitialized = 1;
+ }
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
- const unsigned int starting_camera,
- const unsigned int max_camera);
+extern "C" status_t CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
+ int starting_camera, int max_camera, int & supported_cameras);
///Loads all the Camera related properties
status_t CameraProperties::loadProperties()
@@ -96,25 +94,32 @@ status_t CameraProperties::loadProperties()
status_t ret = NO_ERROR;
+ //Must be re-initialized here, since loadProperties() could potentially be called more than once.
+ mCamerasSupported = 0;
+
// adapter updates capabilities and we update camera count
- mCamerasSupported = CameraAdapter_Capabilities(mCameraProps, mCamerasSupported, MAX_CAMERAS_SUPPORTED);
+ const status_t err = CameraAdapter_Capabilities(mCameraProps, mCamerasSupported,
+ MAX_CAMERAS_SUPPORTED, mCamerasSupported);
- if((int)mCamerasSupported < 0) {
- ALOGE("error while getting capabilities");
+ if(err != NO_ERROR) {
+ CAMHAL_LOGE("error while getting capabilities");
+ ret = UNKNOWN_ERROR;
+ } else if (mCamerasSupported == 0) {
+ CAMHAL_LOGE("camera busy. properties not loaded. num_cameras = %d", mCamerasSupported);
ret = UNKNOWN_ERROR;
} else if (mCamerasSupported > MAX_CAMERAS_SUPPORTED) {
- ALOGE("returned too many adapaters");
+ CAMHAL_LOGE("returned too many adapters");
ret = UNKNOWN_ERROR;
} else {
- ALOGE("num_cameras = %d", mCamerasSupported);
+ CAMHAL_LOGI("num_cameras = %d", mCamerasSupported);
- for (unsigned int i = 0; i < mCamerasSupported; i++) {
- mCameraProps[i].set(CAMERA_SENSOR_INDEX, i);
+ for (int i = 0; i < mCamerasSupported; i++) {
+ mCameraProps[i].setSensorIndex(i);
mCameraProps[i].dump();
}
}
- ALOGV("mCamerasSupported = %d", mCamerasSupported);
+ CAMHAL_LOGV("mCamerasSupported = %d", mCamerasSupported);
LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -126,4 +131,5 @@ int CameraProperties::camerasSupported()
return mCamerasSupported;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/Encoder_libjpeg.cpp b/camera/Encoder_libjpeg.cpp
index 6e99a25..e11e3bf 100644
--- a/camera/Encoder_libjpeg.cpp
+++ b/camera/Encoder_libjpeg.cpp
@@ -23,11 +23,9 @@
*
*/
-#define LOG_TAG "CameraHAL"
-
-#include "CameraHal.h"
#include "Encoder_libjpeg.h"
#include "NV12_resize.h"
+#include "TICameraParameters.h"
#include <stdlib.h>
#include <unistd.h>
@@ -46,7 +44,9 @@ extern "C" {
#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
#define MIN(x,y) ((x < y) ? x : y)
-namespace android {
+namespace Ti {
+namespace Camera {
+
struct integer_string_pair {
unsigned int integer;
const char* string;
@@ -151,7 +151,7 @@ static void uyvy_to_yuv(uint8_t* dst, uint32_t* src, int width) {
" blt 5f \n\t"
"0: @ 16 pixel swap \n\t"
" vld2.8 {q0, q1} , [%[src]]! @ q0 = uv q1 = y \n\t"
- " vuzp.8 q0, q2 @ d1 = u d5 = v \n\t"
+ " vuzp.8 q0, q2 @ d0 = u d4 = v \n\t"
" vmov d1, d0 @ q0 = u0u1u2..u0u1u2... \n\t"
" vmov d5, d4 @ q2 = v0v1v2..v0v1v2... \n\t"
" vzip.8 d0, d1 @ q0 = u0u0u1u1u2u2... \n\t"
@@ -173,6 +173,61 @@ static void uyvy_to_yuv(uint8_t* dst, uint32_t* src, int width) {
}
}
+static void yuyv_to_yuv(uint8_t* dst, uint32_t* src, int width) {
+ if (!dst || !src) {
+ return;
+ }
+
+ if (width % 2) {
+ return; // not supporting odd widths
+ }
+
+ // currently, neon routine only supports multiple of 16 width
+ if (width % 16) {
+ while ((width-=2) >= 0) {
+ uint8_t y0 = (src[0] >> 0) & 0xFF;
+ uint8_t u0 = (src[0] >> 8) & 0xFF;
+ uint8_t y1 = (src[0] >> 16) & 0xFF;
+ uint8_t v0 = (src[0] >> 24) & 0xFF;
+ dst[0] = y0;
+ dst[1] = u0;
+ dst[2] = v0;
+ dst[3] = y1;
+ dst[4] = u0;
+ dst[5] = v0;
+ dst += 6;
+ src++;
+ }
+ } else {
+ int n = width;
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 5f \n\t"
+ "0: @ 16 pixel swap \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv.. \n\t"
+ " vuzp.8 q1, q2 @ d2 = u d4 = v \n\t"
+ " vmov d3, d2 @ q1 = u0u1u2..u0u1u2... \n\t"
+ " vmov d5, d4 @ q2 = v0v1v2..v0v1v2... \n\t"
+ " vzip.8 d2, d3 @ q1 = u0u0u1u1u2u2... \n\t"
+ " vzip.8 d4, d5 @ q2 = v0v0v1v1v2v2... \n\t"
+ " @ now q0 = y q1 = u q2 = v \n\t"
+ " vst3.8 {d0,d2,d4},[%[dst]]! \n\t"
+ " vst3.8 {d1,d3,d5},[%[dst]]! \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " cmp %[n], #16 \n\t"
+ " bge 0b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst] "+r" (dst), [src] "+r" (src), [n] "+r" (n)
+ : [src_stride] "r" (width)
+ : "cc", "memory", "q0", "q1", "q2"
+ );
+ }
+}
+
static void resize_nv12(Encoder_libjpeg::params* params, uint8_t* dst_buffer) {
structConvImage o_img_ptr, i_img_ptr;
@@ -269,7 +324,11 @@ void ExifElementsTable::insertExifToJpeg(unsigned char* jpeg, size_t jpeg_size)
ResetJpgfile();
if (ReadJpegSectionsFromBuffer(jpeg, jpeg_size, read_mode)) {
jpeg_opened = true;
+#ifdef ANDROID_API_JB_OR_LATER
create_EXIF(table, exif_tag_count, gps_tag_count, has_datetime_tag);
+#else
+ create_EXIF(table, exif_tag_count, gps_tag_count);
+#endif
}
}
@@ -336,7 +395,14 @@ status_t ExifElementsTable::insertElement(const char* tag, const char* value) {
exif_tag_count++;
if (strcmp(tag, TAG_DATETIME) == 0) {
+#ifdef ANDROID_API_JB_OR_LATER
has_datetime_tag = true;
+#else
+ // jhead isn't taking datetime tag...this is a WA
+ ImageInfo.numDateTimeTags = 1;
+ memcpy(ImageInfo.DateTime, value,
+ MIN(ARRAY_SIZE(ImageInfo.DateTime), value_length + 1));
+#endif
}
}
@@ -388,20 +454,21 @@ size_t Encoder_libjpeg::encode(params* input) {
goto exit;
}
- if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
bpp = 1;
if ((in_width != out_width) || (in_height != out_height)) {
resize_src = (uint8_t*) malloc(input->dst_size);
resize_nv12(input, resize_src);
if (resize_src) src = resize_src;
}
- } else if ((in_width != out_width) || (in_height != out_height)) {
- CAMHAL_LOGEB("Encoder: resizing is not supported for this format: %s", input->format);
- goto exit;
- } else if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV422I)) {
+ } else if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV422I) &&
+ strcmp(input->format, TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY)) {
// we currently only support yuv422i and yuv420sp
CAMHAL_LOGEB("Encoder: format not supported: %s", input->format);
goto exit;
+ } else if ((in_width != out_width) || (in_height != out_height)) {
+ CAMHAL_LOGEB("Encoder: resizing is not supported for this format: %s", input->format);
+ goto exit;
}
cinfo.err = jpeg_std_error(&jerr);
@@ -413,9 +480,10 @@ size_t Encoder_libjpeg::encode(params* input) {
"height:%d \n\t"
"dest %p \n\t"
"dest size:%d \n\t"
- "mSrc %p",
+ "mSrc %p \n\t"
+ "format: %s",
out_width, out_height, input->dst,
- input->dst_size, src);
+ input->dst_size, src, input->format);
cinfo.dest = &dest_mgr;
cinfo.image_width = out_width - right_crop;
@@ -438,10 +506,12 @@ size_t Encoder_libjpeg::encode(params* input) {
JSAMPROW row[1]; /* pointer to JSAMPLE row[s] */
// convert input yuv format to yuv444
- if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
nv21_to_yuv(row_tmp, row_src, row_uv, out_width - right_crop);
- } else {
+ } else if (strcmp(input->format, TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY) == 0) {
uyvy_to_yuv(row_tmp, (uint32_t*)row_src, out_width - right_crop);
+ } else if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ yuyv_to_yuv(row_tmp, (uint32_t*)row_src, out_width - right_crop);
}
row[0] = row_tmp;
@@ -449,7 +519,7 @@ size_t Encoder_libjpeg::encode(params* input) {
row_src = row_src + out_width*bpp;
// move uv row if input format needs it
- if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
if (!(cinfo.next_scanline % 2))
row_uv = row_uv + out_width * bpp;
}
@@ -469,4 +539,5 @@ size_t Encoder_libjpeg::encode(params* input) {
return dest_mgr.jpegsize;
}
-} // namespace android
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/MemoryManager.cpp b/camera/MemoryManager.cpp
index e3b642c..d466f52 100644
--- a/camera/MemoryManager.cpp
+++ b/camera/MemoryManager.cpp
@@ -14,25 +14,19 @@
* limitations under the License.
*/
-
-
-#define LOG_TAG "CameraHAL"
-
-
#include "CameraHal.h"
#include "TICameraParameters.h"
extern "C" {
-#include <ion_ti/ion.h>
-
//#include <timm_osal_interfaces.h>
//#include <timm_osal_trace.h>
};
-namespace android {
+namespace Ti {
+namespace Camera {
///@todo Move these constants to a common header file, preferably in tiler.h
#define STRIDE_8BIT (4 * 1024)
@@ -43,92 +37,112 @@ namespace android {
///Utility Macro Declarations
/*--------------------MemoryManager Class STARTS here-----------------------------*/
-void* MemoryManager::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs)
-{
- LOG_FUNCTION_NAME;
+MemoryManager::MemoryManager() {
+ mIonFd = -1;
+}
- if(mIonFd < 0)
- {
+MemoryManager::~MemoryManager() {
+ if ( mIonFd >= 0 ) {
+ ion_close(mIonFd);
+ mIonFd = -1;
+ }
+}
+
+status_t MemoryManager::initialize() {
+ if ( mIonFd == -1 ) {
mIonFd = ion_open();
- if(mIonFd < 0)
- {
- CAMHAL_LOGEA("ion_open failed!!!");
- return NULL;
- }
+ if ( mIonFd < 0 ) {
+ CAMHAL_LOGE("ion_open() failed, error: %d", mIonFd);
+ mIonFd = -1;
+ return NO_INIT;
}
+ }
+
+ return OK;
+}
+
+CameraBuffer* MemoryManager::allocateBufferList(int width, int height, const char* format, int &size, int numBufs)
+{
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_ASSERT(mIonFd != -1);
///We allocate numBufs+1 because the last entry will be marked NULL to indicate end of array, which is used when freeing
///the buffers
const uint numArrayEntriesC = (uint)(numBufs+1);
///Allocate a buffer array
- uint32_t *bufsArr = new uint32_t [numArrayEntriesC];
- if(!bufsArr)
- {
- CAMHAL_LOGEB("Allocation failed when creating buffers array of %d uint32_t elements", numArrayEntriesC);
+ CameraBuffer *buffers = new CameraBuffer [numArrayEntriesC];
+ if(!buffers) {
+ CAMHAL_LOGEB("Allocation failed when creating buffers array of %d CameraBuffer elements", numArrayEntriesC);
goto error;
- }
+ }
///Initialize the array with zeros - this will help us while freeing the array in case of error
///If a value of an array element is NULL, it means we didnt allocate it
- memset(bufsArr, 0, sizeof(*bufsArr) * numArrayEntriesC);
+ memset(buffers, 0, sizeof(CameraBuffer) * numArrayEntriesC);
//2D Allocations are not supported currently
- if(bytes != 0)
- {
+ if(size != 0) {
struct ion_handle *handle;
int mmap_fd;
+ size_t stride;
///1D buffers
- for (int i = 0; i < numBufs; i++)
- {
- int ret = ion_alloc(mIonFd, bytes, 0, 1 << ION_HEAP_TYPE_CARVEOUT, &handle);
- if(ret < 0)
- {
- CAMHAL_LOGEB("ion_alloc resulted in error %d", ret);
+ for (int i = 0; i < numBufs; i++) {
+ unsigned char *data;
+ int ret = ion_alloc(mIonFd, size, 0, 1 << ION_HEAP_TYPE_CARVEOUT,
+ &handle);
+ if((ret < 0) || ((int)handle == -ENOMEM)) {
+ ret = ion_alloc_tiler(mIonFd, (size_t)size, 1, TILER_PIXEL_FMT_PAGE,
+ OMAP_ION_HEAP_TILER_MASK, &handle, &stride);
+ }
+
+ if((ret < 0) || ((int)handle == -ENOMEM)) {
+ CAMHAL_LOGEB("FAILED to allocate ion buffer of size=%d. ret=%d(0x%x)", size, ret, ret);
goto error;
- }
+ }
- CAMHAL_LOGDB("Before mapping, handle = %x, nSize = %d", handle, bytes);
- if ((ret = ion_map(mIonFd, handle, bytes, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
- (unsigned char**)&bufsArr[i], &mmap_fd)) < 0)
- {
+ CAMHAL_LOGDB("Before mapping, handle = %p, nSize = %d", handle, size);
+ if ((ret = ion_map(mIonFd, handle, size, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
+ &data, &mmap_fd)) < 0) {
CAMHAL_LOGEB("Userspace mapping of ION buffers returned error %d", ret);
ion_free(mIonFd, handle);
goto error;
- }
-
- mIonHandleMap.add(bufsArr[i], (unsigned int)handle);
- mIonFdMap.add(bufsArr[i], (unsigned int) mmap_fd);
- mIonBufLength.add(bufsArr[i], (unsigned int) bytes);
}
+ buffers[i].type = CAMERA_BUFFER_ION;
+ buffers[i].opaque = data;
+ buffers[i].mapped = data;
+ buffers[i].ion_handle = handle;
+ buffers[i].ion_fd = mIonFd;
+ buffers[i].fd = mmap_fd;
+ buffers[i].size = size;
+
}
- else // If bytes is not zero, then it is a 2-D tiler buffer request
- {
- }
+ }
- LOG_FUNCTION_NAME_EXIT;
+ LOG_FUNCTION_NAME_EXIT;
- return (void*)bufsArr;
+ return buffers;
error:
- ALOGE("Freeing buffers already allocated after error occurred");
- if(bufsArr)
- freeBuffer(bufsArr);
+
+ CAMHAL_LOGE("Freeing buffers already allocated after error occurred");
+ if(buffers)
+ freeBufferList(buffers);
if ( NULL != mErrorNotifier.get() )
- {
mErrorNotifier->errorNotify(-ENOMEM);
- }
+ LOG_FUNCTION_NAME_EXIT;
- if (mIonFd >= 0)
- {
- ion_close(mIonFd);
- mIonFd = -1;
- }
+ return NULL;
+}
+
+CameraBuffer* MemoryManager::getBufferList(int *numBufs) {
+ LOG_FUNCTION_NAME;
+ if (numBufs) *numBufs = -1;
- LOG_FUNCTION_NAME_EXIT;
return NULL;
}
@@ -152,50 +166,38 @@ int MemoryManager::getFd()
return -1;
}
-int MemoryManager::freeBuffer(void* buf)
+int MemoryManager::freeBufferList(CameraBuffer *buffers)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- uint32_t *bufEntry = (uint32_t*)buf;
+ int i;
- if(!bufEntry)
+ if(!buffers)
{
CAMHAL_LOGEA("NULL pointer passed to freebuffer");
LOG_FUNCTION_NAME_EXIT;
return BAD_VALUE;
}
- while(*bufEntry)
+ i = 0;
+ while(buffers[i].type == CAMERA_BUFFER_ION)
{
- unsigned int ptr = (unsigned int) *bufEntry++;
- if(mIonBufLength.valueFor(ptr))
+ if(buffers[i].size)
{
- munmap((void *)ptr, mIonBufLength.valueFor(ptr));
- close(mIonFdMap.valueFor(ptr));
- ion_free(mIonFd, (ion_handle*)mIonHandleMap.valueFor(ptr));
- mIonHandleMap.removeItem(ptr);
- mIonBufLength.removeItem(ptr);
- mIonFdMap.removeItem(ptr);
+ munmap(buffers[i].opaque, buffers[i].size);
+ close(buffers[i].fd);
+ ion_free(mIonFd, buffers[i].ion_handle);
}
else
{
CAMHAL_LOGEA("Not a valid Memory Manager buffer");
}
+ i++;
}
- ///@todo Check if this way of deleting array is correct, else use malloc/free
- uint32_t * bufArr = (uint32_t*)buf;
- delete [] bufArr;
+ delete [] buffers;
- if(mIonBufLength.size() == 0)
- {
- if(mIonFd >= 0)
- {
- ion_close(mIonFd);
- mIonFd = -1;
- }
- }
LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -222,7 +224,8 @@ status_t MemoryManager::setErrorHandler(ErrorNotifier *errorNotifier)
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
/*--------------------MemoryManager Class ENDS here-----------------------------*/
diff --git a/camera/NV12_resize.c b/camera/NV12_resize.cpp
index 7f92fb2..971ee38 100644
--- a/camera/NV12_resize.c
+++ b/camera/NV12_resize.cpp
@@ -1,12 +1,27 @@
-#include "NV12_resize.h"
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
-//#define LOG_NDEBUG 0
-#define LOG_NIDEBUG 0
-#define LOG_NDDEBUG 0
+#include "NV12_resize.h"
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
#define LOG_TAG "NV12_resize"
+
#define STRIDE 4096
-#include <utils/Log.h>
/*==========================================================================
* Function Name : VT_resizeFrame_Video_opt2_lp
@@ -23,95 +38,82 @@
* faster version.
============================================================================*/
mmBool
-VT_resizeFrame_Video_opt2_lp
-(
- structConvImage* i_img_ptr, /* Points to the input image */
- structConvImage* o_img_ptr, /* Points to the output image */
- IC_rect_type* cropout, /* how much to resize to in final image */
- mmUint16 dummy /* Transparent pixel value */
- )
-{
- ALOGV("VT_resizeFrame_Video_opt2_lp+");
-
- mmUint16 row,col;
- mmUint32 resizeFactorX;
- mmUint32 resizeFactorY;
-
-
- mmUint16 x, y;
-
- mmUchar* ptr8;
- mmUchar *ptr8Cb, *ptr8Cr;
-
-
- mmUint16 xf, yf;
- mmUchar* inImgPtrY;
- mmUchar* inImgPtrU;
- mmUchar* inImgPtrV;
- mmUint32 cox, coy, codx, cody;
- mmUint16 idx,idy, idxC;
-
- if(i_img_ptr->uWidth == o_img_ptr->uWidth)
- {
- if(i_img_ptr->uHeight == o_img_ptr->uHeight)
- {
- ALOGV("************************f(i_img_ptr->uHeight == o_img_ptr->uHeight) are same *********************\n");
- ALOGV("************************(i_img_ptr->width == %d" , i_img_ptr->uWidth );
- ALOGV("************************(i_img_ptr->uHeight == %d" , i_img_ptr->uHeight );
- ALOGV("************************(o_img_ptr->width == %d" ,o_img_ptr->uWidth );
- ALOGV("************************(o_img_ptr->uHeight == %d" , o_img_ptr->uHeight );
- }
- }
-
- if (!i_img_ptr || !i_img_ptr->imgPtr ||
- !o_img_ptr || !o_img_ptr->imgPtr)
- {
- ALOGE("Image Point NULL");
- ALOGV("VT_resizeFrame_Video_opt2_lp-");
- return FALSE;
- }
-
- inImgPtrY = (mmUchar *) i_img_ptr->imgPtr + i_img_ptr->uOffset;
- inImgPtrU = (mmUchar *) i_img_ptr->clrPtr + i_img_ptr->uOffset/2;
- inImgPtrV = (mmUchar*)inImgPtrU + 1;
-
- if (cropout == NULL)
- {
- cox = 0;
- coy = 0;
- codx = o_img_ptr->uWidth;
- cody = o_img_ptr->uHeight;
- }
- else
- {
- cox = cropout->x;
- coy = cropout->y;
- codx = cropout->uWidth;
- cody = cropout->uHeight;
- }
- idx = i_img_ptr->uWidth;
- idy = i_img_ptr->uHeight;
-
- /* make sure valid input size */
- if (idx < 1 || idy < 1 || i_img_ptr->uStride < 1)
- {
- ALOGE("idx or idy less then 1 idx = %d idy = %d stride = %d", idx, idy, i_img_ptr->uStride);
- ALOGV("VT_resizeFrame_Video_opt2_lp-");
- return FALSE;
- }
-
- resizeFactorX = ((idx-1)<<9) / codx;
- resizeFactorY = ((idy-1)<<9) / cody;
-
- if(i_img_ptr->eFormat == IC_FORMAT_YCbCr420_lp &&
- o_img_ptr->eFormat == IC_FORMAT_YCbCr420_lp)
- {
- ptr8 = (mmUchar*)o_img_ptr->imgPtr + cox + coy*o_img_ptr->uWidth;
+VT_resizeFrame_Video_opt2_lp(
+ structConvImage* i_img_ptr, /* Points to the input image */
+ structConvImage* o_img_ptr, /* Points to the output image */
+ IC_rect_type* cropout, /* how much to resize to in final image */
+ mmUint16 dummy /* Transparent pixel value */
+ ) {
+ LOG_FUNCTION_NAME;
+
+ mmUint16 row,col;
+ mmUint32 resizeFactorX;
+ mmUint32 resizeFactorY;
+
+ mmUint16 x, y;
+
+ mmUchar* ptr8;
+ mmUchar *ptr8Cb, *ptr8Cr;
+
+ mmUint16 xf, yf;
+ mmUchar* inImgPtrY;
+ mmUchar* inImgPtrU;
+ mmUchar* inImgPtrV;
+ mmUint32 cox, coy, codx, cody;
+ mmUint16 idx,idy, idxC;
+
+ if ( i_img_ptr->uWidth == o_img_ptr->uWidth ) {
+ if ( i_img_ptr->uHeight == o_img_ptr->uHeight ) {
+ CAMHAL_LOGV("************************ (i_img_ptr->uHeight == o_img_ptr->uHeight) are same *********************\n");
+ CAMHAL_LOGV("************************(i_img_ptr->width == %d" , i_img_ptr->uWidth );
+ CAMHAL_LOGV("************************(i_img_ptr->uHeight == %d" , i_img_ptr->uHeight );
+ CAMHAL_LOGV("************************(o_img_ptr->width == %d" ,o_img_ptr->uWidth );
+ CAMHAL_LOGV("************************(o_img_ptr->uHeight == %d" , o_img_ptr->uHeight );
+ }
+ }
+
+ if ( !i_img_ptr || !i_img_ptr->imgPtr || !o_img_ptr || !o_img_ptr->imgPtr ) {
+ CAMHAL_LOGE("Image Point NULL");
+ return false;
+ }
+
+ inImgPtrY = (mmUchar *) i_img_ptr->imgPtr + i_img_ptr->uOffset;
+ inImgPtrU = (mmUchar *) i_img_ptr->clrPtr + i_img_ptr->uOffset/2;
+ inImgPtrV = (mmUchar*)inImgPtrU + 1;
+
+ if ( !cropout ) {
+ cox = 0;
+ coy = 0;
+ codx = o_img_ptr->uWidth;
+ cody = o_img_ptr->uHeight;
+ } else {
+ cox = cropout->x;
+ coy = cropout->y;
+ codx = cropout->uWidth;
+ cody = cropout->uHeight;
+ }
+ idx = i_img_ptr->uWidth;
+ idy = i_img_ptr->uHeight;
+
+ /* make sure valid input size */
+ if ( idx < 1 || idy < 1 || i_img_ptr->uStride < 1 ) {
+ CAMHAL_LOGE("idx or idy less then 1 idx = %d idy = %d stride = %d", idx, idy, i_img_ptr->uStride);
+ return false;
+ }
+
+ resizeFactorX = ((idx-1)<<9) / codx;
+ resizeFactorY = ((idy-1)<<9) / cody;
+ if( i_img_ptr->eFormat != IC_FORMAT_YCbCr420_lp ||
+ o_img_ptr->eFormat != IC_FORMAT_YCbCr420_lp ) {
+ CAMHAL_LOGE("eFormat not supported");
+ return false;
+ }
+
+ ptr8 = (mmUchar*)o_img_ptr->imgPtr + cox + coy*o_img_ptr->uWidth;
////////////////////////////for Y//////////////////////////
- for (row=0; row < cody; row++)
- {
+ for ( row = 0; row < cody; row++ ) {
mmUchar *pu8Yrow1 = NULL;
mmUchar *pu8Yrow2 = NULL;
y = (mmUint16) ((mmUint32) (row*resizeFactorY) >> 9);
@@ -119,8 +121,7 @@ VT_resizeFrame_Video_opt2_lp
pu8Yrow1 = inImgPtrY + (y) * i_img_ptr->uStride;
pu8Yrow2 = pu8Yrow1 + i_img_ptr->uStride;
- for (col=0; col < codx; col++)
- {
+ for ( col = 0; col < codx; col++ ) {
mmUchar in11, in12, in21, in22;
mmUchar *pu8ptr1 = NULL;
mmUchar *pu8ptr2 = NULL;
@@ -128,12 +129,9 @@ VT_resizeFrame_Video_opt2_lp
mmUint16 accum_1;
//mmUint32 accum_W;
-
-
x = (mmUint16) ((mmUint32) (col*resizeFactorX) >> 9);
xf = (mmUchar) ((mmUint32) ((col*resizeFactorX) >> 6) & 0x7);
-
//accum_W = 0;
accum_1 = 0;
@@ -175,7 +173,6 @@ VT_resizeFrame_Video_opt2_lp
accum_1 = (accum_1>>6);
*ptr8 = (mmUchar)accum_1 ;
-
ptr8++;
}
ptr8 = ptr8 + (o_img_ptr->uStride - codx);
@@ -189,8 +186,7 @@ VT_resizeFrame_Video_opt2_lp
ptr8Cr = (mmUchar*)(ptr8Cb+1);
idxC = (idx>>1);
- for (row=0; row < (((cody)>>1)); row++)
- {
+ for ( row = 0; row < (((cody)>>1)); row++ ) {
mmUchar *pu8Cbr1 = NULL;
mmUchar *pu8Cbr2 = NULL;
mmUchar *pu8Crr1 = NULL;
@@ -204,8 +200,7 @@ VT_resizeFrame_Video_opt2_lp
pu8Crr1 = inImgPtrV + (y) * i_img_ptr->uStride;
pu8Crr2 = pu8Crr1 + i_img_ptr->uStride;
- for (col=0; col < (((codx)>>1)); col++)
- {
+ for ( col = 0; col < (((codx)>>1)); col++ ) {
mmUchar in11, in12, in21, in22;
mmUchar *pu8Cbc1 = NULL;
mmUchar *pu8Cbc2 = NULL;
@@ -216,21 +211,17 @@ VT_resizeFrame_Video_opt2_lp
mmUint16 accum_1Cb, accum_1Cr;
//mmUint32 accum_WCb, accum_WCr;
-
x = (mmUint16) ((mmUint32) (col*resizeFactorX) >> 9);
xf = (mmUchar) ((mmUint32) ((col*resizeFactorX) >> 6) & 0x7);
-
//accum_WCb = accum_WCr = 0;
accum_1Cb = accum_1Cr = 0;
pu8Cbc1 = pu8Cbr1 + (x*2);
pu8Cbc2 = pu8Cbr2 + (x*2);
- pu8Crc1 = pu8Crr1 + (x*2);
+ pu8Crc1 = pu8Crr1 + (x*2);
pu8Crc2 = pu8Crr2 + (x*2);
-
-
/* A pixel */
w = bWeights[xf][yf][0];
@@ -238,7 +229,7 @@ VT_resizeFrame_Video_opt2_lp
accum_1Cb = (w * in11);
// accum_WCb += (w);
- in11 = *(pu8Crc1);
+ in11 = *(pu8Crc1);
accum_1Cr = (w * in11);
//accum_WCr += (w);
@@ -260,7 +251,7 @@ VT_resizeFrame_Video_opt2_lp
accum_1Cb += (w * in21);
//accum_WCb += (w);
- in21 = *(pu8Crc2);
+ in21 = *(pu8Crc2);
accum_1Cr += (w * in21);
//accum_WCr += (w);
@@ -280,7 +271,6 @@ VT_resizeFrame_Video_opt2_lp
accum_1Cb = (accum_1Cb>>6);
*ptr8Cb = (mmUchar)accum_1Cb ;
-
accum_1Cr = (accum_1Cr >> 6);
*ptr8Cr = (mmUchar)accum_1Cr ;
@@ -294,14 +284,7 @@ VT_resizeFrame_Video_opt2_lp
ptr8Cr = ptr8Cr + (o_img_ptr->uStride-codx);
}
///////////////////For Cb- Cr////////////////////////////////////////
- }
- else
- {
- ALOGE("eFormat not supported");
- ALOGV("VT_resizeFrame_Video_opt2_lp-");
- return FALSE;
- }
- ALOGV("success");
- ALOGV("VT_resizeFrame_Video_opt2_lp-");
- return TRUE;
+
+ CAMHAL_LOGV("success");
+ return true;
}
diff --git a/camera/OMXCameraAdapter/OMX3A.cpp b/camera/OMXCameraAdapter/OMX3A.cpp
index 7ae50e4..e7171f6 100644
--- a/camera/OMXCameraAdapter/OMX3A.cpp
+++ b/camera/OMXCameraAdapter/OMX3A.cpp
@@ -21,24 +21,19 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include "ErrorUtils.h"
#include <cutils/properties.h>
-#undef TRUE
-#undef FALSE
-#define TRUE "true"
-#define FALSE "false"
-
#define METERING_AREAS_RANGE 0xFF
-namespace android {
+static const char PARAM_SEP[] = ",";
+
+namespace Ti {
+namespace Camera {
+
const SceneModesEntry* OMXCameraAdapter::getSceneModeEntry(const char* name,
OMX_SCENEMODETYPE scene) {
const SceneModesEntry* cameraLUT = NULL;
@@ -69,7 +64,7 @@ const SceneModesEntry* OMXCameraAdapter::getSceneModeEntry(const char* name,
return entry;
}
-status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
+status_t OMXCameraAdapter::setParameters3A(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -81,9 +76,9 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(m3ASettingsUpdateLock);
+ android::AutoMutex lock(m3ASettingsUpdateLock);
- str = params.get(CameraParameters::KEY_SCENE_MODE);
+ str = params.get(android::CameraParameters::KEY_SCENE_MODE);
mode = getLUTvalue_HALtoOMX( str, SceneLUT);
if ( mFirstTimeInit || ((str != NULL) && ( mParameters3A.SceneMode != mode )) ) {
if ( 0 <= mode ) {
@@ -113,22 +108,52 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
#ifdef OMAP_ENHANCEMENT
-
- str = params.get(TICameraParameters::KEY_EXPOSURE_MODE);
- mode = getLUTvalue_HALtoOMX( str, ExpLUT);
- if ( ( str != NULL ) && ( mParameters3A.Exposure != mode ))
- {
- mParameters3A.Exposure = mode;
- CAMHAL_LOGDB("Exposure mode %d", mode);
- if ( 0 <= mParameters3A.Exposure )
- {
- mPending3Asettings |= SetExpMode;
+ if ( (str = params.get(TICameraParameters::KEY_EXPOSURE_MODE)) != NULL ) {
+ mode = getLUTvalue_HALtoOMX(str, ExpLUT);
+ if ( mParameters3A.Exposure != mode ) {
+ // If either the new or the old exposure mode is manual set also
+ // the SetManualExposure flag to call setManualExposureVal where
+ // the auto gain and exposure flags are configured
+ if ( mParameters3A.Exposure == OMX_ExposureControlOff ||
+ mode == OMX_ExposureControlOff ) {
+ mPending3Asettings |= SetManualExposure;
+ }
+ mParameters3A.Exposure = mode;
+ CAMHAL_LOGDB("Exposure mode %d", mode);
+ if ( 0 <= mParameters3A.Exposure ) {
+ mPending3Asettings |= SetExpMode;
}
}
-
+ if ( mode == OMX_ExposureControlOff ) {
+ mode = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE);
+ if ( mParameters3A.ManualExposure != mode ) {
+ mParameters3A.ManualExposure = mode;
+ CAMHAL_LOGDB("Manual Exposure = %d", mode);
+ mPending3Asettings |= SetManualExposure;
+ }
+ mode = params.getInt(TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT);
+ if ( mParameters3A.ManualExposureRight != mode ) {
+ mParameters3A.ManualExposureRight = mode;
+ CAMHAL_LOGDB("Manual Exposure right = %d", mode);
+ mPending3Asettings |= SetManualExposure;
+ }
+ mode = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO);
+ if ( mParameters3A.ManualGain != mode ) {
+ mParameters3A.ManualGain = mode;
+ CAMHAL_LOGDB("Manual Gain = %d", mode);
+ mPending3Asettings |= SetManualExposure;
+ }
+ mode = params.getInt(TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT);
+ if ( mParameters3A.ManualGainRight != mode ) {
+ mParameters3A.ManualGainRight = mode;
+ CAMHAL_LOGDB("Manual Gain right = %d", mode);
+ mPending3Asettings |= SetManualExposure;
+ }
+ }
+ }
#endif
- str = params.get(CameraParameters::KEY_WHITE_BALANCE);
+ str = params.get(android::CameraParameters::KEY_WHITE_BALANCE);
mode = getLUTvalue_HALtoOMX( str, WBalLUT);
if (mFirstTimeInit || ((str != NULL) && (mode != mParameters3A.WhiteBallance)))
{
@@ -141,7 +166,6 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
#ifdef OMAP_ENHANCEMENT
-
varint = params.getInt(TICameraParameters::KEY_CONTRAST);
if ( 0 <= varint )
{
@@ -189,10 +213,9 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
mPending3Asettings |= SetBrightness;
}
}
-
#endif
- str = params.get(CameraParameters::KEY_ANTIBANDING);
+ str = params.get(android::CameraParameters::KEY_ANTIBANDING);
mode = getLUTvalue_HALtoOMX(str,FlickerLUT);
if ( mFirstTimeInit || ( ( str != NULL ) && ( mParameters3A.Flicker != mode ) ))
{
@@ -205,7 +228,6 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
#ifdef OMAP_ENHANCEMENT
-
str = params.get(TICameraParameters::KEY_ISO);
mode = getLUTvalue_HALtoOMX(str, IsoLUT);
CAMHAL_LOGVB("ISO mode arrived in HAL : %s", str);
@@ -218,10 +240,9 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
mPending3Asettings |= SetISO;
}
}
-
#endif
- str = params.get(CameraParameters::KEY_FOCUS_MODE);
+ str = params.get(android::CameraParameters::KEY_FOCUS_MODE);
mode = getLUTvalue_HALtoOMX(str, FocusLUT);
if ( (mFirstTimeInit || ((str != NULL) && (mParameters3A.Focus != mode))))
{
@@ -237,19 +258,15 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
CAMHAL_LOGDB("Focus %x", mParameters3A.Focus);
}
- str = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION);
- varint = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
- if ( mFirstTimeInit ||
- (( str != NULL ) &&
- (mParameters3A.EVCompensation != varint )))
- {
+ str = params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION);
+ varint = params.getInt(android::CameraParameters::KEY_EXPOSURE_COMPENSATION);
+ if ( mFirstTimeInit || (str && (mParameters3A.EVCompensation != varint))) {
CAMHAL_LOGDB("Setting EV Compensation to %d", varint);
-
mParameters3A.EVCompensation = varint;
mPending3Asettings |= SetEVCompensation;
}
- str = params.get(CameraParameters::KEY_FLASH_MODE);
+ str = params.get(android::CameraParameters::KEY_FLASH_MODE);
mode = getLUTvalue_HALtoOMX( str, FlashLUT);
if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.FlashMode != mode )) )
{
@@ -267,7 +284,7 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
CAMHAL_LOGVB("Flash Setting %s", str);
CAMHAL_LOGVB("FlashMode %d", mParameters3A.FlashMode);
- str = params.get(CameraParameters::KEY_EFFECT);
+ str = params.get(android::CameraParameters::KEY_EFFECT);
mode = getLUTvalue_HALtoOMX( str, EffLUT);
if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.Effect != mode )) )
{
@@ -279,13 +296,13 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
- str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED);
- if ( (str != NULL) && (!strcmp(str, "true")) )
+ str = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED);
+ if ( (str != NULL) && (!strcmp(str, android::CameraParameters::TRUE)) )
{
OMX_BOOL lock = OMX_FALSE;
mUserSetExpLock = OMX_FALSE;
- str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
- if (str && ((strcmp(str, "true")) == 0))
+ str = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
+ if (str && ((strcmp(str, android::CameraParameters::TRUE)) == 0))
{
CAMHAL_LOGVA("Locking Exposure");
lock = OMX_TRUE;
@@ -304,13 +321,13 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
- str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED);
- if ( (str != NULL) && (!strcmp(str, "true")) )
+ str = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED);
+ if ( (str != NULL) && (!strcmp(str, android::CameraParameters::TRUE)) )
{
OMX_BOOL lock = OMX_FALSE;
mUserSetWbLock = OMX_FALSE;
- str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
- if (str && ((strcmp(str, "true")) == 0))
+ str = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
+ if (str && ((strcmp(str, android::CameraParameters::TRUE)) == 0))
{
CAMHAL_LOGVA("Locking WhiteBalance");
lock = OMX_TRUE;
@@ -329,24 +346,24 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
str = params.get(TICameraParameters::KEY_AUTO_FOCUS_LOCK);
- if (str && (strcmp(str, TRUE) == 0) && (mParameters3A.FocusLock != OMX_TRUE)) {
+ if (str && (strcmp(str, android::CameraParameters::TRUE) == 0) && (mParameters3A.FocusLock != OMX_TRUE)) {
CAMHAL_LOGVA("Locking Focus");
mParameters3A.FocusLock = OMX_TRUE;
setFocusLock(mParameters3A);
- } else if (str && (strcmp(str, FALSE) == 0) && (mParameters3A.FocusLock != OMX_FALSE)) {
+ } else if (str && (strcmp(str, android::CameraParameters::FALSE) == 0) && (mParameters3A.FocusLock != OMX_FALSE)) {
CAMHAL_LOGVA("UnLocking Focus");
mParameters3A.FocusLock = OMX_FALSE;
setFocusLock(mParameters3A);
}
- str = params.get(CameraParameters::KEY_METERING_AREAS);
+ str = params.get(android::CameraParameters::KEY_METERING_AREAS);
if ( (str != NULL) ) {
size_t MAX_METERING_AREAS;
- Vector< sp<CameraArea> > tempAreas;
+ android::Vector<android::sp<CameraArea> > tempAreas;
- MAX_METERING_AREAS = atoi(params.get(CameraParameters::KEY_MAX_NUM_METERING_AREAS));
+ MAX_METERING_AREAS = atoi(params.get(android::CameraParameters::KEY_MAX_NUM_METERING_AREAS));
- Mutex::Autolock lock(mMeteringAreasLock);
+ android::AutoMutex lock(mMeteringAreasLock);
ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
@@ -359,7 +376,7 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
if ( MAX_METERING_AREAS >= mMeteringAreas.size() ) {
CAMHAL_LOGDB("Setting Metering Areas %s",
- params.get(CameraParameters::KEY_METERING_AREAS));
+ params.get(android::CameraParameters::KEY_METERING_AREAS));
mPending3Asettings |= SetMeteringAreas;
} else {
@@ -370,11 +387,48 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
+// TI extensions for enable/disable algos
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_FIXED_GAMMA,
+ mParameters3A.AlgoFixedGamma, SetAlgoFixedGamma, "Fixed Gamma");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_NSF1,
+ mParameters3A.AlgoNSF1, SetAlgoNSF1, "NSF1");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_NSF2,
+ mParameters3A.AlgoNSF2, SetAlgoNSF2, "NSF2");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_SHARPENING,
+ mParameters3A.AlgoSharpening, SetAlgoSharpening, "Sharpening");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_THREELINCOLORMAP,
+ mParameters3A.AlgoThreeLinColorMap, SetAlgoThreeLinColorMap, "ThreeLinColorMap");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_GIC, mParameters3A.AlgoGIC, SetAlgoGIC, "GIC");
+
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+void OMXCameraAdapter::declareParameter3ABool(const android::CameraParameters &params, const char *key,
+ OMX_BOOL &current_setting, E3ASettingsFlags pending,
+ const char *msg)
+{
+ OMX_BOOL val = OMX_TRUE;
+ const char *str = params.get(key);
+
+ if (str && ((strcmp(str, android::CameraParameters::FALSE)) == 0))
+ {
+ CAMHAL_LOGVB("Disabling %s", msg);
+ val = OMX_FALSE;
+ }
+ else
+ {
+ CAMHAL_LOGVB("Enabling %s", msg);
+ }
+ if (current_setting != val)
+ {
+ current_setting = val;
+ CAMHAL_LOGDB("%s %s", msg, current_setting ? "enabled" : "disabled");
+ mPending3Asettings |= pending;
+ }
+}
+
int OMXCameraAdapter::getLUTvalue_HALtoOMX(const char * HalValue, LUTtype LUT)
{
int LUTsize = LUT.size;
@@ -396,30 +450,23 @@ const char* OMXCameraAdapter::getLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT)
return NULL;
}
-status_t OMXCameraAdapter::init3AParams(Gen3A_settings &Gen3A)
+int OMXCameraAdapter::getMultipleLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT, char * supported)
{
- LOG_FUNCTION_NAME;
-
- Gen3A.Effect = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EFFECT, EffLUT);
- Gen3A.FlashMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FLASH_MODE, FlashLUT);
- Gen3A.SceneMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_SCENE_MODE, SceneLUT);
- Gen3A.EVCompensation = atoi(OMXCameraAdapter::DEFAULT_EV_COMPENSATION);
- Gen3A.Focus = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FOCUS_MODE, FocusLUT);
- Gen3A.ISO = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ISO_MODE, IsoLUT);
- Gen3A.Flicker = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ANTIBANDING, FlickerLUT);
- Gen3A.Brightness = atoi(OMXCameraAdapter::DEFAULT_BRIGHTNESS);
- Gen3A.Saturation = atoi(OMXCameraAdapter::DEFAULT_SATURATION) - SATURATION_OFFSET;
- Gen3A.Sharpness = atoi(OMXCameraAdapter::DEFAULT_SHARPNESS) - SHARPNESS_OFFSET;
- Gen3A.Contrast = atoi(OMXCameraAdapter::DEFAULT_CONTRAST) - CONTRAST_OFFSET;
- Gen3A.WhiteBallance = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_WB, WBalLUT);
- Gen3A.Exposure = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EXPOSURE_MODE, ExpLUT);
- Gen3A.ExposureLock = OMX_FALSE;
- Gen3A.FocusLock = OMX_FALSE;
- Gen3A.WhiteBalanceLock = OMX_FALSE;
-
- LOG_FUNCTION_NAME_EXIT;
+ int num = 0;
+ int remaining_size;
+ int LUTsize = LUT.size;
+ for(int i = 0; i < LUTsize; i++)
+ if( LUT.Table[i].omxDefinition == OMXValue )
+ {
+ num++;
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ remaining_size = ((((int)MAX_PROP_VALUE_LENGTH - 1 - (int)strlen(supported)) < 0) ? 0 : (MAX_PROP_VALUE_LENGTH - 1 - strlen(supported)));
+ strncat(supported, LUT.Table[i].userDefinition, remaining_size);
+ }
- return NO_ERROR;
+ return num;
}
status_t OMXCameraAdapter::setExposureMode(Gen3A_settings& Gen3A)
@@ -453,7 +500,7 @@ status_t OMXCameraAdapter::setExposureMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
static bool isFlashDisabled() {
@@ -470,14 +517,80 @@ static bool isFlashDisabled() {
char value[PROPERTY_VALUE_MAX];
if (property_get("camera.flash_off", value, NULL) &&
- (!strcasecmp(value, "true") || !strcasecmp(value, "1"))) {
- ALOGW("flash is disabled for testing purpose");
+ (!strcasecmp(value, android::CameraParameters::TRUE) || !strcasecmp(value, "1"))) {
+ CAMHAL_LOGW("flash is disabled for testing purpose");
return true;
}
return false;
}
+status_t OMXCameraAdapter::setManualExposureVal(Gen3A_settings& Gen3A) {
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_EXPOSUREVALUETYPE expVal;
+ OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE expValRight;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState ) {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&expVal, OMX_CONFIG_EXPOSUREVALUETYPE);
+ OMX_INIT_STRUCT_PTR (&expValRight, OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE);
+ expVal.nPortIndex = OMX_ALL;
+ expValRight.nPortIndex = OMX_ALL;
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expVal);
+ if ( OMX_ErrorNone == eError ) {
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+ &expValRight);
+ }
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_GetConfig error 0x%x (manual exposure values)", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ if ( Gen3A.Exposure != OMX_ExposureControlOff ) {
+ expVal.bAutoShutterSpeed = OMX_TRUE;
+ expVal.bAutoSensitivity = OMX_TRUE;
+ } else {
+ expVal.bAutoShutterSpeed = OMX_FALSE;
+ expVal.nShutterSpeedMsec = Gen3A.ManualExposure;
+ expValRight.nShutterSpeedMsec = Gen3A.ManualExposureRight;
+ if ( Gen3A.ManualGain <= 0 || Gen3A.ManualGainRight <= 0 ) {
+ expVal.bAutoSensitivity = OMX_TRUE;
+ } else {
+ expVal.bAutoSensitivity = OMX_FALSE;
+ expVal.nSensitivity = Gen3A.ManualGain;
+ expValRight.nSensitivity = Gen3A.ManualGainRight;
+ }
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expVal);
+ if ( OMX_ErrorNone == eError ) {
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+ &expValRight);
+ }
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error 0x%x while configuring manual exposure values", eError);
+ } else {
+ CAMHAL_LOGDA("Camera manual exposure values configured successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
status_t OMXCameraAdapter::setFlashMode(Gen3A_settings& Gen3A)
{
status_t ret = NO_ERROR;
@@ -544,7 +657,7 @@ status_t OMXCameraAdapter::setFlashMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getFlashMode(Gen3A_settings& Gen3A)
@@ -576,7 +689,7 @@ status_t OMXCameraAdapter::getFlashMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setFocusMode(Gen3A_settings& Gen3A)
@@ -685,7 +798,7 @@ status_t OMXCameraAdapter::setFocusMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getFocusMode(Gen3A_settings& Gen3A)
@@ -716,7 +829,7 @@ status_t OMXCameraAdapter::getFocusMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setScene(Gen3A_settings& Gen3A)
@@ -760,7 +873,7 @@ status_t OMXCameraAdapter::setScene(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setEVCompensation(Gen3A_settings& Gen3A)
@@ -804,7 +917,7 @@ status_t OMXCameraAdapter::setEVCompensation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getEVCompensation(Gen3A_settings& Gen3A)
@@ -835,7 +948,7 @@ status_t OMXCameraAdapter::getEVCompensation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setWBMode(Gen3A_settings& Gen3A)
@@ -855,21 +968,9 @@ status_t OMXCameraAdapter::setWBMode(Gen3A_settings& Gen3A)
wb.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
wb.eWhiteBalControl = ( OMX_WHITEBALCONTROLTYPE ) Gen3A.WhiteBallance;
- if ( WB_FACE_PRIORITY == Gen3A.WhiteBallance )
- {
- //Disable Region priority and enable Face priority
- setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false);
- setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, true);
-
- //Then set the mode to auto
- wb.eWhiteBalControl = OMX_WhiteBalControlAuto;
- }
- else
- {
- //Disable Face and Region priority
- setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, false);
- setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false);
- }
+ // disable face and region priorities
+ setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, false);
+ setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false);
eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigCommonWhiteBalance,
@@ -956,7 +1057,7 @@ status_t OMXCameraAdapter::setFlicker(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setBrightness(Gen3A_settings& Gen3A)
@@ -993,7 +1094,7 @@ status_t OMXCameraAdapter::setBrightness(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setContrast(Gen3A_settings& Gen3A)
@@ -1076,7 +1177,7 @@ status_t OMXCameraAdapter::setSharpness(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getSharpness(Gen3A_settings& Gen3A)
@@ -1107,7 +1208,7 @@ status_t OMXCameraAdapter::getSharpness(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setSaturation(Gen3A_settings& Gen3A)
@@ -1144,7 +1245,7 @@ status_t OMXCameraAdapter::setSaturation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getSaturation(Gen3A_settings& Gen3A)
@@ -1175,13 +1276,14 @@ status_t OMXCameraAdapter::getSaturation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setISO(Gen3A_settings& Gen3A)
{
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_CONFIG_EXPOSUREVALUETYPE expValues;
+ OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE expValRight;
LOG_FUNCTION_NAME;
@@ -1191,41 +1293,60 @@ status_t OMXCameraAdapter::setISO(Gen3A_settings& Gen3A)
return NO_INIT;
}
+ // In case of manual exposure Gain is applied from setManualExposureVal
+ if ( Gen3A.Exposure == OMX_ExposureControlOff ) {
+ return NO_ERROR;
+ }
+
OMX_INIT_STRUCT_PTR (&expValues, OMX_CONFIG_EXPOSUREVALUETYPE);
+ OMX_INIT_STRUCT_PTR (&expValRight, OMX_TI_CONFIG_EXPOSUREVALUERIGHTTYPE);
expValues.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ expValRight.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
- OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
- OMX_IndexConfigCommonExposureValue,
- &expValues);
+ eError = OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expValues);
- if( 0 == Gen3A.ISO )
- {
+ if ( OMX_ErrorNone == eError ) {
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+ &expValRight);
+ }
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_GetConfig error 0x%x (manual exposure values)", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ if( 0 == Gen3A.ISO ) {
expValues.bAutoSensitivity = OMX_TRUE;
- }
- else
- {
+ } else {
expValues.bAutoSensitivity = OMX_FALSE;
expValues.nSensitivity = Gen3A.ISO;
- }
+ expValRight.nSensitivity = expValues.nSensitivity;
+ }
eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
- OMX_IndexConfigCommonExposureValue,
- &expValues);
- if ( OMX_ErrorNone != eError )
- {
+ OMX_IndexConfigCommonExposureValue,
+ &expValues);
+
+ if ( OMX_ErrorNone == eError ) {
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigRightExposureValue,
+ &expValRight);
+ }
+ if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while configuring ISO 0x%x error = 0x%x",
( unsigned int ) expValues.nSensitivity,
eError);
- }
- else
- {
+ } else {
CAMHAL_LOGDB("ISO 0x%x configured successfully",
( unsigned int ) expValues.nSensitivity);
- }
+ }
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getISO(Gen3A_settings& Gen3A)
@@ -1256,7 +1377,7 @@ status_t OMXCameraAdapter::getISO(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setEffect(Gen3A_settings& Gen3A)
@@ -1293,7 +1414,7 @@ status_t OMXCameraAdapter::setEffect(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setWhiteBalanceLock(Gen3A_settings& Gen3A)
@@ -1325,7 +1446,7 @@ status_t OMXCameraAdapter::setWhiteBalanceLock(Gen3A_settings& Gen3A)
}
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setExposureLock(Gen3A_settings& Gen3A)
@@ -1357,7 +1478,7 @@ status_t OMXCameraAdapter::setExposureLock(Gen3A_settings& Gen3A)
}
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setFocusLock(Gen3A_settings& Gen3A)
@@ -1388,7 +1509,7 @@ status_t OMXCameraAdapter::setFocusLock(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_BOOL toggleFocus)
@@ -1422,7 +1543,6 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
}
else
{
- const char *lock_state_exp = toggleExp ? TRUE : FALSE;
CAMHAL_LOGDA("Exposure Lock GetConfig successfull");
/* Apply locks only when not applied already */
@@ -1431,7 +1551,6 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
setExposureLock(mParameters3A);
}
- mParams.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, lock_state_exp);
}
OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE);
@@ -1469,7 +1588,6 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
}
else
{
- const char *lock_state_wb = toggleWb ? TRUE : FALSE;
CAMHAL_LOGDA("WhiteBalance Lock GetConfig successfull");
/* Apply locks only when not applied already */
@@ -1478,10 +1596,9 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
setWhiteBalanceLock(mParameters3A);
}
- mParams.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, lock_state_wb);
}
EXIT:
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
@@ -1489,14 +1606,21 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_ALGOAREASTYPE **meteringAreas;
+ CameraBuffer *bufferlist;
+ OMX_ALGOAREASTYPE *meteringAreas;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
MemoryManager memMgr;
int areasSize = 0;
LOG_FUNCTION_NAME
- Mutex::Autolock lock(mMeteringAreasLock);
+ ret = memMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
+
+ android::AutoMutex lock(mMeteringAreasLock);
if ( OMX_StateInvalid == mComponentState )
{
@@ -1505,7 +1629,8 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
}
areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
- meteringAreas = (OMX_ALGOAREASTYPE**) memMgr.allocateBuffer(0, 0, NULL, areasSize, 1);
+ bufferlist = memMgr.allocateBufferList(0, 0, NULL, areasSize, 1);
+ meteringAreas = (OMX_ALGOAREASTYPE *)bufferlist[0].opaque;
OMXCameraPortParameters * mPreviewData = NULL;
mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
@@ -1516,37 +1641,47 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
return -ENOMEM;
}
- OMX_INIT_STRUCT_PTR (meteringAreas[0], OMX_ALGOAREASTYPE);
+ OMX_INIT_STRUCT_PTR (meteringAreas, OMX_ALGOAREASTYPE);
- meteringAreas[0]->nPortIndex = OMX_ALL;
- meteringAreas[0]->nNumAreas = mMeteringAreas.size();
- meteringAreas[0]->nAlgoAreaPurpose = OMX_AlgoAreaExposure;
+ meteringAreas->nPortIndex = OMX_ALL;
+ meteringAreas->nNumAreas = mMeteringAreas.size();
+ meteringAreas->nAlgoAreaPurpose = OMX_AlgoAreaExposure;
for ( unsigned int n = 0; n < mMeteringAreas.size(); n++)
{
+ int widthDivisor = 1;
+ int heightDivisor = 1;
+
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottom) {
+ heightDivisor = 2;
+ }
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutLeftRight) {
+ widthDivisor = 2;
+ }
+
// transform the coordinates to 3A-type coordinates
- mMeteringAreas.itemAt(n)->transfrom(mPreviewData->mWidth,
- mPreviewData->mHeight,
- meteringAreas[0]->tAlgoAreas[n].nTop,
- meteringAreas[0]->tAlgoAreas[n].nLeft,
- meteringAreas[0]->tAlgoAreas[n].nWidth,
- meteringAreas[0]->tAlgoAreas[n].nHeight);
-
- meteringAreas[0]->tAlgoAreas[n].nLeft =
- ( meteringAreas[0]->tAlgoAreas[n].nLeft * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
- meteringAreas[0]->tAlgoAreas[n].nTop =
- ( meteringAreas[0]->tAlgoAreas[n].nTop* METERING_AREAS_RANGE ) / mPreviewData->mHeight;
- meteringAreas[0]->tAlgoAreas[n].nWidth =
- ( meteringAreas[0]->tAlgoAreas[n].nWidth * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
- meteringAreas[0]->tAlgoAreas[n].nHeight =
- ( meteringAreas[0]->tAlgoAreas[n].nHeight * METERING_AREAS_RANGE ) / mPreviewData->mHeight;
-
- meteringAreas[0]->tAlgoAreas[n].nPriority = mMeteringAreas.itemAt(n)->getWeight();
+ mMeteringAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth/widthDivisor,
+ (size_t)mPreviewData->mHeight/heightDivisor,
+ (size_t&)meteringAreas->tAlgoAreas[n].nTop,
+ (size_t&)meteringAreas->tAlgoAreas[n].nLeft,
+ (size_t&)meteringAreas->tAlgoAreas[n].nWidth,
+ (size_t&)meteringAreas->tAlgoAreas[n].nHeight);
+
+ meteringAreas->tAlgoAreas[n].nLeft =
+ ( meteringAreas->tAlgoAreas[n].nLeft * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
+ meteringAreas->tAlgoAreas[n].nTop =
+ ( meteringAreas->tAlgoAreas[n].nTop* METERING_AREAS_RANGE ) / mPreviewData->mHeight;
+ meteringAreas->tAlgoAreas[n].nWidth =
+ ( meteringAreas->tAlgoAreas[n].nWidth * METERING_AREAS_RANGE ) / mPreviewData->mWidth;
+ meteringAreas->tAlgoAreas[n].nHeight =
+ ( meteringAreas->tAlgoAreas[n].nHeight * METERING_AREAS_RANGE ) / mPreviewData->mHeight;
+
+ meteringAreas->tAlgoAreas[n].nPriority = mMeteringAreas.itemAt(n)->getWeight();
CAMHAL_LOGDB("Metering area %d : top = %d left = %d width = %d height = %d prio = %d",
- n, (int)meteringAreas[0]->tAlgoAreas[n].nTop, (int)meteringAreas[0]->tAlgoAreas[n].nLeft,
- (int)meteringAreas[0]->tAlgoAreas[n].nWidth, (int)meteringAreas[0]->tAlgoAreas[n].nHeight,
- (int)meteringAreas[0]->tAlgoAreas[n].nPriority);
+ n, (int)meteringAreas->tAlgoAreas[n].nTop, (int)meteringAreas->tAlgoAreas[n].nLeft,
+ (int)meteringAreas->tAlgoAreas[n].nWidth, (int)meteringAreas->tAlgoAreas[n].nHeight,
+ (int)meteringAreas->tAlgoAreas[n].nPriority);
}
@@ -1554,7 +1689,7 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
sharedBuffer.nPortIndex = OMX_ALL;
sharedBuffer.nSharedBuffSize = areasSize;
- sharedBuffer.pSharedBuff = (OMX_U8 *) meteringAreas[0];
+ sharedBuffer.pSharedBuff = (OMX_U8 *)camera_buffer_get_omx_ptr (&bufferlist[0]);
if ( NULL == sharedBuffer.pSharedBuff )
{
@@ -1577,15 +1712,100 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
}
EXIT:
- if (NULL != meteringAreas)
+ if (NULL != bufferlist)
{
- memMgr.freeBuffer((void*) meteringAreas);
- meteringAreas = NULL;
+ memMgr.freeBufferList(bufferlist);
}
return ret;
}
+//TI extensions for enable/disable algos
+status_t OMXCameraAdapter::setParameter3ABoolInvert(const OMX_INDEXTYPE omx_idx,
+ const OMX_BOOL data, const char *msg)
+{
+ OMX_BOOL inv_data;
+
+ if (OMX_TRUE == data)
+ {
+ inv_data = OMX_FALSE;
+ }
+ else if (OMX_FALSE == data)
+ {
+ inv_data = OMX_TRUE;
+ }
+ else
+ {
+ return BAD_VALUE;
+ }
+ return setParameter3ABool(omx_idx, inv_data, msg);
+}
+
+status_t OMXCameraAdapter::setParameter3ABool(const OMX_INDEXTYPE omx_idx,
+ const OMX_BOOL data, const char *msg)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_BOOLEANTYPE cfgdata;
+
+ LOG_FUNCTION_NAME
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&cfgdata, OMX_CONFIG_BOOLEANTYPE);
+ cfgdata.bEnabled = data;
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ omx_idx,
+ &cfgdata);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring %s error = 0x%x", msg, eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("%s configured successfully %d ", msg, cfgdata.bEnabled);
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+#ifndef OMAP_TUNA
+status_t OMXCameraAdapter::setAlgoFixedGamma(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABool((OMX_INDEXTYPE) OMX_TI_IndexConfigFixedGamma, Gen3A.AlgoFixedGamma, "Fixed Gamma");
+}
+
+status_t OMXCameraAdapter::setAlgoNSF1(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableNSF1, Gen3A.AlgoNSF1, "NSF1");
+}
+
+status_t OMXCameraAdapter::setAlgoNSF2(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableNSF2, Gen3A.AlgoNSF2, "NSF2");
+}
+
+status_t OMXCameraAdapter::setAlgoSharpening(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableSharpening, Gen3A.AlgoSharpening, "Sharpening");
+}
+
+status_t OMXCameraAdapter::setAlgoThreeLinColorMap(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableThreeLinColorMap, Gen3A.AlgoThreeLinColorMap, "Color Conversion");
+}
+
+status_t OMXCameraAdapter::setAlgoGIC(Gen3A_settings& Gen3A)
+{
+ return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableGIC, Gen3A.AlgoGIC, "Green Inballance Correction");
+}
+#endif
+
status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
{
status_t ret = NO_ERROR;
@@ -1594,7 +1814,7 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(m3ASettingsUpdateLock);
+ android::AutoMutex lock(m3ASettingsUpdateLock);
/*
* Scenes have a priority during the process
@@ -1692,6 +1912,11 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
break;
}
+ case SetManualExposure: {
+ ret |= setManualExposureVal(Gen3A);
+ break;
+ }
+
case SetFlash:
{
ret |= setFlashMode(Gen3A);
@@ -1714,6 +1939,46 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
ret |= setMeteringAreas(Gen3A);
}
break;
+
+#ifndef OMAP_TUNA
+ //TI extensions for enable/disable algos
+ case SetAlgoFixedGamma:
+ {
+ ret |= setAlgoFixedGamma(Gen3A);
+ }
+ break;
+
+ case SetAlgoNSF1:
+ {
+ ret |= setAlgoNSF1(Gen3A);
+ }
+ break;
+
+ case SetAlgoNSF2:
+ {
+ ret |= setAlgoNSF2(Gen3A);
+ }
+ break;
+
+ case SetAlgoSharpening:
+ {
+ ret |= setAlgoSharpening(Gen3A);
+ }
+ break;
+
+ case SetAlgoThreeLinColorMap:
+ {
+ ret |= setAlgoThreeLinColorMap(Gen3A);
+ }
+ break;
+
+ case SetAlgoGIC:
+ {
+ ret |= setAlgoGIC(Gen3A);
+ }
+ break;
+#endif
+
default:
CAMHAL_LOGEB("this setting (0x%x) is still not supported in CameraAdapter ",
currSett);
@@ -1728,4 +1993,5 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXAlgo.cpp b/camera/OMXCameraAdapter/OMXAlgo.cpp
index 12b9058..84090e9 100644
--- a/camera/OMXCameraAdapter/OMXAlgo.cpp
+++ b/camera/OMXCameraAdapter/OMXAlgo.cpp
@@ -21,70 +21,82 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include "ErrorUtils.h"
#undef TRUE
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersAlgo(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
const char *valstr = NULL;
+ const char *valManualStr = NULL;
const char *oldstr = NULL;
+ OMXCameraPortParameters *cap;
+ BrightnessMode gbce = BRIGHTNESS_OFF;
+ BrightnessMode glbce = BRIGHTNESS_OFF;
LOG_FUNCTION_NAME;
CaptureMode capMode;
CAMHAL_LOGDB("Capture mode %s", params.get(TICameraParameters::KEY_CAP_MODE));
- if ( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL )
- {
- if (strcmp(valstr, (const char *) TICameraParameters::HIGH_PERFORMANCE_MODE) == 0)
- {
+ if ( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL ) {
+ if (strcmp(valstr, (const char *) TICameraParameters::HIGH_PERFORMANCE_MODE) == 0) {
capMode = OMXCameraAdapter::HIGH_SPEED;
- }
- else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_MODE) == 0)
- {
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::EXPOSURE_BRACKETING) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::ZOOM_BRACKETING) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_MODE) == 0) {
capMode = OMXCameraAdapter::HIGH_QUALITY;
- }
- else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_ZSL_MODE) == 0)
- {
+ mCapabilitiesOpMode = MODE_HIGH_QUALITY;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_ZSL_MODE) == 0) {
capMode = OMXCameraAdapter::HIGH_QUALITY_ZSL;
- }
- else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0)
- {
+ mCapabilitiesOpMode = MODE_ZEROSHUTTERLAG;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0) {
capMode = OMXCameraAdapter::VIDEO_MODE;
- }
- else
- {
+ mCapabilitiesOpMode = MODE_VIDEO;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::CP_CAM_MODE) == 0) {
+ capMode = OMXCameraAdapter::CP_CAM;
+ mCapabilitiesOpMode = MODE_CPCAM;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::TEMP_BRACKETING) == 0) {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ mCapabilitiesOpMode = MODE_HIGH_SPEED;
+ } else {
capMode = OMXCameraAdapter::HIGH_QUALITY;
- }
+ mCapabilitiesOpMode = MODE_HIGH_QUALITY;
}
- else
- {
- capMode = OMXCameraAdapter::HIGH_QUALITY_ZSL;
- }
+ } else {
+ capMode = OMXCameraAdapter::HIGH_QUALITY;
+ mCapabilitiesOpMode = MODE_HIGH_QUALITY;
+ }
- if ( mCapMode != capMode )
- {
+ if ( mSensorIndex == 2 ) {
+ mCapabilitiesOpMode = MODE_STEREO;
+ }
+
+ if ( mCapMode != capMode ) {
mCapMode = capMode;
mOMXStateSwitch = true;
- }
+ mPendingPreviewSettings |= SetCapMode;
+ }
CAMHAL_LOGDB("Capture Mode set %d", mCapMode);
/// Configure IPP, LDCNSF, GBCE and GLBCE only in HQ mode
IPPMode ipp;
if((mCapMode == OMXCameraAdapter::HIGH_QUALITY) || (mCapMode == OMXCameraAdapter::HIGH_QUALITY_ZSL)
- || (mCapMode == OMXCameraAdapter::VIDEO_MODE) )
+ || (mCapMode == OMXCameraAdapter::VIDEO_MODE)
+ || (mCapMode == OMXCameraAdapter::CP_CAM))
{
if ( (valstr = params.get(TICameraParameters::KEY_IPP)) != NULL )
{
@@ -116,95 +128,57 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
CAMHAL_LOGVB("IPP Mode set %d", ipp);
- if (((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) )
- {
- // Configure GBCE only if the setting has changed since last time
- oldstr = mParams.get(TICameraParameters::KEY_GBCE);
- bool cmpRes = true;
- if ( NULL != oldstr )
- {
- cmpRes = strcmp(valstr, oldstr) != 0;
- }
- else
- {
- cmpRes = true;
- }
-
+ if (((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) ) {
+ if (strcmp(valstr, android::CameraParameters::TRUE ) == 0) {
+ gbce = BRIGHTNESS_ON;
+ } else {
+ gbce = BRIGHTNESS_OFF;
+ }
- if( cmpRes )
- {
- if (strcmp(valstr, ( const char * ) TICameraParameters::GBCE_ENABLE ) == 0)
- {
- setGBCE(OMXCameraAdapter::BRIGHTNESS_ON);
- }
- else if (strcmp(valstr, ( const char * ) TICameraParameters::GBCE_DISABLE ) == 0)
- {
- setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
- else
- {
- setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
- }
+ if ( gbce != mGBCE ) {
+ mGBCE = gbce;
+ setGBCE(mGBCE);
}
- else if(mParams.get(TICameraParameters::KEY_GBCE) || mFirstTimeInit)
- {
+
+ } else if(mFirstTimeInit) {
//Disable GBCE by default
setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
+ }
- if ( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL )
- {
- // Configure GLBCE only if the setting has changed since last time
-
- oldstr = mParams.get(TICameraParameters::KEY_GLBCE);
- bool cmpRes = true;
- if ( NULL != oldstr )
- {
- cmpRes = strcmp(valstr, oldstr) != 0;
- }
- else
- {
- cmpRes = true;
- }
+ if ( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL ) {
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
+ glbce = BRIGHTNESS_ON;
+ } else {
+ glbce = BRIGHTNESS_OFF;
+ }
- if( cmpRes )
- {
- if (strcmp(valstr, ( const char * ) TICameraParameters::GLBCE_ENABLE ) == 0)
- {
- setGLBCE(OMXCameraAdapter::BRIGHTNESS_ON);
- }
- else if (strcmp(valstr, ( const char * ) TICameraParameters::GLBCE_DISABLE ) == 0)
- {
- setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
- else
- {
- setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
- }
+ if ( glbce != mGLBCE ) {
+ mGLBCE = glbce;
+ setGLBCE(mGLBCE);
}
- else if(mParams.get(TICameraParameters::KEY_GLBCE) || mFirstTimeInit)
- {
+
+ } else if(mFirstTimeInit) {
//Disable GLBCE by default
setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
- }
}
- else
- {
+
+ } else {
ipp = OMXCameraAdapter::IPP_NONE;
- }
+ }
if ( mIPP != ipp )
{
mIPP = ipp;
mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetLDC;
+ mPendingPreviewSettings |= SetNSF;
}
///Set VNF Configuration
bool vnfEnabled = false;
- if ( params.getInt(TICameraParameters::KEY_VNF) > 0 )
+ valstr = params.get(TICameraParameters::KEY_VNF);
+ if (valstr && strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
CAMHAL_LOGDA("VNF Enabled");
vnfEnabled = true;
@@ -219,12 +193,13 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
{
mVnfEnabled = vnfEnabled;
mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetVNF;
}
///Set VSTAB Configuration
bool vstabEnabled = false;
- valstr = params.get(CameraParameters::KEY_VIDEO_STABILIZATION);
- if (valstr && strcmp(valstr, CameraParameters::TRUE) == 0) {
+ valstr = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ if (valstr && strcmp(valstr, android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGDA("VSTAB Enabled");
vstabEnabled = true;
}
@@ -238,6 +213,7 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
{
mVstabEnabled = vstabEnabled;
mOMXStateSwitch = true;
+ mPendingPreviewSettings |= SetVSTAB;
}
//A work-around for a failing call to OMX flush buffers
@@ -250,35 +226,30 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
#ifdef OMAP_ENHANCEMENT
//Set Auto Convergence Mode
- valstr = params.get((const char *) TICameraParameters::KEY_AUTOCONVERGENCE);
- if ( valstr != NULL )
- {
- // Set ManualConvergence default value
- OMX_S32 manualconvergence = -30;
- if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE) == 0 )
- {
- setAutoConvergence(OMX_TI_AutoConvergenceModeDisable, manualconvergence);
+ valstr = params.get((const char *) TICameraParameters::KEY_AUTOCONVERGENCE_MODE);
+ valManualStr = params.get(TICameraParameters::KEY_MANUAL_CONVERGENCE);
+
+ cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+ if (cap->mFrameLayoutType != OMX_TI_StereoFrameLayout2D) {
+ if ((valstr != NULL) || (valManualStr != NULL)) {
+ setAutoConvergence(valstr, valManualStr, params);
+ if (valstr != NULL) {
+ CAMHAL_LOGDB("AutoConvergenceMode %s", valstr);
+ }
+ if (valManualStr != NULL) {
+ CAMHAL_LOGDB("Manual Convergence %s", valManualStr);
}
- else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_FRAME) == 0 )
- {
- setAutoConvergence(OMX_TI_AutoConvergenceModeFrame, manualconvergence);
- }
- else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_CENTER) == 0 )
- {
- setAutoConvergence(OMX_TI_AutoConvergenceModeCenter, manualconvergence);
- }
- else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_FFT) == 0 )
- {
- setAutoConvergence(OMX_TI_AutoConvergenceModeFocusFaceTouch, manualconvergence);
- }
- else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL) == 0 )
- {
- manualconvergence = (OMX_S32)params.getInt(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES);
- setAutoConvergence(OMX_TI_AutoConvergenceModeManual, manualconvergence);
- }
- CAMHAL_LOGVB("AutoConvergenceMode %s, value = %d", valstr, (int) manualconvergence);
}
+ //Set Mechanical Misalignment Correction
+ valstr = params.get(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION);
+ if ( valstr != NULL ) {
+ setMechanicalMisalignmentCorrection(strcmp(valstr, android::CameraParameters::TRUE) == 0);
+ CAMHAL_LOGDB("Mechanical Misalignment Correction %s", valstr);
+ }
+ }
+
#endif
LOG_FUNCTION_NAME_EXIT;
@@ -286,67 +257,123 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
return ret;
}
-// Get AutoConvergence
-status_t OMXCameraAdapter::getAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE *pACMode,
- OMX_S32 *pManualConverence)
+// Set AutoConvergence
+status_t OMXCameraAdapter::setAutoConvergence(const char *pValstr, const char *pValManualstr, const android::CameraParameters &params)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_TI_CONFIG_CONVERGENCETYPE ACParams;
-
- ACParams.nSize = sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
- ACParams.nVersion = mLocalVersionParam;
- ACParams.nPortIndex = OMX_ALL;
+ const char *str = NULL;
+ android::Vector<android::sp<CameraArea> > tempAreas;
+ int mode;
+ int changed = 0;
LOG_FUNCTION_NAME;
- eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
- (OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
- &ACParams);
- if ( eError != OMX_ErrorNone )
- {
- CAMHAL_LOGEB("Error while getting AutoConvergence 0x%x", eError);
- ret = -EINVAL;
+ if ( pValManualstr != NULL ) {
+ OMX_S32 manualConvergence = (OMX_S32)strtol(pValManualstr ,0 ,0);
+
+ if (mManualConv != manualConvergence) {
+ mManualConv = manualConvergence;
+ changed = 1;
}
- else
- {
- *pManualConverence = ACParams.nManualConverence;
- *pACMode = ACParams.eACMode;
- CAMHAL_LOGDA("AutoConvergence got successfully");
+ }
+
+ if ( pValstr != NULL ) {
+ mode = getLUTvalue_HALtoOMX(pValstr, mAutoConvergenceLUT);
+
+ if ( NAME_NOT_FOUND == mode ) {
+ CAMHAL_LOGEB("Wrong convergence mode: %s", pValstr);
+ LOG_FUNCTION_NAME_EXIT;
+ return mode;
}
- LOG_FUNCTION_NAME_EXIT;
+ if ( mAutoConv != static_cast<OMX_TI_AUTOCONVERGENCEMODETYPE> (mode) ) {
+ mAutoConv = static_cast<OMX_TI_AUTOCONVERGENCEMODETYPE> (mode);
+ changed = 1;
+ }
+ }
- return ret;
-}
+ if ( OMX_TI_AutoConvergenceModeFocusFaceTouch == mAutoConv ) {
+ android::AutoMutex lock(mTouchAreasLock);
-// Set AutoConvergence
-status_t OMXCameraAdapter::setAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE pACMode,
- OMX_S32 pManualConverence)
-{
- status_t ret = NO_ERROR;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_TI_CONFIG_CONVERGENCETYPE ACParams;
+ str = params.get(android::CameraParameters::KEY_METERING_AREAS);
- LOG_FUNCTION_NAME;
+ if ( NULL != str ) {
+ ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
+ } else {
+ CAMHAL_LOGEB("Touch areas not received in %s",
+ android::CameraParameters::KEY_METERING_AREAS);
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ if ( CameraArea::areAreasDifferent(mTouchAreas, tempAreas) ) {
+ mTouchAreas.clear();
+ mTouchAreas = tempAreas;
+ changed = 1;
+ }
+ }
- ACParams.nSize = sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
+ if (!changed) {
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ OMXCameraPortParameters * mPreviewData;
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+ ACParams.nSize = (OMX_U32)sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
ACParams.nVersion = mLocalVersionParam;
ACParams.nPortIndex = OMX_ALL;
- ACParams.nManualConverence = pManualConverence;
- ACParams.eACMode = pACMode;
+
+ OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
+ &ACParams);
+
+ ACParams.eACMode = mAutoConv;
+ ACParams.nManualConverence = mManualConv;
+
+ if (1 == mTouchAreas.size()) {
+ int widthDivisor = 1;
+ int heightDivisor = 1;
+
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottom) {
+ heightDivisor = 2;
+ }
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutLeftRight) {
+ widthDivisor = 2;
+ }
+
+ // transform the coordinates to 3A-type coordinates
+ mTouchAreas.itemAt(0)->transfrom((size_t)mPreviewData->mWidth/widthDivisor,
+ (size_t)mPreviewData->mHeight/heightDivisor,
+ (size_t&) ACParams.nACProcWinStartY,
+ (size_t&) ACParams.nACProcWinStartX,
+ (size_t&) ACParams.nACProcWinWidth,
+ (size_t&) ACParams.nACProcWinHeight);
+ }
+
+ CAMHAL_LOGDB("nSize %d", (int)ACParams.nSize);
+ CAMHAL_LOGDB("nPortIndex %d", (int)ACParams.nPortIndex);
+ CAMHAL_LOGDB("nManualConverence %d", (int)ACParams.nManualConverence);
+ CAMHAL_LOGDB("eACMode %d", (int)ACParams.eACMode);
+ CAMHAL_LOGDB("nACProcWinStartX %d", (int)ACParams.nACProcWinStartX);
+ CAMHAL_LOGDB("nACProcWinStartY %d", (int)ACParams.nACProcWinStartY);
+ CAMHAL_LOGDB("nACProcWinWidth %d", (int)ACParams.nACProcWinWidth);
+ CAMHAL_LOGDB("nACProcWinHeight %d", (int)ACParams.nACProcWinHeight);
+ CAMHAL_LOGDB("bACStatus %d", (int)ACParams.bACStatus);
+
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
(OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
&ACParams);
- if ( eError != OMX_ErrorNone )
- {
+
+ if ( eError != OMX_ErrorNone ) {
CAMHAL_LOGEB("Error while setting AutoConvergence 0x%x", eError);
- ret = -EINVAL;
- }
- else
- {
+ ret = BAD_VALUE;
+ } else {
CAMHAL_LOGDA("AutoConvergence applied successfully");
- }
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -603,17 +630,18 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_CONFIG_CAMOPERATINGMODETYPE camMode;
- OMX_TI_PARAM_ZSLHISTORYLENTYPE zslHistoryLen;
OMX_CONFIG_BOOLEANTYPE bCAC;
+#ifndef OMAP_TUNA
+ OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE singlePrevMode;
+#endif
LOG_FUNCTION_NAME;
- //ZSL have 4 buffers history by default
- OMX_INIT_STRUCT_PTR (&zslHistoryLen, OMX_TI_PARAM_ZSLHISTORYLENTYPE);
- zslHistoryLen.nHistoryLen = 4;
-
//CAC is disabled by default
OMX_INIT_STRUCT_PTR (&bCAC, OMX_CONFIG_BOOLEANTYPE);
+#ifndef OMAP_TUNA
+ OMX_INIT_STRUCT_PTR (&singlePrevMode, OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE);
+#endif
bCAC.bEnabled = OMX_FALSE;
if ( NO_ERROR == ret )
@@ -622,14 +650,32 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
OMX_INIT_STRUCT_PTR (&camMode, OMX_CONFIG_CAMOPERATINGMODETYPE);
if ( mSensorIndex == OMX_TI_StereoSensor )
{
- CAMHAL_LOGDA("Camera mode: STEREO");
- camMode.eCamOperatingMode = OMX_CaptureStereoImageCapture;
+#ifndef OMAP_TUNA
+ if ( OMXCameraAdapter::VIDEO_MODE == mode ) {
+ CAMHAL_LOGDA("Camera mode: STEREO VIDEO");
+ camMode.eCamOperatingMode = OMX_TI_StereoVideo;
+ } else {
+#endif
+ CAMHAL_LOGDA("Camera mode: STEREO");
+ camMode.eCamOperatingMode = OMX_CaptureStereoImageCapture;
+#ifndef OMAP_TUNA
+ }
+#endif
}
else if ( OMXCameraAdapter::HIGH_SPEED == mode )
{
CAMHAL_LOGDA("Camera mode: HIGH SPEED");
camMode.eCamOperatingMode = OMX_CaptureImageHighSpeedTemporalBracketing;
}
+ else if ( OMXCameraAdapter::CP_CAM == mode )
+ {
+ CAMHAL_LOGDA("Camera mode: CP CAM");
+#ifndef OMAP_TUNA
+ camMode.eCamOperatingMode = OMX_TI_CPCam;
+ // TODO(XXX): Hardcode for now until we implement re-proc pipe
+ singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
+#endif
+ }
else if( OMXCameraAdapter::HIGH_QUALITY == mode )
{
CAMHAL_LOGDA("Camera mode: HIGH QUALITY");
@@ -641,9 +687,11 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
CAMHAL_LOGDA("Camera mode: HIGH QUALITY_ZSL");
camMode.eCamOperatingMode = OMX_TI_CaptureImageProfileZeroShutterLag;
+#ifdef CAMERAHAL_TUNA
if ( !mIternalRecordingHint ) {
zslHistoryLen.nHistoryLen = 5;
}
+#endif
}
else if( OMXCameraAdapter::VIDEO_MODE == mode )
@@ -660,31 +708,12 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
if( NO_ERROR == ret )
{
eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
- ( OMX_INDEXTYPE ) OMX_TI_IndexParamZslHistoryLen,
- &zslHistoryLen);
- if ( OMX_ErrorNone != eError )
- {
- CAMHAL_LOGEB("Error while configuring ZSL History len 0x%x", eError);
- // Don't return status for now
- // as high history values might lead
- // to errors on some platforms.
- // ret = ErrorUtils::omxToAndroidError(eError);
- }
- else
- {
- CAMHAL_LOGDA("ZSL History len configured successfully");
- }
- }
-
- if( NO_ERROR == ret )
- {
- eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
( OMX_INDEXTYPE ) OMX_IndexCameraOperatingMode,
&camMode);
if ( OMX_ErrorNone != eError )
{
CAMHAL_LOGEB("Error while configuring camera mode 0x%x", eError);
- ret = ErrorUtils::omxToAndroidError(eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
}
else
{
@@ -692,6 +721,21 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
}
}
+#ifndef OMAP_TUNA
+ if((NO_ERROR == ret) && (OMXCameraAdapter::CP_CAM == mode)) {
+ //Configure Single Preview Mode
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSinglePreviewMode,
+ &singlePrevMode);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring single preview mode 0x%x", eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
+ } else {
+ CAMHAL_LOGDA("single preview mode configured successfully");
+ }
+ }
+#endif
+
if( NO_ERROR == ret )
{
//Configure CAC
@@ -701,7 +745,7 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
if ( OMX_ErrorNone != eError )
{
CAMHAL_LOGEB("Error while configuring CAC 0x%x", eError);
- ret = ErrorUtils::omxToAndroidError(eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
}
else
{
@@ -1009,7 +1053,7 @@ status_t OMXCameraAdapter::setAlgoPriority(AlgoPriority priority,
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setPictureRotation(unsigned int degree)
@@ -1055,76 +1099,83 @@ status_t OMXCameraAdapter::setSensorOrientation(unsigned int degree)
OMXCameraPortParameters *mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
LOG_FUNCTION_NAME;
- if ( OMX_StateInvalid == mComponentState )
- {
+ if ( OMX_StateInvalid == mComponentState ) {
CAMHAL_LOGEA("OMX component is in invalid state");
ret = -1;
- }
+ }
/* Set Temproary Port resolution.
- * For resolution with height > 1008,resolution cannot be set without configuring orientation.
+ * For resolution with height >= 720,
+ * resolution cannot be set without configuring orientation.
* So we first set a temp resolution. We have used VGA
*/
- tmpHeight = mPreviewData->mHeight;
- tmpWidth = mPreviewData->mWidth;
- mPreviewData->mWidth = 640;
- mPreviewData->mHeight = 480;
- ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData);
- if ( ret != NO_ERROR )
- {
- CAMHAL_LOGEB("setFormat() failed %d", ret);
+ if ( mPreviewData->mHeight >= 720 ) {
+ tmpHeight = mPreviewData->mHeight;
+ tmpWidth = mPreviewData->mWidth;
+ mPreviewData->mWidth = 640;
+ mPreviewData->mHeight = 480;
+
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error while configuring format 0x%x", ret);
+ return ret;
}
+ mPreviewData->mWidth = tmpWidth;
+ mPreviewData->mHeight = tmpHeight;
+ mPreviewPortInitialized = true;
+ }
+ else if (!mPreviewPortInitialized) {
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error while configuring format 0x%x", ret);
+ return ret;
+ }
+ mPreviewPortInitialized = true;
+ }
+
/* Now set Required Orientation*/
- if ( NO_ERROR == ret )
- {
+ if ( NO_ERROR == ret ) {
OMX_INIT_STRUCT(sensorOrientation, OMX_CONFIG_ROTATIONTYPE);
sensorOrientation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
- eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
- OMX_IndexConfigCommonRotate,
- &sensorOrientation);
- if ( OMX_ErrorNone != eError )
- {
- CAMHAL_LOGEB("Error while Reading Sensor Orientation : 0x%x", eError);
- }
- CAMHAL_LOGVB(" Currently Sensor Orientation is set to : %d",
- ( unsigned int ) sensorOrientation.nRotation);
- sensorOrientation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
sensorOrientation.nRotation = degree;
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigCommonRotate,
&sensorOrientation);
- if ( OMX_ErrorNone != eError )
- {
+ if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while configuring rotation 0x%x", eError);
- }
- CAMHAL_LOGVA(" Read the Parameters that are set");
- eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
- OMX_IndexConfigCommonRotate,
- &sensorOrientation);
- if ( OMX_ErrorNone != eError )
- {
- CAMHAL_LOGEB("Error while Reading Sensor Orientation : 0x%x", eError);
- }
+ }
CAMHAL_LOGVB(" Currently Sensor Orientation is set to : %d",
( unsigned int ) sensorOrientation.nRotation);
CAMHAL_LOGVB(" Sensor Configured for Port : %d",
( unsigned int ) sensorOrientation.nPortIndex);
- }
+ }
/* Now set the required resolution as requested */
+ if ( NO_ERROR == ret ) {
+ bool portConfigured = false;
+ ret = setSensorQuirks(degree,
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex],
+ portConfigured);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error while configuring setSensorQuirks 0x%x", ret);
+ return ret;
+ }
- mPreviewData->mWidth = tmpWidth;
- mPreviewData->mHeight = tmpHeight;
- if ( NO_ERROR == ret )
- {
- ret = setFormat (mCameraAdapterParameters.mPrevPortIndex,
- mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setFormat() failed %d", ret);
+ if ( !portConfigured ) {
+ ret = setFormat (mCameraAdapterParameters.mPrevPortIndex,
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error while configuring format 0x%x", ret);
+ return ret;
}
+
+ // Another WA: Setting the port definition will reset the VFR
+ // configuration.
+ setVFramerate(mPreviewData->mMinFrameRate,
+ mPreviewData->mMaxFrameRate);
}
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -1146,9 +1197,7 @@ status_t OMXCameraAdapter::setVFramerate(OMX_U32 minFrameRate, OMX_U32 maxFrameR
ret = -EINVAL;
}
- // The port framerate should never be smaller
- // than max framerate.
- if ( mPreviewData->mFrameRate < maxFrameRate ) {
+ if ( !mSetFormatDone ) {
return NO_INIT;
}
@@ -1175,6 +1224,36 @@ status_t OMXCameraAdapter::setVFramerate(OMX_U32 minFrameRate, OMX_U32 maxFrameR
}
return ret;
- }
+}
+
+#ifndef OMAP_TUNA
+status_t OMXCameraAdapter::setMechanicalMisalignmentCorrection(const bool enable)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_MM mm;
+
+ LOG_FUNCTION_NAME;
+
+ mm.nVersion = mLocalVersionParam;
+ mm.nSize = sizeof(OMX_TI_CONFIG_MM);
+ mm.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ mm.bMM = enable ? OMX_TRUE : OMX_FALSE;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigMechanicalMisalignment,
+ &mm);
+
+ if(OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while enabling mechanical misalignment correction. error = 0x%x", eError);
+ ret = -1;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+#endif
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
index 1ae440d..ea7c890 100755..100644
--- a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
+++ b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
@@ -33,21 +33,21 @@
static int mDebugFps = 0;
static int mDebugFcs = 0;
-#undef TRUE
-#undef FALSE
-
#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);}
-namespace android {
+namespace Ti {
+namespace Camera {
+
+#ifdef CAMERAHAL_OMX_PROFILING
-#undef LOG_TAG
-///Maintain a separate tag for OMXCameraAdapter logs to isolate issues OMX specific
-#define LOG_TAG "CameraHAL"
+const char OMXCameraAdapter::DEFAULT_PROFILE_PATH[] = "/data/dbg/profile_data.bin";
+
+#endif
//frames skipped before recalculating the framerate
#define FPS_PERIOD 30
-Mutex gAdapterLock;
+android::Mutex gAdapterLock;
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
@@ -55,16 +55,24 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
LOG_FUNCTION_NAME;
char value[PROPERTY_VALUE_MAX];
+ const char *mountOrientationString = NULL;
+
property_get("debug.camera.showfps", value, "0");
mDebugFps = atoi(value);
property_get("debug.camera.framecounts", value, "0");
mDebugFcs = atoi(value);
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ property_get("debug.camera.profile", value, "0");
+ mDebugProfile = atoi(value);
+
+#endif
+
TIMM_OSAL_ERRORTYPE osalError = OMX_ErrorNone;
OMX_ERRORTYPE eError = OMX_ErrorNone;
status_t ret = NO_ERROR;
-
mLocalVersionParam.s.nVersionMajor = 0x1;
mLocalVersionParam.s.nVersionMinor = 0x1;
mLocalVersionParam.s.nRevision = 0x0 ;
@@ -72,6 +80,7 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mPending3Asettings = 0;//E3AsettingsAll;
mPendingCaptureSettings = 0;
+ mPendingPreviewSettings = 0;
if ( 0 != mInitSem.Count() )
{
@@ -86,17 +95,24 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mCameraAdapterParameters.mImagePortIndex = OMX_CAMERA_PORT_IMAGE_OUT_IMAGE;
mCameraAdapterParameters.mMeasurementPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT;
//currently not supported use preview port instead
- mCameraAdapterParameters.mVideoPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW;
+ mCameraAdapterParameters.mVideoPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_VIDEO;
+ mCameraAdapterParameters.mVideoInPortIndex = OMX_CAMERA_PORT_VIDEO_IN_VIDEO;
eError = OMX_Init();
if (eError != OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_Init() failed, error: 0x%x", eError);
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
mOmxInitialized = true;
+ // Initialize the callback handles
+ OMX_CALLBACKTYPE callbacks;
+ callbacks.EventHandler = Camera::OMXCameraAdapterEventHandler;
+ callbacks.EmptyBufferDone = Camera::OMXCameraAdapterEmptyBufferDone;
+ callbacks.FillBufferDone = Camera::OMXCameraAdapterFillBufferDone;
+
///Get the handle to the OMX Component
- eError = OMXCameraAdapter::OMXCameraGetHandle(&mCameraAdapterParameters.mHandleComp, (OMX_PTR)this);
+ eError = OMXCameraAdapter::OMXCameraGetHandle(&mCameraAdapterParameters.mHandleComp, this, callbacks);
if(eError != OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
}
@@ -105,6 +121,10 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mComponentState = OMX_StateLoaded;
CAMHAL_LOGVB("OMX_GetHandle -0x%x sensor_index = %lu", eError, mSensorIndex);
+#ifndef OMAP_TUNA
+ initDccFileDataSave(&mCameraAdapterParameters.mHandleComp, mCameraAdapterParameters.mPrevPortIndex);
+#endif
+
eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
OMX_CommandPortDisable,
OMX_ALL,
@@ -162,26 +182,41 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
CAMHAL_LOGDB("Sensor %d selected successfully", mSensorIndex);
}
+#ifdef CAMERAHAL_DEBUG
+
printComponentVersion(mCameraAdapterParameters.mHandleComp);
+#endif
+
mBracketingEnabled = false;
+ mZoomBracketingEnabled = false;
mBracketingBuffersQueuedCount = 0;
mBracketingRange = 1;
mLastBracetingBufferIdx = 0;
+ mBracketingBuffersQueued = NULL;
mOMXStateSwitch = false;
+ mBracketingSet = false;
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ mRawCapture = false;
+ mYuvCapture = false;
+#endif
mCaptureSignalled = false;
mCaptureConfigured = false;
+ mReprocConfigured = false;
mRecording = false;
mWaitingForSnapshot = false;
- mSnapshotCount = 0;
+ mPictureFormatFromClient = NULL;
- mCapMode = HIGH_QUALITY;
+ mCapabilitiesOpMode = MODE_MAX;
+ mCapMode = INITIAL_MODE;
mIPP = IPP_NULL;
mVstabEnabled = false;
mVnfEnabled = false;
mBurstFrames = 1;
+ mBurstFramesAccum = 0;
mCapturedFrames = 0;
+ mFlushShotConfigQueue = false;
mPictureQuality = 100;
mCurrentZoomIdx = 0;
mTargetZoomIdx = 0;
@@ -190,13 +225,24 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mZoomInc = 1;
mZoomParameterIdx = 0;
mExposureBracketingValidEntries = 0;
+ mZoomBracketingValidEntries = 0;
mSensorOverclock = false;
+ mAutoConv = OMX_TI_AutoConvergenceModeMax;
+ mManualConv = 0;
+
+#ifdef CAMERAHAL_TUNA
mIternalRecordingHint = false;
+#endif
mDeviceOrientation = 0;
+ mFaceOrientation = 0;
mCapabilities = caps;
mZoomUpdating = false;
mZoomUpdate = false;
+ mGBCE = BRIGHTNESS_OFF;
+ mGLBCE = BRIGHTNESS_OFF;
+ mParameters3A.ExposureLock = OMX_FALSE;
+ mParameters3A.WhiteBalanceLock = OMX_FALSE;
mEXIFData.mGPSData.mAltitudeValid = false;
mEXIFData.mGPSData.mDatestampValid = false;
@@ -209,6 +255,25 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mEXIFData.mModelValid = false;
mEXIFData.mMakeValid = false;
+ //update the mDeviceOrientation with the sensor mount orientation.
+ //So that the face detect will work before onOrientationEvent()
+ //get triggered.
+ CAMHAL_ASSERT(mCapabilities);
+ mountOrientationString = mCapabilities->get(CameraProperties::ORIENTATION_INDEX);
+ CAMHAL_ASSERT(mountOrientationString);
+ mDeviceOrientation = atoi(mountOrientationString);
+ mFaceOrientation = atoi(mountOrientationString);
+
+ if (mSensorIndex != 2) {
+ mCapabilities->setMode(MODE_HIGH_SPEED);
+ }
+
+ if (mCapabilities->get(CameraProperties::SUPPORTED_ZOOM_STAGES) != NULL) {
+ mMaxZoomSupported = mCapabilities->getInt(CameraProperties::SUPPORTED_ZOOM_STAGES) + 1;
+ } else {
+ mMaxZoomSupported = 1;
+ }
+
// initialize command handling thread
if(mCommandHandler.get() == NULL)
mCommandHandler = new CommandHandler(this);
@@ -219,14 +284,13 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
return NO_MEMORY;
}
- ret = mCommandHandler->run("CallbackThread", PRIORITY_URGENT_DISPLAY);
+ ret = mCommandHandler->run("CallbackThread", android::PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
if( ret == INVALID_OPERATION){
CAMHAL_LOGDA("command handler thread already runnning!!");
ret = NO_ERROR;
- } else
- {
+ } else {
CAMHAL_LOGEA("Couldn't run command handlerthread");
return ret;
}
@@ -242,31 +306,18 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
return NO_MEMORY;
}
- ret = mOMXCallbackHandler->run("OMXCallbackThread", PRIORITY_URGENT_DISPLAY);
+ ret = mOMXCallbackHandler->run("OMXCallbackThread", android::PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
if( ret == INVALID_OPERATION){
CAMHAL_LOGDA("omx callback handler thread already runnning!!");
ret = NO_ERROR;
- }else
- {
+ } else {
CAMHAL_LOGEA("Couldn't run omx callback handler thread");
return ret;
}
}
- //Remove any unhandled events
- if (!mEventSignalQ.isEmpty()) {
- for (unsigned int i = 0 ;i < mEventSignalQ.size(); i++ ) {
- TIUTILS::Message *msg = mEventSignalQ.itemAt(i);
- //remove from queue and free msg
- if ( NULL != msg ) {
- free(msg);
- }
- }
- mEventSignalQ.clear();
- }
-
OMX_INIT_STRUCT_PTR (&mRegionPriority, OMX_TI_CONFIG_3A_REGION_PRIORITY);
OMX_INIT_STRUCT_PTR (&mFacePriority, OMX_TI_CONFIG_3A_FACE_PRIORITY);
mRegionPriority.nPortIndex = OMX_ALL;
@@ -276,31 +327,64 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
//and will not conditionally apply based on current values.
mFirstTimeInit = true;
+ //Flag to avoid calling setVFramerate() before OMX_SetParameter(OMX_IndexParamPortDefinition)
+ //Ducati will return an error otherwise.
+ mSetFormatDone = false;
+
memset(mExposureBracketingValues, 0, EXP_BRACKET_RANGE*sizeof(int));
+ memset(mZoomBracketingValues, 0, ZOOM_BRACKET_RANGE*sizeof(int));
mMeasurementEnabled = false;
mFaceDetectionRunning = false;
mFaceDetectionPaused = false;
mFDSwitchAlgoPriority = false;
+ metadataLastAnalogGain = -1;
+ metadataLastExposureTime = -1;
+
memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex], 0, sizeof(OMXCameraPortParameters));
memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex], 0, sizeof(OMXCameraPortParameters));
-
- //Initialize 3A defaults
- ret = init3AParams(mParameters3A);
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEA("Couldn't init 3A params!");
- goto EXIT;
- }
+ memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex], 0, sizeof(OMXCameraPortParameters));
+ memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex], 0, sizeof(OMXCameraPortParameters));
+
+ // initialize 3A defaults
+ mParameters3A.Effect = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EFFECT, EffLUT);
+ mParameters3A.FlashMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FLASH_MODE, FlashLUT);
+ mParameters3A.SceneMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_SCENE_MODE, SceneLUT);
+ mParameters3A.EVCompensation = atoi(OMXCameraAdapter::DEFAULT_EV_COMPENSATION);
+ mParameters3A.Focus = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FOCUS_MODE, FocusLUT);
+ mParameters3A.ISO = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ISO_MODE, IsoLUT);
+ mParameters3A.Flicker = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ANTIBANDING, FlickerLUT);
+ mParameters3A.Brightness = atoi(OMXCameraAdapter::DEFAULT_BRIGHTNESS);
+ mParameters3A.Saturation = atoi(OMXCameraAdapter::DEFAULT_SATURATION) - SATURATION_OFFSET;
+ mParameters3A.Sharpness = atoi(OMXCameraAdapter::DEFAULT_SHARPNESS) - SHARPNESS_OFFSET;
+ mParameters3A.Contrast = atoi(OMXCameraAdapter::DEFAULT_CONTRAST) - CONTRAST_OFFSET;
+ mParameters3A.WhiteBallance = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_WB, WBalLUT);
+ mParameters3A.Exposure = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EXPOSURE_MODE, ExpLUT);
+ mParameters3A.ExposureLock = OMX_FALSE;
+ mParameters3A.FocusLock = OMX_FALSE;
+ mParameters3A.WhiteBalanceLock = OMX_FALSE;
+
+ mParameters3A.ManualExposure = 0;
+ mParameters3A.ManualExposureRight = 0;
+ mParameters3A.ManualGain = 0;
+ mParameters3A.ManualGainRight = 0;
+
+ mParameters3A.AlgoFixedGamma = OMX_TRUE;
+ mParameters3A.AlgoNSF1 = OMX_TRUE;
+ mParameters3A.AlgoNSF2 = OMX_TRUE;
+ mParameters3A.AlgoSharpening = OMX_TRUE;
+ mParameters3A.AlgoThreeLinColorMap = OMX_TRUE;
+ mParameters3A.AlgoGIC = OMX_TRUE;
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
EXIT:
CAMHAL_LOGDB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
void OMXCameraAdapter::performCleanupAfterError()
@@ -324,9 +408,15 @@ OMXCameraAdapter::OMXCameraPortParameters *OMXCameraAdapter::getPortParams(Camer
switch ( frameType )
{
case CameraFrame::IMAGE_FRAME:
- case CameraFrame::RAW_FRAME:
ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
break;
+ case CameraFrame::RAW_FRAME:
+ if (mRawCapture) {
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+ } else {
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ }
+ break;
case CameraFrame::PREVIEW_FRAME_SYNC:
case CameraFrame::SNAPSHOT_FRAME:
case CameraFrame::VIDEO_FRAME_SYNC:
@@ -342,13 +432,16 @@ OMXCameraAdapter::OMXCameraPortParameters *OMXCameraAdapter::getPortParams(Camer
return ret;
}
-status_t OMXCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+status_t OMXCameraAdapter::fillThisBuffer(CameraBuffer * frameBuf, CameraFrame::FrameType frameType)
{
+ LOG_FUNCTION_NAME;
+
status_t ret = NO_ERROR;
OMXCameraPortParameters *port = NULL;
OMX_ERRORTYPE eError = OMX_ErrorNone;
BaseCameraAdapter::AdapterState state;
BaseCameraAdapter::getState(state);
+ bool isCaptureFrame = false;
if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE )
{
@@ -360,16 +453,19 @@ status_t OMXCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType
return -EINVAL;
}
- if ( (NO_ERROR == ret) &&
- ((CameraFrame::IMAGE_FRAME == frameType) || (CameraFrame::RAW_FRAME == frameType)) &&
- (1 > mCapturedFrames) &&
- (!mBracketingEnabled)) {
- // Signal end of image capture
- if ( NULL != mEndImageCaptureCallback) {
- mEndImageCaptureCallback(mEndCaptureData);
+ isCaptureFrame = (CameraFrame::IMAGE_FRAME == frameType) ||
+ (CameraFrame::RAW_FRAME == frameType);
+
+ if ( isCaptureFrame && (NO_ERROR == ret) ) {
+ // In CP_CAM mode, end image capture will be signalled when application starts preview
+ if ((1 > mCapturedFrames) && !mBracketingEnabled && (mCapMode != CP_CAM)) {
+ // Signal end of image capture
+ if ( NULL != mEndImageCaptureCallback) {
+ mEndImageCaptureCallback(mEndCaptureData);
+ }
+ return NO_ERROR;
}
- return NO_ERROR;
- }
+ }
if ( NO_ERROR == ret )
{
@@ -381,25 +477,29 @@ status_t OMXCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType
}
}
- if ( NO_ERROR == ret )
- {
-
- for ( int i = 0 ; i < port->mNumBufs ; i++)
- {
- if ( port->mBufferHeader[i]->pBuffer == frameBuf )
- {
+ if ( NO_ERROR == ret ) {
+ for ( int i = 0 ; i < port->mNumBufs ; i++) {
+ if ((CameraBuffer *) port->mBufferHeader[i]->pAppPrivate == frameBuf) {
+ if ( isCaptureFrame && !mBracketingEnabled ) {
+ android::AutoMutex lock(mBurstLock);
+ if (mBurstFramesQueued >= mBurstFramesAccum) {
+ port->mStatus[i] = OMXCameraPortParameters::IDLE;
+ return NO_ERROR;
+ }
+ mBurstFramesQueued++;
+ }
+ port->mStatus[i] = OMXCameraPortParameters::FILL;
eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp, port->mBufferHeader[i]);
if ( eError != OMX_ErrorNone )
- {
+ {
CAMHAL_LOGEB("OMX_FillThisBuffer 0x%x", eError);
goto EXIT;
- }
+ }
mFramesWithDucati++;
break;
- }
- }
-
- }
+ }
+ }
+ }
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -410,86 +510,106 @@ EXIT:
//Since fillthisbuffer is called asynchronously, make sure to signal error to the app
mErrorNotifier->errorNotify(CAMERA_ERROR_HARD);
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+#ifndef OMAP_TUNA
+void OMXCameraAdapter::setParamS3D(OMX_U32 port, const char *valstr)
+{
+ OMXCameraPortParameters *cap;
+
+ LOG_FUNCTION_NAME;
+
+ cap = &mCameraAdapterParameters.mCameraPortParams[port];
+ if (valstr != NULL)
+ {
+ if (strcmp(valstr, TICameraParameters::S3D_TB_FULL) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutTopBottom;
+ }
+ else if (strcmp(valstr, TICameraParameters::S3D_SS_FULL) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutLeftRight;
+ }
+ else if (strcmp(valstr, TICameraParameters::S3D_TB_SUBSAMPLED) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutTopBottomSubsample;
+ }
+ else if (strcmp(valstr, TICameraParameters::S3D_SS_SUBSAMPLED) == 0)
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayoutLeftRightSubsample;
+ }
+ else
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayout2D;
+ }
+ }
+ else
+ {
+ cap->mFrameLayoutType = OMX_TI_StereoFrameLayout2D;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
}
+#endif
-status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
+status_t OMXCameraAdapter::setParameters(const android::CameraParameters &params)
{
LOG_FUNCTION_NAME;
- const char * str = NULL;
int mode = 0;
status_t ret = NO_ERROR;
bool updateImagePortParams = false;
int minFramerate, maxFramerate, frameRate;
const char *valstr = NULL;
- const char *oldstr = NULL;
int w, h;
OMX_COLOR_FORMATTYPE pixFormat;
BaseCameraAdapter::AdapterState state;
BaseCameraAdapter::getState(state);
///@todo Include more camera parameters
- if ( (valstr = params.getPreviewFormat()) != NULL )
- {
- if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
- strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0 ||
- strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
- {
+ if ( (valstr = params.getPreviewFormat()) != NULL ) {
+ if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0 ||
+ strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
CAMHAL_LOGDA("YUV420SP format selected");
- pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
- }
- else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
- {
+ pixFormat = OMX_COLOR_FormatYUV420PackedSemiPlanar;
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
CAMHAL_LOGDA("RGB565 format selected");
pixFormat = OMX_COLOR_Format16bitRGB565;
- }
- else
- {
+ } else {
CAMHAL_LOGDA("Invalid format, CbYCrY format selected as default");
pixFormat = OMX_COLOR_FormatCbYCrY;
- }
}
- else
- {
+ } else {
CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY");
pixFormat = OMX_COLOR_FormatCbYCrY;
- }
+ }
OMXCameraPortParameters *cap;
cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
params.getPreviewSize(&w, &h);
frameRate = params.getPreviewFrameRate();
- minFramerate = params.getInt(TICameraParameters::KEY_MINFRAMERATE);
- maxFramerate = params.getInt(TICameraParameters::KEY_MAXFRAMERATE);
- if ( ( 0 < minFramerate ) &&
- ( 0 < maxFramerate ) )
- {
- if ( minFramerate > maxFramerate )
- {
- CAMHAL_LOGEA(" Min FPS set higher than MAX. So setting MIN and MAX to the higher value");
- maxFramerate = minFramerate;
- }
+ params.getPreviewFpsRange(&minFramerate, &maxFramerate);
+ minFramerate /= CameraHal::VFR_SCALE;
+ maxFramerate /= CameraHal::VFR_SCALE;
+ if ( ( 0 < minFramerate ) && ( 0 < maxFramerate ) ) {
+ if ( minFramerate > maxFramerate ) {
+ CAMHAL_LOGEA(" Min FPS set higher than MAX. So setting MIN and MAX to the higher value");
+ maxFramerate = minFramerate;
+ }
- if ( 0 >= frameRate )
- {
+ if ( 0 >= frameRate ) {
frameRate = maxFramerate;
- }
+ }
- if( ( cap->mMinFrameRate != minFramerate ) ||
- ( cap->mMaxFrameRate != maxFramerate ) )
- {
+ if ( ( cap->mMinFrameRate != (OMX_U32) minFramerate ) ||
+ ( cap->mMaxFrameRate != (OMX_U32) maxFramerate ) ) {
cap->mMinFrameRate = minFramerate;
cap->mMaxFrameRate = maxFramerate;
setVFramerate(cap->mMinFrameRate, cap->mMaxFrameRate);
- }
}
-
- // TODO(XXX): Limiting 1080p to (24,24) or (15,15) for now. Need to remove later.
- if ((w >= 1920) && (h >= 1080)) {
- cap->mMaxFrameRate = cap->mMinFrameRate;
- setVFramerate(cap->mMinFrameRate, cap->mMaxFrameRate);
}
if ( 0 < frameRate )
@@ -537,22 +657,23 @@ status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
mOMXStateSwitch = true;
}
+#ifdef CAMERAHAL_TUNA
valstr = params.get(TICameraParameters::KEY_RECORDING_HINT);
- if (!valstr || (valstr && (strcmp(valstr, CameraParameters::FALSE)))) {
+ if (!valstr || (valstr && (strcmp(valstr, android::CameraParameters::FALSE)))) {
mIternalRecordingHint = false;
} else {
mIternalRecordingHint = true;
}
+#endif
#ifdef OMAP_ENHANCEMENT
-
if ( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
{
- if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_ENABLE) == 0)
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
mMeasurementEnabled = true;
}
- else if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_DISABLE) == 0)
+ else if (strcmp(valstr, android::CameraParameters::FALSE) == 0)
{
mMeasurementEnabled = false;
}
@@ -566,7 +687,11 @@ status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
//Disable measurement data by default
mMeasurementEnabled = false;
}
+#endif
+#ifdef OMAP_ENHANCEMENT_S3D
+ setParamS3D(mCameraAdapterParameters.mPrevPortIndex,
+ params.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT));
#endif
ret |= setParametersCapture(params, state);
@@ -586,6 +711,10 @@ status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
mParams = params;
mFirstTimeInit = false;
+ if ( MODE_MAX != mCapabilitiesOpMode ) {
+ mCapabilities->setMode(mCapabilitiesOpMode);
+ }
+
LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -601,7 +730,7 @@ void saveFile(unsigned char *buff, int width, int height, int format) {
sprintf(fn, "/preview%03d.yuv", counter);
fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
if(fd < 0) {
- ALOGE("Unable to open file %s: %s", fn, strerror(fd));
+ CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
return;
}
@@ -630,7 +759,36 @@ void saveFile(unsigned char *buff, int width, int height, int format) {
LOG_FUNCTION_NAME_EXIT;
}
-void OMXCameraAdapter::getParameters(CameraParameters& params)
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+static status_t saveBufferToFile(const void *buf, int size, const char *filename)
+{
+ if (size < 0) {
+ CAMHAL_LOGE("Wrong buffer size: %d", size);
+ return BAD_VALUE;
+ }
+
+ const int fd = open(filename, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0644);
+ if (fd < 0) {
+ CAMHAL_LOGE("ERROR: %s, Unable to save raw file", strerror(fd));
+ return BAD_VALUE;
+ }
+
+ if (write(fd, buf, size) != (signed)size) {
+ CAMHAL_LOGE("ERROR: Unable to write to raw file: %s ", strerror(errno));
+ close(fd);
+ return NO_MEMORY;
+ }
+
+ CAMHAL_LOGD("buffer=%p, size=%d stored at %s", buf, size, filename);
+
+ close(fd);
+ return OK;
+}
+#endif
+
+
+void OMXCameraAdapter::getParameters(android::CameraParameters& params)
{
status_t ret = NO_ERROR;
OMX_CONFIG_EXPOSUREVALUETYPE exp;
@@ -643,9 +801,7 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
if( mParameters3A.SceneMode != OMX_Manual ) {
const char *valstr_supported = NULL;
- // if preview is not started...we still need to feedback the proper params
- // look up the settings in the LUT
- if (((state & PREVIEW_ACTIVE) == 0) && mCapabilities) {
+ if (mCapabilities) {
const SceneModesEntry* entry = NULL;
entry = getSceneModeEntry(mCapabilities->get(CameraProperties::CAMERA_NAME),
(OMX_SCENEMODETYPE) mParameters3A.SceneMode);
@@ -657,40 +813,39 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
}
valstr = getLUTvalue_OMXtoHAL(mParameters3A.WhiteBallance, WBalLUT);
- valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
if (valstr && valstr_supported && strstr(valstr_supported, valstr))
- params.set(CameraParameters::KEY_WHITE_BALANCE , valstr);
+ params.set(android::CameraParameters::KEY_WHITE_BALANCE , valstr);
valstr = getLUTvalue_OMXtoHAL(mParameters3A.FlashMode, FlashLUT);
- valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_FLASH_MODES);
if (valstr && valstr_supported && strstr(valstr_supported, valstr))
- params.set(CameraParameters::KEY_FLASH_MODE, valstr);
+ params.set(android::CameraParameters::KEY_FLASH_MODE, valstr);
if ((mParameters3A.Focus == OMX_IMAGE_FocusControlAuto) &&
(mCapMode != OMXCameraAdapter::VIDEO_MODE)) {
- valstr = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
+ valstr = android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
} else {
valstr = getLUTvalue_OMXtoHAL(mParameters3A.Focus, FocusLUT);
}
- valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
if (valstr && valstr_supported && strstr(valstr_supported, valstr))
- params.set(CameraParameters::KEY_FOCUS_MODE, valstr);
+ params.set(android::CameraParameters::KEY_FOCUS_MODE, valstr);
}
//Query focus distances only when focus is running
if ( ( AF_ACTIVE & state ) ||
- ( NULL == mParameters.get(CameraParameters::KEY_FOCUS_DISTANCES) ) )
+ ( NULL == mParameters.get(android::CameraParameters::KEY_FOCUS_DISTANCES) ) )
{
updateFocusDistances(params);
}
else
{
- params.set(CameraParameters::KEY_FOCUS_DISTANCES,
- mParameters.get(CameraParameters::KEY_FOCUS_DISTANCES));
+ params.set(android::CameraParameters::KEY_FOCUS_DISTANCES,
+ mParameters.get(android::CameraParameters::KEY_FOCUS_DISTANCES));
}
#ifdef OMAP_ENHANCEMENT
-
OMX_INIT_STRUCT_PTR (&exp, OMX_CONFIG_EXPOSUREVALUETYPE);
exp.nPortIndex = OMX_ALL;
@@ -705,11 +860,10 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
{
CAMHAL_LOGEB("OMX error 0x%x, while retrieving current ISO value", eError);
}
-
#endif
{
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
//Immediate zoom should not be avaialable while smooth zoom is running
if ( ZOOM_ACTIVE & state )
{
@@ -717,7 +871,7 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
{
mZoomParameterIdx += mZoomInc;
}
- params.set( CameraParameters::KEY_ZOOM, mZoomParameterIdx);
+ params.set(android::CameraParameters::KEY_ZOOM, mZoomParameterIdx);
if ( ( mCurrentZoomIdx == mTargetZoomIdx ) &&
( mZoomParameterIdx == mCurrentZoomIdx ) )
{
@@ -744,36 +898,240 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
}
else
{
- params.set( CameraParameters::KEY_ZOOM, mCurrentZoomIdx);
+ params.set(android::CameraParameters::KEY_ZOOM, mCurrentZoomIdx);
}
}
//Populate current lock status
- if ( mParameters3A.ExposureLock ) {
- params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
- CameraParameters::TRUE);
+ if ( mUserSetExpLock || mParameters3A.ExposureLock ) {
+ params.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
+ android::CameraParameters::TRUE);
} else {
- params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
- CameraParameters::FALSE);
+ params.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
+ android::CameraParameters::FALSE);
}
- if ( mParameters3A.WhiteBalanceLock ) {
- params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
- CameraParameters::TRUE);
+ if ( mUserSetWbLock || mParameters3A.WhiteBalanceLock ) {
+ params.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
+ android::CameraParameters::TRUE);
} else {
- params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
- CameraParameters::FALSE);
+ params.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
+ android::CameraParameters::FALSE);
}
+ // Update Picture size capabilities dynamically
+ params.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+ mCapabilities->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
+
+ // Update framerate capabilities dynamically
+ params.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+ mCapabilities->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+
+ params.set(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED,
+ mCapabilities->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT));
+
+ params.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+ mCapabilities->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+
+ params.set(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED,
+ mCapabilities->get(CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED));
+
LOG_FUNCTION_NAME_EXIT;
}
-status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &portParams)
+status_t OMXCameraAdapter::setupTunnel(uint32_t SliceHeight, uint32_t EncoderHandle, uint32_t width, uint32_t height) {
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_HANDLETYPE *encoderHandle = (OMX_HANDLETYPE *)EncoderHandle;
+
+ CAMHAL_LOGDB("\n %s: SliceHeight:%d, EncoderHandle:%d width:%d height:%d \n", __FUNCTION__, SliceHeight, EncoderHandle, width, height);
+
+ if (SliceHeight == 0){
+ CAMHAL_LOGEA("\n\n #### Encoder Slice Height Not received, Dont Setup Tunnel $$$$\n\n");
+ return BAD_VALUE;
+ }
+
+ if (encoderHandle == NULL) {
+ CAMHAL_LOGEA("Encoder Handle not set \n\n");
+ return BAD_VALUE;
+ }
+
+ if ( 0 != mInitSem.Count() ) {
+ CAMHAL_LOGEB("Error mInitSem semaphore count %d", mInitSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ // Register for port enable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ mInitSem);
+ if(ret != NO_ERROR) {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ // Enable VIDEO Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ // Wait for the port enable event to occur
+ ret = mInitSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret ) {
+ CAMHAL_LOGDA("-Port enable event arrived");
+ } else {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Timeout for enabling preview port expired!");
+ return UNKNOWN_ERROR;
+ }
+
+ //Set the Video Port Params
+ OMX_PARAM_PORTDEFINITIONTYPE portCheck;
+ OMX_INIT_STRUCT_PTR (&portCheck, OMX_PARAM_PORTDEFINITIONTYPE);
+ portCheck.nPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_VIDEO;
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetParameter OMX_IndexParamPortDefinition Error - %x", eError);
+ }
+
+ portCheck.format.video.nFrameWidth = width;
+ portCheck.format.video.nFrameHeight = height;
+ portCheck.format.video.eColorFormat = OMX_COLOR_FormatYUV420PackedSemiPlanar;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter OMX_IndexParamPortDefinition Error- %x", eError);
+ }
+
+#ifndef OMAP_TUNA
+ //Slice Configuration
+ OMX_TI_PARAM_VTCSLICE VTCSlice;
+ OMX_INIT_STRUCT_PTR(&VTCSlice, OMX_TI_PARAM_VTCSLICE);
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE)OMX_TI_IndexParamVtcSlice, &VTCSlice);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetParameter OMX_TI_IndexParamVtcSlice Error - %x", eError);
+ }
+
+ VTCSlice.nSliceHeight = SliceHeight;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE)OMX_TI_IndexParamVtcSlice, &VTCSlice);
+ if (OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_SetParameter on OMX_TI_IndexParamVtcSlice returned error: 0x%x", eError);
+ return BAD_VALUE;
+ }
+#endif
+
+ eError = OMX_SetupTunnel(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mVideoPortIndex, encoderHandle, 0);
+ if (OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("OMX_SetupTunnel returned error: 0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::setSensorQuirks(int orientation,
+ OMXCameraPortParameters &portParams,
+ bool &portConfigured)
{
- size_t bufferCount;
+ status_t overclockStatus = NO_ERROR;
+ int sensorID = -1;
+ size_t overclockWidth;
+ size_t overclockHeight;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_PORTDEFINITIONTYPE portCheck;
LOG_FUNCTION_NAME;
+ portConfigured = false;
+ OMX_INIT_STRUCT_PTR (&portCheck, OMX_PARAM_PORTDEFINITIONTYPE);
+
+ portCheck.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition,
+ &portCheck);
+
+ if ( eError != OMX_ErrorNone ) {
+ CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ if ( ( orientation == 90 ) || ( orientation == 270 ) ) {
+ overclockWidth = 1080;
+ overclockHeight = 1920;
+ } else {
+ overclockWidth = 1920;
+ overclockHeight = 1080;
+ }
+
+ sensorID = mCapabilities->getInt(CameraProperties::CAMERA_SENSOR_ID);
+ if( ( ( sensorID == SENSORID_IMX060 ) &&
+ ( portParams.mWidth >= overclockWidth ) &&
+ ( portParams.mHeight >= overclockHeight ) &&
+ ( portParams.mFrameRate >= FRAME_RATE_FULL_HD ) ) ||
+ (( sensorID == SENSORID_OV14825) &&
+ ( portParams.mFrameRate >= FRAME_RATE_HIGH_HD ))||
+ ( ( sensorID == SENSORID_OV5640 ) &&
+ ( portParams.mWidth >= overclockWidth ) &&
+ ( portParams.mHeight >= overclockHeight ) ) ) {
+ overclockStatus = setSensorOverclock(true);
+ } else {
+
+ //WA: If the next port resolution doesn't require
+ // sensor overclocking, but the previous resolution
+ // needed it, then we have to first set new port
+ // resolution and then disable sensor overclocking.
+ if( ( ( sensorID == SENSORID_IMX060 ) &&
+ ( portCheck.format.video.nFrameWidth >= overclockWidth ) &&
+ ( portCheck.format.video.nFrameHeight >= overclockHeight ) &&
+ ( ( portCheck.format.video.xFramerate >> 16 ) >= FRAME_RATE_FULL_HD ) ) ||
+ (( sensorID == SENSORID_OV14825) &&
+ (( portCheck.format.video.xFramerate >> 16) >= FRAME_RATE_HIGH_HD ))||
+ ( ( sensorID == SENSORID_OV5640 ) &&
+ ( portCheck.format.video.nFrameWidth >= overclockWidth ) &&
+ ( portCheck.format.video.nFrameHeight >= overclockHeight ) ) ) {
+ status_t ret = setFormat(mCameraAdapterParameters.mPrevPortIndex,
+ portParams);
+ if ( NO_ERROR != ret ) {
+ return ret;
+ }
+
+ // Another WA: Setting the port definition will reset the VFR
+ // configuration.
+ setVFramerate(portParams.mMinFrameRate, portParams.mMaxFrameRate);
+
+ portConfigured = true;
+ }
+
+ overclockStatus = setSensorOverclock(false);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return overclockStatus;
+}
+status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &portParams)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ size_t bufferCount;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_PARAM_PORTDEFINITIONTYPE portCheck;
@@ -783,152 +1141,155 @@ status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &port
eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
OMX_IndexParamPortDefinition, &portCheck);
- if(eError!=OMX_ErrorNone)
- {
+ if (eError!=OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
- }
+ }
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- if ( OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port )
- {
+ if (OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port) {
portCheck.format.video.nFrameWidth = portParams.mWidth;
portCheck.format.video.nFrameHeight = portParams.mHeight;
portCheck.format.video.eColorFormat = portParams.mColorFormat;
portCheck.format.video.nStride = portParams.mStride;
- if( ( portCheck.format.video.nFrameWidth >= 1920 ) &&
- ( portCheck.format.video.nFrameHeight >= 1080 ) &&
- ( portParams.mFrameRate >= FRAME_RATE_FULL_HD ) )
- {
- setSensorOverclock(true);
- }
- else
- {
- setSensorOverclock(false);
- }
portCheck.format.video.xFramerate = portParams.mFrameRate<<16;
portCheck.nBufferSize = portParams.mStride * portParams.mHeight;
portCheck.nBufferCountActual = portParams.mNumBufs;
mFocusThreshold = FOCUS_THRESHOLD * portParams.mFrameRate;
- }
- else if ( OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port )
- {
+ // Used for RAW capture
+ } else if (OMX_CAMERA_PORT_VIDEO_OUT_VIDEO == port) {
+ portCheck.format.video.nFrameWidth = portParams.mWidth;
+ portCheck.format.video.nFrameHeight = portParams.mHeight;
+ portCheck.format.video.eColorFormat = OMX_COLOR_FormatRawBayer10bit; // portParams.mColorFormat;
+ portCheck.nBufferCountActual = 1; // portParams.mNumBufs;
+ } else if (OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port) {
portCheck.format.image.nFrameWidth = portParams.mWidth;
portCheck.format.image.nFrameHeight = portParams.mHeight;
- if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingNone )
- {
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
+ if (OMX_COLOR_FormatUnused == portParams.mColorFormat) {
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ if (mCodingMode == CodingJPEG) {
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
+ } else if (mCodingMode == CodingJPS) {
+ portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingJPS;
+ } else if (mCodingMode == CodingMPO) {
+ portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingMPO;
+ } else {
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
}
- else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingJPS )
- {
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingJPS;
- }
- else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingMPO )
- {
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingMPO;
- }
- else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingRAWJPEG )
- {
- //TODO: OMX_IMAGE_CodingJPEG should be changed to OMX_IMAGE_CodingRAWJPEG when
- // RAW format is supported
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
- }
- else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingRAWMPO )
- {
- //TODO: OMX_IMAGE_CodingJPEG should be changed to OMX_IMAGE_CodingRAWMPO when
- // RAW format is supported
- portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
- portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
- }
- else
- {
- portCheck.format.image.eColorFormat = portParams.mColorFormat;
+ } else {
+ portCheck.format.image.eColorFormat = portParams.mColorFormat;
portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
- }
+ }
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ // RAW + YUV Capture
+ if (mYuvCapture) {
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
+ }
+#endif
//Stride for 1D tiler buffer is zero
portCheck.format.image.nStride = 0;
- portCheck.nBufferSize = portParams.mStride * portParams.mWidth * portParams.mHeight;
portCheck.nBufferCountActual = portParams.mNumBufs;
+ } else if (OMX_CAMERA_PORT_VIDEO_IN_VIDEO == port) {
+ portCheck.format.video.nFrameWidth = portParams.mWidth;
+ portCheck.format.video.nStride = portParams.mStride;
+ portCheck.format.video.nFrameHeight = portParams.mHeight;
+ portCheck.format.video.eColorFormat = portParams.mColorFormat;
+ portCheck.format.video.xFramerate = 30 << 16;
+ portCheck.nBufferCountActual = portParams.mNumBufs;
+ } else {
+ CAMHAL_LOGEB("Unsupported port index (%lu)", port);
+ }
+
+#ifndef OMAP_TUNA
+ if (( mSensorIndex == OMX_TI_StereoSensor ) && (OMX_CAMERA_PORT_VIDEO_OUT_VIDEO != port)) {
+ ret = setS3DFrameLayout(port);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEA("Error configuring stereo 3D frame layout");
+ return ret;
+ }
}
- else
- {
- CAMHAL_LOGEB("Unsupported port index 0x%x", (unsigned int)port);
- }
+#endif
eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
- OMX_IndexParamPortDefinition, &portCheck);
- if(eError!=OMX_ErrorNone)
- {
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
- }
+ }
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
/* check if parameters are set correctly by calling GetParameter() */
eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
- OMX_IndexParamPortDefinition, &portCheck);
- if(eError!=OMX_ErrorNone)
- {
+ OMX_IndexParamPortDefinition, &portCheck);
+ if (eError!=OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
- }
+ }
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
portParams.mBufSize = portCheck.nBufferSize;
portParams.mStride = portCheck.format.image.nStride;
- if ( OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port )
- {
+ if (OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port) {
CAMHAL_LOGDB("\n *** IMG Width = %ld", portCheck.format.image.nFrameWidth);
- CAMHAL_LOGDB("\n ***IMG Height = %ld", portCheck.format.image.nFrameHeight);
-
- CAMHAL_LOGDB("\n ***IMG IMG FMT = %x", portCheck.format.image.eColorFormat);
- CAMHAL_LOGDB("\n ***IMG portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
- CAMHAL_LOGDB("\n ***IMG portCheck.nBufferCountMin = %ld\n",
- portCheck.nBufferCountMin);
- CAMHAL_LOGDB("\n ***IMG portCheck.nBufferCountActual = %ld\n",
- portCheck.nBufferCountActual);
- CAMHAL_LOGDB("\n ***IMG portCheck.format.image.nStride = %ld\n",
- portCheck.format.image.nStride);
- }
- else
- {
+ CAMHAL_LOGDB("\n *** IMG Height = %ld", portCheck.format.image.nFrameHeight);
+
+ CAMHAL_LOGDB("\n *** IMG IMG FMT = %x", portCheck.format.image.eColorFormat);
+ CAMHAL_LOGDB("\n *** IMG portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n *** IMG portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n *** IMG portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n *** IMG portCheck.format.image.nStride = %ld\n",
+ portCheck.format.image.nStride);
+ } else if (OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port) {
CAMHAL_LOGDB("\n *** PRV Width = %ld", portCheck.format.video.nFrameWidth);
- CAMHAL_LOGDB("\n ***PRV Height = %ld", portCheck.format.video.nFrameHeight);
-
- CAMHAL_LOGDB("\n ***PRV IMG FMT = %x", portCheck.format.video.eColorFormat);
- CAMHAL_LOGDB("\n ***PRV portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
- CAMHAL_LOGDB("\n ***PRV portCheck.nBufferCountMin = %ld\n",
- portCheck.nBufferCountMin);
- CAMHAL_LOGDB("\n ***PRV portCheck.nBufferCountActual = %ld\n",
- portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n *** PRV Height = %ld", portCheck.format.video.nFrameHeight);
+
+ CAMHAL_LOGDB("\n *** PRV IMG FMT = %x", portCheck.format.video.eColorFormat);
+ CAMHAL_LOGDB("\n *** PRV portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n *** PRV portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n *** PRV portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
CAMHAL_LOGDB("\n ***PRV portCheck.format.video.nStride = %ld\n",
- portCheck.format.video.nStride);
- }
+ portCheck.format.video.nStride);
+ } else {
+ CAMHAL_LOGDB("\n *** VID Width = %ld", portCheck.format.video.nFrameWidth);
+ CAMHAL_LOGDB("\n *** VID Height = %ld", portCheck.format.video.nFrameHeight);
+
+ CAMHAL_LOGDB("\n *** VID IMG FMT = %x", portCheck.format.video.eColorFormat);
+ CAMHAL_LOGDB("\n *** VID portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n *** VID portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n *** VID portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n *** VID portCheck.format.video.nStride = %ld\n",
+ portCheck.format.video.nStride);
+ }
+
+ mSetFormatDone = true;
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
EXIT:
- CAMHAL_LOGEB("Exiting function %s because of eError=%x", __FUNCTION__, eError);
+ CAMHAL_LOGEB("Exiting function %s because of eError = 0x%x", __FUNCTION__, eError);
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
-status_t OMXCameraAdapter::flushBuffers()
+status_t OMXCameraAdapter::flushBuffers(OMX_U32 nPort)
{
+ LOG_FUNCTION_NAME;
+
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- TIMM_OSAL_ERRORTYPE err;
- TIMM_OSAL_U32 uRequestedEvents = OMXCameraAdapter::CAMERA_PORT_FLUSH;
- TIMM_OSAL_U32 pRetrievedEvents;
if ( 0 != mFlushSem.Count() )
{
@@ -937,10 +1298,8 @@ status_t OMXCameraAdapter::flushBuffers()
return NO_INIT;
}
- LOG_FUNCTION_NAME;
-
OMXCameraPortParameters * mPreviewData = NULL;
- mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[nPort];
///Register for the FLUSH event
///This method just inserts a message in Event Q, which is checked in the callback
@@ -948,7 +1307,7 @@ status_t OMXCameraAdapter::flushBuffers()
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
OMX_CommandFlush,
- OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW,
+ nPort,
mFlushSem);
if(ret!=NO_ERROR)
{
@@ -959,7 +1318,7 @@ status_t OMXCameraAdapter::flushBuffers()
///Send FLUSH command to preview port
eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp,
OMX_CommandFlush,
- mCameraAdapterParameters.mPrevPortIndex,
+ nPort,
NULL);
if(eError!=OMX_ErrorNone)
@@ -989,25 +1348,27 @@ status_t OMXCameraAdapter::flushBuffers()
ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
OMX_CommandFlush,
- OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW,
+ nPort,
NULL);
CAMHAL_LOGDA("Flush event timeout expired");
goto EXIT;
}
+ mOMXCallbackHandler->flush();
+
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
///API to give the buffers to Adapter
-status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+status_t OMXCameraAdapter::useBuffers(CameraMode mode, CameraBuffer * bufArr, int num, size_t length, unsigned int queueable)
{
OMX_ERRORTYPE eError = OMX_ErrorNone;
status_t ret = NO_ERROR;
@@ -1029,9 +1390,9 @@ status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
break;
case CAMERA_VIDEO:
- mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mNumBufs = num;
- mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mMaxQueueable = queueable;
- ret = UseBuffersPreview(bufArr, num);
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersRawCapture(bufArr, num);
break;
case CAMERA_MEASUREMENT:
@@ -1040,6 +1401,11 @@ status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
ret = UseBuffersPreviewData(bufArr, num);
break;
+ case CAMERA_REPROCESS:
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersReprocess(bufArr, num);
+ break;
}
LOG_FUNCTION_NAME_EXIT;
@@ -1047,13 +1413,12 @@ status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
return ret;
}
-status_t OMXCameraAdapter::UseBuffersPreviewData(void* bufArr, int num)
+status_t OMXCameraAdapter::UseBuffersPreviewData(CameraBuffer * bufArr, int num)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters * measurementData = NULL;
- uint32_t *buffers;
- Mutex::Autolock lock( mPreviewDataBufferLock);
+ android::AutoMutex lock(mPreviewDataBufferLock);
LOG_FUNCTION_NAME;
@@ -1080,7 +1445,6 @@ status_t OMXCameraAdapter::UseBuffersPreviewData(void* bufArr, int num)
{
measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
measurementData->mNumBufs = num ;
- buffers= (uint32_t*) bufArr;
}
if ( NO_ERROR == ret )
@@ -1158,13 +1522,13 @@ EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::switchToExecuting()
{
status_t ret = NO_ERROR;
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -1173,7 +1537,7 @@ status_t OMXCameraAdapter::switchToExecuting()
msg.arg1 = mErrorNotifier;
ret = mCommandHandler->put(&msg);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -1275,23 +1639,21 @@ status_t OMXCameraAdapter::doSwitchToExecuting()
performCleanupAfterError();
mStateSwitchLock.unlock();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-status_t OMXCameraAdapter::switchToLoaded()
-{
+status_t OMXCameraAdapter::switchToIdle() {
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mStateSwitchLock);
+ android::AutoMutex lock(mIdleStateSwitchLock);
- if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid)
- {
- CAMHAL_LOGDA("Already in OMX_Loaded state or OMX_StateInvalid state");
+ if ( mComponentState == OMX_StateIdle || mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGDA("Already in OMX_StateIdle, OMX_Loaded state or OMX_StateInvalid state");
return NO_ERROR;
- }
+ }
if ( 0 != mSwitchToLoadedSem.Count() )
{
@@ -1353,6 +1715,107 @@ status_t OMXCameraAdapter::switchToLoaded()
goto EXIT;
}
+ mComponentState = OMX_StateIdle;
+
+ return NO_ERROR;
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+
+
+status_t OMXCameraAdapter::prevPortEnable() {
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ ///Register for Preview port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mSwitchToLoadedSem);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Enable Preview Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+
+
+ CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandStateSet) 0x%x", eError);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ CAMHAL_LOGDA("Enabling Preview port");
+ ///Wait for state to switch to idle
+ ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+ //If somethiing bad happened while we wait
+ if (mComponentState == OMX_StateInvalid)
+ {
+ CAMHAL_LOGEA("Invalid State after Enabling Preview port Exitting!!!");
+ goto EXIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Preview port enabled!");
+ }
+ else
+ {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Preview enable timedout");
+
+ goto EXIT;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::switchToLoaded(bool bPortEnableRequired) {
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mStateSwitchLock);
+ if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGDA("Already in OMX_Loaded state or OMX_StateInvalid state");
+ return NO_ERROR;
+ }
+
+ if ( mComponentState != OMX_StateIdle) {
+ ret = switchToIdle();
+ if (ret != NO_ERROR) return ret;
+ }
+
+ if ( 0 != mSwitchToLoadedSem.Count() ) {
+ CAMHAL_LOGEB("Error mSwitchToLoadedSem semaphore count %d", mSwitchToLoadedSem.Count());
+ goto EXIT;
+ }
+
///Register for LOADED state transition.
///This method just inserts a message in Event Q, which is checked in the callback
///The sempahore passed is signalled by the callback
@@ -1379,93 +1842,91 @@ status_t OMXCameraAdapter::switchToLoaded()
}
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- CAMHAL_LOGDA("Switching IDLE->LOADED state");
- ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
-
- //If somethiing bad happened while we wait
- if (mComponentState == OMX_StateInvalid)
- {
- CAMHAL_LOGEA("Invalid State after IDLE->LOADED Exitting!!!");
- goto EXIT;
- }
+ if ( !bPortEnableRequired ) {
+ OMXCameraPortParameters *mCaptureData , *mPreviewData, *measurementData;
+ mCaptureData = mPreviewData = measurementData = NULL;
- if ( NO_ERROR == ret )
- {
- CAMHAL_LOGDA("IDLE->LOADED state changed");
- }
- else
- {
- ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandStateSet,
- OMX_StateLoaded,
- NULL);
- CAMHAL_LOGEA("Timeout expired on IDLE->LOADED state change");
- goto EXIT;
- }
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ mCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
- mComponentState = OMX_StateLoaded;
+ ///Free the OMX Buffers
+ for ( int i = 0 ; i < mPreviewData->mNumBufs ; i++ ) {
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mPreviewData->mBufferHeader[i]);
- ///Register for Preview port ENABLE event
- ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mPrevPortIndex,
- mSwitchToLoadedSem);
-
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("Error in registering for event %d", ret);
- goto EXIT;
+ if(eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
}
- ///Enable Preview Port
- eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mPrevPortIndex,
- NULL);
+ if ( mMeasurementEnabled ) {
+ for ( int i = 0 ; i < measurementData->mNumBufs ; i++ ) {
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mMeasurementPortIndex,
+ measurementData->mBufferHeader[i]);
+ if(eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
- CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandStateSet) 0x%x", eError);
- GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ {
+ android::AutoMutex lock(mPreviewDataBufferLock);
+ mPreviewDataBuffersAvailable.clear();
+ }
- CAMHAL_LOGDA("Enabling Preview port");
- ///Wait for state to switch to idle
+ }
+ }
+
+ CAMHAL_LOGDA("Switching IDLE->LOADED state");
ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
//If somethiing bad happened while we wait
if (mComponentState == OMX_StateInvalid)
{
- CAMHAL_LOGEA("Invalid State after Enabling Preview port Exitting!!!");
+ CAMHAL_LOGEA("Invalid State after IDLE->LOADED Exitting!!!");
goto EXIT;
}
if ( NO_ERROR == ret )
{
- CAMHAL_LOGDA("Preview port enabled!");
+ CAMHAL_LOGDA("IDLE->LOADED state changed");
}
else
{
ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mPrevPortIndex,
+ OMX_CommandStateSet,
+ OMX_StateLoaded,
NULL);
- CAMHAL_LOGEA("Preview enable timedout");
-
+ CAMHAL_LOGEA("Timeout expired on IDLE->LOADED state change");
goto EXIT;
}
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ mComponentState = OMX_StateLoaded;
+ if (bPortEnableRequired == true) {
+ prevPortEnable();
+ }
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
+ ///Clear all the available preview buffers
+ mPreviewBuffersAvailable.clear();
+ }
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
+status_t OMXCameraAdapter::UseBuffersPreview(CameraBuffer * bufArr, int num)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
@@ -1485,7 +1946,6 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
mPreviewData->mNumBufs = num ;
- uint32_t *buffers = (uint32_t*)bufArr;
if ( 0 != mUsePreviewSem.Count() )
{
@@ -1503,70 +1963,52 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
mStateSwitchLock.lock();
- if ( mComponentState == OMX_StateLoaded )
- {
+ if ( mComponentState == OMX_StateLoaded ) {
- ret = setLDC(mIPP);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setLDC() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
+ if (mPendingPreviewSettings & SetLDC) {
+ mPendingPreviewSettings &= ~SetLDC;
+ ret = setLDC(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setLDC() failed %d", ret);
}
+ }
- ret = setNSF(mIPP);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setNSF() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
+ if (mPendingPreviewSettings & SetNSF) {
+ mPendingPreviewSettings &= ~SetNSF;
+ ret = setNSF(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setNSF() failed %d", ret);
}
+ }
- ret = setCaptureMode(mCapMode);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
+ if (mPendingPreviewSettings & SetCapMode) {
+ mPendingPreviewSettings &= ~SetCapMode;
+ ret = setCaptureMode(mCapMode);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
}
+ }
- CAMHAL_LOGDB("Camera Mode = %d", mCapMode);
-
- if( mCapMode == OMXCameraAdapter::VIDEO_MODE )
- {
- ///Enable/Disable Video Noise Filter
- ret = enableVideoNoiseFilter(mVnfEnabled);
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VNF %x", ret);
- return ret;
- }
+ if(mCapMode == OMXCameraAdapter::VIDEO_MODE) {
- ///Enable/Disable Video Stabilization
- ret = enableVideoStabilization(mVstabEnabled);
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
- return ret;
+ if (mPendingPreviewSettings & SetVNF) {
+ mPendingPreviewSettings &= ~SetVNF;
+ ret = enableVideoNoiseFilter(mVnfEnabled);
+ if ( NO_ERROR != ret){
+ CAMHAL_LOGEB("Error configuring VNF %x", ret);
}
}
- else
- {
- ret = enableVideoNoiseFilter(false);
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VNF %x", ret);
- return ret;
- }
- ///Enable/Disable Video Stabilization
- ret = enableVideoStabilization(false);
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
- return ret;
+
+ if (mPendingPreviewSettings & SetVSTAB) {
+ mPendingPreviewSettings &= ~SetVSTAB;
+ ret = enableVideoStabilization(mVstabEnabled);
+ if ( NO_ERROR != ret) {
+ CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
}
}
+
}
+ }
ret = setSensorOrientation(mSensorOrientation);
if ( NO_ERROR != ret )
@@ -1575,14 +2017,6 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
mSensorOrientation = 0;
}
- ret = setVFramerate(mPreviewData->mMinFrameRate, mPreviewData->mMaxFrameRate);
- if ( ret != NO_ERROR )
- {
- CAMHAL_LOGEB("VFR configuration failed 0x%x", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
- }
-
if ( mComponentState == OMX_StateLoaded )
{
///Register for IDLE state switch event
@@ -1650,21 +2084,22 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
OMX_BUFFERHEADERTYPE *pBufferHdr;
for(int index=0;index<num;index++) {
+ OMX_U8 *ptr;
- CAMHAL_LOGDB("OMX_UseBuffer(0x%x)", buffers[index]);
+ ptr = (OMX_U8 *)camera_buffer_get_omx_ptr (&bufArr[index]);
eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
&pBufferHdr,
mCameraAdapterParameters.mPrevPortIndex,
0,
mPreviewData->mBufSize,
- (OMX_U8*)buffers[index]);
+ ptr);
if(eError!=OMX_ErrorNone)
{
CAMHAL_LOGEB("OMX_UseBuffer-0x%x", eError);
}
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- //pBufferHdr->pAppPrivate = (OMX_PTR)pBufferHdr;
+ pBufferHdr->pAppPrivate = (OMX_PTR)&bufArr[index];
pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
pBufferHdr->nVersion.s.nVersionMajor = 1 ;
pBufferHdr->nVersion.s.nVersionMinor = 1 ;
@@ -1679,15 +2114,19 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
for( int i = 0; i < num; i++ )
{
OMX_BUFFERHEADERTYPE *pBufHdr;
+ OMX_U8 *ptr;
+
+ ptr = (OMX_U8 *)camera_buffer_get_omx_ptr (&mPreviewDataBuffers[i]);
eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
&pBufHdr,
mCameraAdapterParameters.mMeasurementPortIndex,
0,
measurementData->mBufSize,
- (OMX_U8*)(mPreviewDataBuffers[i]));
+ ptr);
if ( eError == OMX_ErrorNone )
{
+ pBufHdr->pAppPrivate = (OMX_PTR *)&mPreviewDataBuffers[i];
pBufHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
pBufHdr->nVersion.s.nVersionMajor = 1 ;
pBufHdr->nVersion.s.nVersionMinor = 1 ;
@@ -1744,7 +2183,7 @@ status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
///If there is any failure, we reach here.
///Here, we do any resource freeing and convert from OMX error code to Camera Hal error code
@@ -1757,7 +2196,7 @@ EXIT:
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::startPreview()
@@ -1776,6 +2215,14 @@ status_t OMXCameraAdapter::startPreview()
goto EXIT;
}
+ // Enable all preview mode extra data.
+ if ( OMX_ErrorNone == eError) {
+ ret |= setExtraData(true, mCameraAdapterParameters.mPrevPortIndex, OMX_AncillaryData);
+#ifndef OMAP_TUNA
+ ret |= setExtraData(true, OMX_ALL, OMX_TI_VectShotInfo);
+#endif
+ }
+
mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
@@ -1838,11 +2285,11 @@ status_t OMXCameraAdapter::startPreview()
mStateSwitchLock.unlock();
- apply3Asettings(mParameters3A);
//Queue all the buffers on preview port
for(int index=0;index< mPreviewData->mMaxQueueable;index++)
{
CAMHAL_LOGDB("Queuing buffer on Preview port - 0x%x", (uint32_t)mPreviewData->mBufferHeader[index]->pBuffer);
+ mPreviewData->mStatus[index] = OMXCameraPortParameters::FILL;
eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
(OMX_BUFFERHEADERTYPE*)mPreviewData->mBufferHeader[index]);
if(eError!=OMX_ErrorNone)
@@ -1850,8 +2297,8 @@ status_t OMXCameraAdapter::startPreview()
CAMHAL_LOGEB("OMX_FillThisBuffer-0x%x", eError);
}
mFramesWithDucati++;
-#ifdef DEGUG_LOG
- mBuffersWithDucati.add((uint32_t)mPreviewData->mBufferHeader[index]->pBuffer,1);
+#ifdef CAMERAHAL_DEBUG
+ mBuffersWithDucati.add((int)mPreviewData->mBufferHeader[index]->pAppPrivate,1);
#endif
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
}
@@ -1862,6 +2309,7 @@ status_t OMXCameraAdapter::startPreview()
for(int index=0;index< mPreviewData->mNumBufs;index++)
{
CAMHAL_LOGDB("Queuing buffer on Measurement port - 0x%x", (uint32_t) measurementData->mBufferHeader[index]->pBuffer);
+ measurementData->mStatus[index] = OMXCameraPortParameters::FILL;
eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
(OMX_BUFFERHEADERTYPE*) measurementData->mBufferHeader[index]);
if(eError!=OMX_ErrorNone)
@@ -1873,22 +2321,7 @@ status_t OMXCameraAdapter::startPreview()
}
- // Enable Ancillary data. The nDCCStatus field is used to signify
- // whether the preview frame is a snapshot
- if ( OMX_ErrorNone == eError)
- {
- ret = setExtraData(true, OMX_ALL, OMX_AncillaryData);
- }
-
-
- if ( mPending3Asettings )
- apply3Asettings(mParameters3A);
-
- // enable focus callbacks just once here
- // fixes an issue with slow callback registration in Ducati
- if ( NO_ERROR == ret ) {
- ret = setFocusCallback(true);
- }
+ setFocusCallback(true);
//reset frame rate estimates
mFPS = 0.0f;
@@ -1904,10 +2337,11 @@ status_t OMXCameraAdapter::startPreview()
mLastFrameCount = 0;
mIter = 1;
mLastFPSTime = systemTime();
+ mTunnelDestroyed = false;
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
@@ -1916,11 +2350,11 @@ status_t OMXCameraAdapter::startPreview()
mStateSwitchLock.unlock();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-status_t OMXCameraAdapter::stopPreview()
+status_t OMXCameraAdapter::destroyTunnel()
{
LOG_FUNCTION_NAME;
@@ -1934,13 +2368,13 @@ status_t OMXCameraAdapter::stopPreview()
mCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
- if (mAdapterState == LOADED_PREVIEW_STATE) {
- // Something happened in CameraHal between UseBuffers and startPreview
- // this means that state switch is still locked..so we need to unlock else
- // deadlock will occur on the next start preview
- mStateSwitchLock.unlock();
- return NO_ERROR;
- }
+ if (mAdapterState == LOADED_PREVIEW_STATE) {
+ // Something happened in CameraHal between UseBuffers and startPreview
+ // this means that state switch is still locked..so we need to unlock else
+ // deadlock will occur on the next start preview
+ mStateSwitchLock.unlock();
+ return ALREADY_EXISTS;
+ }
if ( mComponentState != OMX_StateExecuting )
{
@@ -1950,7 +2384,7 @@ status_t OMXCameraAdapter::stopPreview()
}
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
// we should wait for the first frame to come before trying to stopPreview...if not
// we might put OMXCamera in a bad state (IDLE->LOADED timeout). Seeing this a lot
// after a capture
@@ -1969,11 +2403,9 @@ status_t OMXCameraAdapter::stopPreview()
mFirstFrameCondition.broadcast();
}
- ret = cancelAutoFocus();
- if(ret!=NO_ERROR)
{
- CAMHAL_LOGEB("Error canceling autofocus %d", ret);
- // Error, but we probably still want to continue to stop preview
+ android::AutoMutex lock(mDoAFMutex);
+ mDoAFCond.broadcast();
}
OMX_CONFIG_FOCUSASSISTTYPE focusAssist;
@@ -2016,89 +2448,53 @@ status_t OMXCameraAdapter::stopPreview()
goto EXIT;
}
- ///Register for Preview port Disable event
- ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortDisable,
- mCameraAdapterParameters.mPrevPortIndex,
- mStopPreviewSem);
-
- ///Disable Preview Port
- eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
- OMX_CommandPortDisable,
- mCameraAdapterParameters.mPrevPortIndex,
- NULL);
-
- ///Free the OMX Buffers
- for ( int i = 0 ; i < mPreviewData->mNumBufs ; i++ )
- {
- eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
- mCameraAdapterParameters.mPrevPortIndex,
- mPreviewData->mBufferHeader[i]);
-
- if(eError!=OMX_ErrorNone)
- {
- CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
- }
- GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- }
-
- if ( mMeasurementEnabled )
- {
+ switchToIdle();
- for ( int i = 0 ; i < measurementData->mNumBufs ; i++ )
- {
- eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
- mCameraAdapterParameters.mMeasurementPortIndex,
- measurementData->mBufferHeader[i]);
- if(eError!=OMX_ErrorNone)
- {
- CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
- }
- GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- }
+ mTunnelDestroyed = true;
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
- {
- Mutex::Autolock lock(mPreviewDataBufferLock);
- mPreviewDataBuffersAvailable.clear();
- }
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
+ ///Clear all the available preview buffers
+ mPreviewBuffersAvailable.clear();
+ }
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
- }
+}
- CAMHAL_LOGDA("Disabling preview port");
- ret = mStopPreviewSem.WaitTimeout(OMX_CMD_TIMEOUT);
+status_t OMXCameraAdapter::stopPreview() {
+ LOG_FUNCTION_NAME;
- //If somethiing bad happened while we wait
- if (mComponentState == OMX_StateInvalid)
- {
- CAMHAL_LOGEA("Invalid State after Disabling preview port Exitting!!!");
- goto EXIT;
- }
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ status_t ret = NO_ERROR;
- if ( NO_ERROR == ret )
- {
- CAMHAL_LOGDA("Preview port disabled");
+ if (mTunnelDestroyed == false){
+ ret = destroyTunnel();
+ if (ret == ALREADY_EXISTS) {
+ // Special case to handle invalid stopping preview in LOADED_PREVIEW_STATE
+ return NO_ERROR;
}
- else
- {
- ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortDisable,
- mCameraAdapterParameters.mPrevPortIndex,
- NULL);
- CAMHAL_LOGEA("Timeout expired on preview port disable");
- goto EXIT;
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGEB(" destroyTunnel returned error ");
+ return ret;
}
+ }
- {
- Mutex::Autolock lock(mPreviewBufferLock);
+ mTunnelDestroyed = false;
+
+ {
+ android::AutoMutex lock(mPreviewBufferLock);
///Clear all the available preview buffers
mPreviewBuffersAvailable.clear();
- }
+ }
switchToLoaded();
-
mFirstTimeInit = true;
mPendingCaptureSettings = 0;
mFramesWithDucati = 0;
@@ -2107,19 +2503,7 @@ status_t OMXCameraAdapter::stopPreview()
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
-
-EXIT:
- CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
- {
- Mutex::Autolock lock(mPreviewBufferLock);
- ///Clear all the available preview buffers
- mPreviewBuffersAvailable.clear();
- }
- performCleanupAfterError();
- LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
-
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::setSensorOverclock(bool enable)
@@ -2154,7 +2538,6 @@ status_t OMXCameraAdapter::setSensorOverclock(bool enable)
if ( OMX_ErrorNone != eError )
{
CAMHAL_LOGEB("Error while setting Sensor overclock 0x%x", eError);
- ret = BAD_VALUE;
}
else
{
@@ -2164,7 +2547,7 @@ status_t OMXCameraAdapter::setSensorOverclock(bool enable)
LOG_FUNCTION_NAME_EXIT;
- return ret;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::printComponentVersion(OMX_HANDLETYPE handle)
@@ -2259,15 +2642,72 @@ status_t OMXCameraAdapter::printComponentVersion(OMX_HANDLETYPE handle)
return ret;
}
+#ifndef OMAP_TUNA
+status_t OMXCameraAdapter::setS3DFrameLayout(OMX_U32 port) const
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_FRAMELAYOUTTYPE frameLayout;
+ const OMXCameraPortParameters *cap =
+ &mCameraAdapterParameters.mCameraPortParams[port];
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR (&frameLayout, OMX_TI_FRAMELAYOUTTYPE);
+ frameLayout.nPortIndex = port;
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamStereoFrmLayout, &frameLayout);
+ if (eError != OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("Error while getting S3D frame layout: 0x%x", eError);
+ return -EINVAL;
+ }
+
+ if (cap->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottomSubsample)
+ {
+ frameLayout.eFrameLayout = OMX_TI_StereoFrameLayoutTopBottom;
+ frameLayout.nSubsampleRatio = 2;
+ }
+ else if (cap->mFrameLayoutType ==
+ OMX_TI_StereoFrameLayoutLeftRightSubsample)
+ {
+ frameLayout.eFrameLayout = OMX_TI_StereoFrameLayoutLeftRight;
+ frameLayout.nSubsampleRatio = 2;
+ }
+ else
+ {
+ frameLayout.eFrameLayout = cap->mFrameLayoutType;
+ frameLayout.nSubsampleRatio = 1;
+ }
+ frameLayout.nSubsampleRatio = frameLayout.nSubsampleRatio << 7;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamStereoFrmLayout, &frameLayout);
+ if (eError != OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("Error while setting S3D frame layout: 0x%x", eError);
+ return -EINVAL;
+ }
+ else
+ {
+ CAMHAL_LOGDB("S3D frame layout %d applied successfully on port %lu",
+ frameLayout.eFrameLayout, port);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+#endif
+
status_t OMXCameraAdapter::autoFocus()
{
status_t ret = NO_ERROR;
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
if (mFrameCount < 1) {
// first frame may time some time to come...so wait for an adequate amount of time
// which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover.
@@ -2285,7 +2725,7 @@ status_t OMXCameraAdapter::autoFocus()
EXIT:
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -2293,12 +2733,12 @@ status_t OMXCameraAdapter::autoFocus()
status_t OMXCameraAdapter::takePicture()
{
status_t ret = NO_ERROR;
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
if (mFrameCount < 1) {
// first frame may time some time to come...so wait for an adequate amount of time
// which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover.
@@ -2310,8 +2750,16 @@ status_t OMXCameraAdapter::takePicture()
}
}
- msg.command = CommandHandler::CAMERA_START_IMAGE_CAPTURE;
+ // TODO(XXX): re-using take picture to kick off reprocessing pipe
+ // Need to rethink this approach during reimplementation
+ if (mNextState == REPROCESS_STATE) {
+ msg.command = CommandHandler::CAMERA_START_REPROCESS;
+ } else {
+ msg.command = CommandHandler::CAMERA_START_IMAGE_CAPTURE;
+ }
+
msg.arg1 = mErrorNotifier;
+ msg.arg2 = cacheCaptureParameters();
ret = mCommandHandler->put(&msg);
EXIT:
@@ -2345,7 +2793,7 @@ status_t OMXCameraAdapter::getFrameSize(size_t &width, size_t &height)
if ( mOMXStateSwitch )
{
- ret = switchToLoaded();
+ ret = switchToLoaded(true);
if ( NO_ERROR != ret )
{
CAMHAL_LOGEB("switchToLoaded() failed 0x%x", ret);
@@ -2358,78 +2806,54 @@ status_t OMXCameraAdapter::getFrameSize(size_t &width, size_t &height)
if ( OMX_StateLoaded == mComponentState )
{
- ret = setLDC(mIPP);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setLDC() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- goto exit;
+ if (mPendingPreviewSettings & SetLDC) {
+ mPendingPreviewSettings &= ~SetLDC;
+ ret = setLDC(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setLDC() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ goto exit;
}
+ }
- ret = setNSF(mIPP);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setNSF() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- goto exit;
+ if (mPendingPreviewSettings & SetNSF) {
+ mPendingPreviewSettings &= ~SetNSF;
+ ret = setNSF(mIPP);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setNSF() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ goto exit;
}
+ }
- ret = setCaptureMode(mCapMode);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
+ if (mPendingPreviewSettings & SetCapMode) {
+ mPendingPreviewSettings &= ~SetCapMode;
+ ret = setCaptureMode(mCapMode);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
}
+ }
- if(mCapMode == OMXCameraAdapter::VIDEO_MODE)
- {
- if ( NO_ERROR == ret )
- {
- ///Enable/Disable Video Noise Filter
- ret = enableVideoNoiseFilter(mVnfEnabled);
- }
+ if(mCapMode == OMXCameraAdapter::VIDEO_MODE) {
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VNF %x", ret);
+ if (mPendingPreviewSettings & SetVNF) {
+ mPendingPreviewSettings &= ~SetVNF;
+ ret = enableVideoNoiseFilter(mVnfEnabled);
+ if ( NO_ERROR != ret){
+ CAMHAL_LOGEB("Error configuring VNF %x", ret);
}
+ }
- if ( NO_ERROR == ret )
- {
- ///Enable/Disable Video Stabilization
+ if (mPendingPreviewSettings & SetVSTAB) {
+ mPendingPreviewSettings &= ~SetVSTAB;
ret = enableVideoStabilization(mVstabEnabled);
- }
-
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
- }
- }
- else
- {
- if ( NO_ERROR == ret )
- {
- ///Enable/Disable Video Noise Filter
- ret = enableVideoNoiseFilter(false);
- }
-
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VNF %x", ret);
- }
-
- if ( NO_ERROR == ret )
- {
- ///Enable/Disable Video Stabilization
- ret = enableVideoStabilization(false);
- }
-
- if ( NO_ERROR != ret)
- {
- CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
+ if ( NO_ERROR != ret) {
+ CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
}
}
}
+ }
ret = setSensorOrientation(mSensorOrientation);
if ( NO_ERROR != ret )
@@ -2522,9 +2946,6 @@ void OMXCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
LOG_FUNCTION_NAME;
static const unsigned int DEGREES_TILT_IGNORE = 45;
- int device_orientation = 0;
- int mount_orientation = 0;
- const char *facing_direction = NULL;
// if tilt angle is greater than DEGREES_TILT_IGNORE
// we are going to ignore the orientation returned from
@@ -2534,34 +2955,36 @@ void OMXCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
return;
}
+ int mountOrientation = 0;
+ bool isFront = false;
if (mCapabilities) {
- if (mCapabilities->get(CameraProperties::ORIENTATION_INDEX)) {
- mount_orientation = atoi(mCapabilities->get(CameraProperties::ORIENTATION_INDEX));
+ const char * const mountOrientationString =
+ mCapabilities->get(CameraProperties::ORIENTATION_INDEX);
+ if (mountOrientationString) {
+ mountOrientation = atoi(mountOrientationString);
}
- facing_direction = mCapabilities->get(CameraProperties::FACING_INDEX);
- }
- // calculate device orientation relative to the sensor orientation
- // front camera display is mirrored...needs to be accounted for when orientation
- // is 90 or 270...since this will result in a flip on orientation otherwise
- if (facing_direction && !strcmp(facing_direction, TICameraParameters::FACING_FRONT) &&
- (orientation == 90 || orientation == 270)) {
- device_orientation = (orientation - mount_orientation + 360) % 360;
- } else { // back-facing camera
- device_orientation = (orientation + mount_orientation) % 360;
+ const char * const facingString = mCapabilities->get(CameraProperties::FACING_INDEX);
+ if (facingString) {
+ isFront = strcmp(facingString, TICameraParameters::FACING_FRONT) == 0;
+ }
}
- if (device_orientation != mDeviceOrientation) {
- mDeviceOrientation = device_orientation;
+    // direction is a fixed sign determined by the sensor facing — the rotation direction relative to the device:
+    // +1 (clockwise) for the back sensor and -1 (counter-clockwise) for the front sensor
+ const int direction = isFront ? -1 : 1;
-#ifndef OMAP_TUNA
- mFaceDetectionLock.lock();
- if (mFaceDetectionRunning) {
- // restart face detection with new rotation
- setFaceDetection(true, mDeviceOrientation);
- }
- mFaceDetectionLock.unlock();
-#endif
+ int rotation = mountOrientation + direction*orientation;
+
+ // crop the calculated value to [0..360) range
+ while ( rotation < 0 ) rotation += 360;
+ rotation %= 360;
+
+ if (rotation != mDeviceOrientation) {
+ mDeviceOrientation = rotation;
+
+ // restart face detection with new rotation
+ setFaceDetectionOrientation(mDeviceOrientation);
}
CAMHAL_LOGVB("orientation = %d tilt = %d device_orientation = %d", orientation, tilt, mDeviceOrientation);
@@ -2652,10 +3075,10 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETY
{
CAMHAL_LOGEB("***Removing %d EVENTS***** \n", mEventSignalQ.size());
//remove from queue and free msg
- TIUTILS::Message *msg = mEventSignalQ.itemAt(i);
+ Utils::Message *msg = mEventSignalQ.itemAt(i);
if ( NULL != msg )
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
if ( sem )
{
sem->Signal();
@@ -2714,8 +3137,8 @@ OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_U32 nData2,
OMX_IN OMX_PTR pEventData)
{
- Mutex::Autolock lock(mEventLock);
- TIUTILS::Message *msg;
+ android::AutoMutex lock(mEventLock);
+ Utils::Message *msg;
bool eventSignalled = false;
LOG_FUNCTION_NAME;
@@ -2734,7 +3157,7 @@ OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
&& ( !msg->arg2 || ( OMX_U32 ) msg->arg2 == nData2 )
&& msg->arg3)
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
CAMHAL_LOGDA("Event matched, signalling sem");
mEventSignalQ.removeAt(i);
//Signal the semaphore provided
@@ -2756,7 +3179,7 @@ OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
// Handling for focus callback
if ((nData2 == OMX_IndexConfigCommonFocusStatus) &&
(eEvent == (OMX_EVENTTYPE) OMX_EventIndexSettingChanged)) {
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = OMXCallbackHandler::CAMERA_FOCUS_STATUS;
msg.arg1 = NULL;
msg.arg2 = NULL;
@@ -2775,8 +3198,8 @@ OMX_ERRORTYPE OMXCameraAdapter::RemoveEvent(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_U32 nData2,
OMX_IN OMX_PTR pEventData)
{
- Mutex::Autolock lock(mEventLock);
- TIUTILS::Message *msg;
+ android::AutoMutex lock(mEventLock);
+ Utils::Message *msg;
LOG_FUNCTION_NAME;
if ( !mEventSignalQ.isEmpty() )
@@ -2793,7 +3216,7 @@ OMX_ERRORTYPE OMXCameraAdapter::RemoveEvent(OMX_IN OMX_HANDLETYPE hComponent,
&& ( !msg->arg2 || ( OMX_U32 ) msg->arg2 == nData2 )
&& msg->arg3)
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
CAMHAL_LOGDA("Event matched, signalling sem");
mEventSignalQ.removeAt(i);
free(msg);
@@ -2816,14 +3239,14 @@ status_t OMXCameraAdapter::RegisterForEvent(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_EVENTTYPE eEvent,
OMX_IN OMX_U32 nData1,
OMX_IN OMX_U32 nData2,
- OMX_IN Semaphore &semaphore)
+ OMX_IN Utils::Semaphore &semaphore)
{
status_t ret = NO_ERROR;
ssize_t res;
- Mutex::Autolock lock(mEventLock);
+ android::AutoMutex lock(mEventLock);
LOG_FUNCTION_NAME;
- TIUTILS::Message * msg = ( struct TIUTILS::Message * ) malloc(sizeof(struct TIUTILS::Message));
+ Utils::Message * msg = ( struct Utils::Message * ) malloc(sizeof(struct Utils::Message));
if ( NULL != msg )
{
msg->command = ( unsigned int ) eEvent;
@@ -2871,11 +3294,36 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDL
OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
{
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME;
+ status_t stat = NO_ERROR;
+ status_t res1, res2;
+ OMXCameraPortParameters *pPortParam;
+ CameraFrame::FrameType typeOfFrame = CameraFrame::ALL_FRAMES;
+ unsigned int refCount = 0;
+ unsigned int mask = 0xFFFF;
+ CameraFrame cameraFrame;
+ OMX_TI_PLATFORMPRIVATE *platformPrivate;
- LOG_FUNCTION_NAME_EXIT;
+ res1 = res2 = NO_ERROR;
- return OMX_ErrorNone;
+ if (!pBuffHeader || !pBuffHeader->pBuffer) {
+ CAMHAL_LOGE("NULL Buffer from OMX");
+ return OMX_ErrorNone;
+ }
+
+ pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nInputPortIndex]);
+ platformPrivate = (OMX_TI_PLATFORMPRIVATE*) pBuffHeader->pPlatformPrivate;
+
+ if (pBuffHeader->nInputPortIndex == OMX_CAMERA_PORT_VIDEO_IN_VIDEO) {
+ typeOfFrame = CameraFrame::REPROCESS_INPUT_FRAME;
+ mask = (unsigned int)CameraFrame::REPROCESS_INPUT_FRAME;
+
+ stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return OMX_ErrorNone;
}
static void debugShowFPS()
@@ -2891,7 +3339,7 @@ static void debugShowFPS()
mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
mLastFpsTime = now;
mLastFrameCount = mFrameCount;
- ALOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ CAMHAL_LOGI("Camera %d Frames, %f FPS", mFrameCount, mFps);
}
// XXX: mFPS has the value we want
}
@@ -2903,7 +3351,7 @@ OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
{
- TIUTILS::Message msg;
+ Utils::Message msg;
OMX_ERRORTYPE eError = OMX_ErrorNone;
if (UNLIKELY(mDebugFps)) {
@@ -2922,6 +3370,48 @@ OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
return eError;
}
+#ifdef CAMERAHAL_OMX_PROFILING
+
+status_t OMXCameraAdapter::storeProfilingData(OMX_BUFFERHEADERTYPE* pBuffHeader) {
+ OMX_TI_PLATFORMPRIVATE *platformPrivate = NULL;
+ OMX_OTHER_EXTRADATATYPE *extraData = NULL;
+ FILE *fd = NULL;
+
+ LOG_FUNCTION_NAME
+
+ if ( UNLIKELY( mDebugProfile ) ) {
+
+ platformPrivate = static_cast<OMX_TI_PLATFORMPRIVATE *> (pBuffHeader->pPlatformPrivate);
+ extraData = getExtradata(static_cast<OMX_OTHER_EXTRADATATYPE *> (platformPrivate->pMetaDataBuffer),
+ platformPrivate->nMetaDataSize,
+ static_cast<OMX_EXTRADATATYPE> (OMX_TI_ProfilerData));
+
+ if ( NULL != extraData ) {
+ if( extraData->eType == static_cast<OMX_EXTRADATATYPE> (OMX_TI_ProfilerData) ) {
+
+ fd = fopen(DEFAULT_PROFILE_PATH, "ab");
+ if ( NULL != fd ) {
+ fwrite(extraData->data, 1, extraData->nDataSize, fd);
+ fclose(fd);
+ } else {
+ return -errno;
+ }
+
+ } else {
+ return NOT_ENOUGH_DATA;
+ }
+ } else {
+ return NOT_ENOUGH_DATA;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return NO_ERROR;
+}
+
+#endif
+
/*========================================================*/
/* @ fn SampleTest_FillBufferDone :: Application callback*/
/*========================================================*/
@@ -2938,22 +3428,39 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
BaseCameraAdapter::AdapterState state, nextState;
BaseCameraAdapter::getState(state);
BaseCameraAdapter::getNextState(nextState);
- sp<CameraFDResult> fdResult = NULL;
+ android::sp<CameraMetadataResult> metadataResult = NULL;
unsigned int mask = 0xFFFF;
CameraFrame cameraFrame;
- OMX_TI_PLATFORMPRIVATE *platformPrivate;
OMX_OTHER_EXTRADATATYPE *extraData;
OMX_TI_ANCILLARYDATATYPE *ancillaryData = NULL;
bool snapshotFrame = false;
+ if ( NULL == pBuffHeader ) {
+ return OMX_ErrorBadParameter;
+ }
+
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ storeProfilingData(pBuffHeader);
+
+#endif
+
res1 = res2 = NO_ERROR;
- pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nOutputPortIndex]);
if ( !pBuffHeader || !pBuffHeader->pBuffer ) {
CAMHAL_LOGEA("NULL Buffer from OMX");
return OMX_ErrorNone;
}
+ pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nOutputPortIndex]);
+
+ // Find buffer and mark it as filled
+ for (int i = 0; i < pPortParam->mNumBufs; i++) {
+ if (pPortParam->mBufferHeader[i] == pBuffHeader) {
+ pPortParam->mStatus[i] = OMXCameraPortParameters::DONE;
+ }
+ }
+
if (pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW)
{
@@ -2962,44 +3469,26 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
return OMX_ErrorNone;
}
- if ( mWaitingForSnapshot )
- {
- platformPrivate = (OMX_TI_PLATFORMPRIVATE*) pBuffHeader->pPlatformPrivate;
- extraData = getExtradata((OMX_OTHER_EXTRADATATYPE*) platformPrivate->pMetaDataBuffer,
- platformPrivate->nMetaDataSize, (OMX_EXTRADATATYPE) OMX_AncillaryData);
+ if ( mWaitingForSnapshot ) {
+ extraData = getExtradata(pBuffHeader->pPlatformPrivate,
+ (OMX_EXTRADATATYPE) OMX_AncillaryData);
- if ( NULL != extraData )
- {
+ if ( NULL != extraData ) {
ancillaryData = (OMX_TI_ANCILLARYDATATYPE*) extraData->data;
+#ifdef OMAP_TUNA
snapshotFrame = ancillaryData->nDCCStatus;
- mPending3Asettings |= SetFocus;
+#else
+ if ((OMX_2D_Snap == ancillaryData->eCameraView)
+ || (OMX_3D_Left_Snap == ancillaryData->eCameraView)
+ || (OMX_3D_Right_Snap == ancillaryData->eCameraView)) {
+ snapshotFrame = OMX_TRUE;
+ } else {
+ snapshotFrame = OMX_FALSE;
}
- }
-
- recalculateFPS();
-#ifndef OMAP_TUNA
- {
- Mutex::Autolock lock(mFaceDetectionLock);
- if ( mFaceDetectionRunning && !mFaceDetectionPaused ) {
- detectFaces(pBuffHeader, fdResult, pPortParam->mWidth, pPortParam->mHeight);
- if ( NULL != fdResult.get() ) {
- notifyFaceSubscribers(fdResult);
- fdResult.clear();
- }
- if ( mFDSwitchAlgoPriority ) {
-
- //Disable region priority and enable face priority for AF
- setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);
- setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , true);
-
- //Disable Region priority and enable Face priority
- setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, false);
- setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, true);
- mFDSwitchAlgoPriority = false;
- }
- }
- }
#endif
+ mPending3Asettings |= SetFocus;
+ }
+ }
///Prepare the frames to be sent - initialize CameraFrame object and reference count
// TODO(XXX): ancillary data for snapshot frame is not being sent for video snapshot
@@ -3014,8 +3503,8 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
// video snapshot gets ancillary data and wb info from last snapshot frame
mCaptureAncillaryData = ancillaryData;
mWhiteBalanceData = NULL;
- extraData = getExtradata((OMX_OTHER_EXTRADATATYPE*) platformPrivate->pMetaDataBuffer,
- platformPrivate->nMetaDataSize, (OMX_EXTRADATATYPE) OMX_WhiteBalance);
+ extraData = getExtradata(pBuffHeader->pPlatformPrivate,
+ (OMX_EXTRADATATYPE) OMX_WhiteBalance);
if ( NULL != extraData )
{
mWhiteBalanceData = (OMX_TI_WHITEBALANCERESULTTYPE*) extraData->data;
@@ -3033,13 +3522,11 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
mFramesWithEncoder++;
}
- //ALOGV("FBD pBuffer = 0x%x", pBuffHeader->pBuffer);
+ //CAMHAL_LOGV("FBD pBuffer = 0x%x", pBuffHeader->pBuffer);
if( mWaitingForSnapshot )
{
- mSnapshotCount++;
-
- if ( (mSnapshotCount == 1) &&
+ if (!mBracketingEnabled &&
((HIGH_SPEED == mCapMode) || (VIDEO_MODE == mCapMode)) )
{
notifyShutterSubscribers();
@@ -3051,11 +3538,11 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
mFramesWithDucati--;
-#ifdef DEBUG_LOG
- if(mBuffersWithDucati.indexOfKey((int)pBuffHeader->pBuffer)<0)
+#ifdef CAMERAHAL_DEBUG
+ if(mBuffersWithDucati.indexOfKey((uint32_t)pBuffHeader->pBuffer)<0)
{
- ALOGE("Buffer was never with Ducati!! 0x%x", pBuffHeader->pBuffer);
- for(int i=0;i<mBuffersWithDucati.size();i++) ALOGE("0x%x", mBuffersWithDucati.keyAt(i));
+ CAMHAL_LOGE("Buffer was never with Ducati!! %p", pBuffHeader->pBuffer);
+ for(unsigned int i=0;i<mBuffersWithDucati.size();i++) CAMHAL_LOGE("0x%x", mBuffersWithDucati.keyAt(i));
}
mBuffersWithDucati.removeItem((int)pBuffHeader->pBuffer);
#endif
@@ -3063,6 +3550,33 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
if(mDebugFcs)
CAMHAL_LOGEB("C[%d] D[%d] E[%d]", mFramesWithDucati, mFramesWithDisplay, mFramesWithEncoder);
+ recalculateFPS();
+
+ createPreviewMetadata(pBuffHeader, metadataResult, pPortParam->mWidth, pPortParam->mHeight);
+ if ( NULL != metadataResult.get() ) {
+ notifyMetadataSubscribers(metadataResult);
+ metadataResult.clear();
+ }
+
+ {
+ android::AutoMutex lock(mFaceDetectionLock);
+ if ( mFDSwitchAlgoPriority ) {
+
+ //Disable region priority and enable face priority for AF
+ setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);
+ setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , true);
+
+ //Disable Region priority and enable Face priority
+ setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, false);
+ setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, true);
+ mFDSwitchAlgoPriority = false;
+ }
+ }
+
+#ifndef OMAP_TUNA
+ sniffDccFileDataSave(pBuffHeader);
+#endif
+
stat |= advanceZoom();
// On the fly update to 3A settings not working
@@ -3070,10 +3584,9 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
// or in the middle of transitioning to it
if( mPending3Asettings &&
( (nextState & CAPTURE_ACTIVE) == 0 ) &&
- ( (state & CAPTURE_ACTIVE) == 0 ) )
- {
+ ( (state & CAPTURE_ACTIVE) == 0 ) ) {
apply3Asettings(mParameters3A);
- }
+ }
}
else if( pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT )
@@ -3084,11 +3597,11 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
}
else if( pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_IMAGE_OUT_IMAGE )
- {
+ {
OMX_COLOR_FORMATTYPE pixFormat;
const char *valstr = NULL;
- pixFormat = mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mColorFormat;
+ pixFormat = pPortParam->mColorFormat;
if ( OMX_COLOR_FormatUnused == pixFormat )
{
@@ -3096,13 +3609,15 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
mask = (unsigned int) CameraFrame::IMAGE_FRAME;
} else if ( pixFormat == OMX_COLOR_FormatCbYCrY &&
((mPictureFormatFromClient &&
- !strcmp(mPictureFormatFromClient, CameraParameters::PIXEL_FORMAT_JPEG)) ||
- !mPictureFormatFromClient) ) {
+ !strcmp(mPictureFormatFromClient,
+ android::CameraParameters::PIXEL_FORMAT_JPEG)) ||
+ !mPictureFormatFromClient) ) {
            // signals to callbacks that this needs to be converted to jpeg
// before returning to framework
typeOfFrame = CameraFrame::IMAGE_FRAME;
mask = (unsigned int) CameraFrame::IMAGE_FRAME;
cameraFrame.mQuirks |= CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG;
+ cameraFrame.mQuirks |= CameraFrame::FORMAT_YUV422I_UYVY;
// populate exif data and pass to subscribers via quirk
// subscriber is in charge of freeing exif data
@@ -3110,12 +3625,10 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
setupEXIF_libjpeg(exif, mCaptureAncillaryData, mWhiteBalanceData);
cameraFrame.mQuirks |= CameraFrame::HAS_EXIF_DATA;
cameraFrame.mCookie2 = (void*) exif;
- }
- else
- {
+ } else {
typeOfFrame = CameraFrame::RAW_FRAME;
mask = (unsigned int) CameraFrame::RAW_FRAME;
- }
+ }
pPortParam->mImageType = typeOfFrame;
@@ -3131,7 +3644,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
}
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( mBracketingEnabled )
{
doBracketing(pBuffHeader, typeOfFrame);
@@ -3139,28 +3652,131 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
}
}
+ if (mZoomBracketingEnabled) {
+ doZoom(mZoomBracketingValues[mCurrentZoomBracketing]);
+ CAMHAL_LOGDB("Current Zoom Bracketing: %d", mZoomBracketingValues[mCurrentZoomBracketing]);
+ mCurrentZoomBracketing++;
+ if (mCurrentZoomBracketing == ARRAY_SIZE(mZoomBracketingValues)) {
+ mZoomBracketingEnabled = false;
+ }
+ }
+
if ( 1 > mCapturedFrames )
{
goto EXIT;
}
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ if ( NULL != mSharedAllocator ) {
+ cameraFrame.mMetaData = new CameraMetadataResult(getMetaData(pBuffHeader->pPlatformPrivate, mSharedAllocator));
+ }
+#endif
+
CAMHAL_LOGDB("Captured Frames: %d", mCapturedFrames);
mCapturedFrames--;
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if (mYuvCapture) {
+ struct timeval timeStampUsec;
+ gettimeofday(&timeStampUsec, NULL);
+
+ time_t saveTime;
+ time(&saveTime);
+ const struct tm * const timeStamp = gmtime(&saveTime);
+
+ char filename[256];
+ snprintf(filename,256, "%s/yuv_%d_%d_%d_%lu.yuv",
+ kYuvImagesOutputDirPath,
+ timeStamp->tm_hour,
+ timeStamp->tm_min,
+ timeStamp->tm_sec,
+ timeStampUsec.tv_usec);
+
+ const status_t saveBufferStatus = saveBufferToFile(((CameraBuffer*)pBuffHeader->pAppPrivate)->mapped,
+ pBuffHeader->nFilledLen, filename);
+
+ if (saveBufferStatus != OK) {
+ CAMHAL_LOGE("ERROR: %d, while saving yuv!", saveBufferStatus);
+ } else {
+ CAMHAL_LOGD("yuv_%d_%d_%d_%lu.yuv successfully saved in %s",
+ timeStamp->tm_hour,
+ timeStamp->tm_min,
+ timeStamp->tm_sec,
+ timeStampUsec.tv_usec,
+ kYuvImagesOutputDirPath);
+ }
+ }
+#endif
+
stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ if ( NULL != cameraFrame.mMetaData.get() ) {
+ cameraFrame.mMetaData.clear();
+ }
+#endif
}
- else
- {
- CAMHAL_LOGEA("Frame received for non-(preview/capture/measure) port. This is yet to be supported");
- goto EXIT;
+ else if (pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_VIDEO) {
+ typeOfFrame = CameraFrame::RAW_FRAME;
+ pPortParam->mImageType = typeOfFrame;
+ {
+ android::AutoMutex lock(mLock);
+ if( ( CAPTURE_ACTIVE & state ) != CAPTURE_ACTIVE ) {
+ goto EXIT;
+ }
+ }
+
+ CAMHAL_LOGD("RAW buffer done on video port, length = %d", pBuffHeader->nFilledLen);
+
+ mask = (unsigned int) CameraFrame::RAW_FRAME;
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if ( mRawCapture ) {
+ struct timeval timeStampUsec;
+ gettimeofday(&timeStampUsec, NULL);
+
+ time_t saveTime;
+ time(&saveTime);
+ const struct tm * const timeStamp = gmtime(&saveTime);
+
+ char filename[256];
+ snprintf(filename,256, "%s/raw_%d_%d_%d_%lu.raw",
+ kRawImagesOutputDirPath,
+ timeStamp->tm_hour,
+ timeStamp->tm_min,
+ timeStamp->tm_sec,
+ timeStampUsec.tv_usec);
+
+ const status_t saveBufferStatus = saveBufferToFile( ((CameraBuffer*)pBuffHeader->pAppPrivate)->mapped,
+ pBuffHeader->nFilledLen, filename);
+
+ if (saveBufferStatus != OK) {
+ CAMHAL_LOGE("ERROR: %d , while saving raw!", saveBufferStatus);
+ } else {
+ CAMHAL_LOGD("raw_%d_%d_%d_%lu.raw successfully saved in %s",
+ timeStamp->tm_hour,
+ timeStamp->tm_min,
+ timeStamp->tm_sec,
+ timeStampUsec.tv_usec,
+ kRawImagesOutputDirPath);
+ stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
+ }
+ }
+#endif
+ } else {
+ CAMHAL_LOGEA("Frame received for non-(preview/capture/measure) port. This is yet to be supported");
+ goto EXIT;
}
if ( NO_ERROR != stat )
{
+ CameraBuffer *camera_buffer;
+
+ camera_buffer = (CameraBuffer *)pBuffHeader->pAppPrivate;
+
CAMHAL_LOGDB("sendFrameToSubscribers error: %d", stat);
- returnFrame(pBuffHeader->pBuffer, typeOfFrame);
+ returnFrame(camera_buffer, typeOfFrame);
}
return eError;
@@ -3185,7 +3801,7 @@ status_t OMXCameraAdapter::recalculateFPS()
float currentFPS;
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
mFrameCount++;
if (mFrameCount == 1) {
mFirstFrameCondition.broadcast();
@@ -3217,23 +3833,6 @@ status_t OMXCameraAdapter::recalculateFPS()
return NO_ERROR;
}
-status_t OMXCameraAdapter::sendFrame(CameraFrame &frame)
-{
- status_t ret = NO_ERROR;
-
- LOG_FUNCTION_NAME;
-
-
- if ( NO_ERROR == ret )
- {
- ret = sendFrameToSubscribers(&frame);
- }
-
- LOG_FUNCTION_NAME_EXIT;
-
- return ret;
-}
-
status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, unsigned int mask, OMXCameraPortParameters *port)
{
status_t ret = NO_ERROR;
@@ -3252,18 +3851,18 @@ status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEA
return -EINVAL;
}
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
//frame.mFrameType = typeOfFrame;
frame.mFrameMask = mask;
- frame.mBuffer = pBuffHeader->pBuffer;
+ frame.mBuffer = (CameraBuffer *)pBuffHeader->pAppPrivate;
frame.mLength = pBuffHeader->nFilledLen;
frame.mAlignment = port->mStride;
frame.mOffset = pBuffHeader->nOffset;
frame.mWidth = port->mWidth;
frame.mHeight = port->mHeight;
- frame.mYuv[0] = NULL;
- frame.mYuv[1] = NULL;
+ frame.mYuv[0] = 0; //NULL;
+ frame.mYuv[1] = 0; //NULL;
if ( onlyOnce && mRecording )
{
@@ -3288,60 +3887,9 @@ status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEA
return ret;
}
-status_t OMXCameraAdapter::initCameraFrame( CameraFrame &frame,
- OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader,
- int typeOfFrame,
- OMXCameraPortParameters *port)
-{
- status_t ret = NO_ERROR;
-
- LOG_FUNCTION_NAME;
-
- if ( NULL == port)
- {
- CAMHAL_LOGEA("Invalid portParam");
- return -EINVAL;
- }
-
- if ( NULL == pBuffHeader )
- {
- CAMHAL_LOGEA("Invalid Buffer header");
- return -EINVAL;
- }
-
- frame.mFrameType = typeOfFrame;
- frame.mBuffer = pBuffHeader->pBuffer;
- frame.mLength = pBuffHeader->nFilledLen;
- frame.mAlignment = port->mStride;
- frame.mOffset = pBuffHeader->nOffset;
- frame.mWidth = port->mWidth;
- frame.mHeight = port->mHeight;
-
- // Timestamp in pBuffHeader->nTimeStamp is derived on DUCATI side, which is
- // is not same time value as derived using systemTime. It would be ideal to use
- // exactly same time source across Android and Ducati, which is limited by
- // system now. So, workaround for now is to find the time offset between the two
- // time sources and compensate the difference, along with the latency involved
- // in camera buffer reaching CameraHal. Also, Do timeset offset calculation only
- // when recording is in progress, when nTimestamp will be populated by Camera
- if ( onlyOnce && mRecording )
- {
- mTimeSourceDelta = (pBuffHeader->nTimeStamp * 1000) - systemTime(SYSTEM_TIME_MONOTONIC);
- mTimeSourceDelta += kCameraBufferLatencyNs;
- onlyOnce = false;
- }
-
- // Calculating the new video timestamp based on offset from ducati source.
- frame.mTimestamp = (pBuffHeader->nTimeStamp * 1000) - mTimeSourceDelta;
-
- LOG_FUNCTION_NAME_EXIT;
-
- return ret;
-}
-
bool OMXCameraAdapter::CommandHandler::Handler()
{
- TIUTILS::Message msg;
+ Utils::Message msg;
volatile int forever = 1;
status_t stat;
ErrorNotifier *errorNotify = NULL;
@@ -3352,16 +3900,19 @@ bool OMXCameraAdapter::CommandHandler::Handler()
{
stat = NO_ERROR;
CAMHAL_LOGDA("Handler: waiting for messsage...");
- TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
+ Utils::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.get(&msg);
}
CAMHAL_LOGDB("msg.command = %d", msg.command);
switch ( msg.command ) {
case CommandHandler::CAMERA_START_IMAGE_CAPTURE:
{
- stat = mCameraAdapter->startImageCapture();
+ OMXCameraAdapter::CachedCaptureParameters* cap_params =
+ static_cast<OMXCameraAdapter::CachedCaptureParameters*>(msg.arg2);
+ stat = mCameraAdapter->startImageCapture(false, cap_params);
+ delete cap_params;
break;
}
case CommandHandler::CAMERA_PERFORM_AUTOFOCUS:
@@ -3377,8 +3928,17 @@ bool OMXCameraAdapter::CommandHandler::Handler()
}
case CommandHandler::CAMERA_SWITCH_TO_EXECUTING:
{
- stat = mCameraAdapter->doSwitchToExecuting();
- break;
+ stat = mCameraAdapter->doSwitchToExecuting();
+ break;
+ }
+ case CommandHandler::CAMERA_START_REPROCESS:
+ {
+ OMXCameraAdapter::CachedCaptureParameters* cap_params =
+ static_cast<OMXCameraAdapter::CachedCaptureParameters*>(msg.arg2);
+ stat = mCameraAdapter->startReprocess();
+ stat = mCameraAdapter->startImageCapture(false, cap_params);
+ delete cap_params;
+ break;
}
}
@@ -3391,17 +3951,18 @@ bool OMXCameraAdapter::CommandHandler::Handler()
bool OMXCameraAdapter::OMXCallbackHandler::Handler()
{
- TIUTILS::Message msg;
+ Utils::Message msg;
volatile int forever = 1;
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
while(forever){
- TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
+ Utils::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.get(&msg);
+ mIsProcessed = false;
}
switch ( msg.command ) {
@@ -3423,12 +3984,43 @@ bool OMXCameraAdapter::OMXCallbackHandler::Handler()
break;
}
}
+
+ {
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ mIsProcessed = mCommandMsgQ.isEmpty();
+ if ( mIsProcessed )
+ mCondition.signal();
+ }
+ }
+
+ // force the condition to wake
+ {
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ mIsProcessed = true;
+ mCondition.signal();
}
LOG_FUNCTION_NAME_EXIT;
return false;
}
+void OMXCameraAdapter::OMXCallbackHandler::flush()
+{
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ if ( mIsProcessed )
+ return;
+
+ mCondition.wait(mLock);
+}
+
status_t OMXCameraAdapter::setExtraData(bool enable, OMX_U32 nPortIndex, OMX_EXT_EXTRADATATYPE eType) {
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
@@ -3446,7 +4038,9 @@ status_t OMXCameraAdapter::setExtraData(bool enable, OMX_U32 nPortIndex, OMX_EXT
extraDataControl.nPortIndex = nPortIndex;
extraDataControl.eExtraDataType = eType;
+#ifdef CAMERAHAL_TUNA
extraDataControl.eCameraView = OMX_2D;
+#endif
if (enable) {
extraDataControl.bEnable = OMX_TRUE;
@@ -3460,28 +4054,76 @@ status_t OMXCameraAdapter::setExtraData(bool enable, OMX_U32 nPortIndex, OMX_EXT
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-
-OMX_OTHER_EXTRADATATYPE *OMXCameraAdapter::getExtradata(OMX_OTHER_EXTRADATATYPE *extraData, OMX_U32 extraDataSize, OMX_EXTRADATATYPE type) {
- OMX_U32 remainingSize = extraDataSize;
-
- if ( NULL != extraData ) {
- while ( extraData->eType && extraData->nDataSize && extraData->data &&
- (remainingSize >= extraData->nSize)) {
- if ( type == extraData->eType ) {
- return extraData;
+OMX_OTHER_EXTRADATATYPE *OMXCameraAdapter::getExtradata(const OMX_PTR ptrPrivate, OMX_EXTRADATATYPE type) const
+{
+ if ( NULL != ptrPrivate ) {
+ const OMX_TI_PLATFORMPRIVATE *platformPrivate = (const OMX_TI_PLATFORMPRIVATE *) ptrPrivate;
+
+ CAMHAL_LOGVB("Size = %d, sizeof = %d, pAuxBuf = 0x%x, pAuxBufSize= %d, pMetaDataBufer = 0x%x, nMetaDataSize = %d",
+ platformPrivate->nSize,
+ sizeof(OMX_TI_PLATFORMPRIVATE),
+ platformPrivate->pAuxBuf1,
+ platformPrivate->pAuxBufSize1,
+ platformPrivate->pMetaDataBuffer,
+ platformPrivate->nMetaDataSize);
+ if ( sizeof(OMX_TI_PLATFORMPRIVATE) == platformPrivate->nSize ) {
+ if ( 0 < platformPrivate->nMetaDataSize ) {
+ OMX_U32 remainingSize = platformPrivate->nMetaDataSize;
+ OMX_OTHER_EXTRADATATYPE *extraData = (OMX_OTHER_EXTRADATATYPE *) platformPrivate->pMetaDataBuffer;
+ if ( NULL != extraData ) {
+ while ( extraData->eType && extraData->nDataSize && extraData->data &&
+ (remainingSize >= extraData->nSize)) {
+ if ( type == extraData->eType ) {
+ return extraData;
+ }
+ remainingSize -= extraData->nSize;
+ extraData = (OMX_OTHER_EXTRADATATYPE*) ((char*)extraData + extraData->nSize);
+ }
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE pMetaDataBuffer is NULL");
+ }
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE nMetaDataSize is size is %d",
+ ( unsigned int ) platformPrivate->nMetaDataSize);
}
- extraData = (OMX_OTHER_EXTRADATATYPE*) ((char*)extraData + extraData->nSize);
- remainingSize -= extraData->nSize;
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE size mismatch: expected = %d, received = %d",
+ ( unsigned int ) sizeof(OMX_TI_PLATFORMPRIVATE),
+ ( unsigned int ) platformPrivate->nSize);
}
+ } else {
+ CAMHAL_LOGEA("Invalid OMX_TI_PLATFORMPRIVATE");
}
// Required extradata type wasn't found
return NULL;
}
+OMXCameraAdapter::CachedCaptureParameters* OMXCameraAdapter::cacheCaptureParameters() {
+ CachedCaptureParameters* params = new CachedCaptureParameters();
+
+ params->mPendingCaptureSettings = mPendingCaptureSettings;
+ params->mPictureRotation = mPictureRotation;
+ memcpy(params->mExposureBracketingValues,
+ mExposureBracketingValues,
+ sizeof(mExposureBracketingValues));
+ memcpy(params->mExposureGainBracketingValues,
+ mExposureGainBracketingValues,
+ sizeof(mExposureGainBracketingValues));
+ memcpy(params->mExposureGainBracketingModes,
+ mExposureGainBracketingModes,
+ sizeof(mExposureGainBracketingModes));
+ params->mExposureBracketingValidEntries = mExposureBracketingValidEntries;
+ params->mExposureBracketMode = mExposureBracketMode;
+ params->mBurstFrames = mBurstFrames;
+ params->mFlushShotConfigQueue = mFlushShotConfigQueue;
+
+ return params;
+}
+
OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index)
{
LOG_FUNCTION_NAME;
@@ -3493,16 +4135,21 @@ OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index)
// Initial values
mTimeSourceDelta = 0;
onlyOnce = true;
+#ifndef OMAP_TUNA
+ mDccData.pData = NULL;
+#endif
mInitSem.Create(0);
mFlushSem.Create(0);
mUsePreviewDataSem.Create(0);
mUsePreviewSem.Create(0);
mUseCaptureSem.Create(0);
+ mUseReprocessSem.Create(0);
mStartPreviewSem.Create(0);
mStopPreviewSem.Create(0);
mStartCaptureSem.Create(0);
mStopCaptureSem.Create(0);
+ mStopReprocSem.Create(0);
mSwitchToLoadedSem.Create(0);
mCaptureSem.Create(0);
@@ -3517,6 +4164,14 @@ OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index)
mFramesWithDisplay = 0;
mFramesWithEncoder = 0;
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ mDebugProfile = 0;
+
+#endif
+
+ mPreviewPortInitialized = false;
+
LOG_FUNCTION_NAME_EXIT;
}
@@ -3524,12 +4179,17 @@ OMXCameraAdapter::~OMXCameraAdapter()
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(gAdapterLock);
+ android::AutoMutex lock(gAdapterLock);
if ( mOmxInitialized ) {
// return to OMX Loaded state
switchToLoaded();
+#ifndef OMAP_TUNA
+ saveDccFileDataSave();
+
+ closeDccFileDataSave();
+#endif
// deinit the OMX
if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid ) {
// free the handle for the Camera component
@@ -3548,11 +4208,11 @@ OMXCameraAdapter::~OMXCameraAdapter()
{
for (unsigned int i = 0 ; i < mEventSignalQ.size() ; i++ )
{
- TIUTILS::Message *msg = mEventSignalQ.itemAt(i);
+ Utils::Message *msg = mEventSignalQ.itemAt(i);
//remove from queue and free msg
if ( NULL != msg )
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
sem->Signal();
free(msg);
@@ -3564,7 +4224,7 @@ OMXCameraAdapter::~OMXCameraAdapter()
//Exit and free ref to command handling thread
if ( NULL != mCommandHandler.get() )
{
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = CommandHandler::COMMAND_EXIT;
msg.arg1 = mErrorNotifier;
mCommandHandler->clearCommandQ();
@@ -3576,7 +4236,7 @@ OMXCameraAdapter::~OMXCameraAdapter()
//Exit and free ref to callback handling thread
if ( NULL != mOMXCallbackHandler.get() )
{
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = OMXCallbackHandler::COMMAND_EXIT;
//Clear all messages pending first
mOMXCallbackHandler->clearCommandQ();
@@ -3588,10 +4248,10 @@ OMXCameraAdapter::~OMXCameraAdapter()
LOG_FUNCTION_NAME_EXIT;
}
-extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
+extern "C" CameraAdapter* OMXCameraAdapter_Factory(size_t sensor_index)
{
CameraAdapter *adapter = NULL;
- Mutex::Autolock lock(gAdapterLock);
+ android::AutoMutex lock(gAdapterLock);
LOG_FUNCTION_NAME;
@@ -3599,7 +4259,7 @@ extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
if ( adapter ) {
CAMHAL_LOGDB("New OMX Camera adapter instance created for sensor %d",sensor_index);
} else {
- CAMHAL_LOGEA("Camera adapter create failed!");
+ CAMHAL_LOGEA("OMX Camera adapter create failed for sensor index = %d!",sensor_index);
}
LOG_FUNCTION_NAME_EXIT;
@@ -3607,7 +4267,8 @@ extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
return adapter;
}
-OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData )
+OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData,
+ const OMX_CALLBACKTYPE & callbacks)
{
OMX_ERRORTYPE eError = OMX_ErrorUndefined;
@@ -3618,12 +4279,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_P
}
// setup key parameters to send to Ducati during init
- OMX_CALLBACKTYPE oCallbacks;
-
- // initialize the callback handles
- oCallbacks.EventHandler = android::OMXCameraAdapterEventHandler;
- oCallbacks.EmptyBufferDone = android::OMXCameraAdapterEmptyBufferDone;
- oCallbacks.FillBufferDone = android::OMXCameraAdapterFillBufferDone;
+ OMX_CALLBACKTYPE oCallbacks = callbacks;
// get handle
eError = OMX_GetHandle(handle, (OMX_STRING)"OMX.TI.DUCATI1.VIDEO.CAMERA", pAppData, &oCallbacks);
@@ -3638,80 +4294,422 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_P
return eError;
}
-extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
- const unsigned int starting_camera,
- const unsigned int max_camera) {
- int num_cameras_supported = 0;
- CameraProperties::Properties* properties = NULL;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_HANDLETYPE handle = NULL;
- OMX_TI_CAPTYPE caps;
+class CapabilitiesHandler
+{
+public:
+ CapabilitiesHandler()
+ {
+ mComponent = 0;
+ mIsAborted = true;
+ }
+
+ const OMX_HANDLETYPE & component() const
+ {
+ return mComponent;
+ }
+
+ OMX_HANDLETYPE & componentRef()
+ {
+ return mComponent;
+ }
+
+ status_t disableAllPorts()
+ {
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ mPortsLeftToDisable = OMX_CAMERA_NUM_PORTS;
+ mIsOk = false;
+ mIsAborted = false;
+
+ CAMHAL_LOGD("Disabling ports...");
+ const OMX_ERRORTYPE sendCommandError = OMX_SendCommand(component(),
+ OMX_CommandPortDisable, OMX_ALL, 0);
+ CAMHAL_LOGD("Disabling ports... DONE");
+
+ if ( sendCommandError != OMX_ErrorNone )
+ {
+ CAMHAL_LOGE("Failed disabling all ports, error: 0x%x", sendCommandError);
+ return Utils::ErrorUtils::omxToAndroidError(sendCommandError);
+ }
+
+ CAMHAL_LOGD("Waiting for disabling all ports will be finished...");
+ const status_t waitStatus = mCondition.waitRelative(mLock, seconds_to_nanoseconds(3));
+ CAMHAL_LOGD("Waiting for disabling all ports will be finished... DONE");
+
+ if ( waitStatus != NO_ERROR )
+ {
+ CAMHAL_LOGE("Timeout triggered while waiting for all ports to be disabled");
+ return TIMED_OUT;
+ }
+
+ if ( !mIsOk )
+ {
+ CAMHAL_LOGE("Failed to disable all ports");
+ return UNKNOWN_ERROR;
+ }
+
+ // all ports have been disabled
+ mIsAborted = true;
+
+ return NO_ERROR;
+ }
+
+ status_t switchToState(OMX_STATETYPE state)
+ {
+ CAMHAL_LOGD(".");
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+ CAMHAL_LOGD(".");
+
+ mState = state;
+ mIsOk = false;
+ mIsAborted = false;
+
+ CAMHAL_LOGD("Switching to state 0x%x...", mState);
+ const OMX_ERRORTYPE switchError = OMX_SendCommand(mComponent,
+ OMX_CommandStateSet, mState, 0);
+ CAMHAL_LOGD("Switching to state 0x%x... DONE", mState);
+
+ if ( switchError != OMX_ErrorNone )
+ {
+ CAMHAL_LOGE("Failed switching to state 0x%x, error: 0x%x", mState, switchError);
+ return Utils::ErrorUtils::omxToAndroidError(switchError);
+ }
+
+ // wait for the event for 3 seconds
+ CAMHAL_LOGD("Waiting...");
+ const status_t waitStatus = mCondition.waitRelative(mLock, seconds_to_nanoseconds(3));
+ CAMHAL_LOGD("Waiting... DONE");
+
+ // disable following events
+ mIsAborted = true;
+
+ if ( waitStatus != NO_ERROR )
+ {
+ CAMHAL_LOGE("Timeout triggered while switching to state 0x%x", mState);
+ return TIMED_OUT;
+ }
+
+        // state has been switched, check whether it was Idle
+ if ( !mIsOk )
+ {
+ CAMHAL_LOGE("Switching to state 0x%x has failed", mState);
+ return UNKNOWN_ERROR;
+ }
+
+ return NO_ERROR;
+ }
+
+ status_t fetchCapabiltiesForMode(OMX_CAMOPERATINGMODETYPE mode,
+ int sensorId,
+ CameraProperties::Properties * properties)
+ {
+ OMX_CONFIG_CAMOPERATINGMODETYPE camMode;
+
+ OMX_INIT_STRUCT_PTR (&camMode, OMX_CONFIG_CAMOPERATINGMODETYPE);
+ camMode.eCamOperatingMode = mode;
+
+ OMX_ERRORTYPE eError = OMX_SetParameter(component(),
+ ( OMX_INDEXTYPE ) OMX_IndexCameraOperatingMode,
+ &camMode);
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGE("Error while configuring camera mode in CameraAdapter_Capabilities 0x%x", eError);
+ return BAD_VALUE;
+ }
+
+ const status_t idleSwitchError = switchToState(OMX_StateIdle);
+ if ( idleSwitchError != NO_ERROR ) {
+ CAMHAL_LOGE("Failed to switch to Idle state, error: %d", idleSwitchError);
+ return UNKNOWN_ERROR;
+ }
+
+ // get and fill capabilities
+ OMXCameraAdapter::getCaps(sensorId, properties, component());
+
+ const status_t loadedSwitchError = switchToState(OMX_StateLoaded);
+ if ( loadedSwitchError != NO_ERROR ) {
+ CAMHAL_LOGE("Failed to switch to Loaded state, error: %d", loadedSwitchError);
+ return UNKNOWN_ERROR;
+ }
+
+ return NO_ERROR;
+ }
+
+ status_t fetchCapabilitiesForSensor(int sensorId,
+ CameraProperties::Properties * properties)
+ {
+ CAMHAL_LOGD("Disabling all ports...");
+ const status_t disableAllPortsError = disableAllPorts();
+ CAMHAL_LOGD("Disabling all ports... DONE");
+
+ if ( disableAllPortsError != NO_ERROR ) {
+ CAMHAL_LOGE("Failed to disable all ports, error: %d",
+ disableAllPortsError);
+ return UNKNOWN_ERROR;
+ }
+
+ // sensor select
+ OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
+ OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
+ sensorSelect.eSensor = (OMX_SENSORSELECT)sensorId;
+
+ CAMHAL_LOGD("Selecting sensor %d...", sensorId);
+ const OMX_ERRORTYPE sensorSelectError = OMX_SetConfig(component(),
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigSensorSelect, &sensorSelect);
+ CAMHAL_LOGD("Selecting sensor %d... DONE", sensorId);
+
+ if ( sensorSelectError != OMX_ErrorNone ) {
+ CAMHAL_LOGD("Max supported sensor number reached: %d", sensorId);
+ return BAD_VALUE;
+ }
+
+ status_t err = NO_ERROR;
+ if ( sensorId == 2 ) {
+ CAMHAL_LOGD("Camera mode: STEREO");
+ properties->setMode(MODE_STEREO);
+ err = fetchCapabiltiesForMode(OMX_CaptureStereoImageCapture,
+ sensorId,
+ properties);
+ } else {
+ CAMHAL_LOGD("Camera MONO");
+
+ CAMHAL_LOGD("Camera mode: HQ ");
+ properties->setMode(MODE_HIGH_QUALITY);
+ err = fetchCapabiltiesForMode(OMX_CaptureImageProfileBase,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: VIDEO ");
+ properties->setMode(MODE_VIDEO);
+ err = fetchCapabiltiesForMode(OMX_CaptureVideo,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: ZSL ");
+ properties->setMode(MODE_ZEROSHUTTERLAG);
+ err = fetchCapabiltiesForMode(OMX_TI_CaptureImageProfileZeroShutterLag,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ CAMHAL_LOGD("Camera mode: HS ");
+ properties->setMode(MODE_HIGH_SPEED);
+ err = fetchCapabiltiesForMode(OMX_CaptureImageHighSpeedTemporalBracketing,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("Camera mode: CPCAM ");
+ properties->setMode(MODE_CPCAM);
+ err = fetchCapabiltiesForMode(OMX_TI_CPCam,
+ sensorId,
+ properties);
+#endif
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+ }
+
+ return err;
+ }
+
+public:
+ static OMX_ERRORTYPE eventCallback(const OMX_HANDLETYPE component,
+ const OMX_PTR cookie, const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData)
+ {
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_UNUSED(pEventData);
+
+ CAMHAL_LOGD("event = 0x%x", event);
+ CAMHAL_LOGD("data1 = 0x%x", data1);
+ CAMHAL_LOGD("data2 = 0x%x", data2);
+
+ CapabilitiesHandler * handler = reinterpret_cast<CapabilitiesHandler*>(cookie);
+
+        // ensure this is our component
+ if ( handler->component() != component )
+ {
+ CAMHAL_LOGE("Wrong component handle received: %p, expecting: %p",
+ component, handler->component());
+ return OMX_ErrorBadParameter;
+ }
+
+ return handler->processEvent(event, data1, data2);
+ }
+
+ OMX_ERRORTYPE processEvent(const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2)
+ {
+ android::AutoMutex locker(mLock);
+ CAMHAL_UNUSED(locker);
+
+ if ( mIsAborted )
+ {
+ CAMHAL_LOGE("Waiting for state switch has been aborted");
+ return OMX_ErrorNone;
+ }
+
+ switch ( event )
+ {
+ case OMX_EventCmdComplete:
+ switch ( data1 )
+ {
+ case OMX_CommandStateSet:
+ // this is our state switch command we are waiting for
+ mIsOk = static_cast<OMX_STATETYPE>(data2) == mState;
+
+ // wake up the caller
+ CAMHAL_LOGD("Waking the condition...");
+ mCondition.signal();
+ CAMHAL_LOGD("Waking the condition... DONE");
+ break;
+
+ case OMX_CommandPortDisable:
+ CAMHAL_LOGD("Decreasing disabled port count: %d", mPortsLeftToDisable);
+ mPortsLeftToDisable--;
+ if ( mPortsLeftToDisable == 0 )
+ {
+ CAMHAL_LOGD("All ports have been disabled, waking the caller...");
+ mIsOk = true;
+ mCondition.signal();
+ CAMHAL_LOGD("All ports have been disabled, waking the caller... DONE");
+ }
+ break;
+
+ default:
+ // ignore rest of the commands
+ break;
+ }
+ break;
+
+ case OMX_EventError:
+ CAMHAL_LOGE("Error event received, data1 = 0x%8x, data2 = 0x%8x", data1, data2);
+
+ // keep mIsOk in false state, indicating that request has failed
+
+ CAMHAL_LOGD("Waking the condition...");
+ mCondition.signal();
+ CAMHAL_LOGD("Waking the condition... DONE");
+ break;
+
+ default:
+ // ignore rest of the event types
+ break;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+private:
+ android::Mutex mLock;
+ android::Condition mCondition;
+ OMX_HANDLETYPE mComponent;
+ OMX_STATETYPE mState;
+ bool mIsAborted;
+ bool mIsOk;
+ int mPortsLeftToDisable;
+};
+
+extern "C" status_t OMXCameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras)
+{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(gAdapterLock);
+ supportedCameras = 0;
+
+ int num_cameras_supported = 0;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ android::AutoMutex lock(gAdapterLock);
if (!properties_array) {
CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
LOG_FUNCTION_NAME_EXIT;
- return -EINVAL;
+ return BAD_VALUE;
}
eError = OMX_Init();
if (eError != OMX_ErrorNone) {
CAMHAL_LOGEB("Error OMX_Init -0x%x", eError);
- return eError;
- }
-
- eError = OMXCameraAdapter::OMXCameraGetHandle(&handle);
- if (eError != OMX_ErrorNone) {
- CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
- goto EXIT;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
// Continue selecting sensor and then querying OMX Camera for it's capabilities
// When sensor select returns an error, we know to break and stop
while (eError == OMX_ErrorNone &&
(starting_camera + num_cameras_supported) < max_camera) {
- // sensor select
- OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
- OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
- sensorSelect.eSensor = (OMX_SENSORSELECT) num_cameras_supported;
- eError = OMX_SetConfig(handle, ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSensorSelect, &sensorSelect);
- if ( OMX_ErrorNone != eError ) {
- break;
+ CapabilitiesHandler handler;
+
+ OMX_CALLBACKTYPE callbacks;
+ callbacks.EventHandler = CapabilitiesHandler::eventCallback;
+ callbacks.EmptyBufferDone = 0;
+ callbacks.FillBufferDone = 0;
+
+ eError = OMXCameraAdapter::OMXCameraGetHandle(&handler.componentRef(), &handler, callbacks);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
+ goto EXIT;
}
- // get and fill capabilities
- properties = properties_array + starting_camera + num_cameras_supported;
- OMXCameraAdapter::getCaps(properties, handle);
+ const int sensorId = num_cameras_supported;
+ CameraProperties::Properties * properties = properties_array + starting_camera + sensorId;
+ const status_t err = handler.fetchCapabilitiesForSensor(sensorId, properties);
- // need to fill facing information
- // assume that only sensor 0 is back facing
- if (num_cameras_supported == 0) {
- properties->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_BACK);
- } else {
- properties->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_FRONT);
+ // clean up
+ if(handler.component()) {
+ CAMHAL_LOGD("Freeing the component...");
+ OMX_FreeHandle(handler.component());
+ CAMHAL_LOGD("Freeing the component... DONE");
+ handler.componentRef() = NULL;
}
+ if ( err != NO_ERROR )
+ break;
+
num_cameras_supported++;
+ CAMHAL_LOGDB("Number of OMX Cameras detected = %d \n",num_cameras_supported);
}
EXIT:
- // clean up
- if(handle) {
- OMX_FreeHandle(handle);
- handle=NULL;
- }
+ CAMHAL_LOGD("Deinit...");
OMX_Deinit();
+ CAMHAL_LOGD("Deinit... DONE");
+
+ if ( eError != OMX_ErrorNone )
+ {
+ CAMHAL_LOGE("Error: 0x%x", eError);
+ LOG_FUNCTION_NAME_EXIT;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ supportedCameras = num_cameras_supported;
LOG_FUNCTION_NAME_EXIT;
- return num_cameras_supported;
+ return NO_ERROR;
}
-};
+} // namespace Camera
+} // namespace Ti
/*--------------------Camera Adapter Class ENDS here-----------------------------*/
+
diff --git a/camera/OMXCameraAdapter/OMXCapabilities.cpp b/camera/OMXCameraAdapter/OMXCapabilities.cpp
index e1323ee..646e964 100644
--- a/camera/OMXCameraAdapter/OMXCapabilities.cpp
+++ b/camera/OMXCameraAdapter/OMXCapabilities.cpp
@@ -26,46 +26,90 @@
#include "ErrorUtils.h"
#include "TICameraParameters.h"
-namespace android {
-
-#undef LOG_TAG
-
-// Maintain a separate tag for OMXCameraAdapter logs to isolate issues OMX specific
-#define LOG_TAG "CameraHAL"
+namespace Ti {
+namespace Camera {
/************************************
* global constants and variables
*************************************/
#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
-#define FPS_MIN 5
-#define FPS_STEP 5
-#define FPS_RANGE_STEP 5
static const char PARAM_SEP[] = ",";
-static const int PARAM_SEP_CHAR = ',';
static const uint32_t VFR_OFFSET = 8;
-static const char VFR_BACKET_START[] = "(";
-static const char VFR_BRACKET_END[] = ")";
-static const char FRAMERATE_COUNT = 10;
+static const char FPS_STR_MAX_LEN = 10;
+
+static const unsigned int MANUAL_EXPOSURE_STEP = 1;
+static const unsigned int MANUAL_GAIN_ISO_MIN = 100;
+static const unsigned int MANUAL_GAIN_ISO_STEP = 100;
+
+const int OMXCameraAdapter::SENSORID_IMX060 = 300;
+const int OMXCameraAdapter::SENSORID_OV5650 = 301;
+const int OMXCameraAdapter::SENSORID_OV5640 = 302;
+const int OMXCameraAdapter::SENSORID_OV14825 = 304;
+const int OMXCameraAdapter::SENSORID_S5K4E1GA = 305;
+const int OMXCameraAdapter::SENSORID_S5K6A1GX03 = 306;
+
+const int OMXCameraAdapter::FPS_MIN = 5;
+const int OMXCameraAdapter::FPS_MAX = 30;
+const int OMXCameraAdapter::FPS_MAX_EXTENDED = 60;
+
+inline static int androidFromDucatiFrameRate(OMX_U32 frameRate) {
+ return (frameRate >> VFR_OFFSET) * CameraHal::VFR_SCALE;
+}
/**** look up tables to translate OMX Caps to Parameter ****/
const CapResolution OMXCameraAdapter::mImageCapRes [] = {
+ { 4416, 3312, "4416x3312" },
{ 4032, 3024, "4032x3024" },
{ 4000, 3000, "4000x3000" },
{ 3648, 2736, "3648x2736" },
{ 3264, 2448, "3264x2448" },
+ { 2608, 1960, "2608x1960" },
{ 2592, 1944, "2592x1944" },
{ 2592, 1728, "2592x1728" },
{ 2592, 1458, "2592x1458" },
+ { 2304, 1296, "2304x1296" },
+ { 2240, 1344, "2240x1344" },
+ { 2160, 1440, "2160x1440" },
+ { 2112, 1728, "2112x1728" },
{ 2048, 1536, "2048x1536" },
+ { 2016, 1512, "2016x1512" },
+ { 2000, 1600, "2000x1600" },
{ 1600, 1200, "1600x1200" },
{ 1280, 1024, "1280x1024" },
- { 1152, 864, "1152x864" },
- { 1280, 960, "1280x960" },
- { 640, 480, "640x480" },
- { 320, 240, "320x240" },
+ { 1152, 864, "1152x864" },
+ { 1280, 960, "1280x960" },
+ { 1024, 768, "1024x768" },
+ { 640, 480, "640x480" },
+ { 320, 240, "320x240" },
+};
+
+const CapResolution OMXCameraAdapter::mImageCapResSS [] = {
+ { 4032*2, 3024, "8064x3024" },
+ { 3648*2, 2736, "7296x2736" },
+ { 3264*2, 2448, "6528x2448" },
+ { 2592*2, 1944, "5184x1944" },
+ { 2048*2, 1536, "4096x1536" },
+ { 1600*2, 1200, "3200x1200" },
+ { 1280*2, 960, "2560x960" },
+ { 1024*2, 768, "2048x768" },
+ { 640*2, 480, "1280x480" },
+ { 320*2, 240, "640x240" },
+};
+
+const CapResolution OMXCameraAdapter::mImageCapResTB [] = {
+ { 4032, 3024*2, "4032x6048" },
+ { 3648, 2736*2, "3648x5472" },
+ { 3264, 2448*2, "3264x4896" },
+ { 2592, 1944*2, "2592x3888" },
+ { 2048, 1536*2, "2048x3072" },
+ { 1600, 1200*2, "1600x2400" },
+ { 1280, 960*2, "1280x1920" },
+ { 1024, 768*2, "1024x1536" },
+ { 640, 480*2, "640x960" },
+ { 320, 240*2, "320x480" },
};
const CapResolution OMXCameraAdapter::mPreviewRes [] = {
@@ -81,9 +125,57 @@ const CapResolution OMXCameraAdapter::mPreviewRes [] = {
{ 352, 288, "352x288" },
{ 240, 160, "240x160" },
{ 176, 144, "176x144" },
+ { 160, 120, "160x120" },
{ 128, 96, "128x96" },
};
+const CapResolution OMXCameraAdapter::mPreviewPortraitRes [] = {
+ //Portrait resolutions
+ { 1088, 1920, "1088x1920" },
+ { 720, 1280, "720x1280" },
+ { 480, 800, "480x800" },
+ { 576, 720, "576x720" },
+ { 576, 768, "576x768" },
+ { 480, 720, "480x720" },
+ { 480, 640, "480x640" },
+ { 288, 352, "288x352" },
+ { 240, 320, "240x320" },
+ { 160, 240, "160x240" },
+ { 144, 176, "144x176" },
+ { 120, 160, "120x160"},
+ { 96, 128, "96x128" }
+};
+
+const CapResolution OMXCameraAdapter::mPreviewResSS [] = {
+ { 1920*2, 1080, "3840x1080" },
+ { 1280*2, 720, "2560x720" },
+ { 800*2, 480, "1600x480" },
+ { 720*2, 576, "1440x576" },
+ { 720*2, 480, "1440x480" },
+ { 768*2, 576, "1536x576" },
+ { 640*2, 480, "1280x480" },
+ { 320*2, 240, "640x240" },
+ { 352*2, 288, "704x288" },
+ { 240*2, 160, "480x160" },
+ { 176*2, 144, "352x144" },
+ { 128*2, 96, "256x96" }
+};
+
+const CapResolution OMXCameraAdapter::mPreviewResTB [] = {
+ { 1920, 1080*2, "1920x2160" },
+ { 1280, 720*2, "1280x1440" },
+ { 800, 480*2, "800x960" },
+ { 720, 576*2, "720x1152" },
+ { 720, 480*2, "720x960" },
+ { 768, 576*2, "768x1152" },
+ { 640, 480*2, "640x960" },
+ { 320, 240*2, "320x480" },
+ { 352, 288*2, "352x576" },
+ { 240, 160*2, "240x320" },
+ { 176, 144*2, "176x288" },
+ { 128, 96*2, "128x192" },
+};
+
const CapResolution OMXCameraAdapter::mThumbRes [] = {
{ 640, 480, "640x480" },
{ 160, 120, "160x120" },
@@ -96,16 +188,42 @@ const CapResolution OMXCameraAdapter::mThumbRes [] = {
};
const CapPixelformat OMXCameraAdapter::mPixelformats [] = {
- { OMX_COLOR_FormatCbYCrY, CameraParameters::PIXEL_FORMAT_YUV422I },
- { OMX_COLOR_FormatYUV420SemiPlanar, CameraParameters::PIXEL_FORMAT_YUV420SP },
- { OMX_COLOR_Format16bitRGB565, CameraParameters::PIXEL_FORMAT_RGB565 },
- { OMX_COLOR_FormatRawBayer10bit, TICameraParameters::PIXEL_FORMAT_RAW },
- { OMX_COLOR_FormatYUV420SemiPlanar, CameraParameters::PIXEL_FORMAT_YUV420P },
+ { OMX_COLOR_FormatCbYCrY, android::CameraParameters::PIXEL_FORMAT_YUV422I },
+ { OMX_COLOR_FormatYUV420SemiPlanar, android::CameraParameters::PIXEL_FORMAT_YUV420SP },
+ { OMX_COLOR_Format16bitRGB565, android::CameraParameters::PIXEL_FORMAT_RGB565 },
+ { OMX_COLOR_FormatYUV420SemiPlanar, android::CameraParameters::PIXEL_FORMAT_YUV420P },
+ { OMX_COLOR_FormatUnused, TICameraParameters::PIXEL_FORMAT_UNUSED },
+ { OMX_COLOR_FormatRawBayer10bit, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB },
+};
+
+const userToOMX_LUT OMXCameraAdapter::mFrameLayout [] = {
+ { TICameraParameters::S3D_NONE, OMX_TI_StereoFrameLayout2D },
+ { TICameraParameters::S3D_TB_FULL, OMX_TI_StereoFrameLayoutTopBottom },
+ { TICameraParameters::S3D_SS_FULL, OMX_TI_StereoFrameLayoutLeftRight },
+#ifndef OMAP_TUNA
+ { TICameraParameters::S3D_TB_SUBSAMPLED, OMX_TI_StereoFrameLayoutTopBottomSubsample },
+ { TICameraParameters::S3D_SS_SUBSAMPLED, OMX_TI_StereoFrameLayoutLeftRightSubsample },
+#endif
+};
+
+const LUTtype OMXCameraAdapter::mLayoutLUT = {
+ ARRAY_SIZE(mFrameLayout),
+ mFrameLayout
+};
+
+const CapCodingFormat OMXCameraAdapter::mImageCodingFormat [] = {
+ { OMX_IMAGE_CodingJPEG, android::CameraParameters::PIXEL_FORMAT_JPEG },
+ { (OMX_IMAGE_CODINGTYPE)OMX_TI_IMAGE_CodingJPS, TICameraParameters::PIXEL_FORMAT_JPS },
+ { (OMX_IMAGE_CODINGTYPE)OMX_TI_IMAGE_CodingMPO, TICameraParameters::PIXEL_FORMAT_MPO },
};
const CapFramerate OMXCameraAdapter::mFramerates [] = {
+ { 60, "60" },
{ 30, "30" },
+ { 24, "24" },
+ { 20, "20" },
{ 15, "15" },
+ { 10, "10" },
};
const CapZoom OMXCameraAdapter::mZoomStages [] = {
@@ -185,37 +303,45 @@ const CapISO OMXCameraAdapter::mISOStages [] = {
// mapped values have to match with new_sensor_MSP.h
const CapU32 OMXCameraAdapter::mSensorNames [] = {
- { 300, "IMX060" },
- { 301, "OV5650" },
- { 305, "S5K4E1GA"},
- { 306, "S5K6A1GX03" }
+ { SENSORID_IMX060, "IMX060" },
+ { SENSORID_OV5650, "OV5650" },
+ { SENSORID_OV5640, "OV5640" },
+ { SENSORID_OV14825, "OV14825"},
+ { SENSORID_S5K4E1GA, "S5K4E1GA"},
+ { SENSORID_S5K6A1GX03, "S5K6A1GX03" }
// TODO(XXX): need to account for S3D camera later
};
-// values for supported variable framerates sorted in ascending order
-// CapU32Pair = (max fps, min fps, string representation)
-const CapU32Pair OMXCameraAdapter::mVarFramerates [] = {
- { 15, 15, "(15000,15000)"},
- { 30, 15, "(15000,30000)" },
- { 30, 24, "(24000,30000)" },
-// TODO(XXX): Removing 30,30 range to limit 1080p at 24fps. Will put back soon.
-#if 0
- { 30, 30, "(30000,30000)" },
-#endif
+const userToOMX_LUT OMXCameraAdapter::mAutoConvergence [] = {
+ { TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE, OMX_TI_AutoConvergenceModeDisable },
+ { TICameraParameters::AUTOCONVERGENCE_MODE_FRAME, OMX_TI_AutoConvergenceModeFrame },
+ { TICameraParameters::AUTOCONVERGENCE_MODE_CENTER, OMX_TI_AutoConvergenceModeCenter },
+ { TICameraParameters::AUTOCONVERGENCE_MODE_TOUCH, OMX_TI_AutoConvergenceModeFocusFaceTouch },
+ { TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL, OMX_TI_AutoConvergenceModeManual }
};
-/************************************
- * static helper functions
- *************************************/
-// utility function to remove last seperator
-void remove_last_sep(char* buffer) {
- char* last_sep = NULL;
- last_sep = strrchr(buffer, PARAM_SEP_CHAR);
- if (last_sep != NULL) {
- last_sep[0] = '\0';
- }
-}
+const LUTtype OMXCameraAdapter::mAutoConvergenceLUT = {
+ ARRAY_SIZE(mAutoConvergence),
+ mAutoConvergence
+};
+const userToOMX_LUT OMXCameraAdapter::mBracketingModes [] = {
+ { TICameraParameters::TEMP_BRACKETING , OMX_BracketTemporal },
+ { TICameraParameters::EXPOSURE_BRACKETING , OMX_BracketExposureRelativeInEV }
+};
+
+const LUTtype OMXCameraAdapter::mBracketingModesLUT = {
+ ARRAY_SIZE(mBracketingModes),
+ mBracketingModes
+};
+
+// values for supported camera facing direction
+const CapU32 OMXCameraAdapter::mFacing [] = {
+#ifndef OMAP_TUNA
+ { OMX_TI_SENFACING_BACK , TICameraParameters::FACING_BACK },
+ { OMX_TI_SENFACING_FRONT, TICameraParameters::FACING_FRONT},
+#endif
+};
/*****************************************
* internal static function declarations
@@ -223,24 +349,28 @@ void remove_last_sep(char* buffer) {
/**** Utility functions to help translate OMX Caps to Parameter ****/
-status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
- const CapPixelformat *cap,
- size_t capCount,
- char * buffer,
- size_t bufferSize) {
+status_t OMXCameraAdapter::encodeImageCodingFormatCap(OMX_IMAGE_CODINGTYPE format,
+ const CapCodingFormat *cap,
+ size_t capCount,
+ char * buffer) {
+
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
if ( ( NULL == buffer ) || ( NULL == cap ) ) {
CAMHAL_LOGEA("Invalid input arguments");
- return -EINVAL;
+ ret = -EINVAL;
}
- for ( unsigned int i = 0; i < capCount; i++ ) {
- if ( format == cap[i].pixelformat ) {
- strncat(buffer, cap[i].param, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
+ if ( NO_ERROR == ret ) {
+ for ( unsigned int i = 0 ; i < capCount ; i++ ) {
+ if ( format == cap[i].imageCodingFormat ) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, ((((int)MAX_PROP_VALUE_LENGTH - 1 - (int)strlen(buffer)) < 0) ? 0 : (MAX_PROP_VALUE_LENGTH - 1 - strlen(buffer))));
+ }
+ strncat(buffer, cap[i].param, ((((int)MAX_PROP_VALUE_LENGTH - 1 - (int)strlen(buffer)) < 0) ? 0 : (MAX_PROP_VALUE_LENGTH - 1 - strlen(buffer))));
+ }
}
}
@@ -249,16 +379,13 @@ status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
return ret;
}
-status_t OMXCameraAdapter::encodeFramerateCap(OMX_U32 framerateMax,
- OMX_U32 framerateMin,
- const CapFramerate *cap,
- size_t capCount,
- char * buffer,
- size_t bufferSize) {
+status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
+ const CapPixelformat *cap,
+ size_t capCount,
+ char * buffer,
+ size_t bufferSize)
+{
status_t ret = NO_ERROR;
- bool minInserted = false;
- bool maxInserted = false;
- char tmpBuffer[FRAMERATE_COUNT];
LOG_FUNCTION_NAME;
@@ -267,113 +394,86 @@ status_t OMXCameraAdapter::encodeFramerateCap(OMX_U32 framerateMax,
return -EINVAL;
}
- for ( unsigned int i = 0; i < capCount; i++ ) {
- if ( (framerateMax >= cap[i].num) && (framerateMin <= cap[i].num) ) {
- strncat(buffer, cap[i].param, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
- if ( cap[i].num == framerateMin ) {
- minInserted = true;
+ for ( unsigned int i = 0 ; i < capCount ; i++ )
+ {
+ if ( format == cap[i].pixelformat )
+ {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
+ strncat(buffer, cap[i].param, bufferSize - 1);
}
}
- if ( cap[i].num == framerateMax ) {
- maxInserted = true;
- }
- }
-
- if ( !maxInserted ) {
- memset(tmpBuffer, 0, FRAMERATE_COUNT);
- snprintf(tmpBuffer, FRAMERATE_COUNT - 1, "%u,", ( unsigned int ) framerateMax);
- strncat(buffer, tmpBuffer, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
- }
-
- if ( !minInserted ) {
- memset(tmpBuffer, 0, FRAMERATE_COUNT);
- snprintf(tmpBuffer, FRAMERATE_COUNT - 1, "%u,", ( unsigned int ) framerateMin);
- strncat(buffer, tmpBuffer, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
- }
-
- remove_last_sep(buffer);
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::encodeVFramerateCap(OMX_TI_CAPTYPE &caps,
- const CapU32Pair *cap,
- size_t capCount,
- char *buffer,
- char *defaultRange,
- size_t bufferSize) {
- status_t ret = NO_ERROR;
- uint32_t minVFR, maxVFR;
- int default_index = -1;
-
+void OMXCameraAdapter::encodeFrameRates(const int minFrameRate, const int maxFrameRate,
+ const OMX_TI_CAPTYPE & caps, const CapFramerate * const fixedFrameRates,
+ const int frameRateCount, android::Vector<FpsRange> & fpsRanges) {
LOG_FUNCTION_NAME;
- if ( (NULL == buffer) || (NULL == cap) ) {
- CAMHAL_LOGEA("Invalid input arguments");
- return -EINVAL;
+ if ( minFrameRate == maxFrameRate ) {
+ // single fixed frame rate supported
+ fpsRanges.add(FpsRange(minFrameRate, maxFrameRate));
+ return;
}
- if(caps.ulPrvVarFPSModesCount < 1) {
- return NO_ERROR;
- }
+ // insert min and max frame rates
+ fpsRanges.add(FpsRange(minFrameRate, minFrameRate));
+ fpsRanges.add(FpsRange(maxFrameRate, maxFrameRate));
- // Assumption: last range in tPrvVarFPSModes will be for S30FPSHD mode
- minVFR = caps.tPrvVarFPSModes[caps.ulPrvVarFPSModesCount-1].nVarFPSMin >> VFR_OFFSET;
- maxVFR = caps.tPrvVarFPSModes[caps.ulPrvVarFPSModesCount-1].nVarFPSMax >> VFR_OFFSET;
+ // insert variable frame rates
+ for ( int i = 0; i < static_cast<int>(caps.ulPrvVarFPSModesCount); ++i ) {
+ const FpsRange fpsRange = FpsRange(
+ max(androidFromDucatiFrameRate(caps.tPrvVarFPSModes[i].nVarFPSMin), minFrameRate),
+ min(androidFromDucatiFrameRate(caps.tPrvVarFPSModes[i].nVarFPSMax), maxFrameRate));
- if (minVFR < FPS_MIN) {
- minVFR = FPS_MIN;
+ if ( fpsRange.isFixed() ) {
+ // this range is either min or max fixed frame rate, already added above
+ continue;
+ }
+
+ fpsRanges.add(fpsRange);
}
- for (unsigned int i = 0; i < capCount; i++) {
- // add cap[i] if it is in range and maxVFR != minVFR
- if ((maxVFR >= cap[i].num1) && (minVFR <= cap[i].num2)) {
- if (buffer[0] != '\0') {
- strncat(buffer, PARAM_SEP, bufferSize - 1);
- }
- strncat(buffer, cap[i].param, bufferSize - 1);
+ // insert fixed frame rates
+ for ( int i = 0; i < frameRateCount; ++i ) {
+ const int fixedFrameRate = fixedFrameRates[i].num * CameraHal::VFR_SCALE;
- // choose the max variable framerate as default
- if (cap[i].num1 != cap[i].num2) {
- default_index = i;
- }
+ if ( fixedFrameRate < minFrameRate || fixedFrameRate > maxFrameRate ) {
+ // not supported by hardware
+ continue;
}
- }
- // if we haven't found any caps in the list to populate
- // just use the min and max
- if (buffer[0] == '\0') {
- snprintf(buffer, bufferSize - 1,
- "(%u,%u)",
- minVFR * CameraHal::VFR_SCALE,
- maxVFR * CameraHal::VFR_SCALE);
+ const FpsRange fpsRange = FpsRange(fixedFrameRate, fixedFrameRate);
+ fpsRanges.add(fpsRange);
}
- if (default_index != -1) {
- snprintf(defaultRange, (MAX_PROP_VALUE_LENGTH - 1), "%lu,%lu",
- cap[default_index].num2 * CameraHal::VFR_SCALE,
- cap[default_index].num1 * CameraHal::VFR_SCALE);
- } else {
- snprintf(defaultRange, (MAX_PROP_VALUE_LENGTH - 1), "%u,%u",
- minVFR * CameraHal::VFR_SCALE, maxVFR * CameraHal::VFR_SCALE);
- }
-
- LOG_FUNCTION_NAME_EXIT;
+ // sort first by max, then by min, according to Android API requirements
+ fpsRanges.sort(FpsRange::compare);
- return ret;
+ // remove duplicated frame rates
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()) - 1; ) {
+ const FpsRange & current = fpsRanges.itemAt(i);
+ const FpsRange & next = fpsRanges.itemAt(i + 1);
+ if ( current == next ) {
+ fpsRanges.removeAt(i + 1);
+ } else {
+ i++;
+ }
+ }
}
size_t OMXCameraAdapter::encodeZoomCap(OMX_S32 maxZoom,
const CapZoom *cap,
size_t capCount,
char * buffer,
- size_t bufferSize) {
+ size_t bufferSize)
+{
status_t res = NO_ERROR;
size_t ret = 0;
@@ -387,12 +487,13 @@ size_t OMXCameraAdapter::encodeZoomCap(OMX_S32 maxZoom,
for ( unsigned int i = 0; i < capCount; i++ ) {
if ( cap[i].num <= maxZoom ) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
strncat(buffer, cap[i].param, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
ret++;
}
}
- remove_last_sep(buffer);
LOG_FUNCTION_NAME_EXIT;
@@ -403,7 +504,8 @@ status_t OMXCameraAdapter::encodeISOCap(OMX_U32 maxISO,
const CapISO *cap,
size_t capCount,
char * buffer,
- size_t bufferSize) {
+ size_t bufferSize)
+{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
@@ -415,11 +517,12 @@ status_t OMXCameraAdapter::encodeISOCap(OMX_U32 maxISO,
for ( unsigned int i = 0; i < capCount; i++ ) {
if ( cap[i].num <= maxISO) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
strncat(buffer, cap[i].param, bufferSize - 1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
}
}
- remove_last_sep(buffer);
LOG_FUNCTION_NAME_EXIT;
@@ -430,7 +533,8 @@ status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res,
const CapResolution *cap,
size_t capCount,
char * buffer,
- size_t bufferSize) {
+ size_t bufferSize)
+{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
@@ -445,8 +549,10 @@ status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res,
(cap[i].height <= res.nHeightMax) &&
(cap[i].width >= res.nWidthMin) &&
(cap[i].height >= res.nHeightMin) ) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
strncat(buffer, cap[i].param, bufferSize -1);
- strncat(buffer, PARAM_SEP, bufferSize - 1);
}
}
@@ -455,59 +561,287 @@ status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res,
return ret;
}
-status_t OMXCameraAdapter::insertImageSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::encodeSizeCap3D(OMX_TI_CAPRESTYPE &res,
+ const CapResolution *cap,
+ size_t capCount,
+ char * buffer,
+ size_t bufferSize)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( (NULL == buffer) || (NULL == cap) ) {
+ CAMHAL_LOGEA("Invalid input arguments");
+ return -EINVAL;
+ }
+
+ for ( unsigned int i = 0 ; i < capCount ; i++ ) {
+ if ( (cap[i].width <= res.nWidthMax) &&
+ (cap[i].height <= res.nHeightMax) &&
+ (cap[i].width >= res.nWidthMin) &&
+ (cap[i].height >= res.nHeightMin)
+#ifndef OMAP_TUNA
+ && (cap[i].width * cap[i].height <= res.nMaxResInPixels)
+#endif
+ ) {
+ if (buffer[0] != '\0') {
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
+ strncat(buffer, cap[i].param, bufferSize -1);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertImageSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
+ int s3d_detected = 0;
+ int s3d_ss_detected = 0;
+ int s3d_tb_detected = 0;
LOG_FUNCTION_NAME;
+#ifndef OMAP_TUNA
+ for ( unsigned int i = 0 ; i < caps.ulCapFrameLayoutCount; i++ ) {
+ if (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottom)
+ {
+ s3d_tb_detected = 1;
+ }
+ else if (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRight)
+ {
+ s3d_ss_detected = 1;
+ }
+ else if ( (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottomSubsample)
+ || (caps.eCapFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRightSubsample) )
+ {
+ s3d_detected = 1;
+ }
+ }
+#endif
+
memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
- ret = encodeSizeCap(caps.tImageResRange,
+ // Check if we are in 2d mode
+ if (!s3d_ss_detected && !s3d_tb_detected && !s3d_detected)
+ {
+ ret = encodeSizeCap(caps.tImageResRange,
mImageCapRes,
ARRAY_SIZE(mImageCapRes),
supported,
MAX_PROP_VALUE_LENGTH);
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
- } else {
- remove_last_sep(supported);
- params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported);
+ }
+ params->set(CameraProperties::MAX_PICTURE_WIDTH, caps.tImageResRange.nWidthMax);
+ params->set(CameraProperties::MAX_PICTURE_HEIGHT, caps.tImageResRange.nHeightMax);
}
+ else // 3d mode
+ {
+ if (s3d_tb_detected)
+ {
+ ret = encodeSizeCap3D(caps.tImageResRange,
+ mImageCapResTB,
+ ARRAY_SIZE(mImageCapResTB),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES, supported);
+ }
- LOG_FUNCTION_NAME;
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ if (s3d_ss_detected)
+ {
+ ret = encodeSizeCap3D(caps.tImageResRange,
+ mImageCapResSS,
+ ARRAY_SIZE(mImageCapResSS),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES, supported);
+ }
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ if (s3d_detected)
+ {
+ ret = encodeSizeCap3D(caps.tImageResRange,
+ mImageCapRes,
+ ARRAY_SIZE(mImageCapRes),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES, supported);
+ }
+ } else {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES, supported);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
+ int s3d_detected = 0;
+ int s3d_ss_detected = 0;
+ int s3d_tb_detected = 0;
LOG_FUNCTION_NAME;
+#ifndef OMAP_TUNA
+ for ( unsigned int i = 0 ; i < caps.ulPrvFrameLayoutCount; i++ ) {
+ if (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottom)
+ {
+ s3d_tb_detected = 1;
+ }
+ else if (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRight)
+ {
+ s3d_ss_detected = 1;
+ }
+ else if ( (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutTopBottomSubsample)
+ || (caps.ePrvFrameLayout[i] == OMX_TI_StereoFrameLayoutLeftRightSubsample) )
+ {
+ s3d_detected = 1;
+ }
+ }
+#endif
+
memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
- ret = encodeSizeCap(caps.tPreviewResRange,
+ // Check if we are in 2d mode
+ if (!s3d_ss_detected && !s3d_tb_detected && !s3d_detected)
+ {
+ ret = encodeSizeCap(caps.tPreviewResRange,
mPreviewRes,
ARRAY_SIZE(mPreviewRes),
supported,
MAX_PROP_VALUE_LENGTH);
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("Error inserting supported preview sizes 0x%x", ret);
- } else {
- remove_last_sep(supported);
- params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported Landscape preview sizes 0x%x", ret);
+ return ret;
+ }
+
+#ifndef OMAP_TUNA
+ /* Insert Portait Resolutions by verifying Potrait Capability Support */
+ ret = encodeSizeCap(caps.tRotatedPreviewResRange,
+ mPreviewPortraitRes,
+ ARRAY_SIZE(mPreviewPortraitRes),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported Potrait preview sizes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported);
+ }
+#endif
}
+ else // 3d mode
+ {
+ if (s3d_tb_detected)
+ {
+ ret = encodeSizeCap3D(caps.tPreviewResRange,
+ mPreviewResTB,
+ ARRAY_SIZE(mPreviewResTB),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported 3D TB preview sizes 0x%x", ret);
+ return ret;
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES, supported);
+ }
- LOG_FUNCTION_NAME;
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ if (s3d_ss_detected)
+ {
+ ret = encodeSizeCap3D(caps.tPreviewResRange,
+ mPreviewResSS,
+ ARRAY_SIZE(mPreviewResSS),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported 3D SS preview sizes 0x%x", ret);
+ return ret;
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES, supported);
+ }
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ if (s3d_detected)
+ {
+ ret = encodeSizeCap3D(caps.tPreviewResRange,
+ mPreviewRes,
+ ARRAY_SIZE(mPreviewRes),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported preview sizes 0x%x", ret);
+ return ret;
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, supported);
+ }
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, supported);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertVideoSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertVideoSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -524,16 +858,16 @@ status_t OMXCameraAdapter::insertVideoSizes(CameraProperties::Properties* params
if ( NO_ERROR != ret ) {
CAMHAL_LOGEB("Error inserting supported video sizes 0x%x", ret);
} else {
- remove_last_sep(supported);
params->set(CameraProperties::SUPPORTED_VIDEO_SIZES, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -551,11 +885,14 @@ status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params
CAMHAL_LOGEB("Error inserting supported thumbnail sizes 0x%x", ret);
} else {
//CTS Requirement: 0x0 should always be supported
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, "0x0", MAX_PROP_NAME_LENGTH);
params->set(CameraProperties::SUPPORTED_THUMBNAIL_SIZES, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -580,25 +917,26 @@ status_t OMXCameraAdapter::insertZoomStages(CameraProperties::Properties* params
params->set(CameraProperties::SUPPORTED_ZOOM_STAGES, zoomStageCount - 1); //As per CTS requirement
if ( 0 == zoomStageCount ) {
- params->set(CameraProperties::ZOOM_SUPPORTED, TICameraParameters::ZOOM_UNSUPPORTED);
- params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, TICameraParameters::ZOOM_UNSUPPORTED);
+ params->set(CameraProperties::ZOOM_SUPPORTED, android::CameraParameters::FALSE);
+ params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, android::CameraParameters::FALSE);
} else {
- params->set(CameraProperties::ZOOM_SUPPORTED, TICameraParameters::ZOOM_SUPPORTED);
- params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, TICameraParameters::ZOOM_SUPPORTED);
+ params->set(CameraProperties::ZOOM_SUPPORTED, android::CameraParameters::TRUE);
+ params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, android::CameraParameters::TRUE);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
LOG_FUNCTION_NAME;
- memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ memset(supported, '\0', sizeof(supported));
for ( int i = 0 ; i < caps.ulImageFormatCount ; i++ ) {
ret = encodePixelformatCap(caps.eImageFormats[i],
@@ -606,24 +944,38 @@ status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* para
ARRAY_SIZE(mPixelformats),
supported,
MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture formats 0x%x", ret);
+ break;
+ }
+ }
+
+#ifndef OMAP_TUNA
+ for (int i = 0; i < caps.ulImageCodingFormatCount ; i++) {
+ ret = encodeImageCodingFormatCap(caps.eImageCodingFormat[i],
+ mImageCodingFormat,
+ ARRAY_SIZE(mImageCodingFormat),
+ supported);
+
if ( NO_ERROR != ret ) {
CAMHAL_LOGEB("Error inserting supported picture formats 0x%x", ret);
break;
}
}
+#endif
if ( NO_ERROR == ret ) {
- //jpeg is not supported in OMX capabilies yet
- strncat(supported, CameraParameters::PIXEL_FORMAT_JPEG, MAX_PROP_VALUE_LENGTH - 1);
params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -646,74 +998,132 @@ status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* pa
if ( NO_ERROR == ret ) {
// need to advertise we support YV12 format
// We will program preview port with NV21 when we see application set YV12
- strncat(supported, CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat(supported, android::CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
- status_t ret = NO_ERROR;
- char supported[MAX_PROP_VALUE_LENGTH];
+status_t OMXCameraAdapter::insertFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ // collect supported normal frame rates
+ {
+ android::Vector<FpsRange> fpsRanges;
+
+ const int minFrameRate = max<int>(FPS_MIN * CameraHal::VFR_SCALE,
+ androidFromDucatiFrameRate(caps.xFramerateMin));
+ const int maxFrameRate = min<int>(FPS_MAX * CameraHal::VFR_SCALE,
+ androidFromDucatiFrameRate(caps.xFramerateMax));
+
+ if ( minFrameRate > maxFrameRate ) {
+ CAMHAL_LOGE("Invalid frame rate range: [%d .. %d]", caps.xFramerateMin, caps.xFramerateMax);
+ return BAD_VALUE;
+ }
- LOG_FUNCTION_NAME;
+ encodeFrameRates(minFrameRate, maxFrameRate, caps, mFramerates, ARRAY_SIZE(mFramerates), fpsRanges);
- memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ // populate variable frame rates
+ char supported[MAX_PROP_VALUE_LENGTH];
+ char defaultRange[MAX_PROP_VALUE_LENGTH];
- ret = encodeFramerateCap(caps.xFramerateMax >> VFR_OFFSET,
- caps.xFramerateMin >> VFR_OFFSET,
- mFramerates,
- ARRAY_SIZE(mFramerates),
- supported,
- MAX_PROP_VALUE_LENGTH);
+ memset(supported, 0, sizeof(supported));
+ memset(defaultRange, 0, sizeof(defaultRange));
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("Error inserting supported preview framerates 0x%x", ret);
- } else {
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+ const FpsRange & fpsRange = fpsRanges.itemAt(i);
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ char tmp[MAX_PROP_VALUE_LENGTH];
+ snprintf(tmp, sizeof(tmp) - 1, "(%d,%d)", fpsRange.min(), fpsRange.max());
+ strcat(supported, tmp);
+ }
+
+ const FpsRange & defaultFpsRange = fpsRanges.itemAt(fpsRanges.size() - 1);
+ snprintf(defaultRange, sizeof(defaultRange) - 1, "%d,%d", defaultFpsRange.min(), defaultFpsRange.max());
+
+ CAMHAL_LOGD("Supported framerate ranges: %s", supported);
+ CAMHAL_LOGD("Default framerate range: [%s]", defaultRange);
+
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported);
+ params->set(CameraProperties::FRAMERATE_RANGE, defaultRange);
+
+ // populate fixed frame rates
+ memset(supported, 0, sizeof(supported));
+ memset(defaultRange, 0, sizeof(defaultRange));
+
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+ const FpsRange & fpsRange = fpsRanges.itemAt(i);
+ if ( fpsRange.isFixed() && (fpsRange.min()%CameraHal::VFR_SCALE) == 0 ) {
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ char tmp[MAX_PROP_VALUE_LENGTH];
+ snprintf(tmp, sizeof(tmp) - 1, "%d", fpsRange.min()/CameraHal::VFR_SCALE);
+ strcat(supported, tmp);
+ }
+ }
+
+ CAMHAL_LOGD("Supported preview framerates: %s", supported);
params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, supported);
+
+ // insert default frame rate only if it is fixed
+ if ( defaultFpsRange.isFixed() && (defaultFpsRange.min()%CameraHal::VFR_SCALE) == 0 ) {
+ snprintf(defaultRange, sizeof(defaultRange) - 1, "%d", defaultFpsRange.min()/CameraHal::VFR_SCALE);
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, defaultRange);
+ }
}
- LOG_FUNCTION_NAME;
+ // collect supported extended frame rates
+ {
+ android::Vector<FpsRange> fpsRanges;
- return ret;
-}
+ const int minFrameRate = max<int>(FPS_MIN * CameraHal::VFR_SCALE,
+ androidFromDucatiFrameRate(caps.xFramerateMin));
+ const int maxFrameRate = min<int>(FPS_MAX_EXTENDED * CameraHal::VFR_SCALE,
+ androidFromDucatiFrameRate(caps.xFramerateMax));
-status_t OMXCameraAdapter::insertVFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
- status_t ret = NO_ERROR;
- char supported[MAX_PROP_VALUE_LENGTH];
- char defaultRange[MAX_PROP_VALUE_LENGTH];
+ encodeFrameRates(minFrameRate, maxFrameRate, caps, mFramerates, ARRAY_SIZE(mFramerates), fpsRanges);
- LOG_FUNCTION_NAME;
+ // populate variable frame rates
+ char supported[MAX_PROP_VALUE_LENGTH];
+ memset(supported, 0, sizeof(supported) - 1);
- memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+ const FpsRange & fpsRange = fpsRanges.itemAt(i);
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ char tmp[MAX_PROP_VALUE_LENGTH];
+ snprintf(tmp, sizeof(tmp) - 1, "(%d,%d)", fpsRange.min(), fpsRange.max());
+ strcat(supported, tmp);
+ }
- ret = encodeVFramerateCap(caps,
- mVarFramerates,
- ARRAY_SIZE(mVarFramerates),
- supported,
- defaultRange,
- MAX_PROP_VALUE_LENGTH);
+ CAMHAL_LOGD("Supported framerate ranges extended: %s", supported);
+ params->set(CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED, supported);
- if ( NO_ERROR != ret ) {
- CAMHAL_LOGEB("Error inserting supported preview framerate ranges 0x%x", ret);
- } else {
- params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported);
- CAMHAL_LOGDB("framerate ranges %s", supported);
- params->set(CameraProperties::FRAMERATE_RANGE, DEFAULT_FRAMERATE_RANGE_IMAGE);
- params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, DEFAULT_FRAMERATE_RANGE_VIDEO);
- params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, DEFAULT_FRAMERATE_RANGE_IMAGE);
- CAMHAL_LOGDB("Default framerate range: [%s]", DEFAULT_FRAMERATE_RANGE_IMAGE);
- }
+ // populate fixed frame rates
+ memset(supported, 0, sizeof(supported) - 1);
- LOG_FUNCTION_NAME;
+ for ( int i = 0; i < static_cast<int>(fpsRanges.size()); ++i ) {
+ const FpsRange & fpsRange = fpsRanges.itemAt(i);
+ if ( fpsRange.isFixed() && (fpsRange.min()%CameraHal::VFR_SCALE) == 0 ) {
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ char tmp[MAX_PROP_VALUE_LENGTH];
+ snprintf(tmp, sizeof(tmp) - 1, "%d", fpsRange.min()/CameraHal::VFR_SCALE);
+ strcat(supported, tmp);
+ }
+ }
- return ret;
+ CAMHAL_LOGD("Supported extended preview framerates: %s", supported);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT, supported);
+ }
+
+ return OK;
}
-status_t OMXCameraAdapter::insertEVs(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertEVs(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -727,12 +1137,13 @@ status_t OMXCameraAdapter::insertEVs(CameraProperties::Properties* params, OMX_T
snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.xEVCompensationMax * 10 ));
params->set(CameraProperties::SUPPORTED_EV_MAX, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -751,12 +1162,13 @@ status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params,
params->set(CameraProperties::SUPPORTED_ISO_VALUES, supported);
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertIPPModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertIPPModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
@@ -766,32 +1178,31 @@ status_t OMXCameraAdapter::insertIPPModes(CameraProperties::Properties* params,
//Off is always supported
strncat(supported, TICameraParameters::IPP_NONE, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
if ( caps.bLensDistortionCorrectionSupported ) {
- strncat(supported, TICameraParameters::IPP_LDC, MAX_PROP_NAME_LENGTH);
strncat(supported, PARAM_SEP, 1);
+ strncat(supported, TICameraParameters::IPP_LDC, MAX_PROP_NAME_LENGTH);
}
if ( caps.bISONoiseFilterSupported ) {
- strncat(supported, TICameraParameters::IPP_NSF, MAX_PROP_NAME_LENGTH);
strncat(supported, PARAM_SEP, 1);
+ strncat(supported, TICameraParameters::IPP_NSF, MAX_PROP_NAME_LENGTH);
}
if ( caps.bISONoiseFilterSupported && caps.bLensDistortionCorrectionSupported ) {
- strncat(supported, TICameraParameters::IPP_LDCNSF, MAX_PROP_NAME_LENGTH);
strncat(supported, PARAM_SEP, 1);
+ strncat(supported, TICameraParameters::IPP_LDCNSF, MAX_PROP_NAME_LENGTH);
}
- remove_last_sep(supported);
params->set(CameraProperties::SUPPORTED_IPP_MODES, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertWBModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertWBModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -803,19 +1214,22 @@ status_t OMXCameraAdapter::insertWBModes(CameraProperties::Properties* params, O
for ( unsigned int i = 0 ; i < caps.ulWhiteBalanceCount ; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eWhiteBalanceModes[i], WBalLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertEffects(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertEffects(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -827,19 +1241,22 @@ status_t OMXCameraAdapter::insertEffects(CameraProperties::Properties* params, O
for ( unsigned int i = 0 ; i < caps.ulColorEffectCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eColorEffects[i], EffLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
- remove_last_sep(supported);
+
params->set(CameraProperties::SUPPORTED_EFFECTS, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertExpModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertExpModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -851,19 +1268,77 @@ status_t OMXCameraAdapter::insertExpModes(CameraProperties::Properties* params,
for ( unsigned int i = 0 ; i < caps.ulExposureModeCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eExposureModes[i], ExpLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, supported);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertManualExpRanges(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
LOG_FUNCTION_NAME;
+#ifndef OMAP_TUNA
+ if (caps.nManualExpMin > caps.nManualExpMax) {
+#endif
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP, supported);
+#ifndef OMAP_TUNA
+ } else {
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.nManualExpMin);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.nManualExpMax);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) MANUAL_EXPOSURE_STEP);
+ params->set(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_STEP, supported);
+ }
+#endif
+
+ if (MANUAL_GAIN_ISO_MIN > caps.nSensitivityMax) {
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) 0);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP, supported); }
+ else {
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) MANUAL_GAIN_ISO_MIN);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) caps.nSensitivityMax);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", (int) MANUAL_GAIN_ISO_STEP);
+ params->set(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP, supported);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
return ret;
}
-status_t OMXCameraAdapter::insertFlashModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertFlashModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -875,20 +1350,26 @@ status_t OMXCameraAdapter::insertFlashModes(CameraProperties::Properties* params
for ( unsigned int i = 0 ; i < caps.ulFlashCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eFlashModes[i], FlashLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
- remove_last_sep(supported);
+ if ( strlen(supported) == 0 ) {
+ strncpy(supported, DEFAULT_FLASH_MODE, MAX_PROP_NAME_LENGTH);
+ }
+
params->set(CameraProperties::SUPPORTED_FLASH_MODES, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertSceneModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertSceneModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -900,34 +1381,31 @@ status_t OMXCameraAdapter::insertSceneModes(CameraProperties::Properties* params
for ( unsigned int i = 0 ; i < caps.ulSceneCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eSceneModes[i], SceneLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
- remove_last_sep(supported);
params->set(CameraProperties::SUPPORTED_SCENE_MODES, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
- const char *p;
LOG_FUNCTION_NAME;
memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
for ( unsigned int i = 0 ; i < caps.ulFocusModeCount; i++ ) {
- p = getLUTvalue_OMXtoHAL(caps.eFocusModes[i], FocusLUT);
- if ( NULL != p ) {
- strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
- }
+ getMultipleLUTvalue_OMXtoHAL(caps.eFocusModes[i], FocusLUT, supported);
}
// Check if focus is supported by camera
@@ -935,21 +1413,21 @@ status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params
caps.eFocusModes[0] == OMX_IMAGE_FocusControlOff) {
// Focus is not supported by camera
// Advertise this to app as infinitiy focus mode
- strncat(supported, CameraParameters::FOCUS_MODE_INFINITY, MAX_PROP_NAME_LENGTH);
- } else {
- // Focus is supported but these modes are not supported by the
- // capability feature. Apply manually
- strncat(supported, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, MAX_PROP_NAME_LENGTH);
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat(supported, android::CameraParameters::FOCUS_MODE_INFINITY, MAX_PROP_NAME_LENGTH);
}
params->set(CameraProperties::SUPPORTED_FOCUS_MODES, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertFlickerModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertFlickerModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -961,19 +1439,22 @@ status_t OMXCameraAdapter::insertFlickerModes(CameraProperties::Properties* para
for ( unsigned int i = 0 ; i < caps.ulFlickerCount; i++ ) {
p = getLUTvalue_OMXtoHAL(caps.eFlicker[i], FlickerLUT);
if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
strncat(supported, p, MAX_PROP_NAME_LENGTH);
- strncat(supported, PARAM_SEP, 1);
}
}
- remove_last_sep(supported);
+
params->set(CameraProperties::SUPPORTED_ANTIBANDING, supported);
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
@@ -991,31 +1472,490 @@ status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX
params->set(CameraProperties::MAX_NUM_METERING_AREAS, supported);
CAMHAL_LOGDB("Maximum supported exposure areas %s", supported);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertVNFSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+
LOG_FUNCTION_NAME;
+#ifndef OMAP_TUNA
+ if ( OMX_TRUE == caps.bVideoNoiseFilterSupported ) {
+ params->set(CameraProperties::VNF_SUPPORTED, android::CameraParameters::TRUE);
+ } else {
+#endif
+ params->set(CameraProperties::VNF_SUPPORTED, android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
return ret;
}
-status_t OMXCameraAdapter::insertLocks(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertVSTABSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
status_t ret = NO_ERROR;
- LOG_FUNCTION_NAME
+ LOG_FUNCTION_NAME;
+
+#ifndef OMAP_TUNA
+ if ( OMX_TRUE == caps.bVideoStabilizationSupported ) {
+ params->set(CameraProperties::VSTAB_SUPPORTED, android::CameraParameters::TRUE);
+ } else {
+#endif
+ params->set(CameraProperties::VSTAB_SUPPORTED, android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
- params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, DEFAULT_LOCK_SUPPORTED);
- params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, DEFAULT_LOCK_SUPPORTED);
+status_t OMXCameraAdapter::insertLocks(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
LOG_FUNCTION_NAME
+#ifndef OMAP_TUNA
+ if ( caps.bAELockSupported ) {
+ params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, android::CameraParameters::TRUE);
+ } else {
+#endif
+ params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+#ifndef OMAP_TUNA
+ if ( caps.bAWBLockSupported ) {
+ params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, android::CameraParameters::TRUE);
+ } else {
+#endif
+ params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT
+
return ret;
}
-status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertSenMount(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
char supported[MAX_PROP_VALUE_LENGTH];
const char *p;
+ unsigned int i = 0;
LOG_FUNCTION_NAME;
+ memset(supported, '\0', sizeof(supported));
+
+ // 1) Look up and assign sensor name
+ for (i = 0; i < ARRAY_SIZE(mSensorNames); i++) {
+ if(mSensorNames[i].num == caps.tSenMounting.nSenId) {
+ // sensor found
+ break;
+ }
+ }
+ if ( i == ARRAY_SIZE(mSensorNames) ) {
+ p = "UNKNOWN_SENSOR";
+ } else {
+ p = mSensorNames[i].param;
+ }
+ strncat(supported, p, REMAINING_BYTES(supported));
+ params->set(CameraProperties::CAMERA_NAME, supported);
+ params->set(CameraProperties::CAMERA_SENSOR_ID, caps.tSenMounting.nSenId);
+
+ // 2) Assign mounting rotation
+ params->set(CameraProperties::ORIENTATION_INDEX, caps.tSenMounting.nRotation);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertRaw(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ unsigned int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+#ifndef OMAP_TUNA
+ sprintf(supported,"%d",int(caps.uSenNativeResWidth));
+#endif
+ params->set(CameraProperties::RAW_WIDTH, supported);
+
+ memset(supported, '\0', sizeof(supported));
+#ifndef OMAP_TUNA
+ if (caps.bMechanicalMisalignmentSupported) {
+ sprintf(supported,"%d",int(caps.uSenNativeResHeight) * 2);
+ } else {
+ sprintf(supported,"%d",int(caps.uSenNativeResHeight));
+ }
+#endif
+ params->set(CameraProperties::RAW_HEIGHT, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertFacing(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+ unsigned int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+
+#ifndef OMAP_TUNA
+ for (i = 0; i < ARRAY_SIZE(mFacing); i++) {
+ if((OMX_TI_SENFACING_TYPE)mFacing[i].num == caps.tSenMounting.eFacing) {
+ break;
+ }
+ }
+#endif
+ if ( i == ARRAY_SIZE(mFacing) ) {
+ p = "UNKNOWN_FACING";
+ } else {
+ p = mFacing[i].param;
+ }
+ strncat(supported, p, REMAINING_BYTES(supported));
+ params->set(CameraProperties::FACING_INDEX, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertFocalLength(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+
+#ifndef OMAP_TUNA
+ sprintf(supported, "%d", caps.nFocalLength / 100);
+#endif
+ strncat(supported, ".", REMAINING_BYTES(supported));
+#ifndef OMAP_TUNA
+ sprintf(supported+(strlen(supported)*sizeof(char)), "%d", caps.nFocalLength % 100);
+#endif
+
+ params->set(CameraProperties::FOCAL_LENGTH, supported);
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertAutoConvergenceModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+ unsigned int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+#ifndef OMAP_TUNA
+ for ( unsigned int i = 0 ; i < caps.ulAutoConvModesCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eAutoConvModes[i], mAutoConvergenceLUT);
+ if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ }
+ strncat(supported, p, REMAINING_BYTES(supported));
+ }
+ }
+#endif
+ params->set(CameraProperties::AUTOCONVERGENCE_MODE_VALUES, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertManualConvergenceRange(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+#ifndef OMAP_TUNA
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.nManualConvMin ));
+#endif
+ params->set(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN, supported);
+
+#ifndef OMAP_TUNA
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.nManualConvMax ));
+#endif
+ params->set(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX, supported);
+
+#ifndef OMAP_TUNA
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.nManualConvMax != caps.nManualConvMin ));
+#endif
+ params->set(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_STEP, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+#ifndef OMAP_TUNA
+status_t OMXCameraAdapter::insertMechanicalMisalignmentCorrection(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ LOG_FUNCTION_NAME;
+
+ params->set(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED,
+ caps.bMechanicalMisalignmentSupported == OMX_TRUE ?
+ android::CameraParameters::TRUE : android::CameraParameters::FALSE);
+
+ return OK;
+}
+#endif
+
+status_t OMXCameraAdapter::insertCaptureModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', sizeof(supported));
+
+#ifndef OMAP_TUNA
+ // 3D mode detect: Misalignment is present only in 3d mode
+ if (caps.bMechanicalMisalignmentSupported)
+ {
+ strncat(supported, TICameraParameters::HIGH_QUALITY_MODE, REMAINING_BYTES(supported));
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::VIDEO_MODE, REMAINING_BYTES(supported));
+ }
+ else // 2D mode detect: Misalignment is present only in 3d mode
+ {
+#endif
+ strncat(supported, TICameraParameters::HIGH_QUALITY_MODE, REMAINING_BYTES(supported));
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::VIDEO_MODE, REMAINING_BYTES(supported));
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::HIGH_PERFORMANCE_MODE, REMAINING_BYTES(supported));
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::HIGH_QUALITY_ZSL_MODE, REMAINING_BYTES(supported));
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::CP_CAM_MODE, REMAINING_BYTES(supported));
+#endif
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::ZOOM_BRACKETING, REMAINING_BYTES(supported));
+#ifndef OMAP_TUNA
+ }
+#endif
+
+#ifndef OMAP_TUNA
+ for ( unsigned int i = 0 ; i < caps.ulBracketingModesCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eBracketingModes[i], mBracketingModesLUT);
+ if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ }
+ strncat(supported, p, REMAINING_BYTES(supported));
+ }
+ }
+#endif
+
+ params->set(CameraProperties::CAP_MODE_VALUES, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertLayout(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+ unsigned int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+#ifndef OMAP_TUNA
+ for ( unsigned int i = 0 ; i < caps.ulPrvFrameLayoutCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.ePrvFrameLayout[i], mLayoutLUT);
+ if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ }
+ }
+#endif
+ params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES, supported);
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+#ifndef OMAP_TUNA
+ for ( unsigned int i = 0 ; i < caps.ulCapFrameLayoutCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eCapFrameLayout[i], mLayoutLUT);
+ if ( NULL != p ) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ }
+ }
+#endif
+ params->set(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES, supported);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertVideoSnapshotSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+#ifndef OMAP_TUNA
+ if (caps.bStillCapDuringVideoSupported)
+ {
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, android::CameraParameters::TRUE);
+ }
+ else
+ {
+#endif
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertGBCESupported(CameraProperties::Properties* params,
+ const OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+#ifndef OMAP_TUNA
+ if (caps.bGbceSupported) {
+ params->set(CameraProperties::SUPPORTED_GBCE,
+ android::CameraParameters::TRUE);
+ } else {
+#endif
+ params->set(CameraProperties::SUPPORTED_GBCE,
+ android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertGLBCESupported(CameraProperties::Properties* params,
+ const OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+#ifndef OMAP_TUNA
+ if (caps.bGlbceSupported) {
+ params->set(CameraProperties::SUPPORTED_GLBCE,
+ android::CameraParameters::TRUE);
+ } else {
+#endif
+ params->set(CameraProperties::SUPPORTED_GLBCE,
+ android::CameraParameters::FALSE);
+#ifndef OMAP_TUNA
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char *pos, *str, *def;
+ char temp[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ /* If default is supported - set it, else - set first supported */
+ if (strstr(params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES), DEFAULT_S3D_PREVIEW_LAYOUT)) {
+ strncpy(temp, DEFAULT_S3D_PREVIEW_LAYOUT, MAX_PROP_VALUE_LENGTH - 1);
+ } else {
+ strncpy(temp, params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES),
+ MAX_PROP_VALUE_LENGTH - 1);
+ if ((pos = strstr(temp, PARAM_SEP))) {
+ *pos = '\0';
+ }
+ }
+ params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT, temp);
+
+ if (!strcmp(TICameraParameters::S3D_TB_FULL, temp)) {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, params->get(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+ } else if (!strcmp(TICameraParameters::S3D_SS_FULL, temp)) {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, params->get(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+ } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, temp))
+ || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, temp))) {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, params->get(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+ }
+
+ /* If default is supported - set it, else - set first supported */
+ if (strstr(params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES), DEFAULT_S3D_PICTURE_LAYOUT)) {
+ strncpy(temp, DEFAULT_S3D_PICTURE_LAYOUT, MAX_PROP_VALUE_LENGTH - 1);
+ } else {
+ strncpy(temp, params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES),
+ MAX_PROP_VALUE_LENGTH - 1);
+ if ((pos = strstr(temp, PARAM_SEP))) {
+ *pos = '\0';
+ }
+ }
+ params->set(CameraProperties::S3D_CAP_FRAME_LAYOUT, temp);
+
+ if (!strcmp(TICameraParameters::S3D_TB_FULL, temp)) {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, params->get(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
+ } else if (!strcmp(TICameraParameters::S3D_SS_FULL, temp)) {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, params->get(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
+ } else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, temp))
+ || (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, temp))) {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, params->get(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
+ }
+
params->set(CameraProperties::ANTIBANDING, DEFAULT_ANTIBANDING);
params->set(CameraProperties::BRIGHTNESS, DEFAULT_BRIGHTNESS);
params->set(CameraProperties::CONTRAST, DEFAULT_CONTRAST);
@@ -1024,7 +1964,7 @@ status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params,
params->set(CameraProperties::SUPPORTED_EV_STEP, DEFAULT_EV_STEP);
params->set(CameraProperties::EXPOSURE_MODE, DEFAULT_EXPOSURE_MODE);
params->set(CameraProperties::FLASH_MODE, DEFAULT_FLASH_MODE);
- char *pos = strstr(params->get(CameraProperties::SUPPORTED_FOCUS_MODES), DEFAULT_FOCUS_MODE_PREFERRED);
+ pos = strstr(params->get(CameraProperties::SUPPORTED_FOCUS_MODES), DEFAULT_FOCUS_MODE_PREFERRED);
if ( NULL != pos )
{
params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE_PREFERRED);
@@ -1034,81 +1974,99 @@ status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params,
params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE);
}
params->set(CameraProperties::IPP, DEFAULT_IPP);
- params->set(CameraProperties::GBCE, DEFAULT_GBCE);
+ params->set(CameraProperties::GBCE, android::CameraParameters::FALSE);
+ params->set(CameraProperties::GLBCE, android::CameraParameters::FALSE);
params->set(CameraProperties::ISO_MODE, DEFAULT_ISO_MODE);
params->set(CameraProperties::JPEG_QUALITY, DEFAULT_JPEG_QUALITY);
params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, DEFAULT_THUMBNAIL_QUALITY);
params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, DEFAULT_THUMBNAIL_SIZE);
params->set(CameraProperties::PICTURE_FORMAT, DEFAULT_PICTURE_FORMAT);
- params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+
+ if (!strcmp(params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT),
+ TICameraParameters::S3D_TB_FULL)) {
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_TB_SIZE);
+ } else if (!strcmp(params->get(CameraProperties::S3D_CAP_FRAME_LAYOUT),
+ TICameraParameters::S3D_SS_FULL)) {
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SS_SIZE);
+ } else {
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+ }
+
+ if (!strcmp(params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT),
+ TICameraParameters::S3D_TB_FULL)) {
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_TB_SIZE);
+ } else if (!strcmp(params->get(CameraProperties::S3D_PRV_FRAME_LAYOUT),
+ TICameraParameters::S3D_SS_FULL)) {
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SS_SIZE);
+ } else {
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+ }
+
params->set(CameraProperties::PREVIEW_FORMAT, DEFAULT_PREVIEW_FORMAT);
- params->set(CameraProperties::PREVIEW_FRAME_RATE, DEFAULT_FRAMERATE);
- params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+
+ /* Set default value if supported, otherwise set max supported value */
+ strncpy(temp, params->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES),
+ MAX_PROP_VALUE_LENGTH - 1);
+ def = str = temp;
+ while (1) {
+ if ((pos = strstr(str, PARAM_SEP))) {
+ *pos = '\0';
+ }
+ if (!strcmp(str, DEFAULT_FRAMERATE)) {
+ def = str;
+ break;
+ }
+ if (atoi(str) > atoi(def)) {
+ def = str;
+ }
+ if (pos == NULL) {
+ break;
+ }
+ str = pos + strlen(PARAM_SEP);
+ }
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, def);
+
params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS);
params->set(CameraProperties::REQUIRED_IMAGE_BUFS, DEFAULT_NUM_PIC_BUFS);
params->set(CameraProperties::SATURATION, DEFAULT_SATURATION);
params->set(CameraProperties::SCENE_MODE, DEFAULT_SCENE_MODE);
params->set(CameraProperties::SHARPNESS, DEFAULT_SHARPNESS);
params->set(CameraProperties::VSTAB, DEFAULT_VSTAB);
- params->set(CameraProperties::VSTAB_SUPPORTED, DEFAULT_VSTAB_SUPPORTED);
+ params->set(CameraProperties::VNF, DEFAULT_VNF);
params->set(CameraProperties::WHITEBALANCE, DEFAULT_WB);
params->set(CameraProperties::ZOOM, DEFAULT_ZOOM);
params->set(CameraProperties::MAX_FD_HW_FACES, DEFAULT_MAX_FD_HW_FACES);
params->set(CameraProperties::MAX_FD_SW_FACES, DEFAULT_MAX_FD_SW_FACES);
params->set(CameraProperties::AUTO_EXPOSURE_LOCK, DEFAULT_AE_LOCK);
params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK, DEFAULT_AWB_LOCK);
- if(caps.tSenMounting.nSenId == 305) {
- params->set(CameraProperties::FOCAL_LENGTH, DEFAULT_FOCAL_LENGTH_PRIMARY);
- } else {
- params->set(CameraProperties::FOCAL_LENGTH, DEFAULT_FOCAL_LENGTH_SECONDARY);
- }
params->set(CameraProperties::HOR_ANGLE, DEFAULT_HOR_ANGLE);
params->set(CameraProperties::VER_ANGLE, DEFAULT_VER_ANGLE);
- params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, DEFAULT_VIDEO_SNAPSHOT_SUPPORTED);
params->set(CameraProperties::VIDEO_SIZE, DEFAULT_VIDEO_SIZE);
- params->set(CameraProperties::PREFERRED_PREVIEW_SIZE_FOR_VIDEO, DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO);
+ params->set(CameraProperties::SENSOR_ORIENTATION, DEFAULT_SENSOR_ORIENTATION);
+ params->set(CameraProperties::AUTOCONVERGENCE_MODE, DEFAULT_AUTOCONVERGENCE_MODE);
+ params->set(CameraProperties::MANUAL_CONVERGENCE, DEFAULT_MANUAL_CONVERGENCE);
+ params->set(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION, DEFAULT_MECHANICAL_MISALIGNMENT_CORRECTION_MODE);
+
+ char property[PROPERTY_VALUE_MAX];
+ property_get("ro.product.manufacturer",
+ property,
+ DEFAULT_EXIF_MAKE);
+ property[0] = toupper(property[0]);
+ params->set(CameraProperties::EXIF_MAKE, property);
+ property_get("ro.product.model",
+ property,
+ DEFAULT_EXIF_MODEL);
+ property[0] = toupper(property[0]);
+ params->set(CameraProperties::EXIF_MODEL, property);
- LOG_FUNCTION_NAME;
-
- return ret;
-}
-
-status_t OMXCameraAdapter::insertSenMount(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
- status_t ret = NO_ERROR;
- char supported[MAX_PROP_VALUE_LENGTH];
- const char *p;
- unsigned int i = 0;
-
- LOG_FUNCTION_NAME;
-
- memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
-
- // 1) Look up and assign sensor name
- for (i = 0; i < ARRAY_SIZE(mSensorNames); i++) {
- if(mSensorNames[i].num == caps.tSenMounting.nSenId) {
- // sensor found
- break;
- }
- }
- if ( i == ARRAY_SIZE(mSensorNames) ) {
- p = "UNKNOWN_SENSOR";
- } else {
- p = mSensorNames[i].param;
- }
- strncat(supported, p, MAX_PROP_NAME_LENGTH);
- params->set(CameraProperties::CAMERA_NAME, supported);
-
- // 2) Assign mounting rotation
- params->set(CameraProperties::ORIENTATION_INDEX, caps.tSenMounting.nRotation);
-
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
status_t ret = NO_ERROR;
- char supported[MAX_PROP_VALUE_LENGTH];
LOG_FUNCTION_NAME;
@@ -1141,10 +2099,6 @@ status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* para
}
if ( NO_ERROR == ret ) {
- ret = insertVFramerates(params, caps);
- }
-
- if ( NO_ERROR == ret ) {
ret = insertEVs(params, caps);
}
@@ -1169,6 +2123,10 @@ status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* para
}
if ( NO_ERROR == ret ) {
+ ret = insertManualExpRanges(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
ret = insertFlashModes(params, caps);
}
@@ -1191,46 +2149,368 @@ status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* para
if ( NO_ERROR == ret ) {
ret = insertLocks(params, caps);
}
+
if ( NO_ERROR == ret) {
ret = insertAreas(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertFacing(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertFocalLength(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertAutoConvergenceModes(params, caps);
+ }
+ if ( NO_ERROR == ret) {
+ ret = insertManualConvergenceRange(params, caps);
+ }
+
+#ifndef OMAP_TUNA
+ if ( NO_ERROR == ret) {
+ ret = insertMechanicalMisalignmentCorrection(params, caps);
+ }
+#endif
+
+ if ( NO_ERROR == ret) {
+ ret = insertRaw(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertCaptureModes(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertLayout(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertVideoSnapshotSupported(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertVSTABSupported(params, caps);
+ }
+
+ if ( NO_ERROR == ret) {
+ ret = insertVNFSupported(params, caps);
}
//NOTE: Ensure that we always call insertDefaults after inserting the supported capabilities
//as there are checks inside insertDefaults to make sure a certain default is supported
// or not
if ( NO_ERROR == ret ) {
- ret = insertVideoSizes(params, caps);
+ ret = insertVideoSizes(params, caps);
}
- if ( NO_ERROR == ret ) {
- ret = insertDefaults(params, caps);
+ if ( NO_ERROR == ret) {
+ ret = insertGBCESupported(params, caps);
}
+ if ( NO_ERROR == ret) {
+ ret = insertGLBCESupported(params, caps);
+ }
+ if ( NO_ERROR == ret ) {
+ ret = insertDefaults(params, caps);
+ }
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+
+bool OMXCameraAdapter::_checkOmxTiCap(const OMX_TI_CAPTYPE & caps)
+{
+#define CAMHAL_CHECK_OMX_TI_CAP(countVar, arrayVar) \
+ do { \
+ const int count = static_cast<int>(caps.countVar); \
+ const int maxSize = CAMHAL_SIZE_OF_ARRAY(caps.arrayVar); \
+ if ( count < 0 || count > maxSize ) \
+ { \
+ CAMHAL_LOGE("OMX_TI_CAPTYPE verification failed"); \
+ CAMHAL_LOGE(" variable: OMX_TI_CAPTYPE::" #countVar \
+ ", value: %d, max allowed: %d", \
+ count, maxSize); \
+ return false; \
+ } \
+ } while (0)
+
+ CAMHAL_CHECK_OMX_TI_CAP(ulPreviewFormatCount, ePreviewFormats);
+ CAMHAL_CHECK_OMX_TI_CAP(ulImageFormatCount, eImageFormats);
+ CAMHAL_CHECK_OMX_TI_CAP(ulWhiteBalanceCount, eWhiteBalanceModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulColorEffectCount, eColorEffects);
+ CAMHAL_CHECK_OMX_TI_CAP(ulFlickerCount, eFlicker);
+ CAMHAL_CHECK_OMX_TI_CAP(ulExposureModeCount, eExposureModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulFocusModeCount, eFocusModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulSceneCount, eSceneModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulFlashCount, eFlashModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulPrvVarFPSModesCount, tPrvVarFPSModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulCapVarFPSModesCount, tCapVarFPSModes);
+#ifndef OMAP_TUNA
+ CAMHAL_CHECK_OMX_TI_CAP(ulAutoConvModesCount, eAutoConvModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulBracketingModesCount, eBracketingModes);
+ CAMHAL_CHECK_OMX_TI_CAP(ulImageCodingFormatCount, eImageCodingFormat);
+ CAMHAL_CHECK_OMX_TI_CAP(ulPrvFrameLayoutCount, ePrvFrameLayout);
+ CAMHAL_CHECK_OMX_TI_CAP(ulCapFrameLayoutCount, eCapFrameLayout);
+#endif
+
+#undef CAMHAL_CHECK_OMX_TI_CAP
+
+ return true;
+}
+
+
+bool OMXCameraAdapter::_dumpOmxTiCap(const int sensorId, const OMX_TI_CAPTYPE & caps)
+{
+ if ( !_checkOmxTiCap(caps) )
+ {
+ CAMHAL_LOGE("OMX_TI_CAPTYPE structure is invalid");
+ return false;
+ }
+
+ CAMHAL_LOGD("===================================================");
+ CAMHAL_LOGD("---- Dumping OMX capabilities for sensor id: %d ----", sensorId);
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulPreviewFormatCount = %d", int(caps.ulPreviewFormatCount));
+ for ( int i = 0; i < int(caps.ulPreviewFormatCount); ++i )
+ CAMHAL_LOGD(" ePreviewFormats[%2d] = %d", i, int(caps.ePreviewFormats[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulImageFormatCount = %d", int(caps.ulImageFormatCount));
+ for ( int i = 0; i < int(caps.ulImageFormatCount); ++i )
+ CAMHAL_LOGD(" eImageFormats[%2d] = %d", i, int(caps.eImageFormats[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tPreviewResRange.nWidthMin = %d", int(caps.tPreviewResRange.nWidthMin));
+ CAMHAL_LOGD("tPreviewResRange.nHeightMin = %d", int(caps.tPreviewResRange.nHeightMin));
+ CAMHAL_LOGD("tPreviewResRange.nWidthMax = %d", int(caps.tPreviewResRange.nWidthMax));
+ CAMHAL_LOGD("tPreviewResRange.nHeightMax = %d", int(caps.tPreviewResRange.nHeightMax));
+ CAMHAL_LOGD("tPreviewResRange.nMaxResInPixels = %d", int(caps.tPreviewResRange.nMaxResInPixels));
+
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tRotatedPreviewResRange.nWidthMin = %d", int(caps.tRotatedPreviewResRange.nWidthMin));
+ CAMHAL_LOGD("tRotatedPreviewResRange.nHeightMin = %d", int(caps.tRotatedPreviewResRange.nHeightMin));
+ CAMHAL_LOGD("tRotatedPreviewResRange.nWidthMax = %d", int(caps.tRotatedPreviewResRange.nWidthMax));
+ CAMHAL_LOGD("tRotatedPreviewResRange.nHeightMax = %d", int(caps.tRotatedPreviewResRange.nHeightMax));
+ CAMHAL_LOGD("tRotatedPreviewResRange.nMaxResInPixels = %d", int(caps.tRotatedPreviewResRange.nMaxResInPixels));
+#endif
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tImageResRange.nWidthMin = %d", int(caps.tImageResRange.nWidthMin));
+ CAMHAL_LOGD("tImageResRange.nHeightMin = %d", int(caps.tImageResRange.nHeightMin));
+ CAMHAL_LOGD("tImageResRange.nWidthMax = %d", int(caps.tImageResRange.nWidthMax));
+ CAMHAL_LOGD("tImageResRange.nHeightMax = %d", int(caps.tImageResRange.nHeightMax));
+ CAMHAL_LOGD("tImageResRange.nMaxResInPixels = %d", int(caps.tImageResRange.nMaxResInPixels));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tThumbResRange.nWidthMin = %d", int(caps.tThumbResRange.nWidthMin));
+ CAMHAL_LOGD("tThumbResRange.nHeightMin = %d", int(caps.tThumbResRange.nHeightMin));
+ CAMHAL_LOGD("tThumbResRange.nWidthMax = %d", int(caps.tThumbResRange.nWidthMax));
+ CAMHAL_LOGD("tThumbResRange.nHeightMax = %d", int(caps.tThumbResRange.nHeightMax));
+ CAMHAL_LOGD("tThumbResRange.nMaxResInPixels = %d", int(caps.tThumbResRange.nMaxResInPixels));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulWhiteBalanceCount = %d", int(caps.ulWhiteBalanceCount));
+ for ( int i = 0; i < int(caps.ulWhiteBalanceCount); ++i )
+ CAMHAL_LOGD(" eWhiteBalanceModes[%2d] = 0x%08x", i, int(caps.eWhiteBalanceModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulColorEffectCount = %d", int(caps.ulColorEffectCount));
+ for ( int i = 0; i < int(caps.ulColorEffectCount); ++i )
+ CAMHAL_LOGD(" eColorEffects[%2d] = 0x%08x", i, int(caps.eColorEffects[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("xMaxWidthZoom = %d", int(caps.xMaxWidthZoom));
+ CAMHAL_LOGD("xMaxHeightZoom = %d", int(caps.xMaxHeightZoom));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulFlickerCount = %d", int(caps.ulFlickerCount));
+ for ( int i = 0; i < int(caps.ulFlickerCount); ++i )
+ CAMHAL_LOGD(" eFlicker[%2d] = %d", i, int(caps.eFlicker[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulExposureModeCount = %d", int(caps.ulExposureModeCount));
+ for ( int i = 0; i < int(caps.ulExposureModeCount); ++i )
+ CAMHAL_LOGD(" eExposureModes[%2d] = 0x%08x", i, int(caps.eExposureModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("bLensDistortionCorrectionSupported = %d", int(caps.bLensDistortionCorrectionSupported));
+ CAMHAL_LOGD("bISONoiseFilterSupported = %d", int(caps.bISONoiseFilterSupported));
+ CAMHAL_LOGD("xEVCompensationMin = %d", int(caps.xEVCompensationMin));
+ CAMHAL_LOGD("xEVCompensationMax = %d", int(caps.xEVCompensationMax));
+ CAMHAL_LOGD("nSensitivityMax = %d", int(caps.nSensitivityMax));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulFocusModeCount = %d", int(caps.ulFocusModeCount));
+ for ( int i = 0; i < int(caps.ulFocusModeCount); ++i )
+ CAMHAL_LOGD(" eFocusModes[%2d] = 0x%08x", i, int(caps.eFocusModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulSceneCount = %d", int(caps.ulSceneCount));
+ for ( int i = 0; i < int(caps.ulSceneCount); ++i )
+ CAMHAL_LOGD(" eSceneModes[%2d] = %d", i, int(caps.eSceneModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulFlashCount = %d", int(caps.ulFlashCount));
+ for ( int i = 0; i < int(caps.ulFlashCount); ++i )
+ CAMHAL_LOGD(" eFlashModes[%2d] = %d", i, int(caps.eFlashModes[i]));
+
+ CAMHAL_LOGD("xFramerateMin = %d", int(caps.xFramerateMin));
+ CAMHAL_LOGD("xFramerateMax = %d", int(caps.xFramerateMax));
+ CAMHAL_LOGD("bContrastSupported = %d", int(caps.bContrastSupported));
+ CAMHAL_LOGD("bSaturationSupported = %d", int(caps.bSaturationSupported));
+ CAMHAL_LOGD("bBrightnessSupported = %d", int(caps.bBrightnessSupported));
+ CAMHAL_LOGD("bProcessingLevelSupported = %d", int(caps.bProcessingLevelSupported));
+ CAMHAL_LOGD("bQFactorSupported = %d", int(caps.bQFactorSupported));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulPrvVarFPSModesCount = %d", int(caps.ulPrvVarFPSModesCount));
+ for ( int i = 0; i < int(caps.ulPrvVarFPSModesCount); ++i )
+ {
+ CAMHAL_LOGD(" tPrvVarFPSModes[%d].nVarFPSMin = %d", i, int(caps.tPrvVarFPSModes[i].nVarFPSMin));
+ CAMHAL_LOGD(" tPrvVarFPSModes[%d].nVarFPSMax = %d", i, int(caps.tPrvVarFPSModes[i].nVarFPSMax));
+ }
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulCapVarFPSModesCount = %d", int(caps.ulCapVarFPSModesCount));
+ for ( int i = 0; i < int(caps.ulCapVarFPSModesCount); ++i )
+ {
+ CAMHAL_LOGD(" tCapVarFPSModes[%d].nVarFPSMin = %d", i, int(caps.tCapVarFPSModes[i].nVarFPSMin));
+ CAMHAL_LOGD(" tCapVarFPSModes[%d].nVarFPSMax = %d", i, int(caps.tCapVarFPSModes[i].nVarFPSMax));
+ }
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("tSenMounting.nSenId = %d", int(caps.tSenMounting.nSenId));
+ CAMHAL_LOGD("tSenMounting.nRotation = %d", int(caps.tSenMounting.nRotation));
+ CAMHAL_LOGD("tSenMounting.bMirror = %d", int(caps.tSenMounting.bMirror));
+ CAMHAL_LOGD("tSenMounting.bFlip = %d", int(caps.tSenMounting.bFlip));
+ CAMHAL_LOGD("tSenMounting.eFacing = %d", int(caps.tSenMounting.eFacing));
+
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulAutoConvModesCount = %d", int(caps.ulAutoConvModesCount));
+ for ( int i = 0; i < int(caps.ulAutoConvModesCount); ++i )
+ CAMHAL_LOGD(" eAutoConvModes[%2d] = %d", i, int(caps.eAutoConvModes[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulBracketingModesCount = %d", int(caps.ulBracketingModesCount));
+ for ( int i = 0; i < int(caps.ulBracketingModesCount); ++i )
+ CAMHAL_LOGD(" eBracketingModes[%2d] = %d", i, int(caps.eBracketingModes[i]));
+#endif
+
+ CAMHAL_LOGD("");
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("bGbceSupported = %d", int(caps.bGbceSupported));
+#endif
+ CAMHAL_LOGD("bRawJpegSupported = %d", int(caps.bRawJpegSupported));
+
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulImageCodingFormatCount = %d", int(caps.ulImageCodingFormatCount));
+ for ( int i = 0; i < int(caps.ulImageCodingFormatCount); ++i )
+ CAMHAL_LOGD(" eImageCodingFormat[%2d] = %d", i, int(caps.eImageCodingFormat[i]));
+#endif
+
+ CAMHAL_LOGD("");
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("uSenNativeResWidth = %d", int(caps.uSenNativeResWidth));
+ CAMHAL_LOGD("uSenNativeResHeight = %d", int(caps.uSenNativeResHeight));
+#endif
+ CAMHAL_LOGD("ulAlgoAreasFocusCount = %d", int(caps.ulAlgoAreasFocusCount));
+ CAMHAL_LOGD("ulAlgoAreasExposureCount = %d", int(caps.ulAlgoAreasExposureCount));
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("bAELockSupported = %d", int(caps.bAELockSupported));
+ CAMHAL_LOGD("bAWBLockSupported = %d", int(caps.bAWBLockSupported));
+#endif
+ CAMHAL_LOGD("bAFLockSupported = %d", int(caps.bAFLockSupported));
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("nFocalLength = %d", int(caps.nFocalLength));
+#endif
+
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulPrvFrameLayoutCount = %d", int(caps.ulPrvFrameLayoutCount));
+ for ( int i = 0; i < int(caps.ulPrvFrameLayoutCount); ++i )
+ CAMHAL_LOGD(" ePrvFrameLayout[%2d] = %d", i, int(caps.ePrvFrameLayout[i]));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("ulCapFrameLayoutCount = %d", int(caps.ulCapFrameLayoutCount));
+ for ( int i = 0; i < int(caps.ulCapFrameLayoutCount); ++i )
+ CAMHAL_LOGD(" eCapFrameLayout[%2d] = %d", i, int(caps.eCapFrameLayout[i]));
+#endif
+
+ CAMHAL_LOGD("");
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("bVideoNoiseFilterSupported = %d", int(caps.bVideoNoiseFilterSupported ));
+ CAMHAL_LOGD("bVideoStabilizationSupported = %d", int(caps.bVideoStabilizationSupported ));
+#endif
+ CAMHAL_LOGD("bStillCapDuringVideoSupported = %d", int(caps.bStillCapDuringVideoSupported ));
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("bMechanicalMisalignmentSupported = %d", int(caps.bMechanicalMisalignmentSupported));
+#endif
+ CAMHAL_LOGD("bFacePrioritySupported = %d", int(caps.bFacePrioritySupported ));
+ CAMHAL_LOGD("bRegionPrioritySupported = %d", int(caps.bRegionPrioritySupported ));
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("bGlbceSupported = %d", int(caps.bGlbceSupported));
+#endif
+
+ CAMHAL_LOGD("");
+#ifndef OMAP_TUNA
+ CAMHAL_LOGD("nManualConvMin = %d", int(caps.nManualConvMin ));
+ CAMHAL_LOGD("nManualConvMax = %d", int(caps.nManualConvMax ));
+ CAMHAL_LOGD("nManualExpMin = %d", int(caps.nManualExpMin ));
+ CAMHAL_LOGD("nManualExpMax = %d", int(caps.nManualExpMax ));
+#endif
+ CAMHAL_LOGD("nBrightnessMin = %d", int(caps.nBrightnessMin ));
+ CAMHAL_LOGD("nBrightnessMax = %d", int(caps.nBrightnessMax ));
+ CAMHAL_LOGD("nContrastMin = %d", int(caps.nContrastMin ));
+ CAMHAL_LOGD("nContrastMax = %d", int(caps.nContrastMax ));
+ CAMHAL_LOGD("nSharpnessMin = %d", int(caps.nSharpnessMin ));
+ CAMHAL_LOGD("nSharpnessMax = %d", int(caps.nSharpnessMax ));
+ CAMHAL_LOGD("nSaturationMin = %d", int(caps.nSaturationMin ));
+ CAMHAL_LOGD("nSaturationMax = %d", int(caps.nSaturationMax ));
+
+ CAMHAL_LOGD("");
+ CAMHAL_LOGD("------------------- end of dump -------------------");
+ CAMHAL_LOGD("===================================================");
+
+ return true;
+}
+
/*****************************************
* public exposed function declarations
*****************************************/
-status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HANDLETYPE handle) {
+status_t OMXCameraAdapter::getCaps(const int sensorId, CameraProperties::Properties* params, OMX_HANDLETYPE handle)
+{
status_t ret = NO_ERROR;
int caps_size = 0;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_TI_CAPTYPE** caps = NULL;;
+ CameraBuffer *bufferlist;
+ OMX_TI_CAPTYPE* caps;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
MemoryManager memMgr;
LOG_FUNCTION_NAME;
+ ret = memMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
+
// allocate tiler (or ion) buffer for caps (size is always a multiple of 4K)
caps_size = ((sizeof(OMX_TI_CAPTYPE)+4095)/4096)*4096;
- caps = (OMX_TI_CAPTYPE**) memMgr.allocateBuffer(0, 0, NULL, caps_size, 1);
+ bufferlist = memMgr.allocateBufferList(0, 0, NULL, caps_size, 1);
+ caps = (OMX_TI_CAPTYPE*) bufferlist[0].opaque;
if (!caps) {
CAMHAL_LOGEB("Error allocating buffer for caps %d", eError);
@@ -1239,13 +2519,13 @@ status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HAN
}
// initialize structures to be passed to OMX Camera
- OMX_INIT_STRUCT_PTR (caps[0], OMX_TI_CAPTYPE);
- caps[0]->nPortIndex = OMX_ALL;
+ OMX_INIT_STRUCT_PTR (caps, OMX_TI_CAPTYPE);
+ caps->nPortIndex = OMX_ALL;
OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
sharedBuffer.nPortIndex = OMX_ALL;
sharedBuffer.nSharedBuffSize = caps_size;
- sharedBuffer.pSharedBuff = (OMX_U8 *) caps[0];
+ sharedBuffer.pSharedBuff = (OMX_U8 *) camera_buffer_get_omx_ptr (&bufferlist[0]);
// Get capabilities from OMX Camera
eError = OMX_GetConfig(handle, (OMX_INDEXTYPE) OMX_TI_IndexConfigCamCapabilities, &sharedBuffer);
@@ -1257,23 +2537,26 @@ status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HAN
CAMHAL_LOGDA("OMX capability query success");
}
+#ifdef CAMERAHAL_DEBUG
+ _dumpOmxTiCap(sensorId, *caps);
+#endif
+
// Translate and insert Ducati capabilities to CameraProperties
if ( NO_ERROR == ret ) {
- ret = insertCapabilities(params, *caps[0]);
+ ret = insertCapabilities(params, *caps);
}
- CAMHAL_LOGDB("sen mount id=%u", (unsigned int)caps[0]->tSenMounting.nSenId);
-
+ CAMHAL_LOGDB("sen mount id=%u", (unsigned int)caps->tSenMounting.nSenId);
+ CAMHAL_LOGDB("facing id=%u", (unsigned int)caps->tSenMounting.eFacing);
EXIT:
- if (caps) {
- memMgr.freeBuffer((void*) caps);
- caps = NULL;
+ if (bufferlist) {
+ memMgr.freeBufferList(bufferlist);
}
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-};
-
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXCapture.cpp b/camera/OMXCameraAdapter/OMXCapture.cpp
index 34a0357..28a0ab1 100644
--- a/camera/OMXCameraAdapter/OMXCapture.cpp
+++ b/camera/OMXCameraAdapter/OMXCapture.cpp
@@ -21,32 +21,41 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include "ErrorUtils.h"
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersCapture(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
const char *str = NULL;
int w, h;
OMX_COLOR_FORMATTYPE pixFormat;
+ CodingMode codingMode = mCodingMode;
const char *valstr = NULL;
int varint = 0;
+ OMX_TI_STEREOFRAMELAYOUTTYPE capFrmLayout;
+ bool inCaptureState = false;
LOG_FUNCTION_NAME;
OMXCameraPortParameters *cap;
cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ capFrmLayout = cap->mFrameLayoutType;
+#ifndef OMAP_TUNA
+ setParamS3D(mCameraAdapterParameters.mImagePortIndex,
+ params.get(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT));
+#endif
+ if (capFrmLayout != cap->mFrameLayoutType) {
+ mPendingCaptureSettings |= SetFormat;
+ }
+
params.getPictureSize(&w, &h);
if ( ( w != ( int ) cap->mWidth ) ||
@@ -64,84 +73,170 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
CAMHAL_LOGVB("Image: cap.mHeight = %d", (int)cap->mHeight);
if ((valstr = params.getPictureFormat()) != NULL) {
- if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
CAMHAL_LOGDA("CbYCrY format selected");
pixFormat = OMX_COLOR_FormatCbYCrY;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_YUV422I;
- } else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
CAMHAL_LOGDA("YUV420SP format selected");
pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_YUV420SP;
- } else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
CAMHAL_LOGDA("RGB565 format selected");
pixFormat = OMX_COLOR_Format16bitRGB565;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_RGB565;
- } else if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_JPEG) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_RGB565;
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_JPEG) == 0) {
CAMHAL_LOGDA("JPEG format selected");
pixFormat = OMX_COLOR_FormatUnused;
- mCodingMode = CodingNone;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_JPEG;
- } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_JPS) == 0) {
+ codingMode = CodingJPEG;
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_JPEG;
+ } else if (strcmp(valstr, TICameraParameters::PIXEL_FORMAT_JPS) == 0) {
CAMHAL_LOGDA("JPS format selected");
pixFormat = OMX_COLOR_FormatUnused;
- mCodingMode = CodingJPS;
+ codingMode = CodingJPS;
mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_JPS;
- } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_MPO) == 0) {
+ } else if (strcmp(valstr, TICameraParameters::PIXEL_FORMAT_MPO) == 0) {
CAMHAL_LOGDA("MPO format selected");
pixFormat = OMX_COLOR_FormatUnused;
- mCodingMode = CodingMPO;
+ codingMode = CodingMPO;
mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_MPO;
- } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_RAW) == 0) {
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
CAMHAL_LOGDA("RAW Picture format selected");
pixFormat = OMX_COLOR_FormatRawBayer10bit;
- mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_RAW;
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
} else {
CAMHAL_LOGEA("Invalid format, JPEG format selected as default");
pixFormat = OMX_COLOR_FormatUnused;
+ codingMode = CodingJPEG;
mPictureFormatFromClient = NULL;
}
} else {
CAMHAL_LOGEA("Picture format is NULL, defaulting to JPEG");
pixFormat = OMX_COLOR_FormatUnused;
+ codingMode = CodingJPEG;
mPictureFormatFromClient = NULL;
}
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ mRawCapture = false;
+ mYuvCapture = false;
+
+ valstr = params.get(TICameraParameters::KEY_CAP_MODE);
+ if ( (!valstr || strcmp(valstr, TICameraParameters::HIGH_QUALITY_MODE) == 0) &&
+ access(kRawImagesOutputDirPath, F_OK) != -1 ) {
+ mRawCapture = true;
+ }
+
+ if (mRawCapture && (access(kYuvImagesOutputDirPath, F_OK) != -1)) {
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ mYuvCapture = true;
+ }
+#endif
// JPEG capture is not supported in video mode by OMX Camera
// Set capture format to yuv422i...jpeg encode will
// be done on A9
valstr = params.get(TICameraParameters::KEY_CAP_MODE);
if ( (valstr && !strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE)) &&
- (pixFormat == OMX_COLOR_FormatUnused) ) {
+ (pixFormat == OMX_COLOR_FormatUnused) ) {
CAMHAL_LOGDA("Capturing in video mode...selecting yuv422i");
pixFormat = OMX_COLOR_FormatCbYCrY;
}
- if ( pixFormat != cap->mColorFormat )
- {
+ if (pixFormat != cap->mColorFormat || codingMode != mCodingMode) {
mPendingCaptureSettings |= SetFormat;
cap->mColorFormat = pixFormat;
- }
+ mCodingMode = codingMode;
+ }
#ifdef OMAP_ENHANCEMENT
+ str = params.get(TICameraParameters::KEY_TEMP_BRACKETING);
+ if ( ( str != NULL ) &&
+ ( strcmp(str, android::CameraParameters::TRUE) == 0 ) ) {
- str = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
- if ( NULL != str ) {
- parseExpRange(str, mExposureBracketingValues, EXP_BRACKET_RANGE, mExposureBracketingValidEntries);
+ if ( !mBracketingSet ) {
+ mPendingCaptureSettings |= SetExpBracket;
+ }
+
+ mBracketingSet = true;
+ } else {
+
+ if ( mBracketingSet ) {
+ mPendingCaptureSettings |= SetExpBracket;
+ }
+
+ mBracketingSet = false;
+ }
+
+ if ( (str = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE)) != NULL ) {
+ parseExpRange(str, mExposureBracketingValues, NULL,
+ mExposureGainBracketingModes,
+ EXP_BRACKET_RANGE, mExposureBracketingValidEntries);
+#ifndef OMAP_TUNA
+ if (mCapMode == OMXCameraAdapter::CP_CAM) {
+ mExposureBracketMode = OMX_BracketVectorShot;
+ } else {
+#endif
+ mExposureBracketMode = OMX_BracketExposureRelativeInEV;
+#ifndef OMAP_TUNA
+ }
+#endif
+ mPendingCaptureSettings |= SetExpBracket;
+ } else if ( (str = params.get(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE)) != NULL) {
+ parseExpRange(str, mExposureBracketingValues, mExposureGainBracketingValues,
+ mExposureGainBracketingModes,
+ EXP_BRACKET_RANGE, mExposureBracketingValidEntries);
+#ifndef OMAP_TUNA
+ if (mCapMode == OMXCameraAdapter::CP_CAM) {
+ mExposureBracketMode = OMX_BracketVectorShot;
+ } else {
+#endif
+ mExposureBracketMode = OMX_BracketExposureGainAbsolute;
+#ifndef OMAP_TUNA
+ }
+#endif
+ mPendingCaptureSettings |= SetExpBracket;
} else {
// if bracketing was previously set...we set again before capturing to clear
- if (mExposureBracketingValidEntries) mPendingCaptureSettings |= SetExpBracket;
- mExposureBracketingValidEntries = 0;
+ if (mExposureBracketingValidEntries) {
+ mPendingCaptureSettings |= SetExpBracket;
+ mExposureBracketingValidEntries = 0;
+ }
}
+ str = params.get(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE);
+ if ( NULL != str ) {
+ parseExpRange(str, mZoomBracketingValues, NULL, NULL,
+ ZOOM_BRACKET_RANGE, mZoomBracketingValidEntries);
+ mCurrentZoomBracketing = 0;
+ mZoomBracketingEnabled = true;
+ } else {
+ if (mZoomBracketingValidEntries) {
+ mZoomBracketingValidEntries = 0;
+ }
+ mZoomBracketingEnabled = false;
+ }
#endif
- varint = params.getInt(CameraParameters::KEY_ROTATION);
- if ( varint != -1 )
+ // Flush config queue
+ // If TRUE: Flush queue and abort processing before enqueing
+ valstr = params.get(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE);
+ if ( NULL != valstr ) {
+ if ( 0 == strcmp(valstr, android::CameraParameters::TRUE) ) {
+ mFlushShotConfigQueue = true;
+ } else if ( 0 == strcmp(valstr, android::CameraParameters::FALSE) ) {
+ mFlushShotConfigQueue = false;
+ } else {
+ CAMHAL_LOGE("Missing flush shot config parameter. Will use current (%s)",
+ mFlushShotConfigQueue ? "true" : "false");
+ }
+ }
+
+ if ( params.getInt(android::CameraParameters::KEY_ROTATION) != -1 )
{
- if ( ( unsigned int ) varint != mPictureRotation) {
+ if (params.getInt(android::CameraParameters::KEY_ROTATION) != (int) mPictureRotation) {
mPendingCaptureSettings |= SetRotation;
}
- mPictureRotation = varint;
+ mPictureRotation = params.getInt(android::CameraParameters::KEY_ROTATION);
}
else
{
@@ -152,110 +247,101 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
CAMHAL_LOGVB("Picture Rotation set %d", mPictureRotation);
#ifdef OMAP_ENHANCEMENT
-
// Read Sensor Orientation and set it based on perating mode
-
- varint = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
- if (( varint != -1 ) && (mCapMode == OMXCameraAdapter::VIDEO_MODE))
+ varint = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
+ if ( varint != -1 )
{
- mSensorOrientation = varint;
- if (mSensorOrientation == 270 ||mSensorOrientation==90)
- {
- CAMHAL_LOGEA(" Orientation is 270/90. So setting counter rotation to Ducati");
- mSensorOrientation +=180;
- mSensorOrientation%=360;
- }
- }
- else
+ mSensorOrientation = varint;
+ if (mSensorOrientation == 270 ||mSensorOrientation==90)
+ {
+ CAMHAL_LOGEA(" Orientation is 270/90. So setting counter rotation to Ducati");
+ mSensorOrientation +=180;
+ mSensorOrientation%=360;
+ }
+ }
+ else
{
- mSensorOrientation = 0;
+ mSensorOrientation = 0;
}
- CAMHAL_LOGVB("Sensor Orientation set : %d", mSensorOrientation);
+ CAMHAL_LOGVB("Sensor Orientation set : %d", mSensorOrientation);
+#endif
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
varint = params.getInt(TICameraParameters::KEY_BURST);
if ( varint >= 1 )
{
- if (varint != mBurstFrames) {
- mPendingCaptureSettings |= SetExpBracket;
+ if (varint != (int) mBurstFrames) {
+ mPendingCaptureSettings |= SetBurst;
}
mBurstFrames = varint;
}
else
{
- if (mBurstFrames != 1) mPendingCaptureSettings |= SetExpBracket;
+ if (mBurstFrames != 1) mPendingCaptureSettings |= SetBurst;
mBurstFrames = 1;
}
CAMHAL_LOGVB("Burst Frames set %d", mBurstFrames);
-
#endif
- varint = params.getInt(CameraParameters::KEY_JPEG_QUALITY);
- if ( ( varint >= MIN_JPEG_QUALITY ) &&
- ( varint <= MAX_JPEG_QUALITY ) )
- {
- if ( ( unsigned int ) varint != mPictureQuality) {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_QUALITY);
+ if ( varint >= MIN_JPEG_QUALITY && varint <= MAX_JPEG_QUALITY ) {
+ if (varint != mPictureQuality) {
mPendingCaptureSettings |= SetQuality;
+ mPictureQuality = varint;
}
- mPictureQuality = varint;
- }
- else
- {
- if (mPictureQuality != MAX_JPEG_QUALITY) mPendingCaptureSettings |= SetQuality;
- mPictureQuality = MAX_JPEG_QUALITY;
+ } else {
+ if (mPictureQuality != MAX_JPEG_QUALITY) {
+ mPendingCaptureSettings |= SetQuality;
+ mPictureQuality = MAX_JPEG_QUALITY;
}
+ }
CAMHAL_LOGVB("Picture Quality set %d", mPictureQuality);
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
- if ( varint >= 0 )
- {
- if ( ( unsigned int ) varint != mThumbWidth) {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ if ( varint >= 0 ) {
+ if (varint != mThumbWidth) {
mPendingCaptureSettings |= SetThumb;
+ mThumbWidth = varint;
}
- mThumbWidth = varint;
- }
- else
- {
- if (mThumbWidth != DEFAULT_THUMB_WIDTH) mPendingCaptureSettings |= SetThumb;
- mThumbWidth = DEFAULT_THUMB_WIDTH;
+ } else {
+ if (mThumbWidth != DEFAULT_THUMB_WIDTH) {
+ mPendingCaptureSettings |= SetThumb;
+ mThumbWidth = DEFAULT_THUMB_WIDTH;
}
-
+ }
CAMHAL_LOGVB("Picture Thumb width set %d", mThumbWidth);
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
- if ( varint >= 0 )
- {
- if ( ( unsigned int ) varint != mThumbHeight) {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ if ( varint >= 0 ) {
+ if (varint != mThumbHeight) {
mPendingCaptureSettings |= SetThumb;
+ mThumbHeight = varint;
}
- mThumbHeight = varint;
- }
- else
- {
- if (mThumbHeight != DEFAULT_THUMB_HEIGHT) mPendingCaptureSettings |= SetThumb;
- mThumbHeight = DEFAULT_THUMB_HEIGHT;
+ } else {
+ if (mThumbHeight != DEFAULT_THUMB_HEIGHT) {
+ mPendingCaptureSettings |= SetThumb;
+ mThumbHeight = DEFAULT_THUMB_HEIGHT;
}
-
+ }
CAMHAL_LOGVB("Picture Thumb height set %d", mThumbHeight);
- varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
- if ( ( varint >= MIN_JPEG_QUALITY ) &&
- ( varint <= MAX_JPEG_QUALITY ) )
- {
- if ( ( unsigned int ) varint != mThumbQuality) {
+ varint = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ if ( varint >= MIN_JPEG_QUALITY && varint <= MAX_JPEG_QUALITY ) {
+ if (varint != mThumbQuality) {
mPendingCaptureSettings |= SetThumb;
+ mThumbQuality = varint;
}
- mThumbQuality = varint;
- }
- else
- {
- if (mThumbQuality != MAX_JPEG_QUALITY) mPendingCaptureSettings |= SetThumb;
- mThumbQuality = MAX_JPEG_QUALITY;
+ } else {
+ if (mThumbQuality != MAX_JPEG_QUALITY) {
+ mPendingCaptureSettings |= SetThumb;
+ mThumbQuality = MAX_JPEG_QUALITY;
}
+ }
CAMHAL_LOGDB("Thumbnail Quality set %d", mThumbQuality);
@@ -263,19 +349,36 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
mPendingCaptureSettings = ECapturesettingsAll;
}
- if (mPendingCaptureSettings) {
+ // we are already capturing and in cpcam mode...just need to enqueue
+ // shots
+ inCaptureState = (CAPTURE_ACTIVE & mAdapterState) && (CAPTURE_ACTIVE & mNextState);
+ if ((mPendingCaptureSettings & ~SetExpBracket) && !inCaptureState) {
+ disableReprocess();
disableImagePort();
if ( NULL != mReleaseImageBuffersCallback ) {
mReleaseImageBuffersCallback(mReleaseData);
}
+ if (mPendingCaptureSettings & SetFormat) {
+ mPendingCaptureSettings &= ~SetFormat;
+ ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *cap);
+ if ( ret != NO_ERROR ) {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ }
}
+ cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+ cap->mWidth = params.getInt(TICameraParameters::RAW_WIDTH);
+ cap->mHeight = params.getInt(TICameraParameters::RAW_HEIGHT);
+
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+status_t OMXCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t bufferCount)
{
status_t ret = NO_ERROR;
OMXCameraPortParameters *imgCaptureData = NULL;
@@ -297,85 +400,196 @@ status_t OMXCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCou
if ( ret == NO_ERROR )
{
- length = imgCaptureData->mBufSize;
+ frame.mLength = imgCaptureData->mBufSize;
+ frame.mWidth = imgCaptureData->mWidth;
+ frame.mHeight = imgCaptureData->mHeight;
+ frame.mAlignment = imgCaptureData->mStride;
+ CAMHAL_LOGDB("getPictureBufferSize: width:%u height:%u alignment:%u length:%u",
+ frame.mWidth, frame.mHeight, frame.mAlignment, frame.mLength);
}
else
{
CAMHAL_LOGEB("setFormat() failed 0x%x", ret);
- length = 0;
}
}
- CAMHAL_LOGDB("getPictureBufferSize %d", length);
-
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+int OMXCameraAdapter::getBracketingValueMode(const char *a, const char *b) const
+{
+ BracketingValueMode bvm = BracketingValueAbsolute;
+
+ if ( (NULL != b) &&
+ (NULL != a) &&
+ (a < b) &&
+ ( (NULL != memchr(a, '+', b - a)) ||
+ (NULL != memchr(a, '-', b - a)) ) ) {
+ bvm = BracketingValueRelative;
+ }
+ return bvm;
+}
+
status_t OMXCameraAdapter::parseExpRange(const char *rangeStr,
- int * expRange,
+ int *expRange,
+ int *gainRange,
+ int *expGainModes,
size_t count,
size_t &validEntries)
{
status_t ret = NO_ERROR;
- char *ctx, *expVal;
- char *tmp = NULL;
+ char *end = NULL;
+ const char *startPtr = NULL;
size_t i = 0;
LOG_FUNCTION_NAME;
- if ( NULL == rangeStr )
- {
+ if ( NULL == rangeStr ){
return -EINVAL;
- }
+ }
- if ( NULL == expRange )
- {
+ if ( NULL == expRange ){
return -EINVAL;
- }
-
- if ( NO_ERROR == ret )
- {
- tmp = ( char * ) malloc( strlen(rangeStr) + 1 );
+ }
- if ( NULL == tmp )
- {
- CAMHAL_LOGEA("No resources for temporary buffer");
- return -1;
+ if ( NO_ERROR == ret ) {
+ startPtr = rangeStr;
+ do {
+ // Relative Exposure example: "-30,-10, 0, 10, 30"
+ // Absolute Gain ex. (exposure,gain) pairs: "(100,300),(200,300),(400,300),(800,300),(1600,300)"
+ // Relative Gain ex. (exposure,gain) pairs: "(-30,+0),(-10, +0),(+0,+0),(+10,+0),(+30,+0)"
+ // Forced relative Exposure example: "-30F,-10F, 0F, 10F, 30F"
+ // Forced absolute Gain ex. (exposure,gain) pairs: "(100,300)F,(200,300)F,(400,300)F,(800,300)F,(1600,300)F"
+ // Forced relative Gain ex. (exposure,gain) pairs: "(-30,+0)F,(-10, +0)F,(+0,+0)F,(+10,+0)F,(+30,+0)F"
+
+ // skip '(' and ','
+ while ((*startPtr == '(') || (*startPtr == ',')) startPtr++;
+
+ expRange[i] = (int)strtol(startPtr, &end, 10);
+
+ if (expGainModes) {
+ // if gainRange is given rangeStr should be (exposure, gain) pair
+ if (gainRange) {
+ int bvm_exp = getBracketingValueMode(startPtr, end);
+ startPtr = end + 1; // for the ','
+ gainRange[i] = (int)strtol(startPtr, &end, 10);
+
+ if (BracketingValueAbsolute == bvm_exp) {
+ expGainModes[i] = getBracketingValueMode(startPtr, end);
+ } else {
+ expGainModes[i] = bvm_exp;
+ }
+ } else {
+ expGainModes[i] = BracketingValueCompensation;
+ }
+ }
+ startPtr = end;
+
+ // skip ')'
+ while (*startPtr == ')') startPtr++;
+
+ // Check for "forced" key
+ if (expGainModes) {
+ while ((*startPtr == 'F') || (*startPtr == 'f')) {
+ if ( BracketingValueAbsolute == expGainModes[i] ) {
+ expGainModes[i] = BracketingValueAbsoluteForced;
+ } else if ( BracketingValueRelative == expGainModes[i] ) {
+ expGainModes[i] = BracketingValueRelativeForced;
+ } else if ( BracketingValueCompensation == expGainModes[i] ) {
+ expGainModes[i] = BracketingValueCompensationForced;
+ } else {
+ CAMHAL_LOGE("Unexpected old mode 0x%x", expGainModes[i]);
+ }
+ startPtr++;
+ }
}
- memset(tmp, '\0', strlen(rangeStr) + 1);
-
- }
-
- if ( NO_ERROR == ret )
- {
- strncpy(tmp, rangeStr, strlen(rangeStr) );
- expVal = strtok_r( (char *) tmp, CameraHal::PARAMS_DELIMITER, &ctx);
- i = 0;
- while ( ( NULL != expVal ) && ( i < count ) )
- {
- expRange[i] = atoi(expVal);
- expVal = strtok_r(NULL, CameraHal::PARAMS_DELIMITER, &ctx);
i++;
- }
+
+ } while ((startPtr[0] != '\0') && (i < count));
validEntries = i;
- }
+ }
- if ( NULL != tmp )
- {
- free(tmp);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::doExposureBracketing(int *evValues,
+ int *evValues2,
+ int *evModes2,
+ size_t evCount,
+ size_t frameCount,
+ bool flush,
+ OMX_BRACKETMODETYPE bracketMode)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState ) {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NULL == evValues ) {
+ CAMHAL_LOGEA("Exposure compensation values pointer is invalid");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret ) {
+#ifndef OMAP_TUNA
+ if (bracketMode == OMX_BracketVectorShot) {
+ ret = setVectorShot(evValues, evValues2, evModes2, evCount, frameCount, flush, bracketMode);
+ } else {
+#endif
+ ret = setExposureBracketing(evValues, evValues2, evCount, frameCount, bracketMode);
+#ifndef OMAP_TUNA
}
+#endif
+ }
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
- size_t evCount,
- size_t frameCount)
+#ifndef OMAP_TUNA
+status_t OMXCameraAdapter::setVectorStop(bool toPreview)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_VECTSHOTSTOPMETHODTYPE vecShotStop;
+
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR(&vecShotStop, OMX_TI_CONFIG_VECTSHOTSTOPMETHODTYPE);
+
+ vecShotStop.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ if (toPreview) {
+ vecShotStop.eStopMethod = OMX_TI_VECTSHOTSTOPMETHOD_GOTO_PREVIEW;
+ } else {
+ vecShotStop.eStopMethod = OMX_TI_VECTSHOTSTOPMETHOD_WAIT_IN_CAPTURE;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigVectShotStopMethod,
+ &vecShotStop);
+ if (OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while configuring bracket shot 0x%x", eError);
+ } else {
+ CAMHAL_LOGDA("Bracket shot configured successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::initVectorShot()
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
@@ -384,17 +598,171 @@ status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
LOG_FUNCTION_NAME;
- if ( OMX_StateInvalid == mComponentState )
- {
- CAMHAL_LOGEA("OMX component is in invalid state");
- ret = -EINVAL;
+ if (NO_ERROR == ret) {
+ OMX_INIT_STRUCT_PTR (&expCapMode, OMX_CONFIG_CAPTUREMODETYPE);
+ expCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ expCapMode.bFrameLimited = OMX_FALSE;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCaptureMode,
+ &expCapMode);
+ if (OMX_ErrorNone != eError) {
+ CAMHAL_LOGEB("Error while configuring capture mode 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGDA("Camera capture mode configured successfully");
}
+ }
- if ( NULL == evValues )
- {
- CAMHAL_LOGEA("Exposure compensation values pointer is invalid");
- ret = -EINVAL;
+ if (NO_ERROR == ret) {
+ OMX_INIT_STRUCT_PTR (&extExpCapMode, OMX_CONFIG_EXTCAPTUREMODETYPE);
+ extExpCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ extExpCapMode.bEnableBracketing = OMX_TRUE;
+ extExpCapMode.tBracketConfigType.eBracketMode = OMX_BracketVectorShot;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigExtCaptureMode,
+ &extExpCapMode);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring extended capture mode 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGDA("Extended camera capture mode configured successfully");
+ }
+ }
+
+#ifndef OMAP_TUNA
+ if (NO_ERROR == ret) {
+ // set vector stop method to stop in capture
+ ret = setVectorStop(false);
+ }
+#endif
+
+ exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::setVectorShot(int *evValues,
+ int *evValues2,
+ int *evModes2,
+ size_t evCount,
+ size_t frameCount,
+ bool flush,
+ OMX_BRACKETMODETYPE bracketMode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_ENQUEUESHOTCONFIGS enqueueShotConfigs;
+ OMX_TI_CONFIG_QUERYAVAILABLESHOTS queryAvailableShots;
+ bool doFlush = flush;
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR(&enqueueShotConfigs, OMX_TI_CONFIG_ENQUEUESHOTCONFIGS);
+ OMX_INIT_STRUCT_PTR(&queryAvailableShots, OMX_TI_CONFIG_QUERYAVAILABLESHOTS);
+
+ queryAvailableShots.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigQueryAvailableShots,
+ &queryAvailableShots);
+ if (OMX_ErrorNone != eError) {
+ CAMHAL_LOGE("Error getting available shots 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGD("AVAILABLE SHOTS: %d", queryAvailableShots.nAvailableShots);
+ if (queryAvailableShots.nAvailableShots < evCount) {
+ // TODO(XXX): Need to implement some logic to handle this error
+ CAMHAL_LOGE("Not enough available shots to fulfill this queue request");
+ ret = -ENOSPC;
+ goto exit;
+ }
+ }
+
+ for ( unsigned int confID = 0; confID < evCount; ) {
+ unsigned int i;
+ for ( i = 0 ; (i < ARRAY_SIZE(enqueueShotConfigs.nShotConfig)) && (confID < evCount); i++, confID++ ) {
+ CAMHAL_LOGD("%2u: (%7d,%4d) mode: %d", confID, evValues[confID], evValues2[confID], evModes2[confID]);
+ enqueueShotConfigs.nShotConfig[i].nConfigId = confID;
+ enqueueShotConfigs.nShotConfig[i].nFrames = 1;
+ if ( (BracketingValueCompensation == evModes2[confID]) ||
+ (BracketingValueCompensationForced == evModes2[confID]) ) {
+ // EV compensation
+ enqueueShotConfigs.nShotConfig[i].nEC = evValues[confID];
+ enqueueShotConfigs.nShotConfig[i].nExp = 0;
+ enqueueShotConfigs.nShotConfig[i].nGain = 0;
+ } else {
+ // exposure,gain pair
+ enqueueShotConfigs.nShotConfig[i].nEC = 0;
+ enqueueShotConfigs.nShotConfig[i].nExp = evValues[confID];
+ enqueueShotConfigs.nShotConfig[i].nGain = evValues2[confID];
+ }
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_ABSOLUTE;
+ switch (evModes2[confID]) {
+ case BracketingValueAbsolute: // (exp,gain) pairs directly program sensor values
+ default :
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_ABSOLUTE;
+ break;
+ case BracketingValueRelative: // (exp,gain) pairs relative to AE settings and constraints
+ case BracketingValueCompensation: // EV compensation relative to AE settings and constraints
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_RELATIVE;
+ break;
+ case BracketingValueAbsoluteForced: // (exp,gain) pairs directly program sensor values
+ // are forced over constraints due to flicker, etc.
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_FORCE_ABSOLUTE;
+ break;
+ case BracketingValueRelativeForced: // (exp, gain) pairs relative to AE settings AND settings
+ case BracketingValueCompensationForced: // EV compensation relative to AE settings and constraints
+ // are forced over constraints due to flicker, etc.
+ enqueueShotConfigs.nShotConfig[i].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_FORCE_RELATIVE;
+ break;
+ }
+ enqueueShotConfigs.nShotConfig[i].bNoSnapshot = OMX_FALSE; // TODO: Make this configurable
+ }
+
+ // Repeat last exposure and again
+ if ((confID == evCount) && (evCount > 0) && (frameCount > evCount) && (0 != i)) {
+ enqueueShotConfigs.nShotConfig[i-1].nFrames = frameCount - evCount;
+ }
+
+ enqueueShotConfigs.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ enqueueShotConfigs.bFlushQueue = doFlush ? OMX_TRUE : OMX_FALSE;
+ enqueueShotConfigs.nNumConfigs = i;
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigEnqueueShotConfigs,
+ &enqueueShotConfigs);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring bracket shot 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGDA("Bracket shot configured successfully");
}
+ // Flush only first time
+ doFlush = false;
+ }
+
+ exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+#endif
+
+status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
+ int *evValues2,
+ size_t evCount,
+ size_t frameCount,
+ OMX_BRACKETMODETYPE bracketMode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_CAPTUREMODETYPE expCapMode;
+ OMX_CONFIG_EXTCAPTUREMODETYPE extExpCapMode;
+
+ LOG_FUNCTION_NAME;
if ( NO_ERROR == ret )
{
@@ -439,13 +807,23 @@ status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
else
{
extExpCapMode.bEnableBracketing = OMX_TRUE;
- extExpCapMode.tBracketConfigType.eBracketMode = OMX_BracketExposureRelativeInEV;
+ extExpCapMode.tBracketConfigType.eBracketMode = bracketMode;
extExpCapMode.tBracketConfigType.nNbrBracketingValues = evCount - 1;
}
for ( unsigned int i = 0 ; i < evCount ; i++ )
{
- extExpCapMode.tBracketConfigType.nBracketValues[i] = ( evValues[i] * ( 1 << Q16_OFFSET ) ) / 10;
+#ifndef OMAP_TUNA
+ if (bracketMode == OMX_BracketExposureGainAbsolute) {
+ extExpCapMode.tBracketConfigType.nBracketValues[i] = evValues[i];
+ extExpCapMode.tBracketConfigType.nBracketValues2[i] = evValues2[i];
+ } else {
+#endif
+ // assuming OMX_BracketExposureRelativeInEV
+ extExpCapMode.tBracketConfigType.nBracketValues[i] = ( evValues[i] * ( 1 << Q16_OFFSET ) ) / 10;
+#ifndef OMAP_TUNA
+ }
+#endif
}
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
@@ -538,7 +916,8 @@ status_t OMXCameraAdapter::doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader,
if ( NO_ERROR == ret )
{
- currentBufferIdx = ( unsigned int ) pBuffHeader->pAppPrivate;
+ CameraBuffer *buffer = (CameraBuffer *)pBuffHeader->pAppPrivate;
+ currentBufferIdx = buffer->index;
if ( currentBufferIdx >= imgCaptureData->mNumBufs)
{
@@ -558,8 +937,8 @@ status_t OMXCameraAdapter::doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader,
mBracketingBuffersQueued[nextBufferIdx] = true;
mBracketingBuffersQueuedCount++;
mLastBracetingBufferIdx = nextBufferIdx;
- setFrameRefCount(imgCaptureData->mBufferHeader[nextBufferIdx]->pBuffer, typeOfFrame, 1);
- returnFrame(imgCaptureData->mBufferHeader[nextBufferIdx]->pBuffer, typeOfFrame);
+ setFrameRefCount((CameraBuffer *)imgCaptureData->mBufferHeader[nextBufferIdx]->pAppPrivate, typeOfFrame, 1);
+ returnFrame((CameraBuffer *)imgCaptureData->mBufferHeader[nextBufferIdx]->pAppPrivate, typeOfFrame);
}
}
@@ -568,7 +947,7 @@ status_t OMXCameraAdapter::doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader,
return ret;
}
-status_t OMXCameraAdapter::sendBracketFrames()
+status_t OMXCameraAdapter::sendBracketFrames(size_t &framesSent)
{
status_t ret = NO_ERROR;
int currentBufferIdx;
@@ -577,6 +956,7 @@ status_t OMXCameraAdapter::sendBracketFrames()
LOG_FUNCTION_NAME;
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ framesSent = 0;
if ( OMX_StateExecuting != mComponentState )
{
@@ -599,6 +979,7 @@ status_t OMXCameraAdapter::sendBracketFrames()
imgCaptureData->mBufferHeader[currentBufferIdx],
imgCaptureData->mImageType,
imgCaptureData);
+ framesSent++;
}
} while ( currentBufferIdx != mLastBracetingBufferIdx );
@@ -625,7 +1006,7 @@ status_t OMXCameraAdapter::startBracketing(int range)
}
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( mBracketingEnabled )
{
@@ -644,7 +1025,7 @@ status_t OMXCameraAdapter::startBracketing(int range)
if ( NO_ERROR == ret )
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
mBracketingRange = range;
mBracketingBuffersQueued = new bool[imgCaptureData->mNumBufs];
@@ -657,6 +1038,7 @@ status_t OMXCameraAdapter::startBracketing(int range)
if ( NO_ERROR == ret )
{
mBracketingBuffersQueuedCount = imgCaptureData->mNumBufs;
+ mBurstFramesAccum = imgCaptureData->mNumBufs;
mLastBracetingBufferIdx = mBracketingBuffersQueuedCount - 1;
for ( int i = 0 ; i < imgCaptureData->mNumBufs ; i++ )
@@ -669,10 +1051,11 @@ status_t OMXCameraAdapter::startBracketing(int range)
if ( NO_ERROR == ret )
{
-
- ret = startImageCapture();
+ CachedCaptureParameters* cap_params = cacheCaptureParameters();
+ ret = startImageCapture(true, cap_params);
+ delete cap_params;
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( NO_ERROR == ret )
{
@@ -696,15 +1079,15 @@ status_t OMXCameraAdapter::stopBracketing()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mBracketingLock);
+ ret = stopImageCapture();
+
+ android::AutoMutex lock(mBracketingLock);
if ( NULL != mBracketingBuffersQueued )
{
delete [] mBracketingBuffersQueued;
}
- ret = stopImageCapture();
-
mBracketingBuffersQueued = NULL;
mBracketingEnabled = false;
mBracketingBuffersQueuedCount = 0;
@@ -715,16 +1098,17 @@ status_t OMXCameraAdapter::stopBracketing()
return ret;
}
-status_t OMXCameraAdapter::startImageCapture()
+status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParameters* capParams)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters * capData = NULL;
OMX_CONFIG_BOOLEANTYPE bOMX;
+ size_t bracketingSent = 0;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mImageCaptureLock);
+ android::AutoMutex lock(mImageCaptureLock);
if(!mCaptureConfigured)
{
@@ -738,25 +1122,42 @@ status_t OMXCameraAdapter::startImageCapture()
return NO_INIT;
}
- if ((getNextState() & (CAPTURE_ACTIVE|BRACKETING_ACTIVE)) == 0) {
- CAMHAL_LOGDA("trying starting capture when already canceled");
- return NO_ERROR;
+ if ( !bracketing ) {
+ if ((getNextState() & (CAPTURE_ACTIVE|BRACKETING_ACTIVE)) == 0) {
+ CAMHAL_LOGDA("trying starting capture when already canceled");
+ return NO_ERROR;
+ }
+ }
+
+ if (!capParams) {
+ CAMHAL_LOGE("Invalid cached parameters sent!");
+ return BAD_VALUE;
}
-#ifndef OMAP_TUNA
// Camera framework doesn't expect face callbacks once capture is triggered
pauseFaceDetection(true);
-#endif
//During bracketing image capture is already active
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( mBracketingEnabled )
{
//Stop bracketing, activate normal burst for the remaining images
mBracketingEnabled = false;
- mCapturedFrames = mBracketingRange;
- ret = sendBracketFrames();
+ ret = sendBracketFrames(bracketingSent);
+
+ // Check if we accumulated enough buffers
+ if ( bracketingSent < ( mBracketingRange - 1 ) )
+ {
+ mCapturedFrames = mBracketingRange + ( ( mBracketingRange - 1 ) - bracketingSent );
+ }
+ else
+ {
+ mCapturedFrames = mBracketingRange;
+ }
+ mBurstFramesQueued = 0;
+ mBurstFramesAccum = mCapturedFrames;
+
if(ret != NO_ERROR)
goto EXIT;
else
@@ -765,25 +1166,45 @@ status_t OMXCameraAdapter::startImageCapture()
}
if ( NO_ERROR == ret ) {
- if (mPendingCaptureSettings & SetRotation) {
+ if (capParams->mPendingCaptureSettings & SetRotation) {
mPendingCaptureSettings &= ~SetRotation;
ret = setPictureRotation(mPictureRotation);
if ( NO_ERROR != ret ) {
CAMHAL_LOGEB("Error configuring image rotation %x", ret);
}
}
- }
- // need to enable wb data for video snapshot to fill in exif data
- if ((ret == NO_ERROR) && (mCapMode == VIDEO_MODE)) {
- // video snapshot uses wb data from snapshot frame
- ret = setExtraData(true, mCameraAdapterParameters.mPrevPortIndex, OMX_WhiteBalance);
+ if (capParams->mPendingCaptureSettings & (SetBurst|SetExpBracket)) {
+ mPendingCaptureSettings &= ~(SetExpBracket|SetBurst);
+ if ( mBracketingSet ) {
+ ret = doExposureBracketing(capParams->mExposureBracketingValues,
+ capParams->mExposureGainBracketingValues,
+ capParams->mExposureGainBracketingModes,
+ 0,
+ 0,
+ capParams->mFlushShotConfigQueue,
+ capParams->mExposureBracketMode);
+ } else {
+ ret = doExposureBracketing(capParams->mExposureBracketingValues,
+ capParams->mExposureGainBracketingValues,
+ capParams->mExposureGainBracketingModes,
+ capParams->mExposureBracketingValidEntries,
+ capParams->mBurstFrames,
+ capParams->mFlushShotConfigQueue,
+ capParams->mExposureBracketMode);
+ }
+
+ if ( ret != NO_ERROR ) {
+ CAMHAL_LOGEB("setExposureBracketing() failed %d", ret);
+ goto EXIT;
+ }
+ }
}
- //OMX shutter callback events are only available in hq mode
- if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode))
- {
+ capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ //OMX shutter callback events are only available in hq mode
+ if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode)) {
if ( NO_ERROR == ret )
{
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
@@ -798,23 +1219,86 @@ status_t OMXCameraAdapter::startImageCapture()
ret = setShutterCallback(true);
}
- }
+ }
- if ( NO_ERROR == ret ) {
- capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ if (mPending3Asettings) {
+ apply3Asettings(mParameters3A);
+ }
+ // check if we are already in capture state...which means we are
+ // accumulating shots
+ if ((ret == NO_ERROR) && (mBurstFramesQueued > 0)) {
+ int index = 0;
+ int queued = 0;
+ android::AutoMutex lock(mBurstLock);
+
+ if (capParams->mFlushShotConfigQueue) {
+ // reset shot queue
+ mCapturedFrames = mBurstFrames;
+ mBurstFramesAccum = mBurstFrames;
+ mBurstFramesQueued = 0;
+ for ( int index = 0 ; index < capData->mNumBufs ; index++ ) {
+ if (OMXCameraPortParameters::FILL == capData->mStatus[index]) {
+ mBurstFramesQueued++;
+ }
+ }
+ } else {
+ mCapturedFrames += mBurstFrames;
+ mBurstFramesAccum += mBurstFrames;
+ }
+
+ while ((mBurstFramesQueued < mBurstFramesAccum) &&
+ (index < capData->mNumBufs) &&
+ (queued < capData->mMaxQueueable)) {
+ if (capData->mStatus[index] == OMXCameraPortParameters::IDLE) {
+ CAMHAL_LOGDB("Queuing buffer on Capture port - %p",
+ capData->mBufferHeader[index]->pBuffer);
+ capData->mStatus[index] = OMXCameraPortParameters::FILL;
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+ (OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ mBurstFramesQueued++;
+ queued++;
+ } else if (OMXCameraPortParameters::FILL == capData->mStatus[index]) {
+ queued++;
+ }
+ index++;
+ }
+ } else if ( NO_ERROR == ret ) {
///Queue all the buffers on capture port
- for ( int index = 0 ; index < capData->mNumBufs ; index++ ) {
- CAMHAL_LOGDB("Queuing buffer on Capture port - 0x%x",
- ( unsigned int ) capData->mBufferHeader[index]->pBuffer);
- eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+ for ( int index = 0 ; index < capData->mMaxQueueable ; index++ ) {
+ if (mBurstFramesQueued < mBurstFramesAccum) {
+ CAMHAL_LOGDB("Queuing buffer on Capture port - %p",
+ capData->mBufferHeader[index]->pBuffer);
+ capData->mStatus[index] = OMXCameraPortParameters::FILL;
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
(OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]);
-
+ mBurstFramesQueued++;
+ } else {
+ capData->mStatus[index] = OMXCameraPortParameters::IDLE;
+ }
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
}
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if (mRawCapture) {
+ capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+
+ ///Queue all the buffers on capture port
+ for ( int index = 0 ; index < capData->mNumBufs ; index++ ) {
+ CAMHAL_LOGDB("Queuing buffer on Video port (for RAW capture) - 0x%x", ( unsigned int ) capData->mBufferHeader[index]->pBuffer);
+ capData->mStatus[index] = OMXCameraPortParameters::FILL;
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+ (OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+ }
+#endif
+
mWaitingForSnapshot = true;
mCaptureSignalled = false;
+ mPendingCaptureSettings &= ~SetBurst;
// Capturing command is not needed when capturing in video mode
// Only need to queue buffers on image ports
@@ -834,9 +1318,9 @@ status_t OMXCameraAdapter::startImageCapture()
}
//OMX shutter callback events are only available in hq mode
+
if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode))
{
-
if ( NO_ERROR == ret )
{
ret = mStartCaptureSem.WaitTimeout(OMX_CAPTURE_TIMEOUT);
@@ -867,16 +1351,15 @@ status_t OMXCameraAdapter::startImageCapture()
}
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
- setExtraData(false, mCameraAdapterParameters.mPrevPortIndex, OMX_WhiteBalance);
mWaitingForSnapshot = false;
mCaptureSignalled = false;
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::stopImageCapture()
@@ -888,7 +1371,7 @@ status_t OMXCameraAdapter::stopImageCapture()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mImageCaptureLock);
+ android::AutoMutex lock(mImageCaptureLock);
if (!mCaptureConfigured) {
//Capture is not ongoing, return from here
@@ -900,9 +1383,13 @@ status_t OMXCameraAdapter::stopImageCapture()
goto EXIT;
}
+ // TODO(XXX): Reprocessing is currently piggy-backing capture commands
+ if (mAdapterState == REPROCESS_STATE) {
+ ret = stopReprocess();
+ }
+
//Disable the callback first
mWaitingForSnapshot = false;
- mSnapshotCount = 0;
// OMX shutter callback events are only available in hq mode
if ((HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode)) {
@@ -923,11 +1410,25 @@ status_t OMXCameraAdapter::stopImageCapture()
mStartCaptureSem.Create(0);
}
}
-
#ifndef OMAP_TUNA
- // After capture, face detection should be disabled
- // and application needs to restart face detection
- stopFaceDetection();
+ else if (CP_CAM == mCapMode) {
+ // Reset shot config queue
+ OMX_TI_CONFIG_ENQUEUESHOTCONFIGS resetShotConfigs;
+ OMX_INIT_STRUCT_PTR(&resetShotConfigs, OMX_TI_CONFIG_ENQUEUESHOTCONFIGS);
+
+ resetShotConfigs.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ resetShotConfigs.bFlushQueue = OMX_TRUE;
+ resetShotConfigs.nNumConfigs = 0;
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigEnqueueShotConfigs,
+ &resetShotConfigs);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while reset shot config 0x%x", eError);
+ goto EXIT;
+ } else {
+ CAMHAL_LOGDA("Shot config reset successfully");
+ }
+ }
#endif
//Wait here for the capture to be done, in worst case timeout and proceed with cleanup
@@ -956,23 +1457,42 @@ status_t OMXCameraAdapter::stopImageCapture()
}
}
- // had to enable wb data for video snapshot to fill in exif data
- // now that we are done...disable
- if ((ret == NO_ERROR) && (mCapMode == VIDEO_MODE)) {
- ret = setExtraData(false, mCameraAdapterParameters.mPrevPortIndex, OMX_WhiteBalance);
- }
+ // Disable WB and vector shot extra data for metadata
+ setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
+ // TODO: WA: if domx client disables VectShotInfo metadata on the image port, this causes
+ // VectShotInfo to be disabled internally on preview port also. Remove setting in OMXCapture
+ // setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_VectShotInfo);
+#ifndef OMAP_TUNA
+ setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
+#endif
CAMHAL_LOGDB("Capture set - 0x%x", eError);
mCaptureSignalled = true; //set this to true if we exited because of timeout
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
mFrameCount = 0;
mFirstFrameCondition.broadcast();
}
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ // Stop is always signalled externally in CPCAM mode
+ // We need to make sure we really stop
+ if ((mCapMode == CP_CAM)) {
+ disableReprocess();
+ disableImagePort();
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ }
+
+ // Moving code for below commit here as an optimization for continuous capture,
+ // so focus settings don't have to reapplied after each capture
+ // c78fa2a CameraHAL: Always reset focus mode after capture
+ // Workaround when doing many consecutive shots, CAF wasn't getting restarted.
+ mPending3Asettings |= SetFocus;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
@@ -982,20 +1502,21 @@ EXIT:
}
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
mFrameCount = 0;
mFirstFrameCondition.broadcast();
}
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::disableImagePort(){
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters *imgCaptureData = NULL;
+ OMXCameraPortParameters *imgRawCaptureData = NULL;
if (!mCaptureConfigured) {
return NO_ERROR;
@@ -1003,6 +1524,7 @@ status_t OMXCameraAdapter::disableImagePort(){
mCaptureConfigured = false;
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ imgRawCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex]; // for RAW capture
///Register for Image port Disable event
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
@@ -1052,19 +1574,165 @@ status_t OMXCameraAdapter::disableImagePort(){
goto EXIT;
}
- EXIT:
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ deinitInternalBuffers(mCameraAdapterParameters.mImagePortIndex);
+
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if (mRawCapture) {
+ ///Register for Video port Disable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ mStopCaptureSem);
+ ///Disable RawCapture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ ///Free all the buffers on RawCapture port
+ if (imgRawCaptureData) {
+ CAMHAL_LOGDB("Freeing buffer on Capture port - %d", imgRawCaptureData->mNumBufs);
+ for ( int index = 0 ; index < imgRawCaptureData->mNumBufs ; index++) {
+ CAMHAL_LOGDB("Freeing buffer on Capture port - 0x%x", ( unsigned int ) imgRawCaptureData->mBufferHeader[index]->pBuffer);
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mVideoPortIndex,
+ (OMX_BUFFERHEADERTYPE*)imgRawCaptureData->mBufferHeader[index]);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+ }
+ CAMHAL_LOGDA("Waiting for Video port disable");
+ //Wait for the video port disable event
+ mStopCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ CAMHAL_LOGDA("Video Port disabled");
+ }
+#endif
+
+EXIT:
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
+status_t OMXCameraAdapter::initInternalBuffers(OMX_U32 portIndex)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ int index = 0;
+#ifndef OMAP_TUNA
+ OMX_TI_PARAM_USEBUFFERDESCRIPTOR bufferdesc;
+
+ /* Indicate to Ducati that we're planning to use dynamically-mapped buffers */
+ OMX_INIT_STRUCT_PTR (&bufferdesc, OMX_TI_PARAM_USEBUFFERDESCRIPTOR);
+ bufferdesc.nPortIndex = portIndex;
+ bufferdesc.bEnabled = OMX_FALSE;
+ bufferdesc.eBufferType = OMX_TI_BufferTypePhysicalPageList;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexUseBufferDescriptor,
+ &bufferdesc);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ return -EINVAL;
+ }
+
+ CAMHAL_LOGDA("Initializing internal buffers");
+ do {
+ OMX_TI_PARAM_COMPONENTBUFALLOCTYPE bufferalloc;
+ OMX_TI_PARAM_COMPONENTBUFALLOCTYPE bufferallocset;
+ OMX_INIT_STRUCT_PTR (&bufferalloc, OMX_TI_PARAM_COMPONENTBUFALLOCTYPE);
+ bufferalloc.nPortIndex = portIndex;
+ bufferalloc.nIndex = index;
-status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
+ eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamComponentBufferAllocation,
+ &bufferalloc);
+ if (eError == OMX_ErrorNoMore) {
+ return NO_ERROR;
+ }
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("GetParameter failed error = 0x%x", eError);
+ break;
+ }
+
+ CAMHAL_LOGDB("Requesting buftype %d of size %dx%d",
+ (int)bufferalloc.eBufType, (int)bufferalloc.nAllocWidth,
+ (int)bufferalloc.nAllocLines);
+
+ bufferalloc.eBufType = OMX_TI_BufferTypeHardwareReserved1D;
+
+ OMX_INIT_STRUCT_PTR (&bufferallocset, OMX_TI_PARAM_COMPONENTBUFALLOCTYPE);
+ bufferallocset.nPortIndex = portIndex;
+ bufferallocset.nIndex = index;
+ bufferallocset.eBufType = OMX_TI_BufferTypeHardwareReserved1D;
+ bufferallocset.nAllocWidth = bufferalloc.nAllocWidth;
+ bufferallocset.nAllocLines = bufferalloc.nAllocLines;
+
+ eError = OMX_SetParameter (mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexParamComponentBufferAllocation,
+ &bufferallocset);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("SetParameter failed, error=%08x", eError);
+ if (eError == OMX_ErrorNoMore) return NO_ERROR;
+ break;
+ }
+
+ index++;
+
+ /* 1 is an arbitrary limit */
+ } while (index < 1);
+
+ CAMHAL_LOGV("Ducati requested too many (>1) internal buffers");
+
+ return -EINVAL;
+#endif
+ return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::deinitInternalBuffers(OMX_U32 portIndex)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+#ifndef OMAP_TUNA
+ OMX_TI_PARAM_USEBUFFERDESCRIPTOR bufferdesc;
+
+ OMX_INIT_STRUCT_PTR (&bufferdesc, OMX_TI_PARAM_USEBUFFERDESCRIPTOR);
+ bufferdesc.nPortIndex = portIndex;
+ bufferdesc.bEnabled = OMX_FALSE;
+ bufferdesc.eBufferType = OMX_TI_BufferTypeDefault;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexUseBufferDescriptor,
+ &bufferdesc);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ return -EINVAL;
+ }
+
+ OMX_TI_PARAM_COMPONENTBUFALLOCTYPE bufferalloc;
+ OMX_INIT_STRUCT_PTR (&bufferalloc, OMX_TI_PARAM_COMPONENTBUFALLOCTYPE);
+ bufferalloc.nPortIndex = portIndex;
+ bufferalloc.eBufType = OMX_TI_BufferTypeDefault;
+ bufferalloc.nAllocWidth = 1;
+ bufferalloc.nAllocLines = 1;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexParamComponentBufferAllocation,
+ &bufferalloc);
+#endif
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ return -EINVAL;
+ }
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::UseBuffersCapture(CameraBuffer * bufArr, int num)
{
LOG_FUNCTION_NAME;
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters * imgCaptureData = NULL;
- uint32_t *buffers = (uint32_t*)bufArr;
OMXCameraPortParameters cap;
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
@@ -1077,7 +1745,6 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
// capture is already configured...we can skip this step
if (mCaptureConfigured) {
-
if ( NO_ERROR == ret )
{
ret = setupEXIF();
@@ -1088,6 +1755,7 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
}
mCapturedFrames = mBurstFrames;
+ mBurstFramesQueued = 0;
return NO_ERROR;
}
@@ -1117,16 +1785,6 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
}
}
- if (mPendingCaptureSettings & SetExpBracket) {
- mPendingCaptureSettings &= ~SetExpBracket;
- ret = setExposureBracketing( mExposureBracketingValues,
- mExposureBracketingValidEntries, mBurstFrames);
- if ( ret != NO_ERROR ) {
- CAMHAL_LOGEB("setExposureBracketing() failed %d", ret);
- goto EXIT;
- }
- }
-
if (mPendingCaptureSettings & SetQuality) {
mPendingCaptureSettings &= ~SetQuality;
ret = setImageQuality(mPictureQuality);
@@ -1136,6 +1794,12 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
}
}
+ // assuming if buffers are from ANW that they will be pagelist buffers
+ // and need a tiler reservation
+ if (bufArr[0].type == CAMERA_BUFFER_ANW) {
+ initInternalBuffers(mCameraAdapterParameters.mImagePortIndex);
+ }
+
///Register for Image port ENABLE event
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
@@ -1152,24 +1816,47 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+ // Configure DOMX to use either gralloc handles or vptrs
+ if ((imgCaptureData->mNumBufs > 0)) {
+ OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+ OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
+
+ domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ if (bufArr[0].type == CAMERA_BUFFER_ANW) {
+ CAMHAL_LOGD ("Using ANW Buffers");
+ domxUseGrallocHandles.bEnable = OMX_TRUE;
+ } else {
+ CAMHAL_LOGD ("Using ION Buffers");
+ domxUseGrallocHandles.bEnable = OMX_FALSE;
+ }
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+
for ( int index = 0 ; index < imgCaptureData->mNumBufs ; index++ )
{
OMX_BUFFERHEADERTYPE *pBufferHdr;
CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d",
- (unsigned int)buffers[index],
+ (unsigned int)bufArr[index].opaque,
(int)imgCaptureData->mBufSize);
eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp,
&pBufferHdr,
mCameraAdapterParameters.mImagePortIndex,
0,
- mCaptureBuffersLength,
- (OMX_U8*)buffers[index]);
+ imgCaptureData->mBufSize,
+ (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
- pBufferHdr->pAppPrivate = (OMX_PTR) index;
+ pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+ bufArr[index].index = index;
pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
pBufferHdr->nVersion.s.nVersionMajor = 1 ;
pBufferHdr->nVersion.s.nVersionMinor = 1 ;
@@ -1213,21 +1900,197 @@ status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
}
}
+ // Enable WB and vector shot extra data for metadata
+ setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
+ // TODO: WA: if domx client disables VectShotInfo metadata on the image port, this causes
+ // VectShotInfo to be disabled internally on preview port also. Remove setting in OMXCapture
+ // setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_TI_VectShotInfo);
+#ifndef OMAP_TUNA
+ setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
+
+ // CPCam mode only supports vector shot
+ // Regular capture is not supported
+ if (mCapMode == CP_CAM) initVectorShot();
+
+ // Choose proper single preview mode for cpcapture capture (reproc or hs)
+ if (( NO_ERROR == ret) && (OMXCameraAdapter::CP_CAM == mCapMode)) {
+ OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE singlePrevMode;
+ OMX_INIT_STRUCT_PTR (&singlePrevMode, OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE);
+ if (mNextState == LOADED_CAPTURE_STATE) {
+ singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
+ } else if (mNextState == LOADED_REPROCESS_CAPTURE_STATE) {
+ singlePrevMode.eMode = OMX_TI_SinglePreviewMode_Reprocess;
+ } else {
+ CAMHAL_LOGE("Wrong state trying to start a capture in CPCAM mode?");
+ singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
+ }
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigSinglePreviewMode,
+ &singlePrevMode);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring single preview mode 0x%x", eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
+ } else {
+ CAMHAL_LOGDA("single preview mode configured successfully");
+ }
+ }
+#endif
+
mCapturedFrames = mBurstFrames;
+ mBurstFramesAccum = mBurstFrames;
+ mBurstFramesQueued = 0;
+
mCaptureConfigured = true;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ if (mRawCapture) {
+ mCaptureConfigured = false;
+ }
+#endif
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
+ // TODO: WA: if domx client disables VectShotInfo metadata on the image port, this causes
+ // VectShotInfo to be disabled internally on preview port also. Remove setting in OMXCapture
+ // setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_VectShotInfo);
+#ifndef OMAP_TUNA
+ setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
+#endif
//Release image buffers
if ( NULL != mReleaseImageBuffersCallback ) {
mReleaseImageBuffersCallback(mReleaseData);
}
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+}
+status_t OMXCameraAdapter::UseBuffersRawCapture(CameraBuffer *bufArr, int num)
+{
+ LOG_FUNCTION_NAME
+ status_t ret;
+ OMX_ERRORTYPE eError;
+ OMXCameraPortParameters * imgRawCaptureData = NULL;
+ Utils::Semaphore camSem;
+ OMXCameraPortParameters cap;
+
+ imgRawCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
+
+ if (mCaptureConfigured) {
+ return NO_ERROR;
+ }
+
+ camSem.Create();
+
+ // mWaitingForSnapshot is true only when we're in the process of capturing
+ if (mWaitingForSnapshot) {
+ ///Register for Video port Disable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ (OMX_EVENTTYPE) OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ camSem);
+
+ ///Disable Capture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+
+ CAMHAL_LOGDA("Waiting for port disable");
+ //Wait for the video port disable event
+ camSem.Wait();
+ CAMHAL_LOGDA("Port disabled");
+ }
+
+ imgRawCaptureData->mNumBufs = num;
+ CAMHAL_LOGDB("RAW Max sensor width = %d", (int)imgRawCaptureData->mWidth);
+ CAMHAL_LOGDB("RAW Max sensor height = %d", (int)imgRawCaptureData->mHeight);
+
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_VIDEO, *imgRawCaptureData);
+
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+ }
+
+ ///Register for Video port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ (OMX_EVENTTYPE) OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ camSem);
+
+ ///Enable Video Capture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoPortIndex,
+ NULL);
+
+ mCaptureBuffersLength = (int)imgRawCaptureData->mBufSize;
+ for ( int index = 0 ; index < imgRawCaptureData->mNumBufs ; index++ ) {
+ OMX_BUFFERHEADERTYPE *pBufferHdr;
+ CAMHAL_LOGDB("OMX_UseBuffer rawCapture address: 0x%x, size = %d ",
+ (unsigned int)bufArr[index].opaque,
+ (int)imgRawCaptureData->mBufSize );
+
+ eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
+ &pBufferHdr,
+ mCameraAdapterParameters.mVideoPortIndex,
+ 0,
+ mCaptureBuffersLength,
+ (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_UseBuffer = 0x%x", eError);
+ }
+
+ GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+ pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+ bufArr[index].index = index;
+ pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufferHdr->nVersion.s.nRevision = 0;
+ pBufferHdr->nVersion.s.nStep = 0;
+ imgRawCaptureData->mBufferHeader[index] = pBufferHdr;
+
+ }
+
+ //Wait for the video port enable event
+ CAMHAL_LOGDA("Waiting for port enable");
+ camSem.Wait();
+ CAMHAL_LOGDA("Port enabled");
+
+ if (NO_ERROR == ret) {
+ ret = setupEXIF();
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error configuring EXIF Buffer %x", ret);
+ }
+ }
+
+ mCapturedFrames = mBurstFrames;
+ mBurstFramesQueued = 0;
+ mCaptureConfigured = true;
+
+ EXIT:
+
+ if (eError != OMX_ErrorNone) {
+ if ( NULL != mErrorNotifier )
+ {
+ mErrorNotifier->errorNotify(eError);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXDccDataSave.cpp b/camera/OMXCameraAdapter/OMXDccDataSave.cpp
new file mode 100644
index 0000000..7547743
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXDccDataSave.cpp
@@ -0,0 +1,361 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXDccDataSave.cpp
+*
+* This file contains functionality for handling DCC data save
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+
+namespace Ti {
+namespace Camera {
+
+// Enables DCC (Dynamic Camera Configuration) extra data on the given OMX
+// port, so the component starts attaching OMX_TI_DccData blobs to buffer
+// headers, and discards any DCC payload captured in a previous session.
+// @param omxHandle  pointer to the OMX component handle to configure
+// @param portIndex  OMX port on which to enable the DCC extra data
+// @return NO_ERROR on success, NO_INIT if OMX_SetConfig fails
+status_t OMXCameraAdapter::initDccFileDataSave(OMX_HANDLETYPE* omxHandle, int portIndex)
+{
+    OMX_CONFIG_EXTRADATATYPE extraDataControl;
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    LOG_FUNCTION_NAME;
+
+    // Request OMX_TI_DccData extra data on the requested port.
+    OMX_INIT_STRUCT_PTR (&extraDataControl, OMX_CONFIG_EXTRADATATYPE);
+    extraDataControl.nPortIndex = portIndex;
+    extraDataControl.eExtraDataType = OMX_TI_DccData;
+    extraDataControl.bEnable = OMX_TRUE;
+
+    eError = OMX_SetConfig(*omxHandle,
+                          ( OMX_INDEXTYPE ) OMX_IndexConfigOtherExtraDataControl,
+                           &extraDataControl);
+
+    if ( OMX_ErrorNone != eError )
+       {
+       CAMHAL_LOGEB("Error while configuring dcc data overwrite extra data 0x%x",
+                    eError);
+
+       ret = NO_INIT;
+       }
+
+    // Drop any stale DCC payload left over from a previous capture session.
+    // NOTE(review): this free is not guarded by mDccDataLock, unlike the
+    // other accessors of mDccData — confirm callers serialize this path.
+    if (mDccData.pData) {
+        free(mDccData.pData);
+        mDccData.pData = NULL;
+    }
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Inspects a returned buffer header for an OMX_TI_DccData extra-data blob
+// and, if present, snapshots it into mDccData (header fields plus a heap
+// copy of the variable-length payload) for a later saveDccFileDataSave().
+// Missing or absent extra data is treated as a non-error (returns NO_ERROR);
+// only a NULL buffer header is rejected with -EINVAL.
+// Holds mDccDataLock for the whole operation.
+status_t OMXCameraAdapter::sniffDccFileDataSave(OMX_BUFFERHEADERTYPE* pBuffHeader)
+{
+    OMX_OTHER_EXTRADATATYPE *extraData;
+    OMX_TI_DCCDATATYPE* dccData;
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mDccDataLock);
+
+    if ( NULL == pBuffHeader ) {
+        CAMHAL_LOGEA("Invalid Buffer header");
+        LOG_FUNCTION_NAME_EXIT;
+        return -EINVAL;
+    }
+
+    extraData = getExtradata(pBuffHeader->pPlatformPrivate,
+            (OMX_EXTRADATATYPE)OMX_TI_DccData);
+
+    if ( NULL != extraData ) {
+        CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
+                     extraData->nSize,
+                     sizeof(OMX_OTHER_EXTRADATATYPE),
+                     extraData->eType,
+                     extraData->nDataSize,
+                     extraData->nPortIndex,
+                     extraData->nVersion);
+    } else {
+        // No DCC extra data on this buffer — nothing to record.
+        CAMHAL_LOGVA("Invalid OMX_TI_DCCDATATYPE");
+        LOG_FUNCTION_NAME_EXIT;
+        return NO_ERROR;
+    }
+
+    dccData = ( OMX_TI_DCCDATATYPE * ) extraData->data;
+
+    if (NULL == dccData) {
+        CAMHAL_LOGVA("OMX_TI_DCCDATATYPE is not found in extra data");
+        LOG_FUNCTION_NAME_EXIT;
+        return NO_ERROR;
+    }
+
+    // Release the payload from any previously sniffed blob before the header
+    // copy below overwrites mDccData.pData.
+    if (mDccData.pData) {
+        free(mDccData.pData);
+    }
+
+    // Copy the fixed-size header portion of the blob into mDccData.
+    memcpy(&mDccData, dccData, sizeof(mDccData));
+
+    // Payload size = total blob size minus offsetof(OMX_TI_DCCDATATYPE, pData),
+    // i.e. the payload is assumed to be stored inline starting at the pData
+    // field — TODO(review): confirm against the OMX_TI_DCCDATATYPE layout.
+    int dccDataSize = (int)dccData->nSize - (int)(&(((OMX_TI_DCCDATATYPE*)0)->pData));
+
+    mDccData.pData = (OMX_PTR)malloc(dccDataSize);
+
+    if (NULL == mDccData.pData) {
+        // Allocation failure is deliberately non-fatal: we just skip this blob.
+        CAMHAL_LOGVA("not enough memory for DCC data");
+        LOG_FUNCTION_NAME_EXIT;
+        return NO_ERROR;
+    }
+
+    // Copy the inline payload (starting at the pData field) into our buffer.
+    memcpy(mDccData.pData, &(dccData->pData), dccDataSize);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Recursively searches the given directory tree for the DCC file matching
+// the current camera (identified by the OMX measurement data in mDccData).
+// 'pDir' must be an open directory stream for 'path', and 'path' must end
+// with "/". To limit stack usage during recursion the same 'path' buffer is
+// reused at every level, so it MUST be a char array long enough for the
+// deepest path (260 should suffice). The caller must close 'pDir'.
+// Returns a stream reopened for modification ("rb+") on the matching DCC
+// file, or NULL if no match was found (or the reopen failed).
+FILE * OMXCameraAdapter::parseDCCsubDir(DIR *pDir, char *path)
+{
+    FILE *pFile;
+    DIR *pSubDir;
+    struct dirent *dirEntry;
+    // Remember the length at entry so 'path' can be truncated back after
+    // each entry is processed (the buffer is shared across recursion levels).
+    int initialPathLength = strlen(path);
+
+    LOG_FUNCTION_NAME;
+
+    /* check each directory entry */
+    while ((dirEntry = readdir(pDir)) != NULL)
+        {
+        // Skip hidden entries as well as "." and "..".
+        if (dirEntry->d_name[0] == '.')
+            continue;
+
+        // NOTE(review): strcat here (and the "/" append below) performs no
+        // bounds check against the caller's 260-byte buffer — a deep tree or
+        // long file names could overflow 'path'. Consider strlcat; confirm
+        // the expected DCC directory layout keeps paths short.
+        strcat(path, dirEntry->d_name);
+        // dirEntry might be sub directory -> check it
+        pSubDir = opendir(path);
+        if (pSubDir) {
+            // dirEntry is sub directory -> parse it
+            strcat(path, "/");
+            pFile = parseDCCsubDir(pSubDir, path);
+            closedir(pSubDir);
+            if (pFile) {
+                // the correct DCC file found!
+                LOG_FUNCTION_NAME_EXIT;
+                return pFile;
+            }
+        } else {
+            // dirEntry is file -> open it
+            pFile = fopen(path, "rb");
+            if (pFile) {
+                // now check if this is the correct DCC file for that camera
+                OMX_U32 dccFileIDword;
+                // NOTE(review): treats nCameraModuleId and the two fields
+                // following it as one contiguous 3-word file ID — verify this
+                // matches the OMX_TI_DCCDATATYPE field layout.
+                OMX_U32 *dccFileDesc = (OMX_U32 *) &mDccData.nCameraModuleId;
+                int i;
+
+                // DCC file ID is 3 4-byte words
+                for (i = 0; i < 3; i++) {
+                    if (fread(&dccFileIDword, sizeof(OMX_U32), 1, pFile) != 1) {
+                        // file too short
+                        break;
+                    }
+                    if (dccFileIDword != dccFileDesc[i]) {
+                        // DCC file ID word i does not match
+                        break;
+                    }
+                }
+
+                // Close the read-only stream; only reopen for writing when
+                // all three ID words matched (i ran to completion).
+                fclose(pFile);
+                if (i == 3) {
+                    // the correct DCC file found!
+                    CAMHAL_LOGDB("DCC file to be updated: %s", path);
+                    // reopen it for modification
+                    pFile = fopen(path, "rb+");
+                    if (!pFile)
+                        CAMHAL_LOGEB("ERROR: DCC file %s failed to open for modification", path);
+                    LOG_FUNCTION_NAME_EXIT;
+                    return pFile;
+                }
+            } else {
+                CAMHAL_LOGEB("ERROR: Failed to open file %s for reading", path);
+            }
+        }
+        // restore original path
+        path[initialPathLength] = '\0';
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    // DCC file not found in this directory tree
+    return NULL;
+}
+
+// Finds the DCC file corresponding to the current camera based on the
+// OMX measurement data, opens it and returns the file stream pointer
+// (NULL on error or if file not found).
+// The folder string dccFolderPath must end with "/"
+// Finds the DCC file corresponding to the current camera based on the
+// OMX measurement data, opens it and returns the file stream pointer
+// (NULL on error or if file not found).
+// The folder string dccFolderPath must end with "/".
+// The 260-byte local buffer is also the scratch path reused by the
+// recursive parseDCCsubDir() walk — see that function's contract.
+FILE * OMXCameraAdapter::fopenCameraDCC(const char *dccFolderPath)
+{
+    FILE *pFile;
+    DIR *pDir;
+    char dccPath[260];
+
+    LOG_FUNCTION_NAME;
+
+    // NOTE(review): unbounded strcpy into a 260-byte buffer — safe only if
+    // dccFolderPath is a short compile-time constant (DCC_PATH); confirm no
+    // longer paths are ever passed in.
+    strcpy(dccPath, dccFolderPath);
+
+    pDir = opendir(dccPath);
+    if (!pDir) {
+        CAMHAL_LOGEB("ERROR: Opening DCC directory %s failed", dccPath);
+        LOG_FUNCTION_NAME_EXIT;
+        return NULL;
+    }
+
+    // Walk the tree; on success dccPath holds the full path of the match.
+    pFile = parseDCCsubDir(pDir, dccPath);
+    closedir(pDir);
+    if (pFile) {
+        CAMHAL_LOGDB("DCC file %s opened for modification", dccPath);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return pFile;
+}
+
+// Positions the DCC file stream pointer to the correct offset within the
+// correct usecase based on the OMX mesurement data. Returns 0 on success
+status_t OMXCameraAdapter::fseekDCCuseCasePos(FILE *pFile)
+{
+ OMX_U32 dccNumUseCases = 0;
+ OMX_U32 dccUseCaseData[3];
+ OMX_U32 i;
+
+ LOG_FUNCTION_NAME;
+
+ // position the file pointer to the DCC use cases section
+ if (fseek(pFile, 80, SEEK_SET)) {
+ CAMHAL_LOGEA("ERROR: Unexpected end of DCC file");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ if (fread(&dccNumUseCases, sizeof(OMX_U32), 1, pFile) != 1 ||
+ dccNumUseCases == 0) {
+ CAMHAL_LOGEA("ERROR: DCC file contains 0 use cases");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ for (i = 0; i < dccNumUseCases; i++) {
+ if (fread(dccUseCaseData, sizeof(OMX_U32), 3, pFile) != 3) {
+ CAMHAL_LOGEA("ERROR: Unexpected end of DCC file");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ if (dccUseCaseData[0] == mDccData.nUseCaseId) {
+ // DCC use case match!
+ break;
+ }
+ }
+
+ if (i == dccNumUseCases) {
+ CAMHAL_LOGEB("ERROR: Use case ID %lu not found in DCC file", mDccData.nUseCaseId);
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ // dccUseCaseData[1] is the offset to the beginning of the actual use case
+ // from the beginning of the file
+ // mDccData.nOffset is the offset within the actual use case (from the
+ // beginning of the use case to the data to be modified)
+
+ if (fseek(pFile,dccUseCaseData[1] + mDccData.nOffset, SEEK_SET ))
+ {
+ CAMHAL_LOGEA("ERROR: Error setting the correct offset");
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+// Writes the DCC payload captured by sniffDccFileDataSave() back into the
+// matching on-disk DCC file (located via fopenCameraDCC / positioned via
+// fseekDCCuseCasePos). A no-op when no payload has been sniffed. All
+// failures are logged but the function still returns NO_ERROR — this is a
+// best-effort persistence path. Holds mDccDataLock throughout.
+status_t OMXCameraAdapter::saveDccFileDataSave()
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mDccDataLock);
+
+    if (mDccData.pData)
+        {
+        FILE *fd = fopenCameraDCC(DCC_PATH);
+
+        if (fd)
+            {
+            if (!fseekDCCuseCasePos(fd))
+                {
+                // Payload size mirrors the offsetof-style computation used
+                // when the blob was captured in sniffDccFileDataSave().
+                int dccDataSize = (int)mDccData.nSize - (int)(&(((OMX_TI_DCCDATATYPE*)0)->pData));
+
+                if (fwrite(mDccData.pData, dccDataSize, 1, fd) != 1)
+                    {
+                    CAMHAL_LOGEA("ERROR: Writing to DCC file failed");
+                    }
+                else
+                    {
+                    CAMHAL_LOGDA("DCC file successfully updated");
+                    }
+                }
+            fclose(fd);
+            }
+        else
+            {
+            CAMHAL_LOGEA("ERROR: Correct DCC file not found or failed to open for modification");
+            }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+// Releases the heap copy of the sniffed DCC payload (if any) and resets
+// mDccData.pData so subsequent sniff/save calls start clean.
+// Always returns NO_ERROR. Holds mDccDataLock for the free.
+status_t OMXCameraAdapter::closeDccFileDataSave()
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    android::AutoMutex lock(mDccDataLock);
+
+    if (mDccData.pData) {
+        free(mDccData.pData);
+        mDccData.pData = NULL;
+    }
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXDefaults.cpp b/camera/OMXCameraAdapter/OMXDefaults.cpp
index aff38d1..2928573 100644
--- a/camera/OMXCameraAdapter/OMXDefaults.cpp
+++ b/camera/OMXCameraAdapter/OMXDefaults.cpp
@@ -24,10 +24,8 @@
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
-namespace android {
-
-#undef LOG_TAG
-#define LOG_TAG "CameraHAL"
+namespace Ti {
+namespace Camera {
#define __STRINGIFY(s) __STRING(s)
@@ -42,42 +40,44 @@ const char OMXCameraAdapter::DEFAULT_EXPOSURE_MODE[] = "auto";
const char OMXCameraAdapter::DEFAULT_FLASH_MODE[] = "off";
const char OMXCameraAdapter::DEFAULT_FOCUS_MODE_PREFERRED[] = "auto";
const char OMXCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity";
-const char OMXCameraAdapter::DEFAULT_FRAMERATE_RANGE_IMAGE[] = "15000,30000";
-const char OMXCameraAdapter::DEFAULT_FRAMERATE_RANGE_VIDEO[]="24000,30000";
const char OMXCameraAdapter::DEFAULT_IPP[] = "ldc-nsf";
-const char OMXCameraAdapter::DEFAULT_GBCE[] = "disable";
const char OMXCameraAdapter::DEFAULT_ISO_MODE[] = "auto";
const char OMXCameraAdapter::DEFAULT_JPEG_QUALITY[] = "95";
const char OMXCameraAdapter::DEFAULT_THUMBNAIL_QUALITY[] = "60";
const char OMXCameraAdapter::DEFAULT_THUMBNAIL_SIZE[] = "160x120";
const char OMXCameraAdapter::DEFAULT_PICTURE_FORMAT[] = "jpeg";
+const char OMXCameraAdapter::DEFAULT_S3D_PICTURE_LAYOUT[] = "tb-full";
const char OMXCameraAdapter::DEFAULT_PICTURE_SIZE[] = "320x240";
+const char OMXCameraAdapter::DEFAULT_PICTURE_SS_SIZE[] = "640x240";
+const char OMXCameraAdapter::DEFAULT_PICTURE_TB_SIZE[] = "320x480";
const char OMXCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv420sp";
const char OMXCameraAdapter::DEFAULT_FRAMERATE[] = "30";
+const char OMXCameraAdapter::DEFAULT_S3D_PREVIEW_LAYOUT[] = "tb-subsampled";
const char OMXCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "640x480";
+const char OMXCameraAdapter::DEFAULT_PREVIEW_SS_SIZE[] = "1280x480";
+const char OMXCameraAdapter::DEFAULT_PREVIEW_TB_SIZE[] = "640x960";
const char OMXCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6";
const char OMXCameraAdapter::DEFAULT_NUM_PIC_BUFS[] = "1";
-const char OMXCameraAdapter::DEFAULT_MAX_FOCUS_AREAS[] = "1";
const char OMXCameraAdapter::DEFAULT_SATURATION[] = "100";
const char OMXCameraAdapter::DEFAULT_SCENE_MODE[] = "auto";
const char OMXCameraAdapter::DEFAULT_SHARPNESS[] = "100";
-const char OMXCameraAdapter::DEFAULT_VSTAB[] = "false";
-const char OMXCameraAdapter::DEFAULT_VSTAB_SUPPORTED[] = "true";
+const char * OMXCameraAdapter::DEFAULT_VSTAB = android::CameraParameters::FALSE;
+const char * OMXCameraAdapter::DEFAULT_VNF = android::CameraParameters::FALSE;
const char OMXCameraAdapter::DEFAULT_WB[] = "auto";
const char OMXCameraAdapter::DEFAULT_ZOOM[] = "0";
const char OMXCameraAdapter::DEFAULT_MAX_FD_HW_FACES[] = __STRINGIFY(MAX_NUM_FACES_SUPPORTED);
const char OMXCameraAdapter::DEFAULT_MAX_FD_SW_FACES[] = "0";
-const char OMXCameraAdapter::DEFAULT_FOCAL_LENGTH_PRIMARY[] = "3.43";
-const char OMXCameraAdapter::DEFAULT_FOCAL_LENGTH_SECONDARY[] = "1.95";
const char OMXCameraAdapter::DEFAULT_HOR_ANGLE[] = "54.8";
const char OMXCameraAdapter::DEFAULT_VER_ANGLE[] = "42.5";
-const char OMXCameraAdapter::DEFAULT_AE_LOCK[] = "false";
-const char OMXCameraAdapter::DEFAULT_AWB_LOCK[] = "false";
-const char OMXCameraAdapter::DEFAULT_MAX_NUM_METERING_AREAS[] = "0";
-const char OMXCameraAdapter::DEFAULT_LOCK_SUPPORTED[] = "true";
-const char OMXCameraAdapter::DEFAULT_LOCK_UNSUPPORTED[] = "false";
-const char OMXCameraAdapter::DEFAULT_VIDEO_SNAPSHOT_SUPPORTED[] = "true";
+const char * OMXCameraAdapter::DEFAULT_AE_LOCK = android::CameraParameters::FALSE;
+const char * OMXCameraAdapter::DEFAULT_AWB_LOCK = android::CameraParameters::FALSE;
const char OMXCameraAdapter::DEFAULT_VIDEO_SIZE[] = "1920x1080";
-const char OMXCameraAdapter::DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "1920x1080";
-};
+const char OMXCameraAdapter::DEFAULT_SENSOR_ORIENTATION[] = "0";
+const char OMXCameraAdapter::DEFAULT_AUTOCONVERGENCE_MODE[] = "frame";
+const char OMXCameraAdapter::DEFAULT_MANUAL_CONVERGENCE[] = "0";
+const char * OMXCameraAdapter::DEFAULT_MECHANICAL_MISALIGNMENT_CORRECTION_MODE = android::CameraParameters::TRUE;
+const char OMXCameraAdapter::DEFAULT_EXIF_MAKE[] = "default_make";
+const char OMXCameraAdapter::DEFAULT_EXIF_MODEL[] = "default_model";
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXExif.cpp b/camera/OMXCameraAdapter/OMXExif.cpp
index 76d94bd..cbf7c1c 100644
--- a/camera/OMXCameraAdapter/OMXExif.cpp
+++ b/camera/OMXCameraAdapter/OMXExif.cpp
@@ -21,17 +21,14 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include <math.h>
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersEXIF(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -40,7 +37,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
LOG_FUNCTION_NAME;
- if( ( valstr = params.get(CameraParameters::KEY_GPS_LATITUDE) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_LATITUDE) ) != NULL )
{
gpsPos = strtod(valstr, NULL);
@@ -72,7 +69,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mLatValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_LONGITUDE) ) != NULL )
{
gpsPos = strtod(valstr, NULL);
@@ -104,7 +101,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mLongValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_ALTITUDE) ) != NULL )
{
gpsPos = strtod(valstr, NULL);
mEXIFData.mGPSData.mAltitude = floor(fabs(gpsPos));
@@ -120,7 +117,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mAltitudeValid= false;
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
{
long gpsTimestamp = strtol(valstr, NULL, 10);
struct tm *timeinfo = gmtime( ( time_t * ) & (gpsTimestamp) );
@@ -141,7 +138,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mTimeStampValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP) ) != NULL )
{
long gpsDatestamp = strtol(valstr, NULL, 10);
struct tm *timeinfo = gmtime( ( time_t * ) & (gpsDatestamp) );
@@ -160,7 +157,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mDatestampValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD) ) != NULL )
{
strncpy(mEXIFData.mGPSData.mProcMethod, valstr, GPS_PROCESSING_SIZE-1);
mEXIFData.mGPSData.mProcMethodValid = true;
@@ -213,7 +210,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
}
- if( ( valstr = params.get(CameraParameters::KEY_FOCAL_LENGTH) ) != NULL ) {
+ if( ( valstr = params.get(android::CameraParameters::KEY_FOCAL_LENGTH) ) != NULL ) {
CAMHAL_LOGVB("EXIF Focal length: %s", valstr);
ExifElementsTable::stringToRational(valstr,
&mEXIFData.mFocalNum,
@@ -235,16 +232,23 @@ status_t OMXCameraAdapter::setupEXIF()
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
OMX_TI_CONFIG_EXIF_TAGS *exifTags;
+ unsigned char *startPtr = NULL;
unsigned char *sharedPtr = NULL;
struct timeval sTv;
struct tm *pTime;
OMXCameraPortParameters * capData = NULL;
MemoryManager memMgr;
- OMX_U8** memmgr_buf_array = NULL;
+ CameraBuffer *memmgr_buf_array;
int buf_size = 0;
LOG_FUNCTION_NAME;
+ ret = memMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
+
sharedBuffer.pSharedBuff = NULL;
capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
@@ -272,22 +276,23 @@ status_t OMXCameraAdapter::setupEXIF()
buf_size = ((buf_size+4095)/4096)*4096;
sharedBuffer.nSharedBuffSize = buf_size;
- memmgr_buf_array = (OMX_U8 **)memMgr.allocateBuffer(0, 0, NULL, buf_size, 1);
- sharedBuffer.pSharedBuff = ( OMX_U8 * ) memmgr_buf_array[0];
+ memmgr_buf_array = memMgr.allocateBufferList(0, 0, NULL, buf_size, 1);
+ sharedBuffer.pSharedBuff = (OMX_U8*)camera_buffer_get_omx_ptr(&memmgr_buf_array[0]);
+ startPtr = ( OMX_U8 * ) memmgr_buf_array[0].opaque;
- if ( NULL == sharedBuffer.pSharedBuff )
+ if ( NULL == startPtr)
{
CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
ret = -1;
}
//Extra data begins right after the EXIF configuration structure.
- sharedPtr = sharedBuffer.pSharedBuff + sizeof(OMX_TI_CONFIG_EXIF_TAGS);
+ sharedPtr = startPtr + sizeof(OMX_TI_CONFIG_EXIF_TAGS);
}
if ( NO_ERROR == ret )
{
- exifTags = ( OMX_TI_CONFIG_EXIF_TAGS * ) sharedBuffer.pSharedBuff;
+ exifTags = ( OMX_TI_CONFIG_EXIF_TAGS * ) startPtr;
OMX_INIT_STRUCT_PTR (exifTags, OMX_TI_CONFIG_EXIF_TAGS);
exifTags->nPortIndex = mCameraAdapterParameters.mImagePortIndex;
@@ -310,7 +315,7 @@ status_t OMXCameraAdapter::setupEXIF()
mEXIFData.mModel,
EXIF_MODEL_SIZE - 1);
- exifTags->pModelBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pModelBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
exifTags->ulModelBuffSizeBytes = strlen((char*)sharedPtr) + 1;
sharedPtr += EXIF_MODEL_SIZE;
exifTags->eStatusModel = OMX_TI_TagUpdated;
@@ -323,7 +328,7 @@ status_t OMXCameraAdapter::setupEXIF()
mEXIFData.mMake,
EXIF_MAKE_SIZE - 1);
- exifTags->pMakeBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pMakeBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
exifTags->ulMakeBuffSizeBytes = strlen((char*)sharedPtr) + 1;
sharedPtr += EXIF_MAKE_SIZE;
exifTags->eStatusMake = OMX_TI_TagUpdated;
@@ -357,7 +362,7 @@ status_t OMXCameraAdapter::setupEXIF()
pTime->tm_sec );
}
- exifTags->pDateTimeBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pDateTimeBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
sharedPtr += EXIF_DATE_TIME_SIZE;
exifTags->ulDateTimeBuffSizeBytes = EXIF_DATE_TIME_SIZE;
exifTags->eStatusDateTime = OMX_TI_TagUpdated;
@@ -435,7 +440,7 @@ status_t OMXCameraAdapter::setupEXIF()
{
memcpy(sharedPtr, mEXIFData.mGPSData.mMapDatum, GPS_MAPDATUM_SIZE);
- exifTags->pGpsMapDatumBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pGpsMapDatumBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
exifTags->ulGpsMapDatumBuffSizeBytes = GPS_MAPDATUM_SIZE;
exifTags->eStatusGpsMapDatum = OMX_TI_TagUpdated;
sharedPtr += GPS_MAPDATUM_SIZE;
@@ -444,7 +449,7 @@ status_t OMXCameraAdapter::setupEXIF()
if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsProcessingMethod ) &&
( mEXIFData.mGPSData.mProcMethodValid ) )
{
- exifTags->pGpsProcessingMethodBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->pGpsProcessingMethodBuff = ( OMX_S8 * ) ( sharedPtr - startPtr );
memcpy(sharedPtr, ExifAsciiPrefix, sizeof(ExifAsciiPrefix));
sharedPtr += sizeof(ExifAsciiPrefix);
@@ -500,7 +505,7 @@ status_t OMXCameraAdapter::setupEXIF()
if ( NULL != memmgr_buf_array )
{
- memMgr.freeBuffer(memmgr_buf_array);
+ memMgr.freeBufferList(memmgr_buf_array);
}
LOG_FUNCTION_NAME_EXIT;
@@ -809,7 +814,7 @@ status_t OMXCameraAdapter::convertGPSCoord(double coord,
if ( coord == 0 ) {
- ALOGE("Invalid GPS coordinate");
+ CAMHAL_LOGE("Invalid GPS coordinate");
return -EINVAL;
}
@@ -836,4 +841,5 @@ status_t OMXCameraAdapter::convertGPSCoord(double coord,
return NO_ERROR;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXFD.cpp b/camera/OMXCameraAdapter/OMXFD.cpp
index 15f8d05..1a482b2 100644
--- a/camera/OMXCameraAdapter/OMXFD.cpp
+++ b/camera/OMXCameraAdapter/OMXFD.cpp
@@ -21,25 +21,15 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
-#define FACE_DETECTION_THRESHOLD 80
-
-// constants used for face smooth filtering
-static const int HorizontalFilterThreshold = 40;
-static const int VerticalFilterThreshold = 40;
-static const int HorizontalFaceSizeThreshold = 30;
-static const int VerticalFaceSizeThreshold = 30;
+namespace Ti {
+namespace Camera {
+const uint32_t OMXCameraAdapter::FACE_DETECTION_THRESHOLD = 80;
-namespace android {
-
-status_t OMXCameraAdapter::setParametersFD(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersFD(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -55,9 +45,9 @@ status_t OMXCameraAdapter::startFaceDetection()
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
- ret = setFaceDetection(true, mDeviceOrientation);
+ ret = setFaceDetection(true, mFaceOrientation);
if (ret != NO_ERROR) {
goto out;
}
@@ -82,17 +72,21 @@ status_t OMXCameraAdapter::stopFaceDetection()
BaseCameraAdapter::AdapterState state;
BaseCameraAdapter::getState(state);
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
- ret = setFaceDetection(false, mDeviceOrientation);
+ ret = setFaceDetection(false, mFaceOrientation);
if (ret != NO_ERROR) {
goto out;
}
- // Reset 3A settings
- ret = setParameters3A(mParams, state);
- if (ret != NO_ERROR) {
- goto out;
+ if ( mFaceDetectionRunning ) {
+ //Enable region priority and disable face priority for AF
+ setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, true);
+ setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , false);
+
+ //Enable Region priority and disable Face priority
+ setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, true);
+ setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, false);
}
if (mPending3Asettings) {
@@ -106,7 +100,7 @@ status_t OMXCameraAdapter::stopFaceDetection()
void OMXCameraAdapter::pauseFaceDetection(bool pause)
{
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
// pausing will only take affect if fd is already running
if (mFaceDetectionRunning) {
mFaceDetectionPaused = pause;
@@ -114,6 +108,22 @@ void OMXCameraAdapter::pauseFaceDetection(bool pause)
}
}
+status_t OMXCameraAdapter::setFaceDetectionOrientation(OMX_U32 orientation)
+{
+ status_t ret = NO_ERROR;
+
+ android::AutoMutex lock(mFaceDetectionLock);
+
+ mFaceOrientation = orientation;
+
+ if (mFaceDetectionRunning) {
+ // restart face detection with new rotation
+ setFaceDetection(true, orientation);
+ }
+
+ return ret;
+}
+
status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
{
status_t ret = NO_ERROR;
@@ -162,7 +172,9 @@ status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
if ( NO_ERROR == ret )
{
- ret = setExtraData(enable, mCameraAdapterParameters.mPrevPortIndex, OMX_FaceDetection);
+ // TODO(XXX): Should enable/disable FD extra data separately
+ // on each port.
+ ret = setExtraData(enable, OMX_ALL, OMX_FaceDetection);
if ( NO_ERROR != ret )
{
@@ -185,18 +197,15 @@ status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
return ret;
}
-status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
- sp<CameraFDResult> &result,
- size_t previewWidth,
- size_t previewHeight)
+status_t OMXCameraAdapter::createPreviewMetadata(OMX_BUFFERHEADERTYPE* pBuffHeader,
+ android::sp<CameraMetadataResult> &result,
+ size_t previewWidth,
+ size_t previewHeight)
{
status_t ret = NO_ERROR;
- OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_TI_FACERESULT *faceResult;
- OMX_OTHER_EXTRADATATYPE *extraData;
- OMX_FACEDETECTIONTYPE *faceData;
- OMX_TI_PLATFORMPRIVATE *platformPrivate;
- camera_frame_metadata_t *faces;
+ status_t faceRet = NO_ERROR;
+ status_t metaRet = NO_ERROR;
+ OMX_FACEDETECTIONTYPE *faceData = NULL;
LOG_FUNCTION_NAME;
@@ -210,78 +219,78 @@ status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
return-EINVAL;
}
- platformPrivate = (OMX_TI_PLATFORMPRIVATE *) (pBuffHeader->pPlatformPrivate);
- if ( NULL != platformPrivate ) {
- if ( sizeof(OMX_TI_PLATFORMPRIVATE) == platformPrivate->nSize ) {
- CAMHAL_LOGVB("Size = %d, sizeof = %d, pAuxBuf = 0x%x, pAuxBufSize= %d, pMetaDataBufer = 0x%x, nMetaDataSize = %d",
- platformPrivate->nSize,
- sizeof(OMX_TI_PLATFORMPRIVATE),
- platformPrivate->pAuxBuf1,
- platformPrivate->pAuxBufSize1,
- platformPrivate->pMetaDataBuffer,
- platformPrivate->nMetaDataSize);
+ if ( mFaceDetectionRunning && !mFaceDetectionPaused ) {
+ OMX_OTHER_EXTRADATATYPE *extraData;
+
+ extraData = getExtradata(pBuffHeader->pPlatformPrivate,
+ (OMX_EXTRADATATYPE)OMX_FaceDetection);
+
+ if ( NULL != extraData ) {
+ CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
+ extraData->nSize,
+ sizeof(OMX_OTHER_EXTRADATATYPE),
+ extraData->eType,
+ extraData->nDataSize,
+ extraData->nPortIndex,
+ extraData->nVersion);
} else {
- CAMHAL_LOGDB("OMX_TI_PLATFORMPRIVATE size mismatch: expected = %d, received = %d",
- ( unsigned int ) sizeof(OMX_TI_PLATFORMPRIVATE),
- ( unsigned int ) platformPrivate->nSize);
+ CAMHAL_LOGD("FD extra data not found!");
return -EINVAL;
}
- } else {
- CAMHAL_LOGDA("Invalid OMX_TI_PLATFORMPRIVATE");
- return-EINVAL;
- }
-
- if ( 0 >= platformPrivate->nMetaDataSize ) {
- CAMHAL_LOGDB("OMX_TI_PLATFORMPRIVATE nMetaDataSize is size is %d",
- ( unsigned int ) platformPrivate->nMetaDataSize);
- return -EINVAL;
+ faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
+ if ( NULL != faceData ) {
+ if ( sizeof(OMX_FACEDETECTIONTYPE) == faceData->nSize ) {
+ CAMHAL_LOGVB("Faces detected %d",
+ faceData->ulFaceCount,
+ faceData->nSize,
+ sizeof(OMX_FACEDETECTIONTYPE),
+ faceData->eCameraView,
+ faceData->nPortIndex,
+ faceData->nVersion);
+ } else {
+ CAMHAL_LOGEB("OMX_FACEDETECTIONTYPE size mismatch: expected = %d, received = %d",
+ ( unsigned int ) sizeof(OMX_FACEDETECTIONTYPE),
+ ( unsigned int ) faceData->nSize);
+ return -EINVAL;
+ }
+ } else {
+ CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE");
+ return -EINVAL;
+ }
}
- extraData = getExtradata((OMX_OTHER_EXTRADATATYPE *) (platformPrivate->pMetaDataBuffer),
- platformPrivate->nMetaDataSize, (OMX_EXTRADATATYPE)OMX_FaceDetection);
-
- if ( NULL != extraData ) {
- CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
- extraData->nSize,
- sizeof(OMX_OTHER_EXTRADATATYPE),
- extraData->eType,
- extraData->nDataSize,
- extraData->nPortIndex,
- extraData->nVersion);
- } else {
- CAMHAL_LOGDA("Invalid OMX_OTHER_EXTRADATATYPE");
- return -EINVAL;
+ result = new (std::nothrow) CameraMetadataResult;
+ if(NULL == result.get()) {
+ ret = NO_MEMORY;
+ return ret;
}
- faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
- if ( NULL != faceData ) {
- if ( sizeof(OMX_FACEDETECTIONTYPE) == faceData->nSize ) {
- CAMHAL_LOGVB("Faces detected %d",
- faceData->ulFaceCount,
- faceData->nSize,
- sizeof(OMX_FACEDETECTIONTYPE),
- faceData->eCameraView,
- faceData->nPortIndex,
- faceData->nVersion);
- } else {
- CAMHAL_LOGDB("OMX_FACEDETECTIONTYPE size mismatch: expected = %d, received = %d",
- ( unsigned int ) sizeof(OMX_FACEDETECTIONTYPE),
- ( unsigned int ) faceData->nSize);
- return -EINVAL;
+ //Encode face coordinates
+ faceRet = encodeFaceCoordinates(faceData, result->getMetadataResult()
+ , previewWidth, previewHeight);
+ if ((NO_ERROR == faceRet) || (NOT_ENOUGH_DATA == faceRet)) {
+ // Ignore harmless errors (no error and no update) and go ahead and encode
+ // the preview meta data
+ metaRet = encodePreviewMetadata(result->getMetadataResult()
+ , pBuffHeader->pPlatformPrivate);
+ if ( (NO_ERROR != metaRet) && (NOT_ENOUGH_DATA != metaRet) ) {
+ // Some 'real' error occurred during preview meta data encod, clear metadata
+ // result and return correct error code
+ result.clear();
+ ret = metaRet;
}
} else {
- CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE");
- return -EINVAL;
+ //Some real error occurred during face encoding, clear metadata result
+ // and return correct error code
+ result.clear();
+ ret = faceRet;
}
- ret = encodeFaceCoordinates(faceData, &faces, previewWidth, previewHeight);
-
- if ( NO_ERROR == ret ) {
- result = new CameraFDResult(faces);
- } else {
+ if((NOT_ENOUGH_DATA == faceRet) && (NOT_ENOUGH_DATA == metaRet)) {
+ //No point sending the callback if nothing is changed
result.clear();
- result = NULL;
+ ret = faceRet;
}
LOG_FUNCTION_NAME_EXIT;
@@ -290,40 +299,38 @@ status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
}
status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData,
- camera_frame_metadata_t **pFaces,
+ camera_frame_metadata_t *metadataResult,
size_t previewWidth,
size_t previewHeight)
{
status_t ret = NO_ERROR;
camera_face_t *faces;
- camera_frame_metadata_t *faceResult;
size_t hRange, vRange;
double tmp;
+ bool faceArrayChanged = false;
LOG_FUNCTION_NAME;
- if ( NULL == faceData ) {
- CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE parameter");
- return EINVAL;
- }
-
- LOG_FUNCTION_NAME
+ hRange = CameraMetadataResult::RIGHT - CameraMetadataResult::LEFT;
+ vRange = CameraMetadataResult::BOTTOM - CameraMetadataResult::TOP;
- hRange = CameraFDResult::RIGHT - CameraFDResult::LEFT;
- vRange = CameraFDResult::BOTTOM - CameraFDResult::TOP;
+ android::AutoMutex lock(mFaceDetectionLock);
- faceResult = ( camera_frame_metadata_t * ) malloc(sizeof(camera_frame_metadata_t));
- if ( NULL == faceResult ) {
- return -ENOMEM;
+ // Avoid memory leak if called twice on same CameraMetadataResult
+ if ( (0 < metadataResult->number_of_faces) && (NULL != metadataResult->faces) ) {
+ free(metadataResult->faces);
+ metadataResult->number_of_faces = 0;
+ metadataResult->faces = NULL;
}
- if ( 0 < faceData->ulFaceCount ) {
+ if ( (NULL != faceData) && (0 < faceData->ulFaceCount) ) {
int orient_mult;
int trans_left, trans_top, trans_right, trans_bot;
faces = ( camera_face_t * ) malloc(sizeof(camera_face_t)*faceData->ulFaceCount);
if ( NULL == faces ) {
- return -ENOMEM;
+ ret = NO_MEMORY;
+ goto out;
}
/**
@@ -359,7 +366,7 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
/ * (r, b)
*/
- if (mDeviceOrientation == 180) {
+ if (mFaceOrientation == 180) {
orient_mult = -1;
trans_left = 2; // right is now left
trans_top = 3; // bottom is now top
@@ -386,7 +393,7 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
if(faceData->tFacePosition[j].nScore <= FACE_DETECTION_THRESHOLD)
continue;
- if (mDeviceOrientation == 180) {
+ if (mFaceOrientation == 180) {
// from sensor pov, the left pos is the right corner of the face in pov of frame
nLeft = faceData->tFacePosition[j].nLeft + faceData->tFacePosition[j].nWidth;
nTop = faceData->tFacePosition[j].nTop + faceData->tFacePosition[j].nHeight;
@@ -417,20 +424,21 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
faces[i].score = faceData->tFacePosition[j].nScore;
faces[i].id = 0;
- faces[i].left_eye[0] = CameraFDResult::INVALID_DATA;
- faces[i].left_eye[1] = CameraFDResult::INVALID_DATA;
- faces[i].right_eye[0] = CameraFDResult::INVALID_DATA;
- faces[i].right_eye[1] = CameraFDResult::INVALID_DATA;
- faces[i].mouth[0] = CameraFDResult::INVALID_DATA;
- faces[i].mouth[1] = CameraFDResult::INVALID_DATA;
+ faces[i].left_eye[0] = CameraMetadataResult::INVALID_DATA;
+ faces[i].left_eye[1] = CameraMetadataResult::INVALID_DATA;
+ faces[i].right_eye[0] = CameraMetadataResult::INVALID_DATA;
+ faces[i].right_eye[1] = CameraMetadataResult::INVALID_DATA;
+ faces[i].mouth[0] = CameraMetadataResult::INVALID_DATA;
+ faces[i].mouth[1] = CameraMetadataResult::INVALID_DATA;
i++;
}
- faceResult->number_of_faces = i;
- faceResult->faces = faces;
+ metadataResult->number_of_faces = i;
+ metadataResult->faces = faces;
- for (int i = 0; i < faceResult->number_of_faces; i++)
+ for (int i = 0; i < metadataResult->number_of_faces; i++)
{
+ bool faceChanged = true;
int centerX = (faces[i].rect[trans_left] + faces[i].rect[trans_right] ) / 2;
int centerY = (faces[i].rect[trans_top] + faces[i].rect[trans_bot] ) / 2;
@@ -448,43 +456,49 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
int tempSizeY = (faceDetectionLastOutput[j].rect[trans_bot] -
faceDetectionLastOutput[j].rect[trans_top] ) ;
- if ( (abs(tempCenterX - centerX) < HorizontalFilterThreshold) &&
- (abs(tempCenterY - centerY) < VerticalFilterThreshold) )
- {
- // Found Face. It did not move too far.
- // Now check size of rectangle compare to last output
- if ( (abs (tempSizeX -sizeX) < HorizontalFaceSizeThreshold) &&
- (abs (tempSizeY -sizeY) < VerticalFaceSizeThreshold) )
- {
- // Rectangle is almost same as last time
- // Output exactly what was done for this face last time.
- faces[i] = faceDetectionLastOutput[j];
- }
- else
- {
- // TODO(XXX): Rectangle size changed but position is same.
- // Possibly we can apply just positional correctness.
+ if ( ( tempCenterX == centerX) &&
+ ( tempCenterY == centerY) ) {
+ // Found Face.
+ // Now check size of rectangle
+ // compare to last output.
+ if ( ( tempSizeX == sizeX ) &&
+ ( tempSizeY == sizeY ) ) {
+ faceChanged = false;
}
}
}
+ // Send face detection data after some face coordinate changes
+ if (faceChanged) {
+ faceArrayChanged = true;
+ }
}
// Save this output for next iteration
- for (int i = 0; i < faceResult->number_of_faces; i++)
+ for (int i = 0; i < metadataResult->number_of_faces; i++)
{
faceDetectionLastOutput[i] = faces[i];
}
- faceDetectionNumFacesLastOutput = faceResult->number_of_faces;
} else {
- faceResult->number_of_faces = 0;
- faceResult->faces = NULL;
+ metadataResult->number_of_faces = 0;
+ metadataResult->faces = NULL;
}
- *pFaces = faceResult;
+ // Send face detection data after face count changes
+ if (faceDetectionNumFacesLastOutput != metadataResult->number_of_faces) {
+ faceArrayChanged = true;
+ }
+ faceDetectionNumFacesLastOutput = metadataResult->number_of_faces;
+
+ if ( !faceArrayChanged ) {
+ ret = NOT_ENOUGH_DATA;
+ }
LOG_FUNCTION_NAME_EXIT;
+out:
+
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXFocus.cpp b/camera/OMXCameraAdapter/OMXFocus.cpp
index f584184..65577a5 100644
--- a/camera/OMXCameraAdapter/OMXFocus.cpp
+++ b/camera/OMXCameraAdapter/OMXFocus.cpp
@@ -22,10 +22,6 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
#include "ErrorUtils.h"
@@ -34,23 +30,26 @@
#define AF_IMAGE_CALLBACK_TIMEOUT 5000000 //5 seconds timeout
#define AF_VIDEO_CALLBACK_TIMEOUT 2800000 //2.8 seconds timeout
-namespace android {
+namespace Ti {
+namespace Camera {
+
+const nsecs_t OMXCameraAdapter::CANCEL_AF_TIMEOUT = seconds_to_nanoseconds(1);
-status_t OMXCameraAdapter::setParametersFocus(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersFocus(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
const char *str = NULL;
- Vector< sp<CameraArea> > tempAreas;
+ android::Vector<android::sp<CameraArea> > tempAreas;
size_t MAX_FOCUS_AREAS;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mFocusAreasLock);
+ android::AutoMutex lock(mFocusAreasLock);
- str = params.get(CameraParameters::KEY_FOCUS_AREAS);
+ str = params.get(android::CameraParameters::KEY_FOCUS_AREAS);
- MAX_FOCUS_AREAS = atoi(params.get(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS));
+ MAX_FOCUS_AREAS = atoi(params.get(android::CameraParameters::KEY_MAX_NUM_FOCUS_AREAS));
if ( NULL != str ) {
ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
@@ -72,7 +71,7 @@ status_t OMXCameraAdapter::setParametersFocus(const CameraParameters &params,
}
}
- LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -84,6 +83,7 @@ status_t OMXCameraAdapter::doAutoFocus()
OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl;
OMX_PARAM_FOCUSSTATUSTYPE focusStatus;
OMX_CONFIG_BOOLEANTYPE bOMX;
+ CameraAdapter::AdapterState state;
nsecs_t timeout = 0;
LOG_FUNCTION_NAME;
@@ -102,18 +102,23 @@ status_t OMXCameraAdapter::doAutoFocus()
return NO_ERROR;
}
-
if( ((AF_ACTIVE & getState()) != AF_ACTIVE) && ((AF_ACTIVE & getNextState()) != AF_ACTIVE) ) {
CAMHAL_LOGDA("Auto focus got canceled before doAutoFocus could be called");
return NO_ERROR;
}
+ // AF when fixed focus modes are set should be a no-op.
+ if ( ( mParameters3A.Focus == OMX_IMAGE_FocusControlOff ) ||
+ ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) ||
+ ( mParameters3A.Focus == OMX_IMAGE_FocusControlHyperfocal ) ) {
+ returnFocusStatus(true);
+ return NO_ERROR;
+ }
+
OMX_INIT_STRUCT_PTR (&focusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
-#ifndef OMAP_TUNA
// If the app calls autoFocus, the camera will stop sending face callbacks.
pauseFaceDetection(true);
-#endif
// This is needed for applying FOCUS_REGION correctly
if ( (!mFocusAreas.isEmpty()) && (!mFocusAreas.itemAt(0)->isZeroArea()))
@@ -154,8 +159,7 @@ status_t OMXCameraAdapter::doAutoFocus()
( focusStatus.eFocusStatus == OMX_FocusStatusRequest ||
focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach ||
focusStatus.eFocusStatus == OMX_FocusStatusLost ) ) ||
- (mParameters3A.Focus != (OMX_IMAGE_FOCUSCONTROLTYPE)OMX_IMAGE_FocusControlAuto) )
- {
+ (mParameters3A.Focus != (OMX_IMAGE_FOCUSCONTROLTYPE)OMX_IMAGE_FocusControlAuto) ) {
OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
bOMX.bEnabled = OMX_TRUE;
@@ -163,6 +167,12 @@ status_t OMXCameraAdapter::doAutoFocus()
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
(OMX_INDEXTYPE)OMX_TI_IndexConfigAutofocusEnable,
&bOMX);
+ if ( OMX_ErrorNone != eError ) {
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ {
+ android::AutoMutex lock(mDoAFMutex);
// force AF, Ducati will take care of whether CAF
// or AF will be performed, depending on light conditions
@@ -172,29 +182,31 @@ status_t OMXCameraAdapter::doAutoFocus()
focusControl.eFocusControl = OMX_IMAGE_FocusControlAutoLock;
}
- if ( focusControl.eFocusControl != OMX_IMAGE_FocusControlAuto )
- {
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigFocusControl,
&focusControl);
+
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while starting focus 0x%x", eError);
+ return INVALID_OPERATION;
+ } else {
+ CAMHAL_LOGDA("Autofocus started successfully");
}
- if ( OMX_ErrorNone != eError ) {
- CAMHAL_LOGEB("Error while starting focus 0x%x", eError);
- return INVALID_OPERATION;
- } else {
- CAMHAL_LOGDA("Autofocus started successfully");
- }
+ // No need to wait if preview is about to stop
+ getNextState(state);
+ if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE ) {
+ return NO_ERROR;
+ }
+
+ // configure focus timeout based on capture mode
+ timeout = (mCapMode == VIDEO_MODE) ?
+ ( ( nsecs_t ) AF_VIDEO_CALLBACK_TIMEOUT * 1000 ) :
+ ( ( nsecs_t ) AF_IMAGE_CALLBACK_TIMEOUT * 1000 );
- // configure focus timeout based on capture mode
- timeout = (mCapMode == VIDEO_MODE) ?
- ( ( nsecs_t ) AF_VIDEO_CALLBACK_TIMEOUT * 1000 ) :
- ( ( nsecs_t ) AF_IMAGE_CALLBACK_TIMEOUT * 1000 );
- {
- Mutex::Autolock lock(mDoAFMutex);
ret = mDoAFCond.waitRelative(mDoAFMutex, timeout);
- }
+ }
//If somethiing bad happened while we wait
if (mComponentState == OMX_StateInvalid) {
@@ -206,6 +218,7 @@ status_t OMXCameraAdapter::doAutoFocus()
CAMHAL_LOGEA("Autofocus callback timeout expired");
ret = returnFocusStatus(true);
} else {
+ CAMHAL_LOGDA("Autofocus callback received");
ret = returnFocusStatus(false);
}
} else { // Focus mode in continuous
@@ -227,18 +240,16 @@ status_t OMXCameraAdapter::stopAutoFocus()
LOG_FUNCTION_NAME;
- if ( OMX_StateInvalid == mComponentState )
- {
+ if ( OMX_StateInvalid == mComponentState ) {
CAMHAL_LOGEA("OMX component in Invalid state");
returnFocusStatus(false);
return -EINVAL;
- }
+ }
- if ( OMX_StateExecuting != mComponentState )
- {
+ if ( OMX_StateExecuting != mComponentState ) {
CAMHAL_LOGEA("OMX component not in executing state");
return NO_ERROR;
- }
+ }
if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) {
// No need to stop focus if we are in infinity mode. Nothing to stop.
@@ -251,19 +262,20 @@ status_t OMXCameraAdapter::stopAutoFocus()
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigFocusControl,
&focusControl);
- if ( OMX_ErrorNone != eError )
- {
+ if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while stopping focus 0x%x", eError);
- return ErrorUtils::omxToAndroidError(eError);
- } else {
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+#ifdef CAMERAHAL_TUNA
+ else {
// This is a WA. Usually the OMX Camera component should
// generate AF status change OMX event fairly quickly
// ( after one preview frame ) and this notification should
// actually come from 'handleFocusCallback()'.
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
-
+#endif
LOG_FUNCTION_NAME_EXIT;
@@ -294,7 +306,7 @@ status_t OMXCameraAdapter::getFocusMode(OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE &focus
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::cancelAutoFocus()
@@ -310,21 +322,32 @@ status_t OMXCameraAdapter::cancelAutoFocus()
return ret;
}
- //Stop the AF only for modes other than CAF or Inifinity
+ //Stop the AF only for modes other than CAF, Infinity or Off
if ( ( focusMode.eFocusControl != OMX_IMAGE_FocusControlAuto ) &&
( focusMode.eFocusControl != ( OMX_IMAGE_FOCUSCONTROLTYPE )
- OMX_IMAGE_FocusControlAutoInfinity ) ) {
+ OMX_IMAGE_FocusControlAutoInfinity ) &&
+ ( focusMode.eFocusControl != OMX_IMAGE_FocusControlOff ) ) {
+ android::AutoMutex lock(mCancelAFMutex);
stopAutoFocus();
+ ret = mCancelAFCond.waitRelative(mCancelAFMutex, CANCEL_AF_TIMEOUT);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGE("Cancel AF timeout!");
+ }
} else if (focusMode.eFocusControl == OMX_IMAGE_FocusControlAuto) {
// This re-enabling of CAF doesn't seem to
// be needed any more.
// re-apply CAF after unlocking and canceling
// mPending3Asettings |= SetFocus;
}
-#ifndef OMAP_TUNA
+
+ {
+ // Signal to 'doAutoFocus()'
+ android::AutoMutex lock(mDoAFMutex);
+ mDoAFCond.broadcast();
+ }
+
// If the apps call #cancelAutoFocus()}, the face callbacks will also resume.
pauseFaceDetection(false);
-#endif
LOG_FUNCTION_NAME_EXIT;
@@ -349,7 +372,7 @@ status_t OMXCameraAdapter::setFocusCallback(bool enabled)
if ( OMX_StateExecuting != mComponentState )
{
CAMHAL_LOGEA("OMX component not in executing state");
- ret = NO_ERROR;
+ return NO_ERROR;
}
if ( NO_ERROR == ret )
@@ -454,9 +477,9 @@ status_t OMXCameraAdapter::returnFocusStatus(bool timeoutReached)
} else {
CAMHAL_LOGDA("Focus locked. Applied focus locks successfully");
}
+
stopAutoFocus();
}
-
//Query current focus distance after AF is complete
updateFocusDistances(mParameters);
}
@@ -476,10 +499,8 @@ status_t OMXCameraAdapter::returnFocusStatus(bool timeoutReached)
notifyFocusSubscribers(focusStatus);
}
-#ifndef OMAP_TUNA
// After focus, face detection will resume sending face callbacks
pauseFaceDetection(false);
-#endif
LOG_FUNCTION_NAME_EXIT;
@@ -514,6 +535,7 @@ status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus)
if ( NO_ERROR == ret )
{
OMX_INIT_STRUCT_PTR (eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
+
eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
OMX_IndexConfigCommonFocusStatus,
eFocusStatus);
@@ -534,7 +556,7 @@ status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus)
return ret;
}
-status_t OMXCameraAdapter::updateFocusDistances(CameraParameters &params)
+status_t OMXCameraAdapter::updateFocusDistances(android::CameraParameters &params)
{
OMX_U32 focusNear, focusOptimal, focusFar;
status_t ret = NO_ERROR;
@@ -620,7 +642,7 @@ status_t OMXCameraAdapter::encodeFocusDistance(OMX_U32 dist, char *buffer, size_
{
if ( 0 == dist )
{
- strncpy(buffer, CameraParameters::FOCUS_DISTANCE_INFINITY, ( length - 1 ));
+ strncpy(buffer, android::CameraParameters::FOCUS_DISTANCE_INFINITY, ( length - 1 ));
}
else
{
@@ -638,7 +660,7 @@ status_t OMXCameraAdapter::encodeFocusDistance(OMX_U32 dist, char *buffer, size_
status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
OMX_U32 &optimal,
OMX_U32 &far,
- CameraParameters& params)
+ android::CameraParameters& params)
{
status_t ret = NO_ERROR;
@@ -677,7 +699,7 @@ status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
mFocusDistOptimal,
mFocusDistFar);
- params.set(CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
+ params.set(android::CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
}
LOG_FUNCTION_NAME_EXIT;
@@ -690,13 +712,20 @@ status_t OMXCameraAdapter::setTouchFocus()
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- OMX_ALGOAREASTYPE **focusAreas;
+ OMX_ALGOAREASTYPE *focusAreas;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
MemoryManager memMgr;
+ CameraBuffer *bufferlist;
int areasSize = 0;
LOG_FUNCTION_NAME;
+ ret = memMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
+
if ( OMX_StateInvalid == mComponentState )
{
CAMHAL_LOGEA("OMX component is in invalid state");
@@ -707,7 +736,8 @@ status_t OMXCameraAdapter::setTouchFocus()
{
areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
- focusAreas = (OMX_ALGOAREASTYPE**) memMgr.allocateBuffer(0, 0, NULL, areasSize, 1);
+ bufferlist = memMgr.allocateBufferList(0, 0, NULL, areasSize, 1);
+ focusAreas = (OMX_ALGOAREASTYPE*) bufferlist[0].opaque;
OMXCameraPortParameters * mPreviewData = NULL;
mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
@@ -718,51 +748,60 @@ status_t OMXCameraAdapter::setTouchFocus()
return -ENOMEM;
}
- OMX_INIT_STRUCT_PTR (focusAreas[0], OMX_ALGOAREASTYPE);
+ OMX_INIT_STRUCT_PTR (focusAreas, OMX_ALGOAREASTYPE);
- focusAreas[0]->nPortIndex = OMX_ALL;
- focusAreas[0]->nNumAreas = mFocusAreas.size();
- focusAreas[0]->nAlgoAreaPurpose = OMX_AlgoAreaFocus;
+ focusAreas->nPortIndex = OMX_ALL;
+ focusAreas->nNumAreas = mFocusAreas.size();
+ focusAreas->nAlgoAreaPurpose = OMX_AlgoAreaFocus;
// If the area is the special case of (0, 0, 0, 0, 0), then
// the algorithm needs nNumAreas to be set to 0,
// in order to automatically choose the best fitting areas.
if ( mFocusAreas.itemAt(0)->isZeroArea() )
{
- focusAreas[0]->nNumAreas = 0;
+ focusAreas->nNumAreas = 0;
+ }
+
+ for ( unsigned int n = 0; n < mFocusAreas.size(); n++) {
+ int widthDivisor = 1;
+ int heightDivisor = 1;
+
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutTopBottom) {
+ heightDivisor = 2;
+ }
+ if (mPreviewData->mFrameLayoutType == OMX_TI_StereoFrameLayoutLeftRight) {
+ widthDivisor = 2;
}
- for ( unsigned int n = 0; n < mFocusAreas.size(); n++)
- {
// transform the coordinates to 3A-type coordinates
- mFocusAreas.itemAt(n)->transfrom(mPreviewData->mWidth,
- mPreviewData->mHeight,
- focusAreas[0]->tAlgoAreas[n].nTop,
- focusAreas[0]->tAlgoAreas[n].nLeft,
- focusAreas[0]->tAlgoAreas[n].nWidth,
- focusAreas[0]->tAlgoAreas[n].nHeight);
-
- focusAreas[0]->tAlgoAreas[n].nLeft =
- ( focusAreas[0]->tAlgoAreas[n].nLeft * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
- focusAreas[0]->tAlgoAreas[n].nTop =
- ( focusAreas[0]->tAlgoAreas[n].nTop* TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
- focusAreas[0]->tAlgoAreas[n].nWidth =
- ( focusAreas[0]->tAlgoAreas[n].nWidth * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
- focusAreas[0]->tAlgoAreas[n].nHeight =
- ( focusAreas[0]->tAlgoAreas[n].nHeight * TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
- focusAreas[0]->tAlgoAreas[n].nPriority = mFocusAreas.itemAt(n)->getWeight();
+ mFocusAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth/widthDivisor,
+ (size_t)mPreviewData->mHeight/heightDivisor,
+ (size_t&)focusAreas->tAlgoAreas[n].nTop,
+ (size_t&)focusAreas->tAlgoAreas[n].nLeft,
+ (size_t&)focusAreas->tAlgoAreas[n].nWidth,
+ (size_t&)focusAreas->tAlgoAreas[n].nHeight);
+
+ focusAreas->tAlgoAreas[n].nLeft =
+ ( focusAreas->tAlgoAreas[n].nLeft * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
+ focusAreas->tAlgoAreas[n].nTop =
+ ( focusAreas->tAlgoAreas[n].nTop* TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
+ focusAreas->tAlgoAreas[n].nWidth =
+ ( focusAreas->tAlgoAreas[n].nWidth * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
+ focusAreas->tAlgoAreas[n].nHeight =
+ ( focusAreas->tAlgoAreas[n].nHeight * TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
+ focusAreas->tAlgoAreas[n].nPriority = mFocusAreas.itemAt(n)->getWeight();
CAMHAL_LOGDB("Focus area %d : top = %d left = %d width = %d height = %d prio = %d",
- n, (int)focusAreas[0]->tAlgoAreas[n].nTop, (int)focusAreas[0]->tAlgoAreas[n].nLeft,
- (int)focusAreas[0]->tAlgoAreas[n].nWidth, (int)focusAreas[0]->tAlgoAreas[n].nHeight,
- (int)focusAreas[0]->tAlgoAreas[n].nPriority);
- }
+ n, (int)focusAreas->tAlgoAreas[n].nTop, (int)focusAreas->tAlgoAreas[n].nLeft,
+ (int)focusAreas->tAlgoAreas[n].nWidth, (int)focusAreas->tAlgoAreas[n].nHeight,
+ (int)focusAreas->tAlgoAreas[n].nPriority);
+ }
OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
sharedBuffer.nPortIndex = OMX_ALL;
sharedBuffer.nSharedBuffSize = areasSize;
- sharedBuffer.pSharedBuff = (OMX_U8 *) focusAreas[0];
+ sharedBuffer.pSharedBuff = (OMX_U8 *) camera_buffer_get_omx_ptr (&bufferlist[0]);
if ( NULL == sharedBuffer.pSharedBuff )
{
@@ -781,10 +820,9 @@ status_t OMXCameraAdapter::setTouchFocus()
}
EXIT:
- if (NULL != focusAreas)
+ if (NULL != bufferlist)
{
- memMgr.freeBuffer((void*) focusAreas);
- focusAreas = NULL;
+ memMgr.freeBufferList (bufferlist);
}
}
@@ -808,17 +846,22 @@ void OMXCameraAdapter::handleFocusCallback() {
CAMHAL_LOGEA("Focus status check failed!");
// signal and unblock doAutoFocus
if (AF_ACTIVE & nextState) {
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
return;
}
- if ( ( eFocusStatus.eFocusStatus != OMX_FocusStatusRequest ) &&
- ( eFocusStatus.eFocusStatus != OMX_FocusStatusOff ) ) {
+ if ( eFocusStatus.eFocusStatus == OMX_FocusStatusOff ) {
+ android::AutoMutex lock(mCancelAFMutex);
+ mCancelAFCond.signal();
+ return;
+ }
+
+ if (eFocusStatus.eFocusStatus != OMX_FocusStatusRequest) {
// signal doAutoFocus when a end of scan message comes
// ignore start of scan
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
@@ -843,4 +886,5 @@ void OMXCameraAdapter::handleFocusCallback() {
notifyFocusSubscribers(focusStatus);
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXMetadata.cpp b/camera/OMXCameraAdapter/OMXMetadata.cpp
new file mode 100644
index 0000000..af8c49c
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXMetadata.cpp
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXMetadata.cpp
+*
+* This file contains functionality for assembling camera frame metadata
+* (face detection results, white balance, LSC table and shot info).
+*
+*/
+
+#undef LOG_TAG
+
+#define LOG_TAG "OMXMetaData"
+
+#include "OMXCameraAdapter.h"
+#include <camera/CameraMetadata.h>
+
+namespace Ti {
+namespace Camera {
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+camera_memory_t * OMXCameraAdapter::getMetaData(const OMX_PTR plat_pvt,
+ camera_request_memory allocator) const
+{
+ camera_memory_t * ret = NULL;
+
+ OMX_OTHER_EXTRADATATYPE *extraData;
+ OMX_FACEDETECTIONTYPE *faceData = NULL;
+ OMX_TI_WHITEBALANCERESULTTYPE * WBdata = NULL;
+ OMX_TI_VECTSHOTINFOTYPE *shotInfo = NULL;
+ OMX_TI_LSCTABLETYPE *lscTbl = NULL;
+ camera_metadata_t *metaData;
+ size_t offset = 0;
+
+ size_t metaDataSize = sizeof(camera_metadata_t);
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_FaceDetection);
+ if ( NULL != extraData ) {
+ faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
+ metaDataSize += faceData->ulFaceCount * sizeof(camera_metadata_face_t);
+ }
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_WhiteBalance);
+ if ( NULL != extraData ) {
+ WBdata = ( OMX_TI_WHITEBALANCERESULTTYPE * ) extraData->data;
+ }
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_TI_VectShotInfo);
+ if ( NULL != extraData ) {
+ shotInfo = ( OMX_TI_VECTSHOTINFOTYPE * ) extraData->data;
+ }
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_TI_LSCTable);
+ if ( NULL != extraData ) {
+ lscTbl = ( OMX_TI_LSCTABLETYPE * ) extraData->data;
+ metaDataSize += OMX_TI_LSC_GAIN_TABLE_SIZE;
+ }
+
+ ret = allocator(-1, metaDataSize, 1, NULL);
+ if ( NULL == ret ) {
+ return NULL;
+ } else {
+ metaData = static_cast<camera_metadata_t *> (ret->data);
+ offset += sizeof(camera_metadata_t);
+ }
+
+ if ( NULL != faceData ) {
+ metaData->number_of_faces = 0;
+ int idx = 0;
+ metaData->faces_offset = offset;
+ struct camera_metadata_face *faces = reinterpret_cast<struct camera_metadata_face *> (static_cast<char*>(ret->data) + offset);
+ for ( int j = 0; j < faceData->ulFaceCount ; j++ ) {
+ if(faceData->tFacePosition[j].nScore <= FACE_DETECTION_THRESHOLD) {
+ continue;
+ }
+ idx = metaData->number_of_faces;
+ metaData->number_of_faces++;
+ // TODO: Rework and re-use encodeFaceCoordinates()
+ faces[idx].left = faceData->tFacePosition[j].nLeft;
+ faces[idx].top = faceData->tFacePosition[j].nTop;
+ faces[idx].bottom = faceData->tFacePosition[j].nWidth;
+ faces[idx].right = faceData->tFacePosition[j].nHeight;
+ }
+ offset += sizeof(camera_metadata_face_t) * metaData->number_of_faces;
+ }
+
+ if ( NULL != WBdata ) {
+ metaData->awb_temp = WBdata->nColorTemperature;
+ metaData->gain_b = WBdata->nGainB;
+ metaData->gain_gb = WBdata->nGainGB;
+ metaData->gain_gr = WBdata->nGainGR;
+ metaData->gain_r = WBdata->nGainR;
+ metaData->offset_b = WBdata->nOffsetB;
+ metaData->offset_gb = WBdata->nOffsetGB;
+ metaData->offset_gr = WBdata->nOffsetGR;
+ metaData->offset_r = WBdata->nOffsetR;
+ }
+
+ if ( NULL != lscTbl ) {
+ metaData->lsc_table_applied = lscTbl->bApplied;
+ metaData->lsc_table_size = OMX_TI_LSC_GAIN_TABLE_SIZE;
+ metaData->lsc_table_offset = offset;
+ uint8_t *lsc_table = reinterpret_cast<uint8_t *> (static_cast<char*>(ret->data) + offset);
+ memcpy(lsc_table, lscTbl->pGainTable, OMX_TI_LSC_GAIN_TABLE_SIZE);
+ offset += metaData->lsc_table_size;
+ }
+
+ if ( NULL != shotInfo ) {
+ metaData->frame_number = shotInfo->nFrameNum;
+ metaData->shot_number = shotInfo->nConfigId;
+ metaData->analog_gain = shotInfo->nAGain;
+ metaData->analog_gain_req = shotInfo->nReqGain;
+ metaData->analog_gain_min = shotInfo->nGainMin;
+ metaData->analog_gain_max = shotInfo->nGainMax;
+ metaData->analog_gain_error = shotInfo->nSenAGainErr;
+ metaData->analog_gain_dev = shotInfo->nDevAGain;
+ metaData->exposure_time = shotInfo->nExpTime;
+ metaData->exposure_time_req = shotInfo->nReqExpTime;
+ metaData->exposure_time_min = shotInfo->nExpMin;
+ metaData->exposure_time_max = shotInfo->nExpMax;
+ metaData->exposure_time_dev = shotInfo->nDevExpTime;
+ metaData->exposure_time_error = shotInfo->nSenExpTimeErr;
+ metaData->exposure_compensation_req = shotInfo->nReqEC;
+ metaData->exposure_dev = shotInfo->nDevEV;
+ }
+
+ return ret;
+}
+#endif
+
+status_t OMXCameraAdapter::encodePreviewMetadata(camera_frame_metadata_t *meta, const OMX_PTR plat_pvt)
+{
+ status_t ret = NO_ERROR;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ OMX_OTHER_EXTRADATATYPE *extraData = NULL;
+
+ extraData = getExtradata(plat_pvt, (OMX_EXTRADATATYPE) OMX_TI_VectShotInfo);
+
+ if ( (NULL != extraData) && (NULL != extraData->data) ) {
+ OMX_TI_VECTSHOTINFOTYPE *shotInfo;
+ shotInfo = (OMX_TI_VECTSHOTINFOTYPE*) extraData->data;
+
+ meta->analog_gain = shotInfo->nAGain;
+ meta->exposure_time = shotInfo->nExpTime;
+ } else {
+ meta->analog_gain = -1;
+ meta->exposure_time = -1;
+ }
+
+ // Send metadata event only after any value has been changed
+ if ((metadataLastAnalogGain == meta->analog_gain) &&
+ (metadataLastExposureTime == meta->exposure_time)) {
+ ret = NOT_ENOUGH_DATA;
+ } else {
+ metadataLastAnalogGain = meta->analog_gain;
+ metadataLastExposureTime = meta->exposure_time;
+ }
+#else
+ // no-op in non enhancement mode
+ CAMHAL_UNUSED(meta);
+ CAMHAL_UNUSED(plat_pvt);
+#endif
+
+ return ret;
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXReprocess.cpp b/camera/OMXCameraAdapter/OMXReprocess.cpp
new file mode 100644
index 0000000..28f1744
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXReprocess.cpp
@@ -0,0 +1,340 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXReprocess.cpp
+*
+* This file contains functionality for handling reprocessing operations.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+
+
+namespace Ti {
+namespace Camera {
+
+status_t OMXCameraAdapter::setParametersReprocess(const android::CameraParameters &params,
+ CameraBuffer* buffers,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+ int w, h, s;
+ OMX_COLOR_FORMATTYPE pixFormat;
+ OMXCameraPortParameters *portData;
+ const char* valstr;
+
+ LOG_FUNCTION_NAME;
+
+ if (!buffers) {
+ CAMHAL_LOGE("invalid buffer array");
+ return BAD_VALUE;
+ }
+
+ portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+ w = buffers[0].width;
+ h = buffers[0].height;
+ s = buffers[0].stride;
+
+ valstr = buffers[0].format;
+ if (valstr != NULL) {
+ if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ CAMHAL_LOGDA("YUV420SP format selected");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ CAMHAL_LOGDA("RAW Picture format selected");
+ pixFormat = OMX_COLOR_FormatRawBayer10bit;
+ } else {
+ CAMHAL_LOGDA("Format not supported, selecting YUV420SP by default");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+ } else {
+ CAMHAL_LOGDA("Format not supported, selecting YUV420SP by default");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+
+ if ( (w != (int)portData->mWidth) || (h != (int)portData->mHeight) ||
+ (s != (int) portData->mStride) || (pixFormat != portData->mColorFormat)) {
+ portData->mWidth = w;
+ portData->mHeight = h;
+
+ if ( OMX_COLOR_FormatRawBayer10bit == pixFormat ) {
+ portData->mStride = w * 2;
+ } else {
+ portData->mStride = s;
+ }
+
+ portData->mColorFormat = pixFormat;
+
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_IN_VIDEO, *portData);
+ if ( ret != NO_ERROR ) {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::startReprocess()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters * portData = NULL;
+
+ LOG_FUNCTION_NAME;
+ CAMHAL_LOGD ("mReprocConfigured = %d", mReprocConfigured);
+ if (!mReprocConfigured) {
+ return NO_ERROR;
+ }
+
+ portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+ CAMHAL_LOGD ("mReprocConfigured = %d", mBurstFramesQueued);
+ if (NO_ERROR == ret) {
+ android::AutoMutex lock(mBurstLock);
+
+ for ( int index = 0 ; index < portData->mMaxQueueable ; index++ ) {
+ CAMHAL_LOGDB("Queuing buffer on video input port - %p",
+ portData->mBufferHeader[index]->pBuffer);
+ portData->mStatus[index] = OMXCameraPortParameters::FILL;
+ eError = OMX_EmptyThisBuffer(mCameraAdapterParameters.mHandleComp,
+ (OMX_BUFFERHEADERTYPE*)portData->mBufferHeader[index]);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+ }
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::stopReprocess()
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters *portData = NULL;
+
+ if (!mReprocConfigured) {
+ return NO_ERROR;
+ }
+
+ portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+ // Disable port - send command and then free all buffers
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ mStopReprocSem);
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ NULL);
+ if (portData) {
+ CAMHAL_LOGDB("Freeing buffers on reproc port - num: %d", portData->mNumBufs);
+ for (int index = 0 ; index < portData->mNumBufs ; index++) {
+ CAMHAL_LOGDB("Freeing buffer on reproc port - 0x%x",
+ ( unsigned int ) portData->mBufferHeader[index]->pBuffer);
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ (OMX_BUFFERHEADERTYPE*)portData->mBufferHeader[index]);
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+ }
+ CAMHAL_LOGDA("Waiting for port disable");
+ ret = mStopReprocSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if (mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGEA("Invalid State after Disable Image Port Exitting!!!");
+ goto EXIT;
+ }
+ if (NO_ERROR == ret) {
+ CAMHAL_LOGDA("Port disabled");
+ } else {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ NULL);
+ CAMHAL_LOGDA("Timeout expired on port disable");
+ goto EXIT;
+ }
+
+ deinitInternalBuffers(mCameraAdapterParameters.mVideoInPortIndex);
+
+ mReprocConfigured = false;
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::disableReprocess(){
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ // no-op..for now
+
+EXIT:
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::UseBuffersReprocess(CameraBuffer *bufArr, int num)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters *portData = NULL;
+
+ portData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex];
+
+ if ( 0 != mUseReprocessSem.Count() ) {
+ CAMHAL_LOGEB("Error mUseReprocessSem semaphore count %d", mUseReprocessSem.Count());
+ return BAD_VALUE;
+ }
+
+ if (mAdapterState == REPROCESS_STATE) {
+ stopReprocess();
+ } else if (mAdapterState == CAPTURE_STATE) {
+ stopImageCapture();
+ disableImagePort();
+ }
+
+ if (mReprocConfigured) {
+ return NO_ERROR;
+ }
+
+ portData->mNumBufs = num;
+
+ // Configure
+ ret = setParametersReprocess(mParams, bufArr, mAdapterState);
+
+ // Configure DOMX to use either gralloc handles or vptrs
+ OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+ OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
+
+ domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mVideoInPortIndex;
+ if (bufArr[0].type == CAMERA_BUFFER_ANW) {
+ CAMHAL_LOGD("Using ANW");
+ domxUseGrallocHandles.bEnable = OMX_TRUE;
+
+ // Need to allocate tiler reservation and state we are going to be using
+        // pagelist buffers. Assuming this happens when buffers are from anw
+ initInternalBuffers(mCameraAdapterParameters.mVideoInPortIndex);
+ } else {
+ CAMHAL_LOGD("Using ION");
+ domxUseGrallocHandles.bEnable = OMX_FALSE;
+ }
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ // Enable Port
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ mUseReprocessSem);
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ NULL);
+ GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+ for (int index = 0 ; index < portData->mNumBufs ; index++)
+ {
+ OMX_BUFFERHEADERTYPE *pBufferHdr;
+ CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d",
+ (unsigned int)bufArr[index].opaque,
+ (int)portData->mBufSize);
+
+ eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp,
+ &pBufferHdr,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ 0,
+ portData->mBufSize,
+ (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
+
+ CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
+ GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+ pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+ bufArr[index].index = index;
+ pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufferHdr->nVersion.s.nRevision = 0;
+ pBufferHdr->nVersion.s.nStep = 0;
+ portData->mBufferHeader[index] = pBufferHdr;
+ }
+
+ // Wait for port enable event
+ CAMHAL_LOGDA("Waiting for port enable");
+ ret = mUseReprocessSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+    // Error out if something bad happened while we wait
+ if (mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGEA("Invalid State while trying to enable port for reprocessing");
+ goto EXIT;
+ }
+
+ if (ret == NO_ERROR) {
+ CAMHAL_LOGDA("Port enabled");
+ } else {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mVideoInPortIndex,
+ NULL);
+ CAMHAL_LOGDA("Timeout expired on port enable");
+ goto EXIT;
+ }
+
+ mReprocConfigured = true;
+
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ // Release image buffers
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ performCleanupAfterError();
+ LOG_FUNCTION_NAME_EXIT;
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
+
+}
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXZoom.cpp b/camera/OMXCameraAdapter/OMXZoom.cpp
index eec7691..e39a3b0 100644
--- a/camera/OMXCameraAdapter/OMXZoom.cpp
+++ b/camera/OMXCameraAdapter/OMXZoom.cpp
@@ -21,14 +21,11 @@
*
*/
-#undef LOG_TAG
-
-#define LOG_TAG "CameraHAL"
-
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const int32_t OMXCameraAdapter::ZOOM_STEPS [ZOOM_STAGES] = {
65536, 68157, 70124, 72745,
@@ -49,20 +46,19 @@ const int32_t OMXCameraAdapter::ZOOM_STEPS [ZOOM_STAGES] = {
524288 };
-status_t OMXCameraAdapter::setParametersZoom(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersZoom(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
LOG_FUNCTION_NAME;
//Immediate zoom should not be avaialable while smooth zoom is running
if ( ( ZOOM_ACTIVE & state ) != ZOOM_ACTIVE )
{
- int zoom = params.getInt(CameraParameters::KEY_ZOOM);
- if( ( zoom >= 0 ) && ( zoom < ZOOM_STAGES ) )
- {
+ int zoom = params.getInt(android::CameraParameters::KEY_ZOOM);
+ if (( zoom >= 0 ) && ( zoom < mMaxZoomSupported )) {
mTargetZoomIdx = zoom;
//Immediate zoom should be applied instantly ( CTS requirement )
@@ -97,8 +93,7 @@ status_t OMXCameraAdapter::doZoom(int index)
ret = -1;
}
- if ( ( 0 > index) || ( ( ZOOM_STAGES - 1 ) < index ) )
- {
+ if (( 0 > index) || ((mMaxZoomSupported - 1 ) < index )) {
CAMHAL_LOGEB("Zoom index %d out of range", index);
ret = -EINVAL;
}
@@ -139,7 +134,7 @@ status_t OMXCameraAdapter::advanceZoom()
{
status_t ret = NO_ERROR;
AdapterState state;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
BaseCameraAdapter::getState(state);
@@ -241,23 +236,20 @@ status_t OMXCameraAdapter::startSmoothZoom(int targetIdx)
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
CAMHAL_LOGDB("Start smooth zoom target = %d, mCurrentIdx = %d",
targetIdx,
mCurrentZoomIdx);
- if ( ( targetIdx >= 0 ) && ( targetIdx < ZOOM_STAGES ) )
- {
+ if (( targetIdx >= 0 ) && ( targetIdx < mMaxZoomSupported )) {
mTargetZoomIdx = targetIdx;
mZoomParameterIdx = mCurrentZoomIdx;
mReturnZoomStatus = false;
- }
- else
- {
+ } else {
CAMHAL_LOGEB("Smooth value out of range %d!", targetIdx);
ret = -EINVAL;
- }
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -267,7 +259,7 @@ status_t OMXCameraAdapter::startSmoothZoom(int targetIdx)
status_t OMXCameraAdapter::stopSmoothZoom()
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
LOG_FUNCTION_NAME;
@@ -293,4 +285,5 @@ status_t OMXCameraAdapter::stopSmoothZoom()
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/SensorListener.cpp b/camera/SensorListener.cpp
index bb6f577..45a278b 100644
--- a/camera/SensorListener.cpp
+++ b/camera/SensorListener.cpp
@@ -21,16 +21,14 @@
*
*/
-#define LOG_TAG "CameraHAL"
-
#include "SensorListener.h"
-#include "CameraHal.h"
#include <stdint.h>
#include <math.h>
#include <sys/types.h>
-namespace android {
+namespace Ti {
+namespace Camera {
/*** static declarations ***/
static const float RADIANS_2_DEG = (float) (180 / M_PI);
@@ -46,7 +44,7 @@ static int sensor_events_listener(int fd, int events, void* data)
ASensorEvent sen_events[8];
while ((num_sensors = listener->mSensorEventQueue->read(sen_events, 8)) > 0) {
for (int i = 0; i < num_sensors; i++) {
- if (sen_events[i].type == Sensor::TYPE_ACCELEROMETER) {
+ if (sen_events[i].type == android::Sensor::TYPE_ACCELEROMETER) {
float x = sen_events[i].vector.azimuth;
float y = sen_events[i].vector.pitch;
float z = sen_events[i].vector.roll;
@@ -79,7 +77,7 @@ static int sensor_events_listener(int fd, int events, void* data)
}
listener->handleOrientation(orient, tilt);
CAMHAL_LOGVB(" tilt = %d orientation = %d", tilt, orient);
- } else if (sen_events[i].type == Sensor::TYPE_GYROSCOPE) {
+ } else if (sen_events[i].type == android::Sensor::TYPE_GYROSCOPE) {
CAMHAL_LOGVA("GYROSCOPE EVENT");
}
}
@@ -132,11 +130,11 @@ SensorListener::~SensorListener() {
status_t SensorListener::initialize() {
status_t ret = NO_ERROR;
- SensorManager& mgr(SensorManager::getInstance());
+ android::SensorManager& mgr(android::SensorManager::getInstance());
LOG_FUNCTION_NAME;
- sp<Looper> mLooper;
+ android::sp<android::Looper> mLooper;
mSensorEventQueue = mgr.createEventQueue();
if (mSensorEventQueue == NULL) {
@@ -145,7 +143,7 @@ status_t SensorListener::initialize() {
goto out;
}
- mLooper = new Looper(false);
+ mLooper = new android::Looper(false);
mLooper->addFd(mSensorEventQueue->getFd(), 0, ALOOPER_EVENT_INPUT, sensor_events_listener, this);
if (mSensorLooperThread.get() == NULL)
@@ -157,7 +155,7 @@ status_t SensorListener::initialize() {
goto out;
}
- ret = mSensorLooperThread->run("sensor looper thread", PRIORITY_URGENT_DISPLAY);
+ ret = mSensorLooperThread->run("sensor looper thread", android::PRIORITY_URGENT_DISPLAY);
if (ret == INVALID_OPERATION){
CAMHAL_LOGDA("thread already running ?!?");
} else if (ret != NO_ERROR) {
@@ -184,7 +182,7 @@ void SensorListener::setCallbacks(orientation_callback_t orientation_cb, void *c
void SensorListener::handleOrientation(uint32_t orientation, uint32_t tilt) {
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(&mLock);
+ android::AutoMutex lock(&mLock);
if (mOrientationCb && (sensorsEnabled & SENSOR_ORIENTATION)) {
mOrientationCb(orientation, tilt, mCbCookie);
@@ -194,15 +192,15 @@ void SensorListener::handleOrientation(uint32_t orientation, uint32_t tilt) {
}
void SensorListener::enableSensor(sensor_type_t type) {
- Sensor const* sensor;
- SensorManager& mgr(SensorManager::getInstance());
+ android::Sensor const* sensor;
+ android::SensorManager& mgr(android::SensorManager::getInstance());
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(&mLock);
+ android::AutoMutex lock(&mLock);
if ((type & SENSOR_ORIENTATION) && !(sensorsEnabled & SENSOR_ORIENTATION)) {
- sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
+ sensor = mgr.getDefaultSensor(android::Sensor::TYPE_ACCELEROMETER);
CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
mSensorEventQueue->enableSensor(sensor);
mSensorEventQueue->setEventRate(sensor, ms2ns(100));
@@ -213,15 +211,15 @@ void SensorListener::enableSensor(sensor_type_t type) {
}
void SensorListener::disableSensor(sensor_type_t type) {
- Sensor const* sensor;
- SensorManager& mgr(SensorManager::getInstance());
+ android::Sensor const* sensor;
+ android::SensorManager& mgr(android::SensorManager::getInstance());
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(&mLock);
+ android::AutoMutex lock(&mLock);
if ((type & SENSOR_ORIENTATION) && (sensorsEnabled & SENSOR_ORIENTATION)) {
- sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
+ sensor = mgr.getDefaultSensor(android::Sensor::TYPE_ACCELEROMETER);
CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
mSensorEventQueue->disableSensor(sensor);
sensorsEnabled &= ~SENSOR_ORIENTATION;
@@ -230,4 +228,5 @@ void SensorListener::disableSensor(sensor_type_t type) {
LOG_FUNCTION_NAME_EXIT;
}
-} // namespace android
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/TICameraParameters.cpp b/camera/TICameraParameters.cpp
index 221cff4..ed8105e 100644
--- a/camera/TICameraParameters.cpp
+++ b/camera/TICameraParameters.cpp
@@ -14,24 +14,26 @@
* limitations under the License.
*/
-
-
-
-#define LOG_TAG "CameraHAL"
#include <utils/Log.h>
#include <string.h>
#include <stdlib.h>
#include <TICameraParameters.h>
-#include "CameraHal.h"
-namespace android {
+#define TI_KEY_ALGO_PREFIX "ti-algo-"
+
+namespace Ti {
+namespace Camera {
//TI extensions to camera mode
const char TICameraParameters::HIGH_PERFORMANCE_MODE[] = "high-performance";
const char TICameraParameters::HIGH_QUALITY_MODE[] = "high-quality";
const char TICameraParameters::HIGH_QUALITY_ZSL_MODE[] = "high-quality-zsl";
+const char TICameraParameters::CP_CAM_MODE[] = "cp-cam";
const char TICameraParameters::VIDEO_MODE[] = "video-mode";
+const char TICameraParameters::EXPOSURE_BRACKETING[] = "exposure-bracketing";
+const char TICameraParameters::ZOOM_BRACKETING[] = "zoom-bracketing";
+const char TICameraParameters::TEMP_BRACKETING[] = "temporal-bracketing";
// TI extensions to standard android Parameters
const char TICameraParameters::KEY_SUPPORTED_CAMERAS[] = "camera-indexes";
@@ -40,61 +42,73 @@ const char TICameraParameters::KEY_SHUTTER_ENABLE[] = "shutter-enable";
const char TICameraParameters::KEY_CAMERA_NAME[] = "camera-name";
const char TICameraParameters::KEY_BURST[] = "burst-capture";
const char TICameraParameters::KEY_CAP_MODE[] = "mode";
+const char TICameraParameters::KEY_CAP_MODE_VALUES[] = "mode-values";
const char TICameraParameters::KEY_VNF[] = "vnf";
+const char TICameraParameters::KEY_VNF_SUPPORTED[] = "vnf-supported";
const char TICameraParameters::KEY_SATURATION[] = "saturation";
const char TICameraParameters::KEY_BRIGHTNESS[] = "brightness";
-const char TICameraParameters::KEY_EXPOSURE_MODE[] = "exposure";
const char TICameraParameters::KEY_SUPPORTED_EXPOSURE[] = "exposure-mode-values";
+const char TICameraParameters::KEY_EXPOSURE_MODE[] = "exposure";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN[] = "supported-manual-exposure-min";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX[] = "supported-manual-exposure-max";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_STEP[] = "supported-manual-exposure-step";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN[] = "supported-manual-gain-iso-min";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX[] = "supported-manual-gain-iso-max";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP[] = "supported-manual-gain-iso-step";
+const char TICameraParameters::KEY_MANUAL_EXPOSURE[] = "manual-exposure";
+const char TICameraParameters::KEY_MANUAL_EXPOSURE_RIGHT[] = "manual-exposure-right";
+const char TICameraParameters::KEY_MANUAL_GAIN_ISO[] = "manual-gain-iso";
+const char TICameraParameters::KEY_MANUAL_GAIN_ISO_RIGHT[] = "manual-gain-iso-right";
const char TICameraParameters::KEY_CONTRAST[] = "contrast";
const char TICameraParameters::KEY_SHARPNESS[] = "sharpness";
const char TICameraParameters::KEY_ISO[] = "iso";
const char TICameraParameters::KEY_SUPPORTED_ISO_VALUES[] = "iso-mode-values";
const char TICameraParameters::KEY_SUPPORTED_IPP[] = "ipp-values";
const char TICameraParameters::KEY_IPP[] = "ipp";
-const char TICameraParameters::KEY_MAN_EXPOSURE[] = "manual-exposure";
const char TICameraParameters::KEY_METERING_MODE[] = "meter-mode";
-const char TICameraParameters::KEY_PADDED_WIDTH[] = "padded-width";
-const char TICameraParameters::KEY_PADDED_HEIGHT[] = "padded-height";
const char TICameraParameters::KEY_EXP_BRACKETING_RANGE[] = "exp-bracketing-range";
+const char TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE[] = "exp-gain-bracketing-range";
+const char TICameraParameters::KEY_ZOOM_BRACKETING_RANGE[] = "zoom-bracketing-range";
const char TICameraParameters::KEY_TEMP_BRACKETING[] = "temporal-bracketing";
const char TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS[] = "temporal-bracketing-range-positive";
const char TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG[] = "temporal-bracketing-range-negative";
-const char TICameraParameters::KEY_S3D_SUPPORTED[] = "s3d-supported";
+const char TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE[] = "flush-shot-config-queue";
const char TICameraParameters::KEY_MEASUREMENT_ENABLE[] = "measurement";
const char TICameraParameters::KEY_GBCE[] = "gbce";
+const char TICameraParameters::KEY_GBCE_SUPPORTED[] = "gbce-supported";
const char TICameraParameters::KEY_GLBCE[] = "glbce";
+const char TICameraParameters::KEY_GLBCE_SUPPORTED[] = "glbce-supported";
const char TICameraParameters::KEY_CURRENT_ISO[] = "current-iso";
const char TICameraParameters::KEY_SENSOR_ORIENTATION[] = "sensor-orientation";
-const char TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES[] = "sensor-orientation-values";
-const char TICameraParameters::KEY_MINFRAMERATE[] = "min-framerate";
-const char TICameraParameters::KEY_MAXFRAMERATE[] = "max-framerate";
const char TICameraParameters::KEY_RECORDING_HINT[] = "internal-recording-hint";
const char TICameraParameters::KEY_AUTO_FOCUS_LOCK[] = "auto-focus-lock";
-
-//TI extensions for enabling/disabling GLBCE
-const char TICameraParameters::GLBCE_ENABLE[] = "enable";
-const char TICameraParameters::GLBCE_DISABLE[] = "disable";
-
-//TI extensions for enabling/disabling GBCE
-const char TICameraParameters::GBCE_ENABLE[] = "enable";
-const char TICameraParameters::GBCE_DISABLE[] = "disable";
-
-//TI extensions for enabling/disabling measurement
-const char TICameraParameters::MEASUREMENT_ENABLE[] = "enable";
-const char TICameraParameters::MEASUREMENT_DISABLE[] = "disable";
-
-//TI extensions for zoom
-const char TICameraParameters::ZOOM_SUPPORTED[] = "true";
-const char TICameraParameters::ZOOM_UNSUPPORTED[] = "false";
-
-// TI extensions for 2D Preview in Stereo Mode
-const char TICameraParameters::KEY_S3D2D_PREVIEW[] = "s3d2d-preview";
-const char TICameraParameters::KEY_S3D2D_PREVIEW_MODE[] = "s3d2d-preview-values";
+const char TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED[] = "preview-fps-range-ext-values";
+const char TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED[] = "preview-fps-ext-values";
+
+const char TICameraParameters::RAW_WIDTH[] = "raw-width";
+const char TICameraParameters::RAW_HEIGHT[] = "raw-height";
+
+// TI extensions for Stereo Mode
+const char TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT[] = "s3d-prv-frame-layout";
+const char TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT_VALUES[] = "s3d-prv-frame-layout-values";
+const char TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT[] = "s3d-cap-frame-layout";
+const char TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT_VALUES[] = "s3d-cap-frame-layout-values";
+
+//TI extensions for 3D resolutions
+const char TICameraParameters::KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES[] = "supported-picture-subsampled-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES[] = "supported-picture-topbottom-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[] = "supported-picture-sidebyside-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[] = "supported-preview-subsampled-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[] = "supported-preview-topbottom-size-values";
+const char TICameraParameters::KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[] = "supported-preview-sidebyside-size-values";
//TI extensions for SAC/SMC
-const char TICameraParameters::KEY_AUTOCONVERGENCE[] = "auto-convergence";
const char TICameraParameters::KEY_AUTOCONVERGENCE_MODE[] = "auto-convergence-mode";
-const char TICameraParameters::KEY_MANUALCONVERGENCE_VALUES[] = "manual-convergence-values";
+const char TICameraParameters::KEY_AUTOCONVERGENCE_MODE_VALUES[] = "auto-convergence-mode-values";
+const char TICameraParameters::KEY_MANUAL_CONVERGENCE[] = "manual-convergence";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN[] = "supported-manual-convergence-min";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX[] = "supported-manual-convergence-max";
+const char TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP[] = "supported-manual-convergence-step";
//TI extensions for setting EXIF tags
const char TICameraParameters::KEY_EXIF_MODEL[] = "exif-model";
@@ -105,13 +119,10 @@ const char TICameraParameters::KEY_GPS_MAPDATUM[] = "gps-mapdatum";
const char TICameraParameters::KEY_GPS_VERSION[] = "gps-version";
const char TICameraParameters::KEY_GPS_DATESTAMP[] = "gps-datestamp";
-//TI extensions for enabling/disabling shutter sound
-const char TICameraParameters::SHUTTER_ENABLE[] = "true";
-const char TICameraParameters::SHUTTER_DISABLE[] = "false";
-
-//TI extensions for Temporal Bracketing
-const char TICameraParameters::BRACKET_ENABLE[] = "enable";
-const char TICameraParameters::BRACKET_DISABLE[] = "disable";
+// TI extensions for slice mode implementation for VTC
+const char TICameraParameters::KEY_VTC_HINT[] = "internal-vtc-hint";
+const char TICameraParameters::KEY_VIDEO_ENCODER_HANDLE[] = "encoder_handle";
+const char TICameraParameters::KEY_VIDEO_ENCODER_SLICE_HEIGHT[] = "encoder_slice_height";
//TI extensions to Image post-processing
const char TICameraParameters::IPP_LDCNSF[] = "ldc-nsf";
@@ -120,14 +131,12 @@ const char TICameraParameters::IPP_NSF[] = "nsf";
const char TICameraParameters::IPP_NONE[] = "off";
// TI extensions to standard android pixel formats
-const char TICameraParameters::PIXEL_FORMAT_RAW[] = "raw";
+const char TICameraParameters::PIXEL_FORMAT_UNUSED[] = "unused";
const char TICameraParameters::PIXEL_FORMAT_JPS[] = "jps";
const char TICameraParameters::PIXEL_FORMAT_MPO[] = "mpo";
-const char TICameraParameters::PIXEL_FORMAT_RAW_JPEG[] = "raw+jpeg";
-const char TICameraParameters::PIXEL_FORMAT_RAW_MPO[] = "raw+mpo";
+const char TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY[] = "yuv422i-uyvy";
// TI extensions to standard android scene mode settings
-const char TICameraParameters::SCENE_MODE_SPORT[] = "sport";
const char TICameraParameters::SCENE_MODE_CLOSEUP[] = "closeup";
const char TICameraParameters::SCENE_MODE_AQUA[] = "aqua";
const char TICameraParameters::SCENE_MODE_SNOWBEACH[] = "snow-beach";
@@ -149,6 +158,7 @@ const char TICameraParameters::WHITE_BALANCE_FACE[] = "face-priority";
const char TICameraParameters::FOCUS_MODE_PORTRAIT[] = "portrait";
const char TICameraParameters::FOCUS_MODE_EXTENDED[] = "extended";
const char TICameraParameters::FOCUS_MODE_FACE[] = "face-priority";
+const char TICameraParameters::FOCUS_MODE_OFF[] = "off";
// TI extensions to add values for effect settings.
const char TICameraParameters::EFFECT_NATURAL[] = "natural";
@@ -157,7 +167,7 @@ const char TICameraParameters::EFFECT_COLOR_SWAP[] = "color-swap";
const char TICameraParameters::EFFECT_BLACKWHITE[] = "blackwhite";
// TI extensions to add exposure preset modes
-const char TICameraParameters::EXPOSURE_MODE_OFF[] = "off";
+const char TICameraParameters::EXPOSURE_MODE_MANUAL[] = "manual";
const char TICameraParameters::EXPOSURE_MODE_AUTO[] = "auto";
const char TICameraParameters::EXPOSURE_MODE_NIGHT[] = "night";
const char TICameraParameters::EXPOSURE_MODE_BACKLIGHT[] = "backlighting";
@@ -179,12 +189,19 @@ const char TICameraParameters::ISO_MODE_1000[] = "1000";
const char TICameraParameters::ISO_MODE_1200[] = "1200";
const char TICameraParameters::ISO_MODE_1600[] = "1600";
+//TI extensions for stereo frame layouts
+const char TICameraParameters::S3D_NONE[] = "none";
+const char TICameraParameters::S3D_TB_FULL[] = "tb-full";
+const char TICameraParameters::S3D_SS_FULL[] = "ss-full";
+const char TICameraParameters::S3D_TB_SUBSAMPLED[] = "tb-subsampled";
+const char TICameraParameters::S3D_SS_SUBSAMPLED[] = "ss-subsampled";
+
// TI extensions to add auto convergence values
-const char TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE[] = "mode-disable";
-const char TICameraParameters::AUTOCONVERGENCE_MODE_FRAME[] = "mode-frame";
-const char TICameraParameters::AUTOCONVERGENCE_MODE_CENTER[] = "mode-center";
-const char TICameraParameters::AUTOCONVERGENCE_MODE_FFT[] = "mode-fft";
-const char TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL[] = "mode-manual";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE[] = "disable";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_FRAME[] = "frame";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_CENTER[] = "center";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_TOUCH[] = "touch";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL[] = "manual";
//TI values for camera direction
const char TICameraParameters::FACING_FRONT[]="front";
@@ -198,5 +215,17 @@ const char TICameraParameters::ORIENTATION_SENSOR_NONE[] = "0";
const char TICameraParameters::ORIENTATION_SENSOR_90[] = "90";
const char TICameraParameters::ORIENTATION_SENSOR_180[] = "180";
const char TICameraParameters::ORIENTATION_SENSOR_270[] = "270";
-};
+const char TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[] = "mechanical-misalignment-correction-supported";
+const char TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION[] = "mechanical-misalignment-correction";
+
+//TI extensions for enable/disable algos
+const char TICameraParameters::KEY_ALGO_FIXED_GAMMA[] = TI_KEY_ALGO_PREFIX "fixed-gamma";
+const char TICameraParameters::KEY_ALGO_NSF1[] = TI_KEY_ALGO_PREFIX "nsf1";
+const char TICameraParameters::KEY_ALGO_NSF2[] = TI_KEY_ALGO_PREFIX "nsf2";
+const char TICameraParameters::KEY_ALGO_SHARPENING[] = TI_KEY_ALGO_PREFIX "sharpening";
+const char TICameraParameters::KEY_ALGO_THREELINCOLORMAP[] = TI_KEY_ALGO_PREFIX "threelinecolormap";
+const char TICameraParameters::KEY_ALGO_GIC[] = TI_KEY_ALGO_PREFIX "gic";
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
index c365023..29c71c7 100644
--- a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
+++ b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -25,6 +25,7 @@
#include "V4LCameraAdapter.h"
#include "CameraHal.h"
#include "TICameraParameters.h"
+#include "DebugUtils.h"
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
@@ -37,6 +38,8 @@
#include <sys/select.h>
#include <linux/videodev.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
#include <cutils/properties.h>
#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
@@ -44,28 +47,250 @@ static int mDebugFps = 0;
#define Q16_OFFSET 16
-#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);}
+#define HERE(Msg) {CAMHAL_LOGEB("--=== %s===--\n", Msg);}
-namespace android {
-
-#undef LOG_TAG
-///Maintain a separate tag for V4LCameraAdapter logs to isolate issues OMX specific
-#define LOG_TAG "CameraHAL"
+namespace Ti {
+namespace Camera {
//frames skipped before recalculating the framerate
#define FPS_PERIOD 30
-Mutex gAdapterLock;
-const char *device = DEVICE;
+//define this macro to save first few raw frames when starting the preview.
+//#define SAVE_RAW_FRAMES 1
+//#define DUMP_CAPTURE_FRAME 1
+//#define PPM_PER_FRAME_CONVERSION 1
+
+//Proto Types
+static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size );
+static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height );
+static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height );
+
+android::Mutex gV4LAdapterLock;
+char device[15];
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
-status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
+/*--------------------V4L wrapper functions -------------------------------*/
+status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
+ status_t ret = NO_ERROR;
+ errno = 0;
+
+ do {
+ ret = ioctl (fd, req, argp);
+ }while (-1 == ret && EINTR == errno);
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lInitMmap(int& count) {
+ status_t ret = NO_ERROR;
+
+ //First allocate adapter internal buffers at V4L level for USB Cam
+ //These are the buffers from which we will copy the data into overlay buffers
+ /* Check if camera can handle NB_BUFFER buffers */
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
+ mVideoInfo->rb.count = count;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+ return ret;
+ }
+
+ count = mVideoInfo->rb.count;
+ for (int i = 0; i < count; i++) {
+
+ memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->mem[i] = mmap (NULL,
+ mVideoInfo->buf.length,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ mCameraHandle,
+ mVideoInfo->buf.m.offset);
+
+ CAMHAL_LOGVB(" mVideoInfo->mem[%d]=%p ; mVideoInfo->buf.length = %d", i, mVideoInfo->mem[i], mVideoInfo->buf.length);
+ if (mVideoInfo->mem[i] == MAP_FAILED) {
+ CAMHAL_LOGEB("Unable to map buffer [%d]. (%s)", i, strerror(errno));
+ return -1;
+ }
+ }
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lInitUsrPtr(int& count) {
+ status_t ret = NO_ERROR;
+
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_USERPTR;
+ mVideoInfo->rb.count = count;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed for USERPTR: %s", strerror(errno));
+ return ret;
+ }
+
+ count = mVideoInfo->rb.count;
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lStartStreaming () {
+ status_t ret = NO_ERROR;
+ enum v4l2_buf_type bufType;
+
+ if (!mVideoInfo->isStreaming) {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
+ return ret;
+ }
+ mVideoInfo->isStreaming = true;
+ }
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lStopStreaming (int nBufferCount) {
+ status_t ret = NO_ERROR;
+ enum v4l2_buf_type bufType;
+
+ if (mVideoInfo->isStreaming) {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
+ goto EXIT;
+ }
+ mVideoInfo->isStreaming = false;
+
+ /* Unmap buffers */
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+ for (int i = 0; i < nBufferCount; i++) {
+ if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0) {
+ CAMHAL_LOGEA("munmap() failed");
+ }
+ }
+
+ //free the memory allocated during REQBUFS, by setting the count=0
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
+ mVideoInfo->rb.count = 0;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+ goto EXIT;
+ }
+ }
+EXIT:
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_format) {
+ status_t ret = NO_ERROR;
+
+ mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ ret = v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_G_FMT Failed: %s", strerror(errno));
+ }
+
+ mVideoInfo->width = width;
+ mVideoInfo->height = height;
+ mVideoInfo->framesizeIn = (width * height << 1);
+ mVideoInfo->formatIn = DEFAULT_PIXEL_FORMAT;
+
+ mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->format.fmt.pix.width = width;
+ mVideoInfo->format.fmt.pix.height = height;
+ mVideoInfo->format.fmt.pix.pixelformat = pix_format;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_S_FMT Failed: %s", strerror(errno));
+ return ret;
+ }
+ v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
+ CAMHAL_LOGDB("VIDIOC_G_FMT : WxH = %dx%d", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height);
+ return ret;
+}
+
+status_t V4LCameraAdapter::restartPreview ()
{
- LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+ int width = 0;
+ int height = 0;
+ struct v4l2_streamparm streamParams;
+
+ //configure for preview size and pixel format.
+ mParams.getPreviewSize(&width, &height);
+
+ ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+
+ ret = v4lInitMmap(mPreviewBufferCount);
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lInitMmap Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+
+ //set frame rate
+ streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
+ streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
+ streamParams.parm.capture.timeperframe.denominator = FPS_PERIOD;
+ streamParams.parm.capture.timeperframe.numerator= 1;
+ ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_S_PARM Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+
+ for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
+ }
+ nQueued++;
+ }
+ ret = v4lStartStreaming();
+ CAMHAL_LOGDA("Ready for preview....");
+EXIT:
+ return ret;
+}
+
+/*--------------------Camera Adapter Functions-----------------------------*/
+status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
+{
char value[PROPERTY_VALUE_MAX];
+
+ LOG_FUNCTION_NAME;
property_get("debug.camera.showfps", value, "0");
mDebugFps = atoi(value);
@@ -73,115 +298,129 @@ status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
// Allocate memory for video info structure
mVideoInfo = (struct VideoInfo *) calloc (1, sizeof (struct VideoInfo));
- if(!mVideoInfo)
- {
- return NO_MEMORY;
- }
+ if(!mVideoInfo) {
+ ret = NO_MEMORY;
+ goto EXIT;
+ }
- if ((mCameraHandle = open(device, O_RDWR)) == -1)
- {
+ if ((mCameraHandle = open(device, O_RDWR) ) == -1) {
CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
- return -EINVAL;
- }
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- ret = ioctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
- if (ret < 0)
- {
+ ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
+ if (ret < 0) {
CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
- return -EINVAL;
- }
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0)
- {
+ if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
- return -EINVAL;
- }
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING))
- {
+ if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING)) {
CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
- return -EINVAL;
- }
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
// Initialize flags
mPreviewing = false;
mVideoInfo->isStreaming = false;
mRecording = false;
-
+ mCapturing = false;
+EXIT:
LOG_FUNCTION_NAME_EXIT;
-
return ret;
}
-status_t V4LCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::FrameType frameType)
{
-
status_t ret = NO_ERROR;
+ int idx = 0;
+ LOG_FUNCTION_NAME;
- if ( !mVideoInfo->isStreaming )
- {
- return NO_ERROR;
+ if ( frameType == CameraFrame::IMAGE_FRAME) { //(1 > mCapturedFrames)
+ // Signal end of image capture
+ if ( NULL != mEndImageCaptureCallback) {
+ CAMHAL_LOGDB("===========Signal End Image Capture==========");
+ mEndImageCaptureCallback(mEndCaptureData);
}
+ goto EXIT;
+ }
+ if ( !mVideoInfo->isStreaming ) {
+ goto EXIT;
+ }
- int i = mPreviewBufs.valueFor(( unsigned int )frameBuf);
- if(i<0)
- {
- return BAD_VALUE;
- }
+ idx = mPreviewBufs.valueFor(frameBuf);
+ if(idx < 0) {
+ CAMHAL_LOGEB("Wrong index = %d",idx);
+ goto EXIT;
+ }
- mVideoInfo->buf.index = i;
+ mVideoInfo->buf.index = idx;
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
- ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
if (ret < 0) {
- CAMHAL_LOGEA("Init: VIDIOC_QBUF Failed");
- return -1;
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
}
-
nQueued++;
-
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t V4LCameraAdapter::setParameters(const CameraParameters &params)
+status_t V4LCameraAdapter::setParameters(const android::CameraParameters &params)
{
- LOG_FUNCTION_NAME;
-
status_t ret = NO_ERROR;
-
int width, height;
+ struct v4l2_streamparm streamParams;
- params.getPreviewSize(&width, &height);
-
- CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);
-
- mVideoInfo->width = width;
- mVideoInfo->height = height;
- mVideoInfo->framesizeIn = (width * height << 1);
- mVideoInfo->formatIn = DEFAULT_PIXEL_FORMAT;
+ LOG_FUNCTION_NAME;
- mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->format.fmt.pix.width = width;
- mVideoInfo->format.fmt.pix.height = height;
- mVideoInfo->format.fmt.pix.pixelformat = DEFAULT_PIXEL_FORMAT;
+ if(!mPreviewing && !mCapturing) {
+ params.getPreviewSize(&width, &height);
+ CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);
- ret = ioctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
- if (ret < 0) {
- CAMHAL_LOGEB("Open: VIDIOC_S_FMT Failed: %s", strerror(errno));
- return ret;
+ ret = v4lSetFormat( width, height, DEFAULT_PIXEL_FORMAT);
+ if (ret < 0) {
+ CAMHAL_LOGEB(" VIDIOC_S_FMT Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+ //set frame rate
+      // Now it's fixed to 30 FPS
+ streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
+ streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
+ streamParams.parm.capture.timeperframe.denominator = FPS_PERIOD;
+ streamParams.parm.capture.timeperframe.numerator= 1;
+ ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
+ if (ret < 0) {
+ CAMHAL_LOGEB(" VIDIOC_S_PARM Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+ int actualFps = streamParams.parm.capture.timeperframe.denominator / streamParams.parm.capture.timeperframe.numerator;
+ CAMHAL_LOGDB("Actual FPS set is : %d.", actualFps);
}
// Udpate the current parameter set
mParams = params;
+EXIT:
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-void V4LCameraAdapter::getParameters(CameraParameters& params)
+void V4LCameraAdapter::getParameters(android::CameraParameters& params)
{
LOG_FUNCTION_NAME;
@@ -193,27 +432,37 @@ void V4LCameraAdapter::getParameters(CameraParameters& params)
///API to give the buffers to Adapter
-status_t V4LCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+status_t V4LCameraAdapter::useBuffers(CameraMode mode, CameraBuffer *bufArr, int num, size_t length, unsigned int queueable)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
switch(mode)
{
case CAMERA_PREVIEW:
+ mPreviewBufferCountQueueable = queueable;
ret = UseBuffersPreview(bufArr, num);
break;
- //@todo Insert Image capture case here
+ case CAMERA_IMAGE_CAPTURE:
+ mCaptureBufferCountQueueable = queueable;
+ ret = UseBuffersCapture(bufArr, num);
+ break;
case CAMERA_VIDEO:
//@warn Video capture is not fully supported yet
+ mPreviewBufferCountQueueable = queueable;
ret = UseBuffersPreview(bufArr, num);
break;
+ case CAMERA_MEASUREMENT:
+ break;
+
+ default:
+ break;
}
LOG_FUNCTION_NAME_EXIT;
@@ -221,172 +470,301 @@ status_t V4LCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
return ret;
}
-status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num)
-{
+status_t V4LCameraAdapter::UseBuffersCapture(CameraBuffer *bufArr, int num) {
int ret = NO_ERROR;
- if(NULL == bufArr)
- {
- return BAD_VALUE;
- }
-
- //First allocate adapter internal buffers at V4L level for USB Cam
- //These are the buffers from which we will copy the data into overlay buffers
- /* Check if camera can handle NB_BUFFER buffers */
- mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
- mVideoInfo->rb.count = num;
-
- ret = ioctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
- if (ret < 0) {
- CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
- return ret;
+ LOG_FUNCTION_NAME;
+ if(NULL == bufArr) {
+ ret = BAD_VALUE;
+ goto EXIT;
}
for (int i = 0; i < num; i++) {
+ //Associate each Camera internal buffer with the one from Overlay
+ mCaptureBufs.add(&bufArr[i], i);
+ CAMHAL_LOGDB("capture- buff [%d] = 0x%x ",i, mCaptureBufs.keyAt(i));
+ }
- memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
+    // Update the capture buffer count
+ mCaptureBufferCount = num;
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
- mVideoInfo->buf.index = i;
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+}
- ret = ioctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
- if (ret < 0) {
- CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
- return ret;
- }
+status_t V4LCameraAdapter::UseBuffersPreview(CameraBuffer *bufArr, int num)
+{
+ int ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
- mVideoInfo->mem[i] = mmap (0,
- mVideoInfo->buf.length,
- PROT_READ | PROT_WRITE,
- MAP_SHARED,
- mCameraHandle,
- mVideoInfo->buf.m.offset);
+ if(NULL == bufArr) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- if (mVideoInfo->mem[i] == MAP_FAILED) {
- CAMHAL_LOGEB("Unable to map buffer (%s)", strerror(errno));
- return -1;
+ ret = v4lInitMmap(num);
+ if (ret == NO_ERROR) {
+ for (int i = 0; i < num; i++) {
+ //Associate each Camera internal buffer with the one from Overlay
+ mPreviewBufs.add(&bufArr[i], i);
+ CAMHAL_LOGDB("Preview- buff [%d] = 0x%x ",i, mPreviewBufs.keyAt(i));
}
- uint32_t *ptr = (uint32_t*) bufArr;
+ // Update the preview buffer count
+ mPreviewBufferCount = num;
+ }
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t V4LCameraAdapter::takePicture() {
+ status_t ret = NO_ERROR;
+ int width = 0;
+ int height = 0;
+ size_t yuv422i_buff_size = 0;
+ int index = 0;
+ char *fp = NULL;
+ CameraBuffer *buffer = NULL;
+ CameraFrame frame;
- //Associate each Camera internal buffer with the one from Overlay
- mPreviewBufs.add((int)ptr[i], i);
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mCaptureBufsLock);
+ if(mCapturing) {
+ CAMHAL_LOGEA("Already Capture in Progress...");
+ ret = BAD_VALUE;
+ goto EXIT;
}
- // Update the preview buffer count
- mPreviewBufferCount = num;
+ mCapturing = true;
+ mPreviewing = false;
- return ret;
-}
+ // Stop preview streaming
+ ret = v4lStopStreaming(mPreviewBufferCount);
+ if (ret < 0 ) {
+ CAMHAL_LOGEB("v4lStopStreaming Failed: %s", strerror(errno));
+ goto EXIT;
+ }
-status_t V4LCameraAdapter::startPreview()
-{
- status_t ret = NO_ERROR;
+ //configure for capture image size and pixel format.
+ mParams.getPictureSize(&width, &height);
+ CAMHAL_LOGDB("Image Capture Size WxH = %dx%d",width,height);
+ yuv422i_buff_size = width * height * 2;
- Mutex::Autolock lock(mPreviewBufsLock);
+ ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
+ goto EXIT;
+ }
- if(mPreviewing)
- {
- return BAD_VALUE;
+ ret = v4lInitMmap(mCaptureBufferCount);
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lInitMmap Failed: %s", strerror(errno));
+ goto EXIT;
}
- for (int i = 0; i < mPreviewBufferCount; i++) {
+ for (int i = 0; i < mCaptureBufferCountQueueable; i++) {
mVideoInfo->buf.index = i;
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
- ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
if (ret < 0) {
CAMHAL_LOGEA("VIDIOC_QBUF Failed");
- return -EINVAL;
+ ret = BAD_VALUE;
+ goto EXIT;
}
-
nQueued++;
- }
+ }
- enum v4l2_buf_type bufType;
- if (!mVideoInfo->isStreaming) {
- bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ ret = v4lStartStreaming();
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lStartStreaming Failed: %s", strerror(errno));
+ goto EXIT;
+ }
- ret = ioctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
- if (ret < 0) {
- CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
- return ret;
- }
+ CAMHAL_LOGDA("Streaming started for Image Capture");
- mVideoInfo->isStreaming = true;
- }
+ //get the frame and send to encode as JPG
+ fp = this->GetFrame(index);
+ if(!fp) {
+ CAMHAL_LOGEA("!!! Captured frame is NULL !!!!");
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
+
+ CAMHAL_LOGDA("::Capture Frame received from V4L::");
+ buffer = mCaptureBufs.keyAt(index);
+ CAMHAL_LOGVB("## captureBuf[%d] = 0x%x, yuv422i_buff_size=%d", index, buffer->opaque, yuv422i_buff_size);
+
+ //copy the yuv422i data to the image buffer.
+ memcpy(buffer->opaque, fp, yuv422i_buff_size);
+
+#ifdef DUMP_CAPTURE_FRAME
+ //dump the YUV422 buffer in to a file
+ //a folder should have been created at /data/misc/camera/raw/
+ {
+ int fd =-1;
+ fd = open("/data/misc/camera/raw/captured_yuv422i_dump.yuv", O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+ if(fd < 0) {
+ CAMHAL_LOGEB("Unable to open file: %s", strerror(fd));
+ }
+ else {
+ write(fd, fp, yuv422i_buff_size );
+ close(fd);
+ CAMHAL_LOGDB("::Captured Frame dumped at /data/misc/camera/raw/captured_yuv422i_dump.yuv::");
+ }
+ }
+#endif
+
+ CAMHAL_LOGDA("::sending capture frame to encoder::");
+ frame.mFrameType = CameraFrame::IMAGE_FRAME;
+ frame.mBuffer = buffer;
+ frame.mLength = yuv422i_buff_size;
+ frame.mWidth = width;
+ frame.mHeight = height;
+ frame.mAlignment = width*2;
+ frame.mOffset = 0;
+ frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
+ frame.mFrameMask = (unsigned int)CameraFrame::IMAGE_FRAME;
+ frame.mQuirks |= CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG;
+ frame.mQuirks |= CameraFrame::FORMAT_YUV422I_YUYV;
+
+ ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
+ } else {
+ ret = sendFrameToSubscribers(&frame);
+ }
- // Create and start preview thread for receiving buffers from V4L Camera
- mPreviewThread = new PreviewThread(this);
+ // Stop streaming after image capture
+ ret = v4lStopStreaming(mCaptureBufferCount);
+ if (ret < 0 ) {
+ CAMHAL_LOGEB("v4lStopStreaming Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+
+ ret = restartPreview();
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
- CAMHAL_LOGDA("Created preview thread");
+status_t V4LCameraAdapter::stopImageCapture()
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ //Release image buffers
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ mCaptureBufs.clear();
- //Update the flag to indicate we are previewing
- mPreviewing = true;
+ mCapturing = false;
+ mPreviewing = true;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
- return ret;
+status_t V4LCameraAdapter::autoFocus()
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ //autoFocus is not implemented. Just return.
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
}
-status_t V4LCameraAdapter::stopPreview()
+status_t V4LCameraAdapter::startPreview()
{
- enum v4l2_buf_type bufType;
- int ret = NO_ERROR;
+ status_t ret = NO_ERROR;
- Mutex::Autolock lock(mPreviewBufsLock);
+ LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mPreviewBufsLock);
- if(!mPreviewing)
- {
- return NO_INIT;
- }
+ if(mPreviewing) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- if (mVideoInfo->isStreaming) {
- bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
- ret = ioctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
if (ret < 0) {
- CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
- return ret;
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
}
+ nQueued++;
+ }
- mVideoInfo->isStreaming = false;
+ ret = v4lStartStreaming();
+
+ // Create and start preview thread for receiving buffers from V4L Camera
+ if(!mCapturing) {
+ mPreviewThread = new PreviewThread(this);
+ CAMHAL_LOGDA("Created preview thread");
}
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+ //Update the flag to indicate we are previewing
+ mPreviewing = true;
+ mCapturing = false;
+
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t V4LCameraAdapter::stopPreview()
+{
+ enum v4l2_buf_type bufType;
+ int ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mStopPreviewLock);
+
+ if(!mPreviewing) {
+ return NO_INIT;
+ }
+ mPreviewing = false;
+
+ ret = v4lStopStreaming(mPreviewBufferCount);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StopStreaming: FAILED: %s", strerror(errno));
+ }
nQueued = 0;
nDequeued = 0;
-
- /* Unmap buffers */
- for (int i = 0; i < mPreviewBufferCount; i++)
- if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0)
- CAMHAL_LOGEA("Unmap failed");
+ mFramesWithEncoder = 0;
mPreviewBufs.clear();
mPreviewThread->requestExitAndWait();
mPreviewThread.clear();
+ LOG_FUNCTION_NAME_EXIT;
return ret;
-
}
char * V4LCameraAdapter::GetFrame(int &index)
{
- int ret;
+ int ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
/* DQ */
- ret = ioctl(mCameraHandle, VIDIOC_DQBUF, &mVideoInfo->buf);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_DQBUF, &mVideoInfo->buf);
if (ret < 0) {
CAMHAL_LOGEA("GetFrame: VIDIOC_DQBUF Failed");
return NULL;
@@ -395,6 +773,7 @@ char * V4LCameraAdapter::GetFrame(int &index)
index = mVideoInfo->buf.index;
+ LOG_FUNCTION_NAME_EXIT;
return (char *)mVideoInfo->mem[mVideoInfo->buf.index];
}
@@ -403,6 +782,7 @@ char * V4LCameraAdapter::GetFrame(int &index)
status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
{
status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
// Just return the current preview size, nothing more to do here.
mParams.getPreviewSize(( int * ) &width,
@@ -419,9 +799,27 @@ status_t V4LCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t buffer
return NO_ERROR;
}
-status_t V4LCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+status_t V4LCameraAdapter::getPictureBufferSize(CameraFrame *frame, size_t bufferCount)
{
- // We don't support image capture yet, safely return from here without messing up
+ int width = 0;
+ int height = 0;
+ int bytesPerPixel = 2; // for YUV422i; default pixel format
+
+ LOG_FUNCTION_NAME;
+
+ if (frame == NULL) {
+ return BAD_VALUE;
+ }
+
+ mParams.getPictureSize( &width, &height );
+ frame->mLength = width * height * bytesPerPixel;
+ frame->mWidth = width;
+ frame->mHeight = height;
+ frame->mAlignment = width * bytesPerPixel;
+
+ CAMHAL_LOGDB("Picture size: W x H = %u x %u (size=%u bytes, alignment=%u bytes)",
+ frame->mWidth, frame->mHeight, frame->mLength, frame->mAlignment);
+ LOG_FUNCTION_NAME_EXIT;
return NO_ERROR;
}
@@ -431,16 +829,17 @@ static void debugShowFPS()
static int mLastFrameCount = 0;
static nsecs_t mLastFpsTime = 0;
static float mFps = 0;
- mFrameCount++;
- if (!(mFrameCount & 0x1F)) {
- nsecs_t now = systemTime();
- nsecs_t diff = now - mLastFpsTime;
- mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
- mLastFpsTime = now;
- mLastFrameCount = mFrameCount;
- ALOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ if(mDebugFps) {
+ mFrameCount++;
+ if (!(mFrameCount & 0x1F)) {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ CAMHAL_LOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ }
}
- // XXX: mFPS has the value we want
}
status_t V4LCameraAdapter::recalculateFPS()
@@ -487,6 +886,7 @@ V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index)
LOG_FUNCTION_NAME;
// Nothing useful to do in the constructor
+ mFramesWithEncoder = 0;
LOG_FUNCTION_NAME_EXIT;
}
@@ -507,6 +907,202 @@ V4LCameraAdapter::~V4LCameraAdapter()
LOG_FUNCTION_NAME_EXIT;
}
+static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size ) {
+ //convert YUV422I yuyv to uyvy format.
+ uint32_t *bf = (uint32_t*)src;
+ uint32_t *dst = (uint32_t*)dest;
+
+ LOG_FUNCTION_NAME;
+
+ if (!src || !dest) {
+ return;
+ }
+
+ for(size_t i = 0; i < size; i = i+4)
+ {
+ dst[0] = ((bf[0] & 0x00FF00FF) << 8) | ((bf[0] & 0xFF00FF00) >> 8);
+ bf++;
+ dst++;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height ) {
+ //convert YUV422I to YUV420 NV12 format and copies directly to preview buffers (Tiler memory).
+ int stride = 4096;
+ unsigned char *bf = src;
+ unsigned char *dst_y = dest;
+ unsigned char *dst_uv = dest + ( height * stride);
+#ifdef PPM_PER_FRAME_CONVERSION
+ static int frameCount = 0;
+ static nsecs_t ppm_diff = 0;
+ nsecs_t ppm_start = systemTime();
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if (width % 16 ) {
+ for(int i = 0; i < height; i++) {
+ for(int j = 0; j < width; j++) {
+ *dst_y = *bf;
+ dst_y++;
+ bf = bf + 2;
+ }
+ dst_y += (stride - width);
+ }
+
+ bf = src;
+ bf++; //UV sample
+ for(int i = 0; i < height/2; i++) {
+ for(int j=0; j<width; j++) {
+ *dst_uv = *bf;
+ dst_uv++;
+ bf = bf + 2;
+ }
+ bf = bf + width*2;
+ dst_uv = dst_uv + (stride - width);
+ }
+ } else {
+ //neon conversion
+ for(int i = 0; i < height; i++) {
+ int n = width;
+ int skip = i & 0x1; // skip uv elements for the odd rows
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 5f \n\t"
+ "0: @ 16 pixel copy \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv.. \n\t"
+ " @ now q0 = y q1 = uv \n\t"
+ " vst1.32 {d0,d1}, [%[dst_y]]! \n\t"
+ " cmp %[skip], #0 \n\t"
+ " bne 1f \n\t"
+ " vst1.32 {d2,d3},[%[dst_uv]]! \n\t"
+ "1: @ skip odd rows for UV \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " cmp %[n], #16 \n\t"
+ " bge 0b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
+ : [src_stride] "r" (width), [skip] "r" (skip)
+ : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
+ );
+ dst_y = dst_y + (stride - width);
+ if (skip == 0) {
+ dst_uv = dst_uv + (stride - width);
+ }
+ } //end of for()
+ }
+
+#ifdef PPM_PER_FRAME_CONVERSION
+ ppm_diff += (systemTime() - ppm_start);
+ frameCount++;
+
+ if (frameCount >= 30) {
+ ppm_diff = ppm_diff / frameCount;
+ LOGD("PPM: YUV422i to NV12 Conversion(%d x %d): %llu us ( %llu ms )", width, height,
+ ns2us(ppm_diff), ns2ms(ppm_diff) );
+ ppm_diff = 0;
+ frameCount = 0;
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height ) {
+ //convert YUV422I to YUV420 NV12 format.
+ unsigned char *bf = src;
+ unsigned char *dst_y = dest;
+ unsigned char *dst_uv = dest + (width * height);
+
+ LOG_FUNCTION_NAME;
+
+ if (width % 16 ) {
+ for(int i = 0; i < height; i++) {
+ for(int j = 0; j < width; j++) {
+ *dst_y = *bf;
+ dst_y++;
+ bf = bf + 2;
+ }
+ }
+
+ bf = src;
+ bf++; //UV sample
+ for(int i = 0; i < height/2; i++) {
+ for(int j=0; j<width; j++) {
+ *dst_uv = *bf;
+ dst_uv++;
+ bf = bf + 2;
+ }
+ bf = bf + width*2;
+ }
+ } else {
+ //neon conversion
+ for(int i = 0; i < height; i++) {
+ int n = width;
+ int skip = i & 0x1; // skip uv elements for the odd rows
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 5f \n\t"
+ "0: @ 16 pixel copy \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv.. \n\t"
+ " @ now q0 = y q1 = uv \n\t"
+ " vst1.32 {d0,d1}, [%[dst_y]]! \n\t"
+ " cmp %[skip], #0 \n\t"
+ " bne 1f \n\t"
+ " vst1.32 {d2,d3},[%[dst_uv]]! \n\t"
+ "1: @ skip odd rows for UV \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " cmp %[n], #16 \n\t"
+ " bge 0b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
+ : [src_stride] "r" (width), [skip] "r" (skip)
+ : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
+ );
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+#ifdef SAVE_RAW_FRAMES
+void saveFile(unsigned char* buff, int buff_size) {
+ static int counter = 1;
+ int fd = -1;
+ char fn[256];
+
+ LOG_FUNCTION_NAME;
+ if (counter > 3) {
+ return;
+ }
+ //dump nv12 buffer
+ counter++;
+ sprintf(fn, "/data/misc/camera/raw/nv12_dump_%03d.yuv", counter);
+ CAMHAL_LOGEB("Dumping nv12 frame to a file : %s.", fn);
+
+ fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+ if(fd < 0) {
+ CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
+ return;
+ }
+
+ write(fd, buff, buff_size );
+ close(fd);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+#endif
+
/* Preview Thread */
// ---------------------------------------------------------------------------
@@ -515,63 +1111,115 @@ int V4LCameraAdapter::previewThread()
status_t ret = NO_ERROR;
int width, height;
CameraFrame frame;
+ void *y_uv[2];
+ int index = 0;
+ int stride = 4096;
+ char *fp = NULL;
- if (mPreviewing)
- {
- int index = 0;
- char *fp = this->GetFrame(index);
- if(!fp)
- {
- return BAD_VALUE;
- }
+ mParams.getPreviewSize(&width, &height);
- uint8_t* ptr = (uint8_t*) mPreviewBufs.keyAt(index);
+ if (mPreviewing) {
- int width, height;
- uint16_t* dest = (uint16_t*)ptr;
- uint16_t* src = (uint16_t*) fp;
- mParams.getPreviewSize(&width, &height);
- for(int i=0;i<height;i++)
- {
- for(int j=0;j<width;j++)
- {
- //*dest = *src;
- //convert from YUYV to UYVY supported in Camera service
- *dest = (((*src & 0xFF000000)>>24)<<16)|(((*src & 0x00FF0000)>>16)<<24) |
- (((*src & 0xFF00)>>8)<<0)|(((*src & 0x00FF)>>0)<<8);
- src++;
- dest++;
- }
- dest += 4096/2-width;
- }
+ fp = this->GetFrame(index);
+ if(!fp) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
+ CameraBuffer *buffer = mPreviewBufs.keyAt(index);
+ CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(buffer);
+ if (!lframe) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
+
+ debugShowFPS();
+
+ if ( mFrameSubscribers.size() == 0 ) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
+ y_uv[0] = (void*) lframe->mYuv[0];
+ //y_uv[1] = (void*) lframe->mYuv[1];
+ //y_uv[1] = (void*) (lframe->mYuv[0] + height*stride);
+ convertYUV422ToNV12Tiler ( (unsigned char*)fp, (unsigned char*)y_uv[0], width, height);
+ CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; y= 0x%x; UV= 0x%x.",index, buffer, y_uv[0], y_uv[1] );
+
+#ifdef SAVE_RAW_FRAMES
+ unsigned char* nv12_buff = (unsigned char*) malloc(width*height*3/2);
+ //Convert yuv422i to yuv420sp(NV12) & dump the frame to a file
+ convertYUV422ToNV12 ( (unsigned char*)fp, nv12_buff, width, height);
+ saveFile( nv12_buff, ((width*height)*3/2) );
+ free (nv12_buff);
+#endif
- mParams.getPreviewSize(&width, &height);
frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
- frame.mBuffer = ptr;
- frame.mLength = width*height*2;
- frame.mAlignment = width*2;
+ frame.mBuffer = buffer;
+ frame.mLength = width*height*3/2;
+ frame.mAlignment = stride;
frame.mOffset = 0;
- frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);;
+ frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
+ frame.mFrameMask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;
- ret = sendFrameToSubscribers(&frame);
+ if (mRecording)
+ {
+ frame.mFrameMask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
+ mFramesWithEncoder++;
+ }
+ ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
+ } else {
+ ret = sendFrameToSubscribers(&frame);
}
+ }
+EXIT:
return ret;
}
-extern "C" CameraAdapter* CameraAdapter_Factory()
+//scan for video devices
+void detectVideoDevice(char** video_device_list, int& num_device) {
+ char dir_path[20];
+ char* filename;
+ char** dev_list = video_device_list;
+ DIR *d;
+ struct dirent *dir;
+ int index = 0;
+
+ strcpy(dir_path, DEVICE_PATH);
+ d = opendir(dir_path);
+ if(d) {
+ //read each entry in the /dev/ and find if there is videox entry.
+ while ((dir = readdir(d)) != NULL) {
+ filename = dir->d_name;
+ if (strncmp(filename, DEVICE_NAME, 5) == 0) {
+ strcpy(dev_list[index],DEVICE_PATH);
+ strncat(dev_list[index],filename,sizeof(DEVICE_NAME));
+ index++;
+ }
+ } //end of while()
+ closedir(d);
+ num_device = index;
+
+ for(int i=0; i<index; i++){
+ CAMHAL_LOGDB("Video device list::dev_list[%d]= %s",i,dev_list[i]);
+ }
+ }
+}
+
+extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index)
{
CameraAdapter *adapter = NULL;
- Mutex::Autolock lock(gAdapterLock);
+ android::AutoMutex lock(gV4LAdapterLock);
LOG_FUNCTION_NAME;
adapter = new V4LCameraAdapter(sensor_index);
if ( adapter ) {
- CAMHAL_LOGDB("New OMX Camera adapter instance created for sensor %d",sensor_index);
+ CAMHAL_LOGDB("New V4L Camera adapter instance created for sensor %d",sensor_index);
} else {
- CAMHAL_LOGEA("Camera adapter create failed!");
+ CAMHAL_LOGEA("V4L Camera adapter create failed for sensor index = %d!",sensor_index);
}
LOG_FUNCTION_NAME_EXIT;
@@ -579,32 +1227,91 @@ extern "C" CameraAdapter* CameraAdapter_Factory()
return adapter;
}
-extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
- const unsigned int starting_camera,
- const unsigned int max_camera) {
+extern "C" status_t V4LCameraAdapter_Capabilities(
+ CameraProperties::Properties * const properties_array,
+ const int starting_camera, const int max_camera, int & supportedCameras)
+{
+ status_t ret = NO_ERROR;
+ struct v4l2_capability cap;
+ int tempHandle = NULL;
int num_cameras_supported = 0;
+ char device_list[5][15];
+ char* video_device_list[5];
+ int num_v4l_devices = 0;
+ int sensorId = 0;
CameraProperties::Properties* properties = NULL;
LOG_FUNCTION_NAME;
- if(!properties_array)
- {
- return -EINVAL;
+ supportedCameras = 0;
+ memset((void*)&cap, 0, sizeof(v4l2_capability));
+
+ if (!properties_array) {
+ CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
}
- // TODO: Need to tell camera properties what other cameras we can support
- if (starting_camera + num_cameras_supported < max_camera) {
- num_cameras_supported++;
- properties = properties_array + starting_camera;
- properties->set(CameraProperties::CAMERA_NAME, "USBCamera");
+ for (int i = 0; i < 5; i++) {
+ video_device_list[i] = device_list[i];
}
+ //look for the connected video devices
+ detectVideoDevice(video_device_list, num_v4l_devices);
- LOG_FUNCTION_NAME_EXIT;
+ for (int i = 0; i < num_v4l_devices; i++) {
+ if ( (starting_camera + num_cameras_supported) < max_camera) {
+ sensorId = starting_camera + num_cameras_supported;
+
+ CAMHAL_LOGDB("Opening device[%d] = %s..",i, video_device_list[i]);
+ if ((tempHandle = open(video_device_list[i], O_RDWR)) == -1) {
+ CAMHAL_LOGEB("Error while opening handle to V4L2 Camera(%s): %s",video_device_list[i], strerror(errno));
+ continue;
+ }
- return num_cameras_supported;
+ ret = ioctl (tempHandle, VIDIOC_QUERYCAP, &cap);
+ if (ret < 0) {
+ CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
+ close(tempHandle);
+ continue;
+ }
+
+ //check for video capture devices
+ if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
+ CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
+ close(tempHandle);
+ continue;
+ }
+
+ strcpy(device, video_device_list[i]);
+ properties = properties_array + starting_camera + num_cameras_supported;
+
+ //fetch capabilities for this camera
+ ret = V4LCameraAdapter::getCaps( sensorId, properties, tempHandle );
+ if (ret < 0) {
+ CAMHAL_LOGEA("Error while getting capabilities.");
+ close(tempHandle);
+ continue;
+ }
+
+ num_cameras_supported++;
+
+ }
+ //For now exit this loop once a valid video capture device is found.
+    //TODO: find all V4L capture devices and its capabilities
+ break;
+ }//end of for() loop
+
+ supportedCameras = num_cameras_supported;
+ CAMHAL_LOGDB("Number of V4L cameras detected =%d", num_cameras_supported);
+
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ close(tempHandle);
+ return NO_ERROR;
}
-};
+} // namespace Camera
+} // namespace Ti
/*--------------------Camera Adapter Class ENDS here-----------------------------*/
diff --git a/camera/V4LCameraAdapter/V4LCapabilities.cpp b/camera/V4LCameraAdapter/V4LCapabilities.cpp
new file mode 100644
index 0000000..3a84268
--- /dev/null
+++ b/camera/V4LCameraAdapter/V4LCapabilities.cpp
@@ -0,0 +1,346 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file V4LCapabilities.cpp
+*
+* This file implements the V4L Capabilities feature.
+*
+*/
+
+#include "CameraHal.h"
+#include "V4LCameraAdapter.h"
+#include "ErrorUtils.h"
+#include "TICameraParameters.h"
+
+namespace Ti {
+namespace Camera {
+
+/************************************
+ * global constants and variables
+ *************************************/
+
+#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
+#define MAX_RES_STRING_LENGTH 10
+#define DEFAULT_WIDTH 640
+#define DEFAULT_HEIGHT 480
+
+static const char PARAM_SEP[] = ",";
+
+//Camera defaults
+const char V4LCameraAdapter::DEFAULT_PICTURE_FORMAT[] = "jpeg";
+const char V4LCameraAdapter::DEFAULT_PICTURE_SIZE[] = "640x480";
+const char V4LCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv422i-yuyv";
+const char V4LCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "640x480";
+const char V4LCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6";
+const char V4LCameraAdapter::DEFAULT_FRAMERATE[] = "30";
+const char V4LCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity";
+const char * V4LCameraAdapter::DEFAULT_VSTAB = android::CameraParameters::FALSE;
+const char * V4LCameraAdapter::DEFAULT_VNF = android::CameraParameters::FALSE;
+
+
+const CapPixelformat V4LCameraAdapter::mPixelformats [] = {
+ { V4L2_PIX_FMT_YUYV, android::CameraParameters::PIXEL_FORMAT_YUV422I },
+ { V4L2_PIX_FMT_JPEG, android::CameraParameters::PIXEL_FORMAT_JPEG },
+};
+
+/*****************************************
+ * internal static function declarations
+ *****************************************/
+
+/**** Utility functions to help translate V4L Caps to Parameter ****/
+
+status_t V4LCameraAdapter::insertDefaults(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ params->set(CameraProperties::PREVIEW_FORMAT, DEFAULT_PREVIEW_FORMAT);
+
+ params->set(CameraProperties::PICTURE_FORMAT, DEFAULT_PICTURE_FORMAT);
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, DEFAULT_FRAMERATE);
+ params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS);
+ params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE);
+
+ params->set(CameraProperties::CAMERA_NAME, "USBCAMERA");
+ params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, "320x240");
+ params->set(CameraProperties::JPEG_QUALITY, "90");
+ params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, "50");
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, "(30000,30000)");
+ params->set(CameraProperties::FRAMERATE_RANGE, "30000,30000");
+ params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT, "none");
+ params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, "auto");
+ params->set(CameraProperties::SUPPORTED_ISO_VALUES, "auto");
+ params->set(CameraProperties::SUPPORTED_ANTIBANDING, "auto");
+ params->set(CameraProperties::SUPPORTED_EFFECTS, "none");
+ params->set(CameraProperties::SUPPORTED_IPP_MODES, "ldc-nsf");
+ params->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_FRONT);
+ params->set(CameraProperties::ORIENTATION_INDEX, 0);
+ params->set(CameraProperties::SENSOR_ORIENTATION, "0");
+ params->set(CameraProperties::VSTAB, DEFAULT_VSTAB);
+ params->set(CameraProperties::VNF, DEFAULT_VNF);
+
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for (int i = 0; i < caps.ulPreviewFormatCount; i++) {
+ for (unsigned int j = 0; j < ARRAY_SIZE(mPixelformats); j++) {
+ if(caps.ePreviewFormats[i] == mPixelformats[j].pixelformat ) {
+ strncat (supported, mPixelformats[j].param, MAX_PROP_VALUE_LENGTH-1 );
+ strncat (supported, PARAM_SEP, 1 );
+ }
+ }
+ }
+ strncat(supported, android::CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported);
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for (int i = 0; i < caps.ulPreviewResCount; i++) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat (supported, caps.tPreviewRes[i].param, MAX_PROP_VALUE_LENGTH-1 );
+ }
+
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, supported);
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertImageSizes(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for (int i = 0; i < caps.ulCaptureResCount; i++) {
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat (supported, caps.tCaptureRes[i].param, MAX_PROP_VALUE_LENGTH-1 );
+ }
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported);
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertFrameRates(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+ char supported[MAX_PROP_VALUE_LENGTH];
+ char temp[10];
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+ for (int i = 0; i < caps.ulFrameRateCount; i++) {
+ snprintf (temp, 10, "%d", caps.ulFrameRates[i] );
+ if (supported[0] != '\0') {
+ strncat(supported, PARAM_SEP, 1);
+ }
+ strncat (supported, temp, MAX_PROP_VALUE_LENGTH-1 );
+ }
+
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, supported);
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertCapabilities(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret ) {
+ ret = insertPreviewFormats(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertImageSizes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertPreviewSizes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertFrameRates(params, caps);
+ }
+
+ //Insert Supported Focus modes.
+ params->set(CameraProperties::SUPPORTED_FOCUS_MODES, "infinity");
+
+ params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, "jpeg");
+
+ if ( NO_ERROR == ret ) {
+ ret = insertDefaults(params, caps);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::sortAscend(V4L_TI_CAPTYPE &caps, uint16_t count) {
+ size_t tempRes;
+ size_t w, h, tmpW,tmpH;
+ for (int i=0; i<count; i++) {
+ w = caps.tPreviewRes[i].width;
+ h = caps.tPreviewRes[i].height;
+ tempRes = w*h;
+ for (int j=i+1; j<count; j++) {
+ tmpW = caps.tPreviewRes[j].width;
+ tmpH = caps.tPreviewRes[j].height;
+
+ if (tempRes > (tmpW * tmpH) ) {
+ caps.tPreviewRes[j].width = w;
+ caps.tPreviewRes[j].height = h;
+ w = tmpW;
+ h = tmpH;
+ }
+ }
+ caps.tPreviewRes[i].width = w;
+ caps.tPreviewRes[i].height = h;
+
+ }
+ return NO_ERROR;
+}
+
+/*****************************************
+ * public exposed function declarations
+ *****************************************/
+
+status_t V4LCameraAdapter::getCaps(const int sensorId, CameraProperties::Properties* params,
+ V4L_HANDLETYPE handle) {
+ status_t status = NO_ERROR;
+ V4L_TI_CAPTYPE caps;
+ int i = 0;
+ int j = 0;
+ struct v4l2_fmtdesc fmtDesc;
+ struct v4l2_frmsizeenum frmSizeEnum;
+ struct v4l2_frmivalenum frmIvalEnum;
+
+ //get supported pixel formats
+ for ( i = 0; status == NO_ERROR; i++) {
+ fmtDesc.index = i;
+ fmtDesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ status = ioctl (handle, VIDIOC_ENUM_FMT, &fmtDesc);
+ if (status == NO_ERROR) {
+ CAMHAL_LOGDB("fmtDesc[%d].description::pixelformat::flags== (%s::%d::%d)",i, fmtDesc.description,fmtDesc.pixelformat,fmtDesc.flags);
+ caps.ePreviewFormats[i] = fmtDesc.pixelformat;
+ }
+ }
+ caps.ulPreviewFormatCount = i;
+
+ //get preview sizes & capture image sizes
+ status = NO_ERROR;
+ for ( i = 0; status == NO_ERROR; i++) {
+ frmSizeEnum.index = i;
+ //Check for frame sizes for default pixel format
+ //TODO: Check for frame sizes for all supported pixel formats
+ frmSizeEnum.pixel_format = V4L2_PIX_FMT_YUYV;
+ status = ioctl (handle, VIDIOC_ENUM_FRAMESIZES, &frmSizeEnum);
+ if(frmSizeEnum.type != V4L2_FRMSIZE_TYPE_DISCRETE) {
+ break;
+ }
+ if (status == NO_ERROR) {
+ CAMHAL_LOGDB("frmSizeEnum.index[%d].width x height == (%d x %d)", i, frmSizeEnum.discrete.width, frmSizeEnum.discrete.height);
+ caps.tPreviewRes[i].width = frmSizeEnum.discrete.width;
+ caps.tPreviewRes[i].height = frmSizeEnum.discrete.height;
+ snprintf(caps.tPreviewRes[i].param, MAX_RES_STRING_LENGTH,"%dx%d",frmSizeEnum.discrete.width,frmSizeEnum.discrete.height);
+
+ caps.tCaptureRes[i].width = frmSizeEnum.discrete.width;
+ caps.tCaptureRes[i].height = frmSizeEnum.discrete.height;
+ snprintf(caps.tCaptureRes[i].param, MAX_RES_STRING_LENGTH,"%dx%d",frmSizeEnum.discrete.width,frmSizeEnum.discrete.height);
+ }
+ else {
+ caps.ulCaptureResCount = i;
+ caps.ulPreviewResCount = i;
+ }
+ }
+ if(frmSizeEnum.type != V4L2_FRMSIZE_TYPE_DISCRETE) {
+ CAMHAL_LOGDB("\nmin_width x height = %d x %d ",frmSizeEnum.stepwise.min_width, frmSizeEnum.stepwise.min_height);
+ CAMHAL_LOGDB("\nmax_width x height = %d x %d ",frmSizeEnum.stepwise.max_width, frmSizeEnum.stepwise.max_height);
+ CAMHAL_LOGDB("\nstep width x height = %d x %d ",frmSizeEnum.stepwise.step_width,frmSizeEnum.stepwise.step_height);
+ //TODO: populate the sizes when type = V4L2_FRMSIZE_TYPE_STEPWISE
+ }
+
+ //sort the preview sizes in ascending order
+ sortAscend(caps, caps.ulPreviewResCount);
+
+ //get supported frame rates
+ bool fps30 = false;
+ for ( j=caps.ulPreviewResCount-1; j >= 0; j--) {
+ CAMHAL_LOGDB(" W x H = %d x %d", caps.tPreviewRes[j].width, caps.tPreviewRes[j].height);
+ status = NO_ERROR;
+ for ( i = 0; status == NO_ERROR; i++) {
+ frmIvalEnum.index = i;
+ //Check for supported frame rates for the default pixel format.
+ frmIvalEnum.pixel_format = V4L2_PIX_FMT_YUYV;
+ frmIvalEnum.width = caps.tPreviewRes[j].width;
+ frmIvalEnum.height = caps.tPreviewRes[j].height;
+
+ status = ioctl (handle, VIDIOC_ENUM_FRAMEINTERVALS, &frmIvalEnum);
+ if(frmIvalEnum.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
+ break;
+ }
+ if (status == NO_ERROR) {
+ CAMHAL_LOGDB("frmIvalEnum[%d].frame rate= %d)",i, (frmIvalEnum.discrete.denominator/frmIvalEnum.discrete.numerator));
+ caps.ulFrameRates[i] = (frmIvalEnum.discrete.denominator/frmIvalEnum.discrete.numerator);
+ if (caps.ulFrameRates[i] == 30) {
+ fps30 = true;
+ }
+ }
+ else {
+ caps.ulFrameRateCount = i;
+ }
+ }
+ if(fps30) {
+ break;
+ }
+ }
+
+ if(frmIvalEnum.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
+ //TODO: populate the frame rates when type = V4L2_FRMIVAL_TYPE_STEPWISE;
+ }
+
+ //update the preview resolution with the highest resolution which supports 30fps.
+/* // for video preview the application chooses the resolution from the mediaprofiles.xml.
+   // so populating all supported preview resolutions is required for video mode.
+ caps.tPreviewRes[0].width = caps.tPreviewRes[j].width;
+ caps.tPreviewRes[0].height = caps.tPreviewRes[j].height;
+ snprintf(caps.tPreviewRes[0].param, MAX_RES_STRING_LENGTH,"%dx%d",caps.tPreviewRes[j].width,caps.tPreviewRes[j].height);
+ caps.ulPreviewResCount = 1;
+*/
+ insertCapabilities (params, caps);
+ return NO_ERROR;
+}
+
+
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/inc/ANativeWindowDisplayAdapter.h b/camera/inc/ANativeWindowDisplayAdapter.h
index 9cdf45a..560e98d 100644
--- a/camera/inc/ANativeWindowDisplayAdapter.h
+++ b/camera/inc/ANativeWindowDisplayAdapter.h
@@ -20,10 +20,8 @@
#include <ui/GraphicBufferMapper.h>
#include <hal_public.h>
-//temporarily define format here
-#define HAL_PIXEL_FORMAT_TI_NV12 0x100
-
-namespace android {
+namespace Ti {
+namespace Camera {
/**
* Display handler class - This class basically handles the buffer posting to display
@@ -35,7 +33,7 @@ public:
typedef struct
{
- void *mBuffer;
+ CameraBuffer *mBuffer;
void *mUser;
int mOffset;
int mWidth;
@@ -65,7 +63,7 @@ public:
virtual int setPreviewWindow(struct preview_stream_ops *window);
virtual int setFrameProvider(FrameNotifier *frameProvider);
virtual int setErrorHandler(ErrorNotifier *errorNotifier);
- virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL, S3DParameters *s3dParams = NULL);
+ virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL);
virtual int disableDisplay(bool cancel_buffer = true);
virtual status_t pauseDisplay(bool pause);
@@ -76,16 +74,17 @@ public:
#endif
- virtual int useBuffers(void* bufArr, int num);
virtual bool supportsExternalBuffering();
//Implementation of inherited interfaces
- virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs);
+ virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs);
+ virtual CameraBuffer *getBufferList(int *numBufs);
virtual uint32_t * getOffsets() ;
virtual int getFd() ;
- virtual int freeBuffer(void* buf);
+ virtual int freeBufferList(CameraBuffer * buflist);
- virtual int maxQueueableBuffers(unsigned int& queueable);
+ virtual status_t maxQueueableBuffers(unsigned int& queueable);
+ virtual status_t minUndequeueableBuffers(int& unqueueable);
///Class specific functions
static void frameCallbackRelay(CameraFrame* caFrame);
@@ -105,17 +104,17 @@ public:
static const int DISPLAY_TIMEOUT;
static const int FAILED_DQS_TO_SUSPEND;
- class DisplayThread : public Thread
+ class DisplayThread : public android::Thread
{
ANativeWindowDisplayAdapter* mDisplayAdapter;
- TIUTILS::MessageQueue mDisplayThreadQ;
+ Utils::MessageQueue mDisplayThreadQ;
public:
DisplayThread(ANativeWindowDisplayAdapter* da)
: Thread(false), mDisplayAdapter(da) { }
///Returns a reference to the display message Q for display adapter to post messages
- TIUTILS::MessageQueue& msgQ()
+ Utils::MessageQueue& msgQ()
{
return mDisplayThreadQ;
}
@@ -147,20 +146,22 @@ private:
int mFailedDQs;
bool mPaused; //Pause state
preview_stream_ops_t* mANativeWindow;
- sp<DisplayThread> mDisplayThread;
+ android::sp<DisplayThread> mDisplayThread;
FrameProvider *mFrameProvider; ///Pointer to the frame provider interface
- TIUTILS::MessageQueue mDisplayQ;
+ Utils::MessageQueue mDisplayQ;
unsigned int mDisplayState;
///@todo Have a common class for these members
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
bool mDisplayEnabled;
int mBufferCount;
- buffer_handle_t** mBufferHandleMap;
- IMG_native_handle_t** mGrallocHandleMap;
- uint32_t* mOffsetsMap;
+ CameraBuffer *mBuffers;
+ //buffer_handle_t** mBufferHandleMap; // -> frames[i].BufferHandle
+ //IMG_native_handle_t** mGrallocHandleMap; // -> frames[i].GrallocHandle
+ uint32_t* mOffsetsMap; // -> frames[i].Offset
int mFD;
- KeyedVector<int, int> mFramesWithCameraAdapterMap;
- sp<ErrorNotifier> mErrorNotifier;
+ android::KeyedVector<buffer_handle_t *, int> mFramesWithCameraAdapterMap;
+ android::KeyedVector<int, int> mFramesType;
+ android::sp<ErrorNotifier> mErrorNotifier;
uint32_t mFrameWidth;
uint32_t mFrameHeight;
@@ -184,5 +185,5 @@ private:
};
-};
-
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/inc/BaseCameraAdapter.h b/camera/inc/BaseCameraAdapter.h
index bc38e00..b7966b0 100644
--- a/camera/inc/BaseCameraAdapter.h
+++ b/camera/inc/BaseCameraAdapter.h
@@ -21,7 +21,18 @@
#include "CameraHal.h"
-namespace android {
+namespace Ti {
+namespace Camera {
+
+struct LUT {
+ const char * userDefinition;
+ int halDefinition;
+};
+
+struct LUTtypeHAL{
+ int size;
+ const LUT *Table;
+};
class BaseCameraAdapter : public CameraAdapter
{
@@ -39,16 +50,16 @@ public:
//Message/Frame notification APIs
virtual void enableMsgType(int32_t msgs, frame_callback callback=NULL, event_callback eventCb=NULL, void* cookie=NULL);
virtual void disableMsgType(int32_t msgs, void* cookie);
- virtual void returnFrame(void * frameBuf, CameraFrame::FrameType frameType);
- virtual void addFramePointers(void *frameBuf, void *y_uv);
+ virtual void returnFrame(CameraBuffer * frameBuf, CameraFrame::FrameType frameType);
+ virtual void addFramePointers(CameraBuffer *frameBuf, void *y_uv);
virtual void removeFramePointers();
//APIs to configure Camera adapter and get the current parameter set
- virtual status_t setParameters(const CameraParameters& params) = 0;
- virtual void getParameters(CameraParameters& params) = 0;
+ virtual status_t setParameters(const android::CameraParameters& params) = 0;
+ virtual void getParameters(android::CameraParameters& params) = 0;
//API to send a command to the camera
- virtual status_t sendCommand(CameraCommands operation, int value1 = 0, int value2 = 0, int value3 = 0 );
+ virtual status_t sendCommand(CameraCommands operation, int value1 = 0, int value2 = 0, int value3 = 0, int value4 = 0 );
virtual status_t registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data);
@@ -59,6 +70,8 @@ public:
//Retrieves the next Adapter state
virtual AdapterState getNextState();
+ virtual status_t setSharedAllocator(camera_request_memory shmem_alloc) { mSharedAllocator = shmem_alloc; return NO_ERROR; };
+
// Rolls the state machine back to INTIALIZED_STATE from the current state
virtual status_t rollbackToInitializedState();
@@ -115,10 +128,10 @@ protected:
virtual status_t stopSmoothZoom();
//Should be implemented by deriving classes in order to stop smooth zoom
- virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable);
+ virtual status_t useBuffers(CameraMode mode, CameraBuffer* bufArr, int num, size_t length, unsigned int queueable);
//Should be implemented by deriving classes in order queue a released buffer in CameraAdapter
- virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType);
+ virtual status_t fillThisBuffer(CameraBuffer* frameBuf, CameraFrame::FrameType frameType);
//API to get the frame size required to be allocated. This size is used to override the size passed
//by camera service when VSTAB/VNF is turned ON for example
@@ -128,7 +141,7 @@ protected:
virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
//API to get required picture buffers size with the current configuration in CameraParameters
- virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount);
+ virtual status_t getPictureBufferSize(CameraFrame &frame, size_t bufferCount);
// Should be implemented by deriving classes in order to start face detection
// ( if supported )
@@ -140,6 +153,12 @@ protected:
virtual status_t switchToExecuting();
+ virtual status_t setupTunnel(uint32_t SliceHeight, uint32_t EncoderHandle, uint32_t width, uint32_t height);
+
+ virtual status_t destroyTunnel();
+
+ virtual status_t cameraPreviewInitialization();
+
// Receive orientation events from CameraHal
virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);
@@ -148,7 +167,7 @@ protected:
status_t notifyFocusSubscribers(CameraHalEvent::FocusStatus status);
status_t notifyShutterSubscribers();
status_t notifyZoomSubscribers(int zoomIdx, bool targetReached);
- status_t notifyFaceSubscribers(sp<CameraFDResult> &faces);
+ status_t notifyMetadataSubscribers(android::sp<CameraMetadataResult> &meta);
//Send the frame to subscribers
status_t sendFrameToSubscribers(CameraFrame *frame);
@@ -157,14 +176,15 @@ protected:
status_t resetFrameRefCount(CameraFrame &frame);
//A couple of helper functions
- void setFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType, int refCount);
- int getFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType);
- int setInitFrameRefCount(void* buf, unsigned int mask);
+ void setFrameRefCount(CameraBuffer* frameBuf, CameraFrame::FrameType frameType, int refCount);
+ int getFrameRefCount(CameraBuffer* frameBuf, CameraFrame::FrameType frameType);
+ int setInitFrameRefCount(CameraBuffer* buf, unsigned int mask);
+ static const char* getLUTvalue_translateHAL(int Value, LUTtypeHAL LUT);
// private member functions
private:
status_t __sendFrameToSubscribers(CameraFrame* frame,
- KeyedVector<int, frame_callback> *subscribers,
+ android::KeyedVector<int, frame_callback> *subscribers,
CameraFrame::FrameType frameType);
status_t rollbackToPreviousState();
@@ -198,55 +218,66 @@ protected:
#endif
- mutable Mutex mReturnFrameLock;
+ mutable android::Mutex mReturnFrameLock;
//Lock protecting the Adapter state
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
AdapterState mAdapterState;
AdapterState mNextState;
//Different frame subscribers get stored using these
- KeyedVector<int, frame_callback> mFrameSubscribers;
- KeyedVector<int, frame_callback> mFrameDataSubscribers;
- KeyedVector<int, frame_callback> mVideoSubscribers;
- KeyedVector<int, frame_callback> mImageSubscribers;
- KeyedVector<int, frame_callback> mRawSubscribers;
- KeyedVector<int, event_callback> mFocusSubscribers;
- KeyedVector<int, event_callback> mZoomSubscribers;
- KeyedVector<int, event_callback> mShutterSubscribers;
- KeyedVector<int, event_callback> mFaceSubscribers;
+ android::KeyedVector<int, frame_callback> mFrameSubscribers;
+ android::KeyedVector<int, frame_callback> mSnapshotSubscribers;
+ android::KeyedVector<int, frame_callback> mFrameDataSubscribers;
+ android::KeyedVector<int, frame_callback> mVideoSubscribers;
+ android::KeyedVector<int, frame_callback> mVideoInSubscribers;
+ android::KeyedVector<int, frame_callback> mImageSubscribers;
+ android::KeyedVector<int, frame_callback> mRawSubscribers;
+ android::KeyedVector<int, event_callback> mFocusSubscribers;
+ android::KeyedVector<int, event_callback> mZoomSubscribers;
+ android::KeyedVector<int, event_callback> mShutterSubscribers;
+ android::KeyedVector<int, event_callback> mMetadataSubscribers;
//Preview buffer management data
- int *mPreviewBuffers;
+ CameraBuffer *mPreviewBuffers;
int mPreviewBufferCount;
size_t mPreviewBuffersLength;
- KeyedVector<int, int> mPreviewBuffersAvailable;
- mutable Mutex mPreviewBufferLock;
+ android::KeyedVector<CameraBuffer *, int> mPreviewBuffersAvailable;
+ mutable android::Mutex mPreviewBufferLock;
+
+ //Snapshot buffer management data
+ android::KeyedVector<int, int> mSnapshotBuffersAvailable;
+ mutable android::Mutex mSnapshotBufferLock;
//Video buffer management data
- int *mVideoBuffers;
- KeyedVector<int, int> mVideoBuffersAvailable;
+ CameraBuffer *mVideoBuffers;
+ android::KeyedVector<CameraBuffer *, int> mVideoBuffersAvailable;
int mVideoBuffersCount;
size_t mVideoBuffersLength;
- mutable Mutex mVideoBufferLock;
+ mutable android::Mutex mVideoBufferLock;
//Image buffer management data
- int *mCaptureBuffers;
- KeyedVector<int, bool> mCaptureBuffersAvailable;
+ CameraBuffer *mCaptureBuffers;
+ android::KeyedVector<CameraBuffer *, int> mCaptureBuffersAvailable;
int mCaptureBuffersCount;
size_t mCaptureBuffersLength;
- mutable Mutex mCaptureBufferLock;
+ mutable android::Mutex mCaptureBufferLock;
//Metadata buffermanagement
- int *mPreviewDataBuffers;
- KeyedVector<int, bool> mPreviewDataBuffersAvailable;
+ CameraBuffer *mPreviewDataBuffers;
+ android::KeyedVector<CameraBuffer *, int> mPreviewDataBuffersAvailable;
int mPreviewDataBuffersCount;
size_t mPreviewDataBuffersLength;
- mutable Mutex mPreviewDataBufferLock;
+ mutable android::Mutex mPreviewDataBufferLock;
- TIUTILS::MessageQueue mFrameQ;
- TIUTILS::MessageQueue mAdapterQ;
- mutable Mutex mSubscriberLock;
+ //Video input buffer management data (used for reproc pipe)
+ CameraBuffer *mVideoInBuffers;
+ android::KeyedVector<CameraBuffer *, int> mVideoInBuffersAvailable;
+ mutable android::Mutex mVideoInBufferLock;
+
+ Utils::MessageQueue mFrameQ;
+ Utils::MessageQueue mAdapterQ;
+ mutable android::Mutex mSubscriberLock;
ErrorNotifier *mErrorNotifier;
release_image_buffers_callback mReleaseImageBuffersCallback;
end_image_capture_callback mEndImageCaptureCallback;
@@ -254,18 +285,21 @@ protected:
void *mEndCaptureData;
bool mRecording;
+ camera_request_memory mSharedAllocator;
+
uint32_t mFramesWithDucati;
uint32_t mFramesWithDisplay;
uint32_t mFramesWithEncoder;
-#ifdef DEBUG_LOG
- KeyedVector<int, bool> mBuffersWithDucati;
+#ifdef CAMERAHAL_DEBUG
+ android::KeyedVector<int, bool> mBuffersWithDucati;
#endif
- KeyedVector<void *, CameraFrame *> mFrameQueue;
+ android::KeyedVector<void *, CameraFrame *> mFrameQueue;
};
-};
+} // namespace Camera
+} // namespace Ti
#endif //BASE_CAMERA_ADAPTER_H
diff --git a/camera/inc/BufferSourceAdapter.h b/camera/inc/BufferSourceAdapter.h
new file mode 100644
index 0000000..436d2e5
--- /dev/null
+++ b/camera/inc/BufferSourceAdapter.h
@@ -0,0 +1,190 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BUFFER_SOURCE_ADAPTER_H
+#define BUFFER_SOURCE_ADAPTER_H
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+
+#include "CameraHal.h"
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+namespace Ti {
+namespace Camera {
+
+/**
+ * Handles enqueueing/dequeing buffers to tap-in/tap-out points
+ * TODO(XXX): this class implements DisplayAdapter for now
+ * but this will most likely change once tap-in/tap-out points
+ * are better defined
+ */
+
+class BufferSourceAdapter : public DisplayAdapter
+{
+// private types
+private:
+ // helper class to return frame in different thread context
+ class ReturnFrame : public android::Thread {
+ public:
+ ReturnFrame(BufferSourceAdapter* __this) : mBufferSourceAdapter(__this) {
+ mWaitForSignal.Create(0);
+ mDestroying = false;
+ }
+
+ ~ReturnFrame() {
+ mDestroying = true;
+ mWaitForSignal.Release();
+ }
+
+ void signal() {
+ mWaitForSignal.Signal();
+ }
+
+ virtual bool threadLoop() {
+ mWaitForSignal.Wait();
+ if (!mDestroying) mBufferSourceAdapter->handleFrameReturn();
+ return true;
+ }
+
+ private:
+ BufferSourceAdapter* mBufferSourceAdapter;
+ Utils::Semaphore mWaitForSignal;
+ bool mDestroying;
+ };
+
+ // helper class to queue frame in different thread context
+ class QueueFrame : public android::Thread {
+ public:
+ QueueFrame(BufferSourceAdapter* __this) : mBufferSourceAdapter(__this) {
+ mDestroying = false;
+ }
+
+ ~QueueFrame() {
+ mDestroying = true;
+
+ android::AutoMutex lock(mFramesMutex);
+ while (!mFrames.empty()) {
+ CameraFrame *frame = mFrames.itemAt(0);
+ mFrames.removeAt(0);
+ frame->mMetaData.clear();
+ delete frame;
+ }
+ mFramesCondition.signal();
+ }
+
+ void addFrame(CameraFrame *frame) {
+ android::AutoMutex lock(mFramesMutex);
+ mFrames.add(new CameraFrame(*frame));
+ mFramesCondition.signal();
+ }
+
+ virtual bool threadLoop() {
+ CameraFrame *frame = NULL;
+ {
+ android::AutoMutex lock(mFramesMutex);
+ while (mFrames.empty() && !mDestroying) mFramesCondition.wait(mFramesMutex);
+ if (!mDestroying) {
+ frame = mFrames.itemAt(0);
+ mFrames.removeAt(0);
+ }
+ }
+
+ if (frame) {
+ mBufferSourceAdapter->handleFrameCallback(frame);
+ frame->mMetaData.clear();
+ delete frame;
+ }
+
+ return true;
+ }
+
+ private:
+ BufferSourceAdapter* mBufferSourceAdapter;
+ android::Vector<CameraFrame *> mFrames;
+ android::Condition mFramesCondition;
+ android::Mutex mFramesMutex;
+ bool mDestroying;
+ };
+
+ enum {
+ BUFFER_SOURCE_TAP_IN,
+ BUFFER_SOURCE_TAP_OUT
+ };
+
+// public member functions
+public:
+ BufferSourceAdapter();
+ virtual ~BufferSourceAdapter();
+
+ virtual status_t initialize();
+ virtual int setPreviewWindow(struct preview_stream_ops *source);
+ virtual int setFrameProvider(FrameNotifier *frameProvider);
+ virtual int setErrorHandler(ErrorNotifier *errorNotifier);
+ virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL);
+ virtual int disableDisplay(bool cancel_buffer = true);
+ virtual status_t pauseDisplay(bool pause);
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ // Not implemented in this class
+ virtual status_t setSnapshotTimeRef(struct timeval *refTime = NULL) { return NO_ERROR; }
+#endif
+ virtual bool supportsExternalBuffering();
+ virtual CameraBuffer * allocateBufferList(int width, int dummyHeight, const char* format, int &bytes, int numBufs);
+ virtual CameraBuffer *getBufferList(int *numBufs);
+ virtual uint32_t * getOffsets() ;
+ virtual int getFd() ;
+ virtual int freeBufferList(CameraBuffer * buflist);
+ virtual int maxQueueableBuffers(unsigned int& queueable);
+ virtual int minUndequeueableBuffers(int& unqueueable);
+
+ static void frameCallback(CameraFrame* caFrame);
+ void addFrame(CameraFrame* caFrame);
+ void handleFrameCallback(CameraFrame* caFrame);
+ bool handleFrameReturn();
+
+private:
+ void destroy();
+ status_t returnBuffersToWindow();
+
+private:
+ preview_stream_ops_t* mBufferSource;
+ FrameProvider *mFrameProvider; // Pointer to the frame provider interface
+
+ mutable android::Mutex mLock;
+ int mBufferCount;
+ CameraBuffer *mBuffers;
+
+ android::KeyedVector<buffer_handle_t *, int> mFramesWithCameraAdapterMap;
+ android::sp<ErrorNotifier> mErrorNotifier;
+ android::sp<ReturnFrame> mReturnFrame;
+ android::sp<QueueFrame> mQueueFrame;
+
+ uint32_t mFrameWidth;
+ uint32_t mFrameHeight;
+ uint32_t mPreviewWidth;
+ uint32_t mPreviewHeight;
+
+ int mBufferSourceDirection;
+
+ const char *mPixelFormat;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
+
+#endif
diff --git a/camera/inc/CameraHal.h b/camera/inc/CameraHal.h
index 2506b7b..d20db1e 100644
--- a/camera/inc/CameraHal.h
+++ b/camera/inc/CameraHal.h
@@ -29,22 +29,37 @@
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
+
+#include <hardware/camera.h>
#include <utils/Log.h>
#include <utils/threads.h>
-#include <linux/videodev2.h>
-#include "binder/MemoryBase.h"
-#include "binder/MemoryHeapBase.h"
#include <utils/threads.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
#include <camera/CameraParameters.h>
-#include <hardware/camera.h>
+#ifdef OMAP_ENHANCEMENT_CPCAM
+#include <camera/CameraMetadata.h>
+#include <camera/ShotParameters.h>
+#endif
+#include <ui/GraphicBufferAllocator.h>
+#include <ui/GraphicBuffer.h>
+
+/* For IMG_native_handle_t */
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+#include <ion_ti/ion.h>
+
+#include "Common.h"
#include "MessageQueue.h"
#include "Semaphore.h"
#include "CameraProperties.h"
-#include "DebugUtils.h"
#include "SensorListener.h"
-#include <ui/GraphicBufferAllocator.h>
-#include <ui/GraphicBuffer.h>
+//temporarily define format here
+#define HAL_PIXEL_FORMAT_TI_NV12 0x100
+#define HAL_PIXEL_FORMAT_TI_Y8 0x103
+#define HAL_PIXEL_FORMAT_TI_Y16 0x104
#define MIN_WIDTH 640
#define MIN_HEIGHT 480
@@ -66,6 +81,8 @@
#define SHARPNESS_OFFSET 100
#define CONTRAST_OFFSET 100
+#define FRAME_RATE_HIGH_HD 60
+
#define CAMHAL_GRALLOC_USAGE GRALLOC_USAGE_HW_TEXTURE | \
GRALLOC_USAGE_HW_RENDER | \
GRALLOC_USAGE_SW_READ_RARELY | \
@@ -77,75 +94,111 @@
#define LOCK_BUFFER_TRIES 5
#define HAL_PIXEL_FORMAT_NV12 0x100
-#define CAMHAL_LOGI ALOGI
+#define NONNEG_ASSIGN(x,y) \
+ if(x > -1) \
+ y = x
-//Uncomment to enable more verbose/debug logs
-#define DEBUG_LOG
+#define CAMHAL_SIZE_OF_ARRAY(x) static_cast<int>(sizeof(x)/sizeof(x[0]))
-///Camera HAL Logging Functions
-#ifndef DEBUG_LOG
+namespace Ti {
+namespace Camera {
-#define CAMHAL_LOGDA(str)
-#define CAMHAL_LOGDB(str, ...)
-#define CAMHAL_LOGVA(str)
-#define CAMHAL_LOGVB(str, ...)
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+extern const char * const kRawImagesOutputDirPath;
+extern const char * const kYuvImagesOutputDirPath;
+#endif
+#define V4L_CAMERA_NAME_USB "USBCAMERA"
+#define OMX_CAMERA_NAME_OV "OV5640"
+#define OMX_CAMERA_NAME_SONY "IMX060"
-#define CAMHAL_LOGEA ALOGE
-#define CAMHAL_LOGEB ALOGE
-#undef LOG_FUNCTION_NAME
-#undef LOG_FUNCTION_NAME_EXIT
-#define LOG_FUNCTION_NAME
-#define LOG_FUNCTION_NAME_EXIT
+///Forward declarations
+class CameraHal;
+class CameraFrame;
+class CameraHalEvent;
+class DisplayFrame;
-#else
+class FpsRange {
+public:
+ static int compare(const FpsRange * left, const FpsRange * right);
-#define CAMHAL_LOGDA DBGUTILS_LOGDA
-#define CAMHAL_LOGDB DBGUTILS_LOGDB
-#define CAMHAL_LOGVA DBGUTILS_LOGVA
-#define CAMHAL_LOGVB DBGUTILS_LOGVB
+ FpsRange(int min, int max);
+ FpsRange();
-#define CAMHAL_LOGEA DBGUTILS_LOGEA
-#define CAMHAL_LOGEB DBGUTILS_LOGEB
+ bool operator==(const FpsRange & fpsRange) const;
-#endif
+ bool isNull() const;
+ bool isFixed() const;
+ int min() const;
+ int max() const;
+private:
+ int mMin;
+ int mMax;
+};
-#define NONNEG_ASSIGN(x,y) \
- if(x > -1) \
- y = x
-namespace android {
+inline int FpsRange::compare(const FpsRange * const left, const FpsRange * const right) {
+ if ( left->max() < right->max() ) {
+ return -1;
+ }
-#define PARAM_BUFFER 6000
+ if ( left->max() > right->max() ) {
+ return 1;
+ }
-///Forward declarations
-class CameraHal;
-class CameraFrame;
-class CameraHalEvent;
-class DisplayFrame;
+ if ( left->min() < right->min() ) {
+ return -1;
+ }
+
+ if ( left->min() > right->min() ) {
+ return 1;
+ }
+
+ return 0;
+}
+
+inline FpsRange::FpsRange(const int min, const int max) : mMin(min), mMax(max) {}
+
+inline FpsRange::FpsRange() : mMin(-1), mMax(-1) {}
+
+inline bool FpsRange::operator==(const FpsRange & fpsRange) const {
+ return mMin == fpsRange.mMin && mMax == fpsRange.mMax;
+}
+
+inline bool FpsRange::isNull() const {
+ return mMin == -1 || mMax == -1;
+}
+
+inline bool FpsRange::isFixed() const {
+ return mMin == mMax;
+}
+
+inline int FpsRange::min() const { return mMin; }
+
+inline int FpsRange::max() const { return mMax; }
-class CameraArea : public RefBase
+class CameraArea : public android::RefBase
{
public:
- CameraArea(int32_t top,
- int32_t left,
- int32_t bottom,
- int32_t right,
- uint32_t weight) : mTop(top),
+ CameraArea(ssize_t top,
+ ssize_t left,
+ ssize_t bottom,
+ ssize_t right,
+ size_t weight) : mTop(top),
mLeft(left),
mBottom(bottom),
mRight(right),
mWeight(weight) {}
- status_t transfrom(uint32_t width,
- uint32_t height,
- int32_t &top,
- int32_t &left,
- uint32_t &areaWidth,
- uint32_t &areaHeight);
+ status_t transfrom(size_t width,
+ size_t height,
+ size_t &top,
+ size_t &left,
+ size_t &areaWidth,
+ size_t &areaHeight);
bool isValid()
{
@@ -158,62 +211,84 @@ public:
&& ( 0 == mRight ) && ( 0 == mWeight ));
}
- uint32_t getWeight()
+ size_t getWeight()
{
return mWeight;
}
- bool compare(const sp<CameraArea> &area);
+ bool compare(const android::sp<CameraArea> &area);
static status_t parseAreas(const char *area,
- uint32_t areaLength,
- Vector< sp<CameraArea> > &areas);
+ size_t areaLength,
+ android::Vector< android::sp<CameraArea> > &areas);
- static status_t checkArea(int32_t top,
- int32_t left,
- int32_t bottom,
- int32_t right,
- int32_t weight);
+ static status_t checkArea(ssize_t top,
+ ssize_t left,
+ ssize_t bottom,
+ ssize_t right,
+ ssize_t weight);
- static bool areAreasDifferent(Vector< sp<CameraArea> > &, Vector< sp<CameraArea> > &);
+ static bool areAreasDifferent(android::Vector< android::sp<CameraArea> > &, android::Vector< android::sp<CameraArea> > &);
protected:
- static const int32_t TOP = -1000;
- static const int32_t LEFT = -1000;
- static const int32_t BOTTOM = 1000;
- static const int32_t RIGHT = 1000;
- static const int32_t WEIGHT_MIN = 1;
- static const int32_t WEIGHT_MAX = 1000;
-
- int32_t mTop;
- int32_t mLeft;
- int32_t mBottom;
- int32_t mRight;
- uint32_t mWeight;
+ static const ssize_t TOP = -1000;
+ static const ssize_t LEFT = -1000;
+ static const ssize_t BOTTOM = 1000;
+ static const ssize_t RIGHT = 1000;
+ static const ssize_t WEIGHT_MIN = 1;
+ static const ssize_t WEIGHT_MAX = 1000;
+
+ ssize_t mTop;
+ ssize_t mLeft;
+ ssize_t mBottom;
+ ssize_t mRight;
+ size_t mWeight;
};
-class CameraFDResult : public RefBase
+class CameraMetadataResult : public android::RefBase
{
public:
- CameraFDResult() : mFaceData(NULL) {};
- CameraFDResult(camera_frame_metadata_t *faces) : mFaceData(faces) {};
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ CameraMetadataResult(camera_memory_t * extMeta) : mExtendedMetadata(extMeta) {
+ mMetadata.faces = NULL;
+ mMetadata.number_of_faces = 0;
+#ifdef OMAP_ENHANCEMENT
+ mMetadata.analog_gain = 0;
+ mMetadata.exposure_time = 0;
+#endif
+ };
+#endif
- virtual ~CameraFDResult() {
- if ( ( NULL != mFaceData ) && ( NULL != mFaceData->faces ) ) {
- free(mFaceData->faces);
- free(mFaceData);
- mFaceData=NULL;
- }
+ CameraMetadataResult() {
+ mMetadata.faces = NULL;
+ mMetadata.number_of_faces = 0;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ mMetadata.analog_gain = 0;
+ mMetadata.exposure_time = 0;
+#endif
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ mExtendedMetadata = NULL;
+#endif
+ }
- if(( NULL != mFaceData ))
- {
- free(mFaceData);
- mFaceData = NULL;
- }
+ virtual ~CameraMetadataResult() {
+ if ( NULL != mMetadata.faces ) {
+ free(mMetadata.faces);
+ }
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ if ( NULL != mExtendedMetadata ) {
+ mExtendedMetadata->release(mExtendedMetadata);
+ }
+#endif
}
- camera_frame_metadata_t *getFaceResult() { return mFaceData; };
+ camera_frame_metadata_t *getMetadataResult() { return &mMetadata; };
+
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ camera_memory_t *getExtendedMetadata() { return mExtendedMetadata; };
+#endif
static const ssize_t TOP = -1000;
static const ssize_t LEFT = -1000;
@@ -223,9 +298,58 @@ public:
private:
- camera_frame_metadata_t *mFaceData;
+ camera_frame_metadata_t mMetadata;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ camera_memory_t *mExtendedMetadata;
+#endif
};
+typedef enum {
+ CAMERA_BUFFER_NONE = 0,
+ CAMERA_BUFFER_GRALLOC,
+ CAMERA_BUFFER_ANW,
+ CAMERA_BUFFER_MEMORY,
+ CAMERA_BUFFER_ION
+} CameraBufferType;
+
+typedef struct _CameraBuffer {
+ CameraBufferType type;
+ /* opaque is the generic drop-in replacement for the pointers
+ * that were used previously */
+ void *opaque;
+
+ /* opaque has different meanings depending on the buffer type:
+ * GRALLOC - gralloc_handle_t
+ * ANW - a pointer to the buffer_handle_t (which corresponds to
+ * the ANativeWindowBuffer *)
+ * MEMORY - address of allocated memory
+ * ION - address of mapped ion allocation
+ *
+ * FIXME opaque should be split into several fields:
+ * - handle/pointer we got from the allocator
+ * - handle/value we pass to OMX
+ * - pointer to mapped memory (if the buffer is mapped)
+ */
+
+ /* mapped holds ptr to mapped memory in userspace */
+ void *mapped;
+
+ /* These are specific to ION buffers */
+ struct ion_handle * ion_handle;
+ int ion_fd;
+ int fd;
+ size_t size;
+ int index;
+
+ /* These describe the camera buffer */
+ int width;
+ int stride;
+ int height;
+ const char *format;
+} CameraBuffer;
+
+void * camera_buffer_get_omx_ptr (CameraBuffer *buffer);
+
class CameraFrame
{
public:
@@ -242,6 +366,7 @@ class CameraFrame
FRAME_DATA= 0x80,
RAW_FRAME = 0x100,
SNAPSHOT_FRAME = 0x200,
+ REPROCESS_INPUT_FRAME = 0x400,
ALL_FRAMES = 0xFFFF ///Maximum of 16 frame types supported
};
@@ -249,6 +374,8 @@ class CameraFrame
{
ENCODE_RAW_YUV422I_TO_JPEG = 0x1 << 0,
HAS_EXIF_DATA = 0x1 << 1,
+ FORMAT_YUV422I_YUYV = 0x1 << 2,
+ FORMAT_YUV422I_UYVY = 0x1 << 3,
};
//default contrustor
@@ -265,45 +392,32 @@ class CameraFrame
mFd(0),
mLength(0),
mFrameMask(0),
- mQuirks(0) {
-
+ mQuirks(0)
+ {
mYuv[0] = 0; //NULL;
mYuv[1] = 0; //NULL;
- }
- //copy constructor
- CameraFrame(const CameraFrame &frame) :
- mCookie(frame.mCookie),
- mCookie2(frame.mCookie2),
- mBuffer(frame.mBuffer),
- mFrameType(frame.mFrameType),
- mTimestamp(frame.mTimestamp),
- mWidth(frame.mWidth),
- mHeight(frame.mHeight),
- mOffset(frame.mOffset),
- mAlignment(frame.mAlignment),
- mFd(frame.mFd),
- mLength(frame.mLength),
- mFrameMask(frame.mFrameMask),
- mQuirks(frame.mQuirks) {
-
- mYuv[0] = frame.mYuv[0];
- mYuv[1] = frame.mYuv[1];
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ mMetaData = 0;
+#endif
}
void *mCookie;
void *mCookie2;
- void *mBuffer;
+ CameraBuffer *mBuffer;
int mFrameType;
nsecs_t mTimestamp;
unsigned int mWidth, mHeight;
uint32_t mOffset;
unsigned int mAlignment;
int mFd;
- uint32_t mLength;
+ size_t mLength;
unsigned mFrameMask;
unsigned int mQuirks;
unsigned int mYuv[2];
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ android::sp<CameraMetadataResult> mMetaData;
+#endif
///@todo add other member vars like stride etc
};
@@ -326,7 +440,7 @@ public:
EVENT_FOCUS_ERROR = 0x2,
EVENT_ZOOM_INDEX_REACHED = 0x4,
EVENT_SHUTTER = 0x8,
- EVENT_FACE = 0x10,
+ EVENT_METADATA = 0x10,
///@remarks Future enum related to display, like frame displayed event, could be added here
ALL_EVENTS = 0xFFFF ///Maximum of 16 event types supported
};
@@ -359,23 +473,23 @@ public:
} ZoomEventData;
typedef struct FaceData_t {
- int32_t top;
- int32_t left;
- int32_t bottom;
- int32_t right;
- uint32_t score;
+ ssize_t top;
+ ssize_t left;
+ ssize_t bottom;
+ ssize_t right;
+ size_t score;
} FaceData;
- typedef sp<CameraFDResult> FaceEventData;
+ typedef android::sp<CameraMetadataResult> MetaEventData;
- class CameraHalEventData : public RefBase{
+ class CameraHalEventData : public android::RefBase{
public:
CameraHalEvent::FocusEventData focusEvent;
CameraHalEvent::ZoomEventData zoomEvent;
CameraHalEvent::ShutterEventData shutterEvent;
- CameraHalEvent::FaceEventData faceEvent;
+ CameraHalEvent::MetaEventData metadataEvent;
};
//default contrustor
@@ -391,7 +505,7 @@ public:
void* mCookie;
CameraHalEventType mEventType;
- sp<CameraHalEventData> mEventData;
+ android::sp<CameraHalEventData> mEventData;
};
@@ -423,7 +537,7 @@ public:
virtual ~MessageNotifier() {};
};
-class ErrorNotifier : public virtual RefBase
+class ErrorNotifier : public virtual android::RefBase
{
public:
virtual void errorNotify(int error) = 0;
@@ -439,8 +553,8 @@ public:
class FrameNotifier : public MessageNotifier
{
public:
- virtual void returnFrame(void* frameBuf, CameraFrame::FrameType frameType) = 0;
- virtual void addFramePointers(void *frameBuf, void *buf) = 0;
+ virtual void returnFrame(CameraBuffer* frameBuf, CameraFrame::FrameType frameType) = 0;
+ virtual void addFramePointers(CameraBuffer *frameBuf, void *buf) = 0;
virtual void removeFramePointers() = 0;
virtual ~FrameNotifier() {};
@@ -460,8 +574,8 @@ public:
int enableFrameNotification(int32_t frameTypes);
int disableFrameNotification(int32_t frameTypes);
- int returnFrame(void *frameBuf, CameraFrame::FrameType frameType);
- void addFramePointers(void *frameBuf, void *buf);
+ int returnFrame(CameraBuffer *frameBuf, CameraFrame::FrameType frameType);
+ void addFramePointers(CameraBuffer *frameBuf, void *buf);
void removeFramePointers();
};
@@ -489,13 +603,18 @@ public:
class BufferProvider
{
public:
- virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs) = 0;
+ virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs) = 0;
+
+ // gets a buffer list from BufferProvider when buffers are sent from external source and already pre-allocated
+ // only call this function for an input source into CameraHal. If buffers are not from a pre-allocated source
+ // this function will return NULL and numBufs of -1
+ virtual CameraBuffer *getBufferList(int *numBufs) = 0;
//additional methods used for memory mapping
virtual uint32_t * getOffsets() = 0;
virtual int getFd() = 0;
- virtual int freeBuffer(void* buf) = 0;
+ virtual int freeBufferList(CameraBuffer * buf) = 0;
virtual ~BufferProvider() {}
};
@@ -503,7 +622,7 @@ public:
/**
* Class for handling data and notify callbacks to application
*/
-class AppCallbackNotifier: public ErrorNotifier , public virtual RefBase
+class AppCallbackNotifier: public ErrorNotifier , public virtual android::RefBase
{
public:
@@ -545,7 +664,7 @@ public:
//All sub-components of Camera HAL call this whenever any error happens
virtual void errorNotify(int error);
- status_t startPreviewCallbacks(CameraParameters &params, void *buffers, uint32_t *offsets, int fd, uint32_t length, uint32_t count);
+ status_t startPreviewCallbacks(android::CameraParameters &params, CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count);
status_t stopPreviewCallbacks();
status_t enableMsgType(int32_t msgType);
@@ -577,12 +696,12 @@ public:
//Notifications from CameraHal for video recording case
status_t startRecording();
status_t stopRecording();
- status_t initSharedVideoBuffers(void *buffers, uint32_t *offsets, int fd, uint32_t length, uint32_t count, void *vidBufs);
+ status_t initSharedVideoBuffers(CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count, CameraBuffer *vidBufs);
status_t releaseRecordingFrame(const void *opaque);
- status_t useMetaDataBufferMode(bool enable);
+ status_t useMetaDataBufferMode(bool enable);
- void EncoderDoneCb(void*, void*, CameraFrame::FrameType type, void* cookie1, void* cookie2);
+ void EncoderDoneCb(void*, void*, CameraFrame::FrameType type, void* cookie1, void* cookie2, void *cookie3);
void useVideoBuffers(bool useVideoBuffers);
@@ -592,9 +711,9 @@ public:
void flushEventQueue();
//Internal class definitions
- class NotificationThread : public Thread {
+ class NotificationThread : public android::Thread {
AppCallbackNotifier* mAppCallbackNotifier;
- TIUTILS::MessageQueue mNotificationThreadQ;
+ Utils::MessageQueue mNotificationThreadQ;
public:
enum NotificationThreadCommands
{
@@ -609,7 +728,7 @@ public:
return mAppCallbackNotifier->notificationThread();
}
- TIUTILS::MessageQueue &msgQ() { return mNotificationThreadQ;}
+ Utils::MessageQueue &msgQ() { return mNotificationThreadQ;}
};
//Friend declarations
@@ -623,10 +742,12 @@ private:
status_t dummyRaw();
void copyAndSendPictureFrame(CameraFrame* frame, int32_t msgType);
void copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType);
+ size_t calculateBufferSize(size_t width, size_t height, const char *pixelFormat);
+ const char* getContstantForPixelFormat(const char *pixelFormat);
private:
- mutable Mutex mLock;
- mutable Mutex mBurstLock;
+ mutable android::Mutex mLock;
+ mutable android::Mutex mBurstLock;
CameraHal* mCameraHal;
camera_notify_callback mNotifyCb;
camera_data_callback mDataCb;
@@ -636,34 +757,37 @@ private:
//Keeps Video MemoryHeaps and Buffers within
//these objects
- KeyedVector<unsigned int, unsigned int> mVideoHeaps;
- KeyedVector<unsigned int, unsigned int> mVideoBuffers;
- KeyedVector<unsigned int, unsigned int> mVideoMap;
+ android::KeyedVector<unsigned int, unsigned int> mVideoHeaps;
+ android::KeyedVector<unsigned int, unsigned int> mVideoBuffers;
+ android::KeyedVector<void *, CameraBuffer *> mVideoMap;
//Keeps list of Gralloc handles and associated Video Metadata Buffers
- KeyedVector<uint32_t, uint32_t> mVideoMetadataBufferMemoryMap;
- KeyedVector<uint32_t, uint32_t> mVideoMetadataBufferReverseMap;
+ android::KeyedVector<void *, camera_memory_t *> mVideoMetadataBufferMemoryMap;
+ android::KeyedVector<void *, CameraBuffer *> mVideoMetadataBufferReverseMap;
bool mBufferReleased;
- sp< NotificationThread> mNotificationThread;
+ android::sp< NotificationThread> mNotificationThread;
EventProvider *mEventProvider;
FrameProvider *mFrameProvider;
- TIUTILS::MessageQueue mEventQ;
- TIUTILS::MessageQueue mFrameQ;
+ Utils::MessageQueue mEventQ;
+ Utils::MessageQueue mFrameQ;
NotifierState mNotifierState;
bool mPreviewing;
camera_memory_t* mPreviewMemory;
- unsigned char* mPreviewBufs[MAX_BUFFERS];
+ CameraBuffer mPreviewBuffers[MAX_BUFFERS];
int mPreviewBufCount;
+ int mPreviewWidth;
+ int mPreviewHeight;
+ int mPreviewStride;
const char *mPreviewPixelFormat;
- KeyedVector<unsigned int, sp<MemoryHeapBase> > mSharedPreviewHeaps;
- KeyedVector<unsigned int, sp<MemoryBase> > mSharedPreviewBuffers;
+ android::KeyedVector<unsigned int, android::sp<android::MemoryHeapBase> > mSharedPreviewHeaps;
+ android::KeyedVector<unsigned int, android::sp<android::MemoryBase> > mSharedPreviewBuffers;
//Burst mode active
bool mBurst;
- mutable Mutex mRecordingLock;
+ mutable android::Mutex mRecordingLock;
bool mRecording;
bool mMeasurementEnabled;
@@ -681,27 +805,24 @@ private:
/**
* Class used for allocating memory for JPEG bit stream buffers, output buffers of camera in no overlay case
*/
-class MemoryManager : public BufferProvider, public virtual RefBase
+class MemoryManager : public BufferProvider, public virtual android::RefBase
{
public:
- MemoryManager():mIonFd(-1){ }
+ MemoryManager();
+ ~MemoryManager();
- ///Initializes the memory manager creates any resources required
- status_t initialize() { return NO_ERROR; }
+ status_t initialize();
int setErrorHandler(ErrorNotifier *errorNotifier);
- virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs);
+ virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs);
+ virtual CameraBuffer *getBufferList(int *numBufs);
virtual uint32_t * getOffsets();
virtual int getFd() ;
- virtual int freeBuffer(void* buf);
+ virtual int freeBufferList(CameraBuffer * buflist);
private:
-
- sp<ErrorNotifier> mErrorNotifier;
+ android::sp<ErrorNotifier> mErrorNotifier;
int mIonFd;
- KeyedVector<unsigned int, unsigned int> mIonHandleMap;
- KeyedVector<unsigned int, unsigned int> mIonFdMap;
- KeyedVector<unsigned int, unsigned int> mIonBufLength;
};
@@ -712,29 +833,31 @@ private:
* Concrete classes derive from this class and provide implementations based on the specific camera h/w interface
*/
-class CameraAdapter: public FrameNotifier, public virtual RefBase
+class CameraAdapter: public FrameNotifier, public virtual android::RefBase
{
protected:
enum AdapterActiveStates {
- INTIALIZED_ACTIVE = 1 << 0,
- LOADED_PREVIEW_ACTIVE = 1 << 1,
- PREVIEW_ACTIVE = 1 << 2,
- LOADED_CAPTURE_ACTIVE = 1 << 3,
- CAPTURE_ACTIVE = 1 << 4,
- BRACKETING_ACTIVE = 1 << 5,
- AF_ACTIVE = 1 << 6,
- ZOOM_ACTIVE = 1 << 7,
- VIDEO_ACTIVE = 1 << 8,
+ INTIALIZED_ACTIVE = 1 << 0,
+ LOADED_PREVIEW_ACTIVE = 1 << 1,
+ PREVIEW_ACTIVE = 1 << 2,
+ LOADED_CAPTURE_ACTIVE = 1 << 3,
+ CAPTURE_ACTIVE = 1 << 4,
+ BRACKETING_ACTIVE = 1 << 5,
+ AF_ACTIVE = 1 << 6,
+ ZOOM_ACTIVE = 1 << 7,
+ VIDEO_ACTIVE = 1 << 8,
+ LOADED_REPROCESS_ACTIVE = 1 << 9,
+ REPROCESS_ACTIVE = 1 << 10,
};
public:
typedef struct
{
- void *mBuffers;
+ CameraBuffer *mBuffers;
uint32_t *mOffsets;
int mFd;
- uint32_t mLength;
- uint32_t mCount;
- uint32_t mMaxQueueable;
+ size_t mLength;
+ size_t mCount;
+ size_t mMaxQueueable;
} BuffersDescriptor;
enum CameraCommands
@@ -764,6 +887,16 @@ public:
CAMERA_START_FD = 22,
CAMERA_STOP_FD = 23,
CAMERA_SWITCH_TO_EXECUTING = 24,
+ CAMERA_USE_BUFFERS_VIDEO_CAPTURE = 25,
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ CAMERA_USE_BUFFERS_REPROCESS = 26,
+ CAMERA_START_REPROCESS = 27,
+#endif
+#ifdef OMAP_ENHANCEMENT_VTC
+ CAMERA_SETUP_TUNNEL = 28,
+ CAMERA_DESTROY_TUNNEL = 29,
+#endif
+ CAMERA_PREVIEW_INITIALIZATION = 30,
};
enum CameraMode
@@ -771,27 +904,32 @@ public:
CAMERA_PREVIEW,
CAMERA_IMAGE_CAPTURE,
CAMERA_VIDEO,
- CAMERA_MEASUREMENT
+ CAMERA_MEASUREMENT,
+ CAMERA_REPROCESS,
};
enum AdapterState {
- INTIALIZED_STATE = INTIALIZED_ACTIVE,
- LOADED_PREVIEW_STATE = LOADED_PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- PREVIEW_STATE = PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- LOADED_CAPTURE_STATE = LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- CAPTURE_STATE = CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- BRACKETING_STATE = BRACKETING_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE ,
- AF_STATE = AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- ZOOM_STATE = ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- VIDEO_STATE = VIDEO_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- VIDEO_AF_STATE = VIDEO_ACTIVE | AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- VIDEO_ZOOM_STATE = VIDEO_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- VIDEO_LOADED_CAPTURE_STATE = VIDEO_ACTIVE | LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- VIDEO_CAPTURE_STATE = VIDEO_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- AF_ZOOM_STATE = AF_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
- BRACKETING_ZOOM_STATE = BRACKETING_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ INTIALIZED_STATE = INTIALIZED_ACTIVE,
+ LOADED_PREVIEW_STATE = LOADED_PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ PREVIEW_STATE = PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ LOADED_CAPTURE_STATE = LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ CAPTURE_STATE = CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ BRACKETING_STATE = BRACKETING_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE ,
+ AF_STATE = AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ ZOOM_STATE = ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_STATE = VIDEO_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_AF_STATE = VIDEO_ACTIVE | AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_ZOOM_STATE = VIDEO_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_LOADED_CAPTURE_STATE = VIDEO_ACTIVE | LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_CAPTURE_STATE = VIDEO_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ AF_ZOOM_STATE = AF_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ BRACKETING_ZOOM_STATE = BRACKETING_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ LOADED_REPROCESS_STATE = LOADED_REPROCESS_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ LOADED_REPROCESS_CAPTURE_STATE = LOADED_REPROCESS_ACTIVE | LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ REPROCESS_STATE = REPROCESS_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
};
+
public:
///Initialzes the camera adapter creates any resources required
@@ -805,19 +943,13 @@ public:
event_callback eventCb = NULL,
void *cookie = NULL) = 0;
virtual void disableMsgType(int32_t msgs, void* cookie) = 0;
- virtual void returnFrame(void* frameBuf, CameraFrame::FrameType frameType) = 0;
- virtual void addFramePointers(void *frameBuf, void *buf) = 0;
+ virtual void returnFrame(CameraBuffer* frameBuf, CameraFrame::FrameType frameType) = 0;
+ virtual void addFramePointers(CameraBuffer *frameBuf, void *buf) = 0;
virtual void removeFramePointers() = 0;
//APIs to configure Camera adapter and get the current parameter set
- virtual int setParameters(const CameraParameters& params) = 0;
- virtual void getParameters(CameraParameters& params) = 0;
-
- //API to flush the buffers from Camera
- status_t flushBuffers()
- {
- return sendCommand(CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS);
- }
+ virtual int setParameters(const android::CameraParameters& params) = 0;
+ virtual void getParameters(android::CameraParameters& params) = 0;
//Registers callback for returning image buffers back to CameraHAL
virtual int registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data) = 0;
@@ -826,7 +958,7 @@ public:
virtual int registerEndCaptureCallback(end_image_capture_callback callback, void *user_data) = 0;
//API to send a command to the camera
- virtual status_t sendCommand(CameraCommands operation, int value1=0, int value2=0, int value3=0) = 0;
+ virtual status_t sendCommand(CameraCommands operation, int value1=0, int value2=0, int value3=0, int value4=0) = 0;
virtual ~CameraAdapter() {};
@@ -847,6 +979,8 @@ public:
// Retrieves the next Adapter state - for internal use (not locked)
virtual status_t getNextState(AdapterState &state) = 0;
+ virtual status_t setSharedAllocator(camera_request_memory shmem_alloc) = 0;
+
protected:
//The first two methods will try to switch the adapter state.
//Every call to setState() should be followed by a corresponding
@@ -857,16 +991,18 @@ protected:
virtual status_t rollbackState() = 0;
};
-class DisplayAdapter : public BufferProvider, public virtual RefBase
+class DisplayAdapter : public BufferProvider, public virtual android::RefBase
{
public:
- typedef struct S3DParameters_t
- {
- int mode;
- int framePacking;
- int order;
- int subSampling;
- } S3DParameters;
+ DisplayAdapter();
+
+#ifdef OMAP_ENHANCEMENT
+ preview_stream_extended_ops_t * extendedOps() const {
+ return mExtendedOps;
+ }
+
+ void setExtendedOps(preview_stream_extended_ops_t * extendedOps);
+#endif
///Initializes the display adapter creates any resources required
virtual int initialize() = 0;
@@ -874,7 +1010,7 @@ public:
virtual int setPreviewWindow(struct preview_stream_ops *window) = 0;
virtual int setFrameProvider(FrameNotifier *frameProvider) = 0;
virtual int setErrorHandler(ErrorNotifier *errorNotifier) = 0;
- virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL, S3DParameters *s3dParams = NULL) = 0;
+ virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL) = 0;
virtual int disableDisplay(bool cancel_buffer = true) = 0;
//Used for Snapshot review temp. pause
virtual int pauseDisplay(bool pause) = 0;
@@ -884,13 +1020,23 @@ public:
virtual int setSnapshotTimeRef(struct timeval *refTime = NULL) = 0;
#endif
- virtual int useBuffers(void *bufArr, int num) = 0;
virtual bool supportsExternalBuffering() = 0;
// Get max queueable buffers display supports
// This function should only be called after
- // allocateBuffer
- virtual int maxQueueableBuffers(unsigned int& queueable) = 0;
+ // allocateBufferList
+ virtual status_t maxQueueableBuffers(unsigned int& queueable) = 0;
+
+ // Get min buffers display needs at any given time
+ virtual status_t minUndequeueableBuffers(int& unqueueable) = 0;
+protected:
+ virtual const char* getPixFormatConstant(const char* parameters_format) const;
+ virtual size_t getBufSize(const char* parameters_format, int width, int height) const;
+
+private:
+#ifdef OMAP_ENHANCEMENT
+ preview_stream_extended_ops_t * mExtendedOps;
+#endif
};
static void releaseImageBuffers(void *userData);
@@ -912,6 +1058,7 @@ public:
///Constants
static const int NO_BUFFERS_PREVIEW;
static const int NO_BUFFERS_IMAGE_CAPTURE;
+ static const int NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
static const uint32_t VFR_SCALE = 1000;
@@ -959,10 +1106,25 @@ public:
int startPreview();
/**
+ * Set preview mode related initialization.
+ * Only used when slice based processing is enabled.
+ */
+ int cameraPreviewInitialization();
+
+ /**
* Only used if overlays are used for camera preview.
*/
int setPreviewWindow(struct preview_stream_ops *window);
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ void setExtendedPreviewStreamOps(preview_stream_extended_ops_t *ops);
+
+ /**
+ * Set a tap-in or tap-out point.
+ */
+ int setBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout);
+#endif
+
/**
* Stop a previously started preview.
*/
@@ -1013,7 +1175,7 @@ public:
/**
* Take a picture.
*/
- int takePicture();
+ int takePicture(const char* params);
/**
* Cancel a picture that was started with takePicture. Calling this
@@ -1023,7 +1185,7 @@ public:
/** Set the camera parameters. */
int setParameters(const char* params);
- int setParameters(const CameraParameters& params);
+ int setParameters(const android::CameraParameters& params);
/** Return the camera parameters. */
char* getParameters();
@@ -1045,8 +1207,19 @@ public:
*/
int dump(int fd) const;
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ /**
+ * start a reprocessing operation.
+ */
+ int reprocess(const char* params);
+
+ /**
+ * cancels current reprocessing operation
+ */
+ int cancel_reprocess();
+#endif
- status_t storeMetaDataInBuffers(bool enable);
+ status_t storeMetaDataInBuffers(bool enable);
//@}
@@ -1097,7 +1270,7 @@ private:
//@{
/** Set the camera parameters specific to Video Recording. */
- bool setVideoModeParameters(const CameraParameters&);
+ bool setVideoModeParameters(const android::CameraParameters&);
/** Reset the camera parameters specific to Video Recording. */
bool resetVideoModeParameters();
@@ -1110,7 +1283,7 @@ private:
void insertSupportedParams();
/** Allocate preview data buffers */
- status_t allocPreviewDataBufs(uint32_t size, uint32_t bufferCount);
+ status_t allocPreviewDataBufs(size_t size, size_t bufferCount);
/** Free preview data buffers */
status_t freePreviewDataBufs();
@@ -1122,18 +1295,30 @@ private:
status_t allocVideoBufs(uint32_t width, uint32_t height, uint32_t bufferCount);
/** Allocate image capture buffers */
- status_t allocImageBufs(unsigned int width, unsigned int height, uint32_t length, const char* previewFormat, unsigned int bufferCount);
+ status_t allocImageBufs(unsigned int width, unsigned int height, size_t length,
+ const char* previewFormat, unsigned int bufferCount,
+ unsigned int *max_queueable);
+
+ /** Allocate Raw buffers */
+ status_t allocRawBufs(int width, int height, const char* previewFormat, int bufferCount);
/** Free preview buffers */
status_t freePreviewBufs();
/** Free video bufs */
- status_t freeVideoBufs(void *bufs);
+ status_t freeVideoBufs(CameraBuffer *bufs);
+
+ /** Free RAW bufs */
+ status_t freeRawBufs();
//Check if a given resolution is supported by the current camera
//instance
bool isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions);
+ //Check if a given variable frame rate range is supported by the current camera
+ //instance
+ bool isFpsRangeValid(int fpsMin, int fpsMax, const char *supportedFpsRanges);
+
//Check if a given parameter is supported by the current camera
// instance
bool isParameterValid(const char *param, const char *supportedParams);
@@ -1153,11 +1338,11 @@ private:
void forceStopPreview();
- void selectFPSRange(int framerate, int *min_fps, int *max_fps);
-
- void setPreferredPreviewRes(int width, int height);
- void resetPreviewRes(CameraParameters *mParams, int width, int height);
+ void getPreferredPreviewRes(int *width, int *height);
+ void resetPreviewRes(android::CameraParameters *params);
+ // Internal __takePicture function - used in public takePicture() and reprocess()
+ int __takePicture(const char* params);
//@}
@@ -1178,21 +1363,33 @@ public:
static const char PARAMS_DELIMITER[];
CameraAdapter *mCameraAdapter;
- sp<AppCallbackNotifier> mAppCallbackNotifier;
- sp<DisplayAdapter> mDisplayAdapter;
- sp<MemoryManager> mMemoryManager;
+ android::sp<AppCallbackNotifier> mAppCallbackNotifier;
+ android::sp<DisplayAdapter> mDisplayAdapter;
+ android::sp<MemoryManager> mMemoryManager;
+ // TODO(XXX): May need to keep this as a vector in the future
+ // when we can have multiple tap-in/tap-out points
+ android::sp<DisplayAdapter> mBufferSourceAdapter_In;
+ android::sp<DisplayAdapter> mBufferSourceAdapter_Out;
+
+#ifdef OMAP_ENHANCEMENT
+ preview_stream_extended_ops_t * mExtendedPreviewStreamOps;
+#endif
- sp<IMemoryHeap> mPictureHeap;
+ android::sp<android::IMemoryHeap> mPictureHeap;
int* mGrallocHandles;
bool mFpsRangeChangedByApp;
-
+ int mRawWidth;
+ int mRawHeight;
+ bool mRawCapture;
///static member vars
+ static const int SW_SCALING_FPS_LIMIT;
+
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
//Timestamp from the CameraHal constructor
@@ -1211,34 +1408,40 @@ private:
bool mDynamicPreviewSwitch;
//keeps paused state of display
bool mDisplayPaused;
+
+#ifdef OMAP_ENHANCEMENT_VTC
+ bool mTunnelSetup;
+ bool mVTCUseCase;
+#endif
+
//Index of current camera adapter
int mCameraIndex;
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
- sp<SensorListener> mSensorListener;
+ android::sp<SensorListener> mSensorListener;
void* mCameraAdapterHandle;
- CameraParameters mParameters;
+ android::CameraParameters mParameters;
bool mPreviewRunning;
bool mPreviewStateOld;
bool mRecordingEnabled;
EventProvider *mEventProvider;
- int32_t *mPreviewDataBufs;
+ CameraBuffer *mPreviewDataBuffers;
uint32_t *mPreviewDataOffsets;
int mPreviewDataFd;
int mPreviewDataLength;
- int32_t *mImageBufs;
+ CameraBuffer *mImageBuffers;
uint32_t *mImageOffsets;
int mImageFd;
int mImageLength;
- int32_t *mPreviewBufs;
+ CameraBuffer *mPreviewBuffers;
uint32_t *mPreviewOffsets;
int mPreviewLength;
int mPreviewFd;
- int32_t *mVideoBufs;
+ CameraBuffer *mVideoBuffers;
uint32_t *mVideoOffsets;
int mVideoFd;
int mVideoLength;
@@ -1254,6 +1457,7 @@ private:
CameraProperties::Properties* mCameraProperties;
bool mPreviewStartInProgress;
+ bool mPreviewInitializationDone;
bool mSetPreviewWindowCalled;
@@ -1264,9 +1468,10 @@ private:
int mVideoWidth;
int mVideoHeight;
+ android::String8 mCapModeBackup;
};
-
-}; // namespace android
+} // namespace Camera
+} // namespace Ti
#endif
diff --git a/camera/inc/CameraProperties.h b/camera/inc/CameraProperties.h
index 6f05877..bfc6012 100644
--- a/camera/inc/CameraProperties.h
+++ b/camera/inc/CameraProperties.h
@@ -30,15 +30,27 @@
#include <ctype.h>
#include "cutils/properties.h"
-namespace android {
+#include "Common.h"
-#define MAX_CAMERAS_SUPPORTED 2
+namespace Ti {
+namespace Camera {
+
+#define MAX_CAMERAS_SUPPORTED 3
#define MAX_SIMUL_CAMERAS_SUPPORTED 1
#define MAX_PROP_NAME_LENGTH 50
#define MAX_PROP_VALUE_LENGTH 2048
-#define EXIF_MAKE_DEFAULT "default_make"
-#define EXIF_MODEL_DEFAULT "default_model"
+#define REMAINING_BYTES(buff) ((((int)sizeof(buff) - 1 - (int)strlen(buff)) < 0) ? 0 : (sizeof(buff) - 1 - strlen(buff)))
+
+enum OperatingMode {
+ MODE_HIGH_SPEED = 0,
+ MODE_HIGH_QUALITY,
+ MODE_ZEROSHUTTERLAG,
+ MODE_VIDEO,
+ MODE_STEREO,
+ MODE_CPCAM,
+ MODE_MAX
+};
// Class that handles the Camera Properties
class CameraProperties
@@ -47,19 +59,32 @@ public:
static const char INVALID[];
static const char CAMERA_NAME[];
static const char CAMERA_SENSOR_INDEX[];
+ static const char CAMERA_SENSOR_ID[];
static const char ORIENTATION_INDEX[];
static const char FACING_INDEX[];
- static const char S3D_SUPPORTED[];
static const char SUPPORTED_PREVIEW_SIZES[];
+ static const char SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[];
+ static const char SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[];
+ static const char SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[];
static const char SUPPORTED_PREVIEW_FORMATS[];
static const char SUPPORTED_PREVIEW_FRAME_RATES[];
+ static const char SUPPORTED_PREVIEW_FRAME_RATES_EXT[];
static const char SUPPORTED_PICTURE_SIZES[];
+ static const char SUPPORTED_PICTURE_SUBSAMPLED_SIZES[];
+ static const char SUPPORTED_PICTURE_TOPBOTTOM_SIZES[];
+ static const char SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[];
static const char SUPPORTED_PICTURE_FORMATS[];
static const char SUPPORTED_THUMBNAIL_SIZES[];
static const char SUPPORTED_WHITE_BALANCE[];
static const char SUPPORTED_EFFECTS[];
static const char SUPPORTED_ANTIBANDING[];
static const char SUPPORTED_EXPOSURE_MODES[];
+ static const char SUPPORTED_MANUAL_EXPOSURE_MIN[];
+ static const char SUPPORTED_MANUAL_EXPOSURE_MAX[];
+ static const char SUPPORTED_MANUAL_EXPOSURE_STEP[];
+ static const char SUPPORTED_MANUAL_GAIN_ISO_MIN[];
+ static const char SUPPORTED_MANUAL_GAIN_ISO_MAX[];
+ static const char SUPPORTED_MANUAL_GAIN_ISO_STEP[];
static const char SUPPORTED_EV_MIN[];
static const char SUPPORTED_EV_MAX[];
static const char SUPPORTED_EV_STEP[];
@@ -97,9 +122,15 @@ public:
static const char CONTRAST[];
static const char IPP[];
static const char GBCE[];
- static const char AUTOCONVERGENCE[];
+ static const char SUPPORTED_GBCE[];
+ static const char GLBCE[];
+ static const char SUPPORTED_GLBCE[];
static const char AUTOCONVERGENCE_MODE[];
- static const char MANUALCONVERGENCE_VALUES[];
+ static const char AUTOCONVERGENCE_MODE_VALUES[];
+ static const char MANUAL_CONVERGENCE[];
+ static const char SUPPORTED_MANUAL_CONVERGENCE_MIN[];
+ static const char SUPPORTED_MANUAL_CONVERGENCE_MAX[];
+ static const char SUPPORTED_MANUAL_CONVERGENCE_STEP[];
static const char SENSOR_ORIENTATION[];
static const char SENSOR_ORIENTATION_VALUES[];
static const char REVISION[];
@@ -113,16 +144,22 @@ public:
static const char MAX_FD_HW_FACES[];
static const char MAX_FD_SW_FACES[];
+ static const char MAX_PICTURE_WIDTH[];
+ static const char MAX_PICTURE_HEIGHT[];
+
static const char PARAMS_DELIMITER [];
- static const char S3D2D_PREVIEW[];
- static const char S3D2D_PREVIEW_MODES[];
+ static const char S3D_PRV_FRAME_LAYOUT[];
+ static const char S3D_PRV_FRAME_LAYOUT_VALUES[];
+ static const char S3D_CAP_FRAME_LAYOUT[];
+ static const char S3D_CAP_FRAME_LAYOUT_VALUES[];
static const char VSTAB[];
static const char VSTAB_SUPPORTED[];
+ static const char VNF[];
+ static const char VNF_SUPPORTED[];
static const char FRAMERATE_RANGE[];
- static const char FRAMERATE_RANGE_IMAGE[];
- static const char FRAMERATE_RANGE_VIDEO[];
static const char FRAMERATE_RANGE_SUPPORTED[];
+ static const char FRAMERATE_RANGE_EXT_SUPPORTED[];
static const char DEFAULT_VALUE[];
@@ -138,7 +175,14 @@ public:
static const char VIDEO_SIZE[];
static const char SUPPORTED_VIDEO_SIZES[];
- static const char PREFERRED_PREVIEW_SIZE_FOR_VIDEO[];
+
+ static const char MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[];
+ static const char MECHANICAL_MISALIGNMENT_CORRECTION[];
+
+ static const char RAW_WIDTH[];
+ static const char RAW_HEIGHT[];
+
+ static const char CAP_MODE_VALUES[];
CameraProperties();
~CameraProperties();
@@ -147,32 +191,31 @@ public:
class Properties
{
public:
+
Properties()
{
- mProperties = new DefaultKeyedVector<String8, String8>(String8(DEFAULT_VALUE));
- char property[PROPERTY_VALUE_MAX];
- property_get("ro.product.manufacturer", property, EXIF_MAKE_DEFAULT);
- property[0] = toupper(property[0]);
- set(EXIF_MAKE, property);
- property_get("ro.product.model", property, EXIF_MODEL_DEFAULT);
- property[0] = toupper(property[0]);
- set(EXIF_MODEL, property);
}
+
~Properties()
{
- delete mProperties;
}
- ssize_t set(const char *prop, const char *value);
- ssize_t set(const char *prop, int value);
- const char* get(const char * prop);
+
+ void set(const char *prop, const char *value);
+ void set(const char *prop, int value);
+ const char* get(const char * prop) const;
+ int getInt(const char * prop) const;
+ void setSensorIndex(int idx);
+ void setMode(OperatingMode mode);
+ OperatingMode getMode() const;
void dump();
protected:
- const char* keyAt(unsigned int);
- const char* valueAt(unsigned int);
+ const char* keyAt(const unsigned int) const;
+ const char* valueAt(const unsigned int) const;
private:
- DefaultKeyedVector<String8, String8>* mProperties;
+ OperatingMode mCurrentMode;
+ android::DefaultKeyedVector<android::String8, android::String8> mProperties[MODE_MAX];
};
@@ -184,15 +227,15 @@ public:
private:
- uint32_t mCamerasSupported;
+ int mCamerasSupported;
int mInitialized;
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
Properties mCameraProps[MAX_CAMERAS_SUPPORTED];
};
-};
+} // namespace Camera
+} // namespace Ti
#endif //CAMERA_PROPERTIES_H
-
diff --git a/camera/inc/Common.h b/camera/inc/Common.h
new file mode 100644
index 0000000..b369e65
--- /dev/null
+++ b/camera/inc/Common.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CAMERAHAL_COMMON_H
+#define CAMERAHAL_COMMON_H
+
+#include "UtilsCommon.h"
+#include "DebugUtils.h"
+#include "Status.h"
+
+
+
+
+// logging functions
+#ifdef CAMERAHAL_DEBUG
+# define CAMHAL_LOGD DBGUTILS_LOGD
+# define CAMHAL_LOGDA DBGUTILS_LOGDA
+# define CAMHAL_LOGDB DBGUTILS_LOGDB
+# ifdef CAMERAHAL_DEBUG_VERBOSE
+# define CAMHAL_LOGV DBGUTILS_LOGV
+# define CAMHAL_LOGVA DBGUTILS_LOGVA
+# define CAMHAL_LOGVB DBGUTILS_LOGVB
+# else
+# define CAMHAL_LOGV(...)
+# define CAMHAL_LOGVA(str)
+# define CAMHAL_LOGVB(str, ...)
+# endif
+#else
+# define CAMHAL_LOGD(...)
+# define CAMHAL_LOGDA(str)
+# define CAMHAL_LOGDB(str, ...)
+# define CAMHAL_LOGV(...)
+# define CAMHAL_LOGVA(str)
+# define CAMHAL_LOGVB(str, ...)
+#endif
+
+#define CAMHAL_LOGI DBGUTILS_LOGI
+#define CAMHAL_LOGW DBGUTILS_LOGW
+#define CAMHAL_LOGE DBGUTILS_LOGE
+#define CAMHAL_LOGEA DBGUTILS_LOGEA
+#define CAMHAL_LOGEB DBGUTILS_LOGEB
+#define CAMHAL_LOGF DBGUTILS_LOGF
+
+#define CAMHAL_ASSERT DBGUTILS_ASSERT
+#define CAMHAL_ASSERT_X DBGUTILS_ASSERT_X
+
+#define CAMHAL_UNUSED(x) (void)x
+
+
+
+
+#endif // CAMERAHAL_COMMON_H
diff --git a/camera/inc/Encoder_libjpeg.h b/camera/inc/Encoder_libjpeg.h
index fb9a894..72feb08 100644
--- a/camera/inc/Encoder_libjpeg.h
+++ b/camera/inc/Encoder_libjpeg.h
@@ -29,11 +29,19 @@
extern "C" {
#include "jhead.h"
+
+#undef TRUE
+#undef FALSE
+
}
-#define CANCEL_TIMEOUT 3000000 // 3 seconds
+#include "CameraHal.h"
+
+#define CANCEL_TIMEOUT 5000000 // 5 seconds
+
+namespace Ti {
+namespace Camera {
-namespace android {
/**
* libjpeg encoder class - uses libjpeg to encode yuv
*/
@@ -45,6 +53,7 @@ typedef void (*encoder_libjpeg_callback_t) (void* main_jpeg,
void* cookie1,
void* cookie2,
void* cookie3,
+ void* cookie4,
bool canceled);
// these have to match strings defined in external/jhead/exif.c
@@ -86,7 +95,12 @@ class ExifElementsTable {
public:
ExifElementsTable() :
gps_tag_count(0), exif_tag_count(0), position(0),
- jpeg_opened(false), has_datetime_tag(false) { }
+ jpeg_opened(false)
+ {
+#ifdef ANDROID_API_JB_OR_LATER
+ has_datetime_tag = false;
+#endif
+ }
~ExifElementsTable();
status_t insertElement(const char* tag, const char* value);
@@ -102,10 +116,12 @@ class ExifElementsTable {
unsigned int exif_tag_count;
unsigned int position;
bool jpeg_opened;
+#ifdef ANDROID_API_JB_OR_LATER
bool has_datetime_tag;
+#endif
};
-class Encoder_libjpeg : public Thread {
+class Encoder_libjpeg : public android::Thread {
/* public member types and variables */
public:
struct params {
@@ -131,9 +147,9 @@ class Encoder_libjpeg : public Thread {
CameraFrame::FrameType type,
void* cookie1,
void* cookie2,
- void* cookie3)
- : Thread(false), mMainInput(main_jpeg), mThumbnailInput(tn_jpeg), mCb(cb),
- mCancelEncoding(false), mCookie1(cookie1), mCookie2(cookie2), mCookie3(cookie3),
+ void* cookie3, void *cookie4)
+ : android::Thread(false), mMainInput(main_jpeg), mThumbnailInput(tn_jpeg), mCb(cb),
+ mCancelEncoding(false), mCookie1(cookie1), mCookie2(cookie2), mCookie3(cookie3), mCookie4(cookie4),
mType(type), mThumb(NULL) {
this->incStrong(this);
mCancelSem.Create(0);
@@ -145,10 +161,9 @@ class Encoder_libjpeg : public Thread {
virtual bool threadLoop() {
size_t size = 0;
- sp<Encoder_libjpeg> tn = NULL;
if (mThumbnailInput) {
// start thread to encode thumbnail
- mThumb = new Encoder_libjpeg(mThumbnailInput, NULL, NULL, mType, NULL, NULL, NULL);
+ mThumb = new Encoder_libjpeg(mThumbnailInput, NULL, NULL, mType, NULL, NULL, NULL, NULL);
mThumb->run();
}
@@ -167,7 +182,7 @@ class Encoder_libjpeg : public Thread {
}
if(mCb) {
- mCb(mMainInput, mThumbnailInput, mType, mCookie1, mCookie2, mCookie3, mCancelEncoding);
+ mCb(mMainInput, mThumbnailInput, mType, mCookie1, mCookie2, mCookie3, mCookie4, mCancelEncoding);
}
// encoder thread runs, self-destructs, and then exits
@@ -197,13 +212,15 @@ class Encoder_libjpeg : public Thread {
void* mCookie1;
void* mCookie2;
void* mCookie3;
+ void* mCookie4;
CameraFrame::FrameType mType;
- sp<Encoder_libjpeg> mThumb;
- Semaphore mCancelSem;
+ android::sp<Encoder_libjpeg> mThumb;
+ Utils::Semaphore mCancelSem;
size_t encode(params*);
};
-}
+} // namespace Camera
+} // namespace Ti
#endif
diff --git a/camera/inc/General3A_Settings.h b/camera/inc/General3A_Settings.h
index dab1f8e..777b255 100644
--- a/camera/inc/General3A_Settings.h
+++ b/camera/inc/General3A_Settings.h
@@ -29,12 +29,8 @@
#ifndef GENERAL_3A_SETTINGS_H
#define GENERAL_3A_SETTINGS_H
-#define FOCUS_FACE_PRIORITY OMX_IMAGE_FocusControlMax -1
-#define FOCUS_REGION_PRIORITY OMX_IMAGE_FocusControlMax -2
-#define WB_FACE_PRIORITY OMX_WhiteBalControlMax -1
-#define EXPOSURE_FACE_PRIORITY OMX_ExposureControlMax - 1
-
-namespace android {
+namespace Ti {
+namespace Camera {
struct userToOMX_LUT{
const char * userDefinition;
@@ -58,15 +54,15 @@ const userToOMX_LUT isoUserToOMX[] = {
};
const userToOMX_LUT effects_UserToOMX [] = {
- { CameraParameters::EFFECT_NONE, OMX_ImageFilterNone },
- { CameraParameters::EFFECT_NEGATIVE, OMX_ImageFilterNegative },
- { CameraParameters::EFFECT_SOLARIZE, OMX_ImageFilterSolarize },
- { CameraParameters::EFFECT_SEPIA, OMX_ImageFilterSepia },
- { CameraParameters::EFFECT_MONO, OMX_ImageFilterGrayScale },
- { CameraParameters::EFFECT_BLACKBOARD, OMX_TI_ImageFilterBlackBoard },
- { CameraParameters::EFFECT_WHITEBOARD, OMX_TI_ImageFilterWhiteBoard },
- { CameraParameters::EFFECT_AQUA, OMX_TI_ImageFilterAqua },
- { CameraParameters::EFFECT_POSTERIZE, OMX_TI_ImageFilterPosterize },
+ { android::CameraParameters::EFFECT_NONE, OMX_ImageFilterNone },
+ { android::CameraParameters::EFFECT_NEGATIVE, OMX_ImageFilterNegative },
+ { android::CameraParameters::EFFECT_SOLARIZE, OMX_ImageFilterSolarize },
+ { android::CameraParameters::EFFECT_SEPIA, OMX_ImageFilterSepia },
+ { android::CameraParameters::EFFECT_MONO, OMX_ImageFilterGrayScale },
+ { android::CameraParameters::EFFECT_BLACKBOARD, OMX_TI_ImageFilterBlackBoard },
+ { android::CameraParameters::EFFECT_WHITEBOARD, OMX_TI_ImageFilterWhiteBoard },
+ { android::CameraParameters::EFFECT_AQUA, OMX_TI_ImageFilterAqua },
+ { android::CameraParameters::EFFECT_POSTERIZE, OMX_TI_ImageFilterPosterize },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::EFFECT_NATURAL, OMX_ImageFilterNatural },
{ TICameraParameters::EFFECT_VIVID, OMX_ImageFilterVivid },
@@ -76,27 +72,24 @@ const userToOMX_LUT effects_UserToOMX [] = {
};
const userToOMX_LUT scene_UserToOMX [] = {
- { CameraParameters::SCENE_MODE_AUTO, OMX_Manual },
- { CameraParameters::SCENE_MODE_ACTION, OMX_TI_Action },
- { CameraParameters::SCENE_MODE_NIGHT, OMX_TI_Night },
- { CameraParameters::SCENE_MODE_PARTY, OMX_TI_Party },
- { CameraParameters::SCENE_MODE_SUNSET, OMX_TI_Sunset },
-/*********** TODO: These scene modes are not verified. ************
- ***************** Have to verify and reeable later. **************
- { CameraParameters::SCENE_MODE_THEATRE, OMX_TI_Theatre },
- { CameraParameters::SCENE_MODE_LANDSCAPE, OMX_Landscape },
- { CameraParameters::SCENE_MODE_NIGHT_PORTRAIT, OMX_NightPortrait },
- { CameraParameters::SCENE_MODE_FIREWORKS, OMX_Fireworks },
- { CameraParameters::SCENE_MODE_BEACH, OMX_TI_Beach },
- { CameraParameters::SCENE_MODE_CANDLELIGHT, OMX_TI_Candlelight },
- { CameraParameters::SCENE_MODE_PORTRAIT, OMX_TI_Portrait },
- { CameraParameters::SCENE_MODE_SNOW, OMX_TI_Snow },
- { CameraParameters::SCENE_MODE_STEADYPHOTO, OMX_TI_Steadyphoto },
-*********************************************************************/
+ { android::CameraParameters::SCENE_MODE_AUTO, OMX_Manual },
+ { android::CameraParameters::SCENE_MODE_LANDSCAPE, OMX_Landscape },
+ { android::CameraParameters::SCENE_MODE_NIGHT_PORTRAIT, OMX_NightPortrait },
+ { android::CameraParameters::SCENE_MODE_FIREWORKS, OMX_Fireworks },
+ { android::CameraParameters::SCENE_MODE_ACTION, OMX_TI_Action },
+ { android::CameraParameters::SCENE_MODE_BEACH, OMX_TI_Beach },
+ { android::CameraParameters::SCENE_MODE_CANDLELIGHT, OMX_TI_Candlelight },
+ { android::CameraParameters::SCENE_MODE_NIGHT, OMX_TI_Night },
+ { android::CameraParameters::SCENE_MODE_PARTY, OMX_TI_Party },
+ { android::CameraParameters::SCENE_MODE_PORTRAIT, OMX_TI_Portrait },
+ { android::CameraParameters::SCENE_MODE_SNOW, OMX_TI_Snow },
+ { android::CameraParameters::SCENE_MODE_STEADYPHOTO, OMX_TI_Steadyphoto },
+ { android::CameraParameters::SCENE_MODE_SUNSET, OMX_TI_Sunset },
+ { android::CameraParameters::SCENE_MODE_THEATRE, OMX_TI_Theatre },
+ { android::CameraParameters::SCENE_MODE_SPORTS, OMX_Sport },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::SCENE_MODE_CLOSEUP, OMX_Closeup },
{ TICameraParameters::SCENE_MODE_AQUA, OMX_Underwater },
- { TICameraParameters::SCENE_MODE_SPORT, OMX_Sport },
{ TICameraParameters::SCENE_MODE_MOOD, OMX_Mood },
{ TICameraParameters::SCENE_MODE_NIGHT_INDOOR, OMX_NightIndoor },
{ TICameraParameters::SCENE_MODE_DOCUMENT, OMX_Document },
@@ -108,47 +101,45 @@ const userToOMX_LUT scene_UserToOMX [] = {
};
const userToOMX_LUT whiteBal_UserToOMX [] = {
- { CameraParameters::WHITE_BALANCE_AUTO, OMX_WhiteBalControlAuto },
- { CameraParameters::WHITE_BALANCE_DAYLIGHT, OMX_WhiteBalControlSunLight },
- { CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, OMX_WhiteBalControlCloudy },
- { CameraParameters::WHITE_BALANCE_FLUORESCENT, OMX_WhiteBalControlFluorescent },
- { CameraParameters::WHITE_BALANCE_INCANDESCENT, OMX_WhiteBalControlIncandescent },
-/********************** THESE ARE CURRENT NOT TUNED PROPERLY *************************
- { CameraParameters::WHITE_BALANCE_SHADE, OMX_TI_WhiteBalControlShade },
- { CameraParameters::WHITE_BALANCE_TWILIGHT, OMX_TI_WhiteBalControlTwilight },
- { CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT, OMX_TI_WhiteBalControlWarmFluorescent },
-**************************************************************************************/
+ { android::CameraParameters::WHITE_BALANCE_AUTO, OMX_WhiteBalControlAuto },
+ { android::CameraParameters::WHITE_BALANCE_DAYLIGHT, OMX_WhiteBalControlSunLight },
+ { android::CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, OMX_WhiteBalControlCloudy },
+ { android::CameraParameters::WHITE_BALANCE_FLUORESCENT, OMX_WhiteBalControlFluorescent },
+ { android::CameraParameters::WHITE_BALANCE_INCANDESCENT, OMX_WhiteBalControlIncandescent },
+ { android::CameraParameters::WHITE_BALANCE_SHADE, OMX_TI_WhiteBalControlShade },
+ { android::CameraParameters::WHITE_BALANCE_TWILIGHT, OMX_TI_WhiteBalControlTwilight },
+ { android::CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT, OMX_TI_WhiteBalControlWarmFluorescent },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::WHITE_BALANCE_TUNGSTEN, OMX_WhiteBalControlTungsten },
{ TICameraParameters::WHITE_BALANCE_HORIZON, OMX_WhiteBalControlHorizon },
- { TICameraParameters::WHITE_BALANCE_FACE, WB_FACE_PRIORITY },
{ TICameraParameters::WHITE_BALANCE_SUNSET, OMX_TI_WhiteBalControlSunset }
#endif
};
const userToOMX_LUT antibanding_UserToOMX [] = {
- { CameraParameters::ANTIBANDING_OFF, OMX_FlickerCancelOff },
- { CameraParameters::ANTIBANDING_AUTO, OMX_FlickerCancelAuto },
- { CameraParameters::ANTIBANDING_50HZ, OMX_FlickerCancel50 },
- { CameraParameters::ANTIBANDING_60HZ, OMX_FlickerCancel60 }
+ { android::CameraParameters::ANTIBANDING_OFF, OMX_FlickerCancelOff },
+ { android::CameraParameters::ANTIBANDING_AUTO, OMX_FlickerCancelAuto },
+ { android::CameraParameters::ANTIBANDING_50HZ, OMX_FlickerCancel50 },
+ { android::CameraParameters::ANTIBANDING_60HZ, OMX_FlickerCancel60 }
};
const userToOMX_LUT focus_UserToOMX [] = {
- { CameraParameters::FOCUS_MODE_AUTO, OMX_IMAGE_FocusControlAutoLock },
- { CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlAutoInfinity },
- { CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlHyperfocal },
- { CameraParameters::FOCUS_MODE_MACRO, OMX_IMAGE_FocusControlAutoMacro },
- { CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, OMX_IMAGE_FocusControlAuto },
- { CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, OMX_IMAGE_FocusControlAuto },
+ { android::CameraParameters::FOCUS_MODE_AUTO, OMX_IMAGE_FocusControlAutoLock },
+ { android::CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlAutoInfinity },
+ { android::CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlHyperfocal },
+ { android::CameraParameters::FOCUS_MODE_MACRO, OMX_IMAGE_FocusControlAutoMacro },
+ { android::CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, OMX_IMAGE_FocusControlAuto },
+ { android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, OMX_IMAGE_FocusControlAuto },
#ifdef OMAP_ENHANCEMENT
- { TICameraParameters::FOCUS_MODE_FACE , FOCUS_FACE_PRIORITY },
+ { TICameraParameters::FOCUS_MODE_FACE , OMX_IMAGE_FocusControlContinousFacePriority },
{ TICameraParameters::FOCUS_MODE_PORTRAIT, OMX_IMAGE_FocusControlPortrait },
{ TICameraParameters::FOCUS_MODE_EXTENDED, OMX_IMAGE_FocusControlExtended },
#endif
+ { TICameraParameters::FOCUS_MODE_OFF , OMX_IMAGE_FocusControlOff }
};
const userToOMX_LUT exposure_UserToOMX [] = {
- { TICameraParameters::EXPOSURE_MODE_OFF, OMX_ExposureControlOff },
+ { TICameraParameters::EXPOSURE_MODE_MANUAL, OMX_ExposureControlOff },
{ TICameraParameters::EXPOSURE_MODE_AUTO, OMX_ExposureControlAuto },
{ TICameraParameters::EXPOSURE_MODE_NIGHT, OMX_ExposureControlNight },
{ TICameraParameters::EXPOSURE_MODE_BACKLIGHT, OMX_ExposureControlBackLight },
@@ -158,15 +149,14 @@ const userToOMX_LUT exposure_UserToOMX [] = {
{ TICameraParameters::EXPOSURE_MODE_BEACH, OMX_ExposureControlBeach },
{ TICameraParameters::EXPOSURE_MODE_APERTURE, OMX_ExposureControlLargeAperture },
{ TICameraParameters::EXPOSURE_MODE_SMALL_APERTURE, OMX_ExposureControlSmallApperture },
- { TICameraParameters::EXPOSURE_MODE_FACE, EXPOSURE_FACE_PRIORITY },
};
const userToOMX_LUT flash_UserToOMX [] = {
- { CameraParameters::FLASH_MODE_OFF ,OMX_IMAGE_FlashControlOff },
- { CameraParameters::FLASH_MODE_ON ,OMX_IMAGE_FlashControlOn },
- { CameraParameters::FLASH_MODE_AUTO ,OMX_IMAGE_FlashControlAuto },
- { CameraParameters::FLASH_MODE_TORCH ,OMX_IMAGE_FlashControlTorch },
- { CameraParameters::FLASH_MODE_RED_EYE ,OMX_IMAGE_FlashControlRedEyeReduction },
+ { android::CameraParameters::FLASH_MODE_OFF ,OMX_IMAGE_FlashControlOff },
+ { android::CameraParameters::FLASH_MODE_ON ,OMX_IMAGE_FlashControlOn },
+ { android::CameraParameters::FLASH_MODE_AUTO ,OMX_IMAGE_FlashControlAuto },
+ { android::CameraParameters::FLASH_MODE_TORCH ,OMX_IMAGE_FlashControlTorch },
+ { android::CameraParameters::FLASH_MODE_RED_EYE ,OMX_IMAGE_FlashControlRedEyeReduction },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::FLASH_MODE_FILL_IN ,OMX_IMAGE_FlashControlFillin }
#endif
@@ -241,11 +231,23 @@ class Gen3A_settings{
int Sharpness;
int ISO;
int FlashMode;
+ int ManualExposure;
+ int ManualExposureRight;
+ int ManualGain;
+ int ManualGainRight;
unsigned int Brightness;
OMX_BOOL ExposureLock;
OMX_BOOL FocusLock;
OMX_BOOL WhiteBalanceLock;
+
+ OMX_BOOL AlgoFixedGamma;
+ OMX_BOOL AlgoNSF1;
+ OMX_BOOL AlgoNSF2;
+ OMX_BOOL AlgoSharpening;
+ OMX_BOOL AlgoThreeLinColorMap;
+ OMX_BOOL AlgoGIC;
+
};
/*
@@ -270,11 +272,21 @@ enum E3ASettingsFlags
SetExpLock = 1 << 16,
SetWBLock = 1 << 17,
SetMeteringAreas = 1 << 18,
+ SetManualExposure = 1 << 19,
+
+ SetAlgoFixedGamma = 1 << 20,
+ SetAlgoNSF1 = 1 << 21,
+ SetAlgoNSF2 = 1 << 22,
+ SetAlgoSharpening = 1 << 23,
+ SetAlgoThreeLinColorMap = 1 << 24,
+ SetAlgoGIC = 1 << 25,
+
E3aSettingMax,
E3AsettingsAll = ( ((E3aSettingMax -1 ) << 1) -1 ) /// all possible flags raised
};
-};
+} // namespace Camera
+} // namespace Ti
#endif //GENERAL_3A_SETTINGS_H
diff --git a/camera/inc/NV12_resize.h b/camera/inc/NV12_resize.h
index 927faf8..4b05a4f 100644
--- a/camera/inc/NV12_resize.h
+++ b/camera/inc/NV12_resize.h
@@ -1,66 +1,61 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
#ifndef NV12_RESIZE_H_
#define NV12_RESIZE_H_
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-typedef unsigned char mmBool;
-typedef unsigned char mmUchar;
-typedef unsigned char mmUint8;
-typedef unsigned char mmByte;
-typedef unsigned short mmUint16;
-typedef unsigned int mmUint32;
-typedef unsigned long mmUint64;
-typedef signed char mmInt8;
-typedef char mmChar;
-typedef signed short mmInt16;
-typedef signed int mmInt32;
-typedef signed long mmLong;
-typedef signed int mmHandle;
-typedef float mmFloat;
-typedef double mmDouble;
-typedef int HObj;
-typedef HObj HFile;
-typedef int HDir;
-typedef void* mmMutexHandle;
-typedef struct _fstat
-{
- mmInt32 fileSize;
-}VE_FileAttribute;
-
-typedef struct
-{
- mmInt32 second;
- mmInt32 millisecond;
-}tsVE_Time;
-
-typedef struct
-{
- mmInt32 year;
- mmInt32 month;
- mmInt32 day;
- mmInt32 hour;
- mmInt32 minute;
- mmInt32 second;
+#include "Common.h"
+
+typedef unsigned char mmBool;
+typedef unsigned char mmUchar;
+typedef unsigned char mmUint8;
+typedef unsigned char mmByte;
+typedef unsigned short mmUint16;
+typedef unsigned int mmUint32;
+typedef unsigned long mmUint64;
+typedef signed char mmInt8;
+typedef char mmChar;
+typedef signed short mmInt16;
+typedef signed int mmInt32;
+typedef signed long mmLong;
+typedef signed int mmHandle;
+typedef float mmFloat;
+typedef double mmDouble;
+typedef int HObj;
+typedef HObj HFile;
+typedef int HDir;
+typedef void* mmMutexHandle;
+typedef struct _fstat {
+ mmInt32 fileSize;
+} VE_FileAttribute;
+
+typedef struct {
+ mmInt32 second;
+ mmInt32 millisecond;
+} tsVE_Time;
+
+typedef struct {
+ mmInt32 year;
+ mmInt32 month;
+ mmInt32 day;
+ mmInt32 hour;
+ mmInt32 minute;
+ mmInt32 second;
} TmDateTime;
-/*----------------------------------------------------------------------------
- Define : TRUE/FALSE for boolean operations
-----------------------------------------------------------------------------*/
-
-#ifndef TRUE
- #define TRUE 1
-#endif
-
-#ifndef FALSE
- #define FALSE 0
-#endif
-
-#ifndef NULL
- #define NULL 0
-#endif
-
const mmUint8 bWeights[8][8][4] = {
{{64, 0, 0, 0}, {56, 0, 0, 8}, {48, 0, 0,16}, {40, 0, 0,24},
{32, 0, 0,32}, {24, 0, 0,40}, {16, 0, 0,48}, { 8, 0, 0,56}},
@@ -87,8 +82,7 @@ const mmUint8 bWeights[8][8][4] = {
{ 4,28,28,4 }, { 3,21,35, 5}, { 2,14,42, 6}, { 1,7 ,49, 7}}
};
-typedef enum
-{
+typedef enum {
IC_FORMAT_NONE,
IC_FORMAT_RGB565,
IC_FORMAT_RGB888,
@@ -96,26 +90,24 @@ typedef enum
IC_FORMAT_YCbCr,
IC_FORMAT_YCbCr420_FRAME_PK,
IC_FORMAT_MAX
-}enumImageFormat;
+} enumImageFormat;
/* This structure defines the format of an image */
-typedef struct
-{
- mmInt32 uWidth;
- mmInt32 uHeight;
- mmInt32 uStride;
- enumImageFormat eFormat;
- mmByte *imgPtr;
- mmByte *clrPtr;
- mmInt32 uOffset;
+typedef struct {
+ mmInt32 uWidth;
+ mmInt32 uHeight;
+ mmInt32 uStride;
+ enumImageFormat eFormat;
+ mmByte *imgPtr;
+ mmByte *clrPtr;
+ mmInt32 uOffset;
} structConvImage;
-typedef struct IC_crop_struct
-{
- mmUint32 x; /* x pos of rectangle */
- mmUint32 y; /* y pos of rectangle */
- mmUint32 uWidth; /* dx of rectangle */
- mmUint32 uHeight; /* dy of rectangle */
+typedef struct IC_crop_struct {
+ mmUint32 x; /* x pos of rectangle */
+ mmUint32 y; /* y pos of rectangle */
+ mmUint32 uWidth; /* dx of rectangle */
+ mmUint32 uHeight; /* dy of rectangle */
} IC_rect_type;
/*==========================================================================
@@ -133,16 +125,11 @@ typedef struct IC_crop_struct
* faster version.
============================================================================*/
mmBool
-VT_resizeFrame_Video_opt2_lp
-(
- structConvImage* i_img_ptr, /* Points to the input image */
- structConvImage* o_img_ptr, /* Points to the output image */
- IC_rect_type* cropout, /* how much to resize to in final image */
- mmUint16 dummy /* Transparent pixel value */
- );
-
-#ifdef __cplusplus
-}
-#endif
+VT_resizeFrame_Video_opt2_lp(
+ structConvImage* i_img_ptr, /* Points to the input image */
+ structConvImage* o_img_ptr, /* Points to the output image */
+ IC_rect_type* cropout, /* how much to resize to in final image */
+ mmUint16 dummy /* Transparent pixel value */
+ );
#endif //#define NV12_RESIZE_H_
diff --git a/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
index 5c88cf7..3d9d788 100644
--- a/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
+++ b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
@@ -48,7 +48,8 @@ extern "C"
}
-namespace android {
+namespace Ti {
+namespace Camera {
#define Q16_OFFSET 16
@@ -60,6 +61,7 @@ namespace android {
#define MIN_JPEG_QUALITY 1
#define MAX_JPEG_QUALITY 100
#define EXP_BRACKET_RANGE 10
+#define ZOOM_BRACKET_RANGE 10
#define FOCUS_DIST_SIZE 100
#define FOCUS_DIST_BUFFER_SIZE 500
@@ -68,6 +70,8 @@ namespace android {
#define DEFAULT_THUMB_WIDTH 160
#define DEFAULT_THUMB_HEIGHT 120
#define FRAME_RATE_FULL_HD 27
+#define FRAME_RATE_HIGH_HD 60
+
#define ZOOM_STAGES 61
#define FACE_DETECTION_BUFFER_SIZE 0x1000
@@ -116,27 +120,27 @@ namespace android {
#define OMX_CAMERA_PORT_IMAGE_OUT_IMAGE (OMX_CAMERA_PORT_IMAGE_START + 0)
-#define OMX_INIT_STRUCT(_s_, _name_) \
- memset(&(_s_), 0x0, sizeof(_name_)); \
- (_s_).nSize = sizeof(_name_); \
- (_s_).nVersion.s.nVersionMajor = 0x1; \
- (_s_).nVersion.s.nVersionMinor = 0x1; \
- (_s_).nVersion.s.nRevision = 0x0; \
+#define OMX_INIT_STRUCT(_s_, _name_) \
+ memset(&(_s_), 0x0, sizeof(_name_)); \
+ (_s_).nSize = sizeof(_name_); \
+ (_s_).nVersion.s.nVersionMajor = 0x1; \
+ (_s_).nVersion.s.nVersionMinor = 0x1; \
+ (_s_).nVersion.s.nRevision = 0x0; \
(_s_).nVersion.s.nStep = 0x0
#define OMX_INIT_STRUCT_PTR(_s_, _name_) \
- memset((_s_), 0x0, sizeof(_name_)); \
- (_s_)->nSize = sizeof(_name_); \
- (_s_)->nVersion.s.nVersionMajor = 0x1; \
- (_s_)->nVersion.s.nVersionMinor = 0x1; \
- (_s_)->nVersion.s.nRevision = 0x0; \
+ memset((_s_), 0x0, sizeof(_name_)); \
+ (_s_)->nSize = sizeof(_name_); \
+ (_s_)->nVersion.s.nVersionMajor = 0x1; \
+ (_s_)->nVersion.s.nVersionMinor = 0x1; \
+ (_s_)->nVersion.s.nRevision = 0x0; \
(_s_)->nVersion.s.nStep = 0x0
-#define GOTO_EXIT_IF(_CONDITION,_ERROR) { \
- if ((_CONDITION)) { \
- eError = (_ERROR); \
- goto EXIT; \
- } \
+#define GOTO_EXIT_IF(_CONDITION,_ERROR) { \
+ if ((_CONDITION)) { \
+ eError = (_ERROR); \
+ goto EXIT; \
+ } \
}
const int64_t kCameraBufferLatencyNs = 250000000LL; // 250 ms
@@ -167,15 +171,16 @@ struct CapPixelformat {
const char *param;
};
-struct CapU32 {
- OMX_U32 num;
+struct CapCodingFormat {
+ OMX_IMAGE_CODINGTYPE imageCodingFormat;
const char *param;
};
-struct CapU32Pair {
- OMX_U32 num1, num2;
+struct CapU32 {
+ OMX_U32 num;
const char *param;
};
+
struct CapS32 {
OMX_S32 num;
const char *param;
@@ -185,7 +190,6 @@ typedef CapU32 CapFramerate;
typedef CapU32 CapISO;
typedef CapU32 CapSensorName;
typedef CapS32 CapZoom;
-typedef CapS32 CapEVComp;
/**
* Class which completely abstracts the camera hardware interaction from camera hal
@@ -205,19 +209,14 @@ public:
///Five second timeout
static const int CAMERA_ADAPTER_TIMEOUT = 5000*1000;
- enum OMXCameraEvents
- {
- CAMERA_PORT_ENABLE = 0x1,
- CAMERA_PORT_FLUSH = 0x2,
- CAMERA_PORT_DISABLE = 0x4,
- };
-
enum CaptureMode
{
+ INITIAL_MODE = -1,
HIGH_SPEED = 1,
HIGH_QUALITY = 2,
VIDEO_MODE = 3,
HIGH_QUALITY_ZSL = 4,
+ CP_CAM = 5,
};
enum IPPMode
@@ -231,11 +230,9 @@ public:
enum CodingMode
{
- CodingNone = 0,
+ CodingJPEG = 0,
CodingJPS,
CodingMPO,
- CodingRAWJPEG,
- CodingRAWMPO,
};
enum Algorithm3A
@@ -264,10 +261,30 @@ public:
SetExpBracket = 1 << 2,
SetQuality = 1 << 3,
SetRotation = 1 << 4,
+ SetBurst = 1 << 5,
ECaptureSettingMax,
ECapturesettingsAll = ( ((ECaptureSettingMax -1 ) << 1) -1 ) /// all possible flags raised
};
+ enum PreviewSettingsFlags {
+ SetLDC = 1 << 0,
+ SetNSF = 1 << 1,
+ SetCapMode = 1 << 2,
+ SetVNF = 1 << 3,
+ SetVSTAB = 1 << 4,
+ EPreviewSettingMax,
+ EPreviewSettingsAll = ( ((EPreviewSettingMax -1 ) << 1) -1 ) /// all possible flags raised
+ };
+
+ enum BracketingValueMode {
+ BracketingValueAbsolute,
+ BracketingValueRelative,
+ BracketingValueAbsoluteForced,
+ BracketingValueRelativeForced,
+ BracketingValueCompensation,
+ BracketingValueCompensationForced
+ };
+
class GPSData
{
public:
@@ -309,8 +326,9 @@ public:
class OMXCameraPortParameters
{
public:
- OMX_U32 mHostBufaddr[MAX_NO_BUFFERS];
+ //CameraBuffer * mHostBufaddr[MAX_NO_BUFFERS];
OMX_BUFFERHEADERTYPE *mBufferHeader[MAX_NO_BUFFERS];
+ OMX_U8 mStatus[MAX_NO_BUFFERS];
OMX_U32 mWidth;
OMX_U32 mHeight;
OMX_U32 mStride;
@@ -328,9 +346,17 @@ public:
OMX_CONFIG_FRAMESTABTYPE mVidStabConfig;
OMX_U32 mCapFrame;
OMX_U32 mFrameRate;
- OMX_S32 mMinFrameRate;
- OMX_S32 mMaxFrameRate;
- CameraFrame::FrameType mImageType;
+ OMX_U32 mMinFrameRate;
+ OMX_U32 mMaxFrameRate;
+ CameraFrame::FrameType mImageType;
+ OMX_TI_STEREOFRAMELAYOUTTYPE mFrameLayoutType;
+
+ CameraBuffer * lookup_omx_buffer (OMX_BUFFERHEADERTYPE *pBufHeader);
+ enum {
+ IDLE = 0, // buffer is neither with HAL or Ducati
+ FILL, // buffer is with Ducati
+ DONE, // buffer is filled and sent to HAL
+ };
};
///Context of the OMX Camera component
@@ -344,9 +370,24 @@ public:
OMX_U32 mPrevPortIndex;
OMX_U32 mImagePortIndex;
OMX_U32 mMeasurementPortIndex;
+ OMX_U32 mVideoInPortIndex;
OMXCameraPortParameters mCameraPortParams[MAX_NO_PORTS];
};
+ class CachedCaptureParameters
+ {
+ public:
+ unsigned int mPendingCaptureSettings;
+ unsigned int mPictureRotation;
+ int mExposureBracketingValues[EXP_BRACKET_RANGE];
+ int mExposureGainBracketingValues[EXP_BRACKET_RANGE];
+ int mExposureGainBracketingModes[EXP_BRACKET_RANGE];
+ size_t mExposureBracketingValidEntries;
+ OMX_BRACKETMODETYPE mExposureBracketMode;
+ unsigned int mBurstFrames;
+ bool mFlushShotConfigQueue;
+ };
+
public:
OMXCameraAdapter(size_t sensor_index);
@@ -356,21 +397,22 @@ public:
virtual status_t initialize(CameraProperties::Properties*);
//APIs to configure Camera adapter and get the current parameter set
- virtual status_t setParameters(const CameraParameters& params);
- virtual void getParameters(CameraParameters& params);
+ virtual status_t setParameters(const android::CameraParameters& params);
+ virtual void getParameters(android::CameraParameters& params);
// API
- virtual status_t UseBuffersPreview(void* bufArr, int num);
+ status_t UseBuffersPreview(CameraBuffer *bufArr, int num);
- //API to flush the buffers for preview
- status_t flushBuffers();
+ //API to flush the buffers
+ status_t flushBuffers(OMX_U32 port = OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW);
// API
virtual status_t setFormat(OMX_U32 port, OMXCameraPortParameters &cap);
// Function to get and populate caps from handle
- static status_t getCaps(CameraProperties::Properties* props, OMX_HANDLETYPE handle);
+ static status_t getCaps(int sensorId, CameraProperties::Properties* props, OMX_HANDLETYPE handle);
static const char* getLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT);
+ static int getMultipleLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT, char * supported);
static int getLUTvalue_HALtoOMX(const char * HalValue, LUTtype LUT);
OMX_ERRORTYPE OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETYPE hComponent,
@@ -385,7 +427,8 @@ public:
OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader);
- static OMX_ERRORTYPE OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData=NULL);
+ static OMX_ERRORTYPE OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData,
+ const OMX_CALLBACKTYPE & callbacks);
protected:
@@ -402,10 +445,10 @@ protected:
virtual status_t stopVideoCapture();
virtual status_t startPreview();
virtual status_t stopPreview();
- virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable);
- virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType);
+ virtual status_t useBuffers(CameraMode mode, CameraBuffer * bufArr, int num, size_t length, unsigned int queueable);
+ virtual status_t fillThisBuffer(CameraBuffer * frameBuf, CameraFrame::FrameType frameType);
virtual status_t getFrameSize(size_t &width, size_t &height);
- virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount);
+ virtual status_t getPictureBufferSize(CameraFrame &frame, size_t bufferCount);
virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
virtual status_t startFaceDetection();
virtual status_t stopFaceDetection();
@@ -414,11 +457,17 @@ protected:
private:
+ // Caches and returns current set of parameters
+ CachedCaptureParameters* cacheCaptureParameters();
+
status_t doSwitchToExecuting();
void performCleanupAfterError();
- status_t switchToLoaded();
+ status_t switchToIdle();
+
+ status_t switchToLoaded(bool bPortEnableRequired = false);
+ status_t prevPortEnable();
OMXCameraPortParameters *getPortParams(CameraFrame::FrameType frameType);
@@ -437,15 +486,21 @@ private:
OMX_IN OMX_EVENTTYPE eEvent,
OMX_IN OMX_U32 nData1,
OMX_IN OMX_U32 nData2,
- OMX_IN Semaphore &semaphore);
+ OMX_IN Utils::Semaphore &semaphore);
status_t setPictureRotation(unsigned int degree);
status_t setSensorOrientation(unsigned int degree);
status_t setImageQuality(unsigned int quality);
status_t setThumbnailParams(unsigned int width, unsigned int height, unsigned int quality);
+ status_t setSensorQuirks(int orientation,
+ OMXCameraPortParameters &portParams,
+ bool &portConfigured);
+
+ status_t setupTunnel(uint32_t SliceHeight, uint32_t EncoderHandle, uint32_t width, uint32_t height);
+ status_t destroyTunnel();
//EXIF
- status_t setParametersEXIF(const CameraParameters &params,
+ status_t setParametersEXIF(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
status_t convertGPSCoord(double coord, int &deg, int &min, int &sec, int &secDivisor);
status_t setupEXIF();
@@ -462,12 +517,12 @@ private:
//Focus distances
- status_t setParametersFocus(const CameraParameters &params,
+ status_t setParametersFocus(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
status_t addFocusDistances(OMX_U32 &near,
OMX_U32 &optimal,
OMX_U32 &far,
- CameraParameters& params);
+ android::CameraParameters& params);
status_t encodeFocusDistance(OMX_U32 dist, char *buffer, size_t length);
status_t getFocusDistances(OMX_U32 &near,OMX_U32 &optimal, OMX_U32 &far);
@@ -476,14 +531,17 @@ private:
status_t enableVideoStabilization(bool enable);
//Digital zoom
- status_t setParametersZoom(const CameraParameters &params,
+ status_t setParametersZoom(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
status_t doZoom(int index);
status_t advanceZoom();
//3A related parameters
- status_t setParameters3A(const CameraParameters &params,
+ status_t setParameters3A(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
+ void declareParameter3ABool(const android::CameraParameters &params, const char *key,
+ OMX_BOOL &current_setting, E3ASettingsFlags pending,
+ const char *msg);
// scene modes
status_t setScene(Gen3A_settings& Gen3A);
@@ -501,6 +559,7 @@ private:
//Exposure Modes
status_t setExposureMode(Gen3A_settings& Gen3A);
+ status_t setManualExposureVal(Gen3A_settings& Gen3A);
status_t setEVCompensation(Gen3A_settings& Gen3A);
status_t setWBMode(Gen3A_settings& Gen3A);
status_t setFlicker(Gen3A_settings& Gen3A);
@@ -512,6 +571,20 @@ private:
status_t setEffect(Gen3A_settings& Gen3A);
status_t setMeteringAreas(Gen3A_settings& Gen3A);
+ //TI extensions for enable/disable algos
+ status_t setParameter3ABool(const OMX_INDEXTYPE omx_idx,
+ const OMX_BOOL data, const char *msg);
+ status_t setParameter3ABoolInvert(const OMX_INDEXTYPE omx_idx,
+ const OMX_BOOL data, const char *msg);
+#ifndef OMAP_TUNA
+ status_t setAlgoFixedGamma(Gen3A_settings& Gen3A);
+ status_t setAlgoNSF1(Gen3A_settings& Gen3A);
+ status_t setAlgoNSF2(Gen3A_settings& Gen3A);
+ status_t setAlgoSharpening(Gen3A_settings& Gen3A);
+ status_t setAlgoThreeLinColorMap(Gen3A_settings& Gen3A);
+ status_t setAlgoGIC(Gen3A_settings& Gen3A);
+#endif
+
status_t getEVCompensation(Gen3A_settings& Gen3A);
status_t getWBMode(Gen3A_settings& Gen3A);
status_t getSharpness(Gen3A_settings& Gen3A);
@@ -524,10 +597,16 @@ private:
status_t setWhiteBalanceLock(Gen3A_settings& Gen3A);
status_t set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_BOOL toggleFocus);
+ //Stereo 3D
+#ifndef OMAP_TUNA
+ void setParamS3D(OMX_U32 port, const char *valstr);
+ status_t setS3DFrameLayout(OMX_U32 port) const;
+#endif
+
//API to set FrameRate using VFR interface
status_t setVFramerate(OMX_U32 minFrameRate,OMX_U32 maxFrameRate);
- status_t setParametersAlgo(const CameraParameters &params,
+ status_t setParametersAlgo(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
//Noise filtering
@@ -548,18 +627,21 @@ private:
status_t setTouchFocus();
//Face detection
- status_t setParametersFD(const CameraParameters &params,
+ status_t setParametersFD(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
- status_t updateFocusDistances(CameraParameters &params);
+ status_t updateFocusDistances(android::CameraParameters &params);
+ status_t setFaceDetectionOrientation(OMX_U32 orientation);
status_t setFaceDetection(bool enable, OMX_U32 orientation);
- status_t detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
- sp<CameraFDResult> &result,
+ status_t createPreviewMetadata(OMX_BUFFERHEADERTYPE* pBuffHeader,
+ android::sp<CameraMetadataResult> &result,
size_t previewWidth,
size_t previewHeight);
status_t encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData,
- camera_frame_metadata_t **pFaces,
+ camera_frame_metadata_t *metadataResult,
size_t previewWidth,
size_t previewHeight);
+ status_t encodePreviewMetadata(camera_frame_metadata_t *meta, const OMX_PTR plat_pvt);
+
void pauseFaceDetection(bool pause);
//3A Algorithms priority configuration
@@ -569,17 +651,29 @@ private:
status_t setSensorOverclock(bool enable);
// Utility methods for OMX Capabilities
+ static bool _checkOmxTiCap(const OMX_TI_CAPTYPE & caps);
+ static bool _dumpOmxTiCap(int sensorId, const OMX_TI_CAPTYPE & caps);
+
static status_t insertCapabilities(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t encodeSizeCap(OMX_TI_CAPRESTYPE&, const CapResolution *, size_t, char *, size_t);
static status_t encodeISOCap(OMX_U32, const CapISO*, size_t, char*, size_t);
static size_t encodeZoomCap(OMX_S32, const CapZoom*, size_t, char*, size_t);
- static status_t encodeFramerateCap(OMX_U32, OMX_U32, const CapFramerate*, size_t, char*, size_t);
- static status_t encodeVFramerateCap(OMX_TI_CAPTYPE&, const CapU32Pair*, size_t, char*, char*, size_t);
+ static void encodeFrameRates(int minFrameRate, int maxFrameRate, const OMX_TI_CAPTYPE & caps,
+ const CapFramerate * fixedFrameRates, int frameRateCount, android::Vector<FpsRange> & fpsRanges);
+ static status_t encodeImageCodingFormatCap(OMX_IMAGE_CODINGTYPE,
+ const CapCodingFormat *,
+ size_t,
+ char *);
static status_t encodePixelformatCap(OMX_COLOR_FORMATTYPE,
const CapPixelformat*,
size_t,
char*,
size_t);
+ static status_t encodeSizeCap3D(OMX_TI_CAPRESTYPE&,
+ const CapResolution*,
+ size_t ,
+ char * ,
+ size_t);
static status_t insertImageSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertPreviewSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertThumbSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
@@ -587,13 +681,13 @@ private:
static status_t insertImageFormats(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertPreviewFormats(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertFramerates(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
- static status_t insertVFramerates(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertEVs(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertISOModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertIPPModes(CameraProperties::Properties*, OMX_TI_CAPTYPE &);
static status_t insertWBModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertEffects(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertExpModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertManualExpRanges(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertSceneModes(CameraProperties::Properties*, OMX_TI_CAPTYPE &);
static status_t insertFocusModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertFlickerModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
@@ -602,21 +696,55 @@ private:
static status_t insertDefaults(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertLocks(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertAreas(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+#ifndef OMAP_TUNA
+ static status_t insertMechanicalMisalignmentCorrection(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+#endif
+ static status_t insertCaptureModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
static status_t insertVideoSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
-
- status_t setParametersCapture(const CameraParameters &params,
+ static status_t insertFacing(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertFocalLength(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertAutoConvergenceModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertManualConvergenceRange(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertLayout(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertVideoSnapshotSupported(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertVNFSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps);
+ static status_t insertVSTABSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps);
+ static status_t insertGBCESupported(CameraProperties::Properties* params,
+ const OMX_TI_CAPTYPE &caps);
+ static status_t insertGLBCESupported(CameraProperties::Properties* params,
+ const OMX_TI_CAPTYPE &caps);
+ static status_t insertRaw(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+
+ status_t setParametersCapture(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
//Exposure Bracketing
- status_t setExposureBracketing(int *evValues, size_t evCount, size_t frameCount);
- status_t parseExpRange(const char *rangeStr, int * expRange, size_t count, size_t &validEntries);
+#ifndef OMAP_TUNA
+ status_t initVectorShot();
+ status_t setVectorShot(int *evValues, int *evValues2, int *evModes2,
+ size_t evCount, size_t frameCount,
+ bool flush, OMX_BRACKETMODETYPE bracketMode);
+#endif
+ status_t setVectorStop(bool toPreview = false);
+ status_t setExposureBracketing(int *evValues, int *evValues2,
+ size_t evCount, size_t frameCount,
+ OMX_BRACKETMODETYPE bracketMode);
+ status_t doExposureBracketing(int *evValues, int *evValues2,
+ int *evModes2,
+ size_t evCount, size_t frameCount,
+ bool flush,
+ OMX_BRACKETMODETYPE bracketMode);
+ int getBracketingValueMode(const char *a, const char *b) const;
+ status_t parseExpRange(const char *rangeStr, int *expRange, int *gainRange,
+ int *expGainModes,
+ size_t count, size_t &validEntries);
//Temporal Bracketing
status_t doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader, CameraFrame::FrameType typeOfFrame);
- status_t sendBracketFrames();
+ status_t sendBracketFrames(size_t &framesSent);
// Image Capture Service
- status_t startImageCapture();
+ status_t startImageCapture(bool bracketing, CachedCaptureParameters*);
status_t disableImagePort();
//Shutter callback notifications
@@ -624,34 +752,64 @@ private:
//Sets eithter HQ or HS mode and the frame count
status_t setCaptureMode(OMXCameraAdapter::CaptureMode mode);
- status_t UseBuffersCapture(void* bufArr, int num);
- status_t UseBuffersPreviewData(void* bufArr, int num);
+ status_t UseBuffersCapture(CameraBuffer *bufArr, int num);
+ status_t UseBuffersPreviewData(CameraBuffer *bufArr, int num);
+ status_t UseBuffersRawCapture(CameraBuffer *bufArr, int num);
//Used for calculation of the average frame rate during preview
status_t recalculateFPS();
- //Helper method for initializing a CameFrame object
- status_t initCameraFrame(CameraFrame &frame, OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, int typeOfFrame, OMXCameraPortParameters *port);
-
//Sends the incoming OMX buffer header to subscribers
- status_t sendFrame(CameraFrame &frame);
-
status_t sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, unsigned int mask, OMXCameraPortParameters *port);
status_t apply3Asettings( Gen3A_settings& Gen3A );
- status_t init3AParams(Gen3A_settings &Gen3A);
// AutoConvergence
- status_t setAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE pACMode, OMX_S32 pManualConverence);
- status_t getAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE *pACMode, OMX_S32 *pManualConverence);
+ status_t setAutoConvergence(const char *valstr, const char *pValManualstr, const android::CameraParameters &params);
status_t setExtraData(bool enable, OMX_U32, OMX_EXT_EXTRADATATYPE);
- OMX_OTHER_EXTRADATATYPE *getExtradata(OMX_OTHER_EXTRADATATYPE *extraData, OMX_U32 extraDataSize, OMX_EXTRADATATYPE type);
+ OMX_OTHER_EXTRADATATYPE *getExtradata(const OMX_PTR ptrPrivate, OMX_EXTRADATATYPE type) const;
+
+ // Meta data
+#ifdef OMAP_ENHANCEMENT_CPCAM
+ camera_memory_t * getMetaData(const OMX_PTR plat_pvt,
+ camera_request_memory allocator) const;
+#endif
+
+#ifndef OMAP_TUNA
+ // Mechanical Misalignment Correction
+ status_t setMechanicalMisalignmentCorrection(bool enable);
+
+ // DCC file data save
+ status_t initDccFileDataSave(OMX_HANDLETYPE* omxHandle, int portIndex);
+ status_t sniffDccFileDataSave(OMX_BUFFERHEADERTYPE* pBuffHeader);
+ status_t saveDccFileDataSave();
+ status_t closeDccFileDataSave();
+ status_t fseekDCCuseCasePos(FILE *pFile);
+ FILE * fopenCameraDCC(const char *dccFolderPath);
+ FILE * parseDCCsubDir(DIR *pDir, char *path);
+#endif
+
+#ifdef CAMERAHAL_OMX_PROFILING
+ status_t storeProfilingData(OMX_BUFFERHEADERTYPE* pBuffHeader);
+#endif
+
+ // Internal buffers
+ status_t initInternalBuffers (OMX_U32);
+ status_t deinitInternalBuffers (OMX_U32);
+
+ // Reprocess Methods -- implementation in OMXReprocess.cpp
+ status_t setParametersReprocess(const android::CameraParameters &params, CameraBuffer* bufs,
+ BaseCameraAdapter::AdapterState state);
+ status_t startReprocess();
+ status_t disableReprocess();
+ status_t stopReprocess();
+ status_t UseBuffersReprocess(CameraBuffer *bufArr, int num);
- class CommandHandler : public Thread {
+ class CommandHandler : public android::Thread {
public:
CommandHandler(OMXCameraAdapter* ca)
- : Thread(false), mCameraAdapter(ca) { }
+ : android::Thread(false), mCameraAdapter(ca) { }
virtual bool threadLoop() {
bool ret;
@@ -659,38 +817,42 @@ private:
return ret;
}
- status_t put(TIUTILS::Message* msg){
- Mutex::Autolock lock(mLock);
+ status_t put(Utils::Message* msg){
+ android::AutoMutex lock(mLock);
return mCommandMsgQ.put(msg);
}
void clearCommandQ()
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.clear();
}
enum {
COMMAND_EXIT = -1,
CAMERA_START_IMAGE_CAPTURE = 0,
- CAMERA_PERFORM_AUTOFOCUS = 1,
+ CAMERA_PERFORM_AUTOFOCUS,
CAMERA_SWITCH_TO_EXECUTING,
+ CAMERA_START_REPROCESS
};
private:
bool Handler();
- TIUTILS::MessageQueue mCommandMsgQ;
+ Utils::MessageQueue mCommandMsgQ;
OMXCameraAdapter* mCameraAdapter;
- Mutex mLock;
+ android::Mutex mLock;
};
- sp<CommandHandler> mCommandHandler;
+ android::sp<CommandHandler> mCommandHandler;
public:
- class OMXCallbackHandler : public Thread {
+ class OMXCallbackHandler : public android::Thread {
public:
OMXCallbackHandler(OMXCameraAdapter* ca)
- : Thread(false), mCameraAdapter(ca) { }
+ : Thread(false), mCameraAdapter(ca)
+ {
+ mIsProcessed = true;
+ }
virtual bool threadLoop() {
bool ret;
@@ -698,31 +860,36 @@ public:
return ret;
}
- status_t put(TIUTILS::Message* msg){
- Mutex::Autolock lock(mLock);
+ status_t put(Utils::Message* msg){
+ android::AutoMutex lock(mLock);
+ mIsProcessed = false;
return mCommandMsgQ.put(msg);
}
void clearCommandQ()
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.clear();
}
+ void flush();
+
enum {
COMMAND_EXIT = -1,
CAMERA_FILL_BUFFER_DONE,
- CAMERA_FOCUS_STATUS,
+ CAMERA_FOCUS_STATUS
};
private:
bool Handler();
- TIUTILS::MessageQueue mCommandMsgQ;
+ Utils::MessageQueue mCommandMsgQ;
OMXCameraAdapter* mCameraAdapter;
- Mutex mLock;
+ android::Mutex mLock;
+ android::Condition mCondition;
+ bool mIsProcessed;
};
- sp<OMXCallbackHandler> mOMXCallbackHandler;
+ android::sp<OMXCallbackHandler> mOMXCallbackHandler;
private:
@@ -731,15 +898,36 @@ private:
//OMX Capabilities data
static const CapResolution mImageCapRes [];
+ static const CapResolution mImageCapResSS [];
+ static const CapResolution mImageCapResTB [];
static const CapResolution mPreviewRes [];
+ static const CapResolution mPreviewResSS [];
+ static const CapResolution mPreviewResTB [];
+ static const CapResolution mPreviewPortraitRes [];
static const CapResolution mThumbRes [];
static const CapPixelformat mPixelformats [];
+ static const userToOMX_LUT mFrameLayout [];
+ static const LUTtype mLayoutLUT;
+ static const CapCodingFormat mImageCodingFormat[];
static const CapFramerate mFramerates [];
static const CapU32 mSensorNames[] ;
static const CapZoom mZoomStages [];
- static const CapEVComp mEVCompRanges [];
static const CapISO mISOStages [];
- static const CapU32Pair mVarFramerates [];
+ static const int SENSORID_IMX060;
+ static const int SENSORID_OV5650;
+ static const int SENSORID_OV5640;
+ static const int SENSORID_OV14825;
+ static const int SENSORID_S5K4E1GA;
+ static const int SENSORID_S5K6A1GX03;
+ static const CapU32 mFacing [];
+ static const userToOMX_LUT mAutoConvergence [];
+ static const LUTtype mAutoConvergenceLUT;
+ static const userToOMX_LUT mBracketingModes[];
+ static const LUTtype mBracketingModesLUT;
+
+ static const int FPS_MIN;
+ static const int FPS_MAX;
+ static const int FPS_MAX_EXTENDED;
// OMX Camera defaults
static const char DEFAULT_ANTIBANDING[];
@@ -752,45 +940,54 @@ private:
static const char DEFAULT_FLASH_MODE[];
static const char DEFAULT_FOCUS_MODE_PREFERRED[];
static const char DEFAULT_FOCUS_MODE[];
- static const char DEFAULT_FRAMERATE_RANGE_IMAGE[];
- static const char DEFAULT_FRAMERATE_RANGE_VIDEO[];
static const char DEFAULT_IPP[];
- static const char DEFAULT_GBCE[];
static const char DEFAULT_ISO_MODE[];
static const char DEFAULT_JPEG_QUALITY[];
static const char DEFAULT_THUMBNAIL_QUALITY[];
static const char DEFAULT_THUMBNAIL_SIZE[];
static const char DEFAULT_PICTURE_FORMAT[];
+ static const char DEFAULT_S3D_PICTURE_LAYOUT[];
static const char DEFAULT_PICTURE_SIZE[];
+ static const char DEFAULT_PICTURE_SS_SIZE[];
+ static const char DEFAULT_PICTURE_TB_SIZE[];
static const char DEFAULT_PREVIEW_FORMAT[];
static const char DEFAULT_FRAMERATE[];
+ static const char DEFAULT_S3D_PREVIEW_LAYOUT[];
static const char DEFAULT_PREVIEW_SIZE[];
+ static const char DEFAULT_PREVIEW_SS_SIZE[];
+ static const char DEFAULT_PREVIEW_TB_SIZE[];
static const char DEFAULT_NUM_PREV_BUFS[];
static const char DEFAULT_NUM_PIC_BUFS[];
- static const char DEFAULT_MAX_FOCUS_AREAS[];
static const char DEFAULT_SATURATION[];
static const char DEFAULT_SCENE_MODE[];
static const char DEFAULT_SHARPNESS[];
- static const char DEFAULT_VSTAB[];
- static const char DEFAULT_VSTAB_SUPPORTED[];
+ static const char * DEFAULT_VSTAB;
+ static const char * DEFAULT_VNF;
static const char DEFAULT_WB[];
static const char DEFAULT_ZOOM[];
static const char DEFAULT_MAX_FD_HW_FACES[];
static const char DEFAULT_MAX_FD_SW_FACES[];
- static const char DEFAULT_AE_LOCK[];
- static const char DEFAULT_AWB_LOCK[];
- static const char DEFAULT_MAX_NUM_METERING_AREAS[];
- static const char DEFAULT_LOCK_SUPPORTED[];
- static const char DEFAULT_LOCK_UNSUPPORTED[];
- static const char DEFAULT_FOCAL_LENGTH_PRIMARY[];
- static const char DEFAULT_FOCAL_LENGTH_SECONDARY[];
+ static const char * DEFAULT_AE_LOCK;
+ static const char * DEFAULT_AWB_LOCK;
static const char DEFAULT_HOR_ANGLE[];
static const char DEFAULT_VER_ANGLE[];
- static const char DEFAULT_VIDEO_SNAPSHOT_SUPPORTED[];
static const char DEFAULT_VIDEO_SIZE[];
- static const char DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[];
+ static const char DEFAULT_SENSOR_ORIENTATION[];
+ static const char DEFAULT_AUTOCONVERGENCE_MODE[];
+ static const char DEFAULT_MANUAL_CONVERGENCE[];
+ static const char * DEFAULT_MECHANICAL_MISALIGNMENT_CORRECTION_MODE;
+ static const char DEFAULT_EXIF_MODEL[];
+ static const char DEFAULT_EXIF_MAKE[];
+
static const size_t MAX_FOCUS_AREAS;
+#ifdef CAMERAHAL_OMX_PROFILING
+
+ static const char DEFAULT_PROFILE_PATH[];
+ int mDebugProfile;
+
+#endif
+
OMX_VERSIONTYPE mCompRevision;
//OMX Component UUID
@@ -803,31 +1000,53 @@ private:
char mFocusDistBuffer[FOCUS_DIST_BUFFER_SIZE];
// Current Focus areas
- Vector< sp<CameraArea> > mFocusAreas;
- mutable Mutex mFocusAreasLock;
+ android::Vector<android::sp<CameraArea> > mFocusAreas;
+ mutable android::Mutex mFocusAreasLock;
+
+ // Current Touch convergence areas
+ android::Vector<android::sp<CameraArea> > mTouchAreas;
+ mutable android::Mutex mTouchAreasLock;
// Current Metering areas
- Vector< sp<CameraArea> > mMeteringAreas;
- mutable Mutex mMeteringAreasLock;
+ android::Vector<android::sp<CameraArea> > mMeteringAreas;
+ mutable android::Mutex mMeteringAreasLock;
+ OperatingMode mCapabilitiesOpMode;
CaptureMode mCapMode;
+ // TODO(XXX): Do we really need this lock? Let's
+ // try to merge temporal bracketing and burst
+ // capture later
+ mutable android::Mutex mBurstLock;
size_t mBurstFrames;
+ size_t mBurstFramesAccum;
+ size_t mBurstFramesQueued;
size_t mCapturedFrames;
+ bool mFlushShotConfigQueue;
bool mMeasurementEnabled;
//Exposure Bracketing
int mExposureBracketingValues[EXP_BRACKET_RANGE];
+ int mExposureGainBracketingValues[EXP_BRACKET_RANGE];
+ int mExposureGainBracketingModes[EXP_BRACKET_RANGE];
size_t mExposureBracketingValidEntries;
+ OMX_BRACKETMODETYPE mExposureBracketMode;
+
+ //Zoom Bracketing
+ int mZoomBracketingValues[ZOOM_BRACKET_RANGE];
+ size_t mZoomBracketingValidEntries;
- mutable Mutex mFaceDetectionLock;
+ static const uint32_t FACE_DETECTION_THRESHOLD;
+ mutable android::Mutex mFaceDetectionLock;
//Face detection status
bool mFaceDetectionRunning;
bool mFaceDetectionPaused;
bool mFDSwitchAlgoPriority;
- camera_face_t faceDetectionLastOutput [MAX_NUM_FACES_SUPPORTED];
+ camera_face_t faceDetectionLastOutput[MAX_NUM_FACES_SUPPORTED];
int faceDetectionNumFacesLastOutput;
+ int metadataLastAnalogGain;
+ int metadataLastExposureTime;
//Geo-tagging
EXIFData mEXIFData;
@@ -857,7 +1076,7 @@ private:
unsigned int mZoomParameterIdx;
//current zoom
- Mutex mZoomLock;
+ android::Mutex mZoomLock;
unsigned int mCurrentZoomIdx, mTargetZoomIdx, mPreviousZoomIndx;
bool mZoomUpdating, mZoomUpdate;
int mZoomInc;
@@ -868,63 +1087,78 @@ private:
OMX_VERSIONTYPE mLocalVersionParam;
unsigned int mPending3Asettings;
- Mutex m3ASettingsUpdateLock;
+ android::Mutex m3ASettingsUpdateLock;
Gen3A_settings mParameters3A;
const char *mPictureFormatFromClient;
+ BrightnessMode mGBCE;
+ BrightnessMode mGLBCE;
+
OMX_TI_CONFIG_3A_FACE_PRIORITY mFacePriority;
OMX_TI_CONFIG_3A_REGION_PRIORITY mRegionPriority;
- CameraParameters mParams;
+ android::CameraParameters mParams;
CameraProperties::Properties* mCapabilities;
unsigned int mPictureRotation;
bool mWaitingForSnapshot;
- int mSnapshotCount;
bool mCaptureConfigured;
unsigned int mPendingCaptureSettings;
+ unsigned int mPendingPreviewSettings;
OMX_TI_ANCILLARYDATATYPE* mCaptureAncillaryData;
OMX_TI_WHITEBALANCERESULTTYPE* mWhiteBalanceData;
+ bool mReprocConfigured;
//Temporal bracketing management data
- mutable Mutex mBracketingLock;
+ bool mBracketingSet;
+ mutable android::Mutex mBracketingLock;
bool *mBracketingBuffersQueued;
int mBracketingBuffersQueuedCount;
int mLastBracetingBufferIdx;
bool mBracketingEnabled;
- int mBracketingRange;
+ bool mZoomBracketingEnabled;
+ size_t mBracketingRange;
+ int mCurrentZoomBracketing;
+ android::CameraParameters mParameters;
+#ifdef CAMERAHAL_TUNA
bool mIternalRecordingHint;
+#endif
- CameraParameters mParameters;
bool mOmxInitialized;
OMXCameraAdapterComponentContext mCameraAdapterParameters;
bool mFirstTimeInit;
///Semaphores used internally
- Semaphore mInitSem;
- Semaphore mFlushSem;
- Semaphore mUsePreviewDataSem;
- Semaphore mUsePreviewSem;
- Semaphore mUseCaptureSem;
- Semaphore mStartPreviewSem;
- Semaphore mStopPreviewSem;
- Semaphore mStartCaptureSem;
- Semaphore mStopCaptureSem;
- Semaphore mSwitchToLoadedSem;
- Semaphore mSwitchToExecSem;
-
- mutable Mutex mStateSwitchLock;
-
- Vector<struct TIUTILS::Message *> mEventSignalQ;
- Mutex mEventLock;
+ Utils::Semaphore mInitSem;
+ Utils::Semaphore mFlushSem;
+ Utils::Semaphore mUsePreviewDataSem;
+ Utils::Semaphore mUsePreviewSem;
+ Utils::Semaphore mUseCaptureSem;
+ Utils::Semaphore mStartPreviewSem;
+ Utils::Semaphore mStopPreviewSem;
+ Utils::Semaphore mStartCaptureSem;
+ Utils::Semaphore mStopCaptureSem;
+ Utils::Semaphore mSwitchToLoadedSem;
+ Utils::Semaphore mSwitchToExecSem;
+ Utils::Semaphore mStopReprocSem;
+ Utils::Semaphore mUseReprocessSem;
+
+ mutable android::Mutex mStateSwitchLock;
+ mutable android::Mutex mIdleStateSwitchLock;
+
+ android::Vector<Utils::Message *> mEventSignalQ;
+ android::Mutex mEventLock;
OMX_STATETYPE mComponentState;
+ OMX_TI_AUTOCONVERGENCEMODETYPE mAutoConv;
+ OMX_S32 mManualConv;
bool mVnfEnabled;
bool mVstabEnabled;
int mSensorOrientation;
int mDeviceOrientation;
+ int mFaceOrientation;
bool mSensorOverclock;
//Indicates if we should leave
@@ -936,11 +1170,15 @@ private:
int mLastFrameCount;
unsigned int mIter;
nsecs_t mLastFPSTime;
- Mutex mFrameCountMutex;
- Condition mFirstFrameCondition;
+ android::Mutex mFrameCountMutex;
+ android::Condition mFirstFrameCondition;
- Mutex mDoAFMutex;
- Condition mDoAFCond;
+ static const nsecs_t CANCEL_AF_TIMEOUT;
+ android::Mutex mCancelAFMutex;
+ android::Condition mCancelAFCond;
+
+ android::Mutex mDoAFMutex;
+ android::Condition mDoAFCond;
size_t mSensorIndex;
CodingMode mCodingMode;
@@ -949,14 +1187,33 @@ private:
OMX_TICKS mTimeSourceDelta;
bool onlyOnce;
- Semaphore mCaptureSem;
+ Utils::Semaphore mCaptureSem;
bool mCaptureSignalled;
OMX_BOOL mUserSetExpLock;
OMX_BOOL mUserSetWbLock;
- Mutex mImageCaptureLock;
+#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+ bool mRawCapture;
+ bool mYuvCapture;
+#endif
+
+ bool mSetFormatDone;
+
+#ifndef OMAP_TUNA
+ OMX_TI_DCCDATATYPE mDccData;
+ android::Mutex mDccDataLock;
+#endif
+
+ int mMaxZoomSupported;
+ android::Mutex mImageCaptureLock;
+
+ bool mTunnelDestroyed;
+ bool mPreviewPortInitialized;
+
};
-}; //// namespace
-#endif //OMX_CAMERA_ADAPTER_H
+} // namespace Camera
+} // namespace Ti
+
+#endif //OMX_CAMERA_ADAPTER_H
diff --git a/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h b/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h
index 205a87b..d57843e 100644
--- a/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h
+++ b/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h
@@ -32,6 +32,9 @@
#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
#endif
+namespace Ti {
+namespace Camera {
+
struct SceneModesEntry {
OMX_SCENEMODETYPE scene;
OMX_IMAGE_FLASHCONTROLTYPE flash;
@@ -239,9 +242,312 @@ static const SceneModesEntry S5K6A1GX03_SceneModesLUT [] = {
OMX_WhiteBalControlAuto },
};
+static const SceneModesEntry IMX060_SceneModesLUT [] = {
+ { OMX_Closeup,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoMacro,
+ OMX_WhiteBalControlAuto },
+ { OMX_Landscape,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlSunLight },
+ { OMX_Underwater,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Sport,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SnowBeach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Mood,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightPortrait,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlPortrait,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightIndoor,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Fireworks,
+ OMX_IMAGE_FlashControlOn,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Document,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoMacro,
+ OMX_WhiteBalControlAuto },
+ { OMX_Barcode,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoMacro,
+ OMX_WhiteBalControlAuto },
+ { OMX_SuperNight,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoInfinity,
+ OMX_WhiteBalControlAuto },
+ { OMX_Cine,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_OldFilm,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Action,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Beach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Candlelight,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlIncandescent },
+ { OMX_TI_Night,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Party,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoLock,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Portrait,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Snow,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Steadyphoto,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Sunset,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ ( OMX_WHITEBALCONTROLTYPE ) OMX_TI_WhiteBalControlSunset },
+ { OMX_TI_Theatre,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+};
+
+static const SceneModesEntry OV5640_SceneModesLUT [] = {
+ { OMX_Closeup,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Landscape,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Underwater,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlSunLight },
+ { OMX_Sport,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlAutoInfinity,
+ OMX_WhiteBalControlAuto },
+ { OMX_SnowBeach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Mood,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightPortrait,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightIndoor,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Fireworks,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Document,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Barcode,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SuperNight,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Cine,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_OldFilm,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Action,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Beach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Candlelight,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlIncandescent },
+ { OMX_TI_Night,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Party,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Portrait,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Snow,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Steadyphoto,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Sunset,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Theatre,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+};
+
+static const SceneModesEntry OV5650_SceneModesLUT [] = {
+ { OMX_Closeup,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Landscape,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlSunLight },
+ { OMX_Underwater,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Sport,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SnowBeach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Mood,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightPortrait,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlPortrait,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightIndoor,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Fireworks,
+ OMX_IMAGE_FlashControlOn,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Document,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Barcode,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SuperNight,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoInfinity,
+ OMX_WhiteBalControlAuto },
+ { OMX_Cine,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_OldFilm,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Action,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Beach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Candlelight,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlIncandescent },
+ { OMX_TI_Night,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Party,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoLock,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Portrait,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Snow,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Steadyphoto,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Sunset,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ ( OMX_WHITEBALCONTROLTYPE ) OMX_TI_WhiteBalControlSunset },
+ { OMX_TI_Theatre,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+};
+
static const CameraToSensorModesLUTEntry CameraToSensorModesLUT [] = {
{ "S5K4E1GA", S5K4E1GA_SceneModesLUT, ARRAY_SIZE(S5K4E1GA_SceneModesLUT)},
{ "S5K6A1GX03", S5K6A1GX03_SceneModesLUT, ARRAY_SIZE(S5K6A1GX03_SceneModesLUT)},
+ { "IMX060", IMX060_SceneModesLUT, ARRAY_SIZE(IMX060_SceneModesLUT)},
+ { "OV5640", OV5640_SceneModesLUT, ARRAY_SIZE(OV5640_SceneModesLUT)},
+ { "OV5650", OV5650_SceneModesLUT, ARRAY_SIZE(OV5650_SceneModesLUT)},
};
+} // namespace Camera
+} // namespace Ti
+
#endif
diff --git a/camera/inc/SensorListener.h b/camera/inc/SensorListener.h
index 913eb95..44037b7 100644
--- a/camera/inc/SensorListener.h
+++ b/camera/inc/SensorListener.h
@@ -30,7 +30,10 @@
#include <gui/SensorEventQueue.h>
#include <utils/Looper.h>
-namespace android {
+#include "Common.h"
+
+namespace Ti {
+namespace Camera {
/**
* SensorListner class - Registers with sensor manager to get sensor events
@@ -38,11 +41,11 @@ namespace android {
typedef void (*orientation_callback_t) (uint32_t orientation, uint32_t tilt, void* cookie);
-class SensorLooperThread : public Thread {
+class SensorLooperThread : public android::Thread {
public:
- SensorLooperThread(Looper* looper)
+ SensorLooperThread(android::Looper* looper)
: Thread(false) {
- mLooper = sp<Looper>(looper);
+ mLooper = android::sp<android::Looper>(looper);
}
~SensorLooperThread() {
mLooper.clear();
@@ -58,11 +61,11 @@ class SensorLooperThread : public Thread {
mLooper->wake();
}
private:
- sp<Looper> mLooper;
+ android::sp<android::Looper> mLooper;
};
-class SensorListener : public RefBase
+class SensorListener : public android::RefBase
{
/* public - types */
public:
@@ -85,17 +88,18 @@ public:
void handleOrientation(uint32_t orientation, uint32_t tilt);
/* public - member variables */
public:
- sp<SensorEventQueue> mSensorEventQueue;
+ android::sp<android::SensorEventQueue> mSensorEventQueue;
/* private - member variables */
private:
int sensorsEnabled;
orientation_callback_t mOrientationCb;
void *mCbCookie;
- sp<Looper> mLooper;
- sp<SensorLooperThread> mSensorLooperThread;
- Mutex mLock;
+ android::sp<android::Looper> mLooper;
+ android::sp<SensorLooperThread> mSensorLooperThread;
+ android::Mutex mLock;
};
-}
+} // namespace Camera
+} // namespace Ti
#endif
diff --git a/camera/inc/TICameraParameters.h b/camera/inc/TICameraParameters.h
index 4701cae..84fa9fa 100644
--- a/camera/inc/TICameraParameters.h
+++ b/camera/inc/TICameraParameters.h
@@ -14,16 +14,14 @@
* limitations under the License.
*/
-
-
-
#ifndef TI_CAMERA_PARAMETERS_H
#define TI_CAMERA_PARAMETERS_H
#include <utils/KeyedVector.h>
#include <utils/String8.h>
-namespace android {
+namespace Ti {
+namespace Camera {
///TI Specific Camera Parameters
class TICameraParameters
@@ -36,35 +34,48 @@ static const char KEY_SUPPORTED_CAMERAS[];
// Select logical Camera index
static const char KEY_CAMERA[];
static const char KEY_CAMERA_NAME[];
-static const char KEY_S3D_SUPPORTED[];
static const char KEY_BURST[];
static const char KEY_CAP_MODE[];
+static const char KEY_CAP_MODE_VALUES[];
static const char KEY_VNF[];
+static const char KEY_VNF_SUPPORTED[];
static const char KEY_SATURATION[];
static const char KEY_BRIGHTNESS[];
-static const char KEY_EXPOSURE_MODE[];
static const char KEY_SUPPORTED_EXPOSURE[];
+static const char KEY_EXPOSURE_MODE[];
+static const char KEY_SUPPORTED_MANUAL_EXPOSURE_MIN[];
+static const char KEY_SUPPORTED_MANUAL_EXPOSURE_MAX[];
+static const char KEY_SUPPORTED_MANUAL_EXPOSURE_STEP[];
+static const char KEY_SUPPORTED_MANUAL_GAIN_ISO_MIN[];
+static const char KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX[];
+static const char KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP[];
+static const char KEY_MANUAL_EXPOSURE[];
+static const char KEY_MANUAL_EXPOSURE_RIGHT[];
+static const char KEY_MANUAL_GAIN_ISO[];
+static const char KEY_MANUAL_GAIN_ISO_RIGHT[];
static const char KEY_CONTRAST[];
static const char KEY_SHARPNESS[];
static const char KEY_ISO[];
static const char KEY_SUPPORTED_ISO_VALUES[];
static const char KEY_SUPPORTED_IPP[];
static const char KEY_IPP[];
-static const char KEY_MAN_EXPOSURE[];
static const char KEY_METERING_MODE[];
-static const char KEY_PADDED_WIDTH[];
-static const char KEY_PADDED_HEIGHT[];
static const char KEY_EXP_BRACKETING_RANGE[];
+static const char KEY_EXP_GAIN_BRACKETING_RANGE[];
+static const char KEY_ZOOM_BRACKETING_RANGE[];
static const char KEY_TEMP_BRACKETING[];
static const char KEY_TEMP_BRACKETING_RANGE_POS[];
static const char KEY_TEMP_BRACKETING_RANGE_NEG[];
+static const char KEY_FLUSH_SHOT_CONFIG_QUEUE[];
static const char KEY_SHUTTER_ENABLE[];
static const char KEY_MEASUREMENT_ENABLE[];
static const char KEY_INITIAL_VALUES[];
static const char KEY_GBCE[];
+static const char KEY_GBCE_SUPPORTED[];
static const char KEY_GLBCE[];
-static const char KEY_MINFRAMERATE[];
-static const char KEY_MAXFRAMERATE[];
+static const char KEY_GLBCE_SUPPORTED[];
+static const char KEY_FRAMERATE_RANGES_EXT_SUPPORTED[];
+static const char KEY_FRAMERATES_EXT_SUPPORTED[];
// TI recording hint to notify camera adapters of possible recording
static const char KEY_RECORDING_HINT[];
@@ -72,32 +83,18 @@ static const char KEY_AUTO_FOCUS_LOCK[];
static const char KEY_CURRENT_ISO[];
static const char KEY_SENSOR_ORIENTATION[];
-static const char KEY_SENSOR_ORIENTATION_VALUES[];
-
-//TI extensions for zoom
-static const char ZOOM_SUPPORTED[];
-static const char ZOOM_UNSUPPORTED[];
//TI extensions for camera capabilies
static const char INITIAL_VALUES_TRUE[];
static const char INITIAL_VALUES_FALSE[];
-//TI extensions for enabling/disabling measurements
-static const char MEASUREMENT_ENABLE[];
-static const char MEASUREMENT_DISABLE[];
-
// TI extensions to add values for ManualConvergence and AutoConvergence mode
-static const char KEY_AUTOCONVERGENCE[];
static const char KEY_AUTOCONVERGENCE_MODE[];
-static const char KEY_MANUALCONVERGENCE_VALUES[];
-
-//TI extensions for enabling/disabling GLBCE
-static const char GLBCE_ENABLE[];
-static const char GLBCE_DISABLE[];
-
-//TI extensions for enabling/disabling GBCE
-static const char GBCE_ENABLE[];
-static const char GBCE_DISABLE[];
+static const char KEY_AUTOCONVERGENCE_MODE_VALUES[];
+static const char KEY_MANUAL_CONVERGENCE[];
+static const char KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN[];
+static const char KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX[];
+static const char KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP[];
// TI extensions to add Min frame rate Values
static const char VIDEO_MINFRAMERATE_5[];
@@ -109,16 +106,6 @@ static const char VIDEO_MINFRAMERATE_25[];
static const char VIDEO_MINFRAMERATE_30[];
static const char VIDEO_MINFRAMERATE_33[];
-// TI extensions for Manual Gain and Manual Exposure
-static const char KEY_MANUAL_EXPOSURE_LEFT[];
-static const char KEY_MANUAL_EXPOSURE_RIGHT[];
-static const char KEY_MANUAL_EXPOSURE_MODES[];
-static const char KEY_MANUAL_GAIN_EV_RIGHT[];
-static const char KEY_MANUAL_GAIN_EV_LEFT[];
-static const char KEY_MANUAL_GAIN_ISO_RIGHT[];
-static const char KEY_MANUAL_GAIN_ISO_LEFT[];
-static const char KEY_MANUAL_GAIN_MODES[];
-
//TI extensions for setting EXIF tags
static const char KEY_EXIF_MODEL[];
static const char KEY_EXIF_MAKE[];
@@ -128,13 +115,13 @@ static const char KEY_GPS_MAPDATUM[];
static const char KEY_GPS_VERSION[];
static const char KEY_GPS_DATESTAMP[];
-//TI extensions for enabling/disabling shutter sound
-static const char SHUTTER_ENABLE[];
-static const char SHUTTER_DISABLE[];
+// TI extensions for VTC
+static const char KEY_VTC_HINT[];
+static const char KEY_VIDEO_ENCODER_HANDLE[];
+static const char KEY_VIDEO_ENCODER_SLICE_HEIGHT[];
-//TI extensions for Temporal bracketing
-static const char BRACKET_ENABLE[];
-static const char BRACKET_DISABLE[];
+static const char RAW_WIDTH[];
+static const char RAW_HEIGHT[];
//TI extensions to Image post-processing
static const char IPP_LDCNSF[];
@@ -146,18 +133,19 @@ static const char IPP_NONE[];
static const char HIGH_PERFORMANCE_MODE[];
static const char HIGH_QUALITY_MODE[];
static const char HIGH_QUALITY_ZSL_MODE[];
+static const char CP_CAM_MODE[];
static const char VIDEO_MODE[];
-
+static const char EXPOSURE_BRACKETING[];
+static const char ZOOM_BRACKETING[];
+static const char TEMP_BRACKETING[];
// TI extensions to standard android pixel formats
-static const char PIXEL_FORMAT_RAW[];
+static const char PIXEL_FORMAT_UNUSED[];
static const char PIXEL_FORMAT_JPS[];
static const char PIXEL_FORMAT_MPO[];
-static const char PIXEL_FORMAT_RAW_JPEG[];
-static const char PIXEL_FORMAT_RAW_MPO[];
+static const char PIXEL_FORMAT_YUV422I_UYVY[];
// TI extensions to standard android scene mode settings
-static const char SCENE_MODE_SPORT[];
static const char SCENE_MODE_CLOSEUP[];
static const char SCENE_MODE_AQUA[];
static const char SCENE_MODE_SNOWBEACH[];
@@ -176,7 +164,7 @@ static const char WHITE_BALANCE_SUNSET[];
static const char WHITE_BALANCE_FACE[];
// TI extensions to add exposure preset modes to android api
-static const char EXPOSURE_MODE_OFF[];
+static const char EXPOSURE_MODE_MANUAL[];
static const char EXPOSURE_MODE_AUTO[];
static const char EXPOSURE_MODE_NIGHT[];
static const char EXPOSURE_MODE_BACKLIGHT[];
@@ -192,6 +180,7 @@ static const char EXPOSURE_MODE_FACE[];
static const char FOCUS_MODE_PORTRAIT[];
static const char FOCUS_MODE_EXTENDED[];
static const char FOCUS_MODE_FACE[];
+static const char FOCUS_MODE_OFF[];
// TI extensions to add iso values
static const char ISO_MODE_AUTO[];
@@ -209,17 +198,34 @@ static const char EFFECT_VIVID[];
static const char EFFECT_COLOR_SWAP[];
static const char EFFECT_BLACKWHITE[];
-static const char KEY_S3D2D_PREVIEW[];
-static const char KEY_S3D2D_PREVIEW_MODE[];
+//TI extensions for stereo frame layouts
+static const char KEY_S3D_PRV_FRAME_LAYOUT[];
+static const char KEY_S3D_PRV_FRAME_LAYOUT_VALUES[];
+static const char KEY_S3D_CAP_FRAME_LAYOUT[];
+static const char KEY_S3D_CAP_FRAME_LAYOUT_VALUES[];
+
+//TI extensions for stereo frame layouts
+static const char S3D_NONE[];
+static const char S3D_TB_FULL[];
+static const char S3D_SS_FULL[];
+static const char S3D_TB_SUBSAMPLED[];
+static const char S3D_SS_SUBSAMPLED[];
+
+//TI extensions for 3D resolutions
+static const char KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES[];
+static const char KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES[];
+static const char KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES[];
+static const char KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES[];
+static const char KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES[];
+static const char KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES[];
// TI extensions to add values for AutoConvergence settings.
static const char AUTOCONVERGENCE_MODE_DISABLE[];
static const char AUTOCONVERGENCE_MODE_FRAME[];
static const char AUTOCONVERGENCE_MODE_CENTER[];
-static const char AUTOCONVERGENCE_MODE_FFT[];
+static const char AUTOCONVERGENCE_MODE_TOUCH[];
static const char AUTOCONVERGENCE_MODE_MANUAL[];
-
//TI extensions for flash mode settings
static const char FLASH_MODE_FILL_IN[];
@@ -234,9 +240,20 @@ static const char ORIENTATION_SENSOR_270[];
static const char FACING_FRONT[];
static const char FACING_BACK[];
-};
+static const char KEY_MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[];
+static const char KEY_MECHANICAL_MISALIGNMENT_CORRECTION[];
+
+//TI extensions for enable/disable algos
+static const char KEY_ALGO_FIXED_GAMMA[];
+static const char KEY_ALGO_NSF1[];
+static const char KEY_ALGO_NSF2[];
+static const char KEY_ALGO_SHARPENING[];
+static const char KEY_ALGO_THREELINCOLORMAP[];
+static const char KEY_ALGO_GIC[];
};
-#endif
+} // namespace Camera
+} // namespace Ti
+#endif
diff --git a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
index b9d3952..cf277ac 100644
--- a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
+++ b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -19,16 +19,40 @@
#ifndef V4L_CAMERA_ADAPTER_H
#define V4L_CAMERA_ADAPTER_H
+#include <linux/videodev2.h>
+
#include "CameraHal.h"
#include "BaseCameraAdapter.h"
#include "DebugUtils.h"
-namespace android {
+namespace Ti {
+namespace Camera {
#define DEFAULT_PIXEL_FORMAT V4L2_PIX_FMT_YUYV
+
#define NB_BUFFER 10
-#define DEVICE "/dev/video4"
+#define DEVICE "/dev/videoxx"
+#define DEVICE_PATH "/dev/"
+#define DEVICE_NAME "videoxx"
+
+typedef int V4L_HANDLETYPE;
+
+struct CapPixelformat {
+ uint32_t pixelformat;
+ const char *param;
+};
+
+struct CapResolution {
+ size_t width, height;
+ char param[10];
+};
+struct CapU32 {
+ uint32_t num;
+ const char *param;
+};
+
+typedef CapU32 CapFramerate;
struct VideoInfo {
struct v4l2_capability cap;
@@ -36,6 +60,7 @@ struct VideoInfo {
struct v4l2_buffer buf;
struct v4l2_requestbuffers rb;
void *mem[NB_BUFFER];
+ void *CaptureBuffers[NB_BUFFER];
bool isStreaming;
int width;
int height;
@@ -43,6 +68,16 @@ struct VideoInfo {
int framesizeIn;
};
+typedef struct V4L_TI_CAPTYPE {
+ uint16_t ulPreviewFormatCount; // supported preview pixelformat count
+ uint32_t ePreviewFormats[32];
+ uint16_t ulPreviewResCount; // supported preview resolution sizes
+ CapResolution tPreviewRes[32];
+ uint16_t ulCaptureResCount; // supported capture resolution sizes
+ CapResolution tCaptureRes[32];
+ uint16_t ulFrameRateCount; // supported frame rate
+ uint16_t ulFrameRates[32];
+}V4L_TI_CAPTYPE;
/**
* Class which completely abstracts the camera hardware interaction from camera hal
@@ -64,32 +99,35 @@ public:
public:
- V4LCameraAdapter();
+ V4LCameraAdapter(size_t sensor_index);
~V4LCameraAdapter();
///Initialzes the camera adapter creates any resources required
- virtual status_t initialize(CameraProperties::Properties*, int sensor_index=0);
+ virtual status_t initialize(CameraProperties::Properties*);
//APIs to configure Camera adapter and get the current parameter set
- virtual status_t setParameters(const CameraParameters& params);
- virtual void getParameters(CameraParameters& params);
+ virtual status_t setParameters(const android::CameraParameters& params);
+ virtual void getParameters(android::CameraParameters& params);
// API
- virtual status_t UseBuffersPreview(void* bufArr, int num);
+ virtual status_t UseBuffersPreview(CameraBuffer *bufArr, int num);
+ virtual status_t UseBuffersCapture(CameraBuffer *bufArr, int num);
- //API to flush the buffers for preview
- status_t flushBuffers();
+ static status_t getCaps(const int sensorId, CameraProperties::Properties* params, V4L_HANDLETYPE handle);
protected:
//----------Parent class method implementation------------------------------------
virtual status_t startPreview();
virtual status_t stopPreview();
- virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable);
- virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType);
+ virtual status_t takePicture();
+ virtual status_t stopImageCapture();
+ virtual status_t autoFocus();
+ virtual status_t useBuffers(CameraMode mode, CameraBuffer *bufArr, int num, size_t length, unsigned int queueable);
+ virtual status_t fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::FrameType frameType);
virtual status_t getFrameSize(size_t &width, size_t &height);
- virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount);
+ virtual status_t getPictureBufferSize(CameraFrame *frame, size_t bufferCount);
virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);
//-----------------------------------------------------------------------------
@@ -97,13 +135,13 @@ protected:
private:
- class PreviewThread : public Thread {
+ class PreviewThread : public android::Thread {
V4LCameraAdapter* mAdapter;
public:
PreviewThread(V4LCameraAdapter* hw) :
Thread(false), mAdapter(hw) { }
virtual void onFirstRef() {
- run("CameraPreviewThread", PRIORITY_URGENT_DISPLAY);
+ run("CameraPreviewThread", android::PRIORITY_URGENT_DISPLAY);
}
virtual bool threadLoop() {
mAdapter->previewThread();
@@ -122,15 +160,56 @@ private:
public:
private:
+ //capabilities data
+ static const CapPixelformat mPixelformats [];
+ static const CapResolution mPreviewRes [];
+ static const CapFramerate mFramerates [];
+ static const CapResolution mImageCapRes [];
+
+ //camera defaults
+ static const char DEFAULT_PREVIEW_FORMAT[];
+ static const char DEFAULT_PREVIEW_SIZE[];
+ static const char DEFAULT_FRAMERATE[];
+ static const char DEFAULT_NUM_PREV_BUFS[];
+
+ static const char DEFAULT_PICTURE_FORMAT[];
+ static const char DEFAULT_PICTURE_SIZE[];
+ static const char DEFAULT_FOCUS_MODE[];
+ static const char * DEFAULT_VSTAB;
+ static const char * DEFAULT_VNF;
+
+ static status_t insertDefaults(CameraProperties::Properties*, V4L_TI_CAPTYPE&);
+ static status_t insertCapabilities(CameraProperties::Properties*, V4L_TI_CAPTYPE&);
+ static status_t insertPreviewFormats(CameraProperties::Properties* , V4L_TI_CAPTYPE&);
+ static status_t insertPreviewSizes(CameraProperties::Properties* , V4L_TI_CAPTYPE&);
+ static status_t insertImageSizes(CameraProperties::Properties* , V4L_TI_CAPTYPE&);
+ static status_t insertFrameRates(CameraProperties::Properties* , V4L_TI_CAPTYPE&);
+ static status_t sortAscend(V4L_TI_CAPTYPE&, uint16_t ) ;
+
+ status_t v4lIoctl(int, int, void*);
+ status_t v4lInitMmap(int&);
+ status_t v4lInitUsrPtr(int&);
+ status_t v4lStartStreaming();
+ status_t v4lStopStreaming(int nBufferCount);
+ status_t v4lSetFormat(int, int, uint32_t);
+ status_t restartPreview();
+
+
int mPreviewBufferCount;
- KeyedVector<int, int> mPreviewBufs;
- mutable Mutex mPreviewBufsLock;
+ int mPreviewBufferCountQueueable;
+ int mCaptureBufferCount;
+ int mCaptureBufferCountQueueable;
+ android::KeyedVector<CameraBuffer *, int> mPreviewBufs;
+ android::KeyedVector<CameraBuffer *, int> mCaptureBufs;
+ mutable android::Mutex mPreviewBufsLock;
+ mutable android::Mutex mCaptureBufsLock;
+ mutable android::Mutex mStopPreviewLock;
- CameraParameters mParams;
+ android::CameraParameters mParams;
bool mPreviewing;
bool mCapturing;
- Mutex mLock;
+ android::Mutex mLock;
int mFrameCount;
int mLastFrameCount;
@@ -142,17 +221,18 @@ private:
int mSensorIndex;
- // protected by mLock
- sp<PreviewThread> mPreviewThread;
-
- struct VideoInfo *mVideoInfo;
- int mCameraHandle;
+ // protected by mLock
+ android::sp<PreviewThread> mPreviewThread;
+ struct VideoInfo *mVideoInfo;
+ int mCameraHandle;
int nQueued;
int nDequeued;
};
-}; //// namespace
-#endif //V4L_CAMERA_ADAPTER_H
+} // namespace Camera
+} // namespace Ti
+
+#endif //V4L_CAMERA_ADAPTER_H