Diffstat (limited to 'camera')
-rwxr-xr-x  camera/ANativeWindowDisplayAdapter.cpp          1096
-rw-r--r--  camera/Android.mk                                 131
-rw-r--r--  camera/AppCallbackNotifier.cpp                   1399
-rw-r--r--  camera/BaseCameraAdapter.cpp                     1969
-rwxr-xr-x  camera/CameraHal.cpp                             3030
-rw-r--r--  camera/CameraHalCommon.cpp                        121
-rw-r--r--  camera/CameraHalUtilClasses.cpp                   273
-rw-r--r--  camera/CameraHal_Module.cpp                       676
-rw-r--r--  camera/CameraParameters.cpp                       184
-rw-r--r--  camera/CameraProperties.cpp                       125
-rw-r--r--  camera/MemoryManager.cpp                          284
-rw-r--r--  camera/OMXCameraAdapter/OMX3A.cpp                1083
-rw-r--r--  camera/OMXCameraAdapter/OMXAlgo.cpp              1181
-rw-r--r--  camera/OMXCameraAdapter/OMXCameraAdapter.cpp     3160
-rw-r--r--  camera/OMXCameraAdapter/OMXCapabilities.cpp      1181
-rw-r--r--  camera/OMXCameraAdapter/OMXCapture.cpp           1066
-rw-r--r--  camera/OMXCameraAdapter/OMXDefaults.cpp            71
-rw-r--r--  camera/OMXCameraAdapter/OMXExif.cpp               567
-rw-r--r--  camera/OMXCameraAdapter/OMXFD.cpp                 327
-rw-r--r--  camera/OMXCameraAdapter/OMXFocus.cpp              673
-rw-r--r--  camera/OMXCameraAdapter/OMXZoom.cpp               270
-rw-r--r--  camera/SensorListener.cpp                         232
-rw-r--r--  camera/TICameraParameters.cpp                     202
-rw-r--r--  camera/V4LCameraAdapter/V4LCameraAdapter.cpp      615
-rw-r--r--  camera/inc/ANativeWindowDisplayAdapter.h          188
-rw-r--r--  camera/inc/BaseCameraAdapter.h                    245
-rw-r--r--  camera/inc/CameraHal.h                           1144
-rw-r--r--  camera/inc/CameraProperties.h                     180
-rw-r--r--  camera/inc/General3A_Settings.h                   262
-rw-r--r--  camera/inc/OMXCameraAdapter/OMXCameraAdapter.h    834
-rw-r--r--  camera/inc/SensorListener.h                       101
-rw-r--r--  camera/inc/TICameraParameters.h                   241
-rw-r--r--  camera/inc/V4LCameraAdapter/V4LCameraAdapter.h    158
-rw-r--r--  camera/inc/VideoMetadata.h                         32
34 files changed, 23301 insertions, 0 deletions
diff --git a/camera/ANativeWindowDisplayAdapter.cpp b/camera/ANativeWindowDisplayAdapter.cpp
new file mode 100755
index 0000000..38e641f
--- /dev/null
+++ b/camera/ANativeWindowDisplayAdapter.cpp
@@ -0,0 +1,1096 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+
+#define LOG_TAG "CameraHAL"
+
+#include "ANativeWindowDisplayAdapter.h"
+#include <OMX_IVCommon.h>
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+namespace android {
+
+///Constant declarations
+///@todo Check the time units
+const int ANativeWindowDisplayAdapter::DISPLAY_TIMEOUT = 1000; // seconds
+
+//Suspends buffers after given amount of failed dq's
+const int ANativeWindowDisplayAdapter::FAILED_DQS_TO_SUSPEND = 3;
+
+
+
+OMX_COLOR_FORMATTYPE toOMXPixFormat(const char* parameters_format)
+{
+ OMX_COLOR_FORMATTYPE pixFormat;
+
+ if ( parameters_format != NULL )
+ {
+ if (strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ {
+ CAMHAL_LOGDA("CbYCrY format selected");
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ }
+ else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
+ {
+ CAMHAL_LOGDA("YUV420SP format selected");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+ else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ {
+ CAMHAL_LOGDA("RGB565 format selected");
+ pixFormat = OMX_COLOR_Format16bitRGB565;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Invalid format, CbYCrY format selected as default");
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ }
+ }
+ else {
+ CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY");
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ }
+
+ return pixFormat;
+}
+
+const char* getPixFormatConstant(const char* parameters_format)
+{
+ const char* pixFormat;
+
+ if ( parameters_format != NULL )
+ {
+ if (strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ {
+ CAMHAL_LOGVA("CbYCrY format selected");
+ pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
+ else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
+ {
+ // TODO(XXX): We are treating YV12 the same as YUV420SP
+ CAMHAL_LOGVA("YUV420SP format selected");
+ pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+ else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ {
+ CAMHAL_LOGVA("RGB565 format selected");
+ pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_RGB565;
+ }
+ else
+ {
+ CAMHAL_LOGEA("Invalid format, CbYCrY format selected as default");
+ pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY");
+ pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
+
+ return pixFormat;
+}
+
+const size_t getBufSize(const char* parameters_format, int width, int height)
+{
+ int buf_size;
+
+ if ( parameters_format != NULL ) {
+ if (strcmp(parameters_format,
+ (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ buf_size = width * height * 2;
+ }
+ else if((strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) ||
+ (strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420P) == 0)) {
+ buf_size = width * height * 3 / 2;
+ }
+ else if(strcmp(parameters_format,
+ (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ buf_size = width * height * 2;
+ } else {
+ CAMHAL_LOGEA("Invalid format");
+ buf_size = 0;
+ }
+ } else {
+ CAMHAL_LOGEA("Preview format is NULL");
+ buf_size = 0;
+ }
+
+ return buf_size;
+}
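+// Example sizing for the formulas above: a 640x480 YUV420SP preview buffer is
+// 640*480*3/2 = 460800 bytes, while the same resolution in YUV422I or RGB565
+// needs 640*480*2 = 614400 bytes.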
+/*--------------------ANativeWindowDisplayAdapter Class STARTS here-----------------------------*/
+
+
+/**
+ * Display Adapter class STARTS here..
+ */
+ANativeWindowDisplayAdapter::ANativeWindowDisplayAdapter():mDisplayThread(NULL),
+ mDisplayState(ANativeWindowDisplayAdapter::DISPLAY_INIT),
+ mDisplayEnabled(false),
+ mBufferCount(0)
+
+
+
+{
+ LOG_FUNCTION_NAME;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ mShotToShot = false;
+ mStartCapture.tv_sec = 0;
+ mStartCapture.tv_usec = 0;
+ mStandbyToShot.tv_sec = 0;
+ mStandbyToShot.tv_usec = 0;
+ mMeasureStandby = false;
+#endif
+
+ mPixelFormat = NULL;
+ mBufferHandleMap = NULL;
+ mGrallocHandleMap = NULL;
+ mOffsetsMap = NULL;
+ mFrameProvider = NULL;
+
+ mFrameWidth = 0;
+ mFrameHeight = 0;
+ mPreviewWidth = 0;
+ mPreviewHeight = 0;
+
+ mSuspend = false;
+ mFailedDQs = 0;
+
+ mPaused = false;
+ mXOff = 0;
+ mYOff = 0;
+ mFirstInit = false;
+
+ mFD = -1;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+ANativeWindowDisplayAdapter::~ANativeWindowDisplayAdapter()
+{
+ Semaphore sem;
+ TIUTILS::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+ ///If Frame provider exists
+ if(mFrameProvider)
+ {
+ // Unregister with the frame provider
+ mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
+ }
+
+ ///The ANativeWindow object will get destroyed here
+ destroy();
+
+ ///If Display thread exists
+ if(mDisplayThread.get())
+ {
+ ///Kill the display thread
+ sem.Create();
+ msg.command = DisplayThread::DISPLAY_EXIT;
+
+ // Send the semaphore to signal once the command is completed
+ msg.arg1 = &sem;
+
+ ///Post the message to display thread
+ mDisplayThread->msgQ().put(&msg);
+
+ ///Wait for the ACK - implies that the thread is now started and waiting for frames
+ sem.Wait();
+
+ // Exit and cleanup the thread
+ mDisplayThread->requestExitAndWait();
+
+ // Delete the display thread
+ mDisplayThread.clear();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+status_t ANativeWindowDisplayAdapter::initialize()
+{
+ LOG_FUNCTION_NAME;
+
+ ///Create the display thread
+ mDisplayThread = new DisplayThread(this);
+ if ( !mDisplayThread.get() )
+ {
+ CAMHAL_LOGEA("Couldn't create display thread");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_MEMORY;
+ }
+
+ ///Start the display thread
+ status_t ret = mDisplayThread->run("DisplayThread", PRIORITY_URGENT_DISPLAY);
+ if ( ret != NO_ERROR )
+ {
+ CAMHAL_LOGEA("Couldn't run display thread");
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+int ANativeWindowDisplayAdapter::setPreviewWindow(preview_stream_ops_t* window)
+{
+ LOG_FUNCTION_NAME;
+ ///Note that Display Adapter cannot work without a valid window object
+ if ( !window)
+ {
+ CAMHAL_LOGEA("NULL window object passed to DisplayAdapter");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ ///Destroy the existing window object, if it exists
+ destroy();
+
+ ///Move to new window obj
+ mANativeWindow = window;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int ANativeWindowDisplayAdapter::setFrameProvider(FrameNotifier *frameProvider)
+{
+ LOG_FUNCTION_NAME;
+
+ // Check for NULL pointer
+ if ( !frameProvider )
+ {
+ CAMHAL_LOGEA("NULL passed for frame provider");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+    /** Don't do anything here. Just save the pointer for use when display is
+ actually enabled or disabled
+ */
+ mFrameProvider = new FrameProvider(frameProvider, this, frameCallbackRelay);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int ANativeWindowDisplayAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == errorNotifier )
+ {
+ CAMHAL_LOGEA("Invalid Error Notifier reference");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mErrorNotifier = errorNotifier;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+status_t ANativeWindowDisplayAdapter::setSnapshotTimeRef(struct timeval *refTime)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != refTime )
+ {
+ Mutex::Autolock lock(mLock);
+ memcpy(&mStartCapture, refTime, sizeof(struct timeval));
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+#endif
+
+
+int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct timeval *refTime, S3DParameters *s3dParams)
+{
+ Semaphore sem;
+ TIUTILS::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+ if ( mDisplayEnabled )
+ {
+ CAMHAL_LOGDA("Display is already enabled");
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+ }
+
+#if 0 //TODO: s3d is not part of bringup...will reenable
+ if (s3dParams)
+ mOverlay->set_s3d_params(s3dParams->mode, s3dParams->framePacking,
+ s3dParams->order, s3dParams->subSampling);
+#endif
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ if ( NULL != refTime )
+ {
+ Mutex::Autolock lock(mLock);
+ memcpy(&mStandbyToShot, refTime, sizeof(struct timeval));
+ mMeasureStandby = true;
+ }
+
+#endif
+
+ //Send START_DISPLAY COMMAND to display thread. Display thread will start and then wait for a message
+ sem.Create();
+ msg.command = DisplayThread::DISPLAY_START;
+
+ // Send the semaphore to signal once the command is completed
+ msg.arg1 = &sem;
+
+ ///Post the message to display thread
+ mDisplayThread->msgQ().put(&msg);
+
+ ///Wait for the ACK - implies that the thread is now started and waiting for frames
+ sem.Wait();
+
+ // Register with the frame provider for frames
+ mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+
+ mDisplayEnabled = true;
+ mPreviewWidth = width;
+ mPreviewHeight = height;
+
+ CAMHAL_LOGVB("mPreviewWidth = %d mPreviewHeight = %d", mPreviewWidth, mPreviewHeight);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int ANativeWindowDisplayAdapter::disableDisplay()
+{
+ LOG_FUNCTION_NAME;
+
+ if(!mDisplayEnabled)
+ {
+ CAMHAL_LOGDA("Display is already disabled");
+ LOG_FUNCTION_NAME_EXIT;
+ return ALREADY_EXISTS;
+ }
+
+ // Unregister with the frame provider here
+ mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+
+ if ( NULL != mDisplayThread.get() )
+ {
+ //Send STOP_DISPLAY COMMAND to display thread. Display thread will stop and dequeue all messages
+ // and then wait for message
+ Semaphore sem;
+ sem.Create();
+ TIUTILS::Message msg;
+ msg.command = DisplayThread::DISPLAY_STOP;
+
+ // Send the semaphore to signal once the command is completed
+ msg.arg1 = &sem;
+
+ ///Post the message to display thread
+ mDisplayThread->msgQ().put(&msg);
+
+ ///Wait for the ACK for display to be disabled
+
+ sem.Wait();
+
+ }
+
+ Mutex::Autolock lock(mLock);
+ {
+ ///Reset the display enabled flag
+ mDisplayEnabled = false;
+
+ ///Reset the offset values
+ mXOff = 0;
+ mYOff = 0;
+
+ ///Reset the frame width and height values
+ mFrameWidth =0;
+ mFrameHeight = 0;
+ mPreviewWidth = 0;
+ mPreviewHeight = 0;
+
+
+        if (mANativeWindow) {
+            for(unsigned int i = 0; i < mFramesWithCameraAdapterMap.size(); i++) {
+                int value = mFramesWithCameraAdapterMap.valueAt(i);
+                mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[value]);
+            }
+        } else {
+            LOGE("mANativeWindow is NULL");
+        }
+
+ ///Clear the frames with camera adapter map
+ mFramesWithCameraAdapterMap.clear();
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+status_t ANativeWindowDisplayAdapter::pauseDisplay(bool pause)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ {
+ Mutex::Autolock lock(mLock);
+ mPaused = pause;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+void ANativeWindowDisplayAdapter::destroy()
+{
+ LOG_FUNCTION_NAME;
+
+ ///Check if the display is disabled, if not disable it
+ if ( mDisplayEnabled )
+ {
+ CAMHAL_LOGDA("WARNING: Calling destroy of Display adapter when display enabled. Disabling display..");
+ disableDisplay();
+ }
+
+ mBufferCount = 0;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+// Implementation of inherited interfaces
+void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs)
+{
+ LOG_FUNCTION_NAME;
+ status_t err;
+ int i = -1;
+ const int lnumBufs = numBufs;
+ int32_t *buffers = new int32_t[lnumBufs];
+ mBufferHandleMap = new buffer_handle_t*[lnumBufs];
+ mGrallocHandleMap = new IMG_native_handle_t*[lnumBufs];
+ int undequeued = 0;
+
+ // Set gralloc usage bits for window.
+ err = mANativeWindow->set_usage(mANativeWindow, GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
+ | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
+ if (err != 0) {
+ LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+ return NULL;
+ }
+
+ CAMHAL_LOGDB("Number of buffers set to ANativeWindow %d", numBufs);
+ //if(mBufferCount != numBufs)
+ // {
+ ///Set the number of buffers needed for camera preview
+ err = mANativeWindow->set_buffer_count(mANativeWindow, numBufs);
+ if (err != 0) {
+ LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), -err);
+ return NULL;
+ }
+ CAMHAL_LOGDB("Configuring %d buffers for ANativeWindow", numBufs);
+ mBufferCount = numBufs;
+ //}
+
+ // Set window geometry
+ err = mANativeWindow->set_buffers_geometry(
+ mANativeWindow,
+ width,
+ height,
+ /*toOMXPixFormat(format)*/HAL_PIXEL_FORMAT_NV12); // Gralloc only supports NV12 alloc!
+
+ if (err != 0) {
+ LOGE("native_window_set_buffers_geometry failed: %s (%d)", strerror(-err), -err);
+ return NULL;
+ }
+
+    ///We simply return the buffers obtained from ANativeWindow when the width and height match;
+    ///otherwise (vstab, vnf case) buffers would have to be re-allocated through ANativeWindow first
+    ///@todo - Re-allocate buffers for vnf and vstab using the width, height, format, numBufs etc
+ if ( (buffers == NULL) || (mBufferHandleMap == NULL) )
+ {
+ CAMHAL_LOGEA("Couldn't create array for ANativeWindow buffers");
+ LOG_FUNCTION_NAME_EXIT;
+ delete [] buffers;
+ return NULL;
+ }
+
+ mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeued);
+
+ for ( i=0; i < mBufferCount; i++ )
+ {
+ IMG_native_handle_t** hndl2hndl;
+ IMG_native_handle_t* handle;
+ int stride; // dummy variable to get stride
+ // TODO(XXX): Do we need to keep stride information in camera hal?
+
+ err = mANativeWindow->dequeue_buffer(mANativeWindow, (buffer_handle_t**) &hndl2hndl, &stride);
+
+ if (err != 0) {
+ CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ goto fail;
+ }
+
+ handle = *hndl2hndl;
+
+ mBufferHandleMap[i] = (buffer_handle_t*) hndl2hndl;
+ mGrallocHandleMap[i] = handle;
+ mFramesWithCameraAdapterMap.add((int) mGrallocHandleMap[i], i);
+
+ bytes = getBufSize(format, width, height);
+
+ CAMHAL_LOGDB("Adding buffer index=%d, address=0x%x", i, buffers[i]);
+ }
+
+
+ // lock the initial queueable buffers
+ for( i = 0; i < mBufferCount-undequeued; i++ )
+ {
+ mANativeWindow->lock_buffer(mANativeWindow, mBufferHandleMap[i]);
+ }
+
+ // return the rest of the buffers back to ANativeWindow
+ for(i = (mBufferCount-undequeued); i >= 0 && i < mBufferCount; i++)
+ {
+ mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[i]);
+ }
+
+ mFirstInit = true;
+ mPixelFormat = getPixFormatConstant(format);
+ mFrameWidth = width;
+ mFrameHeight = height;
+
+ return mGrallocHandleMap;
+
+ fail:
+ // need to cancel buffers if any were dequeued
+ for (int start = 0; start < i && i > 0; start++) {
+ int err = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[start]);
+ if (err != 0) {
+ CAMHAL_LOGEB("cancelBuffer failed w/ error 0x%08x", err);
+ }
+ }
+ CAMHAL_LOGEA("Error occurred, performing cleanup");
+ if ( buffers )
+ {
+ delete [] buffers;
+ }
+
+ if ( NULL != mErrorNotifier.get() )
+ {
+ mErrorNotifier->errorNotify(-ENOMEM);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+
+}
+
+uint32_t * ANativeWindowDisplayAdapter::getOffsets()
+{
+ const int lnumBufs = mBufferCount;
+
+ LOG_FUNCTION_NAME;
+
+ // TODO(XXX): Need to remove getOffsets from the API. No longer needed
+
+ if ( NULL == mANativeWindow )
+ {
+ CAMHAL_LOGEA("mANativeWindow reference is missing");
+ goto fail;
+ }
+
+ if( mBufferHandleMap == NULL)
+ {
+ CAMHAL_LOGEA("Buffers not allocated yet!!");
+ goto fail;
+ }
+
+ if(mOffsetsMap == NULL)
+ {
+ mOffsetsMap = new uint32_t[lnumBufs];
+ for(int i = 0; i < mBufferCount; i++)
+ {
+ IMG_native_handle_t* handle = (IMG_native_handle_t*) *(mBufferHandleMap[i]);
+ mOffsetsMap[i] = 0;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mOffsetsMap;
+
+ fail:
+
+ if ( NULL != mOffsetsMap )
+ {
+ delete [] mOffsetsMap;
+ mOffsetsMap = NULL;
+ }
+
+ if ( NULL != mErrorNotifier.get() )
+ {
+ mErrorNotifier->errorNotify(-ENOSYS);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NULL;
+}
+
+int ANativeWindowDisplayAdapter::maxQueueableBuffers(unsigned int& queueable)
+{
+ LOG_FUNCTION_NAME;
+ int ret = NO_ERROR;
+ int undequeued = 0;
+
+ if(mBufferCount == 0)
+ {
+ ret = -ENOSYS;
+ goto end;
+ }
+
+ if(!mANativeWindow)
+ {
+ ret = -ENOSYS;
+ goto end;
+ }
+
+ mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeued);
+
+ queueable = mBufferCount - undequeued;
+
+ end:
+    LOG_FUNCTION_NAME_EXIT;
+    return ret;
+}
+
+int ANativeWindowDisplayAdapter::getFd()
+{
+ LOG_FUNCTION_NAME;
+
+ if(mFD == -1)
+ {
+ IMG_native_handle_t* handle = (IMG_native_handle_t*) *(mBufferHandleMap[0]);
+ // TODO: should we dup the fd? not really necessary and another thing for ANativeWindow
+ // to manage and close...
+ mFD = dup(handle->fd[0]);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mFD;
+
+}
+
+int ANativeWindowDisplayAdapter::freeBuffer(void* buf)
+{
+ LOG_FUNCTION_NAME;
+
+ int *buffers = (int *) buf;
+ if((int *)mGrallocHandleMap != buffers)
+ {
+ CAMHAL_LOGEA("CameraHal passed wrong set of buffers to free!!!");
+ if (mGrallocHandleMap != NULL)
+ delete []mGrallocHandleMap;
+ mGrallocHandleMap = NULL;
+ }
+
+ if ( NULL != buf )
+ {
+ delete [] buffers;
+ }
+
+ if( mBufferHandleMap != NULL)
+ {
+ delete [] mBufferHandleMap;
+ mBufferHandleMap = NULL;
+ }
+
+ if ( NULL != mOffsetsMap )
+ {
+ delete [] mOffsetsMap;
+ mOffsetsMap = NULL;
+ }
+
+ if( mFD != -1)
+ {
+ close(mFD); // close duped handle
+ mFD = -1;
+ }
+
+ return NO_ERROR;
+}
+
+
+bool ANativeWindowDisplayAdapter::supportsExternalBuffering()
+{
+ return false;
+}
+
+int ANativeWindowDisplayAdapter::useBuffers(void *bufArr, int num)
+{
+ return NO_ERROR;
+}
+
+void ANativeWindowDisplayAdapter::displayThread()
+{
+ bool shouldLive = true;
+ int timeout = 0;
+ status_t ret;
+
+ LOG_FUNCTION_NAME;
+
+ while(shouldLive)
+ {
+ ret = TIUTILS::MessageQueue::waitForMsg(&mDisplayThread->msgQ()
+ , &mDisplayQ
+ , NULL
+ , ANativeWindowDisplayAdapter::DISPLAY_TIMEOUT);
+
+ if ( !mDisplayThread->msgQ().isEmpty() )
+ {
+ ///Received a message from CameraHal, process it
+ shouldLive = processHalMsg();
+
+ }
+ else if( !mDisplayQ.isEmpty())
+ {
+ if ( mDisplayState== ANativeWindowDisplayAdapter::DISPLAY_INIT )
+ {
+
+ ///If display adapter is not started, continue
+ continue;
+
+ }
+ else
+ {
+ TIUTILS::Message msg;
+ ///Get the dummy msg from the displayQ
+ if(mDisplayQ.get(&msg)!=NO_ERROR)
+ {
+ CAMHAL_LOGEA("Error in getting message from display Q");
+ continue;
+ }
+
+ // There is a frame from ANativeWindow for us to dequeue
+ // We dequeue and return the frame back to Camera adapter
+ if(mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_STARTED)
+ {
+ handleFrameReturn();
+ }
+
+ if (mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_EXITED)
+ {
+ ///we exit the thread even though there are frames still to dequeue. They will be dequeued
+ ///in disableDisplay
+ shouldLive = false;
+ }
+ }
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+bool ANativeWindowDisplayAdapter::processHalMsg()
+{
+ TIUTILS::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+
+ mDisplayThread->msgQ().get(&msg);
+ bool ret = true, invalidCommand = false;
+
+ switch ( msg.command )
+ {
+
+ case DisplayThread::DISPLAY_START:
+
+ CAMHAL_LOGDA("Display thread received DISPLAY_START command from Camera HAL");
+ mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_STARTED;
+
+ break;
+
+ case DisplayThread::DISPLAY_STOP:
+
+ ///@bug There is no API to disable SF without destroying it
+ ///@bug Buffers might still be w/ display and will get displayed
+            ///@remarks Ideal sequence should be something like this
+ ///mOverlay->setParameter("enabled", false);
+ CAMHAL_LOGDA("Display thread received DISPLAY_STOP command from Camera HAL");
+ mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_STOPPED;
+
+ break;
+
+ case DisplayThread::DISPLAY_EXIT:
+
+ CAMHAL_LOGDA("Display thread received DISPLAY_EXIT command from Camera HAL.");
+ CAMHAL_LOGDA("Stopping display thread...");
+ mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_EXITED;
+ ///Note that the SF can have pending buffers when we disable the display
+ ///This is normal and the expectation is that they may not be displayed.
+ ///This is to ensure that the user experience is not impacted
+ ret = false;
+ break;
+
+ default:
+
+ CAMHAL_LOGEB("Invalid Display Thread Command 0x%x.", msg.command);
+ invalidCommand = true;
+
+ break;
+ }
+
+ ///Signal the semaphore if it is sent as part of the message
+ if ( ( msg.arg1 ) && ( !invalidCommand ) )
+ {
+
+ CAMHAL_LOGDA("+Signalling display semaphore");
+ Semaphore &sem = *((Semaphore*)msg.arg1);
+
+ sem.Signal();
+
+ CAMHAL_LOGDA("-Signalling display semaphore");
+ }
+
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+
+status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::DisplayFrame &dispFrame)
+{
+ status_t ret = NO_ERROR;
+ uint32_t actualFramesWithDisplay = 0;
+ android_native_buffer_t *buffer = NULL;
+ int i;
+
+ ///@todo Do cropping based on the stabilized frame coordinates
+ ///@todo Insert logic to drop frames here based on refresh rate of
+ ///display or rendering rate whichever is lower
+ ///Queue the buffer to overlay
+ for ( i = 0; i < mBufferCount; i++ )
+ {
+ if ( ((int) dispFrame.mBuffer ) == (int)mGrallocHandleMap[i] )
+ {
+ break;
+ }
+ }
+
+ if ( mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_STARTED &&
+ (!mPaused || CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType) &&
+ !mSuspend)
+ {
+ Mutex::Autolock lock(mLock);
+ uint32_t xOff = (dispFrame.mOffset% PAGE_SIZE);
+ uint32_t yOff = (dispFrame.mOffset / PAGE_SIZE);
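+        // The camera adapter appears to pack the crop origin into mOffset: the byte
+        // offset within a row in the remainder (mod PAGE_SIZE) and the row index in
+        // multiples of PAGE_SIZE, which is what the modulo/divide above decodes.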
+
+ // Set crop only if current x and y offsets do not match with frame offsets
+ if((mXOff!=xOff) || (mYOff!=yOff))
+ {
+ CAMHAL_LOGDB("Offset %d xOff = %d, yOff = %d", dispFrame.mOffset, xOff, yOff);
+ uint8_t bytesPerPixel;
+ ///Calculate bytes per pixel based on the pixel format
+ if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ {
+ bytesPerPixel = 2;
+ }
+ else if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ {
+ bytesPerPixel = 2;
+ }
+ else if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
+ {
+ bytesPerPixel = 1;
+ }
+ else
+ {
+ bytesPerPixel = 1;
+ }
+
+ CAMHAL_LOGVB(" crop.left = %d crop.top = %d crop.right = %d crop.bottom = %d",
+ xOff/bytesPerPixel, yOff , (xOff/bytesPerPixel)+mPreviewWidth, yOff+mPreviewHeight);
+ // We'll ignore any errors here, if the surface is
+ // already invalid, we'll know soon enough.
+ mANativeWindow->set_crop(mANativeWindow, xOff/bytesPerPixel, yOff,
+ (xOff/bytesPerPixel)+mPreviewWidth, yOff+mPreviewHeight);
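+            // xOff is expressed in bytes, so it is divided by bytesPerPixel to get the
+            // left pixel coordinate; yOff is already a row index and is used as-is.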
+
+ ///Update the current x and y offsets
+ mXOff = xOff;
+ mYOff = yOff;
+ }
+
+ ret = mANativeWindow->enqueue_buffer(mANativeWindow, mBufferHandleMap[i]);
+ if (ret != 0) {
+ LOGE("Surface::queueBuffer returned error %d", ret);
+ }
+
+ mFramesWithCameraAdapterMap.removeItem((int) dispFrame.mBuffer);
+
+
+        // HWComposer has no minimum buffer requirement, so we should be able to dequeue
+        // the buffer immediately
+ TIUTILS::Message msg;
+ mDisplayQ.put(&msg);
+
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ if ( mMeasureStandby )
+ {
+ CameraHal::PPM("Standby to first shot: Sensor Change completed - ", &mStandbyToShot);
+ mMeasureStandby = false;
+ }
+ else if (CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType)
+ {
+ CameraHal::PPM("Shot to snapshot: ", &mStartCapture);
+ mShotToShot = true;
+ }
+ else if ( mShotToShot )
+ {
+ CameraHal::PPM("Shot to shot: ", &mStartCapture);
+ mShotToShot = false;
+ }
+#endif
+
+ }
+ else
+ {
+ Mutex::Autolock lock(mLock);
+ // cancel buffer and dequeue another one
+ ret = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[i]);
+ if (ret != 0) {
+            LOGE("Surface::cancelBuffer returned error %d", ret);
+ }
+
+ mFramesWithCameraAdapterMap.removeItem((int) dispFrame.mBuffer);
+
+ TIUTILS::Message msg;
+ mDisplayQ.put(&msg);
+ ret = NO_ERROR;
+ }
+
+ return ret;
+}
+
+
+bool ANativeWindowDisplayAdapter::handleFrameReturn()
+{
+ status_t err;
+ buffer_handle_t* buf;
+ int i = 0;
+ int stride; // dummy variable to get stride
+ // TODO(XXX): Do we need to keep stride information in camera hal?
+
+ err = mANativeWindow->dequeue_buffer(mANativeWindow, &buf, &stride);
+ if (err != 0) {
+ CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ }
+
+ err = mANativeWindow->lock_buffer(mANativeWindow, buf);
+ if (err != 0) {
+ CAMHAL_LOGEB("lockbuffer failed: %s (%d)", strerror(-err), -err);
+ }
+
+ for(i = 0; i < mBufferCount; i++)
+ {
+ if (mBufferHandleMap[i] == buf)
+ break;
+ }
+
+ mFramesWithCameraAdapterMap.add((int) mGrallocHandleMap[i], i);
+
+ CAMHAL_LOGVB("handleFrameReturn: found graphic buffer %d of %d", i, mBufferCount-1);
+ mFrameProvider->returnFrame( (void*)mGrallocHandleMap[i], CameraFrame::PREVIEW_FRAME_SYNC);
+ return true;
+}
+
+void ANativeWindowDisplayAdapter::frameCallbackRelay(CameraFrame* caFrame)
+{
+
+ if ( NULL != caFrame )
+ {
+ if ( NULL != caFrame->mCookie )
+ {
+ ANativeWindowDisplayAdapter *da = (ANativeWindowDisplayAdapter*) caFrame->mCookie;
+ da->frameCallback(caFrame);
+ }
+ else
+ {
+ CAMHAL_LOGEB("Invalid Cookie in Camera Frame = %p, Cookie = %p", caFrame, caFrame->mCookie);
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEB("Invalid Camera Frame = %p", caFrame);
+ }
+
+}
+
+void ANativeWindowDisplayAdapter::frameCallback(CameraFrame* caFrame)
+{
+ ///Call queueBuffer of overlay in the context of the callback thread
+ DisplayFrame df;
+ df.mBuffer = caFrame->mBuffer;
+ df.mType = (CameraFrame::FrameType) caFrame->mFrameType;
+ df.mOffset = caFrame->mOffset;
+ df.mWidthStride = caFrame->mAlignment;
+ df.mLength = caFrame->mLength;
+ df.mWidth = caFrame->mWidth;
+ df.mHeight = caFrame->mHeight;
+ PostFrame(df);
+}
+
+
+/*--------------------ANativeWindowDisplayAdapter Class ENDS here-----------------------------*/
+
+};
+
diff --git a/camera/Android.mk b/camera/Android.mk
new file mode 100644
index 0000000..0e2bb8c
--- /dev/null
+++ b/camera/Android.mk
@@ -0,0 +1,131 @@
+ifeq ($(TARGET_BOARD_PLATFORM),omap4)
+
+LOCAL_PATH:= $(call my-dir)
+
+OMAP4_CAMERA_HAL_USES:= OMX
+# OMAP4_CAMERA_HAL_USES:= USB
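+# Select exactly one adapter above: OMX builds against the OpenMAX (DOMX) camera
+# adapter sources, while USB builds the V4L2-based V4LCameraAdapter instead
+# (see the two build branches below).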
+
+OMAP4_CAMERA_HAL_SRC := \
+ CameraHal_Module.cpp \
+ CameraHal.cpp \
+ CameraHalUtilClasses.cpp \
+ AppCallbackNotifier.cpp \
+ ANativeWindowDisplayAdapter.cpp \
+ CameraProperties.cpp \
+ MemoryManager.cpp \
+ SensorListener.cpp
+
+OMAP4_CAMERA_COMMON_SRC:= \
+ CameraParameters.cpp \
+ TICameraParameters.cpp \
+ CameraHalCommon.cpp
+
+OMAP4_CAMERA_OMX_SRC:= \
+ BaseCameraAdapter.cpp \
+ OMXCameraAdapter/OMX3A.cpp \
+ OMXCameraAdapter/OMXAlgo.cpp \
+ OMXCameraAdapter/OMXCameraAdapter.cpp \
+ OMXCameraAdapter/OMXCapabilities.cpp \
+ OMXCameraAdapter/OMXCapture.cpp \
+ OMXCameraAdapter/OMXDefaults.cpp \
+ OMXCameraAdapter/OMXExif.cpp \
+ OMXCameraAdapter/OMXFD.cpp \
+ OMXCameraAdapter/OMXFocus.cpp \
+ OMXCameraAdapter/OMXZoom.cpp \
+
+OMAP4_CAMERA_USB_SRC:= \
+ BaseCameraAdapter.cpp \
+ V4LCameraAdapter/V4LCameraAdapter.cpp
+
+#
+# OMX Camera HAL
+#
+
+ifeq ($(OMAP4_CAMERA_HAL_USES),OMX)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ $(OMAP4_CAMERA_HAL_SRC) \
+ $(OMAP4_CAMERA_OMX_SRC) \
+ $(OMAP4_CAMERA_COMMON_SRC)
+
+LOCAL_C_INCLUDES += \
+ $(LOCAL_PATH)/inc/ \
+ $(LOCAL_PATH)/../hwc \
+ $(LOCAL_PATH)/../include \
+ $(LOCAL_PATH)/inc/OMXCameraAdapter \
+ $(LOCAL_PATH)/../libtiutils \
+ hardware/ti/omap4xxx/tiler \
+ frameworks/base/include/ui \
+ frameworks/base/include/utils \
+ hardware/ti/omap4xxx/domx/omx_core/inc \
+ hardware/ti/omap4xxx/domx/mm_osal/inc \
+ frameworks/base/include/media/stagefright \
+ frameworks/base/include/media/stagefright/openmax
+
+LOCAL_SHARED_LIBRARIES:= \
+ libui \
+ libbinder \
+ libutils \
+ libcutils \
+ libtiutils \
+ libmm_osal \
+ libOMX_Core \
+ libtimemmgr \
+ libcamera_client \
+ libgui \
+ libdomx
+
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER
+
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+LOCAL_MODULE:= camera.$(TARGET_BOARD_PLATFORM)
+LOCAL_MODULE_TAGS:= optional
+
+include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
+
+else
+ifeq ($(OMAP4_CAMERA_HAL_USES),USB)
+
+#
+# USB Camera Adapter
+#
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ $(OMAP4_CAMERA_HAL_SRC) \
+ $(OMAP4_CAMERA_USB_SRC) \
+ $(OMAP4_CAMERA_COMMON_SRC)
+
+LOCAL_C_INCLUDES += \
+ $(LOCAL_PATH)/inc/ \
+ $(LOCAL_PATH)/../hwc \
+ $(LOCAL_PATH)/../include \
+ $(LOCAL_PATH)/inc/V4LCameraAdapter \
+ $(LOCAL_PATH)/../libtiutils \
+ hardware/ti/omap4xxx/tiler \
+ frameworks/base/include/ui \
+ frameworks/base/include/utils \
+ frameworks/base/include/media/stagefright/openmax
+
+LOCAL_SHARED_LIBRARIES:= \
+ libui \
+ libbinder \
+ libutils \
+ libcutils \
+ libtiutils \
+ libtimemmgr \
+ libcamera_client
+
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER
+
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+LOCAL_MODULE:= camera.$(TARGET_BOARD_PLATFORM)
+LOCAL_MODULE_TAGS:= optional
+
+include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
+endif
+endif
+endif
diff --git a/camera/AppCallbackNotifier.cpp b/camera/AppCallbackNotifier.cpp
new file mode 100644
index 0000000..d266cc2
--- /dev/null
+++ b/camera/AppCallbackNotifier.cpp
@@ -0,0 +1,1399 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+
+#define LOG_TAG "CameraHAL"
+
+
+#include "CameraHal.h"
+#include "VideoMetadata.h"
+#include <MetadataBufferType.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+
+
+namespace android {
+
+const int AppCallbackNotifier::NOTIFIER_TIMEOUT = -1;
+
+/*--------------------NotificationHandler Class STARTS here-----------------------------*/
+
+/**
+ * NotificationHandler class
+ */
+
+
+///Initialization function for AppCallbackNotifier
+status_t AppCallbackNotifier::initialize()
+{
+ LOG_FUNCTION_NAME;
+
+ mMeasurementEnabled = false;
+
+ ///Create the app notifier thread
+ mNotificationThread = new NotificationThread(this);
+ if(!mNotificationThread.get())
+ {
+ CAMHAL_LOGEA("Couldn't create Notification thread");
+ return NO_MEMORY;
+ }
+
+ ///Start the display thread
+ status_t ret = mNotificationThread->run("NotificationThread", PRIORITY_URGENT_DISPLAY);
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEA("Couldn't run NotificationThread");
+ mNotificationThread.clear();
+ return ret;
+ }
+
+ mUseMetaDataBufferMode = true;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void AppCallbackNotifier::setCallbacks(CameraHal* cameraHal,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME;
+
+ mCameraHal = cameraHal;
+ mNotifyCb = notify_cb;
+ mDataCb = data_cb;
+ mDataCbTimestamp = data_cb_timestamp;
+ mRequestMemory = get_memory;
+ mCallbackCookie = user;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::setMeasurements(bool enable)
+{
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME;
+
+ mMeasurementEnabled = enable;
+
+ if ( enable )
+ {
+ mFrameProvider->enableFrameNotification(CameraFrame::FRAME_DATA_SYNC);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+//All sub-components of Camera HAL call this whenever any error happens
+void AppCallbackNotifier::errorNotify(int error)
+{
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGEB("AppCallbackNotifier received error %d", error);
+
+ ///Notify errors to application in callback thread. Post error event to event queue
+ TIUTILS::Message msg;
+ msg.command = AppCallbackNotifier::NOTIFIER_CMD_PROCESS_ERROR;
+ msg.arg1 = (void*)error;
+
+ mEventQ.put(&msg);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::notificationThread()
+{
+ bool shouldLive = true;
+ status_t ret;
+
+ LOG_FUNCTION_NAME;
+
+ while(shouldLive)
+ {
+ //CAMHAL_LOGDA("Notification Thread waiting for message");
+ ret = TIUTILS::MessageQueue::waitForMsg(&mNotificationThread->msgQ(),
+ &mEventQ,
+ &mFrameQ,
+ AppCallbackNotifier::NOTIFIER_TIMEOUT);
+
+ //CAMHAL_LOGDA("Notification Thread received message");
+
+ if(mNotificationThread->msgQ().hasMsg())
+ {
+ ///Received a message from CameraHal, process it
+ CAMHAL_LOGDA("Notification Thread received message from Camera HAL");
+ shouldLive = processMessage();
+ if(!shouldLive)
+ {
+ CAMHAL_LOGDA("Notification Thread exiting.");
+ }
+ }
+ if(mEventQ.hasMsg())
+ {
+ ///Received an event from one of the event providers
+ CAMHAL_LOGDA("Notification Thread received an event from event provider (CameraAdapter)");
+ notifyEvent();
+ }
+ if(mFrameQ.hasMsg())
+ {
+ ///Received a frame from one of the frame providers
+ //CAMHAL_LOGDA("Notification Thread received a frame from frame provider (CameraAdapter)");
+ notifyFrame();
+ }
+ }
+
+ CAMHAL_LOGDA("Notification Thread exited.");
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+void AppCallbackNotifier::notifyEvent()
+{
+ ///Receive and send the event notifications to app
+ TIUTILS::Message msg;
+ LOG_FUNCTION_NAME;
+ mEventQ.get(&msg);
+ bool ret = true;
+ CameraHalEvent *evt = NULL;
+ CameraHalEvent::FocusEventData *focusEvtData;
+ CameraHalEvent::ZoomEventData *zoomEvtData;
+ CameraHalEvent::FaceEventData faceEvtData;
+
+ if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED)
+ {
+ return;
+ }
+
+ switch(msg.command)
+ {
+ case AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT:
+
+ evt = ( CameraHalEvent * ) msg.arg1;
+
+ if ( NULL == evt )
+ {
+ CAMHAL_LOGEA("Invalid CameraHalEvent");
+ return;
+ }
+
+ switch(evt->mEventType)
+ {
+ case CameraHalEvent::EVENT_SHUTTER:
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_SHUTTER) ) )
+ {
+ mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie);
+ }
+
+ break;
+
+ case CameraHalEvent::EVENT_FOCUS_LOCKED:
+ case CameraHalEvent::EVENT_FOCUS_ERROR:
+
+ focusEvtData = &evt->mEventData->focusEvent;
+ if ( ( focusEvtData->focusLocked ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) )
+ {
+ mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie);
+ }
+ else if ( focusEvtData->focusError &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) )
+ {
+ mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie);
+ }
+
+ break;
+
+ case CameraHalEvent::EVENT_ZOOM_INDEX_REACHED:
+
+ zoomEvtData = &evt->mEventData->zoomEvent;
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_ZOOM) ) )
+ {
+ mNotifyCb(CAMERA_MSG_ZOOM, zoomEvtData->currentZoomIndex, zoomEvtData->targetZoomIndexReached, mCallbackCookie);
+ }
+
+ break;
+
+ case CameraHalEvent::EVENT_FACE:
+
+ faceEvtData = evt->mEventData->faceEvent;
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA) ) )
+ {
+ // WA for an issue inside CameraService
+ camera_memory_t *tmpBuffer = mRequestMemory(-1, 1, 1, NULL);
+
+ mDataCb(CAMERA_MSG_PREVIEW_METADATA,
+ tmpBuffer,
+ 0,
+ faceEvtData->getFaceResult(),
+ mCallbackCookie);
+
+ faceEvtData.clear();
+
+ if ( NULL != tmpBuffer ) {
+ tmpBuffer->release(tmpBuffer);
+ }
+
+ }
+
+ break;
+
+ case CameraHalEvent::ALL_EVENTS:
+ break;
+ default:
+ break;
+ }
+
+ break;
+
+ case AppCallbackNotifier::NOTIFIER_CMD_PROCESS_ERROR:
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_ERROR) ) )
+ {
+ mNotifyCb(CAMERA_MSG_ERROR, CAMERA_ERROR_UNKNOWN, 0, mCallbackCookie);
+ }
+
+ break;
+
+ }
+
+ if ( NULL != evt )
+ {
+ delete evt;
+ }
+
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+static void copy2Dto1D(void *dst,
+ void *src,
+ int width,
+ int height,
+ size_t stride,
+ uint32_t offset,
+ unsigned int bytesPerPixel,
+ size_t length,
+ const char *pixelFormat)
+{
+ unsigned int alignedRow, row;
+ unsigned char *bufferDst, *bufferSrc;
+ unsigned char *bufferDstEnd, *bufferSrcEnd;
+ uint16_t *bufferSrc_UV;
+ void *y_uv[2]; //y_uv[0]=> y pointer; y_uv[1]=>uv pointer
+
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ Rect bounds;
+
+ bounds.left = offset % stride;
+ bounds.top = offset / stride;
+ bounds.right = width;
+ bounds.bottom = height;
+
+ // get the y & uv pointers from the gralloc handle;
+ mapper.lock((buffer_handle_t)src, GRALLOC_USAGE_SW_READ_OFTEN, bounds, y_uv);
+
+ CAMHAL_LOGDB("copy2Dto1D() y= %p ; uv=%p.",y_uv[0],y_uv[1]);
+    CAMHAL_LOGDB("pixelFormat = %d; offset = %d", *pixelFormat, offset);
+
+ if (pixelFormat!=NULL) {
+ if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ bytesPerPixel = 2;
+ } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ bytesPerPixel = 1;
+ bufferDst = ( unsigned char * ) dst;
+ bufferDstEnd = ( unsigned char * ) dst + width*height*bytesPerPixel;
+ bufferSrc = ( unsigned char * ) y_uv[0] + offset;
+ bufferSrcEnd = ( unsigned char * ) ( ( size_t ) y_uv[0] + length + offset);
+ row = width*bytesPerPixel;
+ alignedRow = stride-width;
+ int stride_bytes = stride / 8;
+ uint32_t xOff = offset % stride;
+ uint32_t yOff = offset / stride;
+
+ // going to convert from NV12 here and return
+ // Step 1: Y plane: iterate through each row and copy
+ for ( int i = 0 ; i < height ; i++) {
+ memcpy(bufferDst, bufferSrc, row);
+ bufferSrc += stride;
+ bufferDst += row;
+ if ( ( bufferSrc > bufferSrcEnd ) || ( bufferDst > bufferDstEnd ) ) {
+ break;
+ }
+ }
+
+ bufferSrc_UV = ( uint16_t * ) y_uv[1] + (stride/2)*yOff + xOff;
+
+ if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ uint16_t *bufferDst_UV;
+
+ // Step 2: UV plane: convert NV12 to NV21 by swapping U & V
+ bufferDst_UV = (uint16_t *) (((uint8_t*)dst)+row*height);
+
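+            // Sketch of the NEON loop below: vld2.8 de-interleaves the U/V byte pairs
+            // into two registers, vswp exchanges them, and vst2.8 re-interleaves them
+            // on store, working in 32-, 16- and 8-byte chunks as the remaining row
+            // width shrinks.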
+ for (int i = 0 ; i < height/2 ; i++, bufferSrc_UV += alignedRow/2) {
+ int n = width;
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #32 \n\t"
+ " blt 1f \n\t"
+ "0: @ 32 byte swap \n\t"
+ " sub %[n], %[n], #32 \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! \n\t"
+ " vswp q0, q1 \n\t"
+ " cmp %[n], #32 \n\t"
+ " vst2.8 {q0,q1},[%[dst]]! \n\t"
+ " bge 0b \n\t"
+ "1: @ Is there enough data? \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 3f \n\t"
+ "2: @ 16 byte swap \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " vld2.8 {d0, d1} , [%[src]]! \n\t"
+ " vswp d0, d1 \n\t"
+ " cmp %[n], #16 \n\t"
+ " vst2.8 {d0,d1},[%[dst]]! \n\t"
+ " bge 2b \n\t"
+ "3: @ Is there enough data? \n\t"
+ " cmp %[n], #8 \n\t"
+ " blt 5f \n\t"
+ "4: @ 8 byte swap \n\t"
+ " sub %[n], %[n], #8 \n\t"
+ " vld2.8 {d0, d1} , [%[src]]! \n\t"
+ " vswp d0, d1 \n\t"
+ " cmp %[n], #8 \n\t"
+ " vst2.8 {d0[0],d1[0]},[%[dst]]! \n\t"
+ " bge 4b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst] "+r" (bufferDst_UV), [src] "+r" (bufferSrc_UV), [n] "+r" (n)
+ : [src_stride] "r" (stride_bytes)
+ : "cc", "memory", "q0", "q1"
+ );
+ }
+ } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ uint16_t *bufferDst_U;
+ uint16_t *bufferDst_V;
+
+ // Step 2: UV plane: convert NV12 to YV12 by de-interleaving U & V
+            // TODO(XXX): This version of CameraHal assumes NV12 format is set at the
+            //            camera adapter to support YV12. Need to address this for
+            //            USBCamera
+
+ bufferDst_V = (uint16_t *) (((uint8_t*)dst)+row*height);
+ bufferDst_U = (uint16_t *) (((uint8_t*)dst)+row*height+row*height/4);
+
+ for (int i = 0 ; i < height/2 ; i++, bufferSrc_UV += alignedRow/2) {
+ int n = width;
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #32 \n\t"
+ " blt 1f \n\t"
+ "0: @ 32 byte swap \n\t"
+ " sub %[n], %[n], #32 \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! \n\t"
+ " cmp %[n], #32 \n\t"
+ " vst1.8 {q1},[%[dst_v]]! \n\t"
+ " vst1.8 {q0},[%[dst_u]]! \n\t"
+ " bge 0b \n\t"
+ "1: @ Is there enough data? \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 3f \n\t"
+ "2: @ 16 byte swap \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " vld2.8 {d0, d1} , [%[src]]! \n\t"
+ " cmp %[n], #16 \n\t"
+ " vst1.8 {d1},[%[dst_v]]! \n\t"
+ " vst1.8 {d0},[%[dst_u]]! \n\t"
+ " bge 2b \n\t"
+ "3: @ Is there enough data? \n\t"
+ " cmp %[n], #8 \n\t"
+ " blt 5f \n\t"
+ "4: @ 8 byte swap \n\t"
+ " sub %[n], %[n], #8 \n\t"
+ " vld2.8 {d0, d1} , [%[src]]! \n\t"
+ " cmp %[n], #8 \n\t"
+ " vst1.8 {d1[0]},[%[dst_v]]! \n\t"
+ " vst1.8 {d0[0]},[%[dst_u]]! \n\t"
+ " bge 4b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst_u] "+r" (bufferDst_U), [dst_v] "+r" (bufferDst_V),
+ [src] "+r" (bufferSrc_UV), [n] "+r" (n)
+ : [src_stride] "r" (stride_bytes)
+ : "cc", "memory", "q0", "q1"
+ );
+ }
+ }
+ mapper.unlock((buffer_handle_t)src);
+ return ;
+
+ } else if(strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ bytesPerPixel = 2;
+ }
+ }
+
+ bufferDst = ( unsigned char * ) dst;
+ bufferSrc = ( unsigned char * ) y_uv[0];
+ row = width*bytesPerPixel;
+ alignedRow = ( row + ( stride -1 ) ) & ( ~ ( stride -1 ) );
+
+ //iterate through each row
+ for ( int i = 0 ; i < height ; i++, bufferSrc += alignedRow, bufferDst += row) {
+ memcpy(bufferDst, bufferSrc, row);
+ }
+ mapper.unlock((buffer_handle_t)src);
+}
+
+void AppCallbackNotifier::notifyFrame()
+{
+ ///Receive and send the frame notifications to app
+ TIUTILS::Message msg;
+ CameraFrame *frame;
+ MemoryHeapBase *heap;
+ MemoryBase *buffer = NULL;
+ sp<MemoryBase> memBase;
+ void *buf = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if(!mFrameQ.isEmpty())
+ {
+ mFrameQ.get(&msg);
+ }
+ else
+ {
+ return;
+ }
+
+ bool ret = true;
+
+ if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED)
+ {
+ return;
+ }
+
+ frame = NULL;
+ switch(msg.command)
+ {
+ case AppCallbackNotifier::NOTIFIER_CMD_PROCESS_FRAME:
+
+ frame = (CameraFrame *) msg.arg1;
+ if(!frame)
+ {
+ break;
+ }
+
+ if ( (CameraFrame::RAW_FRAME == frame->mFrameType )&&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE) ) )
+ {
+
+#ifdef COPY_IMAGE_BUFFER
+
+ camera_memory_t* raw_picture = mRequestMemory(-1, frame->mLength, 1, NULL);
+
+ if ( NULL != raw_picture )
+ {
+ buf = raw_picture->data;
+ if ( NULL != buf )
+ {
+ memcpy(buf,
+ ( void * ) ( (unsigned int) frame->mBuffer + frame->mOffset),
+ frame->mLength);
+ }
+ mFrameProvider->returnFrame(frame->mBuffer,
+ ( CameraFrame::FrameType ) frame->mFrameType);
+ }
+
+ mDataCb(CAMERA_MSG_RAW_IMAGE, raw_picture, 0, NULL, mCallbackCookie);
+#else
+
+ //TODO: Find a way to map a Tiler buffer to a MemoryHeapBase
+
+#endif
+ if(raw_picture)
+ {
+ raw_picture->release(raw_picture);
+ }
+
+ }
+ else if ( ( CameraFrame::IMAGE_FRAME == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) )
+ {
+ Mutex::Autolock lock(mLock);
+
+#ifdef COPY_IMAGE_BUFFER
+
+ camera_memory_t* raw_picture = mRequestMemory(-1, frame->mLength, 1, NULL);
+
+ if(raw_picture)
+ {
+ buf = raw_picture->data;
+ }
+
+ if ( NULL != buf)
+ {
+ memcpy(buf,
+ ( void * ) ( (unsigned int) frame->mBuffer + frame->mOffset),
+ frame->mLength);
+ }
+
+ {
+ Mutex::Autolock lock(mBurstLock);
+#if 0 //TODO: enable burst mode later
+ if ( mBurst )
+ {
+                mDataCb(CAMERA_MSG_BURST_IMAGE, JPEGPictureMemBase, mCallbackCookie);
+ }
+ else
+#endif
+ {
+ mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, raw_picture,
+ 0, NULL,
+ mCallbackCookie);
+ }
+ }
+#else
+
+ //TODO: Find a way to map a Tiler buffer to a MemoryHeapBase
+
+#endif
+ if(raw_picture)
+ {
+ raw_picture->release(raw_picture);
+ }
+
+ mFrameProvider->returnFrame(frame->mBuffer,
+ ( CameraFrame::FrameType ) frame->mFrameType);
+
+ }
+ else if ( ( CameraFrame::VIDEO_FRAME_SYNC == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME) ) )
+ {
+ mRecordingLock.lock();
+ if(mRecording)
+ {
+ if(mUseMetaDataBufferMode)
+ {
+ camera_memory_t *videoMedatadaBufferMemory =
+ (camera_memory_t *) mVideoMetadataBufferMemoryMap.valueFor((uint32_t) frame->mBuffer);
+ video_metadata_t *videoMetadataBuffer = (video_metadata_t *) videoMedatadaBufferMemory->data;
+
+ if( (NULL == videoMedatadaBufferMemory) || (NULL == videoMetadataBuffer) || (NULL == frame->mBuffer) )
+ {
+ CAMHAL_LOGEA("Error! One of the video buffers is NULL");
+ break;
+ }
+
+ videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
+ videoMetadataBuffer->handle = frame->mBuffer;
+ videoMetadataBuffer->offset = frame->mOffset;
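+                    // In metadata mode only this small descriptor is passed to the encoder
+                    // (stagefright is expected to resolve the gralloc handle/offset itself),
+                    // so no pixel data is copied on the record path.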
+
+ CAMHAL_LOGVB("mDataCbTimestamp : frame->mBuffer=0x%x, videoMetadataBuffer=0x%x, videoMedatadaBufferMemory=0x%x",
+ frame->mBuffer, videoMetadataBuffer, videoMedatadaBufferMemory);
+
+ mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME,
+ videoMedatadaBufferMemory, 0, mCallbackCookie);
+ }
+ else
+ {
+ //TODO: Need to revisit this, should ideally be mapping the TILER buffer using mRequestMemory
+ camera_memory_t* fakebuf = mRequestMemory(-1, 4, 1, NULL);
+ if( (NULL == fakebuf) || ( NULL == fakebuf->data) || ( NULL == frame->mBuffer))
+ {
+ CAMHAL_LOGEA("Error! One of the video buffers is NULL");
+ break;
+ }
+
+ fakebuf->data = frame->mBuffer;
+ mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME, fakebuf, 0, mCallbackCookie);
+ fakebuf->release(fakebuf);
+ }
+ }
+ mRecordingLock.unlock();
+
+ }
+ else if(( CameraFrame::SNAPSHOT_FRAME == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( NULL != mNotifyCb)) {
+ Mutex::Autolock lock(mLock);
+ //When enabled, measurement data is sent instead of video data
+ if ( !mMeasurementEnabled ) {
+ if (!mPreviewMemory || !frame->mBuffer) {
+                    CAMHAL_LOGDA("Error! One of the buffers is NULL");
+ break;
+ }
+
+ buf = (void*) mPreviewBufs[mPreviewBufCount];
+
+ CAMHAL_LOGVB("%d:copy2Dto1D(%p, %p, %d, %d, %d, %d, %d,%s)",
+ __LINE__,
+ buf,
+ frame->mBuffer,
+ frame->mWidth,
+ frame->mHeight,
+ frame->mAlignment,
+ 2,
+ frame->mLength,
+ mPreviewPixelFormat);
+
+ if ( NULL != buf ) {
+ copy2Dto1D(buf,
+ frame->mBuffer,
+ frame->mWidth,
+ frame->mHeight,
+ frame->mAlignment,
+ frame->mOffset,
+ 2,
+ frame->mLength,
+ mPreviewPixelFormat);
+ }
+
+ if (mCameraHal->msgTypeEnabled(CAMERA_MSG_POSTVIEW_FRAME)) {
+ ///Give preview callback to app
+ mDataCb(CAMERA_MSG_POSTVIEW_FRAME, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie);
+ }
+
+ // increment for next buffer
+ mPreviewBufCount = (mPreviewBufCount+1) % AppCallbackNotifier::MAX_BUFFERS;
+ }
+
+ mFrameProvider->returnFrame(frame->mBuffer,
+ ( CameraFrame::FrameType ) frame->mFrameType);
+ } else if ( ( CameraFrame::PREVIEW_FRAME_SYNC== frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) ) {
+ Mutex::Autolock lock(mLock);
+ //When enabled, measurement data is sent instead of video data
+ if ( !mMeasurementEnabled ) {
+ if (!mPreviewMemory || !frame->mBuffer) {
+                    CAMHAL_LOGDA("Error! One of the buffers is NULL");
+ break;
+ }
+
+ buf = (void*) mPreviewBufs[mPreviewBufCount];
+
+ CAMHAL_LOGVB("%d:copy2Dto1D(%p, %p, %d, %d, %d, %d, %d,%s)",
+ __LINE__,
+ buf,
+ frame->mBuffer,
+ frame->mWidth,
+ frame->mHeight,
+ frame->mAlignment,
+ 2,
+ frame->mLength,
+ mPreviewPixelFormat);
+
+ if ( NULL != buf ) {
+ copy2Dto1D(buf,
+ frame->mBuffer,
+ frame->mWidth,
+ frame->mHeight,
+ frame->mAlignment,
+ frame->mOffset,
+ 2,
+ frame->mLength,
+ mPreviewPixelFormat);
+ }
+
+ // Give preview callback to app
+ mDataCb(CAMERA_MSG_PREVIEW_FRAME, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie);
+
+ // increment for next buffer
+ mPreviewBufCount = (mPreviewBufCount+1) % AppCallbackNotifier::MAX_BUFFERS;
+ }
+ mFrameProvider->returnFrame(frame->mBuffer,
+ ( CameraFrame::FrameType ) frame->mFrameType);
+ } else if ( ( CameraFrame::FRAME_DATA_SYNC == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) ) {
+ if (!mPreviewMemory || !frame->mBuffer) {
+                CAMHAL_LOGDA("Error! One of the buffers is NULL");
+ break;
+ }
+
+ buf = (void*) mPreviewBufs[mPreviewBufCount];
+ if (buf) {
+ if ( (mPreviewMemory->size / MAX_BUFFERS) >= frame->mLength ) {
+ memcpy(buf, ( void * ) frame->mBuffer, frame->mLength);
+ } else {
+ memset(buf, 0, (mPreviewMemory->size / MAX_BUFFERS));
+ }
+ }
+
+ // Give preview callback to app
+ mDataCb(CAMERA_MSG_PREVIEW_FRAME, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie);
+
+ //Increment the buffer count
+ mPreviewBufCount = (mPreviewBufCount+1) % AppCallbackNotifier::MAX_BUFFERS;
+
+ mFrameProvider->returnFrame(frame->mBuffer,
+ ( CameraFrame::FrameType ) frame->mFrameType);
+ } else {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ ( CameraFrame::FrameType ) frame->mFrameType);
+ CAMHAL_LOGDB("Frame type 0x%x is still unsupported!", frame->mFrameType);
+ }
+
+ break;
+
+ default:
+
+ break;
+
+ };
+
+exit:
+
+ if ( NULL != frame )
+ {
+ delete frame;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::frameCallbackRelay(CameraFrame* caFrame)
+{
+ LOG_FUNCTION_NAME;
+ AppCallbackNotifier *appcbn = (AppCallbackNotifier*) (caFrame->mCookie);
+ appcbn->frameCallback(caFrame);
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::frameCallback(CameraFrame* caFrame)
+{
+ ///Post the event to the event queue of AppCallbackNotifier
+ TIUTILS::Message msg;
+ CameraFrame *frame;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != caFrame )
+ {
+
+ frame = new CameraFrame(*caFrame);
+ if ( NULL != frame )
+ {
+ msg.command = AppCallbackNotifier::NOTIFIER_CMD_PROCESS_FRAME;
+ msg.arg1 = frame;
+ mFrameQ.put(&msg);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Not enough resources to allocate CameraFrame");
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+void AppCallbackNotifier::eventCallbackRelay(CameraHalEvent* chEvt)
+{
+ LOG_FUNCTION_NAME;
+ AppCallbackNotifier *appcbn = (AppCallbackNotifier*) (chEvt->mCookie);
+ appcbn->eventCallback(chEvt);
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::eventCallback(CameraHalEvent* chEvt)
+{
+
+ ///Post the event to the event queue of AppCallbackNotifier
+ TIUTILS::Message msg;
+ CameraHalEvent *event;
+
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != chEvt )
+ {
+
+ event = new CameraHalEvent(*chEvt);
+ if ( NULL != event )
+ {
+ msg.command = AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT;
+ msg.arg1 = event;
+ mEventQ.put(&msg);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Not enough resources to allocate CameraHalEvent");
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+bool AppCallbackNotifier::processMessage()
+{
+ ///Retrieve the command from the command queue and process it
+ TIUTILS::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGDA("+Msg get...");
+ mNotificationThread->msgQ().get(&msg);
+ CAMHAL_LOGDA("-Msg get...");
+ bool ret = true;
+
+ switch(msg.command)
+ {
+ case NotificationThread::NOTIFIER_EXIT:
+ {
+ CAMHAL_LOGEA("Received NOTIFIER_EXIT command from Camera HAL");
+ mNotifierState = AppCallbackNotifier::NOTIFIER_EXITED;
+ ret = false;
+ break;
+ }
+ default:
+ {
+            CAMHAL_LOGEA("Error: Unknown command received in processMessage() from Camera HAL");
+ break;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+
+
+}
+
+AppCallbackNotifier::~AppCallbackNotifier()
+{
+ LOG_FUNCTION_NAME;
+
+ ///Stop app callback notifier if not already stopped
+ stop();
+
+ ///Unregister with the frame provider
+ if ( NULL != mFrameProvider )
+ {
+ mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
+ }
+
+ //unregister with the event provider
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
+ }
+
+ TIUTILS::Message msg = {0,0,0,0,0,0};
+ msg.command = NotificationThread::NOTIFIER_EXIT;
+
+ ///Post the message to display thread
+ mNotificationThread->msgQ().put(&msg);
+
+ //Exit and cleanup the thread
+ mNotificationThread->requestExitAndWait();
+
+ //Delete the display thread
+ mNotificationThread.clear();
+
+
+ ///Free the event and frame providers
+ if ( NULL != mEventProvider )
+ {
+ ///Deleting the event provider
+ CAMHAL_LOGDA("Stopping Event Provider");
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ if ( NULL != mFrameProvider )
+ {
+ ///Deleting the frame provider
+ CAMHAL_LOGDA("Stopping Frame Provider");
+ delete mFrameProvider;
+ mFrameProvider = NULL;
+ }
+
+ releaseSharedVideoBuffers();
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+//Free all video heaps and buffers
+void AppCallbackNotifier::releaseSharedVideoBuffers()
+{
+ LOG_FUNCTION_NAME;
+
+ if(mUseMetaDataBufferMode)
+ {
+ camera_memory_t* videoMedatadaBufferMemory;
+ for (unsigned int i = 0; i < mVideoMetadataBufferMemoryMap.size(); i++)
+ {
+ videoMedatadaBufferMemory = (camera_memory_t*) mVideoMetadataBufferMemoryMap.valueAt(i);
+ if(NULL != videoMedatadaBufferMemory)
+ {
+ videoMedatadaBufferMemory->release(videoMedatadaBufferMemory);
+ CAMHAL_LOGDB("Released videoMedatadaBufferMemory=0x%x", videoMedatadaBufferMemory);
+ }
+ }
+
+ mVideoMetadataBufferMemoryMap.clear();
+ mVideoMetadataBufferReverseMap.clear();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::setEventProvider(int32_t eventMask, MessageNotifier * eventNotifier)
+{
+
+ LOG_FUNCTION_NAME;
+ ///@remarks There is no NULL check here. We will check
+ ///for NULL when we get start command from CameraHal
+ ///@Remarks Currently only one event provider (CameraAdapter) is supported
+ ///@todo Have an array of event providers for each event bitmask
+ mEventProvider = new EventProvider(eventNotifier, this, eventCallbackRelay);
+ if ( NULL == mEventProvider )
+ {
+ CAMHAL_LOGEA("Error in creating EventProvider");
+ }
+ else
+ {
+ mEventProvider->enableEventNotification(eventMask);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::setFrameProvider(FrameNotifier *frameNotifier)
+{
+ LOG_FUNCTION_NAME;
+ ///@remarks There is no NULL check here. We will check
+ ///for NULL when we get the start command from CameraAdapter
+ mFrameProvider = new FrameProvider(frameNotifier, this, frameCallbackRelay);
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Error in creating FrameProvider");
+ }
+ else
+ {
+ //Register only for captured images and RAW for now
+ //TODO: Register for and handle all types of frames
+ mFrameProvider->enableFrameNotification(CameraFrame::IMAGE_FRAME);
+ mFrameProvider->enableFrameNotification(CameraFrame::RAW_FRAME);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t AppCallbackNotifier::startPreviewCallbacks(CameraParameters &params, void *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
+{
+ sp<MemoryHeapBase> heap;
+ sp<MemoryBase> buffer;
+ unsigned int *bufArr;
+ size_t size = 0;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to start video recording without FrameProvider");
+ return -EINVAL;
+ }
+
+ if ( mPreviewing )
+ {
+ CAMHAL_LOGDA("+Already previewing");
+ return NO_INIT;
+ }
+
+ int w,h;
+ ///Get preview size
+ params.getPreviewSize(&w, &h);
+
+ //Get the preview pixel format
+ mPreviewPixelFormat = params.getPreviewFormat();
+
+ if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ {
+ size = w*h*2;
+ mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
+ else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
+ {
+ size = (w*h*3)/2;
+ mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+ else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ {
+ size = w*h*2;
+ mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_RGB565;
+ }
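+
+    // Allocate a single camera_memory_t region and split it into MAX_BUFFERS
+    // equally sized slots; these serve as the destination buffers for the
+    // CAMERA_MSG_PREVIEW_FRAME data callbacks.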
+
+ mPreviewMemory = mRequestMemory(-1, size, AppCallbackNotifier::MAX_BUFFERS, NULL);
+ if (!mPreviewMemory) {
+ return NO_MEMORY;
+ }
+
+ for (int i=0; i < AppCallbackNotifier::MAX_BUFFERS; i++) {
+ mPreviewBufs[i] = (unsigned char*) mPreviewMemory->data + (i*size);
+ }
+
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME ) ) {
+ mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ }
+
+ mPreviewBufCount = 0;
+
+ mPreviewing = true;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+void AppCallbackNotifier::setBurst(bool burst)
+{
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mBurstLock);
+
+ mBurst = burst;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t AppCallbackNotifier::stopPreviewCallbacks()
+{
+ sp<MemoryHeapBase> heap;
+ sp<MemoryBase> buffer;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to stop preview callbacks without FrameProvider");
+ return -EINVAL;
+ }
+
+ if ( !mPreviewing )
+ {
+ return NO_INIT;
+ }
+
+ mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+
+ mPreviewMemory->release(mPreviewMemory);
+
+ mPreviewing = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+
+}
+
+status_t AppCallbackNotifier::useMetaDataBufferMode(bool enable)
+{
+ mUseMetaDataBufferMode = enable;
+
+ return NO_ERROR;
+}
+
+
+status_t AppCallbackNotifier::startRecording()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mRecordingLock);
+
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to start video recording without FrameProvider");
+ ret = -1;
+ }
+
+ if(mRecording)
+ {
+ return NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mFrameProvider->enableFrameNotification(CameraFrame::VIDEO_FRAME_SYNC);
+ }
+
+ mRecording = true;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+//Allocate metadata buffers for video recording
+status_t AppCallbackNotifier::initSharedVideoBuffers(void *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ if(mUseMetaDataBufferMode)
+ {
+ uint32_t *bufArr = NULL;
+ camera_memory_t* videoMedatadaBufferMemory = NULL;
+
+ if(NULL == buffers)
+ {
+ CAMHAL_LOGEA("Error! Video buffers are NULL");
+ return BAD_VALUE;
+ }
+ bufArr = (uint32_t *) buffers;
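+
+        // For every video buffer allocate a small camera_memory_t that holds a
+        // video_metadata_t. Two maps are maintained: buffer -> metadata memory
+        // and metadata data pointer -> buffer, so frames can be resolved in
+        // both directions when recording frames are released.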
+
+ for (uint32_t i = 0; i < count; i++)
+ {
+ videoMedatadaBufferMemory = mRequestMemory(-1, sizeof(video_metadata_t), 1, NULL);
+ if((NULL == videoMedatadaBufferMemory) || (NULL == videoMedatadaBufferMemory->data))
+ {
+ CAMHAL_LOGEA("Error! Could not allocate memory for Video Metadata Buffers");
+ return NO_MEMORY;
+ }
+
+ mVideoMetadataBufferMemoryMap.add(bufArr[i], (uint32_t)(videoMedatadaBufferMemory));
+ mVideoMetadataBufferReverseMap.add((uint32_t)(videoMedatadaBufferMemory->data), bufArr[i]);
+ CAMHAL_LOGDB("bufArr[%d]=0x%x, videoMedatadaBufferMemory=0x%x, videoMedatadaBufferMemory->data=0x%x",
+ i, bufArr[i], videoMedatadaBufferMemory, videoMedatadaBufferMemory->data);
+ }
+ }
+
+exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t AppCallbackNotifier::stopRecording()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mRecordingLock);
+
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to stop video recording without FrameProvider");
+ ret = -1;
+ }
+
+ if(!mRecording)
+ {
+ return NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mFrameProvider->disableFrameNotification(CameraFrame::VIDEO_FRAME_SYNC);
+ }
+
+ ///Release the shared video buffers
+ releaseSharedVideoBuffers();
+
+ mRecording = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t AppCallbackNotifier::releaseRecordingFrame(const void* mem)
+{
+ status_t ret = NO_ERROR;
+ void *frame = NULL;
+
+ LOG_FUNCTION_NAME;
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to stop video recording without FrameProvider");
+ ret = -1;
+ }
+
+ if ( NULL == mem )
+ {
+ CAMHAL_LOGEA("Video Frame released is invalid");
+ ret = -1;
+ }
+
+ if( NO_ERROR != ret )
+ {
+ return ret;
+ }
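+
+    // In metadata mode the caller hands back a video_metadata_t pointer, which
+    // is mapped back to the original frame buffer; otherwise the frame buffer
+    // address itself is read out of the returned memory.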
+
+ if(mUseMetaDataBufferMode)
+ {
+ video_metadata_t *videoMetadataBuffer = (video_metadata_t *) mem ;
+ frame = (void*) mVideoMetadataBufferReverseMap.valueFor((uint32_t) videoMetadataBuffer);
+ CAMHAL_LOGVB("Releasing frame with videoMetadataBuffer=0x%x, videoMetadataBuffer->handle=0x%x & frame handle=0x%x\n",
+ videoMetadataBuffer, videoMetadataBuffer->handle, frame);
+ }
+ else
+ {
+ frame = (void*)(*((uint32_t *)mem));
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = mFrameProvider->returnFrame(frame, CameraFrame::VIDEO_FRAME_SYNC);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t AppCallbackNotifier::enableMsgType(int32_t msgType)
+{
+ if(msgType & CAMERA_MSG_POSTVIEW_FRAME)
+ {
+ mFrameProvider->enableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+ }
+
+ if(msgType & CAMERA_MSG_PREVIEW_FRAME)
+ {
+ mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ }
+
+ return NO_ERROR;
+}
+
+status_t AppCallbackNotifier::disableMsgType(int32_t msgType)
+{
+ if(msgType & CAMERA_MSG_POSTVIEW_FRAME)
+ {
+ mFrameProvider->disableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
+ }
+
+ if(msgType & CAMERA_MSG_PREVIEW_FRAME)
+ {
+ mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ }
+
+ return NO_ERROR;
+
+}
+
+status_t AppCallbackNotifier::start()
+{
+ LOG_FUNCTION_NAME;
+ if(mNotifierState==AppCallbackNotifier::NOTIFIER_STARTED)
+ {
+ CAMHAL_LOGDA("AppCallbackNotifier already running");
+ LOG_FUNCTION_NAME_EXIT;
+ return ALREADY_EXISTS;
+ }
+
+ ///Check whether initial conditions are met for us to start
+ ///A frame provider should be available, if not return error
+ if(!mFrameProvider)
+ {
+ ///AppCallbackNotifier not properly initialized
+ CAMHAL_LOGEA("AppCallbackNotifier not properly initialized - Frame provider is NULL");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ ///At least one event notifier should be available, if not return error
+ ///@todo Modify here when there is an array of event providers
+ if(!mEventProvider)
+ {
+ CAMHAL_LOGEA("AppCallbackNotifier not properly initialized - Event provider is NULL");
+ LOG_FUNCTION_NAME_EXIT;
+ ///AppCallbackNotifier not properly initialized
+ return NO_INIT;
+ }
+
+ mNotifierState = AppCallbackNotifier::NOTIFIER_STARTED;
+ CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STARTED \n");
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+
+}
+
+status_t AppCallbackNotifier::stop()
+{
+ LOG_FUNCTION_NAME;
+
+ if(mNotifierState!=AppCallbackNotifier::NOTIFIER_STARTED)
+ {
+ CAMHAL_LOGDA("AppCallbackNotifier already in stopped state");
+ LOG_FUNCTION_NAME_EXIT;
+ return ALREADY_EXISTS;
+ }
+
+ mNotifierState = AppCallbackNotifier::NOTIFIER_STOPPED;
+ CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STOPPED \n");
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+
+/*--------------------NotificationHandler Class ENDS here-----------------------------*/
+
+
+
+};
diff --git a/camera/BaseCameraAdapter.cpp b/camera/BaseCameraAdapter.cpp
new file mode 100644
index 0000000..fb90a79
--- /dev/null
+++ b/camera/BaseCameraAdapter.cpp
@@ -0,0 +1,1969 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#define LOG_TAG "CameraHAL"
+
+#include "BaseCameraAdapter.h"
+
+namespace android {
+
+/*--------------------Camera Adapter Class STARTS here-----------------------------*/
+
+BaseCameraAdapter::BaseCameraAdapter()
+{
+ mReleaseImageBuffersCallback = NULL;
+ mEndImageCaptureCallback = NULL;
+ mErrorNotifier = NULL;
+ mEndCaptureData = NULL;
+ mReleaseData = NULL;
+ mRecording = false;
+
+ mPreviewBuffers = NULL;
+ mPreviewBufferCount = 0;
+ mPreviewBuffersLength = 0;
+
+ mVideoBuffers = NULL;
+ mVideoBuffersCount = 0;
+ mVideoBuffersLength = 0;
+
+ mCaptureBuffers = NULL;
+ mCaptureBuffersCount = 0;
+ mCaptureBuffersLength = 0;
+
+ mPreviewDataBuffers = NULL;
+ mPreviewDataBuffersCount = 0;
+ mPreviewDataBuffersLength = 0;
+
+ mAdapterState = INTIALIZED_STATE;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ mStartFocus.tv_sec = 0;
+ mStartFocus.tv_usec = 0;
+ mStartCapture.tv_sec = 0;
+ mStartCapture.tv_usec = 0;
+#endif
+
+}
+
+BaseCameraAdapter::~BaseCameraAdapter()
+{
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mSubscriberLock);
+
+ mFrameSubscribers.clear();
+ mImageSubscribers.clear();
+ mRawSubscribers.clear();
+ mVideoSubscribers.clear();
+ mFocusSubscribers.clear();
+ mShutterSubscribers.clear();
+ mZoomSubscribers.clear();
+ mFaceSubscribers.clear();
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t BaseCameraAdapter::registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mReleaseImageBuffersCallback = callback;
+ mReleaseData = user_data;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::registerEndCaptureCallback(end_image_capture_callback callback, void *user_data)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mEndImageCaptureCallback= callback;
+ mEndCaptureData = user_data;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == errorNotifier )
+ {
+ CAMHAL_LOGEA("Invalid Error Notifier reference");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mErrorNotifier = errorNotifier;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void BaseCameraAdapter::enableMsgType(int32_t msgs, frame_callback callback, event_callback eventCb, void* cookie)
+{
+ Mutex::Autolock lock(mSubscriberLock);
+
+ LOG_FUNCTION_NAME;
+
+ if ( CameraFrame::PREVIEW_FRAME_SYNC == msgs )
+ {
+ mFrameSubscribers.add((int) cookie, callback);
+ }
+ else if ( CameraFrame::FRAME_DATA_SYNC == msgs )
+ {
+ mFrameDataSubscribers.add((int) cookie, callback);
+ }
+ else if ( CameraFrame::IMAGE_FRAME == msgs)
+ {
+ mImageSubscribers.add((int) cookie, callback);
+ }
+ else if ( CameraFrame::RAW_FRAME == msgs)
+ {
+ mRawSubscribers.add((int) cookie, callback);
+ }
+ else if ( CameraFrame::VIDEO_FRAME_SYNC == msgs)
+ {
+ mVideoSubscribers.add((int) cookie, callback);
+ }
+ else if ( CameraHalEvent::ALL_EVENTS == msgs)
+ {
+ mFocusSubscribers.add((int) cookie, eventCb);
+ mShutterSubscribers.add((int) cookie, eventCb);
+ mZoomSubscribers.add((int) cookie, eventCb);
+ mFaceSubscribers.add((int) cookie, eventCb);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Message type subscription no supported yet!");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void BaseCameraAdapter::disableMsgType(int32_t msgs, void* cookie)
+{
+ Mutex::Autolock lock(mSubscriberLock);
+
+ LOG_FUNCTION_NAME;
+
+ if ( CameraFrame::PREVIEW_FRAME_SYNC == msgs )
+ {
+ mFrameSubscribers.removeItem((int) cookie);
+ }
+ else if ( CameraFrame::FRAME_DATA_SYNC == msgs )
+ {
+ mFrameDataSubscribers.removeItem((int) cookie);
+ }
+ else if ( CameraFrame::IMAGE_FRAME == msgs)
+ {
+ mImageSubscribers.removeItem((int) cookie);
+ }
+ else if ( CameraFrame::RAW_FRAME == msgs)
+ {
+ mRawSubscribers.removeItem((int) cookie);
+ }
+ else if ( CameraFrame::VIDEO_FRAME_SYNC == msgs)
+ {
+ mVideoSubscribers.removeItem((int) cookie);
+ }
+ else if ( CameraFrame::ALL_FRAMES == msgs )
+ {
+ mFrameSubscribers.removeItem((int) cookie);
+ mFrameDataSubscribers.removeItem((int) cookie);
+ mImageSubscribers.removeItem((int) cookie);
+ mRawSubscribers.removeItem((int) cookie);
+ mVideoSubscribers.removeItem((int) cookie);
+ }
+ else if ( CameraHalEvent::ALL_EVENTS == msgs)
+ {
+ //Subscribe only for focus
+ //TODO: Process case by case
+ mFocusSubscribers.removeItem((int) cookie);
+ mShutterSubscribers.removeItem((int) cookie);
+ mZoomSubscribers.removeItem((int) cookie);
+ mFaceSubscribers.removeItem((int) cookie);
+ }
+ else
+ {
+ CAMHAL_LOGEB("Message type 0x%x subscription no supported yet!", msgs);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void BaseCameraAdapter::returnFrame(void* frameBuf, CameraFrame::FrameType frameType)
+{
+ status_t res = NO_ERROR;
+ size_t subscriberCount = 0;
+ int refCount = -1;
+
+ Mutex::Autolock lock(mReturnFrameLock);
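+
+    // Decrement the reference count for this buffer and frame type. While
+    // recording, preview and video frames share the same buffers, so the
+    // combined count is considered and the buffer is re-queued through
+    // fillThisBuffer() only once no subscriber holds it any more.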
+
+ if ( NULL == frameBuf )
+ {
+ CAMHAL_LOGEA("Invalid frameBuf");
+ return;
+ }
+
+ if ( NO_ERROR == res)
+ {
+
+ refCount = getFrameRefCount(frameBuf, frameType);
+
+ if ( 0 < refCount )
+ {
+
+ refCount--;
+ setFrameRefCount(frameBuf, frameType, refCount);
+
+ if ( ( mRecording ) && ( CameraFrame::VIDEO_FRAME_SYNC == frameType ) )
+ {
+ refCount += getFrameRefCount(frameBuf, CameraFrame::PREVIEW_FRAME_SYNC);
+ }
+ else if ( ( mRecording ) && ( CameraFrame::PREVIEW_FRAME_SYNC == frameType ) )
+ {
+ refCount += getFrameRefCount(frameBuf, CameraFrame::VIDEO_FRAME_SYNC);
+ }
+
+ }
+ else
+ {
+ return;
+ }
+ }
+
+ if ( NO_ERROR == res )
+ {
+ //check if someone is holding this buffer
+ if ( 0 == refCount )
+ {
+ res = fillThisBuffer(frameBuf, frameType);
+ }
+ }
+
+}
+
+status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, int value2, int value3)
+{
+ status_t ret = NO_ERROR;
+ struct timeval *refTimestamp;
+ BuffersDescriptor *desc = NULL;
+ CameraFrame *frame = NULL;
+
+ LOG_FUNCTION_NAME;
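+
+    // Most commands follow the same pattern: validate the requested state
+    // transition with setState(), perform the operation, then commitState()
+    // on success or rollbackState() on failure to release the state lock.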
+
+ switch ( operation ) {
+ case CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW:
+ CAMHAL_LOGDA("Use buffers for preview");
+ desc = ( BuffersDescriptor * ) value1;
+
+ if ( NULL == desc )
+ {
+ CAMHAL_LOGEA("Invalid preview buffers!");
+ return -EINVAL;
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ Mutex::Autolock lock(mPreviewBufferLock);
+ mPreviewBuffers = (int *) desc->mBuffers;
+ mPreviewBuffersLength = desc->mLength;
+ mPreviewBuffersAvailable.clear();
+ for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
+ {
+ mPreviewBuffersAvailable.add(mPreviewBuffers[i], 0);
+ }
+                // initial ref count for un-dequeued buffers is 1 since the
+                // buffer provider is still holding on to them
+ for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
+ {
+ mPreviewBuffersAvailable.add(mPreviewBuffers[i], 1);
+ }
+ }
+
+ if ( NULL != desc )
+ {
+ ret = useBuffers(CameraAdapter::CAMERA_PREVIEW,
+ desc->mBuffers,
+ desc->mCount,
+ desc->mLength,
+ desc->mMaxQueueable);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA:
+ CAMHAL_LOGDA("Use buffers for preview data");
+ desc = ( BuffersDescriptor * ) value1;
+
+ if ( NULL == desc )
+ {
+ CAMHAL_LOGEA("Invalid preview data buffers!");
+ return -EINVAL;
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ Mutex::Autolock lock(mPreviewDataBufferLock);
+ mPreviewDataBuffers = (int *) desc->mBuffers;
+ mPreviewDataBuffersLength = desc->mLength;
+ mPreviewDataBuffersAvailable.clear();
+ for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
+ {
+ mPreviewDataBuffersAvailable.add(mPreviewDataBuffers[i], true);
+ }
+                // initial ref count for un-dequeued buffers is 1 since the
+                // buffer provider is still holding on to them
+ for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
+ {
+                    mPreviewDataBuffersAvailable.add(mPreviewDataBuffers[i], 1);
+ }
+ }
+
+ if ( NULL != desc )
+ {
+ ret = useBuffers(CameraAdapter::CAMERA_MEASUREMENT,
+ desc->mBuffers,
+ desc->mCount,
+ desc->mLength,
+ desc->mMaxQueueable);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDA("Use buffers for image capture");
+ desc = ( BuffersDescriptor * ) value1;
+
+ if ( NULL == desc )
+ {
+ CAMHAL_LOGEA("Invalid capture buffers!");
+ return -EINVAL;
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ Mutex::Autolock lock(mCaptureBufferLock);
+ mCaptureBuffers = (int *) desc->mBuffers;
+ mCaptureBuffersLength = desc->mLength;
+ mCaptureBuffersAvailable.clear();
+ for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
+ {
+ mCaptureBuffersAvailable.add(mCaptureBuffers[i], true);
+ }
+                // initial ref count for un-dequeued buffers is 1 since the
+                // buffer provider is still holding on to them
+ for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
+ {
+                    mCaptureBuffersAvailable.add(mCaptureBuffers[i], 1);
+ }
+ }
+
+ if ( NULL != desc )
+ {
+ ret = useBuffers(CameraAdapter::CAMERA_IMAGE_CAPTURE,
+ desc->mBuffers,
+ desc->mCount,
+ desc->mLength,
+ desc->mMaxQueueable);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_START_SMOOTH_ZOOM:
+ {
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = startSmoothZoom(value1);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM:
+ {
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = stopSmoothZoom();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_START_PREVIEW:
+ {
+
+ CAMHAL_LOGDA("Start Preview");
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = startPreview();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_STOP_PREVIEW:
+ {
+
+ CAMHAL_LOGDA("Stop Preview");
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = stopPreview();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_START_VIDEO:
+ {
+
+ CAMHAL_LOGDA("Start video recording");
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = startVideoCapture();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_STOP_VIDEO:
+ {
+
+ CAMHAL_LOGDA("Stop video recording");
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = stopVideoCapture();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS:
+ {
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = flushBuffers();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_START_IMAGE_CAPTURE:
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ refTimestamp = ( struct timeval * ) value1;
+ if ( NULL != refTimestamp )
+ {
+ memcpy( &mStartCapture, refTimestamp, sizeof( struct timeval ));
+ }
+
+#endif
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = takePicture();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE:
+ {
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = stopImageCapture();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_START_BRACKET_CAPTURE:
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ refTimestamp = ( struct timeval * ) value2;
+ if ( NULL != refTimestamp )
+ {
+ memcpy( &mStartCapture, refTimestamp, sizeof( struct timeval ));
+ }
+
+#endif
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = startBracketing(value1);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE:
+ {
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = stopBracketing();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_PERFORM_AUTOFOCUS:
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ refTimestamp = ( struct timeval * ) value1;
+ if ( NULL != refTimestamp )
+ {
+ memcpy( &mStartFocus, refTimestamp, sizeof( struct timeval ));
+ }
+
+#endif
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = autoFocus();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_CANCEL_AUTOFOCUS:
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = cancelAutoFocus();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW:
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ frame = ( CameraFrame * ) value1;
+
+ if ( NULL != frame )
+ {
+ ret = getFrameSize(frame->mWidth, frame->mHeight);
+ }
+ else
+ {
+ ret = -EINVAL;
+ }
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ frame = ( CameraFrame * ) value1;
+
+ if ( NULL != frame )
+ {
+ ret = getPictureBufferSize(frame->mLength, value2);
+ }
+ else
+ {
+ ret = -EINVAL;
+ }
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ frame = ( CameraFrame * ) value1;
+
+ if ( NULL != frame )
+ {
+ ret = getFrameDataSize(frame->mLength, value2);
+ }
+ else
+ {
+ ret = -EINVAL;
+ }
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_START_FD:
+
+ ret = startFaceDetection();
+
+ break;
+
+ case CameraAdapter::CAMERA_STOP_FD:
+
+ ret = stopFaceDetection();
+
+ break;
+
+ default:
+ CAMHAL_LOGEB("Command 0x%x unsupported!", operation);
+ break;
+ };
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t BaseCameraAdapter::notifyFocusSubscribers(bool status)
+{
+ event_callback eventCb;
+ CameraHalEvent focusEvent;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( mFocusSubscribers.size() == 0 ) {
+ CAMHAL_LOGDA("No Focus Subscribers!");
+ return NO_INIT;
+ }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //dump the AF latency
+ CameraHal::PPM("Focus finished in: ", &mStartFocus);
+
+#endif
+
+ focusEvent.mEventData = new CameraHalEvent::CameraHalEventData();
+ if ( NULL == focusEvent.mEventData.get() ) {
+ return -ENOMEM;
+ }
+
+ focusEvent.mEventType = CameraHalEvent::EVENT_FOCUS_LOCKED;
+ focusEvent.mEventData->focusEvent.focusLocked = status;
+ focusEvent.mEventData->focusEvent.focusError = !status;
+
+ for (unsigned int i = 0 ; i < mFocusSubscribers.size(); i++ )
+ {
+ focusEvent.mCookie = (void *) mFocusSubscribers.keyAt(i);
+ eventCb = (event_callback) mFocusSubscribers.valueAt(i);
+ eventCb ( &focusEvent );
+ }
+
+ focusEvent.mEventData.clear();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::notifyShutterSubscribers()
+{
+ CameraHalEvent shutterEvent;
+ event_callback eventCb;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( mShutterSubscribers.size() == 0 )
+ {
+ CAMHAL_LOGEA("No shutter Subscribers!");
+ return NO_INIT;
+ }
+
+ shutterEvent.mEventData = new CameraHalEvent::CameraHalEventData();
+ if ( NULL == shutterEvent.mEventData.get() ) {
+ return -ENOMEM;
+ }
+
+ shutterEvent.mEventType = CameraHalEvent::EVENT_SHUTTER;
+ shutterEvent.mEventData->shutterEvent.shutterClosed = true;
+
+ for (unsigned int i = 0 ; i < mShutterSubscribers.size() ; i++ ) {
+ shutterEvent.mCookie = ( void * ) mShutterSubscribers.keyAt(i);
+ eventCb = ( event_callback ) mShutterSubscribers.valueAt(i);
+
+ CAMHAL_LOGEA("Sending shutter callback");
+
+ eventCb ( &shutterEvent );
+ }
+
+ shutterEvent.mEventData.clear();
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::notifyZoomSubscribers(int zoomIdx, bool targetReached)
+{
+ event_callback eventCb;
+ CameraHalEvent zoomEvent;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( mZoomSubscribers.size() == 0 ) {
+ CAMHAL_LOGDA("No zoom Subscribers!");
+ return NO_INIT;
+ }
+
+ zoomEvent.mEventData = new CameraHalEvent::CameraHalEventData();
+ if ( NULL == zoomEvent.mEventData.get() ) {
+ return -ENOMEM;
+ }
+
+ zoomEvent.mEventType = CameraHalEvent::EVENT_ZOOM_INDEX_REACHED;
+ zoomEvent.mEventData->zoomEvent.currentZoomIndex = zoomIdx;
+ zoomEvent.mEventData->zoomEvent.targetZoomIndexReached = targetReached;
+
+ for (unsigned int i = 0 ; i < mZoomSubscribers.size(); i++ ) {
+ zoomEvent.mCookie = (void *) mZoomSubscribers.keyAt(i);
+ eventCb = (event_callback) mZoomSubscribers.valueAt(i);
+
+ eventCb ( &zoomEvent );
+ }
+
+ zoomEvent.mEventData.clear();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::notifyFaceSubscribers(sp<CameraFDResult> &faces)
+{
+ event_callback eventCb;
+ CameraHalEvent faceEvent;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( mFaceSubscribers.size() == 0 ) {
+ CAMHAL_LOGDA("No face detection subscribers!");
+ return NO_INIT;
+ }
+
+ faceEvent.mEventData = new CameraHalEvent::CameraHalEventData();
+ if ( NULL == faceEvent.mEventData.get() ) {
+ return -ENOMEM;
+ }
+
+ faceEvent.mEventType = CameraHalEvent::EVENT_FACE;
+ faceEvent.mEventData->faceEvent = faces;
+
+ for (unsigned int i = 0 ; i < mFaceSubscribers.size(); i++ ) {
+ faceEvent.mCookie = (void *) mFaceSubscribers.keyAt(i);
+ eventCb = (event_callback) mFaceSubscribers.valueAt(i);
+
+ eventCb ( &faceEvent );
+ }
+
+ faceEvent.mEventData.clear();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::sendFrameToSubscribers(CameraFrame *frame)
+{
+ status_t ret = NO_ERROR;
+ frame_callback callback;
+ uint32_t i = 0;
+ KeyedVector<int, frame_callback> *subscribers = NULL;
+ size_t refCount = 0;
+
+ if ( NULL == frame )
+ {
+ CAMHAL_LOGEA("Invalid CameraFrame");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ switch(frame->mFrameType)
+ {
+ case CameraFrame::IMAGE_FRAME:
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Shot to Jpeg: ", &mStartCapture);
+
+#endif
+
+ subscribers = &mImageSubscribers;
+ break;
+ }
+ case CameraFrame::RAW_FRAME:
+ {
+ subscribers = &mRawSubscribers;
+ break;
+ }
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ {
+ subscribers = &mVideoSubscribers;
+ break;
+ }
+ case CameraFrame::FRAME_DATA_SYNC:
+ {
+ subscribers = &mFrameDataSubscribers;
+ break;
+ }
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ case CameraFrame::SNAPSHOT_FRAME:
+ {
+ subscribers = &mFrameSubscribers;
+ break;
+ }
+ default:
+ {
+ ret = -EINVAL;
+ break;
+ }
+ };
+
+ }
+
+ if ( ( NO_ERROR == ret ) &&
+ ( NULL != subscribers ) )
+ {
+ Mutex::Autolock lock(mSubscriberLock);
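+
+        // The number of subscribers becomes the initial reference count for
+        // this buffer; each subscriber is expected to call returnFrame() when
+        // it is done, which eventually re-queues the buffer.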
+
+ refCount = subscribers->size();
+ CAMHAL_LOGVB("Type of Frame: 0x%x address: 0x%x refCount start %d",
+ frame->mFrameType,
+ ( uint32_t ) frame->mBuffer,
+ refCount);
+
+ setFrameRefCount(frame->mBuffer,
+ ( CameraFrame::FrameType ) frame->mFrameType,
+ refCount);
+
+ for ( i = 0 ; i < subscribers->size(); i++ )
+ {
+ frame->mCookie = ( void * ) subscribers->keyAt(i);
+ callback = (frame_callback) subscribers->valueAt(i);
+ callback(frame);
+ }
+ }
+
+ if ( 0 == i )
+ {
+ //No subscribers for this frame
+ ret = -1;
+ }
+
+ return ret;
+}
+
+int BaseCameraAdapter::getFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType)
+{
+ int res = -1;
+
+ LOG_FUNCTION_NAME;
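+
+    // Each frame type keeps its own buffer -> refcount map, guarded by the
+    // corresponding buffer lock; res stays -1 if the frame type is unknown.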
+
+ switch ( frameType )
+ {
+ case CameraFrame::IMAGE_FRAME:
+ case CameraFrame::RAW_FRAME:
+ {
+ Mutex::Autolock lock(mCaptureBufferLock);
+ res = mCaptureBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ }
+ break;
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ case CameraFrame::SNAPSHOT_FRAME:
+ {
+ Mutex::Autolock lock(mPreviewBufferLock);
+ res = mPreviewBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ }
+ break;
+ case CameraFrame::FRAME_DATA_SYNC:
+ {
+ Mutex::Autolock lock(mPreviewDataBufferLock);
+ res = mPreviewDataBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ }
+ break;
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ {
+ Mutex::Autolock lock(mVideoBufferLock);
+ res = mVideoBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ }
+ break;
+ default:
+ break;
+ };
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return res;
+}
+
+void BaseCameraAdapter::setFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType, int refCount)
+{
+
+ LOG_FUNCTION_NAME;
+
+ switch ( frameType )
+ {
+ case CameraFrame::IMAGE_FRAME:
+ case CameraFrame::RAW_FRAME:
+ {
+ Mutex::Autolock lock(mCaptureBufferLock);
+ mCaptureBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ }
+ break;
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ case CameraFrame::SNAPSHOT_FRAME:
+ {
+ Mutex::Autolock lock(mPreviewBufferLock);
+ mPreviewBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ }
+ break;
+ case CameraFrame::FRAME_DATA_SYNC:
+ {
+ Mutex::Autolock lock(mPreviewDataBufferLock);
+ mPreviewDataBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ }
+ break;
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ {
+ Mutex::Autolock lock(mVideoBufferLock);
+ mVideoBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ }
+ break;
+ default:
+ break;
+ };
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+status_t BaseCameraAdapter::startVideoCapture()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mVideoBufferLock);
+
+ //If the capture is already ongoing, return from here.
+ if ( mRecording )
+ {
+ ret = NO_INIT;
+ }
+
+
+ if ( NO_ERROR == ret )
+ {
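+        // Video frames are sourced from the preview buffers: mirror the
+        // preview buffer list into mVideoBuffersAvailable with an initial
+        // reference count of zero so recording can track them independently.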
+
+ for ( unsigned int i = 0 ; i < mPreviewBuffersAvailable.size() ; i++ )
+ {
+ mVideoBuffersAvailable.add(mPreviewBuffersAvailable.keyAt(i), 0);
+ }
+
+ mRecording = true;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopVideoCapture()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( !mRecording )
+ {
+ ret = NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ for ( unsigned int i = 0 ; i < mVideoBuffersAvailable.size() ; i++ )
+ {
+ void *frameBuf = ( void * ) mVideoBuffersAvailable.keyAt(i);
+ if( getFrameRefCount(frameBuf, CameraFrame::VIDEO_FRAME_SYNC) > 0)
+ {
+ returnFrame(frameBuf, CameraFrame::VIDEO_FRAME_SYNC);
+ }
+ }
+
+ mVideoBuffersAvailable.clear();
+
+ mRecording = false;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+//-----------------Stub implementation of the interface ------------------------------
+
+status_t BaseCameraAdapter::takePicture()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopImageCapture()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::startBracketing(int range)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopBracketing()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::autoFocus()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ notifyFocusSubscribers(false);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::cancelAutoFocus()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::startSmoothZoom(int targetIdx)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopSmoothZoom()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::startPreview()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopPreview()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::getFrameSize(size_t &width, size_t &height)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::startFaceDetection()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopFaceDetection()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::setState(CameraCommands operation)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mLock.lock();
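+
+    // mLock is taken here and intentionally left held; it is released by the
+    // matching commitState() or rollbackState() call once the requested
+    // transition has been applied or abandoned.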
+
+ switch ( mAdapterState )
+ {
+
+ case INTIALIZED_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_USE_BUFFERS_PREVIEW:
+ CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->LOADED_PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = LOADED_PREVIEW_STATE;
+ break;
+
+ //These events don't change the current state
+ case CAMERA_QUERY_RESOLUTION_PREVIEW:
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
+ CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->INTIALIZED_STATE event = 0x%x",
+ operation);
+ mNextState = INTIALIZED_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch INTIALIZED_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case LOADED_PREVIEW_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_START_PREVIEW:
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ //These events don't change the current state
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
+ case CAMERA_USE_BUFFERS_PREVIEW_DATA:
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->LOADED_PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = LOADED_PREVIEW_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case PREVIEW_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_PREVIEW:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->INTIALIZED_STATE event = 0x%x",
+ operation);
+ mNextState = INTIALIZED_STATE;
+ break;
+
+ case CAMERA_PERFORM_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->AF_STATE event = 0x%x",
+ operation);
+ mNextState = AF_STATE;
+ break;
+
+ case CAMERA_START_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->ZOOM_STATE event = 0x%x",
+ operation);
+ mNextState = ZOOM_STATE;
+ break;
+
+ case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->LOADED_CAPTURE_STATE event = 0x%x",
+ operation);
+ mNextState = LOADED_CAPTURE_STATE;
+ break;
+
+ case CAMERA_START_VIDEO:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->VIDEO_STATE event = 0x%x",
+ operation);
+ mNextState = VIDEO_STATE;
+ break;
+
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_ACTIVE->PREVIEW_ACTIVE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch PREVIEW_ACTIVE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case LOADED_CAPTURE_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = 0x%x",
+ operation);
+ mNextState = CAPTURE_STATE;
+ break;
+
+ case CAMERA_START_BRACKET_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->BRACKETING_STATE event = 0x%x",
+ operation);
+ mNextState = BRACKETING_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case CAPTURE_STATE:
+
+ switch ( operation )
+ {
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case BRACKETING_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_BRACKET_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->CAPTURE_STATE event = 0x%x",
+ operation);
+ mNextState = CAPTURE_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch BRACKETING_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case AF_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_CANCEL_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_START_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->AF_ZOOM_STATE event = 0x%x",
+ operation);
+ mNextState = AF_ZOOM_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch AF_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case ZOOM_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_PERFORM_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->AF_ZOOM_STATE event = 0x%x",
+ operation);
+ mNextState = AF_ZOOM_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch ZOOM_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case VIDEO_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_VIDEO:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch VIDEO_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case AF_ZOOM_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->AF_STATE event = 0x%x",
+ operation);
+ mNextState = AF_STATE;
+ break;
+
+ case CAMERA_CANCEL_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->ZOOM_STATE event = 0x%x",
+ operation);
+ mNextState = ZOOM_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch AF_ZOOM_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case VIDEO_ZOOM_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->VIDEO_STATE event = 0x%x",
+ operation);
+ mNextState = VIDEO_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch VIDEO_ZOOM_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case BRACKETING_ZOOM_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_ZOOM_STATE->BRACKETING_STATE event = 0x%x",
+ operation);
+ mNextState = BRACKETING_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch BRACKETING_ZOOM_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ default:
+ CAMHAL_LOGEA("Invalid Adapter state!");
+ ret = INVALID_OPERATION;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+//State transition finished successfully.
+//Commit the state and unlock the adapter state.
+status_t BaseCameraAdapter::commitState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mAdapterState = mNextState;
+
+ mLock.unlock();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::rollbackState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mNextState = mAdapterState;
+
+ mLock.unlock();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+// getNextState() and getState()
+// publicly exposed functions to retrieve the adapter states
+// please notice that these functions are locked
+CameraAdapter::AdapterState BaseCameraAdapter::getState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mAdapterState;
+}
+
+CameraAdapter::AdapterState BaseCameraAdapter::getNextState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mNextState;
+}
+
+// getNextState() and getState()
+// internal protected functions to retrieve the adapter states
+// please notice that these functions are NOT locked to help
+// internal functions query state in the middle of state
+// transition
+status_t BaseCameraAdapter::getState(AdapterState &state)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ state = mAdapterState;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::getNextState(AdapterState &state)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ state = mNextState;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void BaseCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
+{
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+}
+//-----------------------------------------------------------------------------
+
+
+
+};
+
+/*--------------------Camera Adapter Class ENDS here-----------------------------*/
+
diff --git a/camera/CameraHal.cpp b/camera/CameraHal.cpp
new file mode 100755
index 0000000..53bc377
--- /dev/null
+++ b/camera/CameraHal.cpp
@@ -0,0 +1,3030 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraHal.cpp
+*
+* This file maps the Camera Hardware Interface to V4L2.
+*
+*/
+
+#define LOG_TAG "CameraHAL"
+
+#include "CameraHal.h"
+#include "ANativeWindowDisplayAdapter.h"
+#include "TICameraParameters.h"
+#include "CameraProperties.h"
+#include <cutils/properties.h>
+
+#include <poll.h>
+#include <math.h>
+
+namespace android {
+
+extern "C" CameraAdapter* CameraAdapter_Factory();
+
+/*****************************************************************************/
+
+////Constant definitions and declarations
+////@todo Have a CameraProperties class to store these parameters as constants for every camera
+//// Currently, they are hard-coded
+
+const int CameraHal::NO_BUFFERS_PREVIEW = MAX_CAMERA_BUFFERS;
+const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE = 2;
+
+const uint32_t MessageNotifier::EVENT_BIT_FIELD_POSITION = 0;
+const uint32_t MessageNotifier::FRAME_BIT_FIELD_POSITION = 0;
+
+/******************************************************************************/
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+struct timeval CameraHal::mStartPreview;
+struct timeval CameraHal::mStartFocus;
+struct timeval CameraHal::mStartCapture;
+
+#endif
+
+static void orientation_cb(uint32_t orientation, uint32_t tilt, void* cookie) {
+ CameraHal *camera = NULL;
+
+ if (cookie) {
+ camera = (CameraHal*) cookie;
+ camera->onOrientationEvent(orientation, tilt);
+ }
+
+}
+/*-------------Camera Hal Interface Method definitions STARTS here--------------------*/
+
+/**
+ Callback function to receive orientation events from SensorListener
+ */
+void CameraHal::onOrientationEvent(uint32_t orientation, uint32_t tilt) {
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != mCameraAdapter ) {
+ mCameraAdapter->onOrientationEvent(orientation, tilt);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Set the notification and data callbacks
+
+ @param[in] notify_cb Notify callback for notifying the app about events and errors
+ @param[in] data_cb Buffer callback for sending the preview/raw frames to the app
+ @param[in] data_cb_timestamp Buffer callback for sending the video frames w/ timestamp
+ @param[in] user Callback cookie
+ @return none
+
+ */
+void CameraHal::setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != mAppCallbackNotifier.get() )
+ {
+ mAppCallbackNotifier->setCallbacks(this,
+ notify_cb,
+ data_cb,
+ data_cb_timestamp,
+ get_memory,
+ user);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Enable a message, or set of messages.
+
+ @param[in] msgtype Bitmask of the messages to enable (defined in include/ui/Camera.h)
+ @return none
+
+ */
+void CameraHal::enableMsgType(int32_t msgType)
+{
+ LOG_FUNCTION_NAME;
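+
+    // When the shutter sound has been disabled, strip CAMERA_MSG_SHUTTER from
+    // the requested mask before it reaches the callback notifier.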
+
+ if ( ( msgType & CAMERA_MSG_SHUTTER ) && ( !mShutterEnabled ) )
+ {
+ msgType &= ~CAMERA_MSG_SHUTTER;
+ }
+
+ {
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled |= msgType;
+ }
+
+ if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
+ {
+ if(mDisplayPaused)
+ {
+ CAMHAL_LOGDA("Preview currently paused...will enable preview callback when restarted");
+ msgType &= ~CAMERA_MSG_PREVIEW_FRAME;
+ }else
+ {
+ CAMHAL_LOGDA("Enabling Preview Callback");
+ }
+ }
+ else
+ {
+ CAMHAL_LOGDB("Preview callback not enabled %x", msgType);
+ }
+
+
+ ///Configure app callback notifier with the message callback required
+ mAppCallbackNotifier->enableMsgType (msgType);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Disable a message, or set of messages.
+
+ @param[in] msgtype Bitmask of the messages to disable (defined in include/ui/Camera.h)
+ @return none
+
+ */
+void CameraHal::disableMsgType(int32_t msgType)
+{
+ LOG_FUNCTION_NAME;
+
+ {
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled &= ~msgType;
+ }
+
+ if( msgType & CAMERA_MSG_PREVIEW_FRAME)
+ {
+ CAMHAL_LOGDA("Disabling Preview Callback");
+ }
+
+ ///Configure app callback notifier
+ mAppCallbackNotifier->disableMsgType (msgType);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Query whether a message, or a set of messages, is enabled.
+
+ Note that this is operates as an AND, if any of the messages queried are off, this will
+ return false.
+
+ @param[in] msgtype Bitmask of the messages to query (defined in include/ui/Camera.h)
+ @return true If all message types are enabled
+ false If any message type is disabled
+
+ */
+int CameraHal::msgTypeEnabled(int32_t msgType)
+{
+ LOG_FUNCTION_NAME;
+ Mutex::Autolock lock(mLock);
+ LOG_FUNCTION_NAME_EXIT;
+ return (mMsgEnabled & msgType);
+}
+
+/**
+ @brief Set the camera parameters.
+
+ @param[in] params Camera parameters to configure the camera
+ @return NO_ERROR
+ @todo Define error codes
+
+ */
+int CameraHal::setParameters(const char* parameters)
+{
+
+ LOG_FUNCTION_NAME;
+
+ CameraParameters params;
+
+ String8 str_params(parameters);
+ params.unflatten(str_params);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return setParameters(params);
+}
+
+/**
+ @brief Set the camera parameters.
+
+ @param[in] params Camera parameters to configure the camera
+ @return NO_ERROR
+ @todo Define error codes
+
+ */
+int CameraHal::setParameters(const CameraParameters& params)
+{
+
+ LOG_FUNCTION_NAME;
+
+ int w, h;
+ int w_orig, h_orig;
+ int framerate,minframerate;
+ bool framerateUpdated = true;
+ int maxFPS, minFPS;
+ int error;
+ int base;
+ const char *valstr = NULL;
+ const char *prevFormat;
+ char *af_coord;
+ TIUTILS::Message msg;
+ status_t ret = NO_ERROR;
+
+ Mutex::Autolock lock(mLock);
+
+ ///Ensure that preview is not enabled when the below parameters are changed.
+ if(!previewEnabled())
+ {
+
+ CAMHAL_LOGDB("PreviewFormat %s", params.getPreviewFormat());
+
+ if ( !isParameterValid(params.getPreviewFormat(), mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS)))
+ {
+ CAMHAL_LOGEB("Invalid preview format %s", mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+ return -EINVAL;
+ }
+ else
+ {
+ if ( (valstr = params.getPreviewFormat()) != NULL)
+ mParameters.setPreviewFormat(valstr);
+ }
+
+ params.getPreviewSize(&w, &h);
+ if (w == -1 && h == -1) {
+ CAMHAL_LOGEA("Unable to get preview size");
+ return ret;
+ }
+
+ int orientation =0;
+ if((valstr = params.get(TICameraParameters::KEY_SENSOR_ORIENTATION)) != NULL)
+ {
+ CAMHAL_LOGDB("Sensor Orientation is set to %s", params.get(TICameraParameters::KEY_SENSOR_ORIENTATION));
+ mParameters.set(TICameraParameters::KEY_SENSOR_ORIENTATION, valstr);
+ orientation = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
+ }
+
+ if(orientation ==90 || orientation ==270)
+ {
+ if ( !isResolutionValid(h,w, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
+ {
+ CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h);
+ return -EINVAL;
+ }
+ else
+ {
+ mParameters.setPreviewSize(w, h);
+ }
+ }
+ else
+ {
+ if ( !isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
+ {
+ CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h);
+ return -EINVAL;
+ }
+ else
+ {
+ mParameters.setPreviewSize(w, h);
+ }
+ }
+
+ CAMHAL_LOGDB("PreviewResolution by App %d x %d", w, h);
+
+ if(( (valstr = params.get(TICameraParameters::KEY_VNF)) != NULL)
+ && ((params.getInt(TICameraParameters::KEY_VNF)==0) || (params.getInt(TICameraParameters::KEY_VNF)==1)))
+ {
+ CAMHAL_LOGDB("VNF set %s", params.get(TICameraParameters::KEY_VNF));
+ mParameters.set(TICameraParameters::KEY_VNF, valstr);
+ }
+
+ if(( (valstr = params.get(TICameraParameters::KEY_VSTAB)) != NULL)
+ && ((params.getInt(TICameraParameters::KEY_VSTAB)==0) || (params.getInt(TICameraParameters::KEY_VSTAB)==1)))
+ {
+ CAMHAL_LOGDB("VSTAB set %s", params.get(TICameraParameters::KEY_VSTAB));
+ mParameters.set(TICameraParameters::KEY_VSTAB, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL)
+ {
+ CAMHAL_LOGDB("Capture mode set %s", params.get(TICameraParameters::KEY_CAP_MODE));
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, valstr);
+ }
+
+ if((valstr = params.get(TICameraParameters::KEY_IPP)) != NULL)
+ {
+ CAMHAL_LOGDB("IPP mode set %s", params.get(TICameraParameters::KEY_IPP));
+ mParameters.set(TICameraParameters::KEY_IPP, valstr);
+ }
+
+ if((valstr = params.get(TICameraParameters::KEY_S3D2D_PREVIEW)) != NULL)
+ {
+ CAMHAL_LOGDB("Stereo 3D->2D Preview mode is %s", params.get(TICameraParameters::KEY_S3D2D_PREVIEW));
+ mParameters.set(TICameraParameters::KEY_S3D2D_PREVIEW, valstr);
+ }
+
+ if((valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE)) != NULL)
+ {
+ CAMHAL_LOGDB("AutoConvergence mode is %s", params.get(TICameraParameters::KEY_AUTOCONVERGENCE));
+ mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE, valstr);
+ }
+ if((valstr = params.get(TICameraParameters::KEY_SENSOR_ORIENTATION)) != NULL)
+ {
+ CAMHAL_LOGDB("Sensor Orientation is set to %s", params.get(TICameraParameters::KEY_SENSOR_ORIENTATION));
+ mParameters.set(TICameraParameters::KEY_SENSOR_ORIENTATION, valstr);
+ }
+
+ }
+
+ ///Below parameters can be changed when the preview is running
+ if ( !isParameterValid(params.getPictureFormat(),
+ mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS)))
+ {
+ CAMHAL_LOGEA("Invalid picture format");
+ return -EINVAL;
+ }
+ else
+ {
+ valstr = params.getPictureFormat();
+ if (valstr)
+ mParameters.setPictureFormat(valstr);
+ }
+
+ params.getPictureSize(&w, &h);
+ if ( !isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES)))
+ {
+ CAMHAL_LOGEB("Invalid picture resolution %dx%d", w, h);
+ return -EINVAL;
+ }
+ else
+ {
+ mParameters.setPictureSize(w, h);
+ }
+
+ CAMHAL_LOGDB("Picture Size by App %d x %d", w, h);
+
+ if(( (valstr = params.get(TICameraParameters::KEY_BURST)) != NULL)
+ && (params.getInt(TICameraParameters::KEY_BURST) >=0))
+ {
+ CAMHAL_LOGDB("Burst set %s", params.get(TICameraParameters::KEY_BURST));
+ mParameters.set(TICameraParameters::KEY_BURST, valstr);
+ }
+
+ framerate = params.getPreviewFrameRate();
+ if ( isParameterValid(framerate, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES)))
+ {
+ if ( mLastPreviewFramerate != framerate )
+ {
+ mLastPreviewFramerate = framerate;
+ mParameters.setPreviewFrameRate(framerate);
+ framerateUpdated = true;
+ }
+ else
+ {
+ framerateUpdated = false;
+ }
+ }
+ else
+ {
+ framerateUpdated = false;
+ }
+
+ CAMHAL_LOGDB("FRAMERATE %d", framerate);
+
+    //If the client uses a fixed framerate then
+    //give it a higher priority than VFR.
+ if ( framerateUpdated )
+ {
+
+ minFPS = framerate;
+ maxFPS = framerate;
+
+ CAMHAL_LOGDB("FPS Range [%d, %d]", minFPS, maxFPS);
+ mParameters.set(TICameraParameters::KEY_MINFRAMERATE, minFPS);
+ mParameters.set(TICameraParameters::KEY_MAXFRAMERATE, maxFPS);
+ mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE));
+ }
+ else if ( ( valstr = params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE) ) != NULL )
+ {
+ CAMHAL_LOGDB("FPS Range = %s", valstr);
+ params.getPreviewFpsRange(&minFPS, &maxFPS);
+
+ if ( ( 0 > minFPS ) || ( 0 > maxFPS ) )
+ {
+ CAMHAL_LOGEA("FPS Range is negative!");
+ return -EINVAL;
+ }
+
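+        // The FPS range from the app follows the Android convention of scaled-up values
+        // (typically fps x 1000), so divide by VFR_SCALE to get back to plain fps.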
+ minFPS /= CameraHal::VFR_SCALE;
+ maxFPS /= CameraHal::VFR_SCALE;
+
+ if ( ( 0 == minFPS ) || ( 0 == maxFPS ) )
+ {
+ CAMHAL_LOGEA("FPS Range is invalid!");
+ return -EINVAL;
+ }
+
+ if ( maxFPS < minFPS )
+ {
+ CAMHAL_LOGEA("Max FPS is smaller than Min FPS!");
+ return -EINVAL;
+ }
+
+ if ( maxFPS > framerate )
+ {
+ framerate = maxFPS;
+ mParameters.setPreviewFrameRate(framerate);
+ }
+
+ CAMHAL_LOGDB("FPS Range [%d, %d]", minFPS, maxFPS);
+ mParameters.set(TICameraParameters::KEY_MINFRAMERATE, minFPS);
+ mParameters.set(TICameraParameters::KEY_MAXFRAMERATE, maxFPS);
+ mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
+ }
+
+ if( ( valstr = params.get(TICameraParameters::KEY_GBCE) ) != NULL )
+ {
+ CAMHAL_LOGDB("GBCE Value = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GBCE, valstr);
+ }
+
+ if( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL )
+ {
+ CAMHAL_LOGDB("GLBCE Value = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GLBCE, valstr);
+ }
+
+ ///Update the current parameter set
+ if( (valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE)) != NULL)
+ {
+ CAMHAL_LOGDB("AutoConvergence Mode is set = %s", params.get(TICameraParameters::KEY_AUTOCONVERGENCE));
+ mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE, valstr);
+ }
+
+// if(params.get(TICameraParameters::KEY_AUTOCONVERGENCE_MODE)!=NULL)
+// {
+// CAMHAL_LOGDB("AutoConvergence Mode is set = %s", params.get(TICameraParameters::KEY_AUTOCONVERGENCE_MODE));
+// mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, params.get(TICameraParameters::KEY_AUTOCONVERGENCE_MODE));
+// }
+
+ if( (valstr = params.get(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES)) !=NULL )
+ {
+ CAMHAL_LOGDB("ManualConvergence Value = %s", params.get(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES));
+ mParameters.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, valstr);
+ }
+
+ if( ((valstr = params.get(TICameraParameters::KEY_EXPOSURE_MODE)) != NULL)
+ && isParameterValid(params.get(TICameraParameters::KEY_EXPOSURE_MODE),
+ mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES)))
+ {
+ CAMHAL_LOGDB("Exposure set = %s", params.get(TICameraParameters::KEY_EXPOSURE_MODE));
+ mParameters.set(TICameraParameters::KEY_EXPOSURE_MODE, valstr);
+ }
+
+ if( ((valstr = params.get(CameraParameters::KEY_WHITE_BALANCE)) != NULL)
+ && isParameterValid(params.get(CameraParameters::KEY_WHITE_BALANCE),
+ mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE)))
+ {
+ CAMHAL_LOGDB("White balance set %s", params.get(CameraParameters::KEY_WHITE_BALANCE));
+ mParameters.set(CameraParameters::KEY_WHITE_BALANCE, valstr);
+ }
+
+ if( ((valstr = params.get(TICameraParameters::KEY_CONTRAST)) != NULL)
+ && (params.getInt(TICameraParameters::KEY_CONTRAST) >= 0 ))
+ {
+ CAMHAL_LOGDB("Contrast set %s", params.get(TICameraParameters::KEY_CONTRAST));
+ mParameters.set(TICameraParameters::KEY_CONTRAST, valstr);
+ }
+
+    if( ((valstr = params.get(TICameraParameters::KEY_SHARPNESS)) != NULL)
+        && (params.getInt(TICameraParameters::KEY_SHARPNESS) >= 0 ))
+ {
+ CAMHAL_LOGDB("Sharpness set %s", params.get(TICameraParameters::KEY_SHARPNESS));
+ mParameters.set(TICameraParameters::KEY_SHARPNESS, valstr);
+ }
+
+
+ if( ((valstr = params.get(TICameraParameters::KEY_SATURATION)) != NULL)
+ && (params.getInt(TICameraParameters::KEY_SATURATION) >= 0 ) )
+ {
+ CAMHAL_LOGDB("Saturation set %s", params.get(TICameraParameters::KEY_SATURATION));
+ mParameters.set(TICameraParameters::KEY_SATURATION, valstr);
+ }
+
+ if( ((valstr = params.get(TICameraParameters::KEY_BRIGHTNESS)) != NULL)
+ && (params.getInt(TICameraParameters::KEY_BRIGHTNESS) >= 0 ))
+ {
+ CAMHAL_LOGDB("Brightness set %s", params.get(TICameraParameters::KEY_BRIGHTNESS));
+ mParameters.set(TICameraParameters::KEY_BRIGHTNESS, valstr);
+ }
+
+
+ if( ((valstr = params.get(CameraParameters::KEY_ANTIBANDING)) != NULL)
+ && isParameterValid(params.get(CameraParameters::KEY_ANTIBANDING),
+ mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING)))
+ {
+ CAMHAL_LOGDB("Antibanding set %s", params.get(CameraParameters::KEY_ANTIBANDING));
+ mParameters.set(CameraParameters::KEY_ANTIBANDING, valstr);
+ }
+
+ if( ((valstr = params.get(TICameraParameters::KEY_ISO)) != NULL)
+ && isParameterValid(params.get(TICameraParameters::KEY_ISO),
+ mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES)))
+ {
+ CAMHAL_LOGDB("ISO set %s", params.get(TICameraParameters::KEY_ISO));
+ mParameters.set(TICameraParameters::KEY_ISO, valstr);
+ }
+
+ if( ((valstr = params.get(CameraParameters::KEY_FOCUS_MODE)) != NULL)
+ && isParameterValid(params.get(CameraParameters::KEY_FOCUS_MODE),
+ mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES)))
+ {
+ CAMHAL_LOGDB("Focus mode set %s", params.get(CameraParameters::KEY_FOCUS_MODE));
+ mParameters.set(CameraParameters::KEY_FOCUS_MODE, valstr);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_FOCUS_AREAS)) != NULL )
+ {
+ CAMHAL_LOGEB("Focus areas position set %s", params.get(CameraParameters::KEY_FOCUS_AREAS));
+ mParameters.set(CameraParameters::KEY_FOCUS_AREAS, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
+ {
+ CAMHAL_LOGDB("Measurements set to %s", params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE));
+ mParameters.set(TICameraParameters::KEY_MEASUREMENT_ENABLE, valstr);
+
+ if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_ENABLE) == 0)
+ {
+ mMeasurementEnabled = true;
+ }
+ else if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_DISABLE) == 0)
+ {
+ mMeasurementEnabled = false;
+ }
+ else
+ {
+ mMeasurementEnabled = false;
+ }
+
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION)) != NULL)
+ {
+ CAMHAL_LOGDB("Exposure compensation set %s", params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION));
+ mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_SCENE_MODE)) != NULL)
+ && isParameterValid(params.get(CameraParameters::KEY_SCENE_MODE),
+ mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES)))
+ {
+ CAMHAL_LOGDB("Scene mode set %s", params.get(CameraParameters::KEY_SCENE_MODE));
+ mParameters.set(CameraParameters::KEY_SCENE_MODE, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_FLASH_MODE)) != NULL)
+ && isParameterValid(params.get(CameraParameters::KEY_FLASH_MODE),
+ mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES)))
+ {
+ CAMHAL_LOGDB("Flash mode set %s", params.get(CameraParameters::KEY_FLASH_MODE));
+ mParameters.set(CameraParameters::KEY_FLASH_MODE, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_EFFECT)) != NULL)
+ && isParameterValid(params.get(CameraParameters::KEY_EFFECT),
+ mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS)))
+ {
+ CAMHAL_LOGDB("Effect set %s", params.get(CameraParameters::KEY_EFFECT));
+ mParameters.set(CameraParameters::KEY_EFFECT, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_ROTATION)) != NULL)
+ && (params.getInt(CameraParameters::KEY_ROTATION) >=0))
+ {
+ CAMHAL_LOGDB("Rotation set %s", params.get(CameraParameters::KEY_ROTATION));
+ mParameters.set(CameraParameters::KEY_ROTATION, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_JPEG_QUALITY)) != NULL)
+ && (params.getInt(CameraParameters::KEY_JPEG_QUALITY) >=0))
+ {
+ CAMHAL_LOGDB("Jpeg quality set %s", params.get(CameraParameters::KEY_JPEG_QUALITY));
+ mParameters.set(CameraParameters::KEY_JPEG_QUALITY, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH)) != NULL)
+ && (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH) >=0))
+ {
+ CAMHAL_LOGDB("Thumbnail width set %s", params.get(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH));
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT)) != NULL)
+ && (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT) >=0))
+ {
+ CAMHAL_LOGDB("Thumbnail width set %s", params.get(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT));
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY)) != NULL )
+ && (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) >=0))
+ {
+ CAMHAL_LOGDB("Thumbnail quality set %s", params.get(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY));
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, valstr);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_LATITUDE)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS latitude set %s", params.get(CameraParameters::KEY_GPS_LATITUDE));
+ mParameters.set(CameraParameters::KEY_GPS_LATITUDE, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_LATITUDE);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS longitude set %s", params.get(CameraParameters::KEY_GPS_LONGITUDE));
+ mParameters.set(CameraParameters::KEY_GPS_LONGITUDE, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_LONGITUDE);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS altitude set %s", params.get(CameraParameters::KEY_GPS_ALTITUDE));
+ mParameters.set(CameraParameters::KEY_GPS_ALTITUDE, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_ALTITUDE);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS timestamp set %s", params.get(CameraParameters::KEY_GPS_TIMESTAMP));
+ mParameters.set(CameraParameters::KEY_GPS_TIMESTAMP, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_TIMESTAMP);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_GPS_DATESTAMP)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS datestamp set %s", params.get(TICameraParameters::KEY_GPS_DATESTAMP));
+ mParameters.set(TICameraParameters::KEY_GPS_DATESTAMP, valstr);
+ }else{
+ mParameters.remove(TICameraParameters::KEY_GPS_DATESTAMP);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS processing method set %s", params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD));
+ mParameters.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_GPS_MAPDATUM )) != NULL )
+ {
+ CAMHAL_LOGDB("GPS MAPDATUM set %s", params.get(TICameraParameters::KEY_GPS_MAPDATUM));
+ mParameters.set(TICameraParameters::KEY_GPS_MAPDATUM, valstr);
+ }else{
+ mParameters.remove(TICameraParameters::KEY_GPS_MAPDATUM);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_GPS_VERSION)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS MAPDATUM set %s", params.get(TICameraParameters::KEY_GPS_VERSION));
+ mParameters.set(TICameraParameters::KEY_GPS_VERSION, valstr);
+ }else{
+ mParameters.remove(TICameraParameters::KEY_GPS_VERSION);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_EXIF_MODEL)) != NULL )
+ {
+ CAMHAL_LOGDB("EXIF Model set %s", params.get(TICameraParameters::KEY_EXIF_MODEL));
+ mParameters.set(TICameraParameters::KEY_EXIF_MODEL, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_EXIF_MAKE)) != NULL )
+ {
+ CAMHAL_LOGDB("EXIF Make set %s", params.get(TICameraParameters::KEY_EXIF_MAKE));
+ mParameters.set(TICameraParameters::KEY_EXIF_MAKE, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE)) != NULL )
+ {
+ CAMHAL_LOGDB("Exposure Bracketing set %s", params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE));
+ mParameters.set(TICameraParameters::KEY_EXP_BRACKETING_RANGE, valstr);
+ }
+ else
+ {
+ mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
+ }
+
+ if( ( (valstr = params.get(CameraParameters::KEY_ZOOM)) != NULL )
+ && (params.getInt(CameraParameters::KEY_ZOOM) >= 0 )
+ && (params.getInt(CameraParameters::KEY_ZOOM) <= mMaxZoomSupported ) )
+ {
+ CAMHAL_LOGDB("Zoom set %s", params.get(CameraParameters::KEY_ZOOM));
+ mParameters.set(CameraParameters::KEY_ZOOM, valstr);
+ }
+ else
+ {
+ //CTS requirement: Invalid zoom values should always return an error.
+ ret = -EINVAL;
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK)) != NULL )
+ {
+ CAMHAL_LOGDB("Auto Exposure Lock set %s", params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK));
+ mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, valstr);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)) != NULL )
+ {
+ CAMHAL_LOGDB("Auto WhiteBalance Lock set %s", params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK));
+ mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, valstr);
+ }
+
+ CameraParameters adapterParams = mParameters;
+
+ //If the app has not set the capture mode, set the capture resolution as preview resolution
+ //so that black bars are not displayed in preview.
+ //Later in takePicture we will configure the correct picture size
+ if(params.get(TICameraParameters::KEY_CAP_MODE) == NULL)
+ {
+ CAMHAL_LOGDA("Capture mode not set by app, setting picture res to preview res");
+ mParameters.getPreviewSize(&w, &h);
+ adapterParams.setPictureSize(w,h);
+ }
+
+    // Only send parameters to the adapter if preview is already
+    // enabled. The initial setParameters call to the camera adapter
+    // will be made in startPreview()
+ if ( NULL != mCameraAdapter && mPreviewEnabled ) {
+ ret |= mCameraAdapter->setParameters(adapterParams);
+ }
+
+ if( NULL != params.get(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS) )
+ {
+ int posBracketRange = params.getInt(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS);
+ if ( 0 < posBracketRange )
+ {
+ mBracketRangePositive = posBracketRange;
+ }
+ }
+ CAMHAL_LOGDB("Positive bracketing range %d", mBracketRangePositive);
+
+
+ if( NULL != params.get(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG) )
+ {
+ int negBracketRange = params.getInt(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG);
+ if ( 0 < negBracketRange )
+ {
+ mBracketRangeNegative = negBracketRange;
+ }
+ }
+ CAMHAL_LOGDB("Negative bracketing range %d", mBracketRangeNegative);
+
+ if( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL) &&
+ ( strcmp(valstr, TICameraParameters::BRACKET_ENABLE) == 0 ))
+ {
+ if ( !mBracketingEnabled )
+ {
+ CAMHAL_LOGDA("Enabling bracketing");
+ mBracketingEnabled = true;
+
+ //Wait for AF events to enable bracketing
+ if ( NULL != mCameraAdapter )
+ {
+ setEventProvider( CameraHalEvent::ALL_EVENTS, mCameraAdapter );
+ }
+ }
+ else
+ {
+ CAMHAL_LOGDA("Bracketing already enabled");
+ }
+ }
+ else if ( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL ) &&
+ ( strcmp(valstr, TICameraParameters::BRACKET_DISABLE) == 0 ))
+ {
+ CAMHAL_LOGDA("Disabling bracketing");
+
+ mBracketingEnabled = false;
+ stopImageBracketing();
+
+ //Remove AF events subscription
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification( CameraHalEvent::ALL_EVENTS );
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ }
+
+ if( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
+ ( strcmp(valstr, TICameraParameters::SHUTTER_ENABLE) == 0 ))
+ {
+ CAMHAL_LOGDA("Enabling shutter sound");
+
+ mShutterEnabled = true;
+ mMsgEnabled |= CAMERA_MSG_SHUTTER;
+ mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr);
+ }
+ else if ( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
+ ( strcmp(valstr, TICameraParameters::SHUTTER_DISABLE) == 0 ))
+ {
+ CAMHAL_LOGDA("Disabling shutter sound");
+
+ mShutterEnabled = false;
+ mMsgEnabled &= ~CAMERA_MSG_SHUTTER;
+ mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+
+ }
+
+status_t CameraHal::allocPreviewBufs(int width, int height, const char* previewFormat,
+ unsigned int buffercount, unsigned int &max_queueable)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if(mDisplayAdapter.get() == NULL)
+ {
+ // Memory allocation of preview buffers is now placed in gralloc
+ // CameraHal should not allocate preview buffers without DisplayAdapter
+ return NO_MEMORY;
+ }
+
+ if(!mPreviewBufs)
+ {
+ ///@todo Pluralise the name of this method to allocateBuffers
+ mPreviewLength = 0;
+ mPreviewBufs = (int32_t *) mDisplayAdapter->allocateBuffer(width, height,
+ previewFormat,
+ mPreviewLength,
+ buffercount);
+
+ if (NULL == mPreviewBufs ) {
+ CAMHAL_LOGEA("Couldn't allocate preview buffers");
+ return NO_MEMORY;
+ }
+
+ mPreviewOffsets = (uint32_t *) mDisplayAdapter->getOffsets();
+ if ( NULL == mPreviewOffsets ) {
+ CAMHAL_LOGEA("Buffer mapping failed");
+ return BAD_VALUE;
+ }
+
+ mPreviewFd = mDisplayAdapter->getFd();
+ if ( -1 == mPreviewFd ) {
+ CAMHAL_LOGEA("Invalid handle");
+ return BAD_VALUE;
+ }
+
+ mBufProvider = (BufferProvider*) mDisplayAdapter.get();
+
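+        // Ask the display adapter how many of these buffers can be kept queued in the
+        // native window at once; startPreview() forwards this limit to the camera
+        // adapter as desc.mMaxQueueable.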
+ ret = mDisplayAdapter->maxQueueableBuffers(max_queueable);
+ if (ret != NO_ERROR) {
+ return ret;
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+
+}
+
+status_t CameraHal::freePreviewBufs()
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGDB("mPreviewBufs = 0x%x", (unsigned int)mPreviewBufs);
+ if(mPreviewBufs)
+ {
+ ///@todo Pluralise the name of this method to freeBuffers
+ ret = mBufProvider->freeBuffer(mPreviewBufs);
+ mPreviewBufs = NULL;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+
+status_t CameraHal::allocPreviewDataBufs(size_t size, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+ int bytes;
+
+ LOG_FUNCTION_NAME;
+
+ bytes = size;
+
+ if ( NO_ERROR == ret )
+ {
+ if( NULL != mPreviewDataBufs )
+ {
+ ret = freePreviewDataBufs();
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
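+        // Measurement (preview data) buffers are plain MemoryManager allocations;
+        // unlike the preview buffers, they never go through the display adapter.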
+ mPreviewDataBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, NULL, bytes, bufferCount);
+
+ CAMHAL_LOGDB("Size of Preview data buffer = %d", bytes);
+ if( NULL == mPreviewDataBufs )
+ {
+ CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
+            ret = NO_MEMORY;
+ }
+ else
+ {
+ bytes = size;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mPreviewDataFd = mMemoryManager->getFd();
+ mPreviewDataLength = bytes;
+ mPreviewDataOffsets = mMemoryManager->getOffsets();
+ }
+ else
+ {
+ mPreviewDataFd = -1;
+ mPreviewDataLength = 0;
+ mPreviewDataOffsets = NULL;
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::freePreviewDataBufs()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret )
+ {
+
+ if( NULL != mPreviewDataBufs )
+ {
+
+ ///@todo Pluralise the name of this method to freeBuffers
+ ret = mMemoryManager->freeBuffer(mPreviewDataBufs);
+ mPreviewDataBufs = NULL;
+
+ }
+ else
+ {
+ CAMHAL_LOGEA("Couldn't free PreviewDataBufs allocated by memory manager");
+ ret = -EINVAL;
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size_t size, const char* previewFormat, unsigned int bufferCount)
+{
+ status_t ret = NO_ERROR;
+ int bytes;
+
+ LOG_FUNCTION_NAME;
+
+ bytes = size;
+
+ ///Always allocate the buffers for image capture using MemoryManager
+ if ( NO_ERROR == ret )
+ {
+ if( ( NULL != mImageBufs ) )
+ {
+ ret = freeImageBufs();
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mImageBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, previewFormat, bytes, bufferCount);
+
+ CAMHAL_LOGDB("Size of Image cap buffer = %d", bytes);
+ if( NULL == mImageBufs )
+ {
+ CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
+            ret = NO_MEMORY;
+ }
+ else
+ {
+ bytes = size;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mImageFd = mMemoryManager->getFd();
+ mImageLength = bytes;
+ mImageOffsets = mMemoryManager->getOffsets();
+ }
+ else
+ {
+ mImageFd = -1;
+ mImageLength = 0;
+ mImageOffsets = NULL;
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void endImageCapture( void *userData)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != userData )
+ {
+ CameraHal *c = reinterpret_cast<CameraHal *>(userData);
+ c->signalEndImageCapture();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void releaseImageBuffers(void *userData)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != userData )
+ {
+ CameraHal *c = reinterpret_cast<CameraHal *>(userData);
+ c->freeImageBufs();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t CameraHal::signalEndImageCapture()
+{
+ status_t ret = NO_ERROR;
+ int w,h;
+ CameraParameters adapterParams = mParameters;
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME;
+
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
+
+ //If the app has not set the capture mode, restore the capture resolution
+ //back to the preview resolution to get rid of the black bars issue
+ if (mParameters.get(TICameraParameters::KEY_CAP_MODE) == NULL) {
+ CAMHAL_LOGDA("Capture mode not set by app, setting picture res back to preview res");
+ mParameters.getPreviewSize(&w, &h);
+ adapterParams.setPictureSize(w,h);
+ ret = mCameraAdapter->setParameters(adapterParams);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::freeImageBufs()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret )
+ {
+
+ if( NULL != mImageBufs )
+ {
+
+ ///@todo Pluralise the name of this method to freeBuffers
+ ret = mMemoryManager->freeBuffer(mImageBufs);
+ mImageBufs = NULL;
+
+ }
+ else
+ {
+ ret = -EINVAL;
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+
+/**
+ @brief Start preview mode.
+
+ @param none
+ @return NO_ERROR Camera switched to VF mode
+ @todo Update function header with the different errors that are possible
+
+ */
+status_t CameraHal::startPreview()
+{
+
+ status_t ret = NO_ERROR;
+ CameraAdapter::BuffersDescriptor desc;
+ CameraFrame frame;
+ const char *valstr = NULL;
+ unsigned int required_buffer_count;
+    unsigned int max_queueable_buffers;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartPreview, NULL);
+
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if( (mDisplayAdapter.get() != NULL) && ( !mPreviewEnabled ) && ( mDisplayPaused ) )
+ {
+ CAMHAL_LOGDA("Preview is in paused state");
+
+ mDisplayPaused = false;
+ mPreviewEnabled = true;
+ if ( NO_ERROR == ret )
+ {
+ ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Display adapter resume failed %x", ret);
+ }
+ }
+ //restart preview callbacks
+ if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
+ {
+ mAppCallbackNotifier->enableMsgType (CAMERA_MSG_PREVIEW_FRAME);
+ }
+ return ret;
+
+ }
+ else if ( mPreviewEnabled )
+ {
+ CAMHAL_LOGDA("Preview already running");
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ALREADY_EXISTS;
+ }
+
+    ///If the preview window has not been set yet, or the display adapter is not created,
+    ///just note that preview start is pending; setPreviewWindow() will start the preview later.
+ if(!mSetPreviewWindowCalled || (mDisplayAdapter.get() == NULL))
+ {
+ CAMHAL_LOGEA("Preview not started. Preview in progress flag set");
+ mPreviewStartInProgress = true;
+ return NO_ERROR;
+ }
+
+ if ( NULL != mCameraAdapter ) {
+
+ CameraParameters adapterParams = mParameters;
+
+ //If the app has not set the capture mode, set the capture resolution as preview resolution
+ //so that black bars are not displayed in preview.
+ //Later in takePicture we will configure the correct picture size
+ if(mParameters.get(TICameraParameters::KEY_CAP_MODE) == NULL)
+ {
+ int w,h;
+ CAMHAL_LOGDA("Capture mode not set by app, setting picture res to preview res");
+ mParameters.getPreviewSize(&w, &h);
+ adapterParams.setPictureSize(w,h);
+ }
+
+ ret = mCameraAdapter->setParameters(adapterParams);
+ }
+
+ /// Ensure that buffers for preview are allocated before we start the camera
+ ///Get the updated size from Camera Adapter, to account for padding etc
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW,
+ ( int ) &frame);
+ if ( NO_ERROR != ret )
+ {
+ return ret;
+ }
+
+ ///Update the current preview width and height
+ mPreviewWidth = frame.mWidth;
+ mPreviewHeight = frame.mHeight;
+
+ //Update the padded width and height - required for VNF and VSTAB
+ mParameters.set(TICameraParameters::KEY_PADDED_WIDTH, mPreviewWidth);
+ mParameters.set(TICameraParameters::KEY_PADDED_HEIGHT, mPreviewHeight);
+
+ required_buffer_count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
+
+ ///Allocate the preview buffers
+    ret = allocPreviewBufs(frame.mWidth, frame.mHeight, mParameters.getPreviewFormat(), required_buffer_count, max_queueable_buffers);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEA("Couldn't allocate buffers for Preview");
+ goto error;
+ }
+
+ if ( mMeasurementEnabled )
+ {
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA,
+ ( int ) &frame,
+ required_buffer_count);
+ if ( NO_ERROR != ret )
+ {
+ return ret;
+ }
+
+ ///Allocate the preview data buffers
+ ret = allocPreviewDataBufs(frame.mLength, required_buffer_count);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEA("Couldn't allocate preview data buffers");
+ goto error;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ desc.mBuffers = mPreviewDataBufs;
+ desc.mOffsets = mPreviewDataOffsets;
+ desc.mFd = mPreviewDataFd;
+ desc.mLength = mPreviewDataLength;
+ desc.mCount = ( size_t ) required_buffer_count;
+ desc.mMaxQueueable = (size_t) required_buffer_count;
+
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA,
+ ( int ) &desc);
+ }
+
+ }
+
+ ///Pass the buffers to Camera Adapter
+ desc.mBuffers = mPreviewBufs;
+ desc.mOffsets = mPreviewOffsets;
+ desc.mFd = mPreviewFd;
+ desc.mLength = mPreviewLength;
+ desc.mCount = ( size_t ) required_buffer_count;
+    desc.mMaxQueueable = (size_t) max_queueable_buffers;
+
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW,
+ ( int ) &desc);
+
+ mAppCallbackNotifier->startPreviewCallbacks(mParameters, mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, required_buffer_count);
+
+ ///Start the callback notifier
+ ret = mAppCallbackNotifier->start();
+
+ if( ALREADY_EXISTS == ret )
+ {
+ //Already running, do nothing
+ CAMHAL_LOGDA("AppCallbackNotifier already running");
+ ret = NO_ERROR;
+ }
+ else if ( NO_ERROR == ret ) {
+ CAMHAL_LOGDA("Started AppCallbackNotifier..");
+ mAppCallbackNotifier->setMeasurements(mMeasurementEnabled);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Couldn't start AppCallbackNotifier");
+ goto error;
+ }
+
+ ///Enable the display adapter if present, actual overlay enable happens when we post the buffer
+ if(mDisplayAdapter.get() != NULL)
+ {
+ CAMHAL_LOGDA("Enabling display");
+ bool isS3d = false;
+ DisplayAdapter::S3DParameters s3dParams;
+ int width, height;
+ mParameters.getPreviewSize(&width, &height);
+#if 0 //TODO: s3d is not part of bringup...will reenable
+ if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_SUPPORTED)) != NULL) {
+ isS3d = (strcmp(valstr, "true") == 0);
+ }
+ if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D2D_PREVIEW)) != NULL) {
+ if (strcmp(valstr, "off") == 0)
+ {
+ CAMHAL_LOGEA("STEREO 3D->2D PREVIEW MODE IS OFF");
+ //TODO: obtain the frame packing configuration from camera or user settings
+ //once side by side configuration is supported
+ s3dParams.mode = OVERLAY_S3D_MODE_ON;
+ s3dParams.framePacking = OVERLAY_S3D_FORMAT_OVERUNDER;
+ s3dParams.order = OVERLAY_S3D_ORDER_LF;
+ s3dParams.subSampling = OVERLAY_S3D_SS_NONE;
+ }
+ else
+ {
+ CAMHAL_LOGEA("STEREO 3D->2D PREVIEW MODE IS ON");
+ s3dParams.mode = OVERLAY_S3D_MODE_OFF;
+ s3dParams.framePacking = OVERLAY_S3D_FORMAT_OVERUNDER;
+ s3dParams.order = OVERLAY_S3D_ORDER_LF;
+ s3dParams.subSampling = OVERLAY_S3D_SS_NONE;
+ }
+ }
+#endif //if 0
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ ret = mDisplayAdapter->enableDisplay(width, height, &mStartPreview, isS3d ? &s3dParams : NULL);
+
+#else
+
+ ret = mDisplayAdapter->enableDisplay(width, height, NULL, isS3d ? &s3dParams : NULL);
+
+#endif
+
+ if ( ret != NO_ERROR )
+ {
+ CAMHAL_LOGEA("Couldn't enable display");
+ goto error;
+ }
+
+ }
+
+ ///Send START_PREVIEW command to adapter
+ CAMHAL_LOGDA("Starting CameraAdapter preview mode");
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW);
+
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEA("Couldn't start preview w/ CameraAdapter");
+ goto error;
+ }
+ CAMHAL_LOGDA("Started preview");
+
+
+ mPreviewEnabled = true;
+ mPreviewStartInProgress = false;
+ return ret;
+
+ error:
+
+ CAMHAL_LOGEA("Performing cleanup after error");
+
+ //Do all the cleanup
+ freePreviewBufs();
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_PREVIEW);
+ if(mDisplayAdapter.get() != NULL)
+ {
+ mDisplayAdapter->disableDisplay();
+ }
+ mAppCallbackNotifier->stop();
+ mPreviewStartInProgress = false;
+ mPreviewEnabled = false;
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Sets ANativeWindow object.
+
+   Preview buffers are provided to CameraHal via this object. The DisplayAdapter
+   interfaces with it to render buffers to the display.
+
+   @param[in] window The ANativeWindow object created by SurfaceFlinger
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::setPreviewWindow(struct preview_stream_ops *window)
+{
+ status_t ret = NO_ERROR;
+ CameraAdapter::BuffersDescriptor desc;
+
+ LOG_FUNCTION_NAME;
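+    // startPreview() defers the actual preview start until this flag is set and a
+    // display adapter exists.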
+ mSetPreviewWindowCalled = true;
+
+ ///If the Camera service passes a null window, we destroy existing window and free the DisplayAdapter
+ if(!window)
+ {
+ if(mDisplayAdapter.get() != NULL)
+ {
+ ///NULL window passed, destroy the display adapter if present
+ CAMHAL_LOGEA("NULL window passed, destroying display adapter");
+ mDisplayAdapter.clear();
+ ///@remarks If there was a window previously existing, we usually expect another valid window to be passed by the client
+ ///@remarks so, we will wait until it passes a valid window to begin the preview again
+ mSetPreviewWindowCalled = false;
+ }
+ CAMHAL_LOGEA("NULL ANativeWindow passed to setPreviewWindow");
+ return NO_ERROR;
+ }else if(mDisplayAdapter.get() == NULL)
+ {
+ // Need to create the display adapter since it has not been created
+ // Create display adapter
+ mDisplayAdapter = new ANativeWindowDisplayAdapter();
+ ret = NO_ERROR;
+ if(!mDisplayAdapter.get() || ((ret=mDisplayAdapter->initialize())!=NO_ERROR))
+ {
+ if(ret!=NO_ERROR)
+ {
+ mDisplayAdapter.clear();
+ CAMHAL_LOGEA("DisplayAdapter initialize failed");
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ else
+ {
+ CAMHAL_LOGEA("Couldn't create DisplayAdapter");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_MEMORY;
+ }
+ }
+
+        // DisplayAdapter needs to know where to get the CameraFrames from in order to display
+ // Since CameraAdapter is the one that provides the frames, set it as the frame provider for DisplayAdapter
+ mDisplayAdapter->setFrameProvider(mCameraAdapter);
+
+ // Any dynamic errors that happen during the camera use case has to be propagated back to the application
+ // via CAMERA_MSG_ERROR. AppCallbackNotifier is the class that notifies such errors to the application
+ // Set it as the error handler for the DisplayAdapter
+ mDisplayAdapter->setErrorHandler(mAppCallbackNotifier.get());
+
+ // Update the display adapter with the new window that is passed from CameraService
+ ret = mDisplayAdapter->setPreviewWindow(window);
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret);
+ }
+
+ if(mPreviewStartInProgress)
+ {
+ CAMHAL_LOGDA("setPreviewWindow called when preview running");
+ // Start the preview since the window is now available
+ ret = startPreview();
+ }
+ }else
+ {
+        /* If mDisplayAdapter is already created, there is no need to do anything.
+         * We now get a surface handle directly, so the surface itself can be
+         * reconfigured in DisplayAdapter if the dimensions have changed.
+         */
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+
+}
+
+
+/**
+ @brief Stop a previously started preview.
+
+ @param none
+ @return none
+
+ */
+void CameraHal::stopPreview()
+{
+ LOG_FUNCTION_NAME;
+
+ if(!previewEnabled() && !mDisplayPaused)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return;
+ }
+
+ if(mDisplayPaused)
+ {
+ // Display is paused, which essentially means there is no preview active.
+ // Note: this is done so that when stopPreview is called by client after
+ // an image capture, we do not de-initialize the camera adapter and
+ // restart over again.
+ return;
+ }
+
+ forceStopPreview();
+
+ CAMHAL_LOGDA("Resetting Capture-Mode to default");
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, "");
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Returns true if preview is enabled
+
+ @param none
+ @return true If preview is running currently
+ false If preview has been stopped
+
+ */
+bool CameraHal::previewEnabled()
+{
+ LOG_FUNCTION_NAME;
+
+ return (mPreviewEnabled || mPreviewStartInProgress);
+}
+
+/**
+ @brief Start record mode.
+
+   When a recording frame is available, a CAMERA_MSG_VIDEO_FRAME message is sent with
+   the corresponding frame. Every recording frame must be released by calling
+   releaseRecordingFrame().
+
+ @param none
+ @return NO_ERROR If recording could be started without any issues
+ @todo Update the header with possible error values in failure scenarios
+
+ */
+status_t CameraHal::startRecording( )
+{
+ int w, h;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartPreview, NULL);
+
+#endif
+
+ if(!previewEnabled())
+ {
+ return NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = setVideoModeParameters();
+ }
+
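+    // Recording shares the preview buffers by default: hand them to AppCallbackNotifier
+    // so it can wrap them as video frames for CAMERA_MSG_VIDEO_FRAME.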
+ if ( NO_ERROR == ret )
+ {
+ ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS)));
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = mAppCallbackNotifier->startRecording();
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ///Buffers for video capture (if different from preview) are expected to be allocated within CameraAdapter
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_VIDEO);
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mRecordingEnabled = true;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Set the camera parameters specific to Video Recording.
+
+ This function checks for the camera parameters which have to be set for recording.
+ Video Recording needs CAPTURE_MODE to be VIDEO_MODE. This function sets it.
+ This function also enables Video Recording specific functions like VSTAB & VNF.
+
+ @param none
+ @return NO_ERROR If recording parameters could be set without any issues
+   @todo Make the policies for enabling VSTAB & VNF use-case based later.
+
+ */
+status_t CameraHal::setVideoModeParameters()
+{
+ const char *valstr = NULL;
+ bool restartPreviewRequired = false;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ // Set CAPTURE_MODE to VIDEO_MODE, if not set already and Restart Preview
+ valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ if ( (valstr == NULL) ||
+ ( (valstr != NULL) && (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) != 0) ) )
+ {
+ CAMHAL_LOGDA("Set CAPTURE_MODE to VIDEO_MODE");
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, (const char *) TICameraParameters::VIDEO_MODE);
+ restartPreviewRequired = true;
+ }
+
+ // FIXME: This check is put since currently VSTAB and VNF are functional only for Primary Camera.
+ // Remove this check once VSTAB and VNF are functional for Secondary Camera as well.
+ if(mCameraIndex == 0)
+ {
+ // Check if CAPTURE_MODE is VIDEO_MODE, since VSTAB & VNF work only in VIDEO_MODE.
+ valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0)
+ {
+ // Enable VSTAB, if not enabled already
+ valstr = mParameters.get(TICameraParameters::KEY_VSTAB);
+ if ( (valstr == NULL) ||
+ ( (valstr != NULL) && (strcmp(valstr, "1") != 0) ) )
+ {
+ CAMHAL_LOGDA("Enable VSTAB");
+ mParameters.set(TICameraParameters::KEY_VSTAB, "1");
+ restartPreviewRequired = true;
+ }
+
+ // Enable VNF, if not enabled already
+ valstr = mParameters.get(TICameraParameters::KEY_VNF);
+ if ( (valstr == NULL) ||
+ ( (valstr != NULL) && (strcmp(valstr, "1") != 0) ) )
+ {
+ CAMHAL_LOGDA("Enable VNF");
+ mParameters.set(TICameraParameters::KEY_VNF, "1");
+ restartPreviewRequired = true;
+ }
+
+ // For VSTAB alone for 1080p resolution, padded width goes > 2048, which cannot be rendered by GPU.
+ // In such case, there is support in Ducati for combination of VSTAB & VNF requiring padded width < 2048.
+ // So we are forcefully enabling VNF, if VSTAB is enabled for 1080p resolution.
+ valstr = mParameters.get(TICameraParameters::KEY_VSTAB);
+ if ((valstr != NULL) && (strcmp(valstr, "1") == 0) && (mPreviewWidth == 1920))
+ {
+ CAMHAL_LOGDA("Force Enable VNF for 1080p");
+ mParameters.set(TICameraParameters::KEY_VNF, "1");
+ restartPreviewRequired = true;
+ }
+ }
+ }
+
+ if (restartPreviewRequired)
+ {
+ CAMHAL_LOGDA("Restarting preview");
+ stopPreview();
+ // Setting CAPTURE_MODE to VIDEO_MODE again, since it is reset in stopPreview()
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, (const char *) TICameraParameters::VIDEO_MODE);
+ mCameraAdapter->setParameters(mParameters);
+ ret = startPreview();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Stop a previously started recording.
+
+ @param none
+ @return none
+
+ */
+void CameraHal::stopRecording()
+{
+ LOG_FUNCTION_NAME;
+
+ if (!mRecordingEnabled )
+ {
+ return;
+ }
+
+ mAppCallbackNotifier->stopRecording();
+
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_VIDEO);
+
+ mRecordingEnabled = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Returns true if recording is enabled.
+
+ @param none
+ @return true If recording is currently running
+ false If recording has been stopped
+
+ */
+int CameraHal::recordingEnabled()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mRecordingEnabled;
+}
+
+/**
+ @brief Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+
+ @param[in] mem MemoryBase pointer to the frame being released. Must be one of the buffers
+ previously given by CameraHal
+ @return none
+
+ */
+void CameraHal::releaseRecordingFrame(const void* mem)
+{
+ LOG_FUNCTION_NAME;
+
+ //CAMHAL_LOGDB(" 0x%x", mem->pointer());
+
+ if ( ( mRecordingEnabled ) && mem != NULL)
+ {
+ mAppCallbackNotifier->releaseRecordingFrame(mem);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return;
+}
+
+/**
+ @brief Start auto focus
+
+   This call is asynchronous.
+ The notification callback routine is called with CAMERA_MSG_FOCUS once when
+ focusing is complete. autoFocus() will be called again if another auto focus is
+ needed.
+
+ @param none
+ @return NO_ERROR
+ @todo Define the error codes if the focus is not locked
+
+ */
+status_t CameraHal::autoFocus()
+{
+ status_t ret = NO_ERROR;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartFocus, NULL);
+
+#endif
+
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != mCameraAdapter )
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //pass the autoFocus timestamp along with the command to camera adapter
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_PERFORM_AUTOFOCUS, ( int ) &mStartFocus);
+
+#else
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_PERFORM_AUTOFOCUS);
+
+#endif
+
+ }
+ else
+ {
+ ret = -1;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Cancels auto-focus function.
+
+ If the auto-focus is still in progress, this function will cancel it.
+ Whether the auto-focus is in progress or not, this function will return the
+ focus position to the default. If the camera does not support auto-focus, this is a no-op.
+
+
+ @param none
+ @return NO_ERROR If the cancel succeeded
+   @todo Define error codes if cancel didn't succeed
+
+ */
+status_t CameraHal::cancelAutoFocus()
+{
+ LOG_FUNCTION_NAME;
+ if( NULL != mCameraAdapter )
+ {
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_CANCEL_AUTOFOCUS);
+ }
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+void CameraHal::setEventProvider(int32_t eventMask, MessageNotifier * eventNotifier)
+{
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ mEventProvider = new EventProvider(eventNotifier, this, eventCallbackRelay);
+ if ( NULL == mEventProvider )
+ {
+ CAMHAL_LOGEA("Error in creating EventProvider");
+ }
+ else
+ {
+ mEventProvider->enableEventNotification(eventMask);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void CameraHal::eventCallbackRelay(CameraHalEvent* event)
+{
+ LOG_FUNCTION_NAME;
+
+ CameraHal *appcbn = ( CameraHal * ) (event->mCookie);
+ appcbn->eventCallback(event );
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void CameraHal::eventCallback(CameraHalEvent* event)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != event )
+ {
+ switch( event->mEventType )
+ {
+ case CameraHalEvent::EVENT_FOCUS_LOCKED:
+ case CameraHalEvent::EVENT_FOCUS_ERROR:
+ {
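+                // Bracketing was armed in setParameters(); start it once auto focus
+                // completes (or fails).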
+ if ( mBracketingEnabled )
+ {
+ startImageBracketing();
+ }
+ break;
+ }
+ default:
+ {
+ break;
+ }
+ };
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t CameraHal::startImageBracketing()
+{
+ status_t ret = NO_ERROR;
+ CameraFrame frame;
+ CameraAdapter::BuffersDescriptor desc;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartCapture, NULL);
+
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if(!previewEnabled() && !mDisplayPaused)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ if ( !mBracketingEnabled )
+ {
+ return ret;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mBracketingRunning = true;
+ }
+
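+    // Temporal bracketing keeps (mBracketRangeNegative + 1) capture buffers queued;
+    // query the frame size and allocate the image buffers for that count below.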
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+ {
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE,
+ ( int ) &frame,
+ ( mBracketRangeNegative + 1 ));
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x", ret);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ if ( NULL != mAppCallbackNotifier.get() )
+ {
+ mAppCallbackNotifier->setBurst(true);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mParameters.getPictureSize(( int * ) &frame.mWidth,
+ ( int * ) &frame.mHeight);
+
+ ret = allocImageBufs(frame.mWidth,
+ frame.mHeight,
+ frame.mLength,
+ mParameters.getPictureFormat(),
+ ( mBracketRangeNegative + 1 ));
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
+ }
+ }
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+ {
+
+ desc.mBuffers = mImageBufs;
+ desc.mOffsets = mImageOffsets;
+ desc.mFd = mImageFd;
+ desc.mLength = mImageLength;
+ desc.mCount = ( size_t ) ( mBracketRangeNegative + 1 );
+ desc.mMaxQueueable = ( size_t ) ( mBracketRangeNegative + 1 );
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE,
+ ( int ) &desc);
+
+ if ( NO_ERROR == ret )
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //pass capture timestamp along with the camera adapter command
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_BRACKET_CAPTURE, ( mBracketRangePositive + 1 ), (int) &mStartCapture);
+
+#else
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_BRACKET_CAPTURE, ( mBracketRangePositive + 1 ));
+
+#endif
+
+ }
+ }
+
+ return ret;
+}
+
+status_t CameraHal::stopImageBracketing()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( !mBracketingRunning )
+ {
+ return ret;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mBracketingRunning = false;
+ }
+
+ if(!previewEnabled() && !mDisplayPaused)
+ {
+ return NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Take a picture.
+
+ @param none
+ @return NO_ERROR If able to switch to image capture
+ @todo Define error codes if unable to switch to image capture
+
+ */
+status_t CameraHal::takePicture( )
+{
+ status_t ret = NO_ERROR;
+ CameraFrame frame;
+ CameraAdapter::BuffersDescriptor desc;
+ int burst;
+ unsigned int bufferCount = 1;
+
+ Mutex::Autolock lock(mLock);
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartCapture, NULL);
+
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if(!previewEnabled() && !mDisplayPaused)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+    //If a capture is already in progress, reject this request instead of starting another one
+ if ( mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE &&
+ mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE) {
+ return NO_INIT;
+ }
+
+ if ( !mBracketingRunning )
+ {
+
+ if ( NO_ERROR == ret )
+ {
+ burst = mParameters.getInt(TICameraParameters::KEY_BURST);
+ }
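+        // Note: getInt() returns -1 when KEY_BURST is not set, so a regular single
+        // capture follows the (burst < 1) path below and pauses the display.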
+
+ //Allocate all buffers only in burst capture case
+ if ( burst > 1 )
+ {
+ bufferCount = CameraHal::NO_BUFFERS_IMAGE_CAPTURE;
+ if ( NULL != mAppCallbackNotifier.get() )
+ {
+ mAppCallbackNotifier->setBurst(true);
+ }
+ }
+ else
+ {
+ if ( NULL != mAppCallbackNotifier.get() )
+ {
+ mAppCallbackNotifier->setBurst(false);
+ }
+ }
+
+ //Pause Preview during capture
+ if ( (NO_ERROR == ret) && ( NULL != mDisplayAdapter.get() ) && ( burst < 1 ) )
+ {
+ mDisplayPaused = true;
+ mPreviewEnabled = false;
+ ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ mDisplayAdapter->setSnapshotTimeRef(&mStartCapture);
+
+#endif
+ // since preview is paused we should stop sending preview frames too
+ if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
+ {
+ mAppCallbackNotifier->disableMsgType (CAMERA_MSG_PREVIEW_FRAME);
+ }
+ }
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+ {
+
+ //Configure the correct picture resolution now if the capture mode is not set
+ if(mParameters.get(TICameraParameters::KEY_CAP_MODE) == NULL)
+ {
+ ret = mCameraAdapter->setParameters(mParameters);
+ }
+
+ if ( NO_ERROR == ret )
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE,
+ ( int ) &frame,
+ bufferCount);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x", ret);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mParameters.getPictureSize(( int * ) &frame.mWidth,
+ ( int * ) &frame.mHeight);
+
+ ret = allocImageBufs(frame.mWidth,
+ frame.mHeight,
+ frame.mLength,
+ mParameters.getPictureFormat(),
+ bufferCount);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
+ }
+ }
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+ {
+ desc.mBuffers = mImageBufs;
+ desc.mOffsets = mImageOffsets;
+ desc.mFd = mImageFd;
+ desc.mLength = mImageLength;
+ desc.mCount = ( size_t ) bufferCount;
+ desc.mMaxQueueable = ( size_t ) bufferCount;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE,
+ ( int ) &desc);
+ }
+ }
+ else
+ {
+ mBracketingRunning = false;
+ }
+
+ if ( ( NO_ERROR == ret ) && ( NULL != mCameraAdapter ) )
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //pass capture timestamp along with the camera adapter command
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE, (int) &mStartCapture);
+
+#else
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE);
+
+#endif
+
+ }
+
+ return ret;
+}
+
+/**
+ @brief Cancel a picture that was started with takePicture.
+
+ Calling this method when no picture is being taken is a no-op.
+
+ @param none
+   @return NO_ERROR If cancel succeeded. Cancel can only succeed if the image callback has not been sent yet
+ @todo Define error codes
+
+ */
+status_t CameraHal::cancelPicture( )
+{
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
+
+ return NO_ERROR;
+}
+
+/**
+ @brief Return the camera parameters.
+
+ @param none
+ @return Currently configured camera parameters
+
+ */
+char* CameraHal::getParameters()
+{
+ CameraParameters params;
+ String8 params_str8;
+ char* params_string;
+
+ LOG_FUNCTION_NAME;
+
+ params = mParameters;
+ if( NULL != mCameraAdapter )
+ {
+ mCameraAdapter->getParameters(params);
+ }
+
+ params_str8 = params.flatten();
+ // camera service frees this string...
+ params_string = (char*) malloc(sizeof(char) * (params_str8.length()+1));
+ strcpy(params_string, params_str8.string());
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ ///Return the current set of parameters
+
+ return params_string;
+}
+
+void CameraHal::putParameters(char *parms)
+{
+ free(parms);
+}
+
+/**
+ @brief Send command to camera driver.
+
+ @param none
+ @return NO_ERROR If the command succeeds
+ @todo Define the error codes that this function can return
+
+ */
+status_t CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+
+ if ( ( NO_ERROR == ret ) && ( NULL == mCameraAdapter ) )
+ {
+ CAMHAL_LOGEA("No CameraAdapter instance");
+ ret = -EINVAL;
+ }
+
+ if ( ( NO_ERROR == ret ) && ( !previewEnabled() ))
+ {
+ CAMHAL_LOGEA("Preview is not running");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ switch(cmd)
+ {
+ case CAMERA_CMD_START_SMOOTH_ZOOM:
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_SMOOTH_ZOOM, arg1);
+
+ break;
+            case CAMERA_CMD_STOP_SMOOTH_ZOOM:
+
+                ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM);
+
+                break;
+
+            case CAMERA_CMD_START_FACE_DETECTION:
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_FD);
+
+ break;
+
+ case CAMERA_CMD_STOP_FACE_DETECTION:
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_FD);
+
+ break;
+
+ default:
+ break;
+ };
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Release the hardware resources owned by this object.
+
+ Note that this is *not* done in the destructor.
+
+ @param none
+ @return none
+
+ */
+void CameraHal::release()
+{
+ LOG_FUNCTION_NAME;
+ ///@todo Investigate on how release is used by CameraService. Vaguely remember that this is called
+ ///just before CameraHal object destruction
+ deinitialize();
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+/**
+ @brief Dump state of the camera hardware
+
+ @param[in] fd File descriptor
+ @return NO_ERROR Dump succeeded
+ @todo Error codes for dump fail
+
+ */
+status_t CameraHal::dump(int fd) const
+{
+ LOG_FUNCTION_NAME;
+ ///Implement this method when the h/w dump function is supported on Ducati side
+ return NO_ERROR;
+}
+
+/*-------------Camera Hal Interface Method definitions ENDS here--------------------*/
+
+
+
+
+/*-------------Camera Hal Internal Method definitions STARTS here--------------------*/
+
+/**
+ @brief Constructor of CameraHal
+
+ Member variables are initialized here. No allocations should be done here as we
+ don't use c++ exceptions in the code.
+
+ */
+CameraHal::CameraHal(int cameraId)
+{
+ LOG_FUNCTION_NAME;
+
+ ///Initialize all the member variables to their defaults
+ mPreviewEnabled = false;
+ mPreviewBufs = NULL;
+ mImageBufs = NULL;
+ mBufProvider = NULL;
+ mPreviewStartInProgress = false;
+ mVideoBufs = NULL;
+ mVideoBufProvider = NULL;
+ mRecordingEnabled = false;
+ mDisplayPaused = false;
+ mSetPreviewWindowCalled = false;
+ mMsgEnabled = 0;
+ mAppCallbackNotifier = NULL;
+ mMemoryManager = NULL;
+ mCameraAdapter = NULL;
+ mBracketingEnabled = false;
+ mBracketingRunning = false;
+ mEventProvider = NULL;
+ mBracketRangePositive = 1;
+ mBracketRangeNegative = 1;
+ mMaxZoomSupported = 0;
+ mShutterEnabled = true;
+ mMeasurementEnabled = false;
+ mPreviewDataBufs = NULL;
+ mCameraProperties = NULL;
+ mCurrentTime = 0;
+ mFalsePreview = 0;
+ mImageOffsets = NULL;
+ mImageLength = 0;
+ mImageFd = 0;
+ mVideoOffsets = NULL;
+ mVideoFd = 0;
+ mVideoLength = 0;
+ mPreviewDataOffsets = NULL;
+ mPreviewDataFd = 0;
+ mPreviewDataLength = 0;
+ mPreviewFd = 0;
+ mPreviewWidth = 0;
+ mPreviewHeight = 0;
+ mPreviewLength = 0;
+ mPreviewOffsets = NULL;
+ mPreviewRunning = 0;
+ mPreviewStateOld = 0;
+ mRecordingEnabled = 0;
+ mRecordEnabled = 0;
+ mSensorListener = NULL;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //Initialize the CameraHAL constructor timestamp, which is used in the
+ // PPM() method as time reference if the user does not supply one.
+ gettimeofday(&ppm_start, NULL);
+
+#endif
+
+ mCameraIndex = cameraId;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Destructor of CameraHal
+
+   This function simply calls deinitialize() to free up memory allocated during the
+   construction phase
+ */
+CameraHal::~CameraHal()
+{
+ LOG_FUNCTION_NAME;
+
+ ///Call de-initialize here once more - it is the last chance for us to relinquish all the h/w and s/w resources
+ deinitialize();
+
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ /// Free the callback notifier
+ mAppCallbackNotifier.clear();
+
+ /// Free the memory manager
+ mMemoryManager.clear();
+
+ /// Free the display adapter
+ mDisplayAdapter.clear();
+
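+    // mCameraAdapter is a raw pointer holding a manual strong reference taken in
+    // initialize(); drop that reference explicitly here.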
+ if ( NULL != mCameraAdapter ) {
+ int strongCount = mCameraAdapter->getStrongCount();
+
+ mCameraAdapter->decStrong(mCameraAdapter);
+
+ mCameraAdapter = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Initialize the Camera HAL
+
+ Creates CameraAdapter, AppCallbackNotifier, DisplayAdapter and MemoryManager
+
+ @param None
+ @return NO_ERROR - On success
+ NO_MEMORY - On failure to allocate memory for any of the objects
+ @remarks Camera Hal internal function
+
+ */
+
+status_t CameraHal::initialize(CameraProperties::Properties* properties)
+{
+ LOG_FUNCTION_NAME;
+
+ int sensor_index = 0;
+
+ mLastPreviewFramerate = 0;
+
+ ///Initialize the event mask used for registering an event provider for AppCallbackNotifier
+ ///Currently, registering all events as to be coming from CameraAdapter
+ int32_t eventMask = CameraHalEvent::ALL_EVENTS;
+
+ // Get my camera properties
+ mCameraProperties = properties;
+
+ if(!mCameraProperties)
+ {
+ goto fail_loop;
+ }
+
+ // Dump the properties of this Camera
+ // will only print if DEBUG macro is defined
+ mCameraProperties->dump();
+
+ if (strcmp(CameraProperties::DEFAULT_VALUE, mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX)) != 0 )
+ {
+ sensor_index = atoi(mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX));
+ }
+
+ CAMHAL_LOGDB("Sensor index %d", sensor_index);
+
+ mCameraAdapter = CameraAdapter_Factory();
+ if ( ( NULL == mCameraAdapter ) || (mCameraAdapter->initialize(properties, sensor_index)!=NO_ERROR))
+ {
+ CAMHAL_LOGEA("Unable to create or initialize CameraAdapter");
+ mCameraAdapter = NULL;
+ goto fail_loop;
+ }
+
+ mCameraAdapter->incStrong(mCameraAdapter);
+ mCameraAdapter->registerImageReleaseCallback(releaseImageBuffers, (void *) this);
+ mCameraAdapter->registerEndCaptureCallback(endImageCapture, (void *)this);
+
+ if(!mAppCallbackNotifier.get())
+ {
+ /// Create the callback notifier
+ mAppCallbackNotifier = new AppCallbackNotifier();
+ if( ( NULL == mAppCallbackNotifier.get() ) || ( mAppCallbackNotifier->initialize() != NO_ERROR))
+ {
+ CAMHAL_LOGEA("Unable to create or initialize AppCallbackNotifier");
+ goto fail_loop;
+ }
+ }
+
+ if(!mMemoryManager.get())
+ {
+ /// Create Memory Manager
+ mMemoryManager = new MemoryManager();
+ if( ( NULL == mMemoryManager.get() ) || ( mMemoryManager->initialize() != NO_ERROR))
+ {
+ CAMHAL_LOGEA("Unable to create or initialize MemoryManager");
+ goto fail_loop;
+ }
+ }
+
+ ///Setup the class dependencies...
+
+ ///AppCallbackNotifier has to know where to get the Camera frames and the events like auto focus lock etc from.
+ ///CameraAdapter is the one which provides those events
+ ///Set it as the frame and event providers for AppCallbackNotifier
+ ///@remarks setEventProvider API takes in a bit mask of events for registering a provider for the different events
+ /// That way, if events can come from DisplayAdapter in future, we will be able to add it as provider
+ /// for any event
+ mAppCallbackNotifier->setEventProvider(eventMask, mCameraAdapter);
+ mAppCallbackNotifier->setFrameProvider(mCameraAdapter);
+
+    ///Any dynamic errors that happen during the camera use case have to be propagated back to the application
+ ///via CAMERA_MSG_ERROR. AppCallbackNotifier is the class that notifies such errors to the application
+ ///Set it as the error handler for CameraAdapter
+ mCameraAdapter->setErrorHandler(mAppCallbackNotifier.get());
+
+ ///Start the callback notifier
+ if(mAppCallbackNotifier->start() != NO_ERROR)
+ {
+ CAMHAL_LOGEA("Couldn't start AppCallbackNotifier");
+ goto fail_loop;
+ }
+
+ CAMHAL_LOGDA("Started AppCallbackNotifier..");
+ mAppCallbackNotifier->setMeasurements(mMeasurementEnabled);
+
+ ///Initialize default parameters
+ initDefaultParameters();
+
+
+ if ( setParameters(mParameters) != NO_ERROR )
+ {
+ CAMHAL_LOGEA("Failed to set default parameters?!");
+ }
+
+ // register for sensor events
+ mSensorListener = new SensorListener();
+ if (mSensorListener.get()) {
+ if (mSensorListener->initialize() == NO_ERROR) {
+ mSensorListener->setCallbacks(orientation_cb, this);
+ mSensorListener->enableSensor(SensorListener::SENSOR_ORIENTATION);
+ } else {
+ CAMHAL_LOGEA("Error initializing SensorListener. not fatal, continuing");
+ mSensorListener.clear();
+ mSensorListener = NULL;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+
+ fail_loop:
+
+ ///Free up the resources because we failed somewhere up
+ deinitialize();
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_MEMORY;
+
+}
+
+bool CameraHal::isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions)
+{
+ bool ret = true;
+ status_t status = NO_ERROR;
+ char tmpBuffer[PARAM_BUFFER + 1];
+ char *pos = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedResolutions )
+ {
+ CAMHAL_LOGEA("Invalid supported resolutions string");
+ ret = false;
+ goto exit;
+ }
+
+ status = snprintf(tmpBuffer, PARAM_BUFFER, "%dx%d", width, height);
+ if ( 0 > status )
+ {
+ CAMHAL_LOGEA("Error encountered while generating validation string");
+ ret = false;
+ goto exit;
+ }
+
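+    //Look for the generated "WxH" token as a substring of the comma-separated
+    //supported resolutions list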
+ pos = strstr(supportedResolutions, tmpBuffer);
+ if ( NULL == pos )
+ {
+ ret = false;
+ }
+ else
+ {
+ ret = true;
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+bool CameraHal::isParameterValid(const char *param, const char *supportedParams)
+{
+ bool ret = true;
+ char *pos = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedParams )
+ {
+ CAMHAL_LOGEA("Invalid supported parameters string");
+ ret = false;
+ goto exit;
+ }
+
+ if ( NULL == param )
+ {
+ CAMHAL_LOGEA("Invalid parameter string");
+ ret = false;
+ goto exit;
+ }
+
+ pos = strstr(supportedParams, param);
+ if ( NULL == pos )
+ {
+ ret = false;
+ }
+ else
+ {
+ ret = true;
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+bool CameraHal::isParameterValid(int param, const char *supportedParams)
+{
+ bool ret = true;
+ char *pos = NULL;
+ status_t status;
+ char tmpBuffer[PARAM_BUFFER + 1];
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedParams )
+ {
+ CAMHAL_LOGEA("Invalid supported parameters string");
+ ret = false;
+ goto exit;
+ }
+
+ status = snprintf(tmpBuffer, PARAM_BUFFER, "%d", param);
+ if ( 0 > status )
+ {
+ CAMHAL_LOGEA("Error encountered while generating validation string");
+ ret = false;
+ goto exit;
+ }
+
+ pos = strstr(supportedParams, tmpBuffer);
+ if ( NULL == pos )
+ {
+ ret = false;
+ }
+ else
+ {
+ ret = true;
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::parseResolution(const char *resStr, int &width, int &height)
+{
+ status_t ret = NO_ERROR;
+ char *ctx, *pWidth, *pHeight;
+ const char *sep = "x";
+ char *tmp = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == resStr )
+ {
+ return -EINVAL;
+ }
+
+    //strtok_r() modifies the string it parses, so work on a local copy of resStr
+    //instead of the caller's buffer (parsing the cached parameter string in place
+    //was apparently the cause of the original "Invalid input resolution" failures)
+ char *resStr_copy = (char *)malloc(strlen(resStr) + 1);
+ if ( NULL!=resStr_copy ) {
+ if ( NO_ERROR == ret )
+ {
+ strcpy(resStr_copy, resStr);
+ pWidth = strtok_r( (char *) resStr_copy, sep, &ctx);
+
+ if ( NULL != pWidth )
+ {
+ width = atoi(pWidth);
+ }
+ else
+ {
+ CAMHAL_LOGEB("Invalid input resolution %s", resStr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ pHeight = strtok_r(NULL, sep, &ctx);
+
+ if ( NULL != pHeight )
+ {
+ height = atoi(pHeight);
+ }
+ else
+ {
+ CAMHAL_LOGEB("Invalid input resolution %s", resStr);
+ ret = -EINVAL;
+ }
+ }
+
+ free(resStr_copy);
+ resStr_copy = NULL;
+    }
+    else
+    {
+        //The temporary copy could not be allocated - report the failure
+        ret = -ENOMEM;
+    }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void CameraHal::insertSupportedParams()
+{
+ char tmpBuffer[PARAM_BUFFER + 1];
+
+ LOG_FUNCTION_NAME;
+
+ CameraParameters &p = mParameters;
+
+ ///Set the name of the camera
+ p.set(TICameraParameters::KEY_CAMERA_NAME, mCameraProperties->get(CameraProperties::CAMERA_NAME));
+
+ mMaxZoomSupported = atoi(mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
+
+ p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
+ p.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS));
+ p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
+ p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+ p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+ p.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_THUMBNAIL_SIZES));
+ p.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE));
+ p.set(CameraParameters::KEY_SUPPORTED_EFFECTS, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS));
+ p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
+ p.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES));
+ p.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES));
+ p.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING));
+ p.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MAX));
+ p.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MIN));
+ p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_EV_STEP));
+ p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
+ p.set(TICameraParameters::KEY_SUPPORTED_EXPOSURE, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES));
+ p.set(TICameraParameters::KEY_SUPPORTED_ISO_VALUES, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES));
+ p.set(CameraParameters::KEY_ZOOM_RATIOS, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_RATIOS));
+ p.set(CameraParameters::KEY_MAX_ZOOM, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
+ p.set(CameraParameters::KEY_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::ZOOM_SUPPORTED));
+ p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::SMOOTH_ZOOM_SUPPORTED));
+ p.set(TICameraParameters::KEY_SUPPORTED_IPP, mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES));
+ p.set(TICameraParameters::KEY_S3D_SUPPORTED,mCameraProperties->get(CameraProperties::S3D_SUPPORTED));
+ p.set(TICameraParameters::KEY_S3D2D_PREVIEW_MODE,mCameraProperties->get(CameraProperties::S3D2D_PREVIEW_MODES));
+ p.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE));
+ p.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, mCameraProperties->get(CameraProperties::MANUALCONVERGENCE_VALUES));
+ p.set(TICameraParameters::KEY_VSTAB,mCameraProperties->get(CameraProperties::VSTAB));
+ p.set(TICameraParameters::KEY_VSTAB_VALUES,mCameraProperties->get(CameraProperties::VSTAB_VALUES));
+ p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+ p.set(TICameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION));
+ p.set(TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION_VALUES));
+ p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED));
+ p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED));
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+void CameraHal::initDefaultParameters()
+{
+ //Purpose of this function is to initialize the default current and supported parameters for the currently
+ //selected camera.
+
+ CameraParameters &p = mParameters;
+ int currentRevision, adapterRevision;
+ status_t ret = NO_ERROR;
+ int width, height;
+
+ LOG_FUNCTION_NAME;
+
+ ret = parseResolution(mCameraProperties->get(CameraProperties::PREVIEW_SIZE), width, height);
+
+ if ( NO_ERROR == ret )
+ {
+ p.setPreviewSize(width, height);
+ }
+ else
+ {
+ p.setPreviewSize(MIN_WIDTH, MIN_HEIGHT);
+ }
+
+ ret = parseResolution(mCameraProperties->get(CameraProperties::PICTURE_SIZE), width, height);
+
+ if ( NO_ERROR == ret )
+ {
+ p.setPictureSize(width, height);
+ }
+ else
+ {
+ p.setPictureSize(PICTURE_WIDTH, PICTURE_HEIGHT);
+ }
+
+ ret = parseResolution(mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_SIZE), width, height);
+
+ if ( NO_ERROR == ret )
+ {
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
+ }
+ else
+ {
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, MIN_WIDTH);
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, MIN_HEIGHT);
+ }
+
+ insertSupportedParams();
+
+ //Insert default values
+ p.setPreviewFrameRate(atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE)));
+ p.setPreviewFormat(mCameraProperties->get(CameraProperties::PREVIEW_FORMAT));
+ p.setPictureFormat(mCameraProperties->get(CameraProperties::PICTURE_FORMAT));
+ p.set(CameraParameters::KEY_JPEG_QUALITY, mCameraProperties->get(CameraProperties::JPEG_QUALITY));
+ p.set(CameraParameters::KEY_WHITE_BALANCE, mCameraProperties->get(CameraProperties::WHITEBALANCE));
+ p.set(CameraParameters::KEY_EFFECT, mCameraProperties->get(CameraProperties::EFFECT));
+ p.set(CameraParameters::KEY_ANTIBANDING, mCameraProperties->get(CameraProperties::ANTIBANDING));
+ p.set(CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
+ p.set(CameraParameters::KEY_FOCUS_MODE, mCameraProperties->get(CameraProperties::FOCUS_MODE));
+ p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::EV_COMPENSATION));
+ p.set(CameraParameters::KEY_SCENE_MODE, mCameraProperties->get(CameraProperties::SCENE_MODE));
+ p.set(CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
+ p.set(CameraParameters::KEY_ZOOM, mCameraProperties->get(CameraProperties::ZOOM));
+ p.set(TICameraParameters::KEY_CONTRAST, mCameraProperties->get(CameraProperties::CONTRAST));
+ p.set(TICameraParameters::KEY_SATURATION, mCameraProperties->get(CameraProperties::SATURATION));
+ p.set(TICameraParameters::KEY_BRIGHTNESS, mCameraProperties->get(CameraProperties::BRIGHTNESS));
+ p.set(TICameraParameters::KEY_SHARPNESS, mCameraProperties->get(CameraProperties::SHARPNESS));
+ p.set(TICameraParameters::KEY_EXPOSURE_MODE, mCameraProperties->get(CameraProperties::EXPOSURE_MODE));
+ p.set(TICameraParameters::KEY_ISO, mCameraProperties->get(CameraProperties::ISO_MODE));
+ p.set(TICameraParameters::KEY_IPP, mCameraProperties->get(CameraProperties::IPP));
+ p.set(TICameraParameters::KEY_S3D2D_PREVIEW, mCameraProperties->get(CameraProperties::S3D2D_PREVIEW));
+ p.set(TICameraParameters::KEY_AUTOCONVERGENCE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE));
+ p.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, mCameraProperties->get(CameraProperties::MANUALCONVERGENCE_VALUES));
+ p.set(TICameraParameters::KEY_VSTAB,mCameraProperties->get(CameraProperties::VSTAB));
+ p.set(TICameraParameters::KEY_VSTAB_VALUES,mCameraProperties->get(CameraProperties::VSTAB_VALUES));
+ p.set(CameraParameters::KEY_FOCAL_LENGTH, mCameraProperties->get(CameraProperties::FOCAL_LENGTH));
+ p.set(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::HOR_ANGLE));
+ p.set(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::VER_ANGLE));
+ p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
+ p.set(TICameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION));
+ p.set(TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION_VALUES));
+ p.set(TICameraParameters::KEY_EXIF_MAKE, mCameraProperties->get(CameraProperties::EXIF_MAKE));
+ p.set(TICameraParameters::KEY_EXIF_MODEL, mCameraProperties->get(CameraProperties::EXIF_MODEL));
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_QUALITY));
+ p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar");
+ p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, mCameraProperties->get(CameraProperties::MAX_FD_HW_FACES));
+ p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, mCameraProperties->get(CameraProperties::MAX_FD_SW_FACES));
+
+ // Only one area a.k.a Touch AF for now.
+ // TODO: Add support for multiple focus areas.
+ p.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, mCameraProperties->get(CameraProperties::MAX_FOCUS_AREAS));
+ p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK));
+ p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK));
+ p.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, mCameraProperties->get(CameraProperties::MAX_NUM_METERING_AREAS));
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Stop a previously started preview.
+ @param none
+ @return none
+
+ */
+void CameraHal::forceStopPreview()
+{
+ LOG_FUNCTION_NAME;
+
+ // stop bracketing if it is running
+ stopImageBracketing();
+
+ if(mDisplayAdapter.get() != NULL) {
+ ///Stop the buffer display first
+ mDisplayAdapter->disableDisplay();
+ }
+
+ if(mAppCallbackNotifier.get() != NULL) {
+ //Stop the callback sending
+ mAppCallbackNotifier->stop();
+ mAppCallbackNotifier->stopPreviewCallbacks();
+ }
+
+    // the camera system must be previewing in order to capture,
+    // so cancel any pending captures before stopping the preview
+ if ( mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE &&
+ mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE) {
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
+ }
+
+ if ( NULL != mCameraAdapter ) {
+ cancelAutoFocus();
+ //Stop the source of frames
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_PREVIEW);
+ }
+
+ freePreviewBufs();
+ freePreviewDataBufs();
+
+ mPreviewEnabled = false;
+ mDisplayPaused = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Deallocates memory for all the resources held by Camera HAL.
+
+   Stops the preview if it is still running, frees the image capture buffers and
+   disables the orientation sensor listener
+
+ @param none
+ @return none
+
+ */
+void CameraHal::deinitialize()
+{
+ LOG_FUNCTION_NAME;
+
+ if ( mPreviewEnabled ) {
+ forceStopPreview();
+ }
+
+ freeImageBufs();
+
+ mSetPreviewWindowCalled = false;
+
+ if (mSensorListener.get()) {
+ mSensorListener->disableSensor(SensorListener::SENSOR_ORIENTATION);
+ mSensorListener.clear();
+ mSensorListener = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+status_t CameraHal::storeMetaDataInBuffers(bool enable)
+{
+ LOG_FUNCTION_NAME;
+
+    status_t ret = mAppCallbackNotifier->useMetaDataBufferMode(enable);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+};
+
+
diff --git a/camera/CameraHalCommon.cpp b/camera/CameraHalCommon.cpp
new file mode 100644
index 0000000..6d4ea2c
--- /dev/null
+++ b/camera/CameraHalCommon.cpp
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "CameraHal.h"
+
+namespace android {
+
+const char CameraHal::PARAMS_DELIMITER []= ",";
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+struct timeval CameraHal::ppm_start;
+
+#endif
+
+#if PPM_INSTRUMENTATION
+
+/**
+ @brief PPM instrumentation
+
+ Dumps the current time offset. The time reference point
+ lies within the CameraHAL constructor.
+
+ @param str - log message
+ @return none
+
+ */
+void CameraHal::PPM(const char* str){
+ struct timeval ppm;
+
+ gettimeofday(&ppm, NULL);
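+    //ppm.tv_sec is reused as scratch storage below: after the next three lines it
+    //holds the time elapsed since ppm_start (the CameraHal constructor) in microseconds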
+ ppm.tv_sec = ppm.tv_sec - ppm_start.tv_sec;
+ ppm.tv_sec = ppm.tv_sec * 1000000;
+ ppm.tv_sec = ppm.tv_sec + ppm.tv_usec - ppm_start.tv_usec;
+
+ LOGD("PPM: %s :%ld.%ld ms", str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ));
+}
+
+#elif PPM_INSTRUMENTATION_ABS
+
+/**
+ @brief PPM instrumentation
+
+ Dumps the current time offset. The time reference point
+ lies within the CameraHAL constructor. This implementation
+ also dumps the absolute timestamp, which is useful when
+ post-processing calculations are done with data coming from the
+ upper layers (Camera application etc.)
+
+ @param str - log message
+ @return none
+
+ */
+void CameraHal::PPM(const char* str){
+ struct timeval ppm;
+
+ unsigned long long elapsed, absolute;
+ gettimeofday(&ppm, NULL);
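+    //'elapsed' is the time since ppm_start (the CameraHal constructor) in microseconds,
+    //'absolute' is the current wall-clock time expressed in milliseconds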
+ elapsed = ppm.tv_sec - ppm_start.tv_sec;
+ elapsed *= 1000000;
+ elapsed += ppm.tv_usec - ppm_start.tv_usec;
+ absolute = ppm.tv_sec;
+ absolute *= 1000;
+ absolute += ppm.tv_usec /1000;
+
+ LOGD("PPM: %s :%llu.%llu ms : %llu ms", str, ( elapsed /1000 ), ( elapsed % 1000 ), absolute);
+}
+
+#endif
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+/**
+ @brief PPM instrumentation
+
+ Calculates and dumps the elapsed time using 'ppm_first' as
+ reference.
+
+ @param str - log message
+ @return none
+
+ */
+void CameraHal::PPM(const char* str, struct timeval* ppm_first, ...){
+ char temp_str[256];
+ struct timeval ppm;
+ unsigned long long absolute;
+ va_list args;
+
+ va_start(args, ppm_first);
+    vsnprintf(temp_str, sizeof(temp_str), str, args);
+ gettimeofday(&ppm, NULL);
+ absolute = ppm.tv_sec;
+ absolute *= 1000;
+ absolute += ppm.tv_usec /1000;
+ ppm.tv_sec = ppm.tv_sec - ppm_first->tv_sec;
+ ppm.tv_sec = ppm.tv_sec * 1000000;
+ ppm.tv_sec = ppm.tv_sec + ppm.tv_usec - ppm_first->tv_usec;
+
+ LOGD("PPM: %s :%ld.%ld ms : %llu ms", temp_str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ), absolute);
+
+ va_end(args);
+}
+
+#endif
+
+};
+
+
diff --git a/camera/CameraHalUtilClasses.cpp b/camera/CameraHalUtilClasses.cpp
new file mode 100644
index 0000000..84eea38
--- /dev/null
+++ b/camera/CameraHalUtilClasses.cpp
@@ -0,0 +1,273 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraHalUtilClasses.cpp
+*
+* This file implements the utility classes used by CameraHal: FrameProvider,
+* EventProvider and CameraArea.
+*
+*/
+
+#define LOG_TAG "CameraHAL"
+
+
+#include "CameraHal.h"
+
+namespace android {
+
+/*--------------------FrameProvider Class STARTS here-----------------------------*/
+
+int FrameProvider::enableFrameNotification(int32_t frameTypes)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ ///Enable the frame notification to CameraAdapter (which implements FrameNotifier interface)
+ mFrameNotifier->enableMsgType(frameTypes<<MessageNotifier::FRAME_BIT_FIELD_POSITION
+ , mFrameCallback
+ , NULL
+ , mCookie
+ );
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+int FrameProvider::disableFrameNotification(int32_t frameTypes)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ mFrameNotifier->disableMsgType(frameTypes<<MessageNotifier::FRAME_BIT_FIELD_POSITION
+ , mCookie
+ );
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+int FrameProvider::returnFrame(void *frameBuf, CameraFrame::FrameType frameType)
+{
+ status_t ret = NO_ERROR;
+
+ mFrameNotifier->returnFrame(frameBuf, frameType);
+
+ return ret;
+}
+
+
+/*--------------------FrameProvider Class ENDS here-----------------------------*/
+
+/*--------------------EventProvider Class STARTS here-----------------------------*/
+
+int EventProvider::enableEventNotification(int32_t frameTypes)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+    ///Enable the event notification to CameraAdapter (which implements the event notifier interface)
+ mEventNotifier->enableMsgType(frameTypes<<MessageNotifier::EVENT_BIT_FIELD_POSITION
+ , NULL
+ , mEventCallback
+ , mCookie
+ );
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+int EventProvider::disableEventNotification(int32_t frameTypes)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+    mEventNotifier->disableMsgType(frameTypes<<MessageNotifier::EVENT_BIT_FIELD_POSITION
+ , mCookie
+ );
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+/*--------------------EventProvider Class ENDS here-----------------------------*/
+
+/*--------------------CameraArea Class STARTS here-----------------------------*/
+
+status_t CameraArea::transfrom(size_t width,
+ size_t height,
+ size_t &top,
+ size_t &left,
+ size_t &areaWidth,
+ size_t &areaHeight)
+{
+ status_t ret = NO_ERROR;
+ size_t hRange, vRange;
+ double hScale, vScale;
+
+ LOG_FUNCTION_NAME
+
+ hRange = CameraArea::RIGHT - CameraArea::LEFT;
+ vRange = CameraArea::BOTTOM - CameraArea::TOP;
+ hScale = ( double ) width / ( double ) hRange;
+ vScale = ( double ) height / ( double ) vRange;
+
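+    //Map the area from the normalized Android coordinate space
+    //([LEFT, RIGHT] x [TOP, BOTTOM], typically [-1000, 1000] with (0,0) at the
+    //frame center) into pixel coordinates: shift by half the range so the origin
+    //becomes the top-left corner, then scale to the width x height frame.
+    //E.g. assuming the usual [-1000, 1000] range, the top-left quadrant area
+    //(left=-1000, top=-1000, right=0, bottom=0) on an 800x480 frame maps to a
+    //400x240 rectangle at pixel (0,0).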
+ top = ( mTop + vRange / 2 ) * vScale;
+ left = ( mLeft + hRange / 2 ) * hScale;
+ areaHeight = ( mBottom + vRange / 2 ) * vScale;
+ areaHeight -= top;
+ areaWidth = ( mRight + hRange / 2) * hScale;
+ areaWidth -= left;
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
+
+status_t CameraArea::parseFocusArea(const char *area,
+ size_t areaLength,
+ Vector< sp<CameraArea> > &areas)
+{
+ status_t ret = NO_ERROR;
+ char *ctx;
+ char *pArea = NULL;
+ char *pStart = NULL;
+ char *pEnd = NULL;
+ const char *startToken = "(";
+ const char endToken = ')';
+ const char sep = ',';
+ ssize_t top, left, bottom, right, weight;
+ char *tmpBuffer = NULL;
+ sp<CameraArea> currentArea;
+
+ LOG_FUNCTION_NAME
+
+ if ( ( NULL == area ) ||
+ ( 0 >= areaLength ) )
+ {
+ return -EINVAL;
+ }
+
+ tmpBuffer = ( char * ) malloc(areaLength);
+ if ( NULL == tmpBuffer )
+ {
+ return -ENOMEM;
+ }
+
+ memcpy(tmpBuffer, area, areaLength);
+
+ pArea = strtok_r(tmpBuffer, startToken, &ctx);
+
+ do
+ {
+
+ pStart = pArea;
+ if ( NULL == pStart )
+ {
+ CAMHAL_LOGEA("Parsing of the left area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ left = static_cast<ssize_t>(strtol(pStart, &pEnd, 10));
+ }
+
+ if ( sep != *pEnd )
+ {
+ CAMHAL_LOGEA("Parsing of the top area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ top = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
+ }
+
+ if ( sep != *pEnd )
+ {
+ CAMHAL_LOGEA("Parsing of the right area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ right = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
+ }
+
+ if ( sep != *pEnd )
+ {
+ CAMHAL_LOGEA("Parsing of the bottom area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ bottom = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
+ }
+
+ if ( sep != *pEnd )
+ {
+ CAMHAL_LOGEA("Parsing of the weight area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ weight = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
+ }
+
+ if ( endToken != *pEnd )
+ {
+ CAMHAL_LOGEA("Malformed area!");
+ ret = -EINVAL;
+ break;
+ }
+
+ currentArea = new CameraArea(top, left, bottom, right, weight);
+ CAMHAL_LOGDB("Area parsed [%dx%d, %dx%d] %d",
+ ( int ) top,
+ ( int ) left,
+ ( int ) bottom,
+ ( int ) right,
+ ( int )weight);
+ if ( NULL != currentArea.get() )
+ {
+ areas.add(currentArea);
+ }
+ else
+ {
+ ret = -ENOMEM;
+ break;
+ }
+
+ pArea = strtok_r(NULL, startToken, &ctx);
+
+ }
+ while ( NULL != pArea );
+
+ if ( NULL != tmpBuffer )
+ {
+ free(tmpBuffer);
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
+
+/*--------------------CameraArea Class ENDS here-----------------------------*/
+
+};
diff --git a/camera/CameraHal_Module.cpp b/camera/CameraHal_Module.cpp
new file mode 100644
index 0000000..cf3ec60
--- /dev/null
+++ b/camera/CameraHal_Module.cpp
@@ -0,0 +1,676 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraHal_Module.cpp
+*
+* This file implements the camera HAL module entry points (HAL_MODULE_INFO_SYM and
+* the camera_device_ops functions) and forwards them to the CameraHal instance of
+* the corresponding camera.
+*
+*/
+
+#define LOG_TAG "CameraHAL"
+
+#include "CameraHal.h"
+#include "CameraProperties.h"
+#include "TICameraParameters.h"
+
+
+static android::CameraProperties gCameraProperties;
+static android::CameraHal* gCameraHals[MAX_CAMERAS_SUPPORTED];
+
+static int camera_device_open(const hw_module_t* module, const char* name,
+ hw_device_t** device);
+static int camera_device_close(hw_device_t* device);
+static int camera_get_number_of_cameras(void);
+static int camera_get_camera_info(int camera_id, struct camera_info *info);
+
+static struct hw_module_methods_t camera_module_methods = {
+ open: camera_device_open
+};
+
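+/* HAL_MODULE_INFO_SYM is the well-known symbol that the Android hardware module
+ * loader resolves when this library is opened. A typical caller (sketch only,
+ * not part of this module; 'device' is an hw_device_t* supplied by the caller):
+ *
+ *   camera_module_t *module;
+ *   hw_get_module(CAMERA_HARDWARE_MODULE_ID, (const hw_module_t **) &module);
+ *   module->common.methods->open(&module->common, "0", &device);
+ */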
+camera_module_t HAL_MODULE_INFO_SYM = {
+ common: {
+ tag: HARDWARE_MODULE_TAG,
+ version_major: 1,
+ version_minor: 0,
+ id: CAMERA_HARDWARE_MODULE_ID,
+ name: "TI OMAP CameraHal Module",
+ author: "TI",
+ methods: &camera_module_methods,
+ dso: NULL, /* remove compilation warnings */
+ reserved: {0}, /* remove compilation warnings */
+ },
+ get_number_of_cameras: camera_get_number_of_cameras,
+ get_camera_info: camera_get_camera_info,
+};
+
+typedef struct ti_camera_device {
+ camera_device_t base;
+ /* TI specific "private" data can go here (base.priv) */
+ int cameraid;
+} ti_camera_device_t;
+
+
+/*******************************************************************
+ * implementation of camera_device_ops functions
+ *******************************************************************/
+
+int camera_set_preview_window(struct camera_device * device,
+ struct preview_stream_ops *window)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->setPreviewWindow(window);
+
+ return rv;
+}
+
+void camera_set_callbacks(struct camera_device * device,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->setCallbacks(notify_cb, data_cb, data_cb_timestamp, get_memory, user);
+}
+
+void camera_enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->enableMsgType(msg_type);
+}
+
+void camera_disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->disableMsgType(msg_type);
+}
+
+int camera_msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return 0;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ return gCameraHals[ti_dev->cameraid]->msgTypeEnabled(msg_type);
+}
+
+int camera_start_preview(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->startPreview();
+
+ return rv;
+}
+
+void camera_stop_preview(struct camera_device * device)
+{
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->stopPreview();
+}
+
+int camera_preview_enabled(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->previewEnabled();
+ return rv;
+}
+
+int camera_store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+    // TODO: meta data buffers are not currently supported
+ rv = gCameraHals[ti_dev->cameraid]->storeMetaDataInBuffers(enable);
+ return rv;
+ //return enable ? android::INVALID_OPERATION: android::OK;
+}
+
+int camera_start_recording(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->startRecording();
+ return rv;
+}
+
+void camera_stop_recording(struct camera_device * device)
+{
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->stopRecording();
+}
+
+int camera_recording_enabled(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->recordingEnabled();
+ return rv;
+}
+
+void camera_release_recording_frame(struct camera_device * device,
+ const void *opaque)
+{
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->releaseRecordingFrame(opaque);
+}
+
+int camera_auto_focus(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->autoFocus();
+ return rv;
+}
+
+int camera_cancel_auto_focus(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->cancelAutoFocus();
+ return rv;
+}
+
+int camera_take_picture(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->takePicture();
+ return rv;
+}
+
+int camera_cancel_picture(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->cancelPicture();
+ return rv;
+}
+
+int camera_set_parameters(struct camera_device * device, const char *params)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->setParameters(params);
+ return rv;
+}
+
+char* camera_get_parameters(struct camera_device * device)
+{
+ char* param = NULL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return NULL;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ param = gCameraHals[ti_dev->cameraid]->getParameters();
+
+ return param;
+}
+
+static void camera_put_parameters(struct camera_device *device, char *parms)
+{
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->putParameters(parms);
+}
+
+int camera_send_command(struct camera_device * device,
+ int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->sendCommand(cmd, arg1, arg2);
+ return rv;
+}
+
+void camera_release(struct camera_device * device)
+{
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ gCameraHals[ti_dev->cameraid]->release();
+}
+
+int camera_dump(struct camera_device * device, int fd)
+{
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->dump(fd);
+ return rv;
+}
+
+extern "C" void heaptracker_free_leaked_memory(void);
+
+int camera_device_close(hw_device_t* device)
+{
+ int ret = 0;
+ ti_camera_device_t* ti_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if (!device) {
+ ret = -EINVAL;
+ goto done;
+ }
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ if (gCameraHals[ti_dev->cameraid]) {
+ delete gCameraHals[ti_dev->cameraid];
+ gCameraHals[ti_dev->cameraid] = NULL;
+ }
+
+ if (ti_dev->base.ops) {
+ free(ti_dev->base.ops);
+ }
+ free(ti_dev);
+
+done:
+#ifdef HEAPTRACKER
+ heaptracker_free_leaked_memory();
+#endif
+ return ret;
+}
+
+/*******************************************************************
+ * implementation of camera_module functions
+ *******************************************************************/
+
+/* open device handle to one of the cameras
+ *
+ * assume the camera service keeps a singleton of each camera,
+ * so this function will only be called once per camera instance
+ */
+
+int camera_device_open(const hw_module_t* module, const char* name,
+ hw_device_t** device)
+{
+ int rv = -EINVAL;
+ int num_cameras = 0;
+ int cameraid;
+ ti_camera_device_t* camera_device = NULL;
+ camera_device_ops_t* camera_ops = NULL;
+ android::CameraHal* camera = NULL;
+ android::CameraProperties::Properties* properties = NULL;
+
+ LOGI("camera_device open");
+
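+    /* 'name' is the camera id the camera service passes in as a decimal string
+     * (e.g. "0" for the first camera) */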
+ if (name != NULL) {
+ cameraid = atoi(name);
+ num_cameras = gCameraProperties.camerasSupported();
+
+        if(cameraid >= num_cameras)
+ {
+ LOGE("camera service provided cameraid out of bounds, "
+ "cameraid = %d, num supported = %d",
+ cameraid, num_cameras);
+ goto fail;
+ }
+
+ camera_device = (ti_camera_device_t*)malloc(sizeof(*camera_device));
+ if(!camera_device)
+ {
+ LOGE("camera_device allocation fail");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ camera_ops = (camera_device_ops_t*)malloc(sizeof(*camera_ops));
+ if(!camera_ops)
+ {
+ LOGE("camera_ops allocation fail");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ memset(camera_device, 0, sizeof(*camera_device));
+ memset(camera_ops, 0, sizeof(*camera_ops));
+
+ camera_device->base.common.tag = HARDWARE_DEVICE_TAG;
+ camera_device->base.common.version = 0;
+ camera_device->base.common.module = (hw_module_t *)(module);
+ camera_device->base.common.close = camera_device_close;
+ camera_device->base.ops = camera_ops;
+
+ camera_ops->set_preview_window = camera_set_preview_window;
+ camera_ops->set_callbacks = camera_set_callbacks;
+ camera_ops->enable_msg_type = camera_enable_msg_type;
+ camera_ops->disable_msg_type = camera_disable_msg_type;
+ camera_ops->msg_type_enabled = camera_msg_type_enabled;
+ camera_ops->start_preview = camera_start_preview;
+ camera_ops->stop_preview = camera_stop_preview;
+ camera_ops->preview_enabled = camera_preview_enabled;
+ camera_ops->store_meta_data_in_buffers = camera_store_meta_data_in_buffers;
+ camera_ops->start_recording = camera_start_recording;
+ camera_ops->stop_recording = camera_stop_recording;
+ camera_ops->recording_enabled = camera_recording_enabled;
+ camera_ops->release_recording_frame = camera_release_recording_frame;
+ camera_ops->auto_focus = camera_auto_focus;
+ camera_ops->cancel_auto_focus = camera_cancel_auto_focus;
+ camera_ops->take_picture = camera_take_picture;
+ camera_ops->cancel_picture = camera_cancel_picture;
+ camera_ops->set_parameters = camera_set_parameters;
+ camera_ops->get_parameters = camera_get_parameters;
+ camera_ops->put_parameters = camera_put_parameters;
+ camera_ops->send_command = camera_send_command;
+ camera_ops->release = camera_release;
+ camera_ops->dump = camera_dump;
+
+ *device = &camera_device->base.common;
+
+ // -------- TI specific stuff --------
+
+ camera_device->cameraid = cameraid;
+
+ if(gCameraProperties.getProperties(cameraid, &properties) < 0)
+ {
+ LOGE("Couldn't get camera properties");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ camera = new android::CameraHal(cameraid);
+
+ if(!camera)
+ {
+ LOGE("Couldn't create instance of CameraHal class");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ if(properties && (camera->initialize(properties) != android::NO_ERROR))
+ {
+ LOGE("Couldn't initialize camera instance");
+ rv = -ENODEV;
+ goto fail;
+ }
+
+        gCameraHals[cameraid] = camera;
+
+        // Success - clear the default error code before returning
+        rv = 0;
+ }
+
+ return rv;
+
+fail:
+
+ if(camera_device)
+ free(camera_device);
+ if(camera_ops)
+ free(camera_ops);
+ if(camera)
+ delete camera;
+
+ return rv;
+}
+
+int camera_get_number_of_cameras(void)
+{
+ int num_cameras = MAX_CAMERAS_SUPPORTED;
+
+ // TODO(XXX): Ducati is not loaded yet when camera service gets here
+    // Let's revisit this later to see if we can somehow get this working
+#if 0
+ // this going to be the first call from camera service
+ // initialize camera properties here...
+ if(gCameraProperties.initialize() != android::NO_ERROR)
+ {
+ CAMHAL_LOGEA("Unable to create or initialize CameraProperties");
+ return NULL;
+ }
+
+ num_cameras = gCameraProperties.camerasSupported();
+#endif
+
+ return num_cameras;
+}
+
+int camera_get_camera_info(int camera_id, struct camera_info *info)
+{
+ int rv = 0;
+ int face_value = CAMERA_FACING_BACK;
+ int orientation = 0;
+ const char *valstr = NULL;
+ android::CameraProperties::Properties* properties = NULL;
+
+    // this is going to be the first call from the camera service,
+ // initialize camera properties here...
+ if(gCameraProperties.initialize() != android::NO_ERROR)
+ {
+ CAMHAL_LOGEA("Unable to create or initialize CameraProperties");
+        return -EINVAL;
+ }
+
+ //Get camera properties for camera index
+ if(gCameraProperties.getProperties(camera_id, &properties) < 0)
+ {
+ LOGE("Couldn't get camera properties");
+ rv = -EINVAL;
+ goto end;
+ }
+
+ if(properties)
+ {
+ valstr = properties->get(android::CameraProperties::FACING_INDEX);
+ if(valstr != NULL)
+ {
+ if (strcmp(valstr, (const char *) android::TICameraParameters::FACING_FRONT) == 0)
+ {
+ face_value = CAMERA_FACING_FRONT;
+ }
+ else if (strcmp(valstr, (const char *) android::TICameraParameters::FACING_BACK) == 0)
+ {
+ face_value = CAMERA_FACING_BACK;
+ }
+ }
+
+ valstr = properties->get(android::CameraProperties::ORIENTATION_INDEX);
+ if(valstr != NULL)
+ {
+ orientation = atoi(valstr);
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEB("getProperties() returned a NULL property set for Camera id %d", camera_id);
+ }
+
+ info->facing = face_value;
+ info->orientation = orientation;
+
+end:
+ return rv;
+}
+
+
+
+
+
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
new file mode 100644
index 0000000..f19a0fe
--- /dev/null
+++ b/camera/CameraParameters.cpp
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraParameters.cpp
+*
+* This file defines the CameraProperties key strings and implements the
+* CameraProperties::Properties key/value accessors.
+*
+*/
+
+#include "CameraHal.h"
+#include "CameraProperties.h"
+
+namespace android {
+
+const char CameraProperties::INVALID[]="prop-invalid-key";
+const char CameraProperties::CAMERA_NAME[]="prop-camera-name";
+const char CameraProperties::CAMERA_SENSOR_INDEX[]="prop-sensor-index";
+const char CameraProperties::ORIENTATION_INDEX[]="prop-orientation";
+const char CameraProperties::FACING_INDEX[]="prop-facing";
+const char CameraProperties::S3D_SUPPORTED[]="prop-s3d-supported";
+const char CameraProperties::SUPPORTED_PREVIEW_SIZES[] = "prop-preview-size-values";
+const char CameraProperties::SUPPORTED_PREVIEW_FORMATS[] = "prop-preview-format-values";
+const char CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES[] = "prop-preview-frame-rate-values";
+const char CameraProperties::SUPPORTED_PICTURE_SIZES[] = "prop-picture-size-values";
+const char CameraProperties::SUPPORTED_PICTURE_FORMATS[] = "prop-picture-format-values";
+const char CameraProperties::SUPPORTED_THUMBNAIL_SIZES[] = "prop-jpeg-thumbnail-size-values";
+const char CameraProperties::SUPPORTED_WHITE_BALANCE[] = "prop-whitebalance-values";
+const char CameraProperties::SUPPORTED_EFFECTS[] = "prop-effect-values";
+const char CameraProperties::SUPPORTED_ANTIBANDING[] = "prop-antibanding-values";
+const char CameraProperties::SUPPORTED_EXPOSURE_MODES[] = "prop-exposure-mode-values";
+const char CameraProperties::SUPPORTED_EV_MAX[] = "prop-ev-compensation-max";
+const char CameraProperties::SUPPORTED_EV_MIN[] = "prop-ev-compensation-min";
+const char CameraProperties::SUPPORTED_EV_STEP[] = "prop-ev-compensation-step";
+const char CameraProperties::SUPPORTED_ISO_VALUES[] = "prop-iso-mode-values";
+const char CameraProperties::SUPPORTED_SCENE_MODES[] = "prop-scene-mode-values";
+const char CameraProperties::SUPPORTED_FLASH_MODES[] = "prop-flash-mode-values";
+const char CameraProperties::SUPPORTED_FOCUS_MODES[] = "prop-focus-mode-values";
+const char CameraProperties::REQUIRED_PREVIEW_BUFS[] = "prop-required-preview-bufs";
+const char CameraProperties::REQUIRED_IMAGE_BUFS[] = "prop-required-image-bufs";
+const char CameraProperties::SUPPORTED_ZOOM_RATIOS[] = "prop-zoom-ratios";
+const char CameraProperties::SUPPORTED_ZOOM_STAGES[] = "prop-zoom-stages";
+const char CameraProperties::SUPPORTED_IPP_MODES[] = "prop-ipp-values";
+const char CameraProperties::SMOOTH_ZOOM_SUPPORTED[] = "prop-smooth-zoom-supported";
+const char CameraProperties::ZOOM_SUPPORTED[] = "prop-zoom-supported";
+const char CameraProperties::PREVIEW_SIZE[] = "prop-preview-size-default";
+const char CameraProperties::PREVIEW_FORMAT[] = "prop-preview-format-default";
+const char CameraProperties::PREVIEW_FRAME_RATE[] = "prop-preview-frame-rate-default";
+const char CameraProperties::ZOOM[] = "prop-zoom-default";
+const char CameraProperties::PICTURE_SIZE[] = "prop-picture-size-default";
+const char CameraProperties::PICTURE_FORMAT[] = "prop-picture-format-default";
+const char CameraProperties::JPEG_THUMBNAIL_SIZE[] = "prop-jpeg-thumbnail-size-default";
+const char CameraProperties::WHITEBALANCE[] = "prop-whitebalance-default";
+const char CameraProperties::EFFECT[] = "prop-effect-default";
+const char CameraProperties::ANTIBANDING[] = "prop-antibanding-default";
+const char CameraProperties::EXPOSURE_MODE[] = "prop-exposure-mode-default";
+const char CameraProperties::EV_COMPENSATION[] = "prop-ev-compensation-default";
+const char CameraProperties::ISO_MODE[] = "prop-iso-mode-default";
+const char CameraProperties::FOCUS_MODE[] = "prop-focus-mode-default";
+const char CameraProperties::SCENE_MODE[] = "prop-scene-mode-default";
+const char CameraProperties::FLASH_MODE[] = "prop-flash-mode-default";
+const char CameraProperties::JPEG_QUALITY[] = "prop-jpeg-quality-default";
+const char CameraProperties::CONTRAST[] = "prop-contrast-default";
+const char CameraProperties::BRIGHTNESS[] = "prop-brightness-default";
+const char CameraProperties::SATURATION[] = "prop-saturation-default";
+const char CameraProperties::SHARPNESS[] = "prop-sharpness-default";
+const char CameraProperties::IPP[] = "prop-ipp-default";
+const char CameraProperties::S3D2D_PREVIEW[] = "prop-s3d2d-preview";
+const char CameraProperties::S3D2D_PREVIEW_MODES[] = "prop-s3d2d-preview-values";
+const char CameraProperties::AUTOCONVERGENCE[] = "prop-auto-convergence";
+const char CameraProperties::AUTOCONVERGENCE_MODE[] = "prop-auto-convergence-mode";
+const char CameraProperties::MANUALCONVERGENCE_VALUES[] = "prop-manual-convergence-values";
+const char CameraProperties::VSTAB[] = "prop-vstab-default";
+const char CameraProperties::VSTAB_VALUES[] = "prop-vstab-values";
+const char CameraProperties::REVISION[] = "prop-revision";
+const char CameraProperties::FOCAL_LENGTH[] = "prop-focal-length";
+const char CameraProperties::HOR_ANGLE[] = "prop-horizontal-angle";
+const char CameraProperties::VER_ANGLE[] = "prop-vertical-angle";
+const char CameraProperties::FRAMERATE_RANGE[]="prop-framerate-range-default";
+const char CameraProperties::FRAMERATE_RANGE_SUPPORTED[]="prop-framerate-range-values";
+const char CameraProperties::SENSOR_ORIENTATION[]= "sensor-orientation";
+const char CameraProperties::SENSOR_ORIENTATION_VALUES[]= "sensor-orientation-values";
+const char CameraProperties::EXIF_MAKE[] = "prop-exif-make";
+const char CameraProperties::EXIF_MODEL[] = "prop-exif-model";
+const char CameraProperties::JPEG_THUMBNAIL_QUALITY[] = "prop-jpeg-thumbnail-quality-default";
+const char CameraProperties::MAX_FOCUS_AREAS[] = "prop-max-focus-areas";
+const char CameraProperties::MAX_FD_HW_FACES[] = "prop-max-fd-hw-faces";
+const char CameraProperties::MAX_FD_SW_FACES[] = "prop-max-fd-sw-faces";
+const char CameraProperties::AUTO_EXPOSURE_LOCK[] = "prop-auto-exposure-lock";
+const char CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED[] = "prop-auto-exposure-lock-supported";
+const char CameraProperties::AUTO_WHITEBALANCE_LOCK[] = "prop-auto-whitebalance-lock";
+const char CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED[] = "prop-auto-whitebalance-lock-supported";
+const char CameraProperties::MAX_NUM_METERING_AREAS[] = "prop-max-num-metering-areas";
+const char CameraProperties::METERING_AREAS[] = "prop-metering-areas";
+
+const char CameraProperties::DEFAULT_VALUE[] = "";
+
+const char CameraProperties::PARAMS_DELIMITER []= ",";
+
+// Returns the properties class for a specific Camera
+// The returned Properties object is a key/value store indexed by the key strings defined above
+int CameraProperties::getProperties(int cameraIndex, CameraProperties::Properties** properties)
+{
+ LOG_FUNCTION_NAME;
+
+ if((unsigned int)cameraIndex >= mCamerasSupported)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ *properties = mCameraProps+cameraIndex;
+
+ LOG_FUNCTION_NAME_EXIT;
+ return 0;
+}
+
+ssize_t CameraProperties::Properties::set(const char *prop, const char *value)
+{
+ if(!prop)
+ return -EINVAL;
+ if(!value)
+ value = DEFAULT_VALUE;
+
+ return mProperties->replaceValueFor(String8(prop), String8(value));
+}
+
+ssize_t CameraProperties::Properties::set(const char *prop, int value)
+{
+ char s_val[30];
+
+ sprintf(s_val, "%d", value);
+
+ return set(prop, s_val);
+}
+
+const char* CameraProperties::Properties::get(const char * prop)
+{
+ String8 value = mProperties->valueFor(String8(prop));
+ return value.string();
+}
+
+void CameraProperties::Properties::dump()
+{
+ for (size_t i = 0; i < mProperties->size(); i++)
+ {
+ CAMHAL_LOGDB("%s = %s\n",
+ mProperties->keyAt(i).string(),
+ mProperties->valueAt(i).string());
+ }
+}
+
+const char* CameraProperties::Properties::keyAt(unsigned int index)
+{
+ if(index < mProperties->size())
+ {
+ return mProperties->keyAt(index).string();
+ }
+ return NULL;
+}
+
+const char* CameraProperties::Properties::valueAt(unsigned int index)
+{
+ if(index < mProperties->size())
+ {
+ return mProperties->valueAt(index).string();
+ }
+ return NULL;
+}
+
+};
diff --git a/camera/CameraProperties.cpp b/camera/CameraProperties.cpp
new file mode 100644
index 0000000..47b2177
--- /dev/null
+++ b/camera/CameraProperties.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraProperties.cpp
+*
+* This file implements the CameraProperties class, which enumerates the available
+* cameras and loads their capability properties through the camera adapters.
+*
+*/
+
+//#include "CameraHal.h"
+#include "DebugUtils.h"
+#include "CameraProperties.h"
+
+#define CAMERA_ROOT "CameraRoot"
+#define CAMERA_INSTANCE "CameraInstance"
+
+namespace android {
+
+// lower entries have higher priority
+static const char* g_camera_adapters[] = {
+#ifdef OMAP4_SUPPORT_OMX_CAMERA_ADAPTER
+ "libomxcameraadapter.so",
+#endif
+#ifdef OMAP4_SUPPORT_USB_CAMERA_ADAPTER
+ "libusbcameraadapter.so"
+#endif
+};
+
+/*********************************************************
+ CameraProperties - public function implementation
+**********************************************************/
+
+CameraProperties::CameraProperties() : mCamerasSupported(0)
+{
+ LOG_FUNCTION_NAME;
+
+ mCamerasSupported = 0;
+ mInitialized = 0;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+CameraProperties::~CameraProperties()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+// Initializes the CameraProperties class
+status_t CameraProperties::initialize()
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret;
+
+ if(mInitialized)
+ return NO_ERROR;
+
+ ret = loadProperties();
+
+ mInitialized = 1;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
+ const unsigned int starting_camera,
+ const unsigned int max_camera);
+
+///Loads all the Camera related properties
+status_t CameraProperties::loadProperties()
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+
+ // adapter updates capabilities and we update camera count
+ mCamerasSupported = CameraAdapter_Capabilities(mCameraProps, mCamerasSupported, MAX_CAMERAS_SUPPORTED);
+
+ if((int)mCamerasSupported < 0) {
+ LOGE("error while getting capabilities");
+ ret = UNKNOWN_ERROR;
+ } else if (mCamerasSupported > MAX_CAMERAS_SUPPORTED) {
+ LOGE("returned too many adapaters");
+ ret = UNKNOWN_ERROR;
+ } else {
+ LOGE("num_cameras = %d", mCamerasSupported);
+
+ for (unsigned int i = 0; i < mCamerasSupported; i++) {
+ mCameraProps[i].set(CAMERA_SENSOR_INDEX, i);
+ mCameraProps[i].dump();
+ }
+ }
+
+ LOGV("mCamerasSupported = %d", mCamerasSupported);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+// Returns the number of Cameras found
+int CameraProperties::camerasSupported()
+{
+ LOG_FUNCTION_NAME;
+ return mCamerasSupported;
+}
+
+};
diff --git a/camera/MemoryManager.cpp b/camera/MemoryManager.cpp
new file mode 100644
index 0000000..f86b263
--- /dev/null
+++ b/camera/MemoryManager.cpp
@@ -0,0 +1,284 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#define LOG_TAG "CameraHAL"
+
+
+#include "CameraHal.h"
+#include "TICameraParameters.h"
+
+extern "C" {
+
+#include "memmgr.h"
+#include "tiler.h"
+//#include <timm_osal_interfaces.h>
+//#include <timm_osal_trace.h>
+
+
+};
+
+namespace android {
+
+///@todo Move these constants to a common header file, preferably in tiler.h
+#define STRIDE_8BIT (4 * 1024)
+#define STRIDE_16BIT (4 * 1024)
+
+#define ALLOCATION_2D 2
+
+///Utility Macro Declarations
+#define ZERO_OUT_ARR(a,b) { for(unsigned int i=0;i<b;i++) a[i]=NULL;}
+
+#define ZERO_OUT_STRUCT(a, b) memset(a, 0, sizeof(b));
+
+/*--------------------MemoryManager Class STARTS here-----------------------------*/
+void* MemoryManager::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs)
+{
+ LOG_FUNCTION_NAME;
+ ///We allocate numBufs+1 because the last entry will be marked NULL to indicate end of array, which is used when freeing
+ ///the buffers
+ const uint numArrayEntriesC = (uint)(numBufs+1);
+
+ MemAllocBlock *tMemBlock;
+
+
+ ///Allocate a buffer array
+ uint32_t *bufsArr = new uint32_t[numArrayEntriesC];
+ if(!bufsArr)
+ {
+ CAMHAL_LOGEB("Allocation failed when creating buffers array of %d uint32_t elements", numArrayEntriesC);
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+ }
+
+ ///Initialize the array with zeros - this will help us while freeing the array in case of error
+    ///If a value of an array element is NULL, it means we didn't allocate it
+ ZERO_OUT_ARR(bufsArr, numArrayEntriesC);
+
+ ///If the bytes field is not zero, it means it is a 1-D tiler buffer request (possibly for image capture bit stream buffer)
+ if(bytes!=0)
+ {
+ ///MemAllocBlock is the structure that describes the buffer alloc request to MemMgr
+ tMemBlock = (MemAllocBlock*)malloc(sizeof(MemAllocBlock));
+
+ if(!tMemBlock)
+ {
+ delete [] bufsArr;
+ return NULL;
+ }
+
+ ZERO_OUT_STRUCT(tMemBlock, MemAllocBlock );
+
+ ///1D buffers
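+        ///PIXEL_FMT_PAGE requests a linear (page-mode) tiler container of 'bytes'
+        ///length; stride is not meaningful for 1D allocations, hence it is set to 0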
+ for (int i = 0; i < numBufs; i++)
+ {
+ tMemBlock->dim.len = bytes;
+ tMemBlock->pixelFormat = PIXEL_FMT_PAGE;
+ tMemBlock->stride = 0;
+ CAMHAL_LOGDB("requested bytes = %d", bytes);
+ CAMHAL_LOGDB("tMemBlock.dim.len = %d", tMemBlock->dim.len);
+ bufsArr[i] = (uint32_t)MemMgr_Alloc(tMemBlock, 1);
+ if(!bufsArr[i])
+ {
+ LOGE("Buffer allocation failed for iteration %d", i);
+ goto error;
+ }
+ else
+ {
+ CAMHAL_LOGDB("Allocated Tiler PAGED mode buffer address[%x]", bufsArr[i]);
+ }
+ }
+
+ }
+    else ///If bytes is zero, then it is a 2-D tiler buffer request
+ {
+ ///2D buffers
+ ///MemAllocBlock is the structure that describes the buffer alloc request to MemMgr
+ tMemBlock = (MemAllocBlock*)malloc(sizeof(MemAllocBlock)*ALLOCATION_2D);
+
+ if(!tMemBlock)
+ {
+ delete [] bufsArr;
+ return NULL;
+ }
+
+ memset(tMemBlock, 0, sizeof(MemAllocBlock)*ALLOCATION_2D);
+
+ for (int i = 0; i < numBufs; i++)
+ {
+ int numAllocs = 1;
+ pixel_fmt_t pixelFormat[ALLOCATION_2D];
+ int stride[ALLOCATION_2D];
+
+ if(!strcmp(format,(const char *) CameraParameters::PIXEL_FORMAT_YUV422I))
+ {
+ ///YUV422I format
+ pixelFormat[0] = PIXEL_FMT_16BIT;
+ stride[0] = STRIDE_16BIT;
+ numAllocs = 1;
+ }
+ else if(!strcmp(format,(const char *) CameraParameters::PIXEL_FORMAT_YUV420SP))
+ {
+ ///YUV420 NV12 format
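+                ///NV12 needs two tiler containers: an 8-bit block for the Y plane and a 16-bit block for the interleaved UV plane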
+ pixelFormat[0] = PIXEL_FMT_8BIT;
+ pixelFormat[1] = PIXEL_FMT_16BIT;
+ stride[0] = STRIDE_8BIT;
+ stride[1] = STRIDE_16BIT;
+ numAllocs = 2;
+ }
+ else if(!strcmp(format,(const char *) CameraParameters::PIXEL_FORMAT_RGB565))
+ {
+ ///RGB 565 format
+ pixelFormat[0] = PIXEL_FMT_16BIT;
+ stride[0] = STRIDE_16BIT;
+ numAllocs = 1;
+ }
+ else if(!strcmp(format,(const char *) TICameraParameters::PIXEL_FORMAT_RAW))
+ {
+ ///RAW format
+ pixelFormat[0] = PIXEL_FMT_16BIT;
+ stride[0] = STRIDE_16BIT;
+ numAllocs = 1;
+ }
+ else
+ {
+ ///By default assume YUV420 NV12 format
+ ///YUV420 NV12 format
+ pixelFormat[0] = PIXEL_FMT_8BIT;
+ pixelFormat[1] = PIXEL_FMT_16BIT;
+ stride[0] = STRIDE_8BIT;
+ stride[1] = STRIDE_16BIT;
+ numAllocs = 2;
+ }
+
+ for(int index=0;index<numAllocs;index++)
+ {
+ tMemBlock[index].pixelFormat = pixelFormat[index];
+ tMemBlock[index].stride = stride[index];
+ tMemBlock[index].dim.area.width= width;/*width*/
+ tMemBlock[index].dim.area.height= height;/*height*/
+ }
+
+ bufsArr[i] = (uint32_t)MemMgr_Alloc(tMemBlock, numAllocs);
+ if(!bufsArr[i])
+ {
+ CAMHAL_LOGEB("Buffer allocation failed for iteration %d", i);
+ goto error;
+ }
+ else
+ {
+                CAMHAL_LOGDB("Allocated Tiler 2D mode buffer address[%x]", bufsArr[i]);
+ }
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+
+ ///Free the request structure before returning from the function
+ free(tMemBlock);
+
+ return (void*)bufsArr;
+
+ error:
+ LOGE("Freeing buffers already allocated after error occurred");
+ freeBuffer(bufsArr);
+ free(tMemBlock);
+
+ if ( NULL != mErrorNotifier.get() )
+ {
+ mErrorNotifier->errorNotify(-ENOMEM);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+}
+
+//TODO: Get needed data to map tiler buffers
+//Return dummy data for now
+uint32_t * MemoryManager::getOffsets()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NULL;
+}
+
+int MemoryManager::getFd()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return -1;
+}
+
+int MemoryManager::freeBuffer(void* buf)
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ uint32_t *bufEntry = (uint32_t*)buf;
+
+ if(!bufEntry)
+ {
+        CAMHAL_LOGEA("NULL pointer passed to freeBuffer");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
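+    ///Walk the NULL-terminated buffer array and free each tiler allocation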
+ while(*bufEntry)
+ {
+ ret |= MemMgr_Free((void*)*bufEntry++);
+ }
+
+ ///@todo Check if this way of deleting array is correct, else use malloc/free
+ uint32_t * bufArr = (uint32_t*)buf;
+ delete [] bufArr;
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t MemoryManager::setErrorHandler(ErrorNotifier *errorNotifier)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == errorNotifier )
+ {
+ CAMHAL_LOGEA("Invalid Error Notifier reference");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mErrorNotifier = errorNotifier;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+};
+
+
+/*--------------------MemoryManager Class ENDS here-----------------------------*/
diff --git a/camera/OMXCameraAdapter/OMX3A.cpp b/camera/OMXCameraAdapter/OMX3A.cpp
new file mode 100644
index 0000000..1c347a2
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMX3A.cpp
@@ -0,0 +1,1083 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMX3A.cpp
+*
+* This file contains functionality for handling 3A configurations.
+*
+*/
+
+#undef LOG_TAG
+
+#define LOG_TAG "CameraHAL"
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+
+namespace android {
+
+status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+ int mode = 0;
+ const char *str = NULL;
+
+ LOG_FUNCTION_NAME;
+
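+    //Each 3A change below is cached in mParameters3A and flagged in the mPending3Asettings bitmask, to be applied later through apply3Asettings()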
+ str = params.get(TICameraParameters::KEY_EXPOSURE_MODE);
+ mode = getLUTvalue_HALtoOMX( str, ExpLUT);
+ if ( ( str != NULL ) && ( mParameters3A.Exposure != mode ) )
+ {
+ mParameters3A.Exposure = mode;
+ CAMHAL_LOGDB("Exposure mode %d", mode);
+ if ( 0 <= mParameters3A.Exposure )
+ {
+ mPending3Asettings |= SetExpMode;
+ }
+ }
+
+ str = params.get(CameraParameters::KEY_WHITE_BALANCE);
+ mode = getLUTvalue_HALtoOMX( str, WBalLUT);
+ if ( ( mFirstTimeInit || ( str != NULL ) ) && ( mode != mParameters3A.WhiteBallance ) )
+ {
+ mParameters3A.WhiteBallance = mode;
+ CAMHAL_LOGDB("Whitebalance mode %d", mode);
+ if ( 0 <= mParameters3A.WhiteBallance )
+ {
+ mPending3Asettings |= SetWhiteBallance;
+ }
+ }
+
+ if ( 0 <= params.getInt(TICameraParameters::KEY_CONTRAST) )
+ {
+ if ( mFirstTimeInit ||
+ ( (mParameters3A.Contrast + CONTRAST_OFFSET) !=
+ params.getInt(TICameraParameters::KEY_CONTRAST)) )
+ {
+ mParameters3A.Contrast = params.getInt(TICameraParameters::KEY_CONTRAST) - CONTRAST_OFFSET;
+ CAMHAL_LOGDB("Contrast %d", mParameters3A.Contrast);
+ mPending3Asettings |= SetContrast;
+ }
+ }
+
+ if ( 0 <= params.getInt(TICameraParameters::KEY_SHARPNESS) )
+ {
+ if ( mFirstTimeInit ||
+ ((mParameters3A.Sharpness + SHARPNESS_OFFSET) !=
+ params.getInt(TICameraParameters::KEY_SHARPNESS)))
+ {
+ mParameters3A.Sharpness = params.getInt(TICameraParameters::KEY_SHARPNESS) - SHARPNESS_OFFSET;
+ CAMHAL_LOGDB("Sharpness %d", mParameters3A.Sharpness);
+ mPending3Asettings |= SetSharpness;
+ }
+ }
+
+ if ( 0 <= params.getInt(TICameraParameters::KEY_SATURATION) )
+ {
+ if ( mFirstTimeInit ||
+ ((mParameters3A.Saturation + SATURATION_OFFSET) !=
+ params.getInt(TICameraParameters::KEY_SATURATION)) )
+ {
+ mParameters3A.Saturation = params.getInt(TICameraParameters::KEY_SATURATION) - SATURATION_OFFSET;
+ CAMHAL_LOGDB("Saturation %d", mParameters3A.Saturation);
+ mPending3Asettings |= SetSaturation;
+ }
+ }
+
+ if ( 0 <= params.getInt(TICameraParameters::KEY_BRIGHTNESS) )
+ {
+ if ( mFirstTimeInit ||
+ (( mParameters3A.Brightness !=
+ ( unsigned int ) params.getInt(TICameraParameters::KEY_BRIGHTNESS))) )
+ {
+ mParameters3A.Brightness = (unsigned)params.getInt(TICameraParameters::KEY_BRIGHTNESS);
+ CAMHAL_LOGDB("Brightness %d", mParameters3A.Brightness);
+ mPending3Asettings |= SetBrightness;
+ }
+ }
+
+ str = params.get(CameraParameters::KEY_ANTIBANDING);
+ mode = getLUTvalue_HALtoOMX(str,FlickerLUT);
+ if ( mFirstTimeInit || ( ( str != NULL ) && ( mParameters3A.Flicker != mode ) ))
+ {
+ mParameters3A.Flicker = mode;
+ CAMHAL_LOGDB("Flicker %d", mParameters3A.Flicker);
+ if ( 0 <= mParameters3A.Flicker )
+ {
+ mPending3Asettings |= SetFlicker;
+ }
+ }
+
+ str = params.get(TICameraParameters::KEY_ISO);
+ mode = getLUTvalue_HALtoOMX(str, IsoLUT);
+ CAMHAL_LOGVB("ISO mode arrived in HAL : %s", str);
+ if ( mFirstTimeInit || ( ( str != NULL ) && ( mParameters3A.ISO != mode )) )
+ {
+ mParameters3A.ISO = mode;
+ CAMHAL_LOGDB("ISO %d", mParameters3A.ISO);
+ if ( 0 <= mParameters3A.ISO )
+ {
+ mPending3Asettings |= SetISO;
+ }
+ }
+
+ str = params.get(CameraParameters::KEY_FOCUS_MODE);
+ mode = getLUTvalue_HALtoOMX(str, FocusLUT);
+ if ( mFirstTimeInit || ( ( str != NULL ) && ( mParameters3A.Focus != mode ) ) )
+ {
+        //Apply focus mode immediately only if CAF or Infinity are selected
+ if ( ( mode == OMX_IMAGE_FocusControlAuto ) ||
+ ( mode == OMX_IMAGE_FocusControlAutoInfinity ) )
+ {
+ mPending3Asettings |= SetFocus;
+ mParameters3A.Focus = mode;
+ }
+ else if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAuto )
+ {
+ //If we switch from CAF to something else, then disable CAF
+ mPending3Asettings |= SetFocus;
+ mParameters3A.Focus = OMX_IMAGE_FocusControlOff;
+ }
+
+ mParameters3A.Focus = mode;
+ CAMHAL_LOGDB("Focus %x", mParameters3A.Focus);
+ }
+
+ str = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION);
+ if ( mFirstTimeInit ||
+ (( str != NULL ) &&
+ (mParameters3A.EVCompensation !=
+ params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION))))
+ {
+ CAMHAL_LOGDB("Setting EV Compensation to %d",
+ params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION));
+
+ mParameters3A.EVCompensation = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
+ mPending3Asettings |= SetEVCompensation;
+ }
+
+ str = params.get(CameraParameters::KEY_SCENE_MODE);
+ mode = getLUTvalue_HALtoOMX( str, SceneLUT);
+ if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.SceneMode != mode )) )
+ {
+ if ( 0 <= mode )
+ {
+ mParameters3A.SceneMode = mode;
+ mPending3Asettings |= SetSceneMode;
+ }
+ else
+ {
+ mParameters3A.SceneMode = OMX_Manual;
+ }
+
+ CAMHAL_LOGDB("SceneMode %d", mParameters3A.SceneMode);
+ }
+
+ str = params.get(CameraParameters::KEY_FLASH_MODE);
+ mode = getLUTvalue_HALtoOMX( str, FlashLUT);
+ if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.FlashMode != mode )) )
+ {
+ if ( 0 <= mode )
+ {
+ mParameters3A.FlashMode = mode;
+ mPending3Asettings |= SetFlash;
+ }
+ else
+ {
+ mParameters3A.FlashMode = OMX_Manual;
+ }
+ }
+
+ CAMHAL_LOGVB("Flash Setting %s", str);
+ CAMHAL_LOGVB("FlashMode %d", mParameters3A.FlashMode);
+
+ str = params.get(CameraParameters::KEY_EFFECT);
+ mode = getLUTvalue_HALtoOMX( str, EffLUT);
+ if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.Effect != mode )) )
+ {
+ mParameters3A.Effect = mode;
+ CAMHAL_LOGDB("Effect %d", mParameters3A.Effect);
+ if ( 0 <= mParameters3A.Effect )
+ {
+ mPending3Asettings |= SetEffect;
+ }
+ }
+
+ str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED);
+ if ( (str != NULL) && (!strcmp(str, "true")) )
+ {
+ OMX_BOOL lock = OMX_FALSE;
+ str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
+        if ( ( str != NULL ) && ( strcmp(str, "true") == 0 ) )
+ {
+ CAMHAL_LOGVA("Locking Exposure");
+ lock = OMX_TRUE;
+ }
+ else
+ {
+ CAMHAL_LOGVA("UnLocking Exposure");
+ }
+ if (mParameters3A.ExposureLock != lock)
+ {
+ mParameters3A.ExposureLock = lock;
+ CAMHAL_LOGDB("ExposureLock %d", lock);
+ mPending3Asettings |= SetExpLock;
+ }
+ }
+
+ str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED);
+ if ( (str != NULL) && (!strcmp(str, "true")) )
+ {
+ OMX_BOOL lock = OMX_FALSE;
+ str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
+        if ( ( str != NULL ) && ( strcmp(str, "true") == 0 ) )
+ {
+ CAMHAL_LOGVA("Locking WhiteBalance");
+ lock = OMX_TRUE;
+ }
+ else
+ {
+ CAMHAL_LOGVA("UnLocking WhiteBalance");
+ }
+ if (mParameters3A.WhiteBalanceLock != lock)
+ {
+ mParameters3A.WhiteBalanceLock = lock;
+ CAMHAL_LOGDB("WhiteBalanceLock %d", lock);
+ mPending3Asettings |= SetWBLock;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
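+//Linear search of the LUT: translates an Android parameter string into its OMX enum value, returns -ENOENT when no match is found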
+int OMXCameraAdapter::getLUTvalue_HALtoOMX(const char * HalValue, LUTtype LUT)
+{
+ int LUTsize = LUT.size;
+ if( HalValue )
+ for(int i = 0; i < LUTsize; i++)
+ if( 0 == strcmp(LUT.Table[i].userDefinition, HalValue) )
+ return LUT.Table[i].omxDefinition;
+
+ return -ENOENT;
+}
+
+const char* OMXCameraAdapter::getLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT)
+{
+ int LUTsize = LUT.size;
+ for(int i = 0; i < LUTsize; i++)
+ if( LUT.Table[i].omxDefinition == OMXValue )
+ return LUT.Table[i].userDefinition;
+
+ return NULL;
+}
+
+status_t OMXCameraAdapter::setExposureMode(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_EXPOSURECONTROLTYPE exp;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ if ( EXPOSURE_FACE_PRIORITY == Gen3A.Exposure )
+ {
+ //Disable Region priority and enable Face priority
+ setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, false);
+ setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, true);
+
+ //Then set the mode to auto
+        Gen3A.Exposure = OMX_ExposureControlAuto;
+ }
+ else
+ {
+ //Disable Face and Region priority
+ setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, false);
+ setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, false);
+ }
+
+ OMX_INIT_STRUCT_PTR (&exp, OMX_CONFIG_EXPOSURECONTROLTYPE);
+ exp.nPortIndex = OMX_ALL;
+ exp.eExposureControl = (OMX_EXPOSURECONTROLTYPE)Gen3A.Exposure;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposure,
+ &exp);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring exposure mode 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Camera exposure mode configured successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setFlashMode(Gen3A_settings& Gen3A)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_PARAM_FLASHCONTROLTYPE flash;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&flash, OMX_IMAGE_PARAM_FLASHCONTROLTYPE);
+ flash.nPortIndex = OMX_ALL;
+ flash.eFlashControl = ( OMX_IMAGE_FLASHCONTROLTYPE ) Gen3A.FlashMode;
+
+ CAMHAL_LOGDB("Configuring flash mode 0x%x", flash.eFlashControl);
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_IndexConfigFlashControl,
+ &flash);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring flash mode 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Camera flash mode configured successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setFocusMode(Gen3A_settings& Gen3A)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focus;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ //First Disable Face and Region priority
+ ret |= setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, false);
+ ret |= setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&focus, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
+ focus.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ focus.eFocusControl = (OMX_IMAGE_FOCUSCONTROLTYPE)Gen3A.Focus;
+
+ CAMHAL_LOGDB("Configuring focus mode 0x%x", focus.eFocusControl);
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, OMX_IndexConfigFocusControl, &focus);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring focus mode 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Camera focus mode configured successfully");
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setScene(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_SCENEMODETYPE scene;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&scene, OMX_CONFIG_SCENEMODETYPE);
+ scene.nPortIndex = OMX_ALL;
+ scene.eSceneMode = ( OMX_SCENEMODETYPE ) Gen3A.SceneMode;
+
+ CAMHAL_LOGDB("Configuring scene mode 0x%x", scene.eSceneMode);
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSceneMode,
+ &scene);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring scene mode 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Camera scene configured successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setEVCompensation(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_EXPOSUREVALUETYPE expValues;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&expValues, OMX_CONFIG_EXPOSUREVALUETYPE);
+ expValues.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expValues);
+ CAMHAL_LOGDB("old EV Compensation for OMX = 0x%x", (int)expValues.xEVCompensation);
+ CAMHAL_LOGDB("EV Compensation for HAL = %d", Gen3A.EVCompensation);
+
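+    //The HAL EV compensation value is treated as tenths of an EV step; convert it to the OMX Q16 fixed-point representation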
+ expValues.xEVCompensation = ( Gen3A.EVCompensation * ( 1 << Q16_OFFSET ) ) / 10;
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expValues);
+ CAMHAL_LOGDB("new EV Compensation for OMX = 0x%x", (int)expValues.xEVCompensation);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring EV Compensation 0x%x error = 0x%x",
+ ( unsigned int ) expValues.xEVCompensation,
+ eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("EV Compensation 0x%x configured successfully",
+ ( unsigned int ) expValues.xEVCompensation);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setWBMode(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_WHITEBALCONTROLTYPE wb;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&wb, OMX_CONFIG_WHITEBALCONTROLTYPE);
+ wb.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ wb.eWhiteBalControl = ( OMX_WHITEBALCONTROLTYPE ) Gen3A.WhiteBallance;
+
+ if ( WB_FACE_PRIORITY == Gen3A.WhiteBallance )
+ {
+ //Disable Region priority and enable Face priority
+ setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false);
+ setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, true);
+
+ //Then set the mode to auto
+ Gen3A.WhiteBallance = OMX_WhiteBalControlAuto;
+ }
+ else
+ {
+ //Disable Face and Region priority
+ setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, false);
+ setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false);
+ }
+
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonWhiteBalance,
+ &wb);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring Whitebalance mode 0x%x error = 0x%x",
+ ( unsigned int ) wb.eWhiteBalControl,
+ eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("Whitebalance mode 0x%x configured successfully",
+ ( unsigned int ) wb.eWhiteBalControl);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return eError;
+}
+
+status_t OMXCameraAdapter::setFlicker(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_FLICKERCANCELTYPE flicker;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&flicker, OMX_CONFIG_FLICKERCANCELTYPE);
+ flicker.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ flicker.eFlickerCancel = (OMX_COMMONFLICKERCANCELTYPE)Gen3A.Flicker;
+
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_IndexConfigFlickerCancel,
+ &flicker );
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring Flicker mode 0x%x error = 0x%x",
+ ( unsigned int ) flicker.eFlickerCancel,
+ eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("Flicker mode 0x%x configured successfully",
+ ( unsigned int ) flicker.eFlickerCancel);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setBrightness(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_BRIGHTNESSTYPE brightness;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&brightness, OMX_CONFIG_BRIGHTNESSTYPE);
+ brightness.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ brightness.nBrightness = Gen3A.Brightness;
+
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonBrightness,
+ &brightness);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring Brightness 0x%x error = 0x%x",
+ ( unsigned int ) brightness.nBrightness,
+ eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("Brightness 0x%x configured successfully",
+ ( unsigned int ) brightness.nBrightness);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setContrast(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_CONTRASTTYPE contrast;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&contrast, OMX_CONFIG_CONTRASTTYPE);
+ contrast.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ contrast.nContrast = Gen3A.Contrast;
+
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonContrast,
+ &contrast);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring Contrast 0x%x error = 0x%x",
+ ( unsigned int ) contrast.nContrast,
+ eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("Contrast 0x%x configured successfully",
+ ( unsigned int ) contrast.nContrast);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return eError;
+}
+
+status_t OMXCameraAdapter::setSharpness(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE procSharpness;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&procSharpness, OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE);
+ procSharpness.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ procSharpness.nLevel = Gen3A.Sharpness;
+
+ if( procSharpness.nLevel == 0 )
+ {
+ procSharpness.bAuto = OMX_TRUE;
+ }
+ else
+ {
+ procSharpness.bAuto = OMX_FALSE;
+ }
+
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_IndexConfigSharpeningLevel,
+ &procSharpness);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring Sharpness 0x%x error = 0x%x",
+ ( unsigned int ) procSharpness.nLevel,
+ eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("Sharpness 0x%x configured successfully",
+ ( unsigned int ) procSharpness.nLevel);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setSaturation(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_SATURATIONTYPE saturation;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&saturation, OMX_CONFIG_SATURATIONTYPE);
+ saturation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ saturation.nSaturation = Gen3A.Saturation;
+
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonSaturation,
+ &saturation);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring Saturation 0x%x error = 0x%x",
+ ( unsigned int ) saturation.nSaturation,
+ eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("Saturation 0x%x configured successfully",
+ ( unsigned int ) saturation.nSaturation);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setISO(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_EXPOSUREVALUETYPE expValues;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&expValues, OMX_CONFIG_EXPOSUREVALUETYPE);
+ expValues.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expValues);
+
+ if( 0 == Gen3A.ISO )
+ {
+ expValues.bAutoSensitivity = OMX_TRUE;
+ }
+ else
+ {
+ expValues.bAutoSensitivity = OMX_FALSE;
+ expValues.nSensitivity = Gen3A.ISO;
+ }
+
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &expValues);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring ISO 0x%x error = 0x%x",
+ ( unsigned int ) expValues.nSensitivity,
+ eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("ISO 0x%x configured successfully",
+ ( unsigned int ) expValues.nSensitivity);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setEffect(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_IMAGEFILTERTYPE effect;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&effect, OMX_CONFIG_IMAGEFILTERTYPE);
+ effect.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ effect.eImageFilter = (OMX_IMAGEFILTERTYPE ) Gen3A.Effect;
+
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonImageFilter,
+ &effect);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring Effect 0x%x error = 0x%x",
+ ( unsigned int ) effect.eImageFilter,
+ eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("Effect 0x%x configured successfully",
+ ( unsigned int ) effect.eImageFilter);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setWhiteBalanceLock(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_CONFIG_LOCKTYPE lock;
+
+    LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE);
+ lock.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ lock.bLock = Gen3A.WhiteBalanceLock;
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_IndexConfigImageWhiteBalanceLock,
+ &lock);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring WhiteBalance Lock error = 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("WhiteBalance Lock configured successfully %d ", lock.bLock);
+ }
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::setExposureLock(Gen3A_settings& Gen3A)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_CONFIG_LOCKTYPE lock;
+
+    LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE);
+ lock.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ lock.bLock = Gen3A.ExposureLock;
+ eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_IndexConfigImageExposureLock,
+ &lock);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring Exposure Lock error = 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("Exposure Lock configured successfully %d ", lock.bLock);
+ }
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::release3ALock()
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_CONFIG_LOCKTYPE lock;
+
+    LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ return NO_INIT;
+ }
+
+ OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE);
+ lock.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ mParameters3A.ExposureLock = OMX_FALSE;
+ mParameters3A.WhiteBalanceLock = OMX_FALSE;
+
+ eError = OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_IndexConfigImageExposureLock,
+ &lock);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error GetConfig Exposure Lock error = 0x%x", eError);
+ }
+ else
+ {
+        CAMHAL_LOGDA("Exposure Lock GetConfig successful");
+ }
+
+ /*if locked then unlock */
+ if ( lock.bLock )
+ {
+ setExposureLock(mParameters3A);
+ }
+
+ eError = OMX_GetConfig( mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_IndexConfigImageWhiteBalanceLock,
+ &lock);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error GetConfig WhiteBalance Lock error = 0x%x", eError);
+ }
+ else
+ {
+        CAMHAL_LOGDA("WhiteBalance Lock GetConfig successful");
+ }
+
+ /*if locked then unlock */
+ if ( lock.bLock )
+ {
+ setWhiteBalanceLock(mParameters3A);
+ }
+
+ return ErrorUtils::omxToAndroidError(eError);
+
+}
+
+status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
+{
+ status_t ret = NO_ERROR;
+ unsigned int currSett; // 32 bit
+ int portIndex;
+
+ /*
+ * Scenes have a priority during the process
+ * of applying 3A related parameters.
+ * They can override pretty much all other 3A
+ * settings and similarly get overridden when
+ * for instance the focus mode gets switched.
+ * There is only one exception to this rule,
+ * the manual a.k.a. auto scene.
+ */
+ if ( ( SetSceneMode & mPending3Asettings ) )
+ {
+ mPending3Asettings &= ~SetSceneMode;
+ return setScene(Gen3A);
+ }
+ else if ( OMX_Manual != Gen3A.SceneMode )
+ {
+ mPending3Asettings = 0;
+ return NO_ERROR;
+ }
+
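+    //Walk every bit of the pending settings mask and apply the corresponding 3A configuration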
+ for( currSett = 1; currSett < E3aSettingMax; currSett <<= 1)
+ {
+ if( currSett & mPending3Asettings )
+ {
+ switch( currSett )
+ {
+ case SetEVCompensation:
+ {
+ ret |= setEVCompensation(Gen3A);
+ break;
+ }
+
+ case SetWhiteBallance:
+ {
+ ret |= setWBMode(Gen3A);
+ break;
+ }
+
+ case SetFlicker:
+ {
+ ret |= setFlicker(Gen3A);
+ break;
+ }
+
+ case SetBrightness:
+ {
+ ret |= setBrightness(Gen3A);
+ break;
+ }
+
+ case SetContrast:
+ {
+ ret |= setContrast(Gen3A);
+ break;
+ }
+
+ case SetSharpness:
+ {
+ ret |= setSharpness(Gen3A);
+ break;
+ }
+
+ case SetSaturation:
+ {
+ ret |= setSaturation(Gen3A);
+ break;
+ }
+
+ case SetISO:
+ {
+ ret |= setISO(Gen3A);
+ break;
+ }
+
+ case SetEffect:
+ {
+ ret |= setEffect(Gen3A);
+ break;
+ }
+
+ case SetFocus:
+ {
+ ret |= setFocusMode(Gen3A);
+ break;
+ }
+
+ case SetExpMode:
+ {
+ ret |= setExposureMode(Gen3A);
+ break;
+ }
+
+ case SetFlash:
+ {
+ ret |= setFlashMode(Gen3A);
+ break;
+ }
+
+ case SetExpLock:
+ {
+ ret |= setExposureLock(Gen3A);
+ break;
+ }
+
+ case SetWBLock:
+ {
+ ret |= setWhiteBalanceLock(Gen3A);
+ break;
+ }
+
+ default:
+ CAMHAL_LOGEB("this setting (0x%x) is still not supported in CameraAdapter ",
+ currSett);
+ break;
+ }
+ mPending3Asettings &= ~currSett;
+ }
+ }
+ return ret;
+}
+
+};
diff --git a/camera/OMXCameraAdapter/OMXAlgo.cpp b/camera/OMXCameraAdapter/OMXAlgo.cpp
new file mode 100644
index 0000000..ef9414e
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXAlgo.cpp
@@ -0,0 +1,1181 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXAlgo.cpp
+*
+* This file contains functionality for handling algorithm configurations.
+*
+*/
+
+#undef LOG_TAG
+
+#define LOG_TAG "CameraHAL"
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+namespace android {
+
+status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+ const char *valstr = NULL;
+ const char *oldstr = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ CaptureMode capMode;
+ CAMHAL_LOGEB("Capture mode %s", params.get(TICameraParameters::KEY_CAP_MODE));
+ if ( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL )
+ {
+ if (strcmp(valstr, (const char *) TICameraParameters::HIGH_PERFORMANCE_MODE) == 0)
+ {
+ capMode = OMXCameraAdapter::HIGH_SPEED;
+ }
+ else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_MODE) == 0)
+ {
+ capMode = OMXCameraAdapter::HIGH_QUALITY;
+ }
+ else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_ZSL_MODE) == 0)
+ {
+ capMode = OMXCameraAdapter::HIGH_QUALITY_ZSL;
+ }
+ else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0)
+ {
+ capMode = OMXCameraAdapter::VIDEO_MODE;
+ }
+ else
+ {
+ capMode = OMXCameraAdapter::HIGH_QUALITY;
+ }
+ }
+ else
+ {
+ capMode = OMXCameraAdapter::HIGH_QUALITY_ZSL;
+
+ }
+
+ if ( mCapMode != capMode )
+ {
+ mCapMode = capMode;
+ mOMXStateSwitch = true;
+ }
+
+ CAMHAL_LOGDB("Capture Mode set %d", mCapMode);
+
+ /// Configure IPP, LDCNSF, GBCE and GLBCE only in HQ mode
+ IPPMode ipp;
+ if((mCapMode == OMXCameraAdapter::HIGH_QUALITY) || (mCapMode == OMXCameraAdapter::HIGH_QUALITY_ZSL))
+ {
+ if ( (valstr = params.get(TICameraParameters::KEY_IPP)) != NULL )
+ {
+ if (strcmp(valstr, (const char *) TICameraParameters::IPP_LDCNSF) == 0)
+ {
+ ipp = OMXCameraAdapter::IPP_LDCNSF;
+ }
+ else if (strcmp(valstr, (const char *) TICameraParameters::IPP_LDC) == 0)
+ {
+ ipp = OMXCameraAdapter::IPP_LDC;
+ }
+ else if (strcmp(valstr, (const char *) TICameraParameters::IPP_NSF) == 0)
+ {
+ ipp = OMXCameraAdapter::IPP_NSF;
+ }
+ else if (strcmp(valstr, (const char *) TICameraParameters::IPP_NONE) == 0)
+ {
+ ipp = OMXCameraAdapter::IPP_NONE;
+ }
+ else
+ {
+ ipp = OMXCameraAdapter::IPP_NONE;
+ }
+ }
+ else
+ {
+ ipp = OMXCameraAdapter::IPP_NONE;
+ }
+
+ CAMHAL_LOGVB("IPP Mode set %d", ipp);
+
+ if (((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) )
+ {
+ // Configure GBCE only if the setting has changed since last time
+ oldstr = mParams.get(TICameraParameters::KEY_GBCE);
+ bool cmpRes = true;
+ if ( NULL != oldstr )
+ {
+ cmpRes = strcmp(valstr, oldstr) != 0;
+ }
+ else
+ {
+ cmpRes = true;
+ }
+
+
+ if( cmpRes )
+ {
+ if (strcmp(valstr, ( const char * ) TICameraParameters::GBCE_ENABLE ) == 0)
+ {
+ setGBCE(OMXCameraAdapter::BRIGHTNESS_ON);
+ }
+ else if (strcmp(valstr, ( const char * ) TICameraParameters::GBCE_DISABLE ) == 0)
+ {
+ setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
+ }
+ else
+ {
+ setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
+ }
+ }
+ }
+ else
+ {
+ //Disable GBCE by default
+ setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
+ }
+
+ if ( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL )
+ {
+ // Configure GLBCE only if the setting has changed since last time
+
+ oldstr = mParams.get(TICameraParameters::KEY_GLBCE);
+ bool cmpRes = true;
+ if ( NULL != oldstr )
+ {
+ cmpRes = strcmp(valstr, oldstr) != 0;
+ }
+ else
+ {
+ cmpRes = true;
+ }
+
+
+ if( cmpRes )
+ {
+ if (strcmp(valstr, ( const char * ) TICameraParameters::GLBCE_ENABLE ) == 0)
+ {
+ setGLBCE(OMXCameraAdapter::BRIGHTNESS_ON);
+ }
+ else if (strcmp(valstr, ( const char * ) TICameraParameters::GLBCE_DISABLE ) == 0)
+ {
+ setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
+ }
+ else
+ {
+ setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
+ }
+ }
+ }
+ else
+ {
+ //Disable GLBCE by default
+ setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF);
+ }
+ }
+ else
+ {
+ ipp = OMXCameraAdapter::IPP_NONE;
+ }
+
+ if ( mIPP != ipp )
+ {
+ mIPP = ipp;
+ mOMXStateSwitch = true;
+ }
+
+ ///Set VNF Configuration
+ bool vnfEnabled = false;
+ if ( params.getInt(TICameraParameters::KEY_VNF) > 0 )
+ {
+ CAMHAL_LOGDA("VNF Enabled");
+ vnfEnabled = true;
+ }
+ else
+ {
+ CAMHAL_LOGDA("VNF Disabled");
+ vnfEnabled = false;
+ }
+
+ if ( mVnfEnabled != vnfEnabled )
+ {
+ mVnfEnabled = vnfEnabled;
+ mOMXStateSwitch = true;
+ }
+
+ ///Set VSTAB Configuration
+ bool vstabEnabled = false;
+ if ( params.getInt(TICameraParameters::KEY_VSTAB) > 0 )
+ {
+ CAMHAL_LOGDA("VSTAB Enabled");
+ vstabEnabled = true;
+ }
+ else
+ {
+ CAMHAL_LOGDA("VSTAB Disabled");
+ vstabEnabled = false;
+ }
+
+ if ( mVstabEnabled != vstabEnabled )
+ {
+ mVstabEnabled = vstabEnabled;
+ mOMXStateSwitch = true;
+ }
+
+ //A work-around for a failing call to OMX flush buffers
+    if ( ( capMode == OMXCameraAdapter::VIDEO_MODE ) &&
+ ( mVstabEnabled ) )
+ {
+ mOMXStateSwitch = true;
+ }
+
+ //Set Auto Convergence Mode
+ valstr = params.get((const char *) TICameraParameters::KEY_AUTOCONVERGENCE);
+ if ( valstr != NULL )
+ {
+ // Set ManualConvergence default value
+ OMX_S32 manualconvergence = -30;
+ if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE) == 0 )
+ {
+ setAutoConvergence(OMX_TI_AutoConvergenceModeDisable, manualconvergence);
+ }
+ else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_FRAME) == 0 )
+ {
+ setAutoConvergence(OMX_TI_AutoConvergenceModeFrame, manualconvergence);
+ }
+ else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_CENTER) == 0 )
+ {
+ setAutoConvergence(OMX_TI_AutoConvergenceModeCenter, manualconvergence);
+ }
+ else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_FFT) == 0 )
+ {
+ setAutoConvergence(OMX_TI_AutoConvergenceModeFocusFaceTouch, manualconvergence);
+ }
+ else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL) == 0 )
+ {
+ manualconvergence = (OMX_S32)params.getInt(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES);
+ setAutoConvergence(OMX_TI_AutoConvergenceModeManual, manualconvergence);
+ }
+ CAMHAL_LOGVB("AutoConvergenceMode %s, value = %d", valstr, (int) manualconvergence);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+// Get AutoConvergence
+status_t OMXCameraAdapter::getAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE *pACMode,
+ OMX_S32 *pManualConverence)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_CONVERGENCETYPE ACParams;
+
+ ACParams.nSize = sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
+ ACParams.nVersion = mLocalVersionParam;
+ ACParams.nPortIndex = OMX_ALL;
+
+ LOG_FUNCTION_NAME;
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
+ &ACParams);
+ if ( eError != OMX_ErrorNone )
+ {
+ CAMHAL_LOGEB("Error while getting AutoConvergence 0x%x", eError);
+ ret = -EINVAL;
+ }
+ else
+ {
+ *pManualConverence = ACParams.nManualConverence;
+ *pACMode = ACParams.eACMode;
+        CAMHAL_LOGDA("AutoConvergence retrieved successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+// Set AutoConvergence
+status_t OMXCameraAdapter::setAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE pACMode,
+ OMX_S32 pManualConverence)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_CONVERGENCETYPE ACParams;
+
+ LOG_FUNCTION_NAME;
+
+ ACParams.nSize = sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
+ ACParams.nVersion = mLocalVersionParam;
+ ACParams.nPortIndex = OMX_ALL;
+ ACParams.nManualConverence = pManualConverence;
+ ACParams.eACMode = pACMode;
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
+ &ACParams);
+ if ( eError != OMX_ErrorNone )
+ {
+ CAMHAL_LOGEB("Error while setting AutoConvergence 0x%x", eError);
+ ret = -EINVAL;
+ }
+ else
+ {
+ CAMHAL_LOGDA("AutoConvergence applied successfully");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::enableVideoNoiseFilter(bool enable)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_VIDEONOISEFILTERTYPE vnfCfg;
+
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&vnfCfg, OMX_PARAM_VIDEONOISEFILTERTYPE);
+
+ if ( enable )
+ {
+ CAMHAL_LOGDA("VNF is enabled");
+ vnfCfg.eMode = OMX_VideoNoiseFilterModeOn;
+ }
+ else
+ {
+ CAMHAL_LOGDA("VNF is disabled");
+ vnfCfg.eMode = OMX_VideoNoiseFilterModeOff;
+ }
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexParamVideoNoiseFilter,
+ &vnfCfg);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring video noise filter 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Video noise filter is configured successfully");
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::enableVideoStabilization(bool enable)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_FRAMESTABTYPE frameStabCfg;
+
+
+ LOG_FUNCTION_NAME;
+
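+    //VSTAB is configured in two steps: first the port-level frame stabilisation parameter, then the frame stabilisation config on the preview port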
+ if ( NO_ERROR == ret )
+ {
+ OMX_CONFIG_BOOLEANTYPE vstabp;
+ OMX_INIT_STRUCT_PTR (&vstabp, OMX_CONFIG_BOOLEANTYPE);
+ if(enable)
+ {
+ vstabp.bEnabled = OMX_TRUE;
+ }
+ else
+ {
+ vstabp.bEnabled = OMX_FALSE;
+ }
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_IndexParamFrameStabilisation,
+ &vstabp);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring video stabilization param 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Video stabilization param configured successfully");
+ }
+
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ OMX_INIT_STRUCT_PTR (&frameStabCfg, OMX_CONFIG_FRAMESTABTYPE);
+
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigCommonFrameStabilisation,
+ &frameStabCfg);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while getting video stabilization mode 0x%x",
+ (unsigned int)eError);
+ ret = -1;
+ }
+
+ CAMHAL_LOGDB("VSTAB Port Index = %d", (int)frameStabCfg.nPortIndex);
+
+ frameStabCfg.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ if ( enable )
+ {
+ CAMHAL_LOGDA("VSTAB is enabled");
+ frameStabCfg.bStab = OMX_TRUE;
+ }
+ else
+ {
+ CAMHAL_LOGDA("VSTAB is disabled");
+ frameStabCfg.bStab = OMX_FALSE;
+
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigCommonFrameStabilisation,
+ &frameStabCfg);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring video stabilization mode 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Video stabilization mode configured successfully");
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setGBCE(OMXCameraAdapter::BrightnessMode mode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE bControl;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&bControl, OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE);
+
+ bControl.nPortIndex = OMX_ALL;
+
+ switch ( mode )
+ {
+ case OMXCameraAdapter::BRIGHTNESS_ON:
+ {
+ bControl.eControl = OMX_TI_BceModeOn;
+ break;
+ }
+ case OMXCameraAdapter::BRIGHTNESS_AUTO:
+ {
+ bControl.eControl = OMX_TI_BceModeAuto;
+ break;
+ }
+ case OMXCameraAdapter::BRIGHTNESS_OFF:
+ default:
+ {
+ bControl.eControl = OMX_TI_BceModeOff;
+ break;
+ }
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigGlobalBrightnessContrastEnhance,
+ &bControl);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while setting GBCE 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDB("GBCE configured successfully 0x%x", mode);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setGLBCE(OMXCameraAdapter::BrightnessMode mode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE bControl;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&bControl, OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE);
+ bControl.nPortIndex = OMX_ALL;
+
+ switch ( mode )
+ {
+ case OMXCameraAdapter::BRIGHTNESS_ON:
+ {
+ bControl.eControl = OMX_TI_BceModeOn;
+ break;
+ }
+ case OMXCameraAdapter::BRIGHTNESS_AUTO:
+ {
+ bControl.eControl = OMX_TI_BceModeAuto;
+ break;
+ }
+ case OMXCameraAdapter::BRIGHTNESS_OFF:
+ default:
+ {
+ bControl.eControl = OMX_TI_BceModeOff;
+ break;
+ }
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigLocalBrightnessContrastEnhance,
+ &bControl);
+ if ( OMX_ErrorNone != eError )
+ {
+            CAMHAL_LOGEB("Error while configuring GLBCE 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDA("GLBCE configured successfully");
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_CAMOPERATINGMODETYPE camMode;
+ OMX_CONFIG_BOOLEANTYPE bCAC;
+
+ LOG_FUNCTION_NAME;
+
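+    //Chromatic aberration correction is disabled by default and enabled only for the high quality capture modes below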
+ OMX_INIT_STRUCT_PTR (&bCAC, OMX_CONFIG_BOOLEANTYPE);
+ bCAC.bEnabled = OMX_FALSE;
+
+ if ( NO_ERROR == ret )
+ {
+
+ OMX_INIT_STRUCT_PTR (&camMode, OMX_CONFIG_CAMOPERATINGMODETYPE);
+ if ( mSensorIndex == OMX_TI_StereoSensor )
+ {
+ CAMHAL_LOGDA("Camera mode: STEREO");
+ camMode.eCamOperatingMode = OMX_CaptureStereoImageCapture;
+ }
+ else if ( OMXCameraAdapter::HIGH_SPEED == mode )
+ {
+ CAMHAL_LOGDA("Camera mode: HIGH SPEED");
+ camMode.eCamOperatingMode = OMX_CaptureImageHighSpeedTemporalBracketing;
+ }
+ else if( OMXCameraAdapter::HIGH_QUALITY == mode )
+ {
+ CAMHAL_LOGDA("Camera mode: HIGH QUALITY");
+ camMode.eCamOperatingMode = OMX_CaptureImageProfileBase;
+ bCAC.bEnabled = OMX_TRUE;
+ }
+ else if( OMXCameraAdapter::HIGH_QUALITY_ZSL== mode )
+ {
+ CAMHAL_LOGDA("Camera mode: HIGH QUALITY_ZSL");
+ camMode.eCamOperatingMode = OMX_TI_CaptureImageProfileZeroShutterLag;
+ bCAC.bEnabled = OMX_TRUE;
+ }
+ else if( OMXCameraAdapter::VIDEO_MODE == mode )
+ {
+ CAMHAL_LOGDA("Camera mode: VIDEO MODE");
+ camMode.eCamOperatingMode = OMX_CaptureVideo;
+ }
+ else
+ {
+ CAMHAL_LOGEA("Camera mode: INVALID mode passed!");
+ return BAD_VALUE;
+ }
+
+ if(ret != -1)
+ {
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexCameraOperatingMode,
+ &camMode);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring camera mode 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Camera mode configured successfully");
+ }
+ }
+
+ if(ret != -1)
+ {
+ //Configure CAC
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigChromaticAberrationCorrection,
+ &bCAC);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring CAC 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDA("CAC configured successfully");
+ }
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setLDC(OMXCameraAdapter::IPPMode mode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_BOOLEANTYPE bOMX;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateLoaded != mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is not in loaded state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
+
+ switch ( mode )
+ {
+ case OMXCameraAdapter::IPP_LDCNSF:
+ case OMXCameraAdapter::IPP_LDC:
+ {
+ bOMX.bEnabled = OMX_TRUE;
+ break;
+ }
+ case OMXCameraAdapter::IPP_NONE:
+ case OMXCameraAdapter::IPP_NSF:
+ default:
+ {
+ bOMX.bEnabled = OMX_FALSE;
+ break;
+ }
+ }
+
+ CAMHAL_LOGVB("Configuring LDC mode 0x%x", bOMX.bEnabled);
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexParamLensDistortionCorrection,
+ &bOMX);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEA("Error while setting LDC");
+ ret = -1;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setNSF(OMXCameraAdapter::IPPMode mode)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_ISONOISEFILTERTYPE nsf;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateLoaded != mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is not in loaded state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&nsf, OMX_PARAM_ISONOISEFILTERTYPE);
+ nsf.nPortIndex = OMX_ALL;
+
+ switch ( mode )
+ {
+ case OMXCameraAdapter::IPP_LDCNSF:
+ case OMXCameraAdapter::IPP_NSF:
+ {
+ nsf.eMode = OMX_ISONoiseFilterModeOn;
+ break;
+ }
+ case OMXCameraAdapter::IPP_LDC:
+ case OMXCameraAdapter::IPP_NONE:
+ default:
+ {
+ nsf.eMode = OMX_ISONoiseFilterModeOff;
+ break;
+ }
+ }
+
+ CAMHAL_LOGVB("Configuring NSF mode 0x%x", nsf.eMode);
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_IndexParamHighISONoiseFiler,
+ &nsf);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEA("Error while setting NSF");
+ ret = -1;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setImageQuality(unsigned int quality)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_PARAM_QFACTORTYPE jpegQualityConf;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT(jpegQualityConf, OMX_IMAGE_PARAM_QFACTORTYPE);
+ jpegQualityConf.nQFactor = quality;
+ jpegQualityConf.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamQFactor,
+ &jpegQualityConf);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring jpeg Quality 0x%x", eError);
+ ret = -1;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setThumbnailParams(unsigned int width,
+ unsigned int height,
+ unsigned int quality)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_THUMBNAILTYPE thumbConf;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT(thumbConf, OMX_PARAM_THUMBNAILTYPE);
+ thumbConf.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexParamThumbnail,
+ &thumbConf);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while retrieving thumbnail size 0x%x", eError);
+ ret = -1;
+ }
+
+ //CTS Requirement: width or height equal to zero should
+ //result in absent EXIF thumbnail
+ if ( ( 0 == width ) || ( 0 == height ) )
+ {
+ thumbConf.nWidth = mThumbRes[0].width;
+ thumbConf.nHeight = mThumbRes[0].height;
+ thumbConf.eCompressionFormat = OMX_IMAGE_CodingUnused;
+ }
+ else
+ {
+ thumbConf.nWidth = width;
+ thumbConf.nHeight = height;
+ thumbConf.nQuality = quality;
+ thumbConf.eCompressionFormat = OMX_IMAGE_CodingJPEG;
+ }
+
+ CAMHAL_LOGDB("Thumbnail width = %d, Thumbnail Height = %d", width, height);
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexParamThumbnail,
+ &thumbConf);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring thumbnail size 0x%x", eError);
+ ret = -1;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setAlgoPriority(AlgoPriority priority,
+ Algorithm3A algo,
+ bool enable)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_3A_REGION_PRIORITY regionPriority;
+ OMX_TI_CONFIG_3A_FACE_PRIORITY facePriority;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -1;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ if ( FACE_PRIORITY == priority )
+ {
+ OMX_INIT_STRUCT_PTR (&facePriority, OMX_TI_CONFIG_3A_FACE_PRIORITY);
+ facePriority.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ if ( algo & WHITE_BALANCE_ALGO )
+ {
+ if ( enable )
+ {
+ facePriority.bAwbFaceEnable = OMX_TRUE;
+ }
+ else
+ {
+ facePriority.bAwbFaceEnable = OMX_FALSE;
+ }
+ }
+
+ if ( algo & EXPOSURE_ALGO )
+ {
+ if ( enable )
+ {
+ facePriority.bAeFaceEnable = OMX_TRUE;
+ }
+ else
+ {
+ facePriority.bAeFaceEnable = OMX_FALSE;
+ }
+ }
+
+ if ( algo & FOCUS_ALGO )
+ {
+ if ( enable )
+ {
+ facePriority.bAfFaceEnable= OMX_TRUE;
+ }
+ else
+ {
+ facePriority.bAfFaceEnable = OMX_FALSE;
+ }
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigFacePriority3a,
+ &facePriority);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring face priority 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Face priority for algorithms set successfully");
+ }
+
+ }
+ else if ( REGION_PRIORITY == priority )
+ {
+
+ OMX_INIT_STRUCT_PTR (&regionPriority, OMX_TI_CONFIG_3A_REGION_PRIORITY);
+ regionPriority.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ if ( algo & WHITE_BALANCE_ALGO )
+ {
+ if ( enable )
+ {
+ regionPriority.bAwbRegionEnable= OMX_TRUE;
+ }
+ else
+ {
+ regionPriority.bAwbRegionEnable = OMX_FALSE;
+ }
+ }
+
+ if ( algo & EXPOSURE_ALGO )
+ {
+ if ( enable )
+ {
+ regionPriority.bAeRegionEnable = OMX_TRUE;
+ }
+ else
+ {
+ regionPriority.bAeRegionEnable = OMX_FALSE;
+ }
+ }
+
+ if ( algo & FOCUS_ALGO )
+ {
+ if ( enable )
+ {
+ regionPriority.bAfRegionEnable = OMX_TRUE;
+ }
+ else
+ {
+ regionPriority.bAfRegionEnable = OMX_FALSE;
+ }
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigRegionPriority3a,
+ &regionPriority);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring region priority 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Region priority for algorithms set successfully");
+ }
+
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setPictureRotation(unsigned int degree)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_ROTATIONTYPE rotation;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -1;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT(rotation, OMX_CONFIG_ROTATIONTYPE);
+ rotation.nRotation = degree;
+ rotation.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonRotate,
+ &rotation);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring rotation 0x%x", eError);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setSensorOrientation(unsigned int degree)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_ROTATIONTYPE sensorOrientation;
+ int tmpHeight, tmpWidth;
+ OMXCameraPortParameters *mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+ LOG_FUNCTION_NAME;
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -1;
+ }
+
+    /* Set temporary port resolution.
+     * For resolutions with height > 1008, the resolution cannot be set without configuring the orientation first,
+     * so we temporarily switch to VGA.
+ */
+ tmpHeight = mPreviewData->mHeight;
+ tmpWidth = mPreviewData->mWidth;
+ mPreviewData->mWidth = 640;
+ mPreviewData->mHeight = 480;
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData);
+ if ( ret != NO_ERROR )
+ {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ }
+
+ /* Now set Required Orientation*/
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT(sensorOrientation, OMX_CONFIG_ROTATIONTYPE);
+ sensorOrientation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonRotate,
+ &sensorOrientation);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while Reading Sensor Orientation : 0x%x", eError);
+ }
+ CAMHAL_LOGVB(" Currently Sensor Orientation is set to : %d",
+ ( unsigned int ) sensorOrientation.nRotation);
+ sensorOrientation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ sensorOrientation.nRotation = degree;
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonRotate,
+ &sensorOrientation);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring rotation 0x%x", eError);
+ }
+ CAMHAL_LOGVA(" Read the Parameters that are set");
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonRotate,
+ &sensorOrientation);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while Reading Sensor Orientation : 0x%x", eError);
+ }
+ CAMHAL_LOGVB(" Currently Sensor Orientation is set to : %d",
+ ( unsigned int ) sensorOrientation.nRotation);
+ CAMHAL_LOGVB(" Sensor Configured for Port : %d",
+ ( unsigned int ) sensorOrientation.nPortIndex);
+ }
+
+ /* Now set the required resolution as requested */
+
+ mPreviewData->mWidth = tmpWidth;
+ mPreviewData->mHeight = tmpHeight;
+ if ( NO_ERROR == ret )
+ {
+ ret = setFormat (mCameraAdapterParameters.mPrevPortIndex,
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setVFramerate(OMX_U32 minFrameRate, OMX_U32 maxFrameRate)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_VARFRMRANGETYPE vfr;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&vfr, OMX_TI_CONFIG_VARFRMRANGETYPE);
+
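+        // xMin/xMax are Q16 fixed-point values, hence the left shift by 16 below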
+ vfr.xMin = minFrameRate<<16;
+ vfr.xMax = maxFrameRate<<16;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexConfigVarFrmRange,
+ &vfr);
+ if(OMX_ErrorNone != eError)
+ {
+ CAMHAL_LOGEB("Error while setting VFR min = %d, max = %d, error = 0x%x",
+ ( unsigned int ) minFrameRate,
+ ( unsigned int ) maxFrameRate,
+ eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDB("VFR Configured Successfully [%d:%d]",
+ ( unsigned int ) minFrameRate,
+ ( unsigned int ) maxFrameRate);
+ }
+ }
+
+ return ret;
+ }
+
+};
diff --git a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
new file mode 100644
index 0000000..899e3dc
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
@@ -0,0 +1,3160 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXCameraAdapter.cpp
+*
+* This file maps the Camera Hardware Interface to OMX.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+#include "TICameraParameters.h"
+#include <signal.h>
+#include <math.h>
+
+#include <cutils/properties.h>
+#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
+static int mDebugFps = 0;
+
+#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);}
+
+namespace android {
+
+#undef LOG_TAG
+///Maintain a separate tag for OMXCameraAdapter logs to isolate issues OMX specific
+#define LOG_TAG "CameraHAL"
+
+//frames skipped before recalculating the framerate
+#define FPS_PERIOD 30
+
+static OMXCameraAdapter *gCameraAdapter = NULL;
+Mutex gAdapterLock;
+
+/*--------------------Camera Adapter Class STARTS here-----------------------------*/
+
+status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps, int sensor_index)
+{
+ LOG_FUNCTION_NAME;
+
+ char value[PROPERTY_VALUE_MAX];
+ property_get("debug.camera.showfps", value, "0");
+ mDebugFps = atoi(value);
+
+ TIMM_OSAL_ERRORTYPE osalError = OMX_ErrorNone;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ status_t ret = NO_ERROR;
+
+
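+    // OMX IL version (1.1) used to fill the nVersion field of OMX structures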
+ mLocalVersionParam.s.nVersionMajor = 0x1;
+ mLocalVersionParam.s.nVersionMinor = 0x1;
+ mLocalVersionParam.s.nRevision = 0x0 ;
+ mLocalVersionParam.s.nStep = 0x0;
+
+ mPending3Asettings = 0;//E3AsettingsAll;
+
+ if ( 0 != mInitSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mInitSem semaphore count %d", mInitSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ if (mComponentState != OMX_StateLoaded && mComponentState != OMX_StateInvalid) {
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ if ( mComponentState != OMX_StateExecuting ){
+ ///Update the preview and image capture port indexes
+ mCameraAdapterParameters.mPrevPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW;
+ // temp changed in order to build OMX_CAMERA_PORT_VIDEO_OUT_IMAGE;
+ mCameraAdapterParameters.mImagePortIndex = OMX_CAMERA_PORT_IMAGE_OUT_IMAGE;
+ mCameraAdapterParameters.mMeasurementPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT;
+ //currently not supported use preview port instead
+ mCameraAdapterParameters.mVideoPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW;
+
+ if(!mCameraAdapterParameters.mHandleComp)
+ {
+ ///Initialize the OMX Core
+ eError = OMX_Init();
+
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_Init -0x%x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ ///Setup key parameters to send to Ducati during init
+ OMX_CALLBACKTYPE oCallbacks;
+
+ /* Initialize the callback handles */
+ oCallbacks.EventHandler = android::OMXCameraAdapterEventHandler;
+ oCallbacks.EmptyBufferDone = android::OMXCameraAdapterEmptyBufferDone;
+ oCallbacks.FillBufferDone = android::OMXCameraAdapterFillBufferDone;
+
+ ///Get the handle to the OMX Component
+ mCameraAdapterParameters.mHandleComp = NULL;
+ eError = OMX_GetHandle(&(mCameraAdapterParameters.mHandleComp), // previously used: OMX_GetHandle
+ (OMX_STRING)"OMX.TI.DUCATI1.VIDEO.CAMERA" ///@todo Use constant instead of hardcoded name
+ , this
+ , &oCallbacks);
+
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+
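+            ///Disable all ports up front; only the ports actually in use are re-enabled
+            ///later (preview immediately below, the others on demand).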
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ OMX_ALL,
+ NULL);
+
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortDisable) -0x%x", eError);
+ }
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ ///Register for port enable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mInitSem);
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Enable PREVIEW Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError);
+ }
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ //Wait for the port enable event to occur
+ ret = mInitSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("-Port enable event arrived");
+ }
+ else
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Timeout for enabling preview port expired!");
+ goto EXIT;
+ }
+
+ }
+ else
+ {
+ OMXCameraPortParameters * mPreviewData =
+ &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+            //Apply default configs before trying to switch to a new sensor
+ if ( NO_ERROR != setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData) )
+ {
+ CAMHAL_LOGEB("Error 0x%x while applying defaults", ret);
+ goto EXIT;
+ }
+ }
+ }
+ ///Select the sensor
+ OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
+ OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
+ sensorSelect.eSensor = (OMX_SENSORSELECT)sensor_index;
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSensorSelect, &sensorSelect);
+
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while selecting the sensor index as %d - 0x%x", sensor_index, eError);
+ return BAD_VALUE;
+ }
+ else
+ {
+ CAMHAL_LOGDB("Sensor %d selected successfully", sensor_index);
+ }
+
+ printComponentVersion(mCameraAdapterParameters.mHandleComp);
+
+ mSensorIndex = sensor_index;
+ mBracketingEnabled = false;
+ mBracketingBuffersQueuedCount = 0;
+ mBracketingRange = 1;
+ mLastBracetingBufferIdx = 0;
+ mOMXStateSwitch = false;
+
+ if ( mComponentState != OMX_StateExecuting ){
+ mCaptureSignalled = false;
+ mCaptureConfigured = false;
+ mRecording = false;
+ mWaitingForSnapshot = false;
+ mSnapshotCount = 0;
+ mComponentState = OMX_StateLoaded;
+
+ mCapMode = HIGH_QUALITY;
+ mBurstFrames = 1;
+ mCapturedFrames = 0;
+ mPictureQuality = 100;
+ mCurrentZoomIdx = 0;
+ mTargetZoomIdx = 0;
+ mReturnZoomStatus = false;
+ mZoomInc = 1;
+ mZoomParameterIdx = 0;
+ mExposureBracketingValidEntries = 0;
+ mSensorOverclock = false;
+
+ mDeviceOrientation = 0;
+ mCapabilities = caps;
+
+ mEXIFData.mGPSData.mAltitudeValid = false;
+ mEXIFData.mGPSData.mDatestampValid = false;
+ mEXIFData.mGPSData.mLatValid = false;
+ mEXIFData.mGPSData.mLongValid = false;
+ mEXIFData.mGPSData.mMapDatumValid = false;
+ mEXIFData.mGPSData.mProcMethodValid = false;
+ mEXIFData.mGPSData.mVersionIdValid = false;
+ mEXIFData.mGPSData.mTimeStampValid = false;
+ mEXIFData.mModelValid = false;
+ mEXIFData.mMakeValid = false;
+
+ // initialize command handling thread
+ if(mCommandHandler.get() == NULL)
+ mCommandHandler = new CommandHandler(this);
+
+ if ( NULL == mCommandHandler.get() )
+ {
+ CAMHAL_LOGEA("Couldn't create command handler");
+ return NO_MEMORY;
+ }
+
+ ret = mCommandHandler->run("CallbackThread", PRIORITY_URGENT_DISPLAY);
+ if ( ret != NO_ERROR )
+ {
+ if( ret == INVALID_OPERATION){
+ CAMHAL_LOGDA("command handler thread already runnning!!");
+ }else
+ {
+ CAMHAL_LOGEA("Couldn't run command handlerthread");
+ return ret;
+ }
+ }
+
+ // initialize omx callback handling thread
+ if(mOMXCallbackHandler.get() == NULL)
+ mOMXCallbackHandler = new OMXCallbackHandler(this);
+
+ if ( NULL == mOMXCallbackHandler.get() )
+ {
+ CAMHAL_LOGEA("Couldn't create omx callback handler");
+ return NO_MEMORY;
+ }
+
+ ret = mOMXCallbackHandler->run("OMXCallbackThread", PRIORITY_URGENT_DISPLAY);
+ if ( ret != NO_ERROR )
+ {
+ if( ret == INVALID_OPERATION){
+ CAMHAL_LOGDA("omx callback handler thread already runnning!!");
+ }else
+ {
+ CAMHAL_LOGEA("Couldn't run omx callback handler thread");
+ return ret;
+ }
+ }
+
+ //Remove any unhandled events
+ if ( !mEventSignalQ.isEmpty() )
+ {
+ for (unsigned int i = 0 ; i < mEventSignalQ.size() ; i++ )
+ {
+ TIUTILS::Message *msg = mEventSignalQ.itemAt(i);
+ //remove from queue and free msg
+ mEventSignalQ.removeAt(i);
+ if ( NULL != msg )
+ {
+ free(msg);
+ }
+ }
+ }
+
+        //Setting this flag ensures that the first setParameter call applies all 3A settings
+        //instead of conditionally applying them based on the current values.
+ mFirstTimeInit = true;
+
+ memset(mExposureBracketingValues, 0, EXP_BRACKET_RANGE*sizeof(int));
+ mMeasurementEnabled = false;
+ mFaceDetectionRunning = false;
+
+ memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex], 0, sizeof(OMXCameraPortParameters));
+ memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex], 0, sizeof(OMXCameraPortParameters));
+ }
+ LOG_FUNCTION_NAME_EXIT;
+ return ErrorUtils::omxToAndroidError(eError);
+
+ EXIT:
+
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+
+ if(mCameraAdapterParameters.mHandleComp)
+ {
+ ///Free the OMX component handle in case of error
+ OMX_FreeHandle(mCameraAdapterParameters.mHandleComp);
+ }
+
+ ///De-init the OMX
+ OMX_Deinit();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+OMXCameraAdapter::OMXCameraPortParameters *OMXCameraAdapter::getPortParams(CameraFrame::FrameType frameType)
+{
+ OMXCameraAdapter::OMXCameraPortParameters *ret = NULL;
+
+ switch ( frameType )
+ {
+ case CameraFrame::IMAGE_FRAME:
+ case CameraFrame::RAW_FRAME:
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ break;
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ case CameraFrame::SNAPSHOT_FRAME:
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ break;
+ case CameraFrame::FRAME_DATA_SYNC:
+ ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
+ break;
+ default:
+ break;
+ };
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+{
+ status_t ret = NO_ERROR;
+ OMXCameraPortParameters *port = NULL;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ BaseCameraAdapter::AdapterState state;
+ BaseCameraAdapter::getState(state);
+
+ if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE )
+ {
+ return NO_INIT;
+ }
+
+ if ( NULL == frameBuf )
+ {
+ return -EINVAL;
+ }
+
+ if ( (NO_ERROR == ret) &&
+ ((CameraFrame::IMAGE_FRAME == frameType) || (CameraFrame::RAW_FRAME == frameType)) &&
+ (1 > mCapturedFrames) &&
+ (!mBracketingEnabled)) {
+ // Signal end of image capture
+ if ( NULL != mEndImageCaptureCallback) {
+ mEndImageCaptureCallback(mEndCaptureData);
+ }
+ return NO_ERROR;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ port = getPortParams(frameType);
+ if ( NULL == port )
+ {
+ CAMHAL_LOGEB("Invalid frameType 0x%x", frameType);
+ ret = -EINVAL;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ for ( int i = 0 ; i < port->mNumBufs ; i++)
+ {
+ if ( port->mBufferHeader[i]->pBuffer == frameBuf )
+ {
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp, port->mBufferHeader[i]);
+ if ( eError != OMX_ErrorNone )
+ {
+ CAMHAL_LOGDB("OMX_FillThisBuffer 0x%x", eError);
+ ret = ErrorUtils::omxToAndroidError(eError);
+ }
+ break;
+ }
+ }
+
+ }
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
+{
+ LOG_FUNCTION_NAME;
+
+ const char * str = NULL;
+ int mode = 0;
+ status_t ret = NO_ERROR;
+ bool updateImagePortParams = false;
+ int minFramerate, maxFramerate, frameRate;
+ const char *valstr = NULL;
+ const char *oldstr = NULL;
+ int w, h;
+ OMX_COLOR_FORMATTYPE pixFormat;
+ BaseCameraAdapter::AdapterState state;
+ BaseCameraAdapter::getState(state);
+
+ ///@todo Include more camera parameters
+ if ( (valstr = params.getPreviewFormat()) != NULL )
+ {
+ if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ {
+ CAMHAL_LOGDA("CbYCrY format selected");
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ }
+ else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
+ {
+ CAMHAL_LOGDA("YUV420SP format selected");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+ else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ {
+ CAMHAL_LOGDA("RGB565 format selected");
+ pixFormat = OMX_COLOR_Format16bitRGB565;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Invalid format, CbYCrY format selected as default");
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY");
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ }
+
+ OMXCameraPortParameters *cap;
+ cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+ params.getPreviewSize(&w, &h);
+ frameRate = params.getPreviewFrameRate();
+ minFramerate = params.getInt(TICameraParameters::KEY_MINFRAMERATE);
+ maxFramerate = params.getInt(TICameraParameters::KEY_MAXFRAMERATE);
+ if ( ( 0 < minFramerate ) &&
+ ( 0 < maxFramerate ) )
+ {
+ if ( minFramerate > maxFramerate )
+ {
+ CAMHAL_LOGEA(" Min FPS set higher than MAX. So setting MIN and MAX to the higher value");
+ maxFramerate = minFramerate;
+ }
+
+ if ( 0 >= frameRate )
+ {
+ frameRate = maxFramerate;
+ }
+
+ if( ( cap->mMinFrameRate != minFramerate ) ||
+ ( cap->mMaxFrameRate != maxFramerate ) )
+ {
+ cap->mMinFrameRate = minFramerate;
+ cap->mMaxFrameRate = maxFramerate;
+ setVFramerate(cap->mMinFrameRate, cap->mMaxFrameRate);
+ }
+ }
+
+ if ( 0 < frameRate )
+ {
+ cap->mColorFormat = pixFormat;
+ cap->mWidth = w;
+ cap->mHeight = h;
+ cap->mFrameRate = frameRate;
+
+ CAMHAL_LOGVB("Prev: cap.mColorFormat = %d", (int)cap->mColorFormat);
+ CAMHAL_LOGVB("Prev: cap.mWidth = %d", (int)cap->mWidth);
+ CAMHAL_LOGVB("Prev: cap.mHeight = %d", (int)cap->mHeight);
+ CAMHAL_LOGVB("Prev: cap.mFrameRate = %d", (int)cap->mFrameRate);
+
+ //TODO: Add an additional parameter for video resolution
+ //use preview resolution for now
+ cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ cap->mColorFormat = pixFormat;
+ cap->mWidth = w;
+ cap->mHeight = h;
+ cap->mFrameRate = frameRate;
+
+ CAMHAL_LOGVB("Video: cap.mColorFormat = %d", (int)cap->mColorFormat);
+ CAMHAL_LOGVB("Video: cap.mWidth = %d", (int)cap->mWidth);
+ CAMHAL_LOGVB("Video: cap.mHeight = %d", (int)cap->mHeight);
+ CAMHAL_LOGVB("Video: cap.mFrameRate = %d", (int)cap->mFrameRate);
+
+        ///mStride is set from setBufs() when the buffers are passed in
+ cap->mStride = 4096;
+ cap->mBufSize = cap->mStride * cap->mHeight;
+ }
+
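+    //Sensor overclock can only be toggled while the component is in the LOADED state
+    //(see setSensorOverclock()), so flag an OMX state switch whenever the full-HD
+    //overclock requirement changes.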
+ if ( ( cap->mWidth >= 1920 ) &&
+ ( cap->mHeight >= 1080 ) &&
+ ( cap->mFrameRate >= FRAME_RATE_FULL_HD ) &&
+ ( !mSensorOverclock ) )
+ {
+ mOMXStateSwitch = true;
+ }
+ else if ( ( ( cap->mWidth < 1920 ) ||
+ ( cap->mHeight < 1080 ) ||
+ ( cap->mFrameRate < FRAME_RATE_FULL_HD ) ) &&
+ ( mSensorOverclock ) )
+ {
+ mOMXStateSwitch = true;
+ }
+
+ if ( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
+ {
+ if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_ENABLE) == 0)
+ {
+ mMeasurementEnabled = true;
+ }
+ else if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_DISABLE) == 0)
+ {
+ mMeasurementEnabled = false;
+ }
+ else
+ {
+ mMeasurementEnabled = false;
+ }
+ }
+ else
+ {
+ //Disable measurement data by default
+ mMeasurementEnabled = false;
+ }
+
+ ret |= setParametersCapture(params, state);
+
+ ret |= setParameters3A(params, state);
+
+ ret |= setParametersAlgo(params, state);
+
+ ret |= setParametersFocus(params, state);
+
+ ret |= setParametersFD(params, state);
+
+ ret |= setParametersZoom(params, state);
+
+ ret |= setParametersEXIF(params, state);
+
+ mParams = params;
+ mFirstTimeInit = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+void saveFile(unsigned char *buff, int width, int height, int format) {
+ static int counter = 1;
+ int fd = -1;
+ char fn[256];
+
+ LOG_FUNCTION_NAME;
+
+ fn[0] = 0;
+ sprintf(fn, "/preview%03d.yuv", counter);
+ fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+ if(fd < 0) {
+ LOGE("Unable to open file %s: %s", fn, strerror(fd));
+ return;
+ }
+
+ CAMHAL_LOGVB("Copying from 0x%x, size=%d x %d", buff, width, height);
+
+ //method currently supports only nv12 dumping
+ int stride = width;
+ uint8_t *bf = (uint8_t*) buff;
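+    // Note: the source buffer is assumed to use a fixed 4096-byte row stride (the preview
+    // stride configured in setParameters()), hence the 4096-byte advance per copied row.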
+ for(int i=0;i<height;i++)
+ {
+ write(fd, bf, width);
+ bf += 4096;
+ }
+
+ for(int i=0;i<height/2;i++)
+ {
+ write(fd, bf, stride);
+ bf += 4096;
+ }
+
+ close(fd);
+
+
+ counter++;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OMXCameraAdapter::getParameters(CameraParameters& params)
+{
+ status_t ret = NO_ERROR;
+ OMX_CONFIG_EXPOSUREVALUETYPE exp;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ BaseCameraAdapter::AdapterState state;
+ BaseCameraAdapter::getState(state);
+
+ LOG_FUNCTION_NAME;
+
+#ifdef PARAM_FEEDBACK
+
+ OMX_CONFIG_WHITEBALCONTROLTYPE wb;
+ OMX_CONFIG_FLICKERCANCELTYPE flicker;
+ OMX_CONFIG_SCENEMODETYPE scene;
+ OMX_IMAGE_PARAM_FLASHCONTROLTYPE flash;
+ OMX_CONFIG_BRIGHTNESSTYPE brightness;
+ OMX_CONFIG_CONTRASTTYPE contrast;
+ OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE procSharpness;
+ OMX_CONFIG_SATURATIONTYPE saturation;
+ OMX_CONFIG_IMAGEFILTERTYPE effect;
+ OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focus;
+
+ exp.nSize = sizeof(OMX_CONFIG_EXPOSURECONTROLTYPE);
+ exp.nVersion = mLocalVersionParam;
+ exp.nPortIndex = OMX_ALL;
+
+ expValues.nSize = sizeof(OMX_CONFIG_EXPOSUREVALUETYPE);
+ expValues.nVersion = mLocalVersionParam;
+ expValues.nPortIndex = OMX_ALL;
+
+ wb.nSize = sizeof(OMX_CONFIG_WHITEBALCONTROLTYPE);
+ wb.nVersion = mLocalVersionParam;
+ wb.nPortIndex = OMX_ALL;
+
+ flicker.nSize = sizeof(OMX_CONFIG_FLICKERCANCELTYPE);
+ flicker.nVersion = mLocalVersionParam;
+ flicker.nPortIndex = OMX_ALL;
+
+ scene.nSize = sizeof(OMX_CONFIG_SCENEMODETYPE);
+ scene.nVersion = mLocalVersionParam;
+ scene.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ flash.nSize = sizeof(OMX_IMAGE_PARAM_FLASHCONTROLTYPE);
+ flash.nVersion = mLocalVersionParam;
+ flash.nPortIndex = OMX_ALL;
+
+
+ brightness.nSize = sizeof(OMX_CONFIG_BRIGHTNESSTYPE);
+ brightness.nVersion = mLocalVersionParam;
+ brightness.nPortIndex = OMX_ALL;
+
+ contrast.nSize = sizeof(OMX_CONFIG_CONTRASTTYPE);
+ contrast.nVersion = mLocalVersionParam;
+ contrast.nPortIndex = OMX_ALL;
+
+ procSharpness.nSize = sizeof( OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE );
+ procSharpness.nVersion = mLocalVersionParam;
+ procSharpness.nPortIndex = OMX_ALL;
+
+ saturation.nSize = sizeof(OMX_CONFIG_SATURATIONTYPE);
+ saturation.nVersion = mLocalVersionParam;
+ saturation.nPortIndex = OMX_ALL;
+
+ effect.nSize = sizeof(OMX_CONFIG_IMAGEFILTERTYPE);
+ effect.nVersion = mLocalVersionParam;
+ effect.nPortIndex = OMX_ALL;
+
+ focus.nSize = sizeof(OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
+ focus.nVersion = mLocalVersionParam;
+ focus.nPortIndex = OMX_ALL;
+
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp,OMX_IndexConfigCommonExposure, &exp);
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp, OMX_IndexConfigCommonWhiteBalance, &wb);
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE)OMX_IndexConfigFlickerCancel, &flicker );
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE)OMX_IndexParamSceneMode, &scene);
+ OMX_GetParameter( mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE)OMX_IndexParamFlashControl, &flash);
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp, OMX_IndexConfigCommonBrightness, &brightness);
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp, OMX_IndexConfigCommonContrast, &contrast);
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp, (OMX_INDEXTYPE)OMX_IndexConfigSharpeningLevel, &procSharpness);
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp, OMX_IndexConfigCommonSaturation, &saturation);
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp, OMX_IndexConfigCommonImageFilter, &effect);
+ OMX_GetConfig( mCameraAdapterParameters.mHandleComp, OMX_IndexConfigFocusControl, &focus);
+
+ char * str = NULL;
+
+ for(int i = 0; i < ExpLUT.size; i++)
+ if( ExpLUT.Table[i].omxDefinition == exp.eExposureControl )
+ str = (char*)ExpLUT.Table[i].userDefinition;
+ params.set( TICameraParameters::KEY_EXPOSURE_MODE , str);
+
+ for(int i = 0; i < WBalLUT.size; i++)
+ if( WBalLUT.Table[i].omxDefinition == wb.eWhiteBalControl )
+ str = (char*)WBalLUT.Table[i].userDefinition;
+ params.set( CameraParameters::KEY_WHITE_BALANCE , str );
+
+ for(int i = 0; i < FlickerLUT.size; i++)
+ if( FlickerLUT.Table[i].omxDefinition == flicker.eFlickerCancel )
+ str = (char*)FlickerLUT.Table[i].userDefinition;
+ params.set( CameraParameters::KEY_ANTIBANDING , str );
+
+ for(int i = 0; i < SceneLUT.size; i++)
+ if( SceneLUT.Table[i].omxDefinition == scene.eSceneMode )
+ str = (char*)SceneLUT.Table[i].userDefinition;
+ params.set( CameraParameters::KEY_SCENE_MODE , str );
+
+ for(int i = 0; i < FlashLUT.size; i++)
+ if( FlashLUT.Table[i].omxDefinition == flash.eFlashControl )
+ str = (char*)FlashLUT.Table[i].userDefinition;
+ params.set( CameraParameters::KEY_FLASH_MODE, str );
+
+ for(int i = 0; i < EffLUT.size; i++)
+ if( EffLUT.Table[i].omxDefinition == effect.eImageFilter )
+ str = (char*)EffLUT.Table[i].userDefinition;
+ params.set( CameraParameters::KEY_EFFECT , str );
+
+ for(int i = 0; i < FocusLUT.size; i++)
+ if( FocusLUT.Table[i].omxDefinition == focus.eFocusControl )
+ str = (char*)FocusLUT.Table[i].userDefinition;
+
+ params.set( CameraParameters::KEY_FOCUS_MODE , str );
+
+ int comp = ((expValues.xEVCompensation * 10) >> Q16_OFFSET);
+
+ params.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, comp );
+ params.set( TICameraParameters::KEY_MAN_EXPOSURE, expValues.nShutterSpeedMsec);
+ params.set( TICameraParameters::KEY_BRIGHTNESS, brightness.nBrightness);
+ params.set( TICameraParameters::KEY_CONTRAST, contrast.nContrast );
+ params.set( TICameraParameters::KEY_SHARPNESS, procSharpness.nLevel);
+ params.set( TICameraParameters::KEY_SATURATION, saturation.nSaturation);
+
+#else
+
+    //Query focus distances only during CAF, infinity focus,
+    //while focus is running, or when they have not been set yet
+ if ( ( AF_ACTIVE & state ) ||
+ ( mParameters3A.Focus == OMX_IMAGE_FocusControlAuto ) ||
+ ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) ||
+ ( NULL == mParameters.get(CameraParameters::KEY_FOCUS_DISTANCES) ) )
+ {
+ updateFocusDistances(params);
+ }
+ else
+ {
+ params.set(CameraParameters::KEY_FOCUS_DISTANCES,
+ mParameters.get(CameraParameters::KEY_FOCUS_DISTANCES));
+ }
+
+ OMX_INIT_STRUCT_PTR (&exp, OMX_CONFIG_EXPOSUREVALUETYPE);
+ exp.nPortIndex = OMX_ALL;
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonExposureValue,
+ &exp);
+ if ( OMX_ErrorNone == eError )
+ {
+ params.set(TICameraParameters::KEY_CURRENT_ISO, exp.nSensitivity);
+ }
+ else
+ {
+ CAMHAL_LOGEB("OMX error 0x%x, while retrieving current ISO value", eError);
+ }
+
+ {
+ Mutex::Autolock lock(mZoomLock);
+        //Immediate zoom should not be available while smooth zoom is running
+ if ( ZOOM_ACTIVE & state )
+ {
+ if ( mZoomParameterIdx != mCurrentZoomIdx )
+ {
+ mZoomParameterIdx += mZoomInc;
+ }
+ params.set( CameraParameters::KEY_ZOOM, mZoomParameterIdx);
+ if ( ( mCurrentZoomIdx == mTargetZoomIdx ) &&
+ ( mZoomParameterIdx == mCurrentZoomIdx ) )
+ {
+
+ if ( NO_ERROR == ret )
+ {
+
+ ret = BaseCameraAdapter::setState(CAMERA_STOP_SMOOTH_ZOOM);
+
+ if ( NO_ERROR == ret )
+ {
+ ret = BaseCameraAdapter::commitState();
+ }
+ else
+ {
+ ret |= BaseCameraAdapter::rollbackState();
+ }
+
+ }
+
+ }
+
+ CAMHAL_LOGDB("CameraParameters Zoom = %d", mCurrentZoomIdx);
+ }
+ else
+ {
+ params.set( CameraParameters::KEY_ZOOM, mCurrentZoomIdx);
+ }
+ }
+
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &portParams)
+{
+ size_t bufferCount;
+
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_PORTDEFINITIONTYPE portCheck;
+
+ OMX_INIT_STRUCT_PTR (&portCheck, OMX_PARAM_PORTDEFINITIONTYPE);
+
+ portCheck.nPortIndex = port;
+
+ eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ if ( OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port )
+ {
+ portCheck.format.video.nFrameWidth = portParams.mWidth;
+ portCheck.format.video.nFrameHeight = portParams.mHeight;
+ portCheck.format.video.eColorFormat = portParams.mColorFormat;
+ portCheck.format.video.nStride = portParams.mStride;
+ if( ( portCheck.format.video.nFrameWidth >= 1920 ) &&
+ ( portCheck.format.video.nFrameHeight >= 1080 ) &&
+ ( portParams.mFrameRate >= FRAME_RATE_FULL_HD ) )
+ {
+ setSensorOverclock(true);
+ }
+ else
+ {
+ setSensorOverclock(false);
+ }
+
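+        // xFramerate is expressed in Q16 fixed point, hence the left shift by 16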
+ portCheck.format.video.xFramerate = portParams.mFrameRate<<16;
+ portCheck.nBufferSize = portParams.mStride * portParams.mHeight;
+ portCheck.nBufferCountActual = portParams.mNumBufs;
+ mFocusThreshold = FOCUS_THRESHOLD * portParams.mFrameRate;
+ }
+ else if ( OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port )
+ {
+ portCheck.format.image.nFrameWidth = portParams.mWidth;
+ portCheck.format.image.nFrameHeight = portParams.mHeight;
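+        //When no explicit color format is requested, derive the compression format from the
+        //current coding mode (JPEG, JPS, MPO and their RAW variants); otherwise pass the
+        //requested color format through uncompressed.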
+ if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingNone )
+ {
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
+ }
+ else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingJPS )
+ {
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingJPS;
+ }
+ else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingMPO )
+ {
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingMPO;
+ }
+ else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingRAWJPEG )
+ {
+ //TODO: OMX_IMAGE_CodingJPEG should be changed to OMX_IMAGE_CodingRAWJPEG when
+ // RAW format is supported
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
+ }
+ else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingRAWMPO )
+ {
+ //TODO: OMX_IMAGE_CodingJPEG should be changed to OMX_IMAGE_CodingRAWMPO when
+ // RAW format is supported
+ portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY;
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
+ }
+ else
+ {
+ portCheck.format.image.eColorFormat = portParams.mColorFormat;
+ portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused;
+ }
+
+ //Stride for 1D tiler buffer is zero
+ portCheck.format.image.nStride = 0;
+ portCheck.nBufferSize = portParams.mStride * portParams.mWidth * portParams.mHeight;
+ portCheck.nBufferCountActual = portParams.mNumBufs;
+ }
+ else
+ {
+ CAMHAL_LOGEB("Unsupported port index 0x%x", (unsigned int)port);
+ }
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ /* check if parameters are set correctly by calling GetParameter() */
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexParamPortDefinition, &portCheck);
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ portParams.mBufSize = portCheck.nBufferSize;
+
+ if ( OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port )
+ {
+ CAMHAL_LOGDB("\n *** IMG Width = %ld", portCheck.format.image.nFrameWidth);
+ CAMHAL_LOGDB("\n ***IMG Height = %ld", portCheck.format.image.nFrameHeight);
+
+ CAMHAL_LOGDB("\n ***IMG IMG FMT = %x", portCheck.format.image.eColorFormat);
+ CAMHAL_LOGDB("\n ***IMG portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n ***IMG portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n ***IMG portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n ***IMG portCheck.format.image.nStride = %ld\n",
+ portCheck.format.image.nStride);
+ }
+ else
+ {
+ CAMHAL_LOGDB("\n *** PRV Width = %ld", portCheck.format.video.nFrameWidth);
+ CAMHAL_LOGDB("\n ***PRV Height = %ld", portCheck.format.video.nFrameHeight);
+
+ CAMHAL_LOGDB("\n ***PRV IMG FMT = %x", portCheck.format.video.eColorFormat);
+ CAMHAL_LOGDB("\n ***PRV portCheck.nBufferSize = %ld\n",portCheck.nBufferSize);
+ CAMHAL_LOGDB("\n ***PRV portCheck.nBufferCountMin = %ld\n",
+ portCheck.nBufferCountMin);
+ CAMHAL_LOGDB("\n ***PRV portCheck.nBufferCountActual = %ld\n",
+ portCheck.nBufferCountActual);
+ CAMHAL_LOGDB("\n ***PRV portCheck.format.video.nStride = %ld\n",
+ portCheck.format.video.nStride);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+
+ EXIT:
+
+ CAMHAL_LOGEB("Exiting function %s because of eError=%x", __FUNCTION__, eError);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OMXCameraAdapter::flushBuffers()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ TIMM_OSAL_ERRORTYPE err;
+ TIMM_OSAL_U32 uRequestedEvents = OMXCameraAdapter::CAMERA_PORT_FLUSH;
+ TIMM_OSAL_U32 pRetrievedEvents;
+
+ if ( 0 != mFlushSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mFlushSem semaphore count %d", mFlushSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ LOG_FUNCTION_NAME;
+
+ OMXCameraPortParameters * mPreviewData = NULL;
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+
+ ///Register for the FLUSH event
+ ///This method just inserts a message in Event Q, which is checked in the callback
+    ///The semaphore passed is signalled by the callback
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandFlush,
+ OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW,
+ mFlushSem);
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Send FLUSH command to preview port
+ eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp,
+ OMX_CommandFlush,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandFlush)-0x%x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ CAMHAL_LOGDA("Waiting for flush event");
+
+ ///Wait for the FLUSH event to occur
+ ret = mFlushSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Flush event received");
+ }
+ else
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandFlush,
+ OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW,
+ NULL);
+ CAMHAL_LOGDA("Flush event timeout expired");
+ goto EXIT;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | ErrorUtils::omxToAndroidError(eError));
+
+ EXIT:
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | ErrorUtils::omxToAndroidError(eError));
+}
+
+///API to give the buffers to Adapter
+status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+{
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ switch(mode)
+ {
+ case CAMERA_PREVIEW:
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersPreview(bufArr, num);
+ break;
+
+ case CAMERA_IMAGE_CAPTURE:
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersCapture(bufArr, num);
+ break;
+
+ case CAMERA_VIDEO:
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersPreview(bufArr, num);
+ break;
+
+ case CAMERA_MEASUREMENT:
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex].mNumBufs = num;
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex].mMaxQueueable = queueable;
+ ret = UseBuffersPreviewData(bufArr, num);
+ break;
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::UseBuffersPreviewData(void* bufArr, int num)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters * measurementData = NULL;
+ uint32_t *buffers;
+ Mutex::Autolock lock( mPreviewDataBufferLock);
+
+ LOG_FUNCTION_NAME;
+
+ if ( mComponentState != OMX_StateLoaded )
+ {
+ CAMHAL_LOGEA("Calling UseBuffersPreviewData() when not in LOADED state");
+ ret = BAD_VALUE;
+ }
+
+ if ( NULL == bufArr )
+ {
+ CAMHAL_LOGEA("NULL pointer passed for buffArr");
+ ret = BAD_VALUE;
+ }
+
+ if ( 0 != mUsePreviewDataSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mUsePreviewDataSem semaphore count %d", mUsePreviewDataSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
+ measurementData->mNumBufs = num ;
+ buffers= (uint32_t*) bufArr;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ///Register for port enable event on measurement port
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mMeasurementPortIndex,
+ mUsePreviewDataSem);
+
+ if ( ret == NO_ERROR )
+ {
+ CAMHAL_LOGDB("Registering for event %d", ret);
+ }
+ else
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ ret = BAD_VALUE;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ///Enable MEASUREMENT Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mMeasurementPortIndex,
+ NULL);
+
+ if ( eError == OMX_ErrorNone )
+ {
+ CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError);
+ ret = BAD_VALUE;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = mUsePreviewDataSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Port enable event arrived on measurement port");
+ }
+ else
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mMeasurementPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Timeout expoired during port enable on measurement port");
+ }
+
+ CAMHAL_LOGDA("Port enable event arrived on measurement port");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::switchToLoaded()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ if ( mComponentState == OMX_StateLoaded )
+ {
+ CAMHAL_LOGDA("Already in OMX_Loaded state");
+ goto EXIT;
+ }
+
+ if ( 0 != mSwitchToLoadedSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mSwitchToLoadedSem semaphore count %d", mSwitchToLoadedSem.Count());
+ goto EXIT;
+ }
+
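+    ///OMX IL does not allow a direct EXECUTING->LOADED transition, so first move the
+    ///component to IDLE and then to LOADED, waiting for each command to complete.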
+    ///Register for the IDLE state transition.
+    ///This method just inserts a message in Event Q, which is checked in the callback
+    ///The semaphore passed is signalled by the callback
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ mSwitchToLoadedSem);
+
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ NULL);
+
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_StateIdle) - %x", eError);
+ }
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+    ///Wait for the EXECUTING->IDLE transition to arrive
+
+    CAMHAL_LOGDA("Waiting for EXECUTING->IDLE state change");
+ ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("EXECUTING->IDLE state changed");
+ }
+ else
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ NULL);
+ CAMHAL_LOGEA("Timeout expired on EXECUTING->IDLE state change");
+ goto EXIT;
+ }
+
+ ///Register for LOADED state transition.
+ ///This method just inserts a message in Event Q, which is checked in the callback
+    ///The semaphore passed is signalled by the callback
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateLoaded,
+ mSwitchToLoadedSem);
+
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp,
+ OMX_CommandStateSet,
+ OMX_StateLoaded,
+ NULL);
+
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_StateLoaded) - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ CAMHAL_LOGDA("Switching IDLE->LOADED state");
+ ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("IDLE->LOADED state changed");
+ }
+ else
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateLoaded,
+ NULL);
+ CAMHAL_LOGEA("Timeout expired on IDLE->LOADED state change");
+ goto EXIT;
+ }
+
+ mComponentState = OMX_StateLoaded;
+
+ ///Register for Preview port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mSwitchToLoadedSem);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Enable Preview Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+
+ CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandStateSet) 0x%x", eError);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ CAMHAL_LOGDA("Enabling Preview port");
+ ///Wait for state to switch to idle
+ ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Preview port enabled!");
+ }
+ else
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Preview enable timedout");
+ goto EXIT;
+ }
+
+ EXIT:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ int tmpHeight, tmpWidth;
+
+ LOG_FUNCTION_NAME;
+
+ ///Flag to determine whether it is 3D camera or not
+ bool isS3d = false;
+ const char *valstr = NULL;
+ if ( (valstr = mParams.get(TICameraParameters::KEY_S3D_SUPPORTED)) != NULL) {
+ isS3d = (strcmp(valstr, "true") == 0);
+ }
+
+ if(!bufArr)
+ {
+ CAMHAL_LOGEA("NULL pointer passed for buffArr");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ OMXCameraPortParameters * mPreviewData = NULL;
+ OMXCameraPortParameters *measurementData = NULL;
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
+ mPreviewData->mNumBufs = num ;
+ uint32_t *buffers = (uint32_t*)bufArr;
+
+ if ( 0 != mUsePreviewSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mUsePreviewSem semaphore count %d", mUsePreviewSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ if(mPreviewData->mNumBufs != num)
+ {
+ CAMHAL_LOGEA("Current number of buffers doesnt equal new num of buffers passed!");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ if ( mComponentState == OMX_StateLoaded )
+ {
+
+ ret = setLDC(mIPP);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("setLDC() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+ ret = setNSF(mIPP);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("setNSF() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+ ret = setCaptureMode(mCapMode);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+ CAMHAL_LOGDB("Camera Mode = %d", mCapMode);
+
+ if( ( mCapMode == OMXCameraAdapter::VIDEO_MODE ) ||
+ ( isS3d && (mCapMode == OMXCameraAdapter::HIGH_QUALITY)) )
+ {
+ ///Enable/Disable Video Noise Filter
+ ret = enableVideoNoiseFilter(mVnfEnabled);
+ if ( NO_ERROR != ret)
+ {
+ CAMHAL_LOGEB("Error configuring VNF %x", ret);
+ return ret;
+ }
+
+ ///Enable/Disable Video Stabilization
+ ret = enableVideoStabilization(mVstabEnabled);
+ if ( NO_ERROR != ret)
+ {
+ CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
+ return ret;
+ }
+ }
+ else
+ {
+ ret = enableVideoNoiseFilter(false);
+ if ( NO_ERROR != ret)
+ {
+ CAMHAL_LOGEB("Error configuring VNF %x", ret);
+ return ret;
+ }
+ ///Enable/Disable Video Stabilization
+ ret = enableVideoStabilization(false);
+ if ( NO_ERROR != ret)
+ {
+ CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
+ return ret;
+ }
+ }
+ }
+
+ ret = setSensorOrientation(mSensorOrientation);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error configuring Sensor Orientation %x", ret);
+ mSensorOrientation = 0;
+ }
+
+ ret = setVFramerate(mPreviewData->mMinFrameRate, mPreviewData->mMaxFrameRate);
+ if ( ret != NO_ERROR )
+ {
+ CAMHAL_LOGEB("VFR configuration failed 0x%x", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+ if ( mComponentState == OMX_StateLoaded )
+ {
+ ///Register for IDLE state switch event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ mUsePreviewSem);
+
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Once we get the buffers, move component state to idle state and pass the buffers to OMX comp using UseBuffer
+ eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp ,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ NULL);
+
+ CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandStateSet) 0x%x", eError);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ mComponentState = OMX_StateIdle;
+ }
+ else
+ {
+ ///Register for Preview port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mUsePreviewSem);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Enable Preview Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ }
+
+
+ ///Configure DOMX to use either gralloc handles or vptrs
+ OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+ OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
+
+ domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ domxUseGrallocHandles.bEnable = OMX_TRUE;
+
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
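+    ///Wrap each preview buffer passed in by the caller in an OMX buffer header on the
+    ///preview port; with the native-buffer setting above, DOMX treats these pointers as
+    ///gralloc handles.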
+ OMX_BUFFERHEADERTYPE *pBufferHdr;
+ for(int index=0;index<num;index++) {
+
+ CAMHAL_LOGDB("OMX_UseBuffer(0x%x)", buffers[index]);
+ eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
+ &pBufferHdr,
+ mCameraAdapterParameters.mPrevPortIndex,
+ 0,
+ mPreviewData->mBufSize,
+ (OMX_U8*)buffers[index]);
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_UseBuffer-0x%x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ //pBufferHdr->pAppPrivate = (OMX_PTR)pBufferHdr;
+ pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufferHdr->nVersion.s.nRevision = 0 ;
+ pBufferHdr->nVersion.s.nStep = 0;
+ mPreviewData->mBufferHeader[index] = pBufferHdr;
+ }
+
+ if ( mMeasurementEnabled )
+ {
+
+ for( int i = 0; i < num; i++ )
+ {
+ OMX_BUFFERHEADERTYPE *pBufHdr;
+ eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp,
+ &pBufHdr,
+ mCameraAdapterParameters.mMeasurementPortIndex,
+ 0,
+ measurementData->mBufSize,
+ (OMX_U8*)(mPreviewDataBuffers[i]));
+
+ if ( eError == OMX_ErrorNone )
+ {
+ pBufHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufHdr->nVersion.s.nRevision = 0 ;
+ pBufHdr->nVersion.s.nStep = 0;
+ measurementData->mBufferHeader[i] = pBufHdr;
+ }
+ else
+ {
+ CAMHAL_LOGEB("OMX_UseBuffer -0x%x", eError);
+ ret = BAD_VALUE;
+ break;
+ }
+ }
+
+ }
+
+ CAMHAL_LOGDA("Registering preview buffers");
+
+ ret = mUsePreviewSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Preview buffer registration successfull");
+ }
+ else
+ {
+ if ( mComponentState == OMX_StateLoaded )
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateIdle,
+ NULL);
+ }
+ else
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ }
+ CAMHAL_LOGEA("Timeout expired on preview buffer registration");
+ goto EXIT;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | ErrorUtils::omxToAndroidError(eError));
+
+ ///If there is any failure, we reach here.
+ ///Here, we do any resource freeing and convert from OMX error code to Camera Hal error code
+ EXIT:
+
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | ErrorUtils::omxToAndroidError(eError));
+}
+
+status_t OMXCameraAdapter::startPreview()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters *mPreviewData = NULL;
+ OMXCameraPortParameters *measurementData = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if( 0 != mStartPreviewSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mStartPreviewSem semaphore count %d", mStartPreviewSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
+
+ if( OMX_StateIdle == mComponentState )
+ {
+ ///Register for EXECUTING state transition.
+ ///This method just inserts a message in Event Q, which is checked in the callback
+        ///The semaphore passed is signalled by the callback
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateExecuting,
+ mStartPreviewSem);
+
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error in registering for event %d", ret);
+ goto EXIT;
+ }
+
+ ///Switch to EXECUTING state
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandStateSet,
+ OMX_StateExecuting,
+ NULL);
+
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_SendCommand(OMX_StateExecuting)-0x%x", eError);
+ }
+
+ CAMHAL_LOGDA("+Waiting for component to go into EXECUTING state");
+ ret = mStartPreviewSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("+Great. Component went into executing state!!");
+ }
+ else
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandStateSet,
+ OMX_StateExecuting,
+ NULL);
+ CAMHAL_LOGDA("Timeout expired on executing state switch!");
+ goto EXIT;
+ }
+
+ mComponentState = OMX_StateExecuting;
+
+ }
+
+ //Queue all the buffers on preview port
+ for(int index=0;index< mPreviewData->mMaxQueueable;index++)
+ {
+ CAMHAL_LOGDB("Queuing buffer on Preview port - 0x%x", (uint32_t)mPreviewData->mBufferHeader[index]->pBuffer);
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+ (OMX_BUFFERHEADERTYPE*)mPreviewData->mBufferHeader[index]);
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_FillThisBuffer-0x%x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+
+ if ( mMeasurementEnabled )
+ {
+
+ for(int index=0;index< mPreviewData->mNumBufs;index++)
+ {
+ CAMHAL_LOGDB("Queuing buffer on Measurement port - 0x%x", (uint32_t) measurementData->mBufferHeader[index]->pBuffer);
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+ (OMX_BUFFERHEADERTYPE*) measurementData->mBufferHeader[index]);
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_FillThisBuffer-0x%x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+
+ }
+
+ if ( mPending3Asettings )
+ apply3Asettings(mParameters3A);
+
+ //Query current focus distance after
+ //starting the preview
+ updateFocusDistances(mParameters);
+
+ //reset frame rate estimates
+ mFPS = 0.0f;
+ mLastFPS = 0.0f;
+ mFrameCount = 0;
+ mLastFrameCount = 0;
+ mIter = 1;
+ mLastFPSTime = systemTime();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+
+ EXIT:
+
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | ErrorUtils::omxToAndroidError(eError));
+
+}
+
+status_t OMXCameraAdapter::stopPreview()
+{
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ status_t ret = NO_ERROR;
+
+ OMXCameraPortParameters *mCaptureData , *mPreviewData, *measurementData;
+ mCaptureData = mPreviewData = measurementData = NULL;
+
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ mCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex];
+
+ if ( mComponentState != OMX_StateExecuting )
+ {
+ CAMHAL_LOGEA("Calling StopPreview() when not in EXECUTING state");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ ret = cancelAutoFocus();
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error canceling autofocus %d", ret);
+ // Error, but we probably still want to continue to stop preview
+ }
+
+ ret = release3ALock();
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error Releaseing 3A locks%d", ret);
+ }
+
+ if ( 0 != mStopPreviewSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mStopPreviewSem semaphore count %d", mStopPreviewSem.Count());
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ CAMHAL_LOGDB("Average framerate: %f", mFPS);
+
+ //Avoid state switching of the OMX Component
+ ret = flushBuffers();
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Flush Buffers failed 0x%x", ret);
+ goto EXIT;
+ }
+
+ ///Register for Preview port Disable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mStopPreviewSem);
+
+ ///Disable Preview Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+
+ ///Free the OMX Buffers
+ for ( int i = 0 ; i < mPreviewData->mNumBufs ; i++ )
+ {
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mPrevPortIndex,
+ mPreviewData->mBufferHeader[i]);
+
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+
+ if ( mMeasurementEnabled )
+ {
+
+ for ( int i = 0 ; i < measurementData->mNumBufs ; i++ )
+ {
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mMeasurementPortIndex,
+ measurementData->mBufferHeader[i]);
+ if(eError!=OMX_ErrorNone)
+ {
+ CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+
+ {
+ Mutex::Autolock lock(mPreviewDataBufferLock);
+ mPreviewDataBuffersAvailable.clear();
+ }
+
+ }
+
+ CAMHAL_LOGDA("Disabling preview port");
+ ret = mStopPreviewSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Preview port disabled");
+ }
+ else
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mPrevPortIndex,
+ NULL);
+ CAMHAL_LOGEA("Timeout expired on preview port disable");
+ goto EXIT;
+ }
+
+ {
+ Mutex::Autolock lock(mPreviewBufferLock);
+ ///Clear all the available preview buffers
+ mPreviewBuffersAvailable.clear();
+ }
+
+ switchToLoaded();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | ErrorUtils::omxToAndroidError(eError));
+
+ EXIT:
+
+ {
+ Mutex::Autolock lock(mPreviewBufferLock);
+ ///Clear all the available preview buffers
+ mPreviewBuffersAvailable.clear();
+ }
+
+ switchToLoaded();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (ret | ErrorUtils::omxToAndroidError(eError));
+
+}
+
+status_t OMXCameraAdapter::setSensorOverclock(bool enable)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_BOOLEANTYPE bOMX;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateLoaded != mComponentState )
+ {
+ CAMHAL_LOGDA("OMX component is not in loaded state");
+ return ret;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
+
+ if ( enable )
+ {
+ bOMX.bEnabled = OMX_TRUE;
+ }
+ else
+ {
+ bOMX.bEnabled = OMX_FALSE;
+ }
+
+ CAMHAL_LOGDB("Configuring Sensor overclock mode 0x%x", bOMX.bEnabled);
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, ( OMX_INDEXTYPE ) OMX_TI_IndexParamSensorOverClockMode, &bOMX);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while setting Sensor overclock 0x%x", eError);
+ ret = BAD_VALUE;
+ }
+ else
+ {
+ mSensorOverclock = enable;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::printComponentVersion(OMX_HANDLETYPE handle)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_VERSIONTYPE compVersion;
+ char compName[OMX_MAX_STRINGNAME_SIZE];
+ char *currentUUID = NULL;
+ size_t offset = 0;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == handle )
+ {
+ CAMHAL_LOGEB("Invalid OMX Handle =0x%x", ( unsigned int ) handle);
+ ret = -EINVAL;
+ }
+
+ mCompUUID[0] = 0;
+
+ if ( NO_ERROR == ret )
+ {
+ eError = OMX_GetComponentVersion(handle,
+ compName,
+ &compVersion,
+ &mCompRevision,
+ &mCompUUID
+ );
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("OMX_GetComponentVersion returned 0x%x", eError);
+ ret = BAD_VALUE;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGVB("OMX Component name: [%s]", compName);
+ CAMHAL_LOGVB("OMX Component version: [%u]", ( unsigned int ) compVersion.nVersion);
+ CAMHAL_LOGVB("Spec version: [%u]", ( unsigned int ) mCompRevision.nVersion);
+ CAMHAL_LOGVB("Git Commit ID: [%s]", mCompUUID);
+ currentUUID = ( char * ) mCompUUID;
+ }
+
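+    // The component UUID is returned as a sequence of NUL-terminated strings packed into
+    // mCompUUID: commit ID, branch, build date/time and build description. Walk past each
+    // terminator (bounds-checked against OMX_MAX_STRINGNAME_SIZE) to log the next field.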
+ if ( NULL != currentUUID )
+ {
+ offset = strlen( ( const char * ) mCompUUID) + 1;
+ if ( (int)currentUUID + (int)offset - (int)mCompUUID < OMX_MAX_STRINGNAME_SIZE )
+ {
+ currentUUID += offset;
+ CAMHAL_LOGVB("Git Branch: [%s]", currentUUID);
+ }
+ else
+ {
+ ret = BAD_VALUE;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ offset = strlen( ( const char * ) currentUUID) + 1;
+
+ if ( (int)currentUUID + (int)offset - (int)mCompUUID < OMX_MAX_STRINGNAME_SIZE )
+ {
+ currentUUID += offset;
+ CAMHAL_LOGVB("Build date and time: [%s]", currentUUID);
+ }
+ else
+ {
+ ret = BAD_VALUE;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ offset = strlen( ( const char * ) currentUUID) + 1;
+
+ if ( (int)currentUUID + (int)offset - (int)mCompUUID < OMX_MAX_STRINGNAME_SIZE )
+ {
+ currentUUID += offset;
+ CAMHAL_LOGVB("Build description: [%s]", currentUUID);
+ }
+ else
+ {
+ ret = BAD_VALUE;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::autoFocus()
+{
+ status_t ret = NO_ERROR;
+ TIUTILS::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+ msg.command = CommandHandler::CAMERA_PERFORM_AUTOFOCUS;
+ msg.arg1 = mErrorNotifier;
+ ret = mCommandHandler->put(&msg);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::takePicture()
+{
+ status_t ret = NO_ERROR;
+ TIUTILS::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+ msg.command = CommandHandler::CAMERA_START_IMAGE_CAPTURE;
+ msg.arg1 = mErrorNotifier;
+ ret = mCommandHandler->put(&msg);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::startVideoCapture()
+{
+ return BaseCameraAdapter::startVideoCapture();
+}
+
+status_t OMXCameraAdapter::stopVideoCapture()
+{
+ return BaseCameraAdapter::stopVideoCapture();
+}
+
+//API to get the frame size required to be allocated. This size is used to override the size passed
+//by camera service when VSTAB/VNF is turned ON for example
+status_t OMXCameraAdapter::getFrameSize(size_t &width, size_t &height)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_RECTTYPE tFrameDim;
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT_PTR (&tFrameDim, OMX_CONFIG_RECTTYPE);
+ tFrameDim.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ if ( mOMXStateSwitch )
+ {
+
+ ret = switchToLoaded();
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("switchToLoaded() failed 0x%x", ret);
+ goto exit;
+ }
+
+ mOMXStateSwitch = false;
+ }
+
+ if ( OMX_StateLoaded == mComponentState )
+ {
+
+ ret = setLDC(mIPP);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("setLDC() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ goto exit;
+ }
+
+ ret = setNSF(mIPP);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("setNSF() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ goto exit;
+ }
+
+ ret = setCaptureMode(mCapMode);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("setCaptureMode() failed %d", ret);
+ }
+
+ if(mCapMode == OMXCameraAdapter::VIDEO_MODE)
+ {
+ if ( NO_ERROR == ret )
+ {
+ ///Enable/Disable Video Noise Filter
+ ret = enableVideoNoiseFilter(mVnfEnabled);
+ }
+
+ if ( NO_ERROR != ret)
+ {
+ CAMHAL_LOGEB("Error configuring VNF %x", ret);
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ///Enable/Disable Video Stabilization
+ ret = enableVideoStabilization(mVstabEnabled);
+ }
+
+ if ( NO_ERROR != ret)
+ {
+ CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
+ }
+ }
+ else
+ {
+ if ( NO_ERROR == ret )
+ {
+ ///Enable/Disable Video Noise Filter
+ ret = enableVideoNoiseFilter(false);
+ }
+
+ if ( NO_ERROR != ret)
+ {
+ CAMHAL_LOGEB("Error configuring VNF %x", ret);
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ///Enable/Disable Video Stabilization
+ ret = enableVideoStabilization(false);
+ }
+
+ if ( NO_ERROR != ret)
+ {
+ CAMHAL_LOGEB("Error configuring VSTAB %x", ret);
+ }
+ }
+
+ }
+
+ ret = setSensorOrientation(mSensorOrientation);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error configuring Sensor Orientation %x", ret);
+ mSensorOrientation = 0;
+ }
+
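+    // With the pipeline configured above, query the component for the 2D buffer
+    // allocation dimensions it actually requires on the preview port.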
+ if ( NO_ERROR == ret )
+ {
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, ( OMX_INDEXTYPE ) OMX_TI_IndexParam2DBufferAllocDimension, &tFrameDim);
+ if ( OMX_ErrorNone == eError)
+ {
+ width = tFrameDim.nWidth;
+ height = tFrameDim.nHeight;
+ }
+ }
+
+exit:
+
+ CAMHAL_LOGDB("Required frame size %dx%d", width, height);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+ OMX_PARAM_PORTDEFINITIONTYPE portCheck;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateLoaded != mComponentState )
+ {
+ CAMHAL_LOGEA("Calling getFrameDataSize() when not in LOADED state");
+ dataFrameSize = 0;
+ ret = BAD_VALUE;
+ }
+
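+    // Read the measurement port definition, program the requested buffer count, then read
+    // it back so the component can report the per-buffer size it expects for frame data.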
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR(&portCheck, OMX_PARAM_PORTDEFINITIONTYPE);
+ portCheck.nPortIndex = mCameraAdapterParameters.mMeasurementPortIndex;
+
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, OMX_IndexParamPortDefinition, &portCheck);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("OMX_GetParameter on OMX_IndexParamPortDefinition returned: 0x%x", eError);
+ dataFrameSize = 0;
+ ret = BAD_VALUE;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ portCheck.nBufferCountActual = bufferCount;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, OMX_IndexParamPortDefinition, &portCheck);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("OMX_SetParameter on OMX_IndexParamPortDefinition returned: 0x%x", eError);
+ dataFrameSize = 0;
+ ret = BAD_VALUE;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, OMX_IndexParamPortDefinition, &portCheck);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("OMX_GetParameter on OMX_IndexParamPortDefinition returned: 0x%x", eError);
+ ret = BAD_VALUE;
+ }
+ else
+ {
+ mCameraAdapterParameters.mCameraPortParams[portCheck.nPortIndex].mBufSize = portCheck.nBufferSize;
+ dataFrameSize = portCheck.nBufferSize;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void OMXCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
+{
+ LOG_FUNCTION_NAME;
+
+ static const int DEGREES_TILT_IGNORE = 45;
+ int device_orientation = 0;
+ int mount_orientation = 0;
+ const char *facing_direction = NULL;
+
+    // If the tilt angle exceeds DEGREES_TILT_IGNORE, the orientation reported by the
+    // sensor is not reliable, so ignore this event. The value of DEGREES_TILT_IGNORE
+    // may need adjusting.
+ if (tilt > DEGREES_TILT_IGNORE) {
+ return;
+ }
+
+ if (mCapabilities) {
+ if (mCapabilities->get(CameraProperties::ORIENTATION_INDEX)) {
+ mount_orientation = atoi(mCapabilities->get(CameraProperties::ORIENTATION_INDEX));
+ }
+ facing_direction = mCapabilities->get(CameraProperties::FACING_INDEX);
+ }
+
+ // calculate device orientation relative to the sensor orientation
+ // front camera display is mirrored
+ if (facing_direction && !strcmp(facing_direction, TICameraParameters::FACING_FRONT)) {
+ device_orientation = (orientation - mount_orientation + 360) % 360;
+ } else { // back-facing camera
+ device_orientation = (orientation + mount_orientation) % 360;
+ }
+
+ if (device_orientation != mDeviceOrientation) {
+ mDeviceOrientation = device_orientation;
+
+ mFaceDetectionLock.lock();
+ if (mFaceDetectionRunning) {
+ // restart face detection with new rotation
+ setFaceDetection(true, mDeviceOrientation);
+ }
+ mFaceDetectionLock.unlock();
+ }
+ CAMHAL_LOGVB("orientation = %d tilt = %d device_orientation = %d", orientation, tilt, mDeviceOrientation);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/* Application callback Functions */
+/*========================================================*/
+/* @ fn OMXCameraAdapterEventHandler :: Application callback   */
+/*========================================================*/
+OMX_ERRORTYPE OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData)
+{
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGDB("Event %d", eEvent);
+
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ OMXCameraAdapter *oca = (OMXCameraAdapter*)pAppData;
+ ret = oca->OMXCameraAdapterEventHandler(hComponent, eEvent, nData1, nData2, pEventData);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+/* Application callback Functions */
+/*========================================================*/
+/* @ fn OMXCameraAdapter::OMXCameraAdapterEventHandler         */
+/*========================================================*/
+OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData)
+{
+
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ CAMHAL_LOGDB("+OMX_Event %x, %d %d", eEvent, (int)nData1, (int)nData2);
+
+ switch (eEvent) {
+ case OMX_EventCmdComplete:
+ CAMHAL_LOGDB("+OMX_EventCmdComplete %d %d", (int)nData1, (int)nData2);
+
+ if (OMX_CommandStateSet == nData1) {
+ mCameraAdapterParameters.mState = (OMX_STATETYPE) nData2;
+
+ } else if (OMX_CommandFlush == nData1) {
+ CAMHAL_LOGDB("OMX_CommandFlush received for port %d", (int)nData2);
+
+ } else if (OMX_CommandPortDisable == nData1) {
+ CAMHAL_LOGDB("OMX_CommandPortDisable received for port %d", (int)nData2);
+
+ } else if (OMX_CommandPortEnable == nData1) {
+ CAMHAL_LOGDB("OMX_CommandPortEnable received for port %d", (int)nData2);
+
+ } else if (OMX_CommandMarkBuffer == nData1) {
+ ///This is not used currently
+ }
+
+ CAMHAL_LOGDA("-OMX_EventCmdComplete");
+ break;
+
+ case OMX_EventIndexSettingChanged:
+ CAMHAL_LOGDB("OMX_EventIndexSettingChanged event received data1 0x%x, data2 0x%x",
+ ( unsigned int ) nData1, ( unsigned int ) nData2);
+ break;
+
+ case OMX_EventError:
+            CAMHAL_LOGDB("OMX interface reported error 0x%x", (int)nData1);
+            CAMHAL_LOGDA("See OMX_ERRORTYPE for reference");
+ if ( NULL != mErrorNotifier && ( ( OMX_U32 ) OMX_ErrorHardware == nData1 ) && mComponentState != OMX_StateInvalid)
+ {
+ CAMHAL_LOGEA("***Got Fatal Error Notification***\n");
+ mComponentState = OMX_StateInvalid;
+ ///Report Error to App
+ mErrorNotifier->errorNotify(CAMERA_ERROR_UNKNOWN);
+ }
+ break;
+
+ case OMX_EventMark:
+ break;
+
+ case OMX_EventPortSettingsChanged:
+ break;
+
+ case OMX_EventBufferFlag:
+ break;
+
+ case OMX_EventResourcesAcquired:
+ break;
+
+ case OMX_EventComponentResumed:
+ break;
+
+ case OMX_EventDynamicResourcesAvailable:
+ break;
+
+ case OMX_EventPortFormatDetected:
+ break;
+
+ default:
+ break;
+ }
+
+    ///Signal to the thread(s) waiting that the event has occurred
+ SignalEvent(hComponent, eEvent, nData1, nData2, pEventData);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return eError;
+
+ EXIT:
+
+ CAMHAL_LOGEB("Exiting function %s because of eError=%x", __FUNCTION__, eError);
+ LOG_FUNCTION_NAME_EXIT;
+ return eError;
+}
+
+OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData)
+{
+ Mutex::Autolock lock(mEventLock);
+ TIUTILS::Message *msg;
+
+ LOG_FUNCTION_NAME;
+
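+    // Search the queue of registered waiters for an entry matching this event and its data,
+    // signal the semaphore stored in arg3, then remove and free the entry.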
+ if ( !mEventSignalQ.isEmpty() )
+ {
+ CAMHAL_LOGDA("Event queue not empty");
+
+ for ( unsigned int i = 0 ; i < mEventSignalQ.size() ; i++ )
+ {
+ msg = mEventSignalQ.itemAt(i);
+ if ( NULL != msg )
+ {
+ if( ( msg->command != 0 || msg->command == ( unsigned int ) ( eEvent ) )
+ && ( !msg->arg1 || ( OMX_U32 ) msg->arg1 == nData1 )
+ && ( !msg->arg2 || ( OMX_U32 ) msg->arg2 == nData2 )
+ && msg->arg3)
+ {
+ Semaphore *sem = (Semaphore*) msg->arg3;
+ CAMHAL_LOGDA("Event matched, signalling sem");
+ mEventSignalQ.removeAt(i);
+ //Signal the semaphore provided
+ sem->Signal();
+ free(msg);
+ break;
+ }
+ }
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEA("Event queue empty!!!");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return OMX_ErrorNone;
+}
+
+status_t OMXCameraAdapter::RegisterForEvent(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN Semaphore &semaphore)
+{
+ status_t ret = NO_ERROR;
+ ssize_t res;
+ Mutex::Autolock lock(mEventLock);
+
+ LOG_FUNCTION_NAME;
+
+ TIUTILS::Message * msg = ( struct TIUTILS::Message * ) malloc(sizeof(struct TIUTILS::Message));
+ if ( NULL != msg )
+ {
+ msg->command = ( unsigned int ) eEvent;
+ msg->arg1 = ( void * ) nData1;
+ msg->arg2 = ( void * ) nData2;
+ msg->arg3 = ( void * ) &semaphore;
+ msg->arg4 = ( void * ) hComponent;
+ res = mEventSignalQ.add(msg);
+ if ( NO_MEMORY == res )
+ {
+            CAMHAL_LOGEA("No resources for inserting OMX events");
+ ret = -ENOMEM;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/*========================================================*/
+/* @ fn OMXCameraAdapterEmptyBufferDone :: Application callback */
+/*========================================================*/
+OMX_ERRORTYPE OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
+{
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ OMXCameraAdapter *oca = (OMXCameraAdapter*)pAppData;
+ eError = oca->OMXCameraAdapterEmptyBufferDone(hComponent, pBuffHeader);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return eError;
+}
+
+
+/*========================================================*/
+/* @ fn OMXCameraAdapter::OMXCameraAdapterEmptyBufferDone       */
+/*========================================================*/
+OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
+{
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return OMX_ErrorNone;
+}
+
+static void debugShowFPS()
+{
+ static int mFrameCount = 0;
+ static int mLastFrameCount = 0;
+ static nsecs_t mLastFpsTime = 0;
+ static float mFps = 0;
+ mFrameCount++;
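+    // Recompute the estimate every 32 frames (whenever the low five bits of the counter wrap to zero).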
+ if (!(mFrameCount & 0x1F)) {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ LOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ }
+    // XXX: mFps holds the value we want
+}
+
+/*========================================================*/
+/* @ fn OMXCameraAdapterFillBufferDone :: Application callback  */
+/*========================================================*/
+OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
+{
+ TIUTILS::Message msg;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ if (UNLIKELY(mDebugFps)) {
+ debugShowFPS();
+ }
+
+ OMXCameraAdapter *adapter = ( OMXCameraAdapter * ) pAppData;
+ if ( NULL != adapter )
+ {
+ msg.command = OMXCameraAdapter::OMXCallbackHandler::CAMERA_FILL_BUFFER_DONE;
+ msg.arg1 = ( void * ) hComponent;
+ msg.arg2 = ( void * ) pBuffHeader;
+ adapter->mOMXCallbackHandler->put(&msg);
+ }
+
+ return eError;
+}
+
+/*========================================================*/
+/* @ fn OMXCameraAdapter::OMXCameraAdapterFillBufferDone        */
+/*========================================================*/
+OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
+{
+
+ status_t stat = NO_ERROR;
+ status_t res1, res2;
+ OMXCameraPortParameters *pPortParam;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ CameraFrame::FrameType typeOfFrame = CameraFrame::ALL_FRAMES;
+ unsigned int refCount = 0;
+ BaseCameraAdapter::AdapterState state;
+ BaseCameraAdapter::getState(state);
+ sp<CameraFDResult> fdResult = NULL;
+
+ res1 = res2 = NO_ERROR;
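+    // Dispatch based on which output port produced this buffer: preview, measurement data,
+    // or still image capture.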
+ pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nOutputPortIndex]);
+ if (pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW)
+ {
+
+ if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE )
+ {
+ return OMX_ErrorNone;
+ }
+
+ recalculateFPS();
+
+ {
+ Mutex::Autolock lock(mFaceDetectionLock);
+ if ( mFaceDetectionRunning ) {
+ detectFaces(pBuffHeader, fdResult, pPortParam->mWidth, pPortParam->mHeight);
+ if ( NULL != fdResult.get() ) {
+ notifyFaceSubscribers(fdResult);
+ fdResult.clear();
+ }
+ }
+ }
+
+ stat |= advanceZoom();
+
+ ///On the fly update to 3A settings not working
+ if( mPending3Asettings )
+ {
+ apply3Asettings(mParameters3A);
+ }
+
+ ///Prepare the frames to be sent - initialize CameraFrame object and reference count
+ CameraFrame cameraFrameVideo, cameraFramePreview;
+ if ( mRecording )
+ {
+ res1 = initCameraFrame(cameraFrameVideo,
+ pBuffHeader,
+ CameraFrame::VIDEO_FRAME_SYNC,
+ pPortParam);
+ }
+
+ if( mWaitingForSnapshot )
+ {
+ typeOfFrame = CameraFrame::SNAPSHOT_FRAME;
+ }
+ else
+ {
+ typeOfFrame = CameraFrame::PREVIEW_FRAME_SYNC;
+ }
+
+ LOGV("FBD pBuffer = 0x%x", pBuffHeader->pBuffer);
+
+ res2 = initCameraFrame(cameraFramePreview,
+ pBuffHeader,
+ typeOfFrame,
+ pPortParam);
+
+ stat |= res1 | res2;
+
+ if ( mRecording )
+ {
+ res1 = sendFrame(cameraFrameVideo);
+ }
+
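+        // In high-speed capture mode, the shutter notification is sent when the first
+        // snapshot preview frame arrives.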
+ if( mWaitingForSnapshot )
+ {
+ mSnapshotCount++;
+
+ if ( ( mSnapshotCount == 1 ) &&
+ ( HIGH_SPEED == mCapMode ) )
+ {
+ notifyShutterSubscribers();
+ }
+ }
+
+ res2 = sendFrame(cameraFramePreview);
+
+ stat |= ( ( NO_ERROR == res1 ) || ( NO_ERROR == res2 ) ) ? ( ( int ) NO_ERROR ) : ( -1 );
+
+ }
+ else if( pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT )
+ {
+ typeOfFrame = CameraFrame::FRAME_DATA_SYNC;
+ CameraFrame cameraFrame;
+ stat |= initCameraFrame(cameraFrame,
+ pBuffHeader,
+ typeOfFrame,
+ pPortParam);
+ stat |= sendFrame(cameraFrame);
+ }
+ else if( pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_IMAGE_OUT_IMAGE )
+ {
+
+ if ( OMX_COLOR_FormatUnused == mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mColorFormat )
+ {
+ typeOfFrame = CameraFrame::IMAGE_FRAME;
+ }
+ else
+ {
+ typeOfFrame = CameraFrame::RAW_FRAME;
+ }
+
+ pPortParam->mImageType = typeOfFrame;
+
+ if((mCapturedFrames>0) && !mCaptureSignalled)
+ {
+ mCaptureSignalled = true;
+ mCaptureSem.Signal();
+ }
+
+ if( ( CAPTURE_ACTIVE & state ) != CAPTURE_ACTIVE )
+ {
+ goto EXIT;
+ }
+
+ {
+ Mutex::Autolock lock(mBracketingLock);
+ if ( mBracketingEnabled )
+ {
+ doBracketing(pBuffHeader, typeOfFrame);
+ return eError;
+ }
+ }
+
+ if ( 1 > mCapturedFrames )
+ {
+ goto EXIT;
+ }
+
+ CAMHAL_LOGDB("Captured Frames: %d", mCapturedFrames);
+
+ mCapturedFrames--;
+
+ CameraFrame cameraFrame;
+ stat |= initCameraFrame(cameraFrame,
+ pBuffHeader,
+ typeOfFrame,
+ pPortParam);
+ stat |= sendFrame(cameraFrame);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Frame received for non-(preview/capture/measure) port. This is yet to be supported");
+ goto EXIT;
+ }
+
+ if ( NO_ERROR != stat )
+ {
+ CAMHAL_LOGDB("sendFrameToSubscribers error: %d", stat);
+ returnFrame(pBuffHeader->pBuffer, typeOfFrame);
+ }
+
+ return eError;
+
+ EXIT:
+
+ CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, stat, eError);
+
+ if ( NO_ERROR != stat )
+ {
+ if ( NULL != mErrorNotifier )
+ {
+ mErrorNotifier->errorNotify(CAMERA_ERROR_UNKNOWN);
+ }
+ }
+
+ return eError;
+}
+
+status_t OMXCameraAdapter::recalculateFPS()
+{
+ float currentFPS;
+
+ mFrameCount++;
+
+ if ( ( mFrameCount % FPS_PERIOD ) == 0 )
+ {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFPSTime;
+ currentFPS = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFPSTime = now;
+ mLastFrameCount = mFrameCount;
+
+ if ( 1 == mIter )
+ {
+ mFPS = currentFPS;
+ }
+ else
+ {
+ //cumulative moving average
+ mFPS = mLastFPS + (currentFPS - mLastFPS)/mIter;
+ }
+
+ mLastFPS = mFPS;
+ mIter++;
+ }
+
+ return NO_ERROR;
+}
+
+status_t OMXCameraAdapter::sendFrame(CameraFrame &frame)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+
+ if ( NO_ERROR == ret )
+ {
+ ret = sendFrameToSubscribers(&frame);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::initCameraFrame( CameraFrame &frame,
+ OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader,
+ int typeOfFrame,
+ OMXCameraPortParameters *port)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == port)
+ {
+ CAMHAL_LOGEA("Invalid portParam");
+ return -EINVAL;
+ }
+
+ if ( NULL == pBuffHeader )
+ {
+ CAMHAL_LOGEA("Invalid Buffer header");
+ return -EINVAL;
+ }
+
+ frame.mFrameType = typeOfFrame;
+ frame.mBuffer = pBuffHeader->pBuffer;
+ frame.mLength = pBuffHeader->nFilledLen;
+ frame.mAlignment = port->mStride;
+ frame.mOffset = pBuffHeader->nOffset;
+ frame.mWidth = port->mWidth;
+ frame.mHeight = port->mHeight;
+
+    // Calculate the time source delta between the Ducati clock and system time only once,
+    // at camera start. A one-time constant difference is observed between the Ducati source
+    // clock and the system monotonic timer, even though both derive from the same 32 kHz clock.
+    // The delta is applied to the Ducati timestamps so that video timestamps line up with
+    // system time and stay aligned with the audio timestamps at regular intervals.
+ if ( onlyOnce )
+ {
+ mTimeSourceDelta = (pBuffHeader->nTimeStamp * 1000) - systemTime(SYSTEM_TIME_MONOTONIC);
+ onlyOnce = false;
+ }
+
+ // Calculating the new video timestamp based on offset from ducati source.
+ frame.mTimestamp = (pBuffHeader->nTimeStamp * 1000) - mTimeSourceDelta;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+bool OMXCameraAdapter::CommandHandler::Handler()
+{
+ TIUTILS::Message msg;
+ volatile int forever = 1;
+ status_t stat;
+ ErrorNotifier *errorNotify = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ while ( forever )
+ {
+ stat = NO_ERROR;
+        CAMHAL_LOGDA("Handler: waiting for message...");
+ TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
+ mCommandMsgQ.get(&msg);
+ CAMHAL_LOGDB("msg.command = %d", msg.command);
+ switch ( msg.command ) {
+ case CommandHandler::CAMERA_START_IMAGE_CAPTURE:
+ {
+ stat = mCameraAdapter->startImageCapture();
+ break;
+ }
+ case CommandHandler::CAMERA_PERFORM_AUTOFOCUS:
+ {
+ stat = mCameraAdapter->doAutoFocus();
+ break;
+ }
+ case CommandHandler::COMMAND_EXIT:
+ {
+ CAMHAL_LOGEA("Exiting command handler");
+ forever = 0;
+ break;
+ }
+ }
+
+ if ( NO_ERROR != stat )
+ {
+ errorNotify = ( ErrorNotifier * ) msg.arg1;
+ if ( NULL != errorNotify )
+ {
+ errorNotify->errorNotify(CAMERA_ERROR_UNKNOWN);
+ }
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return false;
+}
+
+bool OMXCameraAdapter::OMXCallbackHandler::Handler()
+{
+ TIUTILS::Message msg;
+ volatile int forever = 1;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ while(forever){
+ TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
+ mCommandMsgQ.get(&msg);
+ switch ( msg.command ) {
+ case OMXCallbackHandler::CAMERA_FILL_BUFFER_DONE:
+ {
+ ret = mCameraAdapter->OMXCameraAdapterFillBufferDone(( OMX_HANDLETYPE ) msg.arg1,
+ ( OMX_BUFFERHEADERTYPE *) msg.arg2);
+ break;
+ }
+ case CommandHandler::COMMAND_EXIT:
+ {
+ CAMHAL_LOGEA("Exiting OMX callback handler");
+ forever = 0;
+ break;
+ }
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return false;
+}
+
+OMXCameraAdapter::OMXCameraAdapter():mComponentState (OMX_StateInvalid)
+{
+ LOG_FUNCTION_NAME;
+
+ mPictureRotation = 0;
+ // Initial values
+ mTimeSourceDelta = 0;
+ onlyOnce = true;
+
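+    // Create every synchronization semaphore with an initial count of zero, so that waiters
+    // block until the corresponding OMX event or callback signals them.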
+ mDoAFSem.Create(0);
+ mInitSem.Create(0);
+ mFlushSem.Create(0);
+ mUsePreviewDataSem.Create(0);
+ mUsePreviewSem.Create(0);
+ mUseCaptureSem.Create(0);
+ mStartPreviewSem.Create(0);
+ mStopPreviewSem.Create(0);
+ mStartCaptureSem.Create(0);
+ mStopCaptureSem.Create(0);
+ mSwitchToLoadedSem.Create(0);
+ mCaptureSem.Create(0);
+
+ mCameraAdapterParameters.mHandleComp = 0;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+OMXCameraAdapter::~OMXCameraAdapter()
+{
+ LOG_FUNCTION_NAME;
+
+ //Return to OMX Loaded state
+ switchToLoaded();
+
+ ///Free the handle for the Camera component
+ if(mCameraAdapterParameters.mHandleComp)
+ {
+ OMX_FreeHandle(mCameraAdapterParameters.mHandleComp);
+ }
+
+ ///De-init the OMX
+ if( (mComponentState==OMX_StateLoaded) || (mComponentState==OMX_StateInvalid))
+ {
+ OMX_Deinit();
+ }
+
+ //Exit and free ref to command handling thread
+ if ( NULL != mCommandHandler.get() )
+ {
+ TIUTILS::Message msg;
+ msg.command = CommandHandler::COMMAND_EXIT;
+ msg.arg1 = mErrorNotifier;
+ mCommandHandler->put(&msg);
+ mCommandHandler->requestExitAndWait();
+ mCommandHandler.clear();
+ }
+
+ //Exit and free ref to callback handling thread
+ if ( NULL != mOMXCallbackHandler.get() )
+ {
+ TIUTILS::Message msg;
+ msg.command = OMXCallbackHandler::COMMAND_EXIT;
+ mOMXCallbackHandler->put(&msg);
+ mOMXCallbackHandler->requestExitAndWait();
+ mOMXCallbackHandler.clear();
+ }
+
+ gCameraAdapter = NULL;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+extern "C" CameraAdapter* CameraAdapter_Factory()
+{
+ Mutex::Autolock lock(gAdapterLock);
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == gCameraAdapter )
+ {
+ CAMHAL_LOGDA("Creating new Camera adapter instance");
+ gCameraAdapter= new OMXCameraAdapter();
+ }
+ else
+ {
+ CAMHAL_LOGDA("Reusing existing Camera adapter instance");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return gCameraAdapter;
+}
+
+extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
+ const unsigned int starting_camera,
+ const unsigned int max_camera) {
+ int num_cameras_supported = 0;
+ CameraProperties::Properties* properties = NULL;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_HANDLETYPE handle = NULL;
+ OMX_TI_CAPTYPE caps;
+
+ LOG_FUNCTION_NAME;
+
+ if (!properties_array) {
+ CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ // OMX_Init
+ eError = OMX_Init();
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_Init -0x%x", eError);
+ return 0; // no cameras supported
+ }
+
+ // Setup key parameters to send to Ducati during init
+ OMX_CALLBACKTYPE oCallbacks;
+
+ // Initialize the callback handles
+ oCallbacks.EventHandler = android::OMXCameraAdapterEventHandler;
+ oCallbacks.EmptyBufferDone = android::OMXCameraAdapterEmptyBufferDone;
+ oCallbacks.FillBufferDone = android::OMXCameraAdapterFillBufferDone;
+
+ // Get Handle
+ eError = OMX_GetHandle(&handle, (OMX_STRING)"OMX.TI.DUCATI1.VIDEO.CAMERA", NULL, &oCallbacks);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
+ goto EXIT;
+ }
+
+    // Continue selecting the sensor and then querying OMX Camera for its capabilities
+ // When sensor select returns an error, we know to break and stop
+ while (eError == OMX_ErrorNone &&
+ (starting_camera + num_cameras_supported) < max_camera) {
+ // sensor select
+ OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
+ OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
+ sensorSelect.eSensor = (OMX_SENSORSELECT) num_cameras_supported;
+ eError = OMX_SetConfig(handle, ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSensorSelect, &sensorSelect);
+
+ if ( OMX_ErrorNone != eError ) {
+ break;
+ }
+
+ // get and fill capabilities
+ properties = properties_array + starting_camera + num_cameras_supported;
+ OMXCameraAdapter::getCaps(properties, handle);
+
+ // need to fill facing information
+ // assume that only sensor 0 is back facing
+ if (num_cameras_supported == 0) {
+ properties->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_BACK);
+ } else {
+ properties->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_FRONT);
+ }
+
+ num_cameras_supported++;
+ }
+
+ EXIT:
+ // clean up
+ if(handle) {
+ OMX_FreeHandle(handle);
+ }
+ OMX_Deinit();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return num_cameras_supported;
+}
+
+};
+
+
+/*--------------------Camera Adapter Class ENDS here-----------------------------*/
+
diff --git a/camera/OMXCameraAdapter/OMXCapabilities.cpp b/camera/OMXCameraAdapter/OMXCapabilities.cpp
new file mode 100644
index 0000000..fc961fa
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXCapabilities.cpp
@@ -0,0 +1,1181 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXCapabilities.cpp
+*
+* This file implements the OMX Capabilities feature.
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include "ErrorUtils.h"
+#include "TICameraParameters.h"
+
+extern "C" {
+#include "memmgr.h"
+}
+
+namespace android {
+
+#undef LOG_TAG
+
+// Maintain a separate tag for OMXCameraAdapter logs to isolate OMX-specific issues
+#define LOG_TAG "CameraHAL"
+
+/************************************
+ * global constants and variables
+ *************************************/
+
+#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
+#define FPS_MIN 5
+#define FPS_STEP 5
+#define FPS_RANGE_STEP 10
+
+static const char PARAM_SEP[] = ",";
+static const int PARAM_SEP_CHAR = ',';
+static const uint32_t VFR_OFFSET = 8;
+static const char VFR_BACKET_START[] = "(";
+static const char VFR_BRACKET_END[] = ")";
+static const char FRAMERATE_COUNT = 10;
+
+/**** look up tables to translate OMX Caps to Parameter ****/
+
+const CapResolution OMXCameraAdapter::mImageCapRes [] = {
+ { 4032, 3024, "4032x3024" },
+ { 4000, 3000, "4000x3000" },
+ { 3648, 2736, "3648x2736" },
+ { 3264, 2448, "3264x2448" },
+ { 2592, 1944, "2592x1944" },
+ { 2048, 1536, "2048x1536" },
+ { 1600, 1200, "1600x1200" },
+ { 1280, 1024, "1280x1024" },
+ { 1152, 864, "1152x864" },
+ { 1280, 960, "1280x960" },
+ { 640, 480, "640x480" },
+ { 320, 240, "320x240" },
+};
+
+const CapResolution OMXCameraAdapter::mPreviewRes [] = {
+ { 1920, 1080, "1920x1080" },
+ { 1280, 720, "1280x720" },
+ { 800, 480, "800x480" },
+ { 720, 576, "720x576" },
+ { 720, 480, "720x480" },
+ { 768, 576, "768x576" },
+ { 640, 480, "640x480" },
+ { 320, 240, "320x240" },
+ { 352, 288, "352x288" },
+ { 240, 160, "240x160" },
+ { 176, 144, "176x144" },
+ { 128, 96, "128x96" },
+};
+
+const CapResolution OMXCameraAdapter::mThumbRes [] = {
+ { 640, 480, "640x480" },
+ { 160, 120, "160x120" },
+ { 200, 120, "200x120" },
+ { 320, 240, "320x240" },
+ { 512, 384, "512x384" },
+ { 352, 144, "352x144" },
+ { 176, 144, "176x144" },
+ { 96, 96, "96x96" },
+};
+
+const CapPixelformat OMXCameraAdapter::mPixelformats [] = {
+ { OMX_COLOR_FormatCbYCrY, CameraParameters::PIXEL_FORMAT_YUV422I },
+ { OMX_COLOR_FormatYUV420SemiPlanar, CameraParameters::PIXEL_FORMAT_YUV420SP },
+ { OMX_COLOR_Format16bitRGB565, CameraParameters::PIXEL_FORMAT_RGB565 },
+ { OMX_COLOR_FormatRawBayer10bit, TICameraParameters::PIXEL_FORMAT_RAW },
+};
+
+const CapFramerate OMXCameraAdapter::mFramerates [] = {
+ { 30, "30" },
+ { 25, "25" },
+ { 24, "24" },
+ { 20, "20" },
+ { 15, "15" },
+ { 10, "10" },
+};
+
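+// Zoom stages: the numeric field is the zoom factor in Q16 fixed point (65536 == 1.0x),
+// and the string is the same ratio expressed as a percentage for the parameter list.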
+const CapZoom OMXCameraAdapter::mZoomStages [] = {
+ { 65536, "100" },
+ { 68157, "104" },
+ { 70124, "107" },
+ { 72745, "111" },
+ { 75366, "115" },
+ { 77988, "119" },
+ { 80609, "123" },
+ { 83231, "127" },
+ { 86508, "132" },
+ { 89784, "137" },
+ { 92406, "141" },
+ { 95683, "146" },
+ { 99615, "152" },
+ { 102892, "157" },
+ { 106168, "162" },
+ { 110100, "168" },
+ { 114033, "174" },
+ { 117965, "180" },
+ { 122552, "187" },
+ { 126484, "193" },
+ { 131072, "200" },
+ { 135660, "207" },
+ { 140247, "214" },
+ { 145490, "222" },
+ { 150733, "230" },
+ { 155976, "238" },
+ { 161219, "246" },
+ { 167117, "255" },
+ { 173015, "264" },
+ { 178913, "273" },
+ { 185467, "283" },
+ { 192020, "293" },
+ { 198574, "303" },
+ { 205783, "314" },
+ { 212992, "325" },
+ { 220201, "336" },
+ { 228065, "348" },
+ { 236585, "361" },
+ { 244449, "373" },
+ { 252969, "386" },
+ { 262144, "400" },
+ { 271319, "414" },
+ { 281149, "429" },
+ { 290980, "444" },
+ { 300810, "459" },
+ { 311951, "476" },
+ { 322437, "492" },
+ { 334234, "510" },
+ { 346030, "528" },
+ { 357827, "546" },
+ { 370934, "566" },
+ { 384041, "586" },
+ { 397148, "606" },
+ { 411566, "628" },
+ { 425984, "650" },
+ { 441057, "673" },
+ { 456131, "696" },
+ { 472515, "721" },
+ { 488899, "746" },
+ { 506593, "773" },
+ { 524288, "800" },
+};
+
+const CapISO OMXCameraAdapter::mISOStages [] = {
+ { 0, "auto" },
+ { 100, "100" },
+ { 200, "200"},
+ { 400, "400" },
+ { 800, "800" },
+ { 1000, "1000" },
+ { 1200, "1200" },
+ { 1600, "1600" },
+};
+
+// mapped values have to match with new_sensor_MSP.h
+const CapU32 OMXCameraAdapter::mSensorNames [] = {
+ { 300, "IMX060" },
+ { 301, "OV5650" },
+ { 305, "S5K4E1GA"},
+ { 306, "S5K6A1GX03" }
+ // TODO(XXX): need to account for S3D camera later
+};
+
+/************************************
+ * static helper functions
+ *************************************/
+
+// Utility function to remove the last separator
+void remove_last_sep(char* buffer) {
+ char* last_sep = NULL;
+ last_sep = strrchr(buffer, PARAM_SEP_CHAR);
+ if (last_sep != NULL) {
+ last_sep[0] = '\0';
+ }
+}
+
+
+/*****************************************
+ * internal static function declarations
+ *****************************************/
+
+/**** Utility functions to help translate OMX Caps to Parameter ****/
+
+status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
+ const CapPixelformat *cap,
+ size_t capCount,
+ char * buffer,
+ size_t bufferSize) {
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( ( NULL == buffer ) || ( NULL == cap ) ) {
+ CAMHAL_LOGEA("Invalid input arguments");
+ return -EINVAL;
+ }
+
+ for ( unsigned int i = 0; i < capCount; i++ ) {
+ if ( format == cap[i].pixelformat ) {
+ strncat(buffer, cap[i].param, bufferSize - 1);
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::encodeFramerateCap(OMX_U32 framerateMax,
+ OMX_U32 framerateMin,
+ const CapFramerate *cap,
+ size_t capCount,
+ char * buffer,
+ size_t bufferSize) {
+ status_t ret = NO_ERROR;
+ bool minInserted = false;
+ bool maxInserted = false;
+ char tmpBuffer[FRAMERATE_COUNT];
+
+ LOG_FUNCTION_NAME;
+
+ if ( ( NULL == buffer ) || ( NULL == cap ) ) {
+ CAMHAL_LOGEA("Invalid input arguments");
+ return -EINVAL;
+ }
+
+ for ( unsigned int i = 0; i < capCount; i++ ) {
+ if ( (framerateMax >= cap[i].num) && (framerateMin <= cap[i].num) ) {
+ strncat(buffer, cap[i].param, bufferSize - 1);
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+
+ if ( cap[i].num == framerateMin ) {
+ minInserted = true;
+ }
+ }
+ if ( cap[i].num == framerateMax ) {
+ maxInserted = true;
+ }
+ }
+
+ if ( !maxInserted ) {
+ memset(tmpBuffer, 0, FRAMERATE_COUNT);
+        snprintf(tmpBuffer, FRAMERATE_COUNT - 1, "%u", ( unsigned int ) framerateMax);
+ strncat(buffer, tmpBuffer, bufferSize - 1);
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
+
+ if ( !minInserted ) {
+ memset(tmpBuffer, 0, FRAMERATE_COUNT);
+        snprintf(tmpBuffer, FRAMERATE_COUNT - 1, "%u", ( unsigned int ) framerateMin);
+ strncat(buffer, tmpBuffer, bufferSize - 1);
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
+
+ remove_last_sep(buffer);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::encodeVFramerateCap(OMX_TI_CAPTYPE &caps,
+ char *buffer,
+ char *defaultRange,
+ size_t bufferSize) {
+ status_t ret = NO_ERROR;
+ uint32_t minVFR, maxVFR;
+ char tmpBuffer[MAX_PROP_VALUE_LENGTH];
+ bool skipLast = false;
+ uint32_t min, max;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == buffer ) {
+ CAMHAL_LOGEA("Invalid input arguments");
+ return -EINVAL;
+ }
+
+ if(caps.ulPrvVarFPSModesCount < 1) {
+ return NO_ERROR;
+ }
+
+ // Assumption: last range in tPrvVarFPSModes will be for S30FPSHD mode
+ minVFR = caps.tPrvVarFPSModes[caps.ulPrvVarFPSModesCount-1].nVarFPSMin >> VFR_OFFSET;
+ maxVFR = caps.tPrvVarFPSModes[caps.ulPrvVarFPSModesCount-1].nVarFPSMax >> VFR_OFFSET;
+
+ if (minVFR < FPS_MIN) {
+ minVFR = FPS_MIN;
+ }
+
+ memset(tmpBuffer, '\0', MAX_PROP_VALUE_LENGTH);
+ min = max = 0;
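+    // Build the advertised ranges in VFR_SCALE units: a fixed (min,min) range for each step
+    // and, while it still fits under the sensor maximum, a variable (min,max) range
+    // FPS_RANGE_STEP fps wider.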
+ for (unsigned int i = minVFR; i <= maxVFR; i += FPS_STEP) {
+
+ min = i * CameraHal::VFR_SCALE;
+ max = (i + FPS_RANGE_STEP) * CameraHal::VFR_SCALE;
+
+ snprintf(tmpBuffer, ( MAX_PROP_VALUE_LENGTH - 1 ), "(%d,%d)", min, min);
+ strncat(buffer, tmpBuffer, ( bufferSize - 1 ));
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+
+ if (max <= maxVFR * CameraHal::VFR_SCALE) {
+ snprintf(tmpBuffer, ( MAX_PROP_VALUE_LENGTH - 1 ), "(%d,%d)", min, max);
+ strncat(buffer, tmpBuffer, ( bufferSize - 1 ));
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
+ }
+ remove_last_sep(buffer);
+
+ if ( 1 < strlen(tmpBuffer) ) {
+ snprintf(defaultRange, ( MAX_PROP_VALUE_LENGTH - 1 ), "%d,%d", min, min);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+size_t OMXCameraAdapter::encodeZoomCap(OMX_S32 maxZoom,
+ const CapZoom *cap,
+ size_t capCount,
+ char * buffer,
+ size_t bufferSize) {
+ status_t res = NO_ERROR;
+ size_t ret = 0;
+
+ LOG_FUNCTION_NAME;
+
+ if ( (NULL == buffer) || (NULL == cap) ) {
+ CAMHAL_LOGEA("Invalid input arguments");
+ return -EINVAL;
+ }
+
+
+ for ( unsigned int i = 0; i < capCount; i++ ) {
+ if ( cap[i].num <= maxZoom ) {
+ strncat(buffer, cap[i].param, bufferSize - 1);
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ ret++;
+ }
+ }
+ remove_last_sep(buffer);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::encodeISOCap(OMX_U32 maxISO,
+ const CapISO *cap,
+ size_t capCount,
+ char * buffer,
+ size_t bufferSize) {
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( (NULL == buffer) || (NULL == cap) ) {
+ CAMHAL_LOGEA("Invalid input arguments");
+ return -EINVAL;
+ }
+
+ for ( unsigned int i = 0; i < capCount; i++ ) {
+ if ( cap[i].num <= maxISO) {
+ strncat(buffer, cap[i].param, bufferSize - 1);
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
+ }
+ remove_last_sep(buffer);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res,
+ const CapResolution *cap,
+ size_t capCount,
+ char * buffer,
+ size_t bufferSize) {
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( (NULL == buffer) || (NULL == cap) ) {
+ CAMHAL_LOGEA("Invalid input arguments");
+ return -EINVAL;
+ }
+
+ for ( unsigned int i = 0 ; i < capCount ; i++ ) {
+ if ( (cap[i].width <= res.nWidthMax) &&
+ (cap[i].height <= res.nHeightMax) &&
+ (cap[i].width >= res.nWidthMin) &&
+ (cap[i].height >= res.nHeightMin) ) {
+ strncat(buffer, cap[i].param, bufferSize -1);
+ strncat(buffer, PARAM_SEP, bufferSize - 1);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertImageSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ ret = encodeSizeCap(caps.tImageResRange,
+ mImageCapRes,
+ ARRAY_SIZE(mImageCapRes),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret);
+ } else {
+ remove_last_sep(supported);
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported);
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ ret = encodeSizeCap(caps.tPreviewResRange,
+ mPreviewRes,
+ ARRAY_SIZE(mPreviewRes),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported preview sizes 0x%x", ret);
+ } else {
+ remove_last_sep(supported);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported);
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ ret = encodeSizeCap(caps.tThumbResRange,
+ mThumbRes,
+ ARRAY_SIZE(mThumbRes),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported thumbnail sizes 0x%x", ret);
+ } else {
+ //CTS Requirement: 0x0 should always be supported
+ strncat(supported, "0x0", MAX_PROP_NAME_LENGTH);
+ params->set(CameraProperties::SUPPORTED_THUMBNAIL_SIZES, supported);
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertZoomStages(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps)
+{
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ size_t zoomStageCount = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ zoomStageCount = encodeZoomCap(caps.xMaxWidthZoom,
+ mZoomStages,
+ ARRAY_SIZE(mZoomStages),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ params->set(CameraProperties::SUPPORTED_ZOOM_RATIOS, supported);
+ params->set(CameraProperties::SUPPORTED_ZOOM_STAGES, zoomStageCount - 1); //As per CTS requirement
+
+ if ( 0 == zoomStageCount ) {
+ params->set(CameraProperties::ZOOM_SUPPORTED, TICameraParameters::ZOOM_UNSUPPORTED);
+ params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, TICameraParameters::ZOOM_UNSUPPORTED);
+ } else {
+ params->set(CameraProperties::ZOOM_SUPPORTED, TICameraParameters::ZOOM_SUPPORTED);
+ params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, TICameraParameters::ZOOM_SUPPORTED);
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ for ( int i = 0 ; i < caps.ulImageFormatCount ; i++ ) {
+ ret = encodePixelformatCap(caps.eImageFormats[i],
+ mPixelformats,
+ ARRAY_SIZE(mPixelformats),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported picture formats 0x%x", ret);
+ break;
+ }
+ }
+
+ if ( NO_ERROR == ret ) {
+        //JPEG is not supported in the OMX capabilities yet
+ strncat(supported, CameraParameters::PIXEL_FORMAT_JPEG, MAX_PROP_VALUE_LENGTH - 1);
+ params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, supported);
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ for ( int i = 0 ; i < caps.ulPreviewFormatCount; i++ ) {
+ ret = encodePixelformatCap(caps.ePreviewFormats[i],
+ mPixelformats,
+ ARRAY_SIZE(mPixelformats),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported preview formats 0x%x", ret);
+ break;
+ }
+ }
+
+ if ( NO_ERROR == ret ) {
+ // need to advertise we support YV12 format
+ // We will program preview port with NV21 when we see application set YV12
+ strncat(supported, CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported);
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ ret = encodeFramerateCap(caps.xFramerateMax >> VFR_OFFSET,
+ caps.xFramerateMin >> VFR_OFFSET,
+ mFramerates,
+ ARRAY_SIZE(mFramerates),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported preview framerates 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, supported);
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertVFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ char defaultRange[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+    // Initialize the default range as well: encodeVFramerateCap() may return without writing it.
+    memset(defaultRange, '\0', MAX_PROP_VALUE_LENGTH);
+
+ ret = encodeVFramerateCap(caps, supported, defaultRange, MAX_PROP_VALUE_LENGTH);
+
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported preview framerate ranges 0x%x", ret);
+ } else {
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported);
+ CAMHAL_LOGDB("framerate ranges %s", supported);
+ params->set(CameraProperties::FRAMERATE_RANGE, defaultRange);
+ CAMHAL_LOGDB("Default framerate range: [%s]", defaultRange);
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertEVs(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.xEVCompensationMin * 10 ));
+ params->set(CameraProperties::SUPPORTED_EV_MIN, supported);
+
+ snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.xEVCompensationMax * 10 ));
+ params->set(CameraProperties::SUPPORTED_EV_MAX, supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ ret = encodeISOCap(caps.nSensitivityMax,
+ mISOStages,
+ ARRAY_SIZE(mISOStages),
+ supported,
+ MAX_PROP_VALUE_LENGTH);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("Error inserting supported ISO modes 0x%x", ret);
+ } else {
+ params->set(CameraProperties::SUPPORTED_ISO_VALUES, supported);
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertIPPModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ //Off is always supported
+ strncat(supported, TICameraParameters::IPP_NONE, MAX_PROP_NAME_LENGTH);
+ strncat(supported, PARAM_SEP, 1);
+
+ if ( caps.bLensDistortionCorrectionSupported ) {
+ strncat(supported, TICameraParameters::IPP_LDC, MAX_PROP_NAME_LENGTH);
+ strncat(supported, PARAM_SEP, 1);
+ }
+
+ if ( caps.bISONoiseFilterSupported ) {
+ strncat(supported, TICameraParameters::IPP_NSF, MAX_PROP_NAME_LENGTH);
+ strncat(supported, PARAM_SEP, 1);
+ }
+
+ if ( caps.bISONoiseFilterSupported && caps.bLensDistortionCorrectionSupported ) {
+ strncat(supported, TICameraParameters::IPP_LDCNSF, MAX_PROP_NAME_LENGTH);
+ strncat(supported, PARAM_SEP, 1);
+ }
+
+ remove_last_sep(supported);
+ params->set(CameraProperties::SUPPORTED_IPP_MODES, supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertWBModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ for ( unsigned int i = 0 ; i < caps.ulWhiteBalanceCount ; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eWhiteBalanceModes[i], WBalLUT);
+ if ( NULL != p ) {
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ strncat(supported, PARAM_SEP, 1);
+ }
+ }
+
+ //These modes are not supported by the capability feature
+ strncat(supported, TICameraParameters::WHITE_BALANCE_FACE, MAX_PROP_NAME_LENGTH);
+
+ params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertEffects(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ for ( unsigned int i = 0 ; i < caps.ulColorEffectCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eColorEffects[i], EffLUT);
+ if ( NULL != p ) {
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ strncat(supported, PARAM_SEP, 1);
+ }
+ }
+ remove_last_sep(supported);
+ params->set(CameraProperties::SUPPORTED_EFFECTS, supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertExpModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ for ( unsigned int i = 0 ; i < caps.ulExposureModeCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eExposureModes[i], ExpLUT);
+ if ( NULL != p ) {
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ strncat(supported, PARAM_SEP, 1);
+ }
+ }
+
+ //These modes are not supported by the capability feature
+ strncat(supported, TICameraParameters::EXPOSURE_MODE_FACE, MAX_PROP_NAME_LENGTH);
+
+ params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertFlashModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ for ( unsigned int i = 0 ; i < caps.ulFlashCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eFlashModes[i], FlashLUT);
+ if ( NULL != p ) {
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ strncat(supported, PARAM_SEP, 1);
+ }
+ }
+
+ remove_last_sep(supported);
+ params->set(CameraProperties::SUPPORTED_FLASH_MODES, supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertSceneModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ for ( unsigned int i = 0 ; i < caps.ulSceneCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eSceneModes[i], SceneLUT);
+ if ( NULL != p ) {
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ strncat(supported, PARAM_SEP, 1);
+ }
+ }
+
+ remove_last_sep(supported);
+ params->set(CameraProperties::SUPPORTED_SCENE_MODES, supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ for ( unsigned int i = 0 ; i < caps.ulFocusModeCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eFocusModes[i], FocusLUT);
+ if ( NULL != p ) {
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ strncat(supported, PARAM_SEP, 1);
+ }
+ }
+
+ // Check if focus is supported by camera
+ if (caps.ulFocusModeCount == 1 &&
+ caps.eFocusModes[0] == OMX_IMAGE_FocusControlOff) {
+ // Focus is not supported by camera
+        // Advertise this to the app as infinity focus mode
+ strncat(supported, CameraParameters::FOCUS_MODE_INFINITY, MAX_PROP_NAME_LENGTH);
+ } else {
+ // Focus is supported but these modes are not supported by the
+ // capability feature. Apply manually
+ strncat(supported, TICameraParameters::FOCUS_MODE_FACE, MAX_PROP_NAME_LENGTH);
+ }
+
+ params->set(CameraProperties::SUPPORTED_FOCUS_MODES, supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertFlickerModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ for ( unsigned int i = 0 ; i < caps.ulFlickerCount; i++ ) {
+ p = getLUTvalue_OMXtoHAL(caps.eFlicker[i], FlickerLUT);
+ if ( NULL != p ) {
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ strncat(supported, PARAM_SEP, 1);
+ }
+ }
+ remove_last_sep(supported);
+ params->set(CameraProperties::SUPPORTED_ANTIBANDING, supported);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertLocks(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+ params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, DEFAULT_LOCK_SUPPORTED);
+ params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, DEFAULT_LOCK_SUPPORTED);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+
+ LOG_FUNCTION_NAME;
+
+ params->set(CameraProperties::ANTIBANDING, DEFAULT_ANTIBANDING);
+ params->set(CameraProperties::BRIGHTNESS, DEFAULT_BRIGHTNESS);
+ params->set(CameraProperties::CONTRAST, DEFAULT_CONTRAST);
+ params->set(CameraProperties::EFFECT, DEFAULT_EFFECT);
+ params->set(CameraProperties::EV_COMPENSATION, DEFAULT_EV_COMPENSATION);
+ params->set(CameraProperties::SUPPORTED_EV_STEP, DEFAULT_EV_STEP);
+ params->set(CameraProperties::EXPOSURE_MODE, DEFAULT_EXPOSURE_MODE);
+ params->set(CameraProperties::FLASH_MODE, DEFAULT_FLASH_MODE);
+ params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE);
+ params->set(CameraProperties::IPP, DEFAULT_IPP);
+ params->set(CameraProperties::ISO_MODE, DEFAULT_ISO_MODE);
+ params->set(CameraProperties::JPEG_QUALITY, DEFAULT_JPEG_QUALITY);
+ params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, DEFAULT_THUMBNAIL_QUALITY);
+ params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, DEFAULT_THUMBNAIL_SIZE);
+ params->set(CameraProperties::PICTURE_FORMAT, DEFAULT_PICTURE_FORMAT);
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+ params->set(CameraProperties::PREVIEW_FORMAT, DEFAULT_PREVIEW_FORMAT);
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, DEFAULT_FRAMERATE);
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+ params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS);
+ params->set(CameraProperties::REQUIRED_IMAGE_BUFS, DEFAULT_NUM_PIC_BUFS);
+ params->set(CameraProperties::MAX_FOCUS_AREAS, DEFAULT_MAX_FOCUS_AREAS);
+ params->set(CameraProperties::SATURATION, DEFAULT_SATURATION);
+ params->set(CameraProperties::SCENE_MODE, DEFAULT_SCENE_MODE);
+ params->set(CameraProperties::SHARPNESS, DEFAULT_SHARPNESS);
+ params->set(CameraProperties::VSTAB, DEFAULT_VSTAB);
+ params->set(CameraProperties::WHITEBALANCE, DEFAULT_WB);
+ params->set(CameraProperties::ZOOM, DEFAULT_ZOOM);
+ params->set(CameraProperties::MAX_FD_HW_FACES, DEFAULT_MAX_FD_HW_FACES);
+ params->set(CameraProperties::MAX_FD_SW_FACES, DEFAULT_MAX_FD_SW_FACES);
+ params->set(CameraProperties::AUTO_EXPOSURE_LOCK, DEFAULT_AE_LOCK);
+ params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK, DEFAULT_AWB_LOCK);
+ params->set(CameraProperties::MAX_NUM_METERING_AREAS, DEFAULT_MAX_NUM_METERING_AREAS);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertSenMount(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ const char *p;
+ int i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+
+ // 1) Look up and assign sensor name
+ for (i = 0; i < ARRAY_SIZE(mSensorNames); i++) {
+ if(mSensorNames[i].num == caps.tSenMounting.nSenId) {
+ // sensor found
+ break;
+ }
+ }
+ if ( i == ARRAY_SIZE(mSensorNames) ) {
+ p = "UNKNOWN_SENSOR";
+ } else {
+ p = mSensorNames[i].param;
+ }
+ strncat(supported, p, MAX_PROP_NAME_LENGTH);
+ params->set(CameraProperties::CAMERA_NAME, supported);
+
+ // 2) Assign mounting rotation
+ params->set(CameraProperties::ORIENTATION_INDEX, caps.tSenMounting.nRotation);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
+ status_t ret = NO_ERROR;
+ char supported[MAX_PROP_VALUE_LENGTH];
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret ) {
+ ret = insertImageSizes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertPreviewSizes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertThumbSizes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertZoomStages(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertImageFormats(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertPreviewFormats(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertFramerates(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertVFramerates(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertEVs(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertISOModes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertIPPModes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertWBModes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertEffects(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertExpModes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertFlashModes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertSceneModes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertFocusModes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertFlickerModes(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertSenMount(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertLocks(params, caps);
+ }
+
+ if ( NO_ERROR == ret ) {
+ ret = insertDefaults(params, caps);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/*****************************************
+ * publicly exposed functions
+ *****************************************/
+
+status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HANDLETYPE handle) {
+ status_t ret = NO_ERROR;
+ int caps_size = 0;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CAPTYPE** caps = NULL;
+ OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
+ MemoryManager memMgr;
+
+ LOG_FUNCTION_NAME;
+
+ // allocate tiler (or ion) buffer for caps
+ caps_size = sizeof(OMX_TI_CAPTYPE);
+ caps = (OMX_TI_CAPTYPE**) memMgr.allocateBuffer(0, 0, NULL, caps_size, 1);
+
+ if (!caps) {
+ CAMHAL_LOGEB("Error allocating buffer for caps %d", eError);
+ ret = -ENOMEM;
+ goto EXIT;
+ }
+
+ // initialize structures to be passed to OMX Camera
+ OMX_INIT_STRUCT_PTR (caps[0], OMX_TI_CAPTYPE);
+ caps[0]->nPortIndex = OMX_ALL;
+
+ OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
+ sharedBuffer.nPortIndex = OMX_ALL;
+ sharedBuffer.nSharedBuffSize = caps_size;
+ sharedBuffer.pSharedBuff = (OMX_U8 *) caps[0];
+
+ // Get capabilities from OMX Camera
+ eError = OMX_GetConfig(handle, (OMX_INDEXTYPE) OMX_TI_IndexConfigCamCapabilities, &sharedBuffer);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error during capabilities query 0x%x", eError);
+ ret = UNKNOWN_ERROR;
+ goto EXIT;
+ } else {
+ CAMHAL_LOGDA("OMX capability query success");
+ }
+
+ // Translate and insert Ducati capabilities to CameraProperties
+ if ( NO_ERROR == ret ) {
+ ret = insertCapabilities(params, *caps[0]);
+ }
+
+ CAMHAL_LOGDB("sen mount id=%u", (unsigned int)caps[0]->tSenMounting.nSenId);
+
+
+ EXIT:
+ if (caps) {
+ memMgr.freeBuffer((void*) caps);
+ caps = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+};
+
diff --git a/camera/OMXCameraAdapter/OMXCapture.cpp b/camera/OMXCameraAdapter/OMXCapture.cpp
new file mode 100644
index 0000000..42fb384
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXCapture.cpp
@@ -0,0 +1,1066 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXCapture.cpp
+*
+* This file contains functionality for handling image capture.
+*
+*/
+
+#undef LOG_TAG
+
+#define LOG_TAG "CameraHAL"
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+namespace android {
+
+status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+ const char *str = NULL;
+ int w, h;
+ OMX_COLOR_FORMATTYPE pixFormat;
+ const char *valstr = NULL;
+ bool updateImagePortParams = false;
+
+ LOG_FUNCTION_NAME;
+
+ OMXCameraPortParameters *cap;
+ cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+ params.getPictureSize(&w, &h);
+
+ if ( ( w != ( int ) cap->mWidth ) ||
+ ( h != ( int ) cap->mHeight ) )
+ {
+ updateImagePortParams = true;
+ }
+
+ cap->mWidth = w;
+ cap->mHeight = h;
+ //TODO: Support more pixelformats
+ cap->mStride = 2;
+
+ CAMHAL_LOGVB("Image: cap.mWidth = %d", (int)cap->mWidth);
+ CAMHAL_LOGVB("Image: cap.mHeight = %d", (int)cap->mHeight);
+
+ if ( (valstr = params.getPictureFormat()) != NULL )
+ {
+ if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ {
+ CAMHAL_LOGDA("CbYCrY format selected");
+ pixFormat = OMX_COLOR_FormatCbYCrY;
+ }
+ else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
+ {
+ CAMHAL_LOGDA("YUV420SP format selected");
+ pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+ else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ {
+ CAMHAL_LOGDA("RGB565 format selected");
+ pixFormat = OMX_COLOR_Format16bitRGB565;
+ }
+ else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_JPEG) == 0)
+ {
+ CAMHAL_LOGDA("JPEG format selected");
+ pixFormat = OMX_COLOR_FormatUnused;
+ mCodingMode = CodingNone;
+ }
+ else if(strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_JPS) == 0)
+ {
+ CAMHAL_LOGDA("JPS format selected");
+ pixFormat = OMX_COLOR_FormatUnused;
+ mCodingMode = CodingJPS;
+ }
+ else if(strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_MPO) == 0)
+ {
+ CAMHAL_LOGDA("MPO format selected");
+ pixFormat = OMX_COLOR_FormatUnused;
+ mCodingMode = CodingMPO;
+ }
+ else if(strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_RAW_JPEG) == 0)
+ {
+ CAMHAL_LOGDA("RAW + JPEG format selected");
+ pixFormat = OMX_COLOR_FormatUnused;
+ mCodingMode = CodingRAWJPEG;
+ }
+ else if(strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_RAW_MPO) == 0)
+ {
+ CAMHAL_LOGDA("RAW + MPO format selected");
+ pixFormat = OMX_COLOR_FormatUnused;
+ mCodingMode = CodingRAWMPO;
+ }
+ else if(strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_RAW) == 0)
+ {
+ CAMHAL_LOGDA("RAW Picture format selected");
+ pixFormat = OMX_COLOR_FormatRawBayer10bit;
+ }
+ else
+ {
+ CAMHAL_LOGEA("Invalid format, JPEG format selected as default");
+ pixFormat = OMX_COLOR_FormatUnused;
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEA("Picture format is NULL, defaulting to JPEG");
+ pixFormat = OMX_COLOR_FormatUnused;
+ }
+
+ if ( pixFormat != cap->mColorFormat )
+ {
+ updateImagePortParams = true;
+ cap->mColorFormat = pixFormat;
+ }
+
+ if ( updateImagePortParams )
+ {
+ if ( ( CAPTURE_ACTIVE & state ) != CAPTURE_ACTIVE )
+ {
+ setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *cap);
+ }
+ }
+
+ str = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
+ if ( NULL != str ) {
+ parseExpRange(str, mExposureBracketingValues, EXP_BRACKET_RANGE, mExposureBracketingValidEntries);
+ } else {
+ mExposureBracketingValidEntries = 0;
+ }
+
+ if ( params.getInt(CameraParameters::KEY_ROTATION) != -1 )
+ {
+ mPictureRotation = params.getInt(CameraParameters::KEY_ROTATION);
+ }
+ else
+ {
+ mPictureRotation = 0;
+ }
+
+ CAMHAL_LOGVB("Picture Rotation set %d", mPictureRotation);
+
+ // Read sensor orientation and set it based on operating mode
+
+ if (( params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION) != -1 ) && (mCapMode == OMXCameraAdapter::VIDEO_MODE))
+ {
+ mSensorOrientation = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
+ if ( ( mSensorOrientation == 270 ) || ( mSensorOrientation == 90 ) )
+ {
+ CAMHAL_LOGEA("Orientation is 270/90, applying counter rotation for Ducati");
+ mSensorOrientation += 180;
+ mSensorOrientation %= 360;
+ }
+ }
+ else
+ {
+ mSensorOrientation = 0;
+ }
+
+ CAMHAL_LOGVB("Sensor Orientation set : %d", mSensorOrientation);
+
+ if ( params.getInt(TICameraParameters::KEY_BURST) >= 1 )
+ {
+ mBurstFrames = params.getInt(TICameraParameters::KEY_BURST);
+ }
+ else
+ {
+ mBurstFrames = 1;
+ }
+
+ CAMHAL_LOGVB("Burst Frames set %d", mBurstFrames);
+
+ if ( ( params.getInt(CameraParameters::KEY_JPEG_QUALITY) >= MIN_JPEG_QUALITY ) &&
+ ( params.getInt(CameraParameters::KEY_JPEG_QUALITY) <= MAX_JPEG_QUALITY ) )
+ {
+ mPictureQuality = params.getInt(CameraParameters::KEY_JPEG_QUALITY);
+ }
+ else
+ {
+ mPictureQuality = MAX_JPEG_QUALITY;
+ }
+
+ CAMHAL_LOGVB("Picture Quality set %d", mPictureQuality);
+
+ if ( params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH) >= 0 )
+ {
+ mThumbWidth = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ }
+ else
+ {
+ mThumbWidth = DEFAULT_THUMB_WIDTH;
+ }
+
+
+ CAMHAL_LOGVB("Picture Thumb width set %d", mThumbWidth);
+
+ if ( params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT) >= 0 )
+ {
+ mThumbHeight = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ }
+ else
+ {
+ mThumbHeight = DEFAULT_THUMB_HEIGHT;
+ }
+
+
+ CAMHAL_LOGVB("Picture Thumb height set %d", mThumbHeight);
+
+ if ( ( params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) >= MIN_JPEG_QUALITY ) &&
+ ( params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) <= MAX_JPEG_QUALITY ) )
+ {
+ mThumbQuality = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ }
+ else
+ {
+ mThumbQuality = MAX_JPEG_QUALITY;
+ }
+
+ CAMHAL_LOGDB("Thumbnail Quality set %d", mThumbQuality);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
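+
+// Queries the buffer size required for image capture: the requested buffer
+// count and the currently configured capture format are applied to the image
+// port via setFormat(), and the resulting port buffer size is returned.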
+status_t OMXCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+ OMXCameraPortParameters *imgCaptureData = NULL;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret )
+ {
+ imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+ imgCaptureData->mNumBufs = bufferCount;
+ ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *imgCaptureData);
+ if ( ret == NO_ERROR )
+ {
+ length = imgCaptureData->mBufSize;
+ }
+ else
+ {
+ CAMHAL_LOGEB("setFormat() failed 0x%x", ret);
+ length = 0;
+ }
+ }
+
+ CAMHAL_LOGDB("getPictureBufferSize %d", length);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::parseExpRange(const char *rangeStr,
+ int * expRange,
+ size_t count,
+ size_t &validEntries)
+{
+ status_t ret = NO_ERROR;
+ char *ctx, *expVal;
+ char *tmp = NULL;
+ size_t i = 0;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == rangeStr )
+ {
+ return -EINVAL;
+ }
+
+ if ( NULL == expRange )
+ {
+ return -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ tmp = ( char * ) malloc( strlen(rangeStr) + 1 );
+
+ if ( NULL == tmp )
+ {
+ CAMHAL_LOGEA("No resources for temporary buffer");
+ return -1;
+ }
+ memset(tmp, '\0', strlen(rangeStr) + 1);
+
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ strncpy(tmp, rangeStr, strlen(rangeStr) );
+ expVal = strtok_r( (char *) tmp, CameraHal::PARAMS_DELIMITER, &ctx);
+
+ i = 0;
+ while ( ( NULL != expVal ) && ( i < count ) )
+ {
+ expRange[i] = atoi(expVal);
+ expVal = strtok_r(NULL, CameraHal::PARAMS_DELIMITER, &ctx);
+ i++;
+ }
+ validEntries = i;
+ }
+
+ if ( NULL != tmp )
+ {
+ free(tmp);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
+ size_t evCount,
+ size_t frameCount)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_CAPTUREMODETYPE expCapMode;
+ OMX_CONFIG_EXTCAPTUREMODETYPE extExpCapMode;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NULL == evValues )
+ {
+ CAMHAL_LOGEA("Exposure compensation values pointer is invalid");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&expCapMode, OMX_CONFIG_CAPTUREMODETYPE);
+ expCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ /// If frameCount > 0 but evCount <= 0, this is the HQ burst case;
+ /// otherwise it is a normal HQ capture.
+ /// If frameCount > 0 and evCount > 0, this is the HQ exposure bracketing case.
+ if ( 0 == evCount && 0 == frameCount )
+ {
+ expCapMode.bFrameLimited = OMX_FALSE;
+ }
+ else
+ {
+ expCapMode.bFrameLimited = OMX_TRUE;
+ expCapMode.nFrameLimit = frameCount;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCaptureMode,
+ &expCapMode);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring capture mode 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Camera capture mode configured successfully");
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&extExpCapMode, OMX_CONFIG_EXTCAPTUREMODETYPE);
+ extExpCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ if ( 0 == evCount )
+ {
+ extExpCapMode.bEnableBracketing = OMX_FALSE;
+ }
+ else
+ {
+ extExpCapMode.bEnableBracketing = OMX_TRUE;
+ extExpCapMode.tBracketConfigType.eBracketMode = OMX_BracketExposureRelativeInEV;
+ extExpCapMode.tBracketConfigType.nNbrBracketingValues = evCount - 1;
+ }
+
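+ // Convert each bracketing value into the Q16 fixed-point format expected
+ // by the component; the division by 10 suggests the incoming values are
+ // expressed in tenths of an EV.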
+ for ( unsigned int i = 0 ; i < evCount ; i++ )
+ {
+ extExpCapMode.tBracketConfigType.nBracketValues[i] = ( evValues[i] * ( 1 << Q16_OFFSET ) ) / 10;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigExtCaptureMode,
+ &extExpCapMode);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring extended capture mode 0x%x", eError);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Extended camera capture mode configured successfully");
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setShutterCallback(bool enabled)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_CALLBACKREQUESTTYPE shutterRequstCallback;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateExecuting != mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component not in executing state");
+ ret = -1;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ OMX_INIT_STRUCT_PTR (&shutterRequstCallback, OMX_CONFIG_CALLBACKREQUESTTYPE);
+ shutterRequstCallback.nPortIndex = OMX_ALL;
+
+ if ( enabled )
+ {
+ shutterRequstCallback.bEnable = OMX_TRUE;
+ shutterRequstCallback.nIndex = ( OMX_INDEXTYPE ) OMX_TI_IndexConfigShutterCallback;
+ CAMHAL_LOGDA("Enabling shutter callback");
+ }
+ else
+ {
+ shutterRequstCallback.bEnable = OMX_FALSE;
+ shutterRequstCallback.nIndex = ( OMX_INDEXTYPE ) OMX_TI_IndexConfigShutterCallback;
+ CAMHAL_LOGDA("Disabling shutter callback");
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigCallbackRequest,
+ &shutterRequstCallback);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error registering shutter callback 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDB("Shutter callback for index 0x%x registered successfully",
+ OMX_TI_IndexConfigShutterCallback);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
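+
+// Handles one returned frame while temporal bracketing is active: the buffer
+// that just arrived is marked as no longer queued, and once no queued buffers
+// remain the next buffer in the ring is re-queued to the component so that it
+// always holds the most recent frames.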
+status_t OMXCameraAdapter::doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader,
+ CameraFrame::FrameType typeOfFrame)
+{
+ status_t ret = NO_ERROR;
+ int currentBufferIdx, nextBufferIdx;
+ OMXCameraPortParameters * imgCaptureData = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+ if ( OMX_StateExecuting != mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is not in executing state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ currentBufferIdx = ( unsigned int ) pBuffHeader->pAppPrivate;
+
+ if ( currentBufferIdx >= imgCaptureData->mNumBufs)
+ {
+ CAMHAL_LOGEB("Invalid bracketing buffer index 0x%x", currentBufferIdx);
+ ret = -EINVAL;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mBracketingBuffersQueued[currentBufferIdx] = false;
+ mBracketingBuffersQueuedCount--;
+
+ if ( 0 >= mBracketingBuffersQueuedCount )
+ {
+ nextBufferIdx = ( currentBufferIdx + 1 ) % imgCaptureData->mNumBufs;
+ mBracketingBuffersQueued[nextBufferIdx] = true;
+ mBracketingBuffersQueuedCount++;
+ mLastBracetingBufferIdx = nextBufferIdx;
+ setFrameRefCount(imgCaptureData->mBufferHeader[nextBufferIdx]->pBuffer, typeOfFrame, 1);
+ returnFrame(imgCaptureData->mBufferHeader[nextBufferIdx]->pBuffer, typeOfFrame);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
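+
+// Sends all frames currently held in the bracketing ring (the buffers that
+// are not queued back to the component) to the frame subscribers, starting
+// with the oldest frame after mLastBracetingBufferIdx.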
+status_t OMXCameraAdapter::sendBracketFrames()
+{
+ status_t ret = NO_ERROR;
+ int currentBufferIdx;
+ OMXCameraPortParameters * imgCaptureData = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+ if ( OMX_StateExecuting != mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is not in executing state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ currentBufferIdx = mLastBracetingBufferIdx;
+ do
+ {
+ currentBufferIdx++;
+ currentBufferIdx %= imgCaptureData->mNumBufs;
+ if (!mBracketingBuffersQueued[currentBufferIdx] )
+ {
+ CameraFrame cameraFrame;
+ initCameraFrame(cameraFrame,
+ imgCaptureData->mBufferHeader[currentBufferIdx],
+ imgCaptureData->mImageType,
+ imgCaptureData);
+ sendFrame(cameraFrame);
+ }
+ } while ( currentBufferIdx != mLastBracetingBufferIdx );
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
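+
+// Enables temporal bracketing: all image buffers are initially marked as
+// queued, image capture is started, and frames then circulate through
+// doBracketing() until a capture is actually requested.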
+status_t OMXCameraAdapter::startBracketing(int range)
+{
+ status_t ret = NO_ERROR;
+ OMXCameraPortParameters * imgCaptureData = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+ if ( OMX_StateExecuting != mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is not in executing state");
+ ret = -EINVAL;
+ }
+
+ {
+ Mutex::Autolock lock(mBracketingLock);
+
+ if ( mBracketingEnabled )
+ {
+ return ret;
+ }
+ }
+
+ if ( 0 == imgCaptureData->mNumBufs )
+ {
+ CAMHAL_LOGEB("Image capture buffers set to %d", imgCaptureData->mNumBufs);
+ ret = -EINVAL;
+ }
+
+ if ( mPending3Asettings )
+ apply3Asettings(mParameters3A);
+
+ if ( NO_ERROR == ret )
+ {
+ Mutex::Autolock lock(mBracketingLock);
+
+ mBracketingRange = range;
+ mBracketingBuffersQueued = new bool[imgCaptureData->mNumBufs];
+ if ( NULL == mBracketingBuffersQueued )
+ {
+ CAMHAL_LOGEA("Unable to allocate bracketing management structures");
+ ret = -1;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mBracketingBuffersQueuedCount = imgCaptureData->mNumBufs;
+ mLastBracetingBufferIdx = mBracketingBuffersQueuedCount - 1;
+
+ for ( int i = 0 ; i < imgCaptureData->mNumBufs ; i++ )
+ {
+ mBracketingBuffersQueued[i] = true;
+ }
+
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ ret = startImageCapture();
+ {
+ Mutex::Autolock lock(mBracketingLock);
+
+ if ( NO_ERROR == ret )
+ {
+ mBracketingEnabled = true;
+ }
+ else
+ {
+ mBracketingEnabled = false;
+ }
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::stopBracketing()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mBracketingLock);
+
+ if ( mBracketingEnabled )
+ {
+
+ if ( NULL != mBracketingBuffersQueued )
+ {
+ delete [] mBracketingBuffersQueued;
+ }
+
+ ret = stopImageCapture();
+
+ mBracketingBuffersQueued = NULL;
+ mBracketingEnabled = false;
+ mBracketingBuffersQueuedCount = 0;
+ mLastBracetingBufferIdx = 0;
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::startImageCapture()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMXCameraPortParameters * capData = NULL;
+ OMX_CONFIG_BOOLEANTYPE bOMX;
+
+ LOG_FUNCTION_NAME;
+
+ if(!mCaptureConfigured)
+ {
+ ///Image capture was cancelled before we could start
+ return NO_ERROR;
+ }
+
+ if ( 0 != mStartCaptureSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mStartCaptureSem semaphore count %d", mStartCaptureSem.Count());
+ return NO_INIT;
+ }
+
+ //During bracketing image capture is already active
+ {
+ Mutex::Autolock lock(mBracketingLock);
+ if ( mBracketingEnabled )
+ {
+ //Stop bracketing, activate normal burst for the remaining images
+ mBracketingEnabled = false;
+ mCapturedFrames = mBracketingRange;
+ ret = sendBracketFrames();
+ goto EXIT;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = setPictureRotation(mPictureRotation);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error configuring image rotation %x", ret);
+ }
+ }
+
+ //OMX shutter callback events are only available in hq mode
+ if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode))
+ {
+
+ if ( NO_ERROR == ret )
+ {
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ (OMX_EVENTTYPE) OMX_EventIndexSettingChanged,
+ OMX_ALL,
+ OMX_TI_IndexConfigShutterCallback,
+ mStartCaptureSem);
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = setShutterCallback(true);
+ }
+
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+ ///Queue all the buffers on capture port
+ for ( int index = 0 ; index < capData->mNumBufs ; index++ )
+ {
+ CAMHAL_LOGDB("Queuing buffer on Capture port - 0x%x",
+ ( unsigned int ) capData->mBufferHeader[index]->pBuffer);
+ eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
+ (OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+
+ mWaitingForSnapshot = true;
+ mCaptureSignalled = false;
+
+ OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
+ bOMX.bEnabled = OMX_TRUE;
+
+ /// sending Capturing Command to the component
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCapturing,
+ &bOMX);
+
+ CAMHAL_LOGDB("Capture set - 0x%x", eError);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+
+ }
+
+ //OMX shutter callback events are only available in hq mode
+ if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode))
+ {
+
+ if ( NO_ERROR == ret )
+ {
+ ret = mStartCaptureSem.WaitTimeout(OMX_CAPTURE_TIMEOUT);
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDA("Shutter callback received");
+ notifyShutterSubscribers();
+ }
+ else
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ (OMX_EVENTTYPE) OMX_EventIndexSettingChanged,
+ OMX_ALL,
+ OMX_TI_IndexConfigShutterCallback,
+ NULL);
+ CAMHAL_LOGEA("Timeout expired on shutter callback");
+ goto EXIT;
+ }
+
+ }
+
+ EXIT:
+
+ if ( eError != OMX_ErrorNone )
+ {
+
+ mWaitingForSnapshot = false;
+ mCaptureSignalled = false;
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
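+
+// Stops an ongoing image capture: the shutter callback is disabled, any 3A
+// locks are released, capture completion is awaited (with a timeout),
+// capturing is turned off, and the image port is disabled and its buffers
+// freed.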
+status_t OMXCameraAdapter::stopImageCapture()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError;
+ OMX_CONFIG_BOOLEANTYPE bOMX;
+ OMXCameraPortParameters *imgCaptureData = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if (!mCaptureConfigured) {
+ //Capture is not ongoing, return from here
+ return NO_ERROR;
+ }
+
+ if ( 0 != mStopCaptureSem.Count() ) {
+ CAMHAL_LOGEB("Error mStopCaptureSem semaphore count %d", mStopCaptureSem.Count());
+ goto EXIT;
+ }
+
+ //Disable the callback first
+ mWaitingForSnapshot = false;
+ mSnapshotCount = 0;
+
+ //Disable the callback first
+ ret = setShutterCallback(false);
+
+ //release any 3A locks if locked
+ ret = release3ALock();
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("Error Releaseing 3A locks%d", ret);
+ }
+
+ //Wait here for the capture to be done, in worst case timeout and proceed with cleanup
+ ret = mCaptureSem.WaitTimeout(OMX_CAPTURE_TIMEOUT);
+ if ( NO_ERROR != ret ) {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ (OMX_EVENTTYPE) OMX_EventIndexSettingChanged,
+ OMX_ALL,
+ OMX_TI_IndexConfigShutterCallback,
+ NULL);
+ CAMHAL_LOGEA("Timeout expired on shutter callback");
+ }
+
+ //Disable image capture
+ OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
+ bOMX.bEnabled = OMX_FALSE;
+ imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCapturing,
+ &bOMX);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGDB("Error during SetConfig- 0x%x", eError);
+ ret = -1;
+ }
+
+ CAMHAL_LOGDB("Capture set - 0x%x", eError);
+
+ mCaptureSignalled = true; //set this to true if we exited because of timeout
+
+ mCaptureConfigured = false;
+
+ ///Register for Image port Disable event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mImagePortIndex,
+ mStopCaptureSem);
+ ///Disable Capture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mImagePortIndex,
+ NULL);
+
+ ///Free all the buffers on capture port
+ if (imgCaptureData) {
+ CAMHAL_LOGDB("Freeing buffer on Capture port - %d", imgCaptureData->mNumBufs);
+ for ( int index = 0 ; index < imgCaptureData->mNumBufs ; index++) {
+ CAMHAL_LOGDB("Freeing buffer on Capture port - 0x%x",
+ ( unsigned int ) imgCaptureData->mBufferHeader[index]->pBuffer);
+ eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp,
+ mCameraAdapterParameters.mImagePortIndex,
+ (OMX_BUFFERHEADERTYPE*)imgCaptureData->mBufferHeader[index]);
+
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+ }
+ }
+ CAMHAL_LOGDA("Waiting for port disable");
+ //Wait for the image port disable event
+ ret = mStopCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( NO_ERROR == ret ) {
+ CAMHAL_LOGDA("Port disabled");
+ } else {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortDisable,
+ mCameraAdapterParameters.mImagePortIndex,
+ NULL);
+ CAMHAL_LOGDA("Timeout expired on port disable");
+ goto EXIT;
+ }
+
+ EXIT:
+
+ //Release image buffers
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
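+
+// Registers the buffers supplied by the HAL with the OMX image port: capture
+// format, thumbnail, exposure bracketing and JPEG quality settings are
+// applied first, then the port is enabled and each buffer is wrapped in an
+// OMX buffer header via OMX_UseBuffer().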
+status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError;
+ OMXCameraPortParameters * imgCaptureData = NULL;
+ uint32_t *buffers = (uint32_t*)bufArr;
+ OMXCameraPortParameters cap;
+
+ imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+ if ( 0 != mUseCaptureSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mUseCaptureSem semaphore count %d", mUseCaptureSem.Count());
+ goto EXIT;
+ }
+
+ imgCaptureData->mNumBufs = num;
+
+ //TODO: Support more pixelformats
+
+ CAMHAL_LOGDB("Params Width = %d", (int)imgCaptureData->mWidth);
+ CAMHAL_LOGDB("Params Height = %d", (int)imgCaptureData->mWidth);
+
+ ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *imgCaptureData);
+ if ( ret != NO_ERROR )
+ {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+ ret = setThumbnailParams(mThumbWidth, mThumbHeight, mThumbQuality);
+ if ( NO_ERROR != ret)
+ {
+ CAMHAL_LOGEB("Error configuring thumbnail size %x", ret);
+ return ret;
+ }
+
+ ret = setExposureBracketing( mExposureBracketingValues,
+ mExposureBracketingValidEntries, mBurstFrames);
+ if ( ret != NO_ERROR )
+ {
+ CAMHAL_LOGEB("setExposureBracketing() failed %d", ret);
+ return ret;
+ }
+
+ ret = setImageQuality(mPictureQuality);
+ if ( NO_ERROR != ret)
+ {
+ CAMHAL_LOGEB("Error configuring image quality %x", ret);
+ return ret;
+ }
+
+ ///Register for Image port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mImagePortIndex,
+ mUseCaptureSem);
+
+ ///Enable Capture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mImagePortIndex,
+ NULL);
+
+ for ( int index = 0 ; index < imgCaptureData->mNumBufs ; index++ )
+ {
+ OMX_BUFFERHEADERTYPE *pBufferHdr;
+ CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d",
+ (unsigned int)buffers[index],
+ (int)imgCaptureData->mBufSize);
+
+ eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp,
+ &pBufferHdr,
+ mCameraAdapterParameters.mImagePortIndex,
+ 0,
+ mCaptureBuffersLength,
+ (OMX_U8*)buffers[index]);
+
+ CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
+
+ GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+ pBufferHdr->pAppPrivate = (OMX_PTR) index;
+ pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufferHdr->nVersion.s.nRevision = 0;
+ pBufferHdr->nVersion.s.nStep = 0;
+ imgCaptureData->mBufferHeader[index] = pBufferHdr;
+ }
+
+ //Wait for the image port enable event
+ CAMHAL_LOGDA("Waiting for port enable");
+ ret = mUseCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if ( ret == NO_ERROR )
+ {
+ CAMHAL_LOGDA("Port enabled");
+ }
+ else
+ {
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mImagePortIndex,
+ NULL);
+ CAMHAL_LOGDA("Timeout expired on port enable");
+ goto EXIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = setupEXIF();
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error configuring EXIF Buffer %x", ret);
+ }
+ }
+
+ mCapturedFrames = mBurstFrames;
+ mCaptureConfigured = true;
+
+ EXIT:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+};
diff --git a/camera/OMXCameraAdapter/OMXDefaults.cpp b/camera/OMXCameraAdapter/OMXDefaults.cpp
new file mode 100644
index 0000000..670f562
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXDefaults.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXDefaults.cpp
+*
+* This file contains definitions for the OMX Camera defaults
+*
+*/
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+namespace android {
+
+#undef LOG_TAG
+#define LOG_TAG "CameraHAL"
+
+// OMX Camera defaults
+const char OMXCameraAdapter::DEFAULT_ANTIBANDING[] = "auto";
+const char OMXCameraAdapter::DEFAULT_BRIGHTNESS[] = "50";
+const char OMXCameraAdapter::DEFAULT_CONTRAST[] = "100";
+const char OMXCameraAdapter::DEFAULT_EFFECT[] = "none";
+const char OMXCameraAdapter::DEFAULT_EV_COMPENSATION[] = "0";
+const char OMXCameraAdapter::DEFAULT_EV_STEP[] = "0.1";
+const char OMXCameraAdapter::DEFAULT_EXPOSURE_MODE[] = "auto";
+const char OMXCameraAdapter::DEFAULT_FLASH_MODE[] = "off";
+const char OMXCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity";
+const char OMXCameraAdapter::DEFAULT_FRAMERATE_RANGE[] = "20000,30000";
+const char OMXCameraAdapter::DEFAULT_IPP[] = "off";
+const char OMXCameraAdapter::DEFAULT_ISO_MODE[] = "auto";
+const char OMXCameraAdapter::DEFAULT_JPEG_QUALITY[] = "95";
+const char OMXCameraAdapter::DEFAULT_THUMBNAIL_QUALITY[] = "60";
+const char OMXCameraAdapter::DEFAULT_THUMBNAIL_SIZE[] = "160x120";
+const char OMXCameraAdapter::DEFAULT_PICTURE_FORMAT[] = "jpeg";
+const char OMXCameraAdapter::DEFAULT_PICTURE_SIZE[] = "320x240";
+const char OMXCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv420sp";
+const char OMXCameraAdapter::DEFAULT_FRAMERATE[] = "30";
+const char OMXCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "640x480";
+const char OMXCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6";
+const char OMXCameraAdapter::DEFAULT_NUM_PIC_BUFS[] = "1";
+const char OMXCameraAdapter::DEFAULT_MAX_FOCUS_AREAS[] = "1";
+const char OMXCameraAdapter::DEFAULT_SATURATION[] = "100";
+const char OMXCameraAdapter::DEFAULT_SCENE_MODE[] = "auto";
+const char OMXCameraAdapter::DEFAULT_SHARPNESS[] = "100";
+const char OMXCameraAdapter::DEFAULT_VSTAB[] = "0";
+const char OMXCameraAdapter::DEFAULT_WB[] = "auto";
+const char OMXCameraAdapter::DEFAULT_ZOOM[] = "0";
+const char OMXCameraAdapter::DEFAULT_MAX_FD_HW_FACES[] = "35";
+const char OMXCameraAdapter::DEFAULT_MAX_FD_SW_FACES[] = "0";
+
+const char OMXCameraAdapter::DEFAULT_AE_LOCK[] = "false";
+const char OMXCameraAdapter::DEFAULT_AWB_LOCK[] = "false";
+const char OMXCameraAdapter::DEFAULT_MAX_NUM_METERING_AREAS[] = "0";
+const char OMXCameraAdapter::DEFAULT_LOCK_SUPPORTED[] = "true";
+const char OMXCameraAdapter::DEFAULT_LOCK_UNSUPPORTED[] = "false";
+};
+
diff --git a/camera/OMXCameraAdapter/OMXExif.cpp b/camera/OMXCameraAdapter/OMXExif.cpp
new file mode 100644
index 0000000..56b1694
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXExif.cpp
@@ -0,0 +1,567 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXExif.cpp
+*
+* This file contains functionality for handling EXIF insertion.
+*
+*/
+
+#undef LOG_TAG
+
+#define LOG_TAG "CameraHAL"
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+#include <math.h>
+
+namespace android {
+
+const char OMXCameraAdapter::EXIFASCIIPrefix [] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };
+
+status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+ const char *valstr = NULL;
+ double gpsPos;
+
+ LOG_FUNCTION_NAME;
+
+ if( ( valstr = params.get(CameraParameters::KEY_GPS_LATITUDE) ) != NULL )
+ {
+ gpsPos = strtod(valstr, NULL);
+
+ if ( convertGPSCoord( gpsPos, &mEXIFData.mGPSData.mLatDeg,
+ &mEXIFData.mGPSData.mLatMin,
+ &mEXIFData.mGPSData.mLatSec ) == NO_ERROR )
+ {
+
+ if ( 0 < gpsPos )
+ {
+ strncpy(mEXIFData.mGPSData.mLatRef, GPS_NORTH_REF, GPS_REF_SIZE);
+ }
+ else
+ {
+ strncpy(mEXIFData.mGPSData.mLatRef, GPS_SOUTH_REF, GPS_REF_SIZE);
+ }
+
+ mEXIFData.mGPSData.mLatValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mLatValid = false;
+ }
+ }
+ else
+ {
+ mEXIFData.mGPSData.mLatValid = false;
+ }
+
+ if( ( valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE) ) != NULL )
+ {
+ gpsPos = strtod(valstr, NULL);
+
+ if ( convertGPSCoord( gpsPos, &mEXIFData.mGPSData.mLongDeg,
+ &mEXIFData.mGPSData.mLongMin,
+ &mEXIFData.mGPSData.mLongSec ) == NO_ERROR )
+ {
+
+ if ( 0 < gpsPos )
+ {
+ strncpy(mEXIFData.mGPSData.mLongRef, GPS_EAST_REF, GPS_REF_SIZE);
+ }
+ else
+ {
+ strncpy(mEXIFData.mGPSData.mLongRef, GPS_WEST_REF, GPS_REF_SIZE);
+ }
+
+ mEXIFData.mGPSData.mLongValid= true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mLongValid = false;
+ }
+ }
+ else
+ {
+ mEXIFData.mGPSData.mLongValid = false;
+ }
+
+ if( ( valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE) ) != NULL )
+ {
+ gpsPos = strtod(valstr, NULL);
+ mEXIFData.mGPSData.mAltitude = floor(fabs(gpsPos));
+ if (gpsPos < 0) {
+ mEXIFData.mGPSData.mAltitudeRef = 1;
+ } else {
+ mEXIFData.mGPSData.mAltitudeRef = 0;
+ }
+ mEXIFData.mGPSData.mAltitudeValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mAltitudeValid= false;
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ {
+ long gpsTimestamp = strtol(valstr, NULL, 10);
+ struct tm *timeinfo = localtime( ( time_t * ) & (gpsTimestamp) );
+ if ( NULL != timeinfo )
+ {
+ mEXIFData.mGPSData.mTimeStampHour = timeinfo->tm_hour;
+ mEXIFData.mGPSData.mTimeStampMin = timeinfo->tm_min;
+ mEXIFData.mGPSData.mTimeStampSec = timeinfo->tm_sec;
+ mEXIFData.mGPSData.mTimeStampValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mTimeStampValid = false;
+ }
+ }
+ else
+ {
+ mEXIFData.mGPSData.mTimeStampValid = false;
+ }
+
+ if( ( valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP) ) != NULL )
+ {
+ long gpsDatestamp = strtol(valstr, NULL, 10);
+ struct tm *timeinfo = localtime( ( time_t * ) & (gpsDatestamp) );
+ if ( NULL != timeinfo )
+ {
+ strftime(mEXIFData.mGPSData.mDatestamp, GPS_DATESTAMP_SIZE, "%Y:%m:%d", timeinfo);
+ mEXIFData.mGPSData.mDatestampValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mDatestampValid = false;
+ }
+ }
+ else
+ {
+ mEXIFData.mGPSData.mDatestampValid = false;
+ }
+
+ if( ( valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD) ) != NULL )
+ {
+ strncpy(mEXIFData.mGPSData.mProcMethod, valstr, GPS_PROCESSING_SIZE-1);
+ mEXIFData.mGPSData.mProcMethodValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mProcMethodValid = false;
+ }
+
+ if( ( valstr = params.get(TICameraParameters::KEY_GPS_MAPDATUM) ) != NULL )
+ {
+ strncpy(mEXIFData.mGPSData.mMapDatum, valstr, GPS_MAPDATUM_SIZE-1);
+ mEXIFData.mGPSData.mMapDatumValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mMapDatumValid = false;
+ }
+
+ if( ( valstr = params.get(TICameraParameters::KEY_GPS_VERSION) ) != NULL )
+ {
+ strncpy(mEXIFData.mGPSData.mVersionId, valstr, GPS_VERSION_SIZE-1);
+ mEXIFData.mGPSData.mVersionIdValid = true;
+ }
+ else
+ {
+ mEXIFData.mGPSData.mVersionIdValid = false;
+ }
+
+ if( ( valstr = params.get(TICameraParameters::KEY_EXIF_MODEL ) ) != NULL )
+ {
+ CAMHAL_LOGVB("EXIF Model: %s", valstr);
+ mEXIFData.mModelValid= true;
+ }
+ else
+ {
+ mEXIFData.mModelValid= false;
+ }
+
+ if( ( valstr = params.get(TICameraParameters::KEY_EXIF_MAKE ) ) != NULL )
+ {
+ CAMHAL_LOGVB("EXIF Make: %s", valstr);
+ mEXIFData.mMakeValid = true;
+ }
+ else
+ {
+ mEXIFData.mMakeValid= false;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
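+
+// Builds the EXIF tag configuration and hands it to the component through a
+// single shared buffer: the OMX_TI_CONFIG_EXIF_TAGS structure sits at the
+// start of the buffer, the variable-sized string fields (model, make,
+// date/time, GPS map datum and processing method) are stored after it, and
+// the tag pointers carry offsets into the shared buffer.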
+status_t OMXCameraAdapter::setupEXIF()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
+ OMX_TI_CONFIG_EXIF_TAGS *exifTags;
+ unsigned char *sharedPtr = NULL;
+ struct timeval sTv;
+ struct tm *pTime;
+ OMXCameraPortParameters * capData = NULL;
+ MemoryManager memMgr;
+ OMX_U8** memmgr_buf_array = NULL;
+ int buf_size = 0;
+
+ LOG_FUNCTION_NAME;
+
+ sharedBuffer.pSharedBuff = NULL;
+ capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
+ sharedBuffer.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ //We allocate the shared buffer dynamically based on the
+ //requirements of the EXIF tags. The additional buffers will
+ //get stored after the EXIF configuration structure and the pointers
+ //will contain offsets within the shared buffer itself.
+ buf_size = sizeof(OMX_TI_CONFIG_EXIF_TAGS) +
+ ( EXIF_MODEL_SIZE ) +
+ ( EXIF_MAKE_SIZE ) +
+ ( EXIF_DATE_TIME_SIZE ) +
+ ( GPS_MAPDATUM_SIZE ) +
+ ( GPS_PROCESSING_SIZE );
+ sharedBuffer.nSharedBuffSize = buf_size;
+
+ memmgr_buf_array = (OMX_U8 **)memMgr.allocateBuffer(0, 0, NULL, buf_size, 1);
+ sharedBuffer.pSharedBuff = ( OMX_U8 * ) memmgr_buf_array[0];
+
+ if ( NULL == sharedBuffer.pSharedBuff )
+ {
+ CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
+ ret = -1;
+ }
+
+ //Extra data begins right after the EXIF configuration structure.
+ sharedPtr = sharedBuffer.pSharedBuff + sizeof(OMX_TI_CONFIG_EXIF_TAGS);
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ exifTags = ( OMX_TI_CONFIG_EXIF_TAGS * ) sharedBuffer.pSharedBuff;
+ OMX_INIT_STRUCT_PTR (exifTags, OMX_TI_CONFIG_EXIF_TAGS);
+ exifTags->nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigExifTags,
+ &sharedBuffer );
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while retrieving EXIF configuration structure 0x%x", eError);
+ ret = -1;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusModel ) &&
+ ( mEXIFData.mModelValid ) )
+ {
+ strncpy(( char * ) sharedPtr,
+ ( char * ) mParams.get(TICameraParameters::KEY_EXIF_MODEL ),
+ EXIF_MODEL_SIZE - 1);
+
+ exifTags->pModelBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ sharedPtr += EXIF_MODEL_SIZE;
+ exifTags->ulModelBuffSizeBytes = EXIF_MODEL_SIZE;
+ exifTags->eStatusModel = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusMake) &&
+ ( mEXIFData.mMakeValid ) )
+ {
+ strncpy( ( char * ) sharedPtr,
+ ( char * ) mParams.get(TICameraParameters::KEY_EXIF_MAKE ),
+ EXIF_MAKE_SIZE - 1);
+
+ exifTags->pMakeBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ sharedPtr += EXIF_MAKE_SIZE;
+ exifTags->ulMakeBuffSizeBytes = EXIF_MAKE_SIZE;
+ exifTags->eStatusMake = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusFocalLength ))
+ {
+ char *ctx;
+ int len;
+ char* temp = (char*) mParams.get(CameraParameters::KEY_FOCAL_LENGTH);
+ char * tempVal = NULL;
+ if(temp != NULL)
+ {
+ len = strlen(temp);
+ tempVal = (char*) malloc( sizeof(char) * (len + 1));
+ }
+ if(tempVal != NULL)
+ {
+ memset(tempVal, '\0', len + 1);
+ strncpy(tempVal, temp, len);
+ CAMHAL_LOGDB("KEY_FOCAL_LENGTH = %s", tempVal);
+
+ // convert the decimal string into a rational
+ size_t den_len;
+ OMX_U32 numerator = 0;
+ OMX_U32 denominator = 0;
+ char* temp = strtok_r(tempVal, ".", &ctx);
+
+ if(temp != NULL)
+ numerator = atoi(temp);
+
+ temp = strtok_r(NULL, ".", &ctx);
+ if(temp != NULL)
+ {
+ den_len = strlen(temp);
+ if(HUGE_VAL == den_len )
+ {
+ den_len = 0;
+ }
+ denominator = static_cast<OMX_U32>(pow(10, den_len));
+ numerator = numerator*denominator + atoi(temp);
+ }else{
+ denominator = 1;
+ }
+
+ free(tempVal);
+
+ exifTags->ulFocalLength[0] = numerator;
+ exifTags->ulFocalLength[1] = denominator;
+ CAMHAL_LOGVB("exifTags->ulFocalLength = [%u] [%u]",
+ (unsigned int)(exifTags->ulFocalLength[0]),
+ (unsigned int)(exifTags->ulFocalLength[1]));
+ exifTags->eStatusFocalLength = OMX_TI_TagUpdated;
+ }
+ }
+
+ if ( OMX_TI_TagReadWrite == exifTags->eStatusDateTime )
+ {
+ int status = gettimeofday (&sTv, NULL);
+ pTime = localtime (&sTv.tv_sec);
+ if ( ( 0 == status ) && ( NULL != pTime ) )
+ {
+ snprintf(( char * ) sharedPtr, EXIF_DATE_TIME_SIZE,
+ "%04d:%02d:%02d %02d:%02d:%02d",
+ pTime->tm_year + 1900,
+ pTime->tm_mon + 1,
+ pTime->tm_mday,
+ pTime->tm_hour,
+ pTime->tm_min,
+ pTime->tm_sec );
+ }
+
+ exifTags->pDateTimeBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ sharedPtr += EXIF_DATE_TIME_SIZE;
+ exifTags->ulDateTimeBuffSizeBytes = EXIF_DATE_TIME_SIZE;
+ exifTags->eStatusDateTime = OMX_TI_TagUpdated;
+ }
+
+ if ( OMX_TI_TagReadWrite == exifTags->eStatusImageWidth )
+ {
+ exifTags->ulImageWidth = capData->mWidth;
+ exifTags->eStatusImageWidth = OMX_TI_TagUpdated;
+ }
+
+ if ( OMX_TI_TagReadWrite == exifTags->eStatusImageHeight )
+ {
+ exifTags->ulImageHeight = capData->mHeight;
+ exifTags->eStatusImageHeight = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsLatitude ) &&
+ ( mEXIFData.mGPSData.mLatValid ) )
+ {
+ exifTags->ulGpsLatitude[0] = abs(mEXIFData.mGPSData.mLatDeg);
+ exifTags->ulGpsLatitude[2] = abs(mEXIFData.mGPSData.mLatMin);
+ exifTags->ulGpsLatitude[4] = abs(mEXIFData.mGPSData.mLatSec);
+ exifTags->ulGpsLatitude[1] = 1;
+ exifTags->ulGpsLatitude[3] = 1;
+ exifTags->ulGpsLatitude[5] = 1;
+ exifTags->eStatusGpsLatitude = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpslatitudeRef ) &&
+ ( mEXIFData.mGPSData.mLatValid ) )
+ {
+ exifTags->cGpslatitudeRef[0] = ( OMX_S8 ) mEXIFData.mGPSData.mLatRef[0];
+ exifTags->cGpslatitudeRef[1] = '\0';
+ exifTags->eStatusGpslatitudeRef = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsLongitude ) &&
+ ( mEXIFData.mGPSData.mLongValid ) )
+ {
+ exifTags->ulGpsLongitude[0] = abs(mEXIFData.mGPSData.mLongDeg);
+ exifTags->ulGpsLongitude[2] = abs(mEXIFData.mGPSData.mLongMin);
+ exifTags->ulGpsLongitude[4] = abs(mEXIFData.mGPSData.mLongSec);
+ exifTags->ulGpsLongitude[1] = 1;
+ exifTags->ulGpsLongitude[3] = 1;
+ exifTags->ulGpsLongitude[5] = 1;
+ exifTags->eStatusGpsLongitude = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsLongitudeRef ) &&
+ ( mEXIFData.mGPSData.mLongValid ) )
+ {
+ exifTags->cGpsLongitudeRef[0] = ( OMX_S8 ) mEXIFData.mGPSData.mLongRef[0];
+ exifTags->cGpsLongitudeRef[1] = '\0';
+ exifTags->eStatusGpsLongitudeRef = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsAltitude ) &&
+ ( mEXIFData.mGPSData.mAltitudeValid) )
+ {
+ exifTags->ulGpsAltitude[0] = ( OMX_U32 ) mEXIFData.mGPSData.mAltitude;
+ exifTags->ulGpsAltitude[1] = 1;
+ exifTags->eStatusGpsAltitude = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsAltitudeRef ) &&
+ ( mEXIFData.mGPSData.mAltitudeValid) )
+ {
+ exifTags->ucGpsAltitudeRef = (OMX_U8) mEXIFData.mGPSData.mAltitudeRef;
+ exifTags->eStatusGpsAltitudeRef = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsMapDatum ) &&
+ ( mEXIFData.mGPSData.mMapDatumValid ) )
+ {
+ memcpy(sharedPtr, mEXIFData.mGPSData.mMapDatum, GPS_MAPDATUM_SIZE);
+
+ exifTags->pGpsMapDatumBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ exifTags->ulGpsMapDatumBuffSizeBytes = GPS_MAPDATUM_SIZE;
+ exifTags->eStatusGpsMapDatum = OMX_TI_TagUpdated;
+ sharedPtr += GPS_MAPDATUM_SIZE;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsProcessingMethod ) &&
+ ( mEXIFData.mGPSData.mProcMethodValid ) )
+ {
+ exifTags->pGpsProcessingMethodBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff );
+ memcpy(sharedPtr, EXIFASCIIPrefix, sizeof(EXIFASCIIPrefix));
+ sharedPtr += sizeof(EXIFASCIIPrefix);
+
+ memcpy(sharedPtr,
+ mEXIFData.mGPSData.mProcMethod,
+ ( GPS_PROCESSING_SIZE - sizeof(EXIFASCIIPrefix) ) );
+ exifTags->ulGpsProcessingMethodBuffSizeBytes = GPS_PROCESSING_SIZE;
+ exifTags->eStatusGpsProcessingMethod = OMX_TI_TagUpdated;
+ sharedPtr += GPS_PROCESSING_SIZE;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsVersionId ) &&
+ ( mEXIFData.mGPSData.mVersionIdValid ) )
+ {
+ exifTags->ucGpsVersionId[0] = ( OMX_U8 ) mEXIFData.mGPSData.mVersionId[0];
+ exifTags->ucGpsVersionId[1] = ( OMX_U8 ) mEXIFData.mGPSData.mVersionId[1];
+ exifTags->ucGpsVersionId[2] = ( OMX_U8 ) mEXIFData.mGPSData.mVersionId[2];
+ exifTags->ucGpsVersionId[3] = ( OMX_U8 ) mEXIFData.mGPSData.mVersionId[3];
+ exifTags->eStatusGpsVersionId = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsTimeStamp ) &&
+ ( mEXIFData.mGPSData.mTimeStampValid ) )
+ {
+ exifTags->ulGpsTimeStamp[0] = mEXIFData.mGPSData.mTimeStampHour;
+ exifTags->ulGpsTimeStamp[2] = mEXIFData.mGPSData.mTimeStampMin;
+ exifTags->ulGpsTimeStamp[4] = mEXIFData.mGPSData.mTimeStampSec;
+ exifTags->ulGpsTimeStamp[1] = 1;
+ exifTags->ulGpsTimeStamp[3] = 1;
+ exifTags->ulGpsTimeStamp[5] = 1;
+ exifTags->eStatusGpsTimeStamp = OMX_TI_TagUpdated;
+ }
+
+ if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsDateStamp ) &&
+ ( mEXIFData.mGPSData.mDatestampValid ) )
+ {
+ strncpy( ( char * ) exifTags->cGpsDateStamp,
+ ( char * ) mEXIFData.mGPSData.mDatestamp,
+ GPS_DATESTAMP_SIZE );
+ exifTags->eStatusGpsDateStamp = OMX_TI_TagUpdated;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigExifTags,
+ &sharedBuffer );
+
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while setting EXIF configuration 0x%x", eError);
+ ret = -1;
+ }
+ }
+
+ if ( NULL != memmgr_buf_array )
+ {
+ memMgr.freeBuffer(memmgr_buf_array);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
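+
+// Converts a GPS coordinate given in decimal degrees into whole degrees,
+// minutes and seconds, carrying over when rounding pushes seconds or minutes
+// up to 60. A coordinate of exactly 0 is treated as invalid.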
+status_t OMXCameraAdapter::convertGPSCoord(double coord, int *deg, int *min, int *sec)
+{
+ double tmp;
+
+ LOG_FUNCTION_NAME;
+
+ if ( coord == 0 ) {
+
+ LOGE("Invalid GPS coordinate");
+
+ return -EINVAL;
+ }
+
+ *deg = (int) floor(fabs(coord));
+ tmp = ( fabs(coord) - floor(fabs(coord)) )*60;
+ *min = (int) floor(tmp);
+ tmp = ( tmp - floor(tmp) )*60;
+ *sec = (int) floor(tmp);
+
+ if( *sec >= 60 ) {
+ *sec = 0;
+ *min += 1;
+ }
+
+ if( *min >= 60 ) {
+ *min = 0;
+ *deg += 1;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+};
diff --git a/camera/OMXCameraAdapter/OMXFD.cpp b/camera/OMXCameraAdapter/OMXFD.cpp
new file mode 100644
index 0000000..c1617d7
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXFD.cpp
@@ -0,0 +1,327 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXFD.cpp
+*
+* This file contains functionality for handling face detection.
+*
+*/
+
+#undef LOG_TAG
+
+#define LOG_TAG "CameraHAL"
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+namespace android {
+
+status_t OMXCameraAdapter::setParametersFD(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::startFaceDetection()
+{
+ Mutex::Autolock lock(mFaceDetectionLock);
+ return setFaceDetection(true, mDeviceOrientation);
+}
+
+status_t OMXCameraAdapter::stopFaceDetection()
+{
+ Mutex::Autolock lock(mFaceDetectionLock);
+ return setFaceDetection(false, mDeviceOrientation);
+}
+
+status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_EXTRADATATYPE extraDataControl;
+ OMX_CONFIG_OBJDETECTIONTYPE objDetection;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ if ( orientation < 0 || orientation > 270 ) {
+ orientation = 0;
+ }
+
+ OMX_INIT_STRUCT_PTR (&objDetection, OMX_CONFIG_OBJDETECTIONTYPE);
+ objDetection.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ objDetection.nDeviceOrientation = orientation;
+ if ( enable )
+ {
+ objDetection.bEnable = OMX_TRUE;
+ }
+ else
+ {
+ objDetection.bEnable = OMX_FALSE;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigImageFaceDetection,
+ &objDetection);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring face detection 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Face detection configured successfully");
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&extraDataControl, OMX_CONFIG_EXTRADATATYPE);
+ extraDataControl.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+ extraDataControl.eExtraDataType = OMX_FaceDetection;
+ extraDataControl.eCameraView = OMX_2D;
+ if ( enable )
+ {
+ extraDataControl.bEnable = OMX_TRUE;
+ }
+ else
+ {
+ extraDataControl.bEnable = OMX_FALSE;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigOtherExtraDataControl,
+ &extraDataControl);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring face detection extra data 0x%x",
+ eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Face detection extra data configured successfully");
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mFaceDetectionRunning = enable;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
+ sp<CameraFDResult> &result,
+ size_t previewWidth,
+ size_t previewHeight)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_TI_FACERESULT *faceResult;
+ OMX_OTHER_EXTRADATATYPE *extraData;
+ OMX_FACEDETECTIONTYPE *faceData;
+ OMX_TI_PLATFORMPRIVATE *platformPrivate;
+ camera_frame_metadata_t *faces;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateExecuting != mComponentState ) {
+ CAMHAL_LOGEA("OMX component is not in executing state");
+ return NO_INIT;
+ }
+
+ if ( NULL == pBuffHeader ) {
+ CAMHAL_LOGEA("Invalid Buffer header");
+ return -EINVAL;
+ }
+
+ platformPrivate = (OMX_TI_PLATFORMPRIVATE *) (pBuffHeader->pPlatformPrivate);
+ if ( NULL != platformPrivate ) {
+ if ( sizeof(OMX_TI_PLATFORMPRIVATE) == platformPrivate->nSize ) {
+ CAMHAL_LOGVB("Size = %d, sizeof = %d, pAuxBuf = 0x%x, pAuxBufSize= %d, pMetaDataBufer = 0x%x, nMetaDataSize = %d",
+ platformPrivate->nSize,
+ sizeof(OMX_TI_PLATFORMPRIVATE),
+ platformPrivate->pAuxBuf1,
+ platformPrivate->pAuxBufSize1,
+ platformPrivate->pMetaDataBuffer,
+ platformPrivate->nMetaDataSize);
+ } else {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE size mismatch: expected = %d, received = %d",
+ ( unsigned int ) sizeof(OMX_TI_PLATFORMPRIVATE),
+ ( unsigned int ) platformPrivate->nSize);
+ ret = -EINVAL;
+ }
+ } else {
+ CAMHAL_LOGEA("Invalid OMX_TI_PLATFORMPRIVATE");
+ return -EINVAL;
+ }
+
+
+ if ( 0 >= platformPrivate->nMetaDataSize ) {
+ CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE nMetaDataSize is size is %d",
+ ( unsigned int ) platformPrivate->nMetaDataSize);
+ return -EINVAL;
+ }
+
+ extraData = (OMX_OTHER_EXTRADATATYPE *) (platformPrivate->pMetaDataBuffer);
+ if ( NULL != extraData ) {
+ CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
+ extraData->nSize,
+ sizeof(OMX_OTHER_EXTRADATATYPE),
+ extraData->eType,
+ extraData->nDataSize,
+ extraData->nPortIndex,
+ extraData->nVersion);
+ } else {
+ CAMHAL_LOGEA("Invalid OMX_OTHER_EXTRADATATYPE");
+ return -EINVAL;
+ }
+
+ faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
+ if ( NULL != faceData ) {
+ if ( sizeof(OMX_FACEDETECTIONTYPE) == faceData->nSize ) {
+ CAMHAL_LOGVB("Faces detected %d",
+ faceData->ulFaceCount,
+ faceData->nSize,
+ sizeof(OMX_FACEDETECTIONTYPE),
+ faceData->eCameraView,
+ faceData->nPortIndex,
+ faceData->nVersion);
+ } else {
+ CAMHAL_LOGEB("OMX_FACEDETECTIONTYPE size mismatch: expected = %d, received = %d",
+ ( unsigned int ) sizeof(OMX_FACEDETECTIONTYPE),
+ ( unsigned int ) faceData->nSize);
+ return -EINVAL;
+ }
+ } else {
+ CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE");
+ return -EINVAL;
+ }
+
+ ret = encodeFaceCoordinates(faceData, &faces, previewWidth, previewHeight);
+
+ if ( NO_ERROR == ret ) {
+ result = new CameraFDResult(faces);
+ } else {
+ result.clear();
+ result = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData,
+ camera_frame_metadata_t **pFaces,
+ size_t previewWidth,
+ size_t previewHeight)
+{
+ status_t ret = NO_ERROR;
+ camera_face_t *faces;
+ camera_frame_metadata_t *faceResult;
+ size_t hRange, vRange;
+ double tmp;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == faceData ) {
+ CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE parameter");
+        return -EINVAL;
+ }
+
+ hRange = CameraFDResult::RIGHT - CameraFDResult::LEFT;
+ vRange = CameraFDResult::BOTTOM - CameraFDResult::TOP;
+
+ faceResult = ( camera_frame_metadata_t * ) malloc(sizeof(camera_frame_metadata_t));
+ if ( NULL == faceResult ) {
+ return -ENOMEM;
+ }
+
+ if ( 0 < faceData->ulFaceCount ) {
+
+        faces = ( camera_face_t * ) malloc(sizeof(camera_face_t)*faceData->ulFaceCount);
+        if ( NULL == faces ) {
+            free(faceResult);
+            return -ENOMEM;
+        }
+
+ for ( int i = 0 ; i < faceData->ulFaceCount ; i++)
+ {
+
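+            // Map the face rectangle from preview pixel coordinates into the
+            // Android face metadata coordinate space spanning
+            // CameraFDResult::LEFT..RIGHT and TOP..BOTTOM, centred around zero.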
+ tmp = ( double ) faceData->tFacePosition[i].nLeft / ( double ) previewWidth;
+ tmp *= hRange;
+ tmp -= hRange/2;
+ faces[i].rect[0] = tmp;
+
+ tmp = ( double ) faceData->tFacePosition[i].nTop / ( double )previewHeight;
+ tmp *= vRange;
+ tmp -= vRange/2;
+ faces[i].rect[1] = tmp;
+
+ tmp = ( double ) faceData->tFacePosition[i].nWidth / ( double ) previewWidth;
+ tmp *= hRange;
+ faces[i].rect[2] = faces[i].rect[0] + tmp;
+
+ tmp = ( double ) faceData->tFacePosition[i].nHeight / ( double ) previewHeight;
+ tmp *= vRange;
+ faces[i].rect[3] = faces[i].rect[1] + tmp;
+
+ faces[i].score = faceData->tFacePosition[i].nScore;
+ faces[i].id = 0;
+ faces[i].left_eye[0] = CameraFDResult::INVALID_DATA;
+ faces[i].left_eye[1] = CameraFDResult::INVALID_DATA;
+ faces[i].right_eye[0] = CameraFDResult::INVALID_DATA;
+ faces[i].right_eye[1] = CameraFDResult::INVALID_DATA;
+ faces[i].mouth[0] = CameraFDResult::INVALID_DATA;
+ faces[i].mouth[1] = CameraFDResult::INVALID_DATA;
+ }
+
+ faceResult->number_of_faces = faceData->ulFaceCount;
+ faceResult->faces = faces;
+
+ } else {
+ faceResult->number_of_faces = 0;
+ faceResult->faces = NULL;
+ }
+
+ *pFaces = faceResult;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+};
diff --git a/camera/OMXCameraAdapter/OMXFocus.cpp b/camera/OMXCameraAdapter/OMXFocus.cpp
new file mode 100644
index 0000000..19286ea
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXFocus.cpp
@@ -0,0 +1,673 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/**
+* @file OMXFocus.cpp
+*
+* This file contains functionality for handling focus configurations.
+*
+*/
+
+#undef LOG_TAG
+
+#define LOG_TAG "CameraHAL"
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+#define TOUCH_FOCUS_RANGE 0xFF
+#define AF_CALLBACK_TIMEOUT 10000000 //10 seconds timeout
+
+namespace android {
+
+status_t OMXCameraAdapter::setParametersFocus(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+ const char *str = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ str = params.get(CameraParameters::KEY_FOCUS_AREAS);
+ mFocusAreas.clear();
+ if ( NULL != str )
+ {
+ CameraArea::parseFocusArea(str, strlen(str), mFocusAreas);
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::doAutoFocus()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl;
+ size_t top, left, width, height, weight;
+ sp<CameraArea> focusArea = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateExecuting != mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component not in executing state");
+ returnFocusStatus(false);
+ return NO_INIT;
+ }
+
+ if ( 0 != mDoAFSem.Count() )
+ {
+ CAMHAL_LOGEB("Error mDoAFSem semaphore count %d", mDoAFSem.Count());
+ return NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ if ( !mFocusAreas.isEmpty() )
+ {
+ focusArea = mFocusAreas.itemAt(0);
+ }
+
+ OMX_INIT_STRUCT_PTR (&focusControl, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
+ focusControl.eFocusControl = ( OMX_IMAGE_FOCUSCONTROLTYPE ) mParameters3A.Focus;
+
+ //If touch AF is set, then necessary configuration first
+ if ( ( NULL != focusArea.get() ) && ( focusArea->isValid() ) )
+ {
+
+ //Disable face priority first
+ setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, false);
+
+ //Enable region algorithm priority
+ setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, true);
+
+ //Set position
+ OMXCameraPortParameters * mPreviewData = NULL;
+ mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];
+ focusArea->transfrom(mPreviewData->mWidth,
+ mPreviewData->mHeight,
+ top,
+ left,
+ width,
+ height);
+ setTouchFocus(left,
+ top,
+ width,
+ height,
+ mPreviewData->mWidth,
+ mPreviewData->mHeight);
+
+ //Do normal focus afterwards
+ focusControl.eFocusControl = ( OMX_IMAGE_FOCUSCONTROLTYPE ) OMX_IMAGE_FocusControlExtended;
+
+ }
+ else if ( FOCUS_FACE_PRIORITY == focusControl.eFocusControl )
+ {
+
+ //Disable region priority first
+ setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);
+
+ //Enable face algorithm priority
+ setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, true);
+
+ //Do normal focus afterwards
+ focusControl.eFocusControl = ( OMX_IMAGE_FOCUSCONTROLTYPE ) OMX_IMAGE_FocusControlExtended;
+
+ }
+ else
+ {
+
+ //Disable both region and face priority
+ setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);
+
+ setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, false);
+
+ }
+
+ if ( ( mParameters3A.Focus != OMX_IMAGE_FocusControlAuto ) &&
+ ( mParameters3A.Focus != OMX_IMAGE_FocusControlAutoInfinity ) )
+ {
+
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ (OMX_EVENTTYPE) OMX_EventIndexSettingChanged,
+ OMX_ALL,
+ OMX_IndexConfigCommonFocusStatus,
+ mDoAFSem);
+
+ if ( NO_ERROR == ret )
+ {
+ ret = setFocusCallback(true);
+ }
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigFocusControl,
+ &focusControl);
+
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while starting focus 0x%x", eError);
+ return INVALID_OPERATION;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Autofocus started successfully");
+ }
+ }
+
+ if ( ( mParameters3A.Focus != OMX_IMAGE_FocusControlAuto ) &&
+ ( mParameters3A.Focus != OMX_IMAGE_FocusControlAutoInfinity ) )
+ {
+
+ ret = mDoAFSem.WaitTimeout(AF_CALLBACK_TIMEOUT);
+ //Disable auto focus callback from Ducati
+ setFocusCallback(false);
+        //Signal a dummy AF event so that, if the callback from Ducati arrives
+        //after this function has exited, it doesn't crash once eventSem has
+        //gone out of scope.
+ if(ret != NO_ERROR)
+ {
+ CAMHAL_LOGEA("Autofocus callback timeout expired");
+ SignalEvent(mCameraAdapterParameters.mHandleComp,
+ (OMX_EVENTTYPE) OMX_EventIndexSettingChanged,
+ OMX_ALL,
+ OMX_IndexConfigCommonFocusStatus,
+ NULL );
+ returnFocusStatus(true);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Autofocus callback received");
+ ret = returnFocusStatus(false);
+ }
+
+ }
+ else
+ {
+ if ( NO_ERROR == ret )
+ {
+ ret = returnFocusStatus(true);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::stopAutoFocus()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateExecuting != mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component not in executing state");
+ return NO_INIT;
+ }
+
+ if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) {
+ // No need to stop focus if we are in infinity mode. Nothing to stop.
+ return NO_ERROR;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ //Disable the callback first
+ ret = setFocusCallback(false);
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&focusControl, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
+ focusControl.eFocusControl = OMX_IMAGE_FocusControlOff;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigFocusControl,
+ &focusControl);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while stopping focus 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+ mParameters3A.Focus = OMX_IMAGE_FocusControlOff;
+ CAMHAL_LOGDA("Autofocus stopped successfully");
+ }
+ }
+
+ //Query current focus distance after AF is complete
+ updateFocusDistances(mParameters);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::cancelAutoFocus()
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ stopAutoFocus();
+    //Signal a dummy AF event so that, if the callback from Ducati arrives
+    //after this function has exited, it doesn't crash once eventSem has
+    //gone out of scope.
+ ret |= SignalEvent(mCameraAdapterParameters.mHandleComp,
+ (OMX_EVENTTYPE) OMX_EventIndexSettingChanged,
+ OMX_ALL,
+ OMX_IndexConfigCommonFocusStatus,
+ NULL );
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+
+}
+
+status_t OMXCameraAdapter::setFocusCallback(bool enabled)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_CALLBACKREQUESTTYPE focusRequstCallback;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateExecuting != mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component not in executing state");
+ ret = -1;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ OMX_INIT_STRUCT_PTR (&focusRequstCallback, OMX_CONFIG_CALLBACKREQUESTTYPE);
+ focusRequstCallback.nPortIndex = OMX_ALL;
+ focusRequstCallback.nIndex = OMX_IndexConfigCommonFocusStatus;
+
+ if ( enabled )
+ {
+ focusRequstCallback.bEnable = OMX_TRUE;
+ }
+ else
+ {
+ focusRequstCallback.bEnable = OMX_FALSE;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_IndexConfigCallbackRequest,
+ &focusRequstCallback);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error registering focus callback 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDB("Autofocus callback for index 0x%x registered successfully",
+ OMX_IndexConfigCommonFocusStatus);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::returnFocusStatus(bool timeoutReached)
+{
+ status_t ret = NO_ERROR;
+ OMX_PARAM_FOCUSSTATUSTYPE eFocusStatus;
+ bool focusStatus = false;
+ BaseCameraAdapter::AdapterState state;
+ BaseCameraAdapter::getState(state);
+
+ LOG_FUNCTION_NAME;
+
+ OMX_INIT_STRUCT(eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
+
+ if( ( AF_ACTIVE & state ) != AF_ACTIVE )
+ {
+ /// We don't send focus callback if focus was not started
+ return NO_ERROR;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ if ( !timeoutReached )
+ {
+ ret = checkFocus(&eFocusStatus);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEA("Focus status check failed!");
+ }
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ if ( timeoutReached )
+ {
+ focusStatus = false;
+ }
+ else
+ {
+
+ switch (eFocusStatus.eFocusStatus)
+ {
+ case OMX_FocusStatusReached:
+ {
+ focusStatus = true;
+ break;
+ }
+ case OMX_FocusStatusOff:
+ case OMX_FocusStatusUnableToReach:
+ case OMX_FocusStatusRequest:
+ default:
+ {
+ focusStatus = false;
+ break;
+ }
+ }
+
+ stopAutoFocus();
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ ret = BaseCameraAdapter::setState(CAMERA_CANCEL_AUTOFOCUS);
+
+ if ( NO_ERROR == ret )
+ {
+ ret = BaseCameraAdapter::commitState();
+ }
+ else
+ {
+ ret |= BaseCameraAdapter::rollbackState();
+ }
+
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ notifyFocusSubscribers(focusStatus);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == eFocusStatus )
+ {
+ CAMHAL_LOGEA("Invalid focus status");
+ ret = -EINVAL;
+ }
+
+ if ( OMX_StateExecuting != mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component not in executing state");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonFocusStatus,
+ eFocusStatus);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while retrieving focus status: 0x%x", eError);
+ ret = -1;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ CAMHAL_LOGDB("Focus Status: %d", eFocusStatus->eFocusStatus);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::updateFocusDistances(CameraParameters &params)
+{
+ OMX_U32 focusNear, focusOptimal, focusFar;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ ret = getFocusDistances(focusNear, focusOptimal, focusFar);
+ if ( NO_ERROR == ret)
+ {
+ ret = addFocusDistances(focusNear, focusOptimal, focusFar, params);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error in call to addFocusDistances() 0x%x", ret);
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEB("Error in call to getFocusDistances() 0x%x", ret);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::getFocusDistances(OMX_U32 &near,OMX_U32 &optimal, OMX_U32 &far)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError;
+
+ OMX_TI_CONFIG_FOCUSDISTANCETYPE focusDist;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = UNKNOWN_ERROR;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR(&focusDist, OMX_TI_CONFIG_FOCUSDISTANCETYPE);
+ focusDist.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
+
+ eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigFocusDistance,
+ &focusDist);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while querying focus distances 0x%x", eError);
+ ret = UNKNOWN_ERROR;
+ }
+
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ near = focusDist.nFocusDistanceNear;
+ optimal = focusDist.nFocusDistanceOptimal;
+ far = focusDist.nFocusDistanceFar;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::encodeFocusDistance(OMX_U32 dist, char *buffer, size_t length)
+{
+ status_t ret = NO_ERROR;
+ uint32_t focusScale = 1000;
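+    // Focus distances from the OMX layer are assumed to be in millimetres;
+    // focusScale converts them to metres for the KEY_FOCUS_DISTANCES string.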
+ float distFinal;
+
+ LOG_FUNCTION_NAME;
+
+ if(mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity)
+ {
+ dist=0;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ if ( 0 == dist )
+ {
+ strncpy(buffer, CameraParameters::FOCUS_DISTANCE_INFINITY, ( length - 1 ));
+ }
+ else
+ {
+ distFinal = dist;
+ distFinal /= focusScale;
+ snprintf(buffer, ( length - 1 ) , "%5.3f", distFinal);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
+ OMX_U32 &optimal,
+ OMX_U32 &far,
+ CameraParameters& params)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret )
+ {
+ ret = encodeFocusDistance(near, mFocusDistNear, FOCUS_DIST_SIZE);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Error encoding near focus distance 0x%x", ret);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = encodeFocusDistance(optimal, mFocusDistOptimal, FOCUS_DIST_SIZE);
+ if ( NO_ERROR != ret )
+ {
+            CAMHAL_LOGEB("Error encoding optimal focus distance 0x%x", ret);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = encodeFocusDistance(far, mFocusDistFar, FOCUS_DIST_SIZE);
+ if ( NO_ERROR != ret )
+ {
+            CAMHAL_LOGEB("Error encoding far focus distance 0x%x", ret);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ snprintf(mFocusDistBuffer, ( FOCUS_DIST_BUFFER_SIZE - 1) ,"%s,%s,%s", mFocusDistNear,
+ mFocusDistOptimal,
+ mFocusDistFar);
+
+ params.set(CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::setTouchFocus(size_t posX,
+ size_t posY,
+ size_t posWidth,
+ size_t posHeight,
+ size_t previewWidth,
+ size_t previewHeight)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_EXTFOCUSREGIONTYPE touchControl;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -1;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&touchControl, OMX_CONFIG_EXTFOCUSREGIONTYPE);
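+        // Scale the focus window from preview pixel coordinates into the
+        // 0..TOUCH_FOCUS_RANGE space expected by the extended focus region config.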
+ touchControl.nLeft = ( posX * TOUCH_FOCUS_RANGE ) / previewWidth;
+ touchControl.nTop = ( posY * TOUCH_FOCUS_RANGE ) / previewHeight;
+ touchControl.nWidth = ( posWidth * TOUCH_FOCUS_RANGE ) / previewWidth;
+ touchControl.nHeight = ( posHeight * TOUCH_FOCUS_RANGE ) / previewHeight;
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_IndexConfigExtFocusRegion,
+ &touchControl);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while configuring touch focus 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+            CAMHAL_LOGDB("Touch focus %d,%d %d,%d configured successfully",
+ ( int ) touchControl.nLeft,
+ ( int ) touchControl.nTop,
+ ( int ) touchControl.nWidth,
+ ( int ) touchControl.nHeight);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+};
diff --git a/camera/OMXCameraAdapter/OMXZoom.cpp b/camera/OMXCameraAdapter/OMXZoom.cpp
new file mode 100644
index 0000000..9e59f3d
--- /dev/null
+++ b/camera/OMXCameraAdapter/OMXZoom.cpp
@@ -0,0 +1,270 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file OMXZoom.cpp
+*
+* This file contains functionality for handling zoom configurations.
+*
+*/
+
+#undef LOG_TAG
+
+#define LOG_TAG "CameraHAL"
+
+#include "CameraHal.h"
+#include "OMXCameraAdapter.h"
+
+namespace android {
+
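+// Q16 fixed-point zoom scale factors (65536 == 1.0x, 524288 == 8.0x),
+// one entry per supported zoom stage.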
+const int32_t OMXCameraAdapter::ZOOM_STEPS [ZOOM_STAGES] = {
+ 65536, 68157, 70124, 72745,
+ 75366, 77988, 80609, 83231,
+ 86508, 89784, 92406, 95683,
+ 99615, 102892, 106168, 110100,
+ 114033, 117965, 122552, 126484,
+ 131072, 135660, 140247, 145490,
+ 150733, 155976, 161219, 167117,
+ 173015, 178913, 185467, 192020,
+ 198574, 205783, 212992, 220201,
+ 228065, 236585, 244449, 252969,
+ 262144, 271319, 281149, 290980,
+ 300810, 311951, 322437, 334234,
+ 346030, 357827, 370934, 384041,
+ 397148, 411566, 425984, 441057,
+ 456131, 472515, 488899, 506593,
+ 524288 };
+
+
+status_t OMXCameraAdapter::setParametersZoom(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state)
+{
+ status_t ret = NO_ERROR;
+ Mutex::Autolock lock(mZoomLock);
+
+ LOG_FUNCTION_NAME;
+
+    //Immediate zoom should not be available while smooth zoom is running
+ if ( ( ZOOM_ACTIVE & state ) != ZOOM_ACTIVE )
+ {
+ int zoom = params.getInt(CameraParameters::KEY_ZOOM);
+ if( ( zoom >= 0 ) && ( zoom < ZOOM_STAGES ) )
+ {
+ mTargetZoomIdx = zoom;
+
+ //Immediate zoom should be applied instantly ( CTS requirement )
+ mCurrentZoomIdx = mTargetZoomIdx;
+ doZoom(mCurrentZoomIdx);
+
+ CAMHAL_LOGDB("Zoom by App %d", zoom);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::doZoom(int index)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_CONFIG_SCALEFACTORTYPE zoomControl;
+ static int prevIndex = 0;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState )
+ {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = -1;
+ }
+
+ if ( ( 0 > index) || ( ( ZOOM_STAGES - 1 ) < index ) )
+ {
+ CAMHAL_LOGEB("Zoom index %d out of range", index);
+ ret = -EINVAL;
+ }
+
+ if ( prevIndex == index )
+ {
+ return NO_ERROR;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ OMX_INIT_STRUCT_PTR (&zoomControl, OMX_CONFIG_SCALEFACTORTYPE);
+ zoomControl.nPortIndex = OMX_ALL;
+ zoomControl.xHeight = ZOOM_STEPS[index];
+ zoomControl.xWidth = ZOOM_STEPS[index];
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ OMX_IndexConfigCommonDigitalZoom,
+ &zoomControl);
+ if ( OMX_ErrorNone != eError )
+ {
+ CAMHAL_LOGEB("Error while applying digital zoom 0x%x", eError);
+ ret = -1;
+ }
+ else
+ {
+ CAMHAL_LOGDA("Digital zoom applied successfully");
+ prevIndex = index;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::advanceZoom()
+{
+ status_t ret = NO_ERROR;
+ AdapterState state;
+ BaseCameraAdapter::getState(state);
+
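+    // During smooth zoom the index advances one step per call towards
+    // mTargetZoomIdx; otherwise the target index is applied immediately.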
+ if ( mReturnZoomStatus )
+ {
+ mTargetZoomIdx = mCurrentZoomIdx;
+ mReturnZoomStatus = false;
+ ret = doZoom(mCurrentZoomIdx);
+ notifyZoomSubscribers(mCurrentZoomIdx, true);
+ }
+ else if ( mCurrentZoomIdx != mTargetZoomIdx )
+ {
+ if ( ZOOM_ACTIVE & state )
+ {
+ if ( mCurrentZoomIdx < mTargetZoomIdx )
+ {
+ mZoomInc = 1;
+ }
+ else
+ {
+ mZoomInc = -1;
+ }
+
+ mCurrentZoomIdx += mZoomInc;
+ }
+ else
+ {
+ mCurrentZoomIdx = mTargetZoomIdx;
+ }
+
+ ret = doZoom(mCurrentZoomIdx);
+
+ if ( ZOOM_ACTIVE & state )
+ {
+ if ( mCurrentZoomIdx == mTargetZoomIdx )
+ {
+ CAMHAL_LOGDB("[Goal Reached] Smooth Zoom notify currentIdx = %d, targetIdx = %d",
+ mCurrentZoomIdx,
+ mTargetZoomIdx);
+ notifyZoomSubscribers(mCurrentZoomIdx, true);
+
+ if ( NO_ERROR == ret )
+ {
+
+ ret = BaseCameraAdapter::setState(CAMERA_STOP_SMOOTH_ZOOM);
+
+ if ( NO_ERROR == ret )
+ {
+ ret = BaseCameraAdapter::commitState();
+ }
+ else
+ {
+ ret |= BaseCameraAdapter::rollbackState();
+ }
+
+ }
+ }
+ else
+ {
+ CAMHAL_LOGDB("[Advancing] Smooth Zoom notify currentIdx = %d, targetIdx = %d",
+ mCurrentZoomIdx,
+ mTargetZoomIdx);
+ notifyZoomSubscribers(mCurrentZoomIdx, false);
+ }
+ }
+ }
+ else if ( (mCurrentZoomIdx == mTargetZoomIdx ) &&
+ ( ZOOM_ACTIVE & state ) )
+ {
+ ret = BaseCameraAdapter::setState(CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM);
+
+ if ( NO_ERROR == ret )
+ {
+ ret = BaseCameraAdapter::commitState();
+ }
+ else
+ {
+ ret |= BaseCameraAdapter::rollbackState();
+ }
+ }
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::startSmoothZoom(int targetIdx)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mZoomLock);
+
+ CAMHAL_LOGDB("Start smooth zoom target = %d, mCurrentIdx = %d",
+ targetIdx,
+ mCurrentZoomIdx);
+
+ if ( ( targetIdx >= 0 ) && ( targetIdx < ZOOM_STAGES ) )
+ {
+ mTargetZoomIdx = targetIdx;
+ mZoomParameterIdx = mCurrentZoomIdx;
+ }
+ else
+ {
+ CAMHAL_LOGEB("Smooth value out of range %d!", targetIdx);
+ ret = -EINVAL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t OMXCameraAdapter::stopSmoothZoom()
+{
+ status_t ret = NO_ERROR;
+ Mutex::Autolock lock(mZoomLock);
+
+ LOG_FUNCTION_NAME;
+
+ if ( mTargetZoomIdx != mCurrentZoomIdx )
+ {
+ mTargetZoomIdx = mCurrentZoomIdx;
+ mReturnZoomStatus = true;
+ CAMHAL_LOGDB("Stop smooth zoom mCurrentZoomIdx = %d, mTargetZoomIdx = %d",
+ mCurrentZoomIdx,
+ mTargetZoomIdx);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+};
diff --git a/camera/SensorListener.cpp b/camera/SensorListener.cpp
new file mode 100644
index 0000000..8b3e942
--- /dev/null
+++ b/camera/SensorListener.cpp
@@ -0,0 +1,232 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file SensorListener.cpp
+*
+* This file listens for and propagates sensor events to CameraHal.
+*
+*/
+
+#define LOG_TAG "CameraHAL"
+
+#include "SensorListener.h"
+#include "CameraHal.h"
+
+#include <stdint.h>
+#include <math.h>
+#include <sys/types.h>
+
+namespace android {
+
+/*** static declarations ***/
+static const float RADIANS_2_DEG = (float) (180 / M_PI);
+// measured values on device...might need tuning
+static const int DEGREES_90_THRESH = 50;
+static const int DEGREES_180_THRESH = 170;
+static const int DEGREES_270_THRESH = 250;
+
+static int sensor_events_listener(int fd, int events, void* data)
+{
+ SensorListener* listener = (SensorListener*) data;
+ ssize_t num_sensors;
+ ASensorEvent sen_events[8];
+ while ((num_sensors = listener->mSensorEventQueue->read(sen_events, 8)) > 0) {
+ for (int i = 0; i < num_sensors; i++) {
+ if (sen_events[i].type == Sensor::TYPE_ACCELEROMETER) {
+ float x = sen_events[i].vector.azimuth;
+ float y = sen_events[i].vector.pitch;
+ float z = sen_events[i].vector.roll;
+ float radius = 0;
+ int tilt = 0, orient = 0;
+
+ CAMHAL_LOGVA("ACCELEROMETER EVENT");
+ CAMHAL_LOGVB(" azimuth = %f pitch = %f roll = %f",
+ sen_events[i].vector.azimuth,
+ sen_events[i].vector.pitch,
+ sen_events[i].vector.roll);
+ // see http://en.wikipedia.org/wiki/Spherical_coordinate_system#Cartesian_coordinates
+ // about conversion from cartesian to spherical for orientation calculations
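+                // tilt is the angle of the device above the horizontal plane;
+                // orient is the rotation around gravity, snapped below to
+                // 0/90/180/270 degrees.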
+ radius = (float) sqrt(x * x + y * y + z * z);
+                tilt = (int) (asinf(z / radius) * RADIANS_2_DEG);
+                orient = (int) (atan2f(-x, y) * RADIANS_2_DEG);
+
+ if (orient < 0) {
+ orient += 360;
+ }
+
+ if (orient >= DEGREES_270_THRESH) {
+ orient = 270;
+ } else if (orient >= DEGREES_180_THRESH) {
+ orient = 180;
+ } else if (orient >= DEGREES_90_THRESH) {
+ orient = 90;
+ } else {
+ orient = 0;
+ }
+ listener->handleOrientation(orient, tilt);
+ CAMHAL_LOGVB(" tilt = %d orientation = %d", tilt, orient);
+ } else if (sen_events[i].type == Sensor::TYPE_GYROSCOPE) {
+ CAMHAL_LOGVA("GYROSCOPE EVENT");
+ }
+ }
+ }
+
+ if (num_sensors < 0 && num_sensors != -EAGAIN) {
+ CAMHAL_LOGEB("reading events failed: %s", strerror(-num_sensors));
+ }
+
+ return 1;
+}
+
+/****** public - member functions ******/
+SensorListener::SensorListener() {
+ LOG_FUNCTION_NAME;
+
+ sensorsEnabled = 0;
+ mOrientationCb = NULL;
+ mSensorEventQueue = NULL;
+ mSensorLooperThread = NULL;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+SensorListener::~SensorListener() {
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGDA("Kill looper thread");
+ if (mSensorLooperThread.get()) {
+ // 1. Request exit
+ // 2. Wake up looper which should be polling for an event
+ // 3. Wait for exit
+ mSensorLooperThread->requestExit();
+ mSensorLooperThread->wake();
+ mSensorLooperThread->join();
+ mSensorLooperThread.clear();
+ mSensorLooperThread = NULL;
+ }
+
+ CAMHAL_LOGDA("Kill looper");
+ if (mLooper.get()) {
+ mLooper->removeFd(mSensorEventQueue->getFd());
+ mLooper.clear();
+ mLooper = NULL;
+ }
+ CAMHAL_LOGDA("SensorListener destroyed");
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t SensorListener::initialize() {
+ status_t ret = NO_ERROR;
+ SensorManager& mgr(SensorManager::getInstance());
+ Sensor const* const* list;
+ ssize_t count = 0;
+ Sensor const* accelerometer;
+
+ LOG_FUNCTION_NAME;
+
+ count = mgr.getSensorList(&list);
+ CAMHAL_LOGDB("numSensors = %lu", count);
+
+ mSensorEventQueue = mgr.createEventQueue();
+
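+    // The sensor event queue's fd is polled by a Looper running on a dedicated
+    // looper thread; events are dispatched to sensor_events_listener.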
+    mLooper = new Looper(false);
+ mLooper->addFd(mSensorEventQueue->getFd(), 0, ALOOPER_EVENT_INPUT, sensor_events_listener, this);
+
+ if (mSensorLooperThread.get() == NULL)
+ mSensorLooperThread = new SensorLooperThread(mLooper.get());
+
+ if (mSensorLooperThread.get() == NULL) {
+ CAMHAL_LOGEA("Couldn't create sensor looper thread");
+ ret = NO_MEMORY;
+ goto out;
+ }
+
+ ret = mSensorLooperThread->run("sensor looper thread", PRIORITY_URGENT_DISPLAY);
+ if (ret == INVALID_OPERATION){
+ CAMHAL_LOGDA("thread already running ?!?");
+ } else if (ret != NO_ERROR) {
+ CAMHAL_LOGEA("couldn't run thread");
+ goto out;
+ }
+
+ out:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+void SensorListener::setCallbacks(orientation_callback_t orientation_cb, void *cookie) {
+ LOG_FUNCTION_NAME;
+
+ if (orientation_cb) {
+ mOrientationCb = orientation_cb;
+ }
+ mCbCookie = cookie;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void SensorListener::handleOrientation(uint32_t orientation, uint32_t tilt) {
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(&mLock);
+
+ if (mOrientationCb && (sensorsEnabled & SENSOR_ORIENTATION)) {
+ mOrientationCb(orientation, tilt, mCbCookie);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void SensorListener::enableSensor(sensor_type_t type) {
+ Sensor const* sensor;
+ SensorManager& mgr(SensorManager::getInstance());
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(&mLock);
+
+ if ((type & SENSOR_ORIENTATION) && !(sensorsEnabled & SENSOR_ORIENTATION)) {
+ sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
+ CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
+ mSensorEventQueue->enableSensor(sensor);
+ mSensorEventQueue->setEventRate(sensor, ms2ns(100));
+ sensorsEnabled |= SENSOR_ORIENTATION;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void SensorListener::disableSensor(sensor_type_t type) {
+ Sensor const* sensor;
+ SensorManager& mgr(SensorManager::getInstance());
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(&mLock);
+
+ if ((type & SENSOR_ORIENTATION) && (sensorsEnabled & SENSOR_ORIENTATION)) {
+ sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
+ CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
+ mSensorEventQueue->disableSensor(sensor);
+ sensorsEnabled &= ~SENSOR_ORIENTATION;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+} // namespace android
diff --git a/camera/TICameraParameters.cpp b/camera/TICameraParameters.cpp
new file mode 100644
index 0000000..2980645
--- /dev/null
+++ b/camera/TICameraParameters.cpp
@@ -0,0 +1,202 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+
+#define LOG_TAG "CameraHAL"
+#include <utils/Log.h>
+
+#include <string.h>
+#include <stdlib.h>
+#include <TICameraParameters.h>
+#include "CameraHal.h"
+
+namespace android {
+
+//TI extensions to camera mode
+const char TICameraParameters::HIGH_PERFORMANCE_MODE[] = "high-performance";
+const char TICameraParameters::HIGH_QUALITY_MODE[] = "high-quality";
+const char TICameraParameters::HIGH_QUALITY_ZSL_MODE[] = "high-quality-zsl";
+const char TICameraParameters::VIDEO_MODE[] = "video-mode";
+
+// TI extensions to standard android Parameters
+const char TICameraParameters::KEY_SUPPORTED_CAMERAS[] = "camera-indexes";
+const char TICameraParameters::KEY_CAMERA[] = "camera-index";
+const char TICameraParameters::KEY_SHUTTER_ENABLE[] = "shutter-enable";
+const char TICameraParameters::KEY_CAMERA_NAME[] = "camera-name";
+const char TICameraParameters::KEY_BURST[] = "burst-capture";
+const char TICameraParameters::KEY_CAP_MODE[] = "mode";
+const char TICameraParameters::KEY_VSTAB[] = "vstab";
+const char TICameraParameters::KEY_VSTAB_VALUES[] = "vstab-values";
+const char TICameraParameters::KEY_VNF[] = "vnf";
+const char TICameraParameters::KEY_SATURATION[] = "saturation";
+const char TICameraParameters::KEY_BRIGHTNESS[] = "brightness";
+const char TICameraParameters::KEY_EXPOSURE_MODE[] = "exposure";
+const char TICameraParameters::KEY_SUPPORTED_EXPOSURE[] = "exposure-mode-values";
+const char TICameraParameters::KEY_CONTRAST[] = "contrast";
+const char TICameraParameters::KEY_SHARPNESS[] = "sharpness";
+const char TICameraParameters::KEY_ISO[] = "iso";
+const char TICameraParameters::KEY_SUPPORTED_ISO_VALUES[] = "iso-mode-values";
+const char TICameraParameters::KEY_SUPPORTED_IPP[] = "ipp-values";
+const char TICameraParameters::KEY_IPP[] = "ipp";
+const char TICameraParameters::KEY_MAN_EXPOSURE[] = "manual-exposure";
+const char TICameraParameters::KEY_METERING_MODE[] = "meter-mode";
+const char TICameraParameters::KEY_PADDED_WIDTH[] = "padded-width";
+const char TICameraParameters::KEY_PADDED_HEIGHT[] = "padded-height";
+const char TICameraParameters::KEY_EXP_BRACKETING_RANGE[] = "exp-bracketing-range";
+const char TICameraParameters::KEY_TEMP_BRACKETING[] = "temporal-bracketing";
+const char TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS[] = "temporal-bracketing-range-positive";
+const char TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG[] = "temporal-bracketing-range-negative";
+const char TICameraParameters::KEY_S3D_SUPPORTED[] = "s3d-supported";
+const char TICameraParameters::KEY_MEASUREMENT_ENABLE[] = "measurement";
+const char TICameraParameters::KEY_GBCE[] = "gbce";
+const char TICameraParameters::KEY_GLBCE[] = "glbce";
+const char TICameraParameters::KEY_CURRENT_ISO[] = "current-iso";
+const char TICameraParameters::KEY_SENSOR_ORIENTATION[] = "sensor-orientation";
+const char TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES[] = "sensor-orientation-values";
+const char TICameraParameters::KEY_MINFRAMERATE[] = "min-framerate";
+const char TICameraParameters::KEY_MAXFRAMERATE[] = "max-framerate";
+
+//TI extensions for enabling/disabling GLBCE
+const char TICameraParameters::GLBCE_ENABLE[] = "enable";
+const char TICameraParameters::GLBCE_DISABLE[] = "disable";
+
+//TI extensions for enabling/disabling GBCE
+const char TICameraParameters::GBCE_ENABLE[] = "enable";
+const char TICameraParameters::GBCE_DISABLE[] = "disable";
+
+//TI extensions for enabling/disabling measurement
+const char TICameraParameters::MEASUREMENT_ENABLE[] = "enable";
+const char TICameraParameters::MEASUREMENT_DISABLE[] = "disable";
+
+//TI extensions for zoom
+const char TICameraParameters::ZOOM_SUPPORTED[] = "true";
+const char TICameraParameters::ZOOM_UNSUPPORTED[] = "false";
+
+// TI extensions for 2D Preview in Stereo Mode
+const char TICameraParameters::KEY_S3D2D_PREVIEW[] = "s3d2d-preview";
+const char TICameraParameters::KEY_S3D2D_PREVIEW_MODE[] = "s3d2d-preview-values";
+
+//TI extensions for SAC/SMC
+const char TICameraParameters::KEY_AUTOCONVERGENCE[] = "auto-convergence";
+const char TICameraParameters::KEY_AUTOCONVERGENCE_MODE[] = "auto-convergence-mode";
+const char TICameraParameters::KEY_MANUALCONVERGENCE_VALUES[] = "manual-convergence-values";
+
+//TI extensions for setting EXIF tags
+const char TICameraParameters::KEY_EXIF_MODEL[] = "exif-model";
+const char TICameraParameters::KEY_EXIF_MAKE[] = "exif-make";
+
+//TI extensions for additional GPS data
+const char TICameraParameters::KEY_GPS_MAPDATUM[] = "gps-mapdatum";
+const char TICameraParameters::KEY_GPS_VERSION[] = "gps-version";
+const char TICameraParameters::KEY_GPS_DATESTAMP[] = "gps-datestamp";
+
+//TI extensions for enabling/disabling shutter sound
+const char TICameraParameters::SHUTTER_ENABLE[] = "true";
+const char TICameraParameters::SHUTTER_DISABLE[] = "false";
+
+//TI extensions for Temporal Bracketing
+const char TICameraParameters::BRACKET_ENABLE[] = "enable";
+const char TICameraParameters::BRACKET_DISABLE[] = "disable";
+
+//TI extensions to Image post-processing
+const char TICameraParameters::IPP_LDCNSF[] = "ldc-nsf";
+const char TICameraParameters::IPP_LDC[] = "ldc";
+const char TICameraParameters::IPP_NSF[] = "nsf";
+const char TICameraParameters::IPP_NONE[] = "off";
+
+// TI extensions to standard android pixel formats
+const char TICameraParameters::PIXEL_FORMAT_RAW[] = "raw";
+const char TICameraParameters::PIXEL_FORMAT_JPS[] = "jps";
+const char TICameraParameters::PIXEL_FORMAT_MPO[] = "mpo";
+const char TICameraParameters::PIXEL_FORMAT_RAW_JPEG[] = "raw+jpeg";
+const char TICameraParameters::PIXEL_FORMAT_RAW_MPO[] = "raw+mpo";
+
+// TI extensions to standard android scene mode settings
+const char TICameraParameters::SCENE_MODE_SPORT[] = "sport";
+const char TICameraParameters::SCENE_MODE_CLOSEUP[] = "closeup";
+const char TICameraParameters::SCENE_MODE_AQUA[] = "aqua";
+const char TICameraParameters::SCENE_MODE_SNOWBEACH[] = "snow-beach";
+const char TICameraParameters::SCENE_MODE_MOOD[] = "mood";
+const char TICameraParameters::SCENE_MODE_NIGHT_INDOOR[] = "night-indoor";
+const char TICameraParameters::SCENE_MODE_DOCUMENT[] = "document";
+const char TICameraParameters::SCENE_MODE_BARCODE[] = "barcode";
+const char TICameraParameters::SCENE_MODE_VIDEO_SUPER_NIGHT[] = "super-night";
+const char TICameraParameters::SCENE_MODE_VIDEO_CINE[] = "cine";
+const char TICameraParameters::SCENE_MODE_VIDEO_OLD_FILM[] = "old-film";
+
+// TI extensions to standard android white balance values.
+const char TICameraParameters::WHITE_BALANCE_TUNGSTEN[] = "tungsten";
+const char TICameraParameters::WHITE_BALANCE_HORIZON[] = "horizon";
+const char TICameraParameters::WHITE_BALANCE_SUNSET[] = "sunset";
+const char TICameraParameters::WHITE_BALANCE_FACE[] = "face-priority";
+
+// TI extensions to standard android focus modes.
+const char TICameraParameters::FOCUS_MODE_PORTRAIT[] = "portrait";
+const char TICameraParameters::FOCUS_MODE_EXTENDED[] = "extended";
+const char TICameraParameters::FOCUS_MODE_FACE[] = "face-priority";
+
+// TI extensions to add values for effect settings.
+const char TICameraParameters::EFFECT_NATURAL[] = "natural";
+const char TICameraParameters::EFFECT_VIVID[] = "vivid";
+const char TICameraParameters::EFFECT_COLOR_SWAP[] = "color-swap";
+const char TICameraParameters::EFFECT_BLACKWHITE[] = "blackwhite";
+
+// TI extensions to add exposure preset modes
+const char TICameraParameters::EXPOSURE_MODE_OFF[] = "off";
+const char TICameraParameters::EXPOSURE_MODE_AUTO[] = "auto";
+const char TICameraParameters::EXPOSURE_MODE_NIGHT[] = "night";
+const char TICameraParameters::EXPOSURE_MODE_BACKLIGHT[] = "backlighting";
+const char TICameraParameters::EXPOSURE_MODE_SPOTLIGHT[] = "spotlight";
+const char TICameraParameters::EXPOSURE_MODE_SPORTS[] = "sports";
+const char TICameraParameters::EXPOSURE_MODE_SNOW[] = "snow";
+const char TICameraParameters::EXPOSURE_MODE_BEACH[] = "beach";
+const char TICameraParameters::EXPOSURE_MODE_APERTURE[] = "aperture";
+const char TICameraParameters::EXPOSURE_MODE_SMALL_APERTURE[] = "small-aperture";
+const char TICameraParameters::EXPOSURE_MODE_FACE[] = "face-priority";
+
+// TI extensions to add iso values
+const char TICameraParameters::ISO_MODE_AUTO[] = "auto";
+const char TICameraParameters::ISO_MODE_100[] = "100";
+const char TICameraParameters::ISO_MODE_200[] = "200";
+const char TICameraParameters::ISO_MODE_400[] = "400";
+const char TICameraParameters::ISO_MODE_800[] = "800";
+const char TICameraParameters::ISO_MODE_1000[] = "1000";
+const char TICameraParameters::ISO_MODE_1200[] = "1200";
+const char TICameraParameters::ISO_MODE_1600[] = "1600";
+
+// TI extensions to add auto convergence values
+const char TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE[] = "mode-disable";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_FRAME[] = "mode-frame";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_CENTER[] = "mode-center";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_FFT[] = "mode-fft";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL[] = "mode-manual";
+
+//TI values for camera direction
+const char TICameraParameters::FACING_FRONT[]="front";
+const char TICameraParameters::FACING_BACK[]="back";
+
+//TI extensions to flash settings
+const char TICameraParameters::FLASH_MODE_FILL_IN[] = "fill-in";
+
+//TI extensions to add sensor orientation parameters
+const char TICameraParameters::ORIENTATION_SENSOR_NONE[] = "0";
+const char TICameraParameters::ORIENTATION_SENSOR_90[] = "90";
+const char TICameraParameters::ORIENTATION_SENSOR_180[] = "180";
+const char TICameraParameters::ORIENTATION_SENSOR_270[] = "270";
+};
+
diff --git a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
new file mode 100644
index 0000000..446a809
--- /dev/null
+++ b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -0,0 +1,615 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file V4LCameraAdapter.cpp
+*
+* This file maps the Camera Hardware Interface to V4L2.
+*
+*/
+
+
+#include "V4LCameraAdapter.h"
+#include "CameraHal.h"
+#include "TICameraParameters.h"
+#include <signal.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/select.h>
+#include <linux/videodev.h>
+
+
+#include <cutils/properties.h>
+#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
+static int mDebugFps = 0;
+
+#define Q16_OFFSET 16
+
+#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);}
+
+namespace android {
+
+#undef LOG_TAG
+///Maintain a separate tag for V4LCameraAdapter logs to isolate them from OMX-specific issues
+#define LOG_TAG "CameraHAL"
+
+//frames skipped before recalculating the framerate
+#define FPS_PERIOD 30
+
+static V4LCameraAdapter *gCameraAdapter = NULL;
+Mutex gAdapterLock;
+const char *device = DEVICE;
+
+
+/*--------------------Camera Adapter Class STARTS here-----------------------------*/
+
+status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps, int sensor_index)
+{
+ LOG_FUNCTION_NAME;
+
+ char value[PROPERTY_VALUE_MAX];
+ property_get("debug.camera.showfps", value, "0");
+ mDebugFps = atoi(value);
+
+ int ret = NO_ERROR;
+
+ // Allocate memory for video info structure
+ mVideoInfo = (struct VideoInfo *) calloc (1, sizeof (struct VideoInfo));
+ if(!mVideoInfo)
+ {
+ return NO_MEMORY;
+ }
+
+ if ((mCameraHandle = open(device, O_RDWR)) == -1)
+ {
+ CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
+ return -EINVAL;
+ }
+
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
+ if (ret < 0)
+ {
+ CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
+ return -EINVAL;
+ }
+
+ if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0)
+ {
+ CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
+ return -EINVAL;
+ }
+
+ if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING))
+ {
+ CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
+ return -EINVAL;
+ }
+
+ // Initialize flags
+ mPreviewing = false;
+ mVideoInfo->isStreaming = false;
+ mRecording = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+{
+
+ status_t ret = NO_ERROR;
+
+ if ( !mVideoInfo->isStreaming )
+ {
+ return NO_ERROR;
+ }
+
+ int i = mPreviewBufs.valueFor(( unsigned int )frameBuf);
+ if(i<0)
+ {
+ return BAD_VALUE;
+ }
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("Init: VIDIOC_QBUF Failed");
+ return -1;
+ }
+
+ nQueued++;
+
+ return ret;
+
+}
+
+status_t V4LCameraAdapter::setParameters(const CameraParameters &params)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+
+ int width, height;
+
+ params.getPreviewSize(&width, &height);
+
+ CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);
+
+ mVideoInfo->width = width;
+ mVideoInfo->height = height;
+ mVideoInfo->framesizeIn = (width * height << 1);
+ mVideoInfo->formatIn = DEFAULT_PIXEL_FORMAT;
+
+ mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->format.fmt.pix.width = width;
+ mVideoInfo->format.fmt.pix.height = height;
+ mVideoInfo->format.fmt.pix.pixelformat = DEFAULT_PIXEL_FORMAT;
+
+ ret = ioctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
+ if (ret < 0) {
+ CAMHAL_LOGEB("Open: VIDIOC_S_FMT Failed: %s", strerror(errno));
+ return ret;
+ }
+
+    // Update the current parameter set
+ mParams = params;
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+
+void V4LCameraAdapter::getParameters(CameraParameters& params)
+{
+ LOG_FUNCTION_NAME;
+
+ // Return the current parameter set
+ params = mParams;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+///API to give the buffers to Adapter
+status_t V4LCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ switch(mode)
+ {
+ case CAMERA_PREVIEW:
+ ret = UseBuffersPreview(bufArr, num);
+ break;
+
+ //@todo Insert Image capture case here
+
+ case CAMERA_VIDEO:
+ //@warn Video capture is not fully supported yet
+ ret = UseBuffersPreview(bufArr, num);
+ break;
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num)
+{
+ int ret = NO_ERROR;
+
+ if(NULL == bufArr)
+ {
+ return BAD_VALUE;
+ }
+
+ //First allocate adapter internal buffers at V4L level for USB Cam
+ //These are the buffers from which we will copy the data into overlay buffers
+ /* Check if camera can handle NB_BUFFER buffers */
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
+ mVideoInfo->rb.count = num;
+
+ ret = ioctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+ return ret;
+ }
+
+ for (int i = 0; i < num; i++) {
+
+ memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->mem[i] = mmap (0,
+ mVideoInfo->buf.length,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ mCameraHandle,
+ mVideoInfo->buf.m.offset);
+
+ if (mVideoInfo->mem[i] == MAP_FAILED) {
+ CAMHAL_LOGEB("Unable to map buffer (%s)", strerror(errno));
+ return -1;
+ }
+
+ uint32_t *ptr = (uint32_t*) bufArr;
+
+ //Associate each Camera internal buffer with the one from Overlay
+ mPreviewBufs.add((int)ptr[i], i);
+
+ }
+
+ // Update the preview buffer count
+ mPreviewBufferCount = num;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::startPreview()
+{
+ status_t ret = NO_ERROR;
+
+ Mutex::Autolock lock(mPreviewBufsLock);
+
+ if(mPreviewing)
+ {
+ return BAD_VALUE;
+ }
+
+ for (int i = 0; i < mPreviewBufferCount; i++) {
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ return -EINVAL;
+ }
+
+ nQueued++;
+ }
+
+ enum v4l2_buf_type bufType;
+ if (!mVideoInfo->isStreaming) {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ ret = ioctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->isStreaming = true;
+ }
+
+ // Create and start preview thread for receiving buffers from V4L Camera
+ mPreviewThread = new PreviewThread(this);
+
+ CAMHAL_LOGDA("Created preview thread");
+
+
+ //Update the flag to indicate we are previewing
+ mPreviewing = true;
+
+ return ret;
+
+}
+
+status_t V4LCameraAdapter::stopPreview()
+{
+ enum v4l2_buf_type bufType;
+ int ret = NO_ERROR;
+
+ Mutex::Autolock lock(mPreviewBufsLock);
+
+ if(!mPreviewing)
+ {
+ return NO_INIT;
+ }
+
+ if (mVideoInfo->isStreaming) {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ ret = ioctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->isStreaming = false;
+ }
+
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ nQueued = 0;
+ nDequeued = 0;
+
+ /* Unmap buffers */
+ for (int i = 0; i < mPreviewBufferCount; i++)
+ if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0)
+ CAMHAL_LOGEA("Unmap failed");
+
+ mPreviewBufs.clear();
+
+ mPreviewThread->requestExitAndWait();
+ mPreviewThread.clear();
+
+ return ret;
+
+}
+
+char * V4LCameraAdapter::GetFrame(int &index)
+{
+ int ret;
+
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ /* DQ */
+ ret = ioctl(mCameraHandle, VIDIOC_DQBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("GetFrame: VIDIOC_DQBUF Failed");
+ return NULL;
+ }
+ nDequeued++;
+
+ index = mVideoInfo->buf.index;
+
+ return (char *)mVideoInfo->mem[mVideoInfo->buf.index];
+}
+
+//API to get the frame size required to be allocated. This size is used to override the size passed
+//by camera service when VSTAB/VNF is turned ON for example
+status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
+{
+ status_t ret = NO_ERROR;
+
+ // Just return the current preview size, nothing more to do here.
+ mParams.getPreviewSize(( int * ) &width,
+ ( int * ) &height);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount)
+{
+ // We don't support meta data, so simply return
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+{
+ // We don't support image capture yet, safely return from here without messing up
+ return NO_ERROR;
+}
+
+static void debugShowFPS()
+{
+ static int mFrameCount = 0;
+ static int mLastFrameCount = 0;
+ static nsecs_t mLastFpsTime = 0;
+ static float mFps = 0;
+ mFrameCount++;
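+    // Recompute and log the frame rate once every 32 frames.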
+ if (!(mFrameCount & 0x1F)) {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ LOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ }
+    // XXX: mFps has the value we want
+}
+
+status_t V4LCameraAdapter::recalculateFPS()
+{
+ float currentFPS;
+
+ mFrameCount++;
+
+ if ( ( mFrameCount % FPS_PERIOD ) == 0 )
+ {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFPSTime;
+ currentFPS = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFPSTime = now;
+ mLastFrameCount = mFrameCount;
+
+ if ( 1 == mIter )
+ {
+ mFPS = currentFPS;
+ }
+ else
+ {
+ //cumulative moving average
+ mFPS = mLastFPS + (currentFPS - mLastFPS)/mIter;
+ }
+
+ mLastFPS = mFPS;
+ mIter++;
+ }
+
+ return NO_ERROR;
+}
+
+void V4LCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+V4LCameraAdapter::V4LCameraAdapter()
+{
+ LOG_FUNCTION_NAME;
+
+ // Nothing useful to do in the constructor
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+V4LCameraAdapter::~V4LCameraAdapter()
+{
+ LOG_FUNCTION_NAME;
+
+ // Close the camera handle and free the video info structure
+ close(mCameraHandle);
+
+ if (mVideoInfo)
+ {
+ free(mVideoInfo);
+ mVideoInfo = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/* Preview Thread */
+// ---------------------------------------------------------------------------
+
+int V4LCameraAdapter::previewThread()
+{
+ status_t ret = NO_ERROR;
+ int width, height;
+ CameraFrame frame;
+
+ if (mPreviewing)
+ {
+ int index = 0;
+ char *fp = this->GetFrame(index);
+ if(!fp)
+ {
+ return BAD_VALUE;
+ }
+
+ uint8_t* ptr = (uint8_t*) mPreviewBufs.keyAt(index);
+
+ uint16_t* dest = (uint16_t*)ptr;
+ uint16_t* src = (uint16_t*) fp;
+ mParams.getPreviewSize(&width, &height);
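+        // Copy the V4L2 frame into the display buffer, swapping YUYV byte order
+        // to UYVY; destination rows are assumed to be padded to a 4096-byte
+        // stride (hence the "dest += 4096/2 - width" row advance below).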
+ for(int i=0;i<height;i++)
+ {
+ for(int j=0;j<width;j++)
+ {
+ //*dest = *src;
+ //convert from YUYV to UYVY supported in Camera service
+ *dest = (((*src & 0xFF000000)>>24)<<16)|(((*src & 0x00FF0000)>>16)<<24) |
+ (((*src & 0xFF00)>>8)<<0)|(((*src & 0x00FF)>>0)<<8);
+ src++;
+ dest++;
+ }
+ dest += 4096/2-width;
+ }
+
+ mParams.getPreviewSize(&width, &height);
+ frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
+ frame.mBuffer = ptr;
+ frame.mLength = width*height*2;
+ frame.mAlignment = width*2;
+ frame.mOffset = 0;
+        frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
+
+ ret = sendFrameToSubscribers(&frame);
+
+ }
+
+ return ret;
+}
+
+extern "C" CameraAdapter* CameraAdapter_Factory()
+{
+ Mutex::Autolock lock(gAdapterLock);
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == gCameraAdapter )
+ {
+ CAMHAL_LOGDA("Creating new Camera adapter instance");
+ gCameraAdapter= new V4LCameraAdapter();
+ }
+ else
+ {
+ CAMHAL_LOGDA("Reusing existing Camera adapter instance");
+ }
+
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return gCameraAdapter;
+}
+
+extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
+ const unsigned int starting_camera,
+ const unsigned int max_camera) {
+ int num_cameras_supported = 0;
+ CameraProperties::Properties* properties = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if(!properties_array)
+ {
+ return -EINVAL;
+ }
+
+ // TODO: Need to tell camera properties what other cameras we can support
+ if (starting_camera + num_cameras_supported < max_camera) {
+ num_cameras_supported++;
+ properties = properties_array + starting_camera;
+ properties->set(CameraProperties::CAMERA_NAME, "USBCamera");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return num_cameras_supported;
+}
+
+};
+
+
+/*--------------------Camera Adapter Class ENDS here-----------------------------*/
+
diff --git a/camera/inc/ANativeWindowDisplayAdapter.h b/camera/inc/ANativeWindowDisplayAdapter.h
new file mode 100644
index 0000000..89c2cf1
--- /dev/null
+++ b/camera/inc/ANativeWindowDisplayAdapter.h
@@ -0,0 +1,188 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#include "CameraHal.h"
+#include <ui/egl/android_natives.h>
+#include <ui/GraphicBufferMapper.h>
+#include <hal_public.h>
+
+//temporarily define format here
+#define HAL_PIXEL_FORMAT_NV12 0x100
+
+namespace android {
+
+/**
+ * Display handler class - This class basically handles the buffer posting to display
+ */
+
+class ANativeWindowDisplayAdapter : public DisplayAdapter
+{
+public:
+
+ typedef struct
+ {
+ void *mBuffer;
+ void *mUser;
+ int mOffset;
+ int mWidth;
+ int mHeight;
+ int mWidthStride;
+ int mHeightStride;
+ int mLength;
+ CameraFrame::FrameType mType;
+ } DisplayFrame;
+
+ enum DisplayStates
+ {
+ DISPLAY_INIT = 0,
+ DISPLAY_STARTED,
+ DISPLAY_STOPPED,
+ DISPLAY_EXITED
+ };
+
+public:
+
+ ANativeWindowDisplayAdapter();
+ virtual ~ANativeWindowDisplayAdapter();
+
+ ///Initializes the display adapter creates any resources required
+ virtual status_t initialize();
+
+ virtual int setPreviewWindow(struct preview_stream_ops *window);
+ virtual int setFrameProvider(FrameNotifier *frameProvider);
+ virtual int setErrorHandler(ErrorNotifier *errorNotifier);
+ virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL, S3DParameters *s3dParams = NULL);
+ virtual int disableDisplay();
+ virtual status_t pauseDisplay(bool pause);
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //Used for shot to snapshot measurement
+ virtual status_t setSnapshotTimeRef(struct timeval *refTime = NULL);
+
+#endif
+
+ virtual int useBuffers(void* bufArr, int num);
+ virtual bool supportsExternalBuffering();
+
+ //Implementation of inherited interfaces
+ virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs);
+ virtual uint32_t * getOffsets() ;
+ virtual int getFd() ;
+ virtual int freeBuffer(void* buf);
+
+ virtual int maxQueueableBuffers(unsigned int& queueable);
+
+ ///Class specific functions
+ static void frameCallbackRelay(CameraFrame* caFrame);
+ void frameCallback(CameraFrame* caFrame);
+
+ void displayThread();
+
+ private:
+ void destroy();
+ bool processHalMsg();
+ status_t PostFrame(ANativeWindowDisplayAdapter::DisplayFrame &dispFrame);
+ bool handleFrameReturn();
+
+public:
+
+ static const int DISPLAY_TIMEOUT;
+ static const int FAILED_DQS_TO_SUSPEND;
+
+ class DisplayThread : public Thread
+ {
+ ANativeWindowDisplayAdapter* mDisplayAdapter;
+ TIUTILS::MessageQueue mDisplayThreadQ;
+
+ public:
+ DisplayThread(ANativeWindowDisplayAdapter* da)
+ : Thread(false), mDisplayAdapter(da) { }
+
+ ///Returns a reference to the display message Q for display adapter to post messages
+ TIUTILS::MessageQueue& msgQ()
+ {
+ return mDisplayThreadQ;
+ }
+
+ virtual bool threadLoop()
+ {
+ mDisplayAdapter->displayThread();
+ return false;
+ }
+
+ enum DisplayThreadCommands
+ {
+ DISPLAY_START,
+ DISPLAY_STOP,
+ DISPLAY_FRAME,
+ DISPLAY_EXIT
+ };
+ };
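+
+ /* Illustrative sketch (assumed usage, not part of this change): the adapter posts
+ * DisplayThreadCommands into the thread's queue, which processHalMsg() drains, e.g.:
+ *
+ * TIUTILS::Message msg;
+ * msg.command = DisplayThread::DISPLAY_START;
+ * mDisplayThread->msgQ().put(&msg);
+ */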
+
+ //friend declarations
+friend class DisplayThread;
+
+private:
+ int postBuffer(void* displayBuf);
+
+private:
+ bool mFirstInit;
+ bool mSuspend;
+ int mFailedDQs;
+ bool mPaused; //Pause state
+ preview_stream_ops_t* mANativeWindow;
+ sp<DisplayThread> mDisplayThread;
+ FrameProvider *mFrameProvider; ///Pointer to the frame provider interface
+ TIUTILS::MessageQueue mDisplayQ;
+ unsigned int mDisplayState;
+ ///@todo Have a common class for these members
+ mutable Mutex mLock;
+ bool mDisplayEnabled;
+ int mBufferCount;
+ buffer_handle_t** mBufferHandleMap;
+ IMG_native_handle_t** mGrallocHandleMap;
+ uint32_t* mOffsetsMap;
+ int mFD;
+ KeyedVector<int, int> mFramesWithCameraAdapterMap;
+ sp<ErrorNotifier> mErrorNotifier;
+
+ uint32_t mFrameWidth;
+ uint32_t mFrameHeight;
+ uint32_t mPreviewWidth;
+ uint32_t mPreviewHeight;
+
+ uint32_t mXOff;
+ uint32_t mYOff;
+
+ const char *mPixelFormat;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ //Used for calculating standby to first shot
+ struct timeval mStandbyToShot;
+ bool mMeasureStandby;
+ //Used for shot to snapshot/shot calculation
+ struct timeval mStartCapture;
+ bool mShotToShot;
+
+#endif
+
+};
+
+};
+
diff --git a/camera/inc/BaseCameraAdapter.h b/camera/inc/BaseCameraAdapter.h
new file mode 100644
index 0000000..15fb73f
--- /dev/null
+++ b/camera/inc/BaseCameraAdapter.h
@@ -0,0 +1,245 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef BASE_CAMERA_ADAPTER_H
+#define BASE_CAMERA_ADAPTER_H
+
+#include "CameraHal.h"
+
+namespace android {
+
+class BaseCameraAdapter : public CameraAdapter
+{
+
+public:
+
+ BaseCameraAdapter();
+ virtual ~BaseCameraAdapter();
+
+ ///Initializes the camera adapter and creates any resources required
+ virtual status_t initialize(CameraProperties::Properties*, int sensor_index = 0) = 0;
+
+ virtual int setErrorHandler(ErrorNotifier *errorNotifier);
+
+ //Message/Frame notification APIs
+ virtual void enableMsgType(int32_t msgs, frame_callback callback=NULL, event_callback eventCb=NULL, void* cookie=NULL);
+ virtual void disableMsgType(int32_t msgs, void* cookie);
+ virtual void returnFrame(void * frameBuf, CameraFrame::FrameType frameType);
+
+ //APIs to configure Camera adapter and get the current parameter set
+ virtual status_t setParameters(const CameraParameters& params) = 0;
+ virtual void getParameters(CameraParameters& params) = 0;
+
+ //API to send a command to the camera
+ virtual status_t sendCommand(CameraCommands operation, int value1 = 0, int value2 = 0, int value3 = 0 );
+
+ virtual status_t registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data);
+
+ virtual status_t registerEndCaptureCallback(end_image_capture_callback callback, void *user_data);
+
+ //Retrieves the current Adapter state
+ virtual AdapterState getState();
+ //Retrieves the next Adapter state
+ virtual AdapterState getNextState();
+
+protected:
+ //The first two methods will try to switch the adapter state.
+ //Every call to setState() should be followed by a corresponding
+ //call to commitState(). If the state switch fails, then it will
+ //get reset to the previous state via rollbackState().
+ virtual status_t setState(CameraCommands operation);
+ virtual status_t commitState();
+ virtual status_t rollbackState();
+
+ // Retrieves the current Adapter state - for internal use (not locked)
+ virtual status_t getState(AdapterState &state);
+ // Retrieves the next Adapter state - for internal use (not locked)
+ virtual status_t getNextState(AdapterState &state);
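+
+ // Illustrative sketch (assumption, not part of this change) of the state-switch
+ // protocol described above, e.g. in a deriving adapter:
+ //
+ // status_t ret = setState(CAMERA_START_PREVIEW);
+ // if ( NO_ERROR == ret ) {
+ // ret = startPreview(); // adapter specific work
+ // }
+ // if ( NO_ERROR == ret ) {
+ // commitState();
+ // } else {
+ // rollbackState();
+ // }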
+
+ //-----------Interface that needs to be implemented by deriving classes --------------------
+
+ //Should be implemented by deriving classes in order to start image capture
+ virtual status_t takePicture();
+
+ //Should be implemented by deriving classes in order to stop image capture
+ virtual status_t stopImageCapture();
+
+ //Should be implemented by deriving classes in order to start temporal bracketing
+ virtual status_t startBracketing(int range);
+
+ //Should be implemented by deriving classes in order to stop temporal bracketing
+ virtual status_t stopBracketing();
+
+ //Should be implemented by deriving classes in order to initiate autoFocus
+ virtual status_t autoFocus();
+
+ //Should be implemented by deriving classes in order to cancel autoFocus
+ virtual status_t cancelAutoFocus();
+
+ //Should be called by deriving classes in order to do some bookkeeping
+ virtual status_t startVideoCapture();
+
+ //Should be called by deriving classes in order to do some bookkeeping
+ virtual status_t stopVideoCapture();
+
+ //Should be implemented by deriving classes in order to start camera preview
+ virtual status_t startPreview();
+
+ //Should be implemented by deriving classes in order to stop camera preview
+ virtual status_t stopPreview();
+
+ //Should be implemented by deriving classes in order to start smooth zoom
+ virtual status_t startSmoothZoom(int targetIdx);
+
+ //Should be implemented by deriving classes in order to stop smooth zoom
+ virtual status_t stopSmoothZoom();
+
+ //Should be implemented by deriving classes in order to use the buffers allocated for a given mode
+ virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable);
+
+ //Should be implemented by deriving classes in order to queue a released buffer in CameraAdapter
+ virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType);
+
+ //API to get the frame size required to be allocated. This size is used to override the size passed
+ //by camera service when VSTAB/VNF is turned ON for example
+ virtual status_t getFrameSize(size_t &width, size_t &height);
+
+ //API to get required data frame size
+ virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
+
+ //API to get required picture buffers size with the current configuration in CameraParameters
+ virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount);
+
+ // Should be implemented by deriving classes in order to start face detection
+ // ( if supported )
+ virtual status_t startFaceDetection();
+
+ // Should be implemented by deriving classes in order to stop face detection
+ // ( if supported )
+ virtual status_t stopFaceDetection();
+
+ // Receive orientation events from CameraHal
+ virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);
+
+ // ---------------------Interface ends-----------------------------------
+
+ status_t notifyFocusSubscribers(bool status);
+ status_t notifyShutterSubscribers();
+ status_t notifyZoomSubscribers(int zoomIdx, bool targetReached);
+ status_t notifyFaceSubscribers(sp<CameraFDResult> &faces);
+
+ //Send the frame to subscribers
+ status_t sendFrameToSubscribers(CameraFrame *frame);
+
+ //Resets the refCount for this particular frame
+ status_t resetFrameRefCount(CameraFrame &frame);
+
+ //A couple of helper functions
+ void setFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType, int refCount);
+ int getFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType);
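+
+ // Illustrative sketch (assumption, not part of this change): a deriving adapter
+ // typically dispatches a filled buffer to its subscribers as follows:
+ //
+ // CameraFrame frame;
+ // frame.mBuffer = buf;
+ // frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
+ // resetFrameRefCount(frame); // one reference per current subscriber
+ // sendFrameToSubscribers(&frame); // consumers later call returnFrame()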
+
+ enum FrameState {
+ STOPPED = 0,
+ RUNNING
+ };
+
+ enum FrameCommands {
+ START_PREVIEW = 0,
+ START_RECORDING,
+ RETURN_FRAME,
+ STOP_PREVIEW,
+ STOP_RECORDING,
+ DO_AUTOFOCUS,
+ TAKE_PICTURE,
+ FRAME_EXIT
+ };
+
+ enum AdapterCommands {
+ ACK = 0,
+ ERROR
+ };
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ struct timeval mStartFocus;
+ struct timeval mStartCapture;
+
+#endif
+
+ mutable Mutex mReturnFrameLock;
+
+ //Lock protecting the Adapter state
+ mutable Mutex mLock;
+ AdapterState mAdapterState;
+ AdapterState mNextState;
+
+ //Different frame subscribers get stored using these
+ KeyedVector<int, frame_callback> mFrameSubscribers;
+ KeyedVector<int, frame_callback> mFrameDataSubscribers;
+ KeyedVector<int, frame_callback> mVideoSubscribers;
+ KeyedVector<int, frame_callback> mImageSubscribers;
+ KeyedVector<int, frame_callback> mRawSubscribers;
+ KeyedVector<int, event_callback> mFocusSubscribers;
+ KeyedVector<int, event_callback> mZoomSubscribers;
+ KeyedVector<int, event_callback> mShutterSubscribers;
+ KeyedVector<int, event_callback> mFaceSubscribers;
+
+ //Preview buffer management data
+ int *mPreviewBuffers;
+ int mPreviewBufferCount;
+ size_t mPreviewBuffersLength;
+ KeyedVector<int, int> mPreviewBuffersAvailable;
+ mutable Mutex mPreviewBufferLock;
+
+ //Video buffer management data
+ int *mVideoBuffers;
+ KeyedVector<int, int> mVideoBuffersAvailable;
+ int mVideoBuffersCount;
+ size_t mVideoBuffersLength;
+ mutable Mutex mVideoBufferLock;
+
+ //Image buffer management data
+ int *mCaptureBuffers;
+ KeyedVector<int, bool> mCaptureBuffersAvailable;
+ int mCaptureBuffersCount;
+ size_t mCaptureBuffersLength;
+ mutable Mutex mCaptureBufferLock;
+
+ //Metadata buffer management
+ int *mPreviewDataBuffers;
+ KeyedVector<int, bool> mPreviewDataBuffersAvailable;
+ int mPreviewDataBuffersCount;
+ size_t mPreviewDataBuffersLength;
+ mutable Mutex mPreviewDataBufferLock;
+
+ TIUTILS::MessageQueue mFrameQ;
+ TIUTILS::MessageQueue mAdapterQ;
+ mutable Mutex mSubscriberLock;
+ ErrorNotifier *mErrorNotifier;
+ release_image_buffers_callback mReleaseImageBuffersCallback;
+ end_image_capture_callback mEndImageCaptureCallback;
+ void *mReleaseData;
+ void *mEndCaptureData;
+ bool mRecording;
+};
+
+};
+
+#endif //BASE_CAMERA_ADAPTER_H
+
+
diff --git a/camera/inc/CameraHal.h b/camera/inc/CameraHal.h
new file mode 100644
index 0000000..94ed67d
--- /dev/null
+++ b/camera/inc/CameraHal.h
@@ -0,0 +1,1144 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_H
+#define ANDROID_HARDWARE_CAMERA_HARDWARE_H
+
+#include <stdio.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+#include <time.h>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/stat.h>
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <linux/videodev2.h>
+#include "binder/MemoryBase.h"
+#include "binder/MemoryHeapBase.h"
+#include <utils/threads.h>
+#include <camera/CameraParameters.h>
+#include <hardware/camera.h>
+#include "MessageQueue.h"
+#include "Semaphore.h"
+#include "CameraProperties.h"
+#include "DebugUtils.h"
+#include "SensorListener.h"
+
+#define MIN_WIDTH 640
+#define MIN_HEIGHT 480
+#define PICTURE_WIDTH 3264 /* 5mp - 2560. 8mp - 3280 */ /* Make sure it is a multiple of 16. */
+#define PICTURE_HEIGHT 2448 /* 5mp - 2048. 8mp - 2464 */ /* Make sure it is a multiple of 16. */
+#define PREVIEW_WIDTH 176
+#define PREVIEW_HEIGHT 144
+#define PIXEL_FORMAT V4L2_PIX_FMT_UYVY
+
+#define VIDEO_FRAME_COUNT_MAX 8 //NUM_OVERLAY_BUFFERS_REQUESTED
+#define MAX_CAMERA_BUFFERS 8 //NUM_OVERLAY_BUFFERS_REQUESTED
+#define MAX_ZOOM 3
+#define THUMB_WIDTH 80
+#define THUMB_HEIGHT 60
+#define PIX_YUV422I 0
+#define PIX_YUV420P 1
+
+#define SATURATION_OFFSET 100
+#define SHARPNESS_OFFSET 100
+#define CONTRAST_OFFSET 100
+
+//Enables Absolute PPM measurements in logcat
+#define PPM_INSTRUMENTATION_ABS 1
+
+//Uncomment to enable more verbose/debug logs
+//#define DEBUG_LOG
+
+///Camera HAL Logging Functions
+#ifndef DEBUG_LOG
+
+#define CAMHAL_LOGDA(str)
+#define CAMHAL_LOGDB(str, ...)
+#define CAMHAL_LOGVA(str)
+#define CAMHAL_LOGVB(str, ...)
+
+#define CAMHAL_LOGEA LOGE
+#define CAMHAL_LOGEB LOGE
+
+#undef LOG_FUNCTION_NAME
+#undef LOG_FUNCTION_NAME_EXIT
+#define LOG_FUNCTION_NAME
+#define LOG_FUNCTION_NAME_EXIT
+
+#else
+
+#define CAMHAL_LOGDA DBGUTILS_LOGDA
+#define CAMHAL_LOGDB DBGUTILS_LOGDB
+#define CAMHAL_LOGVA DBGUTILS_LOGVA
+#define CAMHAL_LOGVB DBGUTILS_LOGVB
+
+#define CAMHAL_LOGEA DBGUTILS_LOGEA
+#define CAMHAL_LOGEB DBGUTILS_LOGEB
+
+#endif
+
+
+
+#define NONNEG_ASSIGN(x,y) \
+ if ( (x) > -1 ) \
+ (y) = (x)
+
+namespace android {
+
+#define PARAM_BUFFER 6000
+
+///Forward declarations
+class CameraHal;
+class CameraFrame;
+class CameraHalEvent;
+class DisplayFrame;
+
+class CameraArea : public RefBase
+{
+public:
+
+ CameraArea(ssize_t top,
+ ssize_t left,
+ ssize_t bottom,
+ ssize_t right,
+ size_t weight) : mTop(top),
+ mLeft(left),
+ mBottom(bottom),
+ mRight(right),
+ mWeight(weight) {}
+
+ status_t transfrom(size_t width,
+ size_t height,
+ size_t &top,
+ size_t &left,
+ size_t &areaWidth,
+ size_t &areaHeight);
+
+ bool isValid()
+ {
+ return ( ( 0 != mTop ) || ( 0 != mLeft ) || ( 0 != mBottom ) || ( 0 != mRight) );
+ }
+
+ size_t getWeight()
+ {
+ return mWeight;
+ }
+
+ static status_t parseFocusArea(const char *area,
+ size_t areaLength,
+ Vector< sp<CameraArea> > &areas);
+
+private:
+
+ static const ssize_t TOP = -1000;
+ static const ssize_t LEFT = -1000;
+ static const ssize_t BOTTOM = 1000;
+ static const ssize_t RIGHT = 1000;
+
+ ssize_t mTop;
+ ssize_t mLeft;
+ ssize_t mBottom;
+ ssize_t mRight;
+ size_t mWeight;
+};
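+
+/* Illustrative note (assumption, not part of this change): focus/metering areas follow the
+ * Android convention of a (-1000,-1000)..(1000,1000) coordinate space, so transfrom() is
+ * expected to map an area onto the frame roughly as:
+ *
+ *   left = ( mLeft + 1000 ) * width / 2000;
+ *   top  = ( mTop + 1000 ) * height / 2000;
+ */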
+
+class CameraFDResult : public RefBase
+{
+public:
+
+ CameraFDResult() : mFaceData(NULL) {};
+ CameraFDResult(camera_frame_metadata_t *faces) : mFaceData(faces) {};
+
+ virtual ~CameraFDResult() {
+ if ( ( NULL != mFaceData ) && ( NULL != mFaceData->faces ) ) {
+ free(mFaceData->faces);
+ free(mFaceData);
+ }
+ }
+
+ camera_frame_metadata_t *getFaceResult() { return mFaceData; };
+
+ static const ssize_t TOP = -1000;
+ static const ssize_t LEFT = -1000;
+ static const ssize_t BOTTOM = 1000;
+ static const ssize_t RIGHT = 1000;
+ static const ssize_t INVALID_DATA = -2000;
+
+private:
+
+ camera_frame_metadata_t *mFaceData;
+};
+
+class CameraFrame
+{
+ public:
+
+ enum FrameType
+ {
+ PREVIEW_FRAME_SYNC = 0x1, ///SYNC implies that the frame needs to be explicitly returned after consuming in order to be filled by camera again
+ PREVIEW_FRAME = 0x2 , ///Preview frame includes viewfinder and snapshot frames
+ IMAGE_FRAME_SYNC = 0x4, ///Image Frame is the image capture output frame
+ IMAGE_FRAME = 0x8,
+ VIDEO_FRAME_SYNC = 0x10, ///Timestamp will be updated for these frames
+ VIDEO_FRAME = 0x20,
+ FRAME_DATA_SYNC = 0x40, ///Any extra data associated with the frame. Always synced with the frame
+ FRAME_DATA= 0x80,
+ RAW_FRAME = 0x100,
+ SNAPSHOT_FRAME = 0x200,
+ ALL_FRAMES = 0xFFFF ///Maximum of 16 frame types supported
+ };
+
+ //default constructor
+ CameraFrame():
+ mCookie(NULL),
+ mBuffer(NULL),
+ mFrameType(0),
+ mTimestamp(0),
+ mWidth(0),
+ mHeight(0),
+ mOffset(0),
+ mAlignment(0),
+ mFd(0),
+ mLength(0) {}
+
+ //copy constructor
+ CameraFrame(const CameraFrame &frame) :
+ mCookie(frame.mCookie),
+ mBuffer(frame.mBuffer),
+ mFrameType(frame.mFrameType),
+ mTimestamp(frame.mTimestamp),
+ mWidth(frame.mWidth),
+ mHeight(frame.mHeight),
+ mOffset(frame.mOffset),
+ mAlignment(frame.mAlignment),
+ mFd(frame.mFd),
+ mLength(frame.mLength) {}
+
+ void *mCookie;
+ void *mBuffer;
+ int mFrameType;
+ nsecs_t mTimestamp;
+ unsigned int mWidth, mHeight;
+ uint32_t mOffset;
+ unsigned int mAlignment;
+ int mFd;
+ size_t mLength;
+ ///@todo add other member vars like stride etc
+};
+
+///Common Camera Hal Event class which is visible to CameraAdapter, DisplayAdapter and AppCallbackNotifier
+///@todo Rename this class to CameraEvent
+class CameraHalEvent
+{
+public:
+ //Enums
+ enum CameraHalEventType {
+ NO_EVENTS = 0x0,
+ EVENT_FOCUS_LOCKED = 0x1,
+ EVENT_FOCUS_ERROR = 0x2,
+ EVENT_ZOOM_INDEX_REACHED = 0x4,
+ EVENT_SHUTTER = 0x8,
+ EVENT_FACE = 0x10,
+ ///@remarks Future enum related to display, like frame displayed event, could be added here
+ ALL_EVENTS = 0xFFFF ///Maximum of 16 event types supported
+ };
+
+ ///Class declarations
+ ///@remarks Add a new class for a new event type added above
+
+ //Shutter event specific data
+ typedef struct ShutterEventData_t {
+ bool shutterClosed;
+ }ShutterEventData;
+
+ ///Focus event specific data
+ typedef struct FocusEventData_t {
+ bool focusLocked;
+ bool focusError;
+ int currentFocusValue;
+ } FocusEventData;
+
+ ///Zoom specific event data
+ typedef struct ZoomEventData_t {
+ int currentZoomIndex;
+ bool targetZoomIndexReached;
+ } ZoomEventData;
+
+ typedef struct FaceData_t {
+ ssize_t top;
+ ssize_t left;
+ ssize_t bottom;
+ ssize_t right;
+ size_t score;
+ } FaceData;
+
+ typedef sp<CameraFDResult> FaceEventData;
+
+ class CameraHalEventData : public RefBase{
+
+ public:
+
+ CameraHalEvent::FocusEventData focusEvent;
+ CameraHalEvent::ZoomEventData zoomEvent;
+ CameraHalEvent::ShutterEventData shutterEvent;
+ CameraHalEvent::FaceEventData faceEvent;
+ };
+
+ //default constructor
+ CameraHalEvent():
+ mCookie(NULL),
+ mEventType(NO_EVENTS) {}
+
+ //copy constructor
+ CameraHalEvent(const CameraHalEvent &event) :
+ mCookie(event.mCookie),
+ mEventType(event.mEventType),
+ mEventData(event.mEventData) {};
+
+ void* mCookie;
+ CameraHalEventType mEventType;
+ sp<CameraHalEventData> mEventData;
+
+};
+
+/// Have a generic callback class based on template - to adapt CameraFrame and Event
+typedef void (*frame_callback) (CameraFrame *cameraFrame);
+typedef void (*event_callback) (CameraHalEvent *event);
+
+//signals CameraHAL to release image buffers
+typedef void (*release_image_buffers_callback) (void *userData);
+typedef void (*end_image_capture_callback) (void *userData);
+
+/**
+ * Interface class implemented by classes that have some events to communicate to dependent classes
+ * Dependent classes use this interface for registering for events
+ */
+class MessageNotifier
+{
+public:
+ static const uint32_t EVENT_BIT_FIELD_POSITION;
+ static const uint32_t FRAME_BIT_FIELD_POSITION;
+
+ ///@remarks Msg type comes from CameraFrame and CameraHalEvent classes
+ /// MSB 16 bits is for events and LSB 16 bits is for frame notifications
+ /// FrameProvider and EventProvider classes act as helpers to event/frame
+ /// consumers to call this api
+ virtual void enableMsgType(int32_t msgs, frame_callback frameCb=NULL, event_callback eventCb=NULL, void* cookie=NULL) = 0;
+ virtual void disableMsgType(int32_t msgs, void* cookie) = 0;
+
+ virtual ~MessageNotifier() {};
+};
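+
+/* Illustrative sketch (assumption, not part of this change): frame and event bits share one
+ * 32-bit mask, frames in the LSB half and events in the MSB half, e.g.:
+ *
+ *   int32_t msgs = ( CameraHalEvent::EVENT_FOCUS_LOCKED << MessageNotifier::EVENT_BIT_FIELD_POSITION ) |
+ *                  ( CameraFrame::PREVIEW_FRAME_SYNC << MessageNotifier::FRAME_BIT_FIELD_POSITION );
+ */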
+
+class ErrorNotifier : public virtual RefBase
+{
+public:
+ virtual void errorNotify(int error) = 0;
+
+ virtual ~ErrorNotifier() {};
+};
+
+
+/**
+ * Interface class abstraction for Camera Adapter to act as a frame provider
+ * This interface is fully implemented by Camera Adapter
+ */
+class FrameNotifier : public MessageNotifier
+{
+public:
+ virtual void returnFrame(void* frameBuf, CameraFrame::FrameType frameType) = 0;
+
+ virtual ~FrameNotifier() {};
+};
+
+/**
+ * Wrapper class around FrameNotifier, which is used by display and notification classes
+ * for interacting with Camera Adapter
+ */
+class FrameProvider
+{
+ FrameNotifier* mFrameNotifier;
+ void* mCookie;
+ frame_callback mFrameCallback;
+
+public:
+ FrameProvider(FrameNotifier *fn, void* cookie, frame_callback frameCallback)
+ :mFrameNotifier(fn), mCookie(cookie),mFrameCallback(frameCallback) { }
+
+ int enableFrameNotification(int32_t frameTypes);
+ int disableFrameNotification(int32_t frameTypes);
+ int returnFrame(void *frameBuf, CameraFrame::FrameType frameType);
+};
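+
+/* Illustrative sketch (assumption, not part of this change): a consumer such as the
+ * display adapter wraps the camera adapter's FrameNotifier and then subscribes:
+ *
+ *   mFrameProvider = new FrameProvider(frameNotifier, this, frameCallbackRelay);
+ *   mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ *   ...
+ *   mFrameProvider->returnFrame(buf, CameraFrame::PREVIEW_FRAME_SYNC);
+ */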
+
+/** Wrapper class around MessageNotifier, which is used by display and notification classes for interacting with
+ * Camera Adapter
+ */
+class EventProvider
+{
+public:
+ MessageNotifier* mEventNotifier;
+ void* mCookie;
+ event_callback mEventCallback;
+
+public:
+ EventProvider(MessageNotifier *mn, void* cookie, event_callback eventCallback)
+ :mEventNotifier(mn), mCookie(cookie), mEventCallback(eventCallback) {}
+
+ int enableEventNotification(int32_t eventTypes);
+ int disableEventNotification(int32_t eventTypes);
+};
+
+/*
+ * Interface for providing buffers
+ */
+class BufferProvider
+{
+public:
+ virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs) = 0;
+
+ //additional methods used for memory mapping
+ virtual uint32_t * getOffsets() = 0;
+ virtual int getFd() = 0;
+
+ virtual int freeBuffer(void* buf) = 0;
+
+ virtual ~BufferProvider() {}
+};
+
+/**
+ * Class for handling data and notify callbacks to application
+ */
+class AppCallbackNotifier: public ErrorNotifier , public virtual RefBase
+{
+
+public:
+
+ ///Constants
+ static const int NOTIFIER_TIMEOUT;
+ static const int32_t MAX_BUFFERS = 8;
+
+ enum NotifierCommands
+ {
+ NOTIFIER_CMD_PROCESS_EVENT,
+ NOTIFIER_CMD_PROCESS_FRAME,
+ NOTIFIER_CMD_PROCESS_ERROR
+ };
+
+ enum NotifierState
+ {
+ NOTIFIER_STOPPED,
+ NOTIFIER_STARTED,
+ NOTIFIER_EXITED
+ };
+
+public:
+
+ ~AppCallbackNotifier();
+
+ ///Initializes the callback notifier and creates any resources required
+ status_t initialize();
+
+ ///Starts the callbacks to application
+ status_t start();
+
+ ///Stops the callbacks from going to application
+ status_t stop();
+
+ void setEventProvider(int32_t eventMask, MessageNotifier * eventProvider);
+ void setFrameProvider(FrameNotifier *frameProvider);
+
+ //All sub-components of Camera HAL call this whenever any error happens
+ virtual void errorNotify(int error);
+
+ status_t startPreviewCallbacks(CameraParameters &params, void *buffers, uint32_t *offsets, int fd, size_t length, size_t count);
+ status_t stopPreviewCallbacks();
+
+ status_t enableMsgType(int32_t msgType);
+ status_t disableMsgType(int32_t msgType);
+
+ //API for enabling/disabling measurement data
+ void setMeasurements(bool enable);
+
+ //thread loops
+ void notificationThread();
+
+ ///Notification callback functions
+ static void frameCallbackRelay(CameraFrame* caFrame);
+ static void eventCallbackRelay(CameraHalEvent* chEvt);
+ void frameCallback(CameraFrame* caFrame);
+ void eventCallback(CameraHalEvent* chEvt);
+
+ void setCallbacks(CameraHal *cameraHal,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ //Set Burst mode
+ void setBurst(bool burst);
+
+ //Notifications from CameraHal for video recording case
+ status_t startRecording();
+ status_t stopRecording();
+ status_t initSharedVideoBuffers(void *buffers, uint32_t *offsets, int fd, size_t length, size_t count);
+ status_t releaseRecordingFrame(const void *opaque);
+
+ status_t useMetaDataBufferMode(bool enable);
+
+ //Internal class definitions
+ class NotificationThread : public Thread {
+ AppCallbackNotifier* mAppCallbackNotifier;
+ TIUTILS::MessageQueue mNotificationThreadQ;
+ public:
+ enum NotificationThreadCommands
+ {
+ NOTIFIER_START,
+ NOTIFIER_STOP,
+ NOTIFIER_EXIT,
+ };
+ public:
+ NotificationThread(AppCallbackNotifier* nh)
+ : Thread(false), mAppCallbackNotifier(nh) { }
+ virtual bool threadLoop() {
+ mAppCallbackNotifier->notificationThread();
+ return false;
+ }
+
+ TIUTILS::MessageQueue &msgQ() { return mNotificationThreadQ;}
+ };
+
+ //Friend declarations
+ friend class NotificationThread;
+
+private:
+ void notifyEvent();
+ void notifyFrame();
+ bool processMessage();
+ void releaseSharedVideoBuffers();
+
+private:
+ mutable Mutex mLock;
+ mutable Mutex mBurstLock;
+ CameraHal* mCameraHal;
+ camera_notify_callback mNotifyCb;
+ camera_data_callback mDataCb;
+ camera_data_timestamp_callback mDataCbTimestamp;
+ camera_request_memory mRequestMemory;
+ void *mCallbackCookie;
+
+ //Keeps Video MemoryHeaps and Buffers within
+ //these objects
+ KeyedVector<unsigned int, unsigned int> mVideoHeaps;
+ KeyedVector<unsigned int, unsigned int> mVideoBuffers;
+ KeyedVector<unsigned int, unsigned int> mVideoMap;
+
+ //Keeps list of Gralloc handles and associated Video Metadata Buffers
+ KeyedVector<uint32_t, uint32_t> mVideoMetadataBufferMemoryMap;
+ KeyedVector<uint32_t, uint32_t> mVideoMetadataBufferReverseMap;
+
+ bool mBufferReleased;
+
+ sp< NotificationThread> mNotificationThread;
+ EventProvider *mEventProvider;
+ FrameProvider *mFrameProvider;
+ TIUTILS::MessageQueue mEventQ;
+ TIUTILS::MessageQueue mFrameQ;
+ NotifierState mNotifierState;
+
+ bool mPreviewing;
+ camera_memory_t* mPreviewMemory;
+ unsigned char* mPreviewBufs[MAX_BUFFERS];
+ int mPreviewBufCount;
+ const char *mPreviewPixelFormat;
+ KeyedVector<unsigned int, sp<MemoryHeapBase> > mSharedPreviewHeaps;
+ KeyedVector<unsigned int, sp<MemoryBase> > mSharedPreviewBuffers;
+
+ //Burst mode active
+ bool mBurst;
+ mutable Mutex mRecordingLock;
+ bool mRecording;
+ bool mMeasurementEnabled;
+
+ bool mUseMetaDataBufferMode;
+
+};
+
+
+/**
+ * Class used for allocating memory for JPEG bitstream buffers and for camera output buffers when no overlay is used
+ */
+class MemoryManager : public BufferProvider, public virtual RefBase
+{
+public:
+ ///Initializes the memory manager and creates any resources required
+ status_t initialize(){ return NO_ERROR; }
+
+ int setErrorHandler(ErrorNotifier *errorNotifier);
+ virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs);
+ virtual uint32_t * getOffsets();
+ virtual int getFd() ;
+ virtual int freeBuffer(void* buf);
+
+private:
+
+ sp<ErrorNotifier> mErrorNotifier;
+};
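+
+/* Illustrative sketch (assumption, not part of this change): CameraHal uses the
+ * MemoryManager as its BufferProvider for image capture buffers, e.g.:
+ *
+ *   int bytes = bufferSize;                       // hypothetical size/count values
+ *   void *bufs = mMemoryManager->allocateBuffer(0, 0, NULL, bytes, bufferCount);
+ *   uint32_t *offsets = mMemoryManager->getOffsets();
+ *   int fd = mMemoryManager->getFd();
+ *   ...
+ *   mMemoryManager->freeBuffer(bufs);
+ */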
+
+
+
+
+/**
+ * CameraAdapter interface class
+ * Concrete classes derive from this class and provide implementations based on the specific camera h/w interface
+ */
+
+class CameraAdapter: public FrameNotifier, public virtual RefBase
+{
+protected:
+ enum AdapterActiveStates {
+ INTIALIZED_ACTIVE = 1 << 0,
+ LOADED_PREVIEW_ACTIVE = 1 << 1,
+ PREVIEW_ACTIVE = 1 << 2,
+ LOADED_CAPTURE_ACTIVE = 1 << 3,
+ CAPTURE_ACTIVE = 1 << 4,
+ BRACKETING_ACTIVE = 1 << 5,
+ AF_ACTIVE = 1 << 6,
+ ZOOM_ACTIVE = 1 << 7,
+ VIDEO_ACTIVE = 1 << 8,
+ };
+public:
+ typedef struct
+ {
+ void *mBuffers;
+ uint32_t *mOffsets;
+ int mFd;
+ size_t mLength;
+ size_t mCount;
+ size_t mMaxQueueable;
+ } BuffersDescriptor;
+
+ enum CameraCommands
+ {
+ CAMERA_START_PREVIEW = 0,
+ CAMERA_STOP_PREVIEW = 1,
+ CAMERA_START_VIDEO = 2,
+ CAMERA_STOP_VIDEO = 3,
+ CAMERA_START_IMAGE_CAPTURE = 4,
+ CAMERA_STOP_IMAGE_CAPTURE = 5,
+ CAMERA_PERFORM_AUTOFOCUS = 6,
+ CAMERA_CANCEL_AUTOFOCUS = 7,
+ CAMERA_PREVIEW_FLUSH_BUFFERS = 8,
+ CAMERA_START_SMOOTH_ZOOM = 9,
+ CAMERA_STOP_SMOOTH_ZOOM = 10,
+ CAMERA_USE_BUFFERS_PREVIEW = 11,
+ CAMERA_SET_TIMEOUT = 12,
+ CAMERA_CANCEL_TIMEOUT = 13,
+ CAMERA_START_BRACKET_CAPTURE = 14,
+ CAMERA_STOP_BRACKET_CAPTURE = 15,
+ CAMERA_QUERY_RESOLUTION_PREVIEW = 16,
+ CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE = 17,
+ CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA = 18,
+ CAMERA_USE_BUFFERS_IMAGE_CAPTURE = 19,
+ CAMERA_USE_BUFFERS_PREVIEW_DATA = 20,
+ CAMERA_TIMEOUT_EXPIRED = 21,
+ CAMERA_START_FD = 22,
+ CAMERA_STOP_FD = 23,
+ };
+
+ enum CameraMode
+ {
+ CAMERA_PREVIEW,
+ CAMERA_IMAGE_CAPTURE,
+ CAMERA_VIDEO,
+ CAMERA_MEASUREMENT
+ };
+
+ enum AdapterState {
+ INTIALIZED_STATE = INTIALIZED_ACTIVE,
+ LOADED_PREVIEW_STATE = LOADED_PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ PREVIEW_STATE = PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ LOADED_CAPTURE_STATE = LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ CAPTURE_STATE = CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ BRACKETING_STATE = BRACKETING_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE ,
+ AF_STATE = AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ ZOOM_STATE = ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_STATE = VIDEO_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_ZOOM_STATE = VIDEO_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ AF_ZOOM_STATE = AF_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ BRACKETING_ZOOM_STATE = BRACKETING_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ };
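+
+ // Illustrative note (assumption, not part of this change): the composite states above
+ // are bit masks of AdapterActiveStates, so adapter code can test a single activity, e.g.:
+ //
+ // if ( getState() & PREVIEW_ACTIVE ) { /* preview is running, regardless of zoom/AF */ }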
+
+public:
+
+ ///Initializes the camera adapter and creates any resources required
+ virtual int initialize(CameraProperties::Properties*, int sensor_index=0) = 0;
+
+ virtual int setErrorHandler(ErrorNotifier *errorNotifier) = 0;
+
+ //Message/Frame notification APIs
+ virtual void enableMsgType(int32_t msgs,
+ frame_callback callback = NULL,
+ event_callback eventCb = NULL,
+ void *cookie = NULL) = 0;
+ virtual void disableMsgType(int32_t msgs, void* cookie) = 0;
+ virtual void returnFrame(void* frameBuf, CameraFrame::FrameType frameType) = 0;
+
+ //APIs to configure Camera adapter and get the current parameter set
+ virtual int setParameters(const CameraParameters& params) = 0;
+ virtual void getParameters(CameraParameters& params) = 0;
+
+ //API to flush the buffers from Camera
+ status_t flushBuffers()
+ {
+ return sendCommand(CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS);
+ }
+
+ //Registers callback for returning image buffers back to CameraHAL
+ virtual int registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data) = 0;
+
+ //Registers callback, which signals a completed image capture
+ virtual int registerEndCaptureCallback(end_image_capture_callback callback, void *user_data) = 0;
+
+ //API to send a command to the camera
+ virtual status_t sendCommand(CameraCommands operation, int value1=0, int value2=0, int value3=0) = 0;
+
+ virtual ~CameraAdapter() {};
+
+ //Retrieves the current Adapter state
+ virtual AdapterState getState() = 0;
+
+ //Retrieves the next Adapter state
+ virtual AdapterState getNextState() = 0;
+
+ // Receive orientation events from CameraHal
+ virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt) = 0;
+protected:
+ //The first two methods will try to switch the adapter state.
+ //Every call to setState() should be followed by a corresponding
+ //call to commitState(). If the state switch fails, then it will
+ //get reset to the previous state via rollbackState().
+ virtual status_t setState(CameraCommands operation) = 0;
+ virtual status_t commitState() = 0;
+ virtual status_t rollbackState() = 0;
+
+ // Retrieves the current Adapter state - for internal use (not locked)
+ virtual status_t getState(AdapterState &state) = 0;
+ // Retrieves the next Adapter state - for internal use (not locked)
+ virtual status_t getNextState(AdapterState &state) = 0;
+};
+
+class DisplayAdapter : public BufferProvider, public virtual RefBase
+{
+public:
+ typedef struct S3DParameters_t
+ {
+ int mode;
+ int framePacking;
+ int order;
+ int subSampling;
+ } S3DParameters;
+
+ ///Initializes the display adapter and creates any resources required
+ virtual int initialize() = 0;
+
+ virtual int setPreviewWindow(struct preview_stream_ops *window) = 0;
+ virtual int setFrameProvider(FrameNotifier *frameProvider) = 0;
+ virtual int setErrorHandler(ErrorNotifier *errorNotifier) = 0;
+ virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL, S3DParameters *s3dParams = NULL) = 0;
+ virtual int disableDisplay() = 0;
+ //Used for Snapshot review temp. pause
+ virtual int pauseDisplay(bool pause) = 0;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ //Used for shot to snapshot measurement
+ virtual int setSnapshotTimeRef(struct timeval *refTime = NULL) = 0;
+#endif
+
+ virtual int useBuffers(void *bufArr, int num) = 0;
+ virtual bool supportsExternalBuffering() = 0;
+
+ // Get max queueable buffers display supports
+ // This function should only be called after
+ // allocateBuffer
+ virtual int maxQueueableBuffers(unsigned int& queueable) = 0;
+};
+
+static void releaseImageBuffers(void *userData);
+
+static void endImageCapture(void *userData);
+
+/**
+ * Implementation of the Android Camera hardware abstraction layer
+ *
+ * This class implements the interface methods defined in CameraHardwareInterface
+ * for the OMAP4 platform
+ */
+class CameraHal
+{
+
+public:
+ ///Constants
+ static const int NO_BUFFERS_PREVIEW;
+ static const int NO_BUFFERS_IMAGE_CAPTURE;
+ static const uint32_t VFR_SCALE = 1000;
+
+
+ /*--------------------Interface Methods---------------------------------*/
+
+ //@{
+public:
+
+ /** Set the notification and data callbacks */
+ void setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ /** Receives orientation events from SensorListener **/
+ void onOrientationEvent(uint32_t orientation, uint32_t tilt);
+
+ /**
+ * The following three functions all take a msgtype,
+ * which is a bitmask of the messages defined in
+ * include/ui/Camera.h
+ */
+
+ /**
+ * Enable a message, or set of messages.
+ */
+ void enableMsgType(int32_t msgType);
+
+ /**
+ * Disable a message, or a set of messages.
+ */
+ void disableMsgType(int32_t msgType);
+
+ /**
+ * Query whether a message, or a set of messages, is enabled.
+ * Note that this operates as an AND: if any of the messages
+ * queried are off, this will return false.
+ */
+ int msgTypeEnabled(int32_t msgType);
+
+ /**
+ * Start preview mode.
+ */
+ int startPreview();
+
+ /**
+ * Only used if overlays are used for camera preview.
+ */
+ int setPreviewWindow(struct preview_stream_ops *window);
+
+ /**
+ * Stop a previously started preview.
+ */
+ void stopPreview();
+
+ /**
+ * Returns true if preview is enabled.
+ */
+ bool previewEnabled();
+
+ /**
+ * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
+ * message is sent with the corresponding frame. Every record frame must be released
+ * by calling releaseRecordingFrame().
+ */
+ int startRecording();
+
+ /**
+ * Stop a previously started recording.
+ */
+ void stopRecording();
+
+ /**
+ * Returns true if recording is enabled.
+ */
+ int recordingEnabled();
+
+ /**
+ * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+ */
+ void releaseRecordingFrame(const void *opaque);
+
+ /**
+ * Set the camera parameters specific to Video Recording.
+ */
+ status_t setVideoModeParameters();
+
+ /**
+ * Start auto focus, the notification callback routine is called
+ * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus()
+ * will be called again if another auto focus is needed.
+ */
+ int autoFocus();
+
+ /**
+ * Cancels auto-focus function. If the auto-focus is still in progress,
+ * this function will cancel it. Whether the auto-focus is in progress
+ * or not, this function will return the focus position to the default.
+ * If the camera does not support auto-focus, this is a no-op.
+ */
+ int cancelAutoFocus();
+
+ /**
+ * Take a picture.
+ */
+ int takePicture();
+
+ /**
+ * Cancel a picture that was started with takePicture. Calling this
+ * method when no picture is being taken is a no-op.
+ */
+ int cancelPicture();
+
+ /** Set the camera parameters. */
+ int setParameters(const char* params);
+ int setParameters(const CameraParameters& params);
+
+ /** Return the camera parameters. */
+ char* getParameters();
+ void putParameters(char *);
+
+ /**
+ * Send command to camera driver.
+ */
+ int sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+
+ /**
+ * Release the hardware resources owned by this object. Note that this is
+ * *not* done in the destructor.
+ */
+ void release();
+
+ /**
+ * Dump state of the camera hardware
+ */
+ int dump(int fd) const;
+
+
+ status_t storeMetaDataInBuffers(bool enable);
+
+ //@}
+
+/*--------------------Internal Member functions - Public---------------------------------*/
+
+public:
+ /** @name internalFunctionsPublic */
+ //@{
+
+ /** Constructor of CameraHal */
+ CameraHal(int cameraId);
+
+ // Destructor of CameraHal
+ ~CameraHal();
+
+ /** Initialize CameraHal */
+ status_t initialize(CameraProperties::Properties*);
+
+ /** Deinitialize CameraHal */
+ void deinitialize();
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //Uses the constructor timestamp as a reference to calculate the
+ // elapsed time
+ static void PPM(const char *);
+ //Uses a user provided timestamp as a reference to calculate the
+ // elapsed time
+ static void PPM(const char *, struct timeval*, ...);
+
+#endif
+
+ /** Free image bufs */
+ status_t freeImageBufs();
+
+ //Signals the end of image capture
+ status_t signalEndImageCapture();
+
+ //Events
+ static void eventCallbackRelay(CameraHalEvent* event);
+ void eventCallback(CameraHalEvent* event);
+ void setEventProvider(int32_t eventMask, MessageNotifier * eventProvider);
+
+/*--------------------Internal Member functions - Private---------------------------------*/
+private:
+
+ /** @name internalFunctionsPrivate */
+ //@{
+
+ status_t parseResolution(const char *resStr, int &width, int &height);
+
+ void insertSupportedParams();
+
+ /** Allocate preview data buffers */
+ status_t allocPreviewDataBufs(size_t size, size_t bufferCount);
+
+ /** Free preview data buffers */
+ status_t freePreviewDataBufs();
+
+ /** Allocate preview buffers */
+ status_t allocPreviewBufs(int width, int height, const char* previewFormat, unsigned int bufferCount, unsigned int &max_queueable);
+
+ /** Allocate video buffers */
+ status_t allocVideoBufs(int width, int height, const char* previewFormat);
+
+ /** Allocate image capture buffers */
+ status_t allocImageBufs(unsigned int width, unsigned int height, size_t length, const char* previewFormat, unsigned int bufferCount);
+
+ /** Free preview buffers */
+ status_t freePreviewBufs();
+
+ /** Free video bufs */
+ status_t freeVideoBufs();
+
+ //Check if a given resolution is supported by the current camera
+ //instance
+ bool isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions);
+
+ //Check if a given parameter is supported by the current camera
+ // instance
+ bool isParameterValid(const char *param, const char *supportedParams);
+ bool isParameterValid(int param, const char *supportedParams);
+
+ /** Initialize default parameters */
+ void initDefaultParameters();
+
+ void dumpProperties(CameraProperties::Properties& cameraProps);
+
+ status_t startImageBracketing();
+
+ status_t stopImageBracketing();
+
+ void setShutter(bool enable);
+
+ void forceStopPreview();
+
+ //@}
+
+
+/*----------Member variables - Public ---------------------*/
+public:
+ int32_t mMsgEnabled;
+ bool mRecordEnabled;
+ nsecs_t mCurrentTime;
+ bool mFalsePreview;
+ bool mPreviewEnabled;
+ uint32_t mTakePictureQueue;
+ bool mBracketingEnabled;
+ bool mBracketingRunning;
+ //User shutter override
+ bool mShutterEnabled;
+ bool mMeasurementEnabled;
+ //Google's parameter delimiter
+ static const char PARAMS_DELIMITER[];
+
+ CameraAdapter *mCameraAdapter;
+ sp<AppCallbackNotifier> mAppCallbackNotifier;
+ sp<DisplayAdapter> mDisplayAdapter;
+ sp<MemoryManager> mMemoryManager;
+
+ sp<IMemoryHeap> mPictureHeap;
+
+ int* mGrallocHandles;
+
+
+
+///static member vars
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //Timestamp from the CameraHal constructor
+ static struct timeval ppm_start;
+ //Timestamp of the autoFocus command
+ static struct timeval mStartFocus;
+ //Timestamp of the startPreview command
+ static struct timeval mStartPreview;
+ //Timestamp of the takePicture command
+ static struct timeval mStartCapture;
+
+#endif
+
+/*----------Member variables - Private ---------------------*/
+private:
+ //keeps paused state of display
+ bool mDisplayPaused;
+ //Index of current camera adapter
+ int mCameraIndex;
+
+ mutable Mutex mLock;
+
+ sp<SensorListener> mSensorListener;
+
+ void* mCameraAdapterHandle;
+
+ CameraParameters mParameters;
+ bool mPreviewRunning;
+ bool mPreviewStateOld;
+ bool mRecordingEnabled;
+ EventProvider *mEventProvider;
+
+ int32_t *mPreviewDataBufs;
+ uint32_t *mPreviewDataOffsets;
+ int mPreviewDataFd;
+ int mPreviewDataLength;
+ int32_t *mImageBufs;
+ uint32_t *mImageOffsets;
+ int mImageFd;
+ int mImageLength;
+ int32_t *mPreviewBufs;
+ uint32_t *mPreviewOffsets;
+ int mPreviewLength;
+ int mPreviewFd;
+ int32_t *mVideoBufs;
+ uint32_t *mVideoOffsets;
+ int mVideoFd;
+ int mVideoLength;
+
+ int32_t mLastPreviewFramerate;
+
+ int mBracketRangePositive;
+ int mBracketRangeNegative;
+
+ ///@todo Rename this as preview buffer provider
+ BufferProvider *mBufProvider;
+ BufferProvider *mVideoBufProvider;
+
+
+ CameraProperties::Properties* mCameraProperties;
+
+ bool mPreviewStartInProgress;
+
+ bool mSetPreviewWindowCalled;
+
+ uint32_t mPreviewWidth;
+ uint32_t mPreviewHeight;
+ int32_t mMaxZoomSupported;
+};
+
+
+}; // namespace android
+
+#endif
diff --git a/camera/inc/CameraProperties.h b/camera/inc/CameraProperties.h
new file mode 100644
index 0000000..83d1b10
--- /dev/null
+++ b/camera/inc/CameraProperties.h
@@ -0,0 +1,180 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+
+#ifndef CAMERA_PROPERTIES_H
+#define CAMERA_PROPERTIES_H
+
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+#include <stdio.h>
+#include <dirent.h>
+#include <errno.h>
+#include <stdio.h>
+#include <string.h>
+
+namespace android {
+
+#define MAX_CAMERAS_SUPPORTED 2
+#define MAX_PROP_NAME_LENGTH 50
+#define MAX_PROP_VALUE_LENGTH 2048
+
+#define EXIF_MAKE_DEFAULT "default_make"
+#define EXIF_MODEL_DEFAULT "default_model"
+
+// Class that handles the Camera Properties
+class CameraProperties
+{
+public:
+ static const char INVALID[];
+ static const char CAMERA_NAME[];
+ static const char CAMERA_SENSOR_INDEX[];
+ static const char ORIENTATION_INDEX[];
+ static const char FACING_INDEX[];
+ static const char S3D_SUPPORTED[];
+ static const char SUPPORTED_PREVIEW_SIZES[];
+ static const char SUPPORTED_PREVIEW_FORMATS[];
+ static const char SUPPORTED_PREVIEW_FRAME_RATES[];
+ static const char SUPPORTED_PICTURE_SIZES[];
+ static const char SUPPORTED_PICTURE_FORMATS[];
+ static const char SUPPORTED_THUMBNAIL_SIZES[];
+ static const char SUPPORTED_WHITE_BALANCE[];
+ static const char SUPPORTED_EFFECTS[];
+ static const char SUPPORTED_ANTIBANDING[];
+ static const char SUPPORTED_EXPOSURE_MODES[];
+ static const char SUPPORTED_EV_MIN[];
+ static const char SUPPORTED_EV_MAX[];
+ static const char SUPPORTED_EV_STEP[];
+ static const char SUPPORTED_ISO_VALUES[];
+ static const char SUPPORTED_SCENE_MODES[];
+ static const char SUPPORTED_FLASH_MODES[];
+ static const char SUPPORTED_FOCUS_MODES[];
+ static const char REQUIRED_PREVIEW_BUFS[];
+ static const char REQUIRED_IMAGE_BUFS[];
+ static const char SUPPORTED_ZOOM_RATIOS[];
+ static const char SUPPORTED_ZOOM_STAGES[];
+ static const char SUPPORTED_IPP_MODES[];
+ static const char SMOOTH_ZOOM_SUPPORTED[];
+ static const char ZOOM_SUPPORTED[];
+ static const char PREVIEW_SIZE[];
+ static const char PREVIEW_FORMAT[];
+ static const char PREVIEW_FRAME_RATE[];
+ static const char ZOOM[];
+ static const char PICTURE_SIZE[];
+ static const char PICTURE_FORMAT[];
+ static const char JPEG_THUMBNAIL_SIZE[];
+ static const char WHITEBALANCE[];
+ static const char EFFECT[];
+ static const char ANTIBANDING[];
+ static const char EXPOSURE_MODE[];
+ static const char EV_COMPENSATION[];
+ static const char ISO_MODE[];
+ static const char FOCUS_MODE[];
+ static const char SCENE_MODE[];
+ static const char FLASH_MODE[];
+ static const char JPEG_QUALITY[];
+ static const char BRIGHTNESS[];
+ static const char SATURATION[];
+ static const char SHARPNESS[];
+ static const char CONTRAST[];
+ static const char IPP[];
+ static const char AUTOCONVERGENCE[];
+ static const char AUTOCONVERGENCE_MODE[];
+ static const char MANUALCONVERGENCE_VALUES[];
+ static const char SENSOR_ORIENTATION[];
+ static const char SENSOR_ORIENTATION_VALUES[];
+ static const char REVISION[];
+ static const char FOCAL_LENGTH[];
+ static const char HOR_ANGLE[];
+ static const char VER_ANGLE[];
+ static const char EXIF_MAKE[];
+ static const char EXIF_MODEL[];
+ static const char JPEG_THUMBNAIL_QUALITY[];
+ static const char MAX_FOCUS_AREAS[];
+ static const char MAX_FD_HW_FACES[];
+ static const char MAX_FD_SW_FACES[];
+
+ static const char PARAMS_DELIMITER [];
+
+ static const char S3D2D_PREVIEW[];
+ static const char S3D2D_PREVIEW_MODES[];
+ static const char VSTAB[];
+ static const char VSTAB_VALUES[];
+ static const char FRAMERATE_RANGE[];
+ static const char FRAMERATE_RANGE_SUPPORTED[];
+
+ static const char DEFAULT_VALUE[];
+
+ static const char AUTO_EXPOSURE_LOCK[];
+ static const char AUTO_EXPOSURE_LOCK_SUPPORTED[];
+ static const char AUTO_WHITEBALANCE_LOCK[];
+ static const char AUTO_WHITEBALANCE_LOCK_SUPPORTED[];
+ static const char MAX_NUM_METERING_AREAS[];
+ static const char METERING_AREAS[];
+
+ CameraProperties();
+ ~CameraProperties();
+
+ // container class passed around for accessing properties
+ class Properties
+ {
+ public:
+ Properties()
+ {
+ mProperties = new DefaultKeyedVector<String8, String8>(String8(DEFAULT_VALUE));
+ // set properties that are same for all cameras
+ set(EXIF_MAKE, EXIF_MAKE_DEFAULT);
+ set(EXIF_MODEL, EXIF_MODEL_DEFAULT);
+ }
+ ~Properties()
+ {
+ delete mProperties;
+ }
+ ssize_t set(const char *prop, const char *value);
+ ssize_t set(const char *prop, int value);
+ const char* get(const char * prop);
+ void dump();
+
+ protected:
+ const char* keyAt(unsigned int);
+ const char* valueAt(unsigned int);
+
+ private:
+ DefaultKeyedVector<String8, String8>* mProperties;
+
+ };
+
+ ///Initializes the CameraProperties class
+ status_t initialize();
+ status_t loadProperties();
+ int camerasSupported();
+ int getProperties(int cameraIndex, Properties** properties);
+
+private:
+
+ uint32_t mCamerasSupported;
+ int mInitialized;
+
+ Properties mCameraProps[MAX_CAMERAS_SUPPORTED];
+
+};
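+
+/* Illustrative sketch (assumption, not part of this change): the HAL module typically
+ * loads the properties once and hands one per-camera entry to each CameraHal instance:
+ *
+ *   CameraProperties props;
+ *   props.initialize();
+ *   CameraProperties::Properties *entry = NULL;
+ *   if ( props.getProperties(cameraId, &entry) >= 0 ) {
+ *       const char *name = entry->get(CameraProperties::CAMERA_NAME);
+ *   }
+ */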
+
+};
+
+#endif //CAMERA_PROPERTIES_H
+
diff --git a/camera/inc/General3A_Settings.h b/camera/inc/General3A_Settings.h
new file mode 100644
index 0000000..af33518
--- /dev/null
+++ b/camera/inc/General3A_Settings.h
@@ -0,0 +1,262 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file General3A_Settings.h
+*
+* This file maps the Camera Hardware Interface to OMX.
+*
+*/
+
+#include "OMX_TI_IVCommon.h"
+#include "OMX_TI_Common.h"
+#include "OMX_TI_Index.h"
+#include "TICameraParameters.h"
+
+#ifndef GENERAL_3A_SETTINGS_H
+#define GENERAL_3A_SETTINGS_H
+
+#define FOCUS_FACE_PRIORITY ( OMX_IMAGE_FocusControlMax - 1 )
+#define FOCUS_REGION_PRIORITY ( OMX_IMAGE_FocusControlMax - 2 )
+#define WB_FACE_PRIORITY ( OMX_WhiteBalControlMax - 1 )
+#define EXPOSURE_FACE_PRIORITY ( OMX_ExposureControlMax - 1 )
+
+namespace android {
+
+struct userToOMX_LUT{
+ const char * userDefinition;
+ int omxDefinition;
+};
+
+struct LUTtype{
+ int size;
+ const userToOMX_LUT *Table;
+};
+
+const userToOMX_LUT isoUserToOMX[] = {
+ { TICameraParameters::ISO_MODE_AUTO, 0 },
+ { TICameraParameters::ISO_MODE_100, 100 },
+ { TICameraParameters::ISO_MODE_200, 200 },
+ { TICameraParameters::ISO_MODE_400, 400 },
+ { TICameraParameters::ISO_MODE_800, 800 },
+ { TICameraParameters::ISO_MODE_1000, 1000 },
+ { TICameraParameters::ISO_MODE_1200, 1200 },
+ { TICameraParameters::ISO_MODE_1600, 1600 },
+};
+
+const userToOMX_LUT effects_UserToOMX [] = {
+ { CameraParameters::EFFECT_NONE, OMX_ImageFilterNone },
+ { CameraParameters::EFFECT_NEGATIVE, OMX_ImageFilterNegative },
+ { CameraParameters::EFFECT_SOLARIZE, OMX_ImageFilterSolarize },
+ { CameraParameters::EFFECT_SEPIA, OMX_ImageFilterSepia },
+ { CameraParameters::EFFECT_MONO, OMX_ImageFilterGrayScale },
+ { TICameraParameters::EFFECT_NATURAL, OMX_ImageFilterNatural },
+ { TICameraParameters::EFFECT_VIVID, OMX_ImageFilterVivid },
+ { TICameraParameters::EFFECT_COLOR_SWAP, OMX_ImageFilterColourSwap },
+ { CameraParameters::EFFECT_BLACKBOARD, OMX_TI_ImageFilterBlackBoard },
+ { CameraParameters::EFFECT_WHITEBOARD, OMX_TI_ImageFilterWhiteBoard },
+ { CameraParameters::EFFECT_AQUA, OMX_TI_ImageFilterAqua },
+ { CameraParameters::EFFECT_POSTERIZE, OMX_TI_ImageFilterPosterize },
+ { TICameraParameters::EFFECT_BLACKWHITE, OMX_TI_ImageFilterBlackWhite }
+};
+
+const userToOMX_LUT scene_UserToOMX [] = {
+ { CameraParameters::SCENE_MODE_AUTO, OMX_Manual },
+ { TICameraParameters::SCENE_MODE_CLOSEUP, OMX_Closeup },
+ { CameraParameters::SCENE_MODE_LANDSCAPE, OMX_Landscape },
+ { TICameraParameters::SCENE_MODE_AQUA, OMX_Underwater },
+ { TICameraParameters::SCENE_MODE_SPORT, OMX_Sport },
+ { TICameraParameters::SCENE_MODE_MOOD, OMX_Mood },
+ { CameraParameters::SCENE_MODE_NIGHT_PORTRAIT, OMX_NightPortrait },
+ { TICameraParameters::SCENE_MODE_NIGHT_INDOOR, OMX_NightIndoor },
+ { CameraParameters::SCENE_MODE_FIREWORKS, OMX_Fireworks },
+ { TICameraParameters::SCENE_MODE_DOCUMENT, OMX_Document },
+ { TICameraParameters::SCENE_MODE_BARCODE, OMX_Barcode },
+ { TICameraParameters::SCENE_MODE_VIDEO_SUPER_NIGHT, OMX_SuperNight },
+ { TICameraParameters::SCENE_MODE_VIDEO_CINE, OMX_Cine },
+ { TICameraParameters::SCENE_MODE_VIDEO_OLD_FILM, OMX_OldFilm },
+ { CameraParameters::SCENE_MODE_ACTION, OMX_TI_Action },
+ { CameraParameters::SCENE_MODE_BEACH, OMX_TI_Beach },
+ { CameraParameters::SCENE_MODE_CANDLELIGHT, OMX_TI_Candlelight },
+ { CameraParameters::SCENE_MODE_NIGHT, OMX_TI_Night },
+ { CameraParameters::SCENE_MODE_PARTY, OMX_TI_Party },
+ { CameraParameters::SCENE_MODE_PORTRAIT, OMX_TI_Portrait },
+ { CameraParameters::SCENE_MODE_SNOW, OMX_TI_Snow },
+ { CameraParameters::SCENE_MODE_STEADYPHOTO, OMX_TI_Steadyphoto },
+ { CameraParameters::SCENE_MODE_SUNSET, OMX_TI_Sunset },
+ { CameraParameters::SCENE_MODE_THEATRE, OMX_TI_Theatre }
+};
+
+const userToOMX_LUT whiteBal_UserToOMX [] = {
+ { CameraParameters::WHITE_BALANCE_AUTO, OMX_WhiteBalControlAuto },
+ { CameraParameters::WHITE_BALANCE_DAYLIGHT, OMX_WhiteBalControlSunLight },
+ { CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, OMX_WhiteBalControlCloudy },
+ { TICameraParameters::WHITE_BALANCE_TUNGSTEN, OMX_WhiteBalControlTungsten },
+ { CameraParameters::WHITE_BALANCE_FLUORESCENT, OMX_WhiteBalControlFluorescent },
+ { CameraParameters::WHITE_BALANCE_INCANDESCENT, OMX_WhiteBalControlIncandescent },
+ { TICameraParameters::WHITE_BALANCE_HORIZON, OMX_WhiteBalControlHorizon },
+ { CameraParameters::WHITE_BALANCE_SHADE, OMX_TI_WhiteBalControlShade },
+ { CameraParameters::WHITE_BALANCE_TWILIGHT, OMX_TI_WhiteBalControlTwilight },
+ { CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT, OMX_TI_WhiteBalControlWarmFluorescent },
+ { TICameraParameters::WHITE_BALANCE_FACE, WB_FACE_PRIORITY },
+ { TICameraParameters::WHITE_BALANCE_SUNSET, OMX_TI_WhiteBalControlSunset }
+};
+
+const userToOMX_LUT antibanding_UserToOMX [] = {
+ { CameraParameters::ANTIBANDING_OFF, OMX_FlickerCancelOff },
+ { CameraParameters::ANTIBANDING_AUTO, OMX_FlickerCancelAuto },
+ { CameraParameters::ANTIBANDING_50HZ, OMX_FlickerCancel50 },
+ { CameraParameters::ANTIBANDING_60HZ, OMX_FlickerCancel60 }
+};
+
+const userToOMX_LUT focus_UserToOMX [] = {
+ { CameraParameters::FOCUS_MODE_AUTO, OMX_IMAGE_FocusControlAutoLock },
+ { CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlAutoInfinity },
+ { CameraParameters::FOCUS_MODE_MACRO, OMX_IMAGE_FocusControlAutoMacro },
+ { TICameraParameters::FOCUS_MODE_PORTRAIT, OMX_IMAGE_FocusControlPortrait },
+ { TICameraParameters::FOCUS_MODE_EXTENDED, OMX_IMAGE_FocusControlExtended },
+ { CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, OMX_IMAGE_FocusControlAuto },
+ { TICameraParameters::FOCUS_MODE_FACE , FOCUS_FACE_PRIORITY },
+
+};
+
+const userToOMX_LUT exposure_UserToOMX [] = {
+ { TICameraParameters::EXPOSURE_MODE_OFF, OMX_ExposureControlOff },
+ { TICameraParameters::EXPOSURE_MODE_AUTO, OMX_ExposureControlAuto },
+ { TICameraParameters::EXPOSURE_MODE_NIGHT, OMX_ExposureControlNight },
+ { TICameraParameters::EXPOSURE_MODE_BACKLIGHT, OMX_ExposureControlBackLight },
+ { TICameraParameters::EXPOSURE_MODE_SPOTLIGHT, OMX_ExposureControlSpotLight},
+ { TICameraParameters::EXPOSURE_MODE_SPORTS, OMX_ExposureControlSports },
+ { TICameraParameters::EXPOSURE_MODE_SNOW, OMX_ExposureControlSnow },
+ { TICameraParameters::EXPOSURE_MODE_BEACH, OMX_ExposureControlBeach },
+ { TICameraParameters::EXPOSURE_MODE_APERTURE, OMX_ExposureControlLargeAperture },
+ { TICameraParameters::EXPOSURE_MODE_SMALL_APERTURE, OMX_ExposureControlSmallApperture },
+ { TICameraParameters::EXPOSURE_MODE_FACE, EXPOSURE_FACE_PRIORITY },
+};
+
+const userToOMX_LUT flash_UserToOMX [] = {
+ { CameraParameters::FLASH_MODE_OFF ,OMX_IMAGE_FlashControlOff },
+ { CameraParameters::FLASH_MODE_ON ,OMX_IMAGE_FlashControlOn },
+ { CameraParameters::FLASH_MODE_AUTO ,OMX_IMAGE_FlashControlAuto },
+ { CameraParameters::FLASH_MODE_TORCH ,OMX_IMAGE_FlashControlTorch },
+ { CameraParameters::FLASH_MODE_RED_EYE ,OMX_IMAGE_FlashControlRedEyeReduction },
+ { TICameraParameters::FLASH_MODE_FILL_IN ,OMX_IMAGE_FlashControlFillin }
+};
+
+const LUTtype ExpLUT =
+ {
+ sizeof(exposure_UserToOMX)/sizeof(exposure_UserToOMX[0]),
+ exposure_UserToOMX
+ };
+
+const LUTtype WBalLUT =
+ {
+ sizeof(whiteBal_UserToOMX)/sizeof(whiteBal_UserToOMX[0]),
+ whiteBal_UserToOMX
+ };
+
+const LUTtype FlickerLUT =
+ {
+ sizeof(antibanding_UserToOMX)/sizeof(antibanding_UserToOMX[0]),
+ antibanding_UserToOMX
+ };
+
+const LUTtype SceneLUT =
+ {
+ sizeof(scene_UserToOMX)/sizeof(scene_UserToOMX[0]),
+ scene_UserToOMX
+ };
+
+const LUTtype FlashLUT =
+ {
+ sizeof(flash_UserToOMX)/sizeof(flash_UserToOMX[0]),
+ flash_UserToOMX
+ };
+
+const LUTtype EffLUT =
+ {
+ sizeof(effects_UserToOMX)/sizeof(effects_UserToOMX[0]),
+ effects_UserToOMX
+ };
+
+const LUTtype FocusLUT =
+ {
+ sizeof(focus_UserToOMX)/sizeof(focus_UserToOMX[0]),
+ focus_UserToOMX
+ };
+
+const LUTtype IsoLUT =
+ {
+ sizeof(isoUserToOMX)/sizeof(isoUserToOMX[0]),
+ isoUserToOMX
+ };
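+
+/* Illustrative sketch (assumption, not part of this change): the tables above are searched
+ * linearly when translating a user-visible setting into its OMX counterpart, e.g.:
+ *
+ *   static int getLUTvalue_HALtoOMX(const char *value, const LUTtype &LUT)   // hypothetical helper
+ *   {
+ *       for ( int i = 0; i < LUT.size; i++ ) {
+ *           if ( strcmp(LUT.Table[i].userDefinition, value) == 0 ) {
+ *               return LUT.Table[i].omxDefinition;
+ *           }
+ *       }
+ *       return -1;
+ *   }
+ */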
+
+/*
+ * class Gen3A_settings
+ * Stores the current 3A settings; the lookup tables above map
+ * these settings from the HAL definitions to their OMX counterparts.
+ */
+class Gen3A_settings{
+ public:
+
+ int Exposure;
+ int WhiteBallance;
+ int Flicker;
+ int SceneMode;
+ int Effect;
+ int Focus;
+ int EVCompensation;
+ int Contrast;
+ int Saturation;
+ int Sharpness;
+ int ISO;
+ int FlashMode;
+
+ unsigned int Brightness;
+ OMX_BOOL ExposureLock;
+ OMX_BOOL WhiteBalanceLock;
+};
+
+/*
+* Flags raised when a setting is changed
+*/
+enum E3ASettingsFlags
+{
+ SetSceneMode = 1 << 0,
+ SetEVCompensation = 1 << 1,
+ SetWhiteBallance = 1 << 2,
+ SetFlicker = 1 << 3,
+ SetExposure = 1 << 4,
+ SetSharpness = 1 << 5,
+ SetBrightness = 1 << 6,
+ SetContrast = 1 << 7,
+ SetISO = 1 << 8,
+ SetSaturation = 1 << 9,
+ SetEffect = 1 << 10,
+ SetFocus = 1 << 11,
+ SetExpMode = 1 << 14,
+ SetFlash = 1 << 15,
+ SetExpLock = 1 << 16,
+ SetWBLock = 1 << 17,
+
+ E3aSettingMax,
+ E3AsettingsAll = ( ((E3aSettingMax -1 ) << 1) -1 ) /// all possible flags raised
+};
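Expanding the E3AsettingsAll expression with the values above shows what the mask evaluates to:

    // SetWBLock = 1 << 17 is the highest explicit flag, so the implicit
    // enumerator E3aSettingMax == (1 << 17) + 1.  Therefore:
    //   ((E3aSettingMax - 1) << 1) - 1
    //     == ((1 << 17) << 1) - 1
    //     == (1 << 18) - 1
    //     == 0x3FFFF   // bits 0..17 set: every flag above, plus the unused bits 12-13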
+
+};
+
+#endif //GENERAL_3A_SETTINGS_H
diff --git a/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
new file mode 100644
index 0000000..2a41ba0
--- /dev/null
+++ b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
@@ -0,0 +1,834 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef OMX_CAMERA_ADAPTER_H
+#define OMX_CAMERA_ADAPTER_H
+
+#include "CameraHal.h"
+#include "OMX_Types.h"
+#include "OMX_Core.h"
+#include "OMX_CoreExt.h"
+#include "OMX_IVCommon.h"
+#include "OMX_Component.h"
+#include "OMX_Index.h"
+#include "OMX_IndexExt.h"
+#include "OMX_TI_Index.h"
+#include "OMX_TI_IVCommon.h"
+#include "OMX_TI_Common.h"
+#include "OMX_TI_Image.h"
+#include "General3A_Settings.h"
+
+#include "BaseCameraAdapter.h"
+#include "DebugUtils.h"
+
+
+extern "C"
+{
+#include "timm_osal_error.h"
+#include "timm_osal_events.h"
+#include "timm_osal_trace.h"
+#include "timm_osal_semaphores.h"
+}
+
+namespace android {
+
+#define Q16_OFFSET 16
+
+#define OMX_CMD_TIMEOUT 3000000 //3 sec.
+#define AF_CALLBACK_TIMEOUT 10000000 //10 seconds timeout
+#define OMX_CAPTURE_TIMEOUT 5000000 //5 sec.
+
+#define FOCUS_THRESHOLD 5 //[s.]
+
+#define MIN_JPEG_QUALITY 1
+#define MAX_JPEG_QUALITY 100
+#define EXP_BRACKET_RANGE 10
+
+#define FOCUS_DIST_SIZE 100
+#define FOCUS_DIST_BUFFER_SIZE 500
+
+#define TOUCH_DATA_SIZE 200
+#define DEFAULT_THUMB_WIDTH 160
+#define DEFAULT_THUMB_HEIGHT 120
+#define FRAME_RATE_FULL_HD 27
+#define ZOOM_STAGES 61
+
+#define FACE_DETECTION_BUFFER_SIZE 0x1000
+
+#define EXIF_MODEL_SIZE 100
+#define EXIF_MAKE_SIZE 100
+#define EXIF_DATE_TIME_SIZE 20
+
+#define GPS_TIMESTAMP_SIZE 6
+#define GPS_DATESTAMP_SIZE 11
+#define GPS_REF_SIZE 2
+#define GPS_MAPDATUM_SIZE 100
+#define GPS_PROCESSING_SIZE 100
+#define GPS_VERSION_SIZE 4
+#define GPS_NORTH_REF "N"
+#define GPS_SOUTH_REF "S"
+#define GPS_EAST_REF "E"
+#define GPS_WEST_REF "W"
+
+/* Default port start number of the Camera component */
+#define OMX_CAMERA_DEFAULT_START_PORT_NUM 0
+
+/* Define the number of ports for the different domains */
+#define OMX_CAMERA_PORT_OTHER_NUM 1
+#define OMX_CAMERA_PORT_VIDEO_NUM 4
+#define OMX_CAMERA_PORT_IMAGE_NUM 1
+#define OMX_CAMERA_PORT_AUDIO_NUM 0
+#define OMX_CAMERA_NUM_PORTS (OMX_CAMERA_PORT_OTHER_NUM + OMX_CAMERA_PORT_VIDEO_NUM + OMX_CAMERA_PORT_IMAGE_NUM + OMX_CAMERA_PORT_AUDIO_NUM)
+
+/* Define the start port number for the different domains */
+#define OMX_CAMERA_PORT_OTHER_START OMX_CAMERA_DEFAULT_START_PORT_NUM
+#define OMX_CAMERA_PORT_VIDEO_START (OMX_CAMERA_PORT_OTHER_START + OMX_CAMERA_PORT_OTHER_NUM)
+#define OMX_CAMERA_PORT_IMAGE_START (OMX_CAMERA_PORT_VIDEO_START + OMX_CAMERA_PORT_VIDEO_NUM)
+#define OMX_CAMERA_PORT_AUDIO_START (OMX_CAMERA_PORT_IMAGE_START + OMX_CAMERA_PORT_IMAGE_NUM)
+
+/* Port index for camera component */
+#define OMX_CAMERA_PORT_OTHER_IN (OMX_CAMERA_PORT_OTHER_START + 0)
+#define OMX_CAMERA_PORT_VIDEO_IN_VIDEO (OMX_CAMERA_PORT_VIDEO_START + 0)
+#define OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW (OMX_CAMERA_PORT_VIDEO_START + 1)
+#define OMX_CAMERA_PORT_VIDEO_OUT_VIDEO (OMX_CAMERA_PORT_VIDEO_START + 2)
+#define OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT (OMX_CAMERA_PORT_VIDEO_START + 3)
+#define OMX_CAMERA_PORT_IMAGE_OUT_IMAGE (OMX_CAMERA_PORT_IMAGE_START + 0)
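With the port counts defined above (one "other" port, four video ports, one image port, no audio ports), these expressions resolve to fixed indices; spelled out:

    // OMX_CAMERA_PORT_OTHER_START = 0   ->  OMX_CAMERA_PORT_OTHER_IN              = 0
    // OMX_CAMERA_PORT_VIDEO_START = 1   ->  OMX_CAMERA_PORT_VIDEO_IN_VIDEO        = 1
    //                                       OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW     = 2
    //                                       OMX_CAMERA_PORT_VIDEO_OUT_VIDEO       = 3
    //                                       OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT = 4
    // OMX_CAMERA_PORT_IMAGE_START = 5   ->  OMX_CAMERA_PORT_IMAGE_OUT_IMAGE       = 5
    // OMX_CAMERA_PORT_AUDIO_START = 6       (no audio ports)
    // OMX_CAMERA_NUM_PORTS        = 6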
+
+
+#define OMX_INIT_STRUCT(_s_, _name_) \
+ memset(&(_s_), 0x0, sizeof(_name_)); \
+ (_s_).nSize = sizeof(_name_); \
+ (_s_).nVersion.s.nVersionMajor = 0x1; \
+ (_s_).nVersion.s.nVersionMinor = 0x1; \
+ (_s_).nVersion.s.nRevision = 0x0; \
+ (_s_).nVersion.s.nStep = 0x0
+
+#define OMX_INIT_STRUCT_PTR(_s_, _name_) \
+ memset((_s_), 0x0, sizeof(_name_)); \
+ (_s_)->nSize = sizeof(_name_); \
+ (_s_)->nVersion.s.nVersionMajor = 0x1; \
+ (_s_)->nVersion.s.nVersionMinor = 0x1; \
+ (_s_)->nVersion.s.nRevision = 0x0; \
+ (_s_)->nVersion.s.nStep = 0x0
+
+#define GOTO_EXIT_IF(_CONDITION,_ERROR) { \
+ if ((_CONDITION)) { \
+ eError = (_ERROR); \
+ goto EXIT; \
+ } \
+}
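A hypothetical sketch of how these helpers are typically combined when issuing an OMX config call. It assumes an OMX_ERRORTYPE eError and an EXIT: label in the enclosing scope; the real call sites live in the adapter .cpp files and may differ.

    OMX_CONFIG_EXPOSURECONTROLTYPE exp;
    OMX_INIT_STRUCT(exp, OMX_CONFIG_EXPOSURECONTROLTYPE);   // zero the struct, fill nSize/nVersion
    exp.nPortIndex = OMX_ALL;
    exp.eExposureControl = OMX_ExposureControlAuto;

    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                           OMX_IndexConfigCommonExposure, &exp);
    GOTO_EXIT_IF((eError != OMX_ErrorNone), eError);         // jump to EXIT on failure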
+
+///OMX Specific Functions
+static OMX_ERRORTYPE OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData);
+
+static OMX_ERRORTYPE OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffer);
+
+static OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader);
+
+struct CapResolution {
+ size_t width, height;
+ const char *param;
+};
+
+struct CapPixelformat {
+ OMX_COLOR_FORMATTYPE pixelformat;
+ const char *param;
+};
+
+struct CapU32 {
+ OMX_U32 num;
+ const char *param;
+};
+
+struct CapS32 {
+ OMX_S32 num;
+ const char *param;
+};
+
+typedef CapU32 CapFramerate;
+typedef CapU32 CapISO;
+typedef CapU32 CapSensorName;
+typedef CapS32 CapZoom;
+typedef CapS32 CapEVComp;
+
+/**
+ * Class which completely abstracts the camera hardware interaction from the camera HAL.
+ * TODO: List here all the message types that will be supported by this class.
+ *       Need to implement the BufferProvider interface to use OMX AllocateBuffer if needed.
+ */
+class OMXCameraAdapter : public BaseCameraAdapter
+{
+public:
+
+ /*--------------------Constant declarations----------------------------------------*/
+ static const int32_t MAX_NO_BUFFERS = 20;
+
+ ///@remarks OMX Camera has six ports - buffer input, time input, preview, image, video, and meta data
+ static const int MAX_NO_PORTS = 6;
+
+ ///Five second timeout
+ static const int CAMERA_ADAPTER_TIMEOUT = 5000*1000;
+
+ //EXIF ASCII prefix
+ static const char EXIFASCIIPrefix[];
+
+ enum OMXCameraEvents
+ {
+ CAMERA_PORT_ENABLE = 0x1,
+ CAMERA_PORT_FLUSH = 0x2,
+ CAMERA_PORT_DISABLE = 0x4,
+ };
+
+ enum CaptureMode
+ {
+ HIGH_SPEED = 1,
+ HIGH_QUALITY = 2,
+ VIDEO_MODE = 3,
+ HIGH_QUALITY_ZSL = 4,
+ };
+
+ enum IPPMode
+ {
+ IPP_NONE = 0,
+ IPP_NSF,
+ IPP_LDC,
+ IPP_LDCNSF,
+ };
+
+ enum CodingMode
+ {
+ CodingNone = 0,
+ CodingJPS,
+ CodingMPO,
+ CodingRAWJPEG,
+ CodingRAWMPO,
+ };
+
+ enum Algorithm3A
+ {
+ WHITE_BALANCE_ALGO = 0x1,
+ EXPOSURE_ALGO = 0x2,
+ FOCUS_ALGO = 0x4,
+ };
+
+ enum AlgoPriority
+ {
+ FACE_PRIORITY = 0,
+ REGION_PRIORITY,
+ };
+
+ enum BrightnessMode
+ {
+ BRIGHTNESS_OFF = 0,
+ BRIGHTNESS_ON,
+ BRIGHTNESS_AUTO,
+ };
+
+ class GPSData
+ {
+ public:
+ int mLongDeg, mLongMin, mLongSec;
+ char mLongRef[GPS_REF_SIZE];
+ bool mLongValid;
+ int mLatDeg, mLatMin, mLatSec;
+ char mLatRef[GPS_REF_SIZE];
+ bool mLatValid;
+ int mAltitude;
+ unsigned char mAltitudeRef;
+ bool mAltitudeValid;
+ char mMapDatum[GPS_MAPDATUM_SIZE];
+ bool mMapDatumValid;
+ char mVersionId[GPS_VERSION_SIZE];
+ bool mVersionIdValid;
+ char mProcMethod[GPS_PROCESSING_SIZE];
+ bool mProcMethodValid;
+ char mDatestamp[GPS_DATESTAMP_SIZE];
+ bool mDatestampValid;
+ uint32_t mTimeStampHour;
+ uint32_t mTimeStampMin;
+ uint32_t mTimeStampSec;
+ bool mTimeStampValid;
+ };
+
+ class EXIFData
+ {
+ public:
+ GPSData mGPSData;
+ bool mMakeValid;
+ bool mModelValid;
+ };
+
+ ///Parameters specific to any port of the OMX Camera component
+ class OMXCameraPortParameters
+ {
+ public:
+ OMX_U32 mHostBufaddr[MAX_NO_BUFFERS];
+ OMX_BUFFERHEADERTYPE *mBufferHeader[MAX_NO_BUFFERS];
+ OMX_U32 mWidth;
+ OMX_U32 mHeight;
+ OMX_U32 mStride;
+ OMX_U8 mNumBufs;
+
+        // defines the maximum number of buffers, out of mNumBufs,
+        // that can be queued at any given moment
+ OMX_U8 mMaxQueueable;
+
+ OMX_U32 mBufSize;
+ OMX_COLOR_FORMATTYPE mColorFormat;
+ OMX_PARAM_VIDEONOISEFILTERTYPE mVNFMode;
+ OMX_PARAM_VIDEOYUVRANGETYPE mYUVRange;
+ OMX_CONFIG_BOOLEANTYPE mVidStabParam;
+ OMX_CONFIG_FRAMESTABTYPE mVidStabConfig;
+ OMX_U32 mCapFrame;
+ OMX_U32 mFrameRate;
+ OMX_S32 mMinFrameRate;
+ OMX_S32 mMaxFrameRate;
+ CameraFrame::FrameType mImageType;
+ };
+
+ ///Context of the OMX Camera component
+ class OMXCameraAdapterComponentContext
+ {
+ public:
+ OMX_HANDLETYPE mHandleComp;
+ OMX_U32 mNumPorts;
+ OMX_STATETYPE mState ;
+ OMX_U32 mVideoPortIndex;
+ OMX_U32 mPrevPortIndex;
+ OMX_U32 mImagePortIndex;
+ OMX_U32 mMeasurementPortIndex;
+ OMXCameraPortParameters mCameraPortParams[MAX_NO_PORTS];
+ };
+
+public:
+
+ OMXCameraAdapter();
+ ~OMXCameraAdapter();
+
+    ///Initializes the camera adapter and creates any resources required
+ virtual status_t initialize(CameraProperties::Properties*, int sensor_index=0);
+
+ //APIs to configure Camera adapter and get the current parameter set
+ virtual status_t setParameters(const CameraParameters& params);
+ virtual void getParameters(CameraParameters& params);
+
+ // API
+ virtual status_t UseBuffersPreview(void* bufArr, int num);
+
+ //API to flush the buffers for preview
+ status_t flushBuffers();
+
+ // API
+ virtual status_t setFormat(OMX_U32 port, OMXCameraPortParameters &cap);
+
+ // Function to get and populate caps from handle
+ static status_t getCaps(CameraProperties::Properties* props, OMX_HANDLETYPE handle);
+ static const char* getLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT);
+ static int getLUTvalue_HALtoOMX(const char * HalValue, LUTtype LUT);
+
+ OMX_ERRORTYPE OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData);
+
+ OMX_ERRORTYPE OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffer);
+
+ OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader);
+protected:
+
+ //Parent class method implementation
+ virtual status_t takePicture();
+ virtual status_t stopImageCapture();
+ virtual status_t startBracketing(int range);
+ virtual status_t stopBracketing();
+ virtual status_t autoFocus();
+ virtual status_t cancelAutoFocus();
+ virtual status_t startSmoothZoom(int targetIdx);
+ virtual status_t stopSmoothZoom();
+ virtual status_t startVideoCapture();
+ virtual status_t stopVideoCapture();
+ virtual status_t startPreview();
+ virtual status_t stopPreview();
+ virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable);
+ virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType);
+ virtual status_t getFrameSize(size_t &width, size_t &height);
+ virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount);
+ virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
+ virtual status_t startFaceDetection();
+ virtual status_t stopFaceDetection();
+ virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);
+
+private:
+
+ status_t switchToLoaded();
+
+ OMXCameraPortParameters *getPortParams(CameraFrame::FrameType frameType);
+
+ OMX_ERRORTYPE SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData);
+
+ status_t RegisterForEvent(OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1,
+ OMX_IN OMX_U32 nData2,
+ OMX_IN Semaphore &semaphore);
+
+ status_t setPictureRotation(unsigned int degree);
+ status_t setSensorOrientation(unsigned int degree);
+ status_t setImageQuality(unsigned int quality);
+ status_t setThumbnailParams(unsigned int width, unsigned int height, unsigned int quality);
+
+ //EXIF
+ status_t setParametersEXIF(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state);
+ status_t convertGPSCoord(double coord, int *deg, int *min, int *sec);
+ status_t setupEXIF();
+
+ //Focus functionality
+ status_t doAutoFocus();
+ status_t stopAutoFocus();
+ status_t checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus);
+ status_t returnFocusStatus(bool timeoutReached);
+
+ //Focus distances
+ status_t setParametersFocus(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state);
+ status_t addFocusDistances(OMX_U32 &near,
+ OMX_U32 &optimal,
+ OMX_U32 &far,
+ CameraParameters& params);
+ status_t encodeFocusDistance(OMX_U32 dist, char *buffer, size_t length);
+ status_t getFocusDistances(OMX_U32 &near,OMX_U32 &optimal, OMX_U32 &far);
+
+ //VSTAB and VNF Functionality
+ status_t enableVideoNoiseFilter(bool enable);
+ status_t enableVideoStabilization(bool enable);
+
+ //Digital zoom
+ status_t setParametersZoom(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state);
+ status_t doZoom(int index);
+ status_t advanceZoom();
+
+ //3A related parameters
+ status_t setParameters3A(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state);
+ status_t setScene(Gen3A_settings& Gen3A);
+
+ //Flash modes
+ status_t setFlashMode(Gen3A_settings& Gen3A);
+ status_t setFocusMode(Gen3A_settings& Gen3A);
+
+ //Exposure Modes
+ status_t setExposureMode(Gen3A_settings& Gen3A);
+ status_t setEVCompensation(Gen3A_settings& Gen3A);
+ status_t setWBMode(Gen3A_settings& Gen3A);
+ status_t setFlicker(Gen3A_settings& Gen3A);
+ status_t setBrightness(Gen3A_settings& Gen3A);
+ status_t setContrast(Gen3A_settings& Gen3A);
+ status_t setSharpness(Gen3A_settings& Gen3A);
+ status_t setSaturation(Gen3A_settings& Gen3A);
+ status_t setISO(Gen3A_settings& Gen3A);
+ status_t setEffect(Gen3A_settings& Gen3A);
+
+ status_t setExposureLock(Gen3A_settings& Gen3A);
+ status_t setWhiteBalanceLock(Gen3A_settings& Gen3A);
+ status_t release3ALock();
+
+ //API to set FrameRate using VFR interface
+ status_t setVFramerate(OMX_U32 minFrameRate,OMX_U32 maxFrameRate);
+
+ status_t setParametersAlgo(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state);
+
+ //Noise filtering
+ status_t setNSF(OMXCameraAdapter::IPPMode mode);
+
+ //LDC
+ status_t setLDC(OMXCameraAdapter::IPPMode mode);
+
+ //GLBCE
+ status_t setGLBCE(OMXCameraAdapter::BrightnessMode mode);
+
+ //GBCE
+ status_t setGBCE(OMXCameraAdapter::BrightnessMode mode);
+
+ status_t printComponentVersion(OMX_HANDLETYPE handle);
+
+ //Touch AF
+ status_t setTouchFocus(size_t posX,
+ size_t posY,
+ size_t posWidth,
+ size_t posHeight,
+ size_t previewWidth,
+ size_t previewHeight);
+
+ //Face detection
+ status_t setParametersFD(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state);
+ status_t updateFocusDistances(CameraParameters &params);
+ status_t setFaceDetection(bool enable, OMX_U32 orientation);
+ status_t detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
+ sp<CameraFDResult> &result,
+ size_t previewWidth,
+ size_t previewHeight);
+ status_t encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData,
+ camera_frame_metadata_t **pFaces,
+ size_t previewWidth,
+ size_t previewHeight);
+
+ //3A Algorithms priority configuration
+ status_t setAlgoPriority(AlgoPriority priority, Algorithm3A algo, bool enable);
+
+ //Sensor overclocking
+ status_t setSensorOverclock(bool enable);
+
+ // Utility methods for OMX Capabilities
+ static status_t insertCapabilities(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t encodeSizeCap(OMX_TI_CAPRESTYPE&, const CapResolution *, size_t, char *, size_t);
+ static status_t encodeISOCap(OMX_U32, const CapISO*, size_t, char*, size_t);
+ static size_t encodeZoomCap(OMX_S32, const CapZoom*, size_t, char*, size_t);
+ static status_t encodeFramerateCap(OMX_U32, OMX_U32, const CapFramerate*, size_t, char*, size_t);
+ static status_t encodeVFramerateCap(OMX_TI_CAPTYPE&, char*, char*, size_t);
+ static status_t encodePixelformatCap(OMX_COLOR_FORMATTYPE,
+ const CapPixelformat*,
+ size_t,
+ char*,
+ size_t);
+ static status_t insertImageSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertPreviewSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertThumbSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertZoomStages(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertImageFormats(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertPreviewFormats(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertFramerates(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertVFramerates(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertEVs(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertISOModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertIPPModes(CameraProperties::Properties*, OMX_TI_CAPTYPE &);
+ static status_t insertWBModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertEffects(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertExpModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertSceneModes(CameraProperties::Properties*, OMX_TI_CAPTYPE &);
+ static status_t insertFocusModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertFlickerModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertFlashModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertSenMount(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertDefaults(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+ static status_t insertLocks(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
+
+ status_t setParametersCapture(const CameraParameters &params,
+ BaseCameraAdapter::AdapterState state);
+
+ //Exposure Bracketing
+ status_t setExposureBracketing(int *evValues, size_t evCount, size_t frameCount);
+ status_t parseExpRange(const char *rangeStr, int * expRange, size_t count, size_t &validEntries);
+
+ //Temporal Bracketing
+ status_t doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader, CameraFrame::FrameType typeOfFrame);
+ status_t sendBracketFrames();
+
+ // Image Capture Service
+ status_t startImageCapture();
+
+ //Shutter callback notifications
+ status_t setShutterCallback(bool enabled);
+
+    //Sets either HQ or HS mode and the frame count
+ status_t setCaptureMode(OMXCameraAdapter::CaptureMode mode);
+ status_t UseBuffersCapture(void* bufArr, int num);
+ status_t UseBuffersPreviewData(void* bufArr, int num);
+
+ //Used for calculation of the average frame rate during preview
+ status_t recalculateFPS();
+
+    //Helper method for initializing a CameraFrame object
+ status_t initCameraFrame(CameraFrame &frame, OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, int typeOfFrame, OMXCameraPortParameters *port);
+
+ //Sends the incoming OMX buffer header to subscribers
+ status_t sendFrame(CameraFrame &frame);
+
+ status_t apply3Asettings( Gen3A_settings& Gen3A );
+
+ // AutoConvergence
+ status_t setAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE pACMode, OMX_S32 pManualConverence);
+ status_t getAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE *pACMode, OMX_S32 *pManualConverence);
+
+ class CommandHandler : public Thread {
+ public:
+ CommandHandler(OMXCameraAdapter* ca)
+ : Thread(false), mCameraAdapter(ca) { }
+
+ virtual bool threadLoop() {
+ bool ret;
+ ret = Handler();
+ return ret;
+ }
+
+ status_t put(TIUTILS::Message* msg){
+ return mCommandMsgQ.put(msg);
+ }
+
+ enum {
+ COMMAND_EXIT = -1,
+ CAMERA_START_IMAGE_CAPTURE = 0,
+ CAMERA_PERFORM_AUTOFOCUS
+ };
+
+ private:
+ bool Handler();
+ TIUTILS::MessageQueue mCommandMsgQ;
+ OMXCameraAdapter* mCameraAdapter;
+ };
+ sp<CommandHandler> mCommandHandler;
+
+public:
+
+ class OMXCallbackHandler : public Thread {
+ public:
+ OMXCallbackHandler(OMXCameraAdapter* ca)
+ : Thread(false), mCameraAdapter(ca) { }
+
+ virtual bool threadLoop() {
+ bool ret;
+ ret = Handler();
+ return ret;
+ }
+
+ status_t put(TIUTILS::Message* msg){
+ return mCommandMsgQ.put(msg);
+ }
+
+ enum {
+ COMMAND_EXIT = -1,
+ CAMERA_FILL_BUFFER_DONE,
+ };
+
+ private:
+ bool Handler();
+ TIUTILS::MessageQueue mCommandMsgQ;
+ OMXCameraAdapter* mCameraAdapter;
+ };
+
+ sp<OMXCallbackHandler> mOMXCallbackHandler;
+
+private:
+
+ //AF callback
+ status_t setFocusCallback(bool enabled);
+
+ //OMX Capabilities data
+ static const CapResolution mImageCapRes [];
+ static const CapResolution mPreviewRes [];
+ static const CapResolution mThumbRes [];
+ static const CapPixelformat mPixelformats [];
+ static const CapFramerate mFramerates [];
+ static const CapU32 mSensorNames[] ;
+ static const CapZoom mZoomStages [];
+ static const CapEVComp mEVCompRanges [];
+ static const CapISO mISOStages [];
+
+ // OMX Camera defaults
+ static const char DEFAULT_ANTIBANDING[];
+ static const char DEFAULT_BRIGHTNESS[];
+ static const char DEFAULT_CONTRAST[];
+ static const char DEFAULT_EFFECT[];
+ static const char DEFAULT_EV_COMPENSATION[];
+ static const char DEFAULT_EV_STEP[];
+ static const char DEFAULT_EXPOSURE_MODE[];
+ static const char DEFAULT_FLASH_MODE[];
+ static const char DEFAULT_FOCUS_MODE[];
+ static const char DEFAULT_FRAMERATE_RANGE[];
+ static const char DEFAULT_IPP[];
+ static const char DEFAULT_ISO_MODE[];
+ static const char DEFAULT_JPEG_QUALITY[];
+ static const char DEFAULT_THUMBNAIL_QUALITY[];
+ static const char DEFAULT_THUMBNAIL_SIZE[];
+ static const char DEFAULT_PICTURE_FORMAT[];
+ static const char DEFAULT_PICTURE_SIZE[];
+ static const char DEFAULT_PREVIEW_FORMAT[];
+ static const char DEFAULT_FRAMERATE[];
+ static const char DEFAULT_PREVIEW_SIZE[];
+ static const char DEFAULT_NUM_PREV_BUFS[];
+ static const char DEFAULT_NUM_PIC_BUFS[];
+ static const char DEFAULT_MAX_FOCUS_AREAS[];
+ static const char DEFAULT_SATURATION[];
+ static const char DEFAULT_SCENE_MODE[];
+ static const char DEFAULT_SHARPNESS[];
+ static const char DEFAULT_VSTAB[];
+ static const char DEFAULT_WB[];
+ static const char DEFAULT_ZOOM[];
+ static const char DEFAULT_MAX_FD_HW_FACES[];
+ static const char DEFAULT_MAX_FD_SW_FACES[];
+ static const char DEFAULT_AE_LOCK[];
+ static const char DEFAULT_AWB_LOCK[];
+ static const char DEFAULT_MAX_NUM_METERING_AREAS[];
+ static const char DEFAULT_LOCK_SUPPORTED[];
+ static const char DEFAULT_LOCK_UNSUPPORTED[];
+
+ OMX_VERSIONTYPE mCompRevision;
+
+ //OMX Component UUID
+ OMX_UUIDTYPE mCompUUID;
+
+ //Current Focus distances
+ char mFocusDistNear[FOCUS_DIST_SIZE];
+ char mFocusDistOptimal[FOCUS_DIST_SIZE];
+ char mFocusDistFar[FOCUS_DIST_SIZE];
+ char mFocusDistBuffer[FOCUS_DIST_BUFFER_SIZE];
+
+ // Current Focus areas
+ Vector< sp<CameraArea> > mFocusAreas;
+
+ CaptureMode mCapMode;
+ size_t mBurstFrames;
+ size_t mCapturedFrames;
+
+ bool mMeasurementEnabled;
+
+ //Exposure Bracketing
+ int mExposureBracketingValues[EXP_BRACKET_RANGE];
+ size_t mExposureBracketingValidEntries;
+
+ mutable Mutex mFaceDetectionLock;
+ //Face detection status
+ bool mFaceDetectionRunning;
+
+ //Geo-tagging
+ EXIFData mEXIFData;
+
+ //Image post-processing
+ IPPMode mIPP;
+
+ //jpeg Picture Quality
+ unsigned int mPictureQuality;
+
+ //thumbnail resolution
+ unsigned int mThumbWidth, mThumbHeight;
+
+ //thumbnail quality
+ unsigned int mThumbQuality;
+
+ //variables holding the estimated framerate
+ float mFPS, mLastFPS;
+
+    //automatically disable AF after a given number of frames
+ unsigned int mFocusThreshold;
+
+    //This is needed for the CTS tests. They falsely assume that during
+    //smooth zoom the current zoom stage will not change within the
+    //zoom callback scope, which in a real-world situation is not always the
+    //case. This variable "simulates" the expected behavior.
+ unsigned int mZoomParameterIdx;
+
+ //current zoom
+ Mutex mZoomLock;
+ unsigned int mCurrentZoomIdx, mTargetZoomIdx;
+ int mZoomInc;
+ bool mReturnZoomStatus;
+ static const int32_t ZOOM_STEPS [];
+
+ //local copy
+ OMX_VERSIONTYPE mLocalVersionParam;
+
+ unsigned int mPending3Asettings;
+ Gen3A_settings mParameters3A;
+
+ CameraParameters mParams;
+ CameraProperties::Properties* mCapabilities;
+ unsigned int mPictureRotation;
+ bool mWaitingForSnapshot;
+ int mSnapshotCount;
+ bool mCaptureConfigured;
+
+ //Temporal bracketing management data
+ mutable Mutex mBracketingLock;
+ bool *mBracketingBuffersQueued;
+ int mBracketingBuffersQueuedCount;
+ int mLastBracetingBufferIdx;
+ bool mBracketingEnabled;
+ int mBracketingRange;
+
+ CameraParameters mParameters;
+ OMXCameraAdapterComponentContext mCameraAdapterParameters;
+ bool mFirstTimeInit;
+
+ ///Semaphores used internally
+ Semaphore mDoAFSem;
+ Semaphore mInitSem;
+ Semaphore mFlushSem;
+ Semaphore mUsePreviewDataSem;
+ Semaphore mUsePreviewSem;
+ Semaphore mUseCaptureSem;
+ Semaphore mStartPreviewSem;
+ Semaphore mStopPreviewSem;
+ Semaphore mStartCaptureSem;
+ Semaphore mStopCaptureSem;
+ Semaphore mSwitchToLoadedSem;
+
+ Vector<struct TIUTILS::Message *> mEventSignalQ;
+ Mutex mEventLock;
+
+ OMX_STATETYPE mComponentState;
+
+ bool mVnfEnabled;
+ bool mVstabEnabled;
+
+ int mSensorOrientation;
+ int mDeviceOrientation;
+ bool mSensorOverclock;
+
+ //Indicates if we should leave
+ //OMX_Executing state during
+ //stop-/startPreview
+ bool mOMXStateSwitch;
+
+ int mFrameCount;
+ int mLastFrameCount;
+ unsigned int mIter;
+ nsecs_t mLastFPSTime;
+
+ int mSensorIndex;
+ CodingMode mCodingMode;
+
+    // Time source delta between the Ducati and system clocks
+ OMX_TICKS mTimeSourceDelta;
+ bool onlyOnce;
+
+ Semaphore mCaptureSem;
+ bool mCaptureSignalled;
+
+};
+}; //// namespace
+#endif //OMX_CAMERA_ADAPTER_H
+
diff --git a/camera/inc/SensorListener.h b/camera/inc/SensorListener.h
new file mode 100644
index 0000000..913eb95
--- /dev/null
+++ b/camera/inc/SensorListener.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file SensorListener.h
+*
+* This defines the API used by CameraHal to receive sensor events
+*
+*/
+
+#ifndef ANDROID_CAMERA_HARDWARE_SENSOR_LISTENER_H
+#define ANDROID_CAMERA_HARDWARE_SENSOR_LISTENER_H
+
+#include <android/sensor.h>
+#include <gui/Sensor.h>
+#include <gui/SensorManager.h>
+#include <gui/SensorEventQueue.h>
+#include <utils/Looper.h>
+
+namespace android {
+
+/**
+ * SensorListener class - Registers with the sensor manager to receive sensor events
+ */
+
+typedef void (*orientation_callback_t) (uint32_t orientation, uint32_t tilt, void* cookie);
+
+class SensorLooperThread : public Thread {
+ public:
+ SensorLooperThread(Looper* looper)
+ : Thread(false) {
+ mLooper = sp<Looper>(looper);
+ }
+ ~SensorLooperThread() {
+ mLooper.clear();
+ }
+
+ virtual bool threadLoop() {
+ int32_t ret = mLooper->pollOnce(-1);
+ return true;
+ }
+
+ // force looper wake up
+ void wake() {
+ mLooper->wake();
+ }
+ private:
+ sp<Looper> mLooper;
+};
+
+
+class SensorListener : public RefBase
+{
+/* public - types */
+public:
+ typedef enum {
+ SENSOR_ACCELEROMETER = 1 << 0,
+ SENSOR_MAGNETIC_FIELD = 1 << 1,
+ SENSOR_GYROSCOPE = 1 << 2,
+ SENSOR_LIGHT = 1 << 3,
+ SENSOR_PROXIMITY = 1 << 4,
+ SENSOR_ORIENTATION = 1 << 5,
+ } sensor_type_t;
+/* public - functions */
+public:
+ SensorListener();
+ ~SensorListener();
+ status_t initialize();
+ void setCallbacks(orientation_callback_t orientation_cb, void *cookie);
+ void enableSensor(sensor_type_t type);
+ void disableSensor(sensor_type_t type);
+ void handleOrientation(uint32_t orientation, uint32_t tilt);
+/* public - member variables */
+public:
+ sp<SensorEventQueue> mSensorEventQueue;
+/* private - member variables */
+private:
+ int sensorsEnabled;
+ orientation_callback_t mOrientationCb;
+ void *mCbCookie;
+ sp<Looper> mLooper;
+ sp<SensorLooperThread> mSensorLooperThread;
+ Mutex mLock;
+};
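A minimal usage sketch of this class; the callback name and cookie below are invented for illustration, and error handling is elided.

    // Hypothetical consumer registering for orientation events.
    static void onOrientation(uint32_t orientation, uint32_t tilt, void *cookie) {
        // forward to the owning object via 'cookie'
    }

    sp<SensorListener> listener = new SensorListener();
    if (listener->initialize() == NO_ERROR) {
        listener->setCallbacks(onOrientation, /* cookie */ NULL);
        listener->enableSensor(SensorListener::SENSOR_ORIENTATION);
    }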
+
+}
+
+#endif
diff --git a/camera/inc/TICameraParameters.h b/camera/inc/TICameraParameters.h
new file mode 100644
index 0000000..1886d0c
--- /dev/null
+++ b/camera/inc/TICameraParameters.h
@@ -0,0 +1,241 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+
+#ifndef TI_CAMERA_PARAMETERS_H
+#define TI_CAMERA_PARAMETERS_H
+
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+
+namespace android {
+
+///TI Specific Camera Parameters
+class TICameraParameters
+{
+public:
+
+// Supported Camera indexes
+// Example value: "0,1,2,3", where 0-primary, 1-secondary1, 2-secondary2, 3-sterocamera
+static const char KEY_SUPPORTED_CAMERAS[];
+// Select logical Camera index
+static const char KEY_CAMERA[];
+static const char KEY_CAMERA_NAME[];
+static const char KEY_S3D_SUPPORTED[];
+static const char KEY_BURST[];
+static const char KEY_CAP_MODE[];
+static const char KEY_VSTAB[];
+static const char KEY_VSTAB_VALUES[];
+static const char KEY_VNF[];
+static const char KEY_SATURATION[];
+static const char KEY_BRIGHTNESS[];
+static const char KEY_EXPOSURE_MODE[];
+static const char KEY_SUPPORTED_EXPOSURE[];
+static const char KEY_CONTRAST[];
+static const char KEY_SHARPNESS[];
+static const char KEY_ISO[];
+static const char KEY_SUPPORTED_ISO_VALUES[];
+static const char KEY_SUPPORTED_IPP[];
+static const char KEY_IPP[];
+static const char KEY_MAN_EXPOSURE[];
+static const char KEY_METERING_MODE[];
+static const char KEY_PADDED_WIDTH[];
+static const char KEY_PADDED_HEIGHT[];
+static const char KEY_EXP_BRACKETING_RANGE[];
+static const char KEY_TEMP_BRACKETING[];
+static const char KEY_TEMP_BRACKETING_RANGE_POS[];
+static const char KEY_TEMP_BRACKETING_RANGE_NEG[];
+static const char KEY_SHUTTER_ENABLE[];
+static const char KEY_MEASUREMENT_ENABLE[];
+static const char KEY_INITIAL_VALUES[];
+static const char KEY_GBCE[];
+static const char KEY_GLBCE[];
+static const char KEY_MINFRAMERATE[];
+static const char KEY_MAXFRAMERATE[];
+
+static const char KEY_CURRENT_ISO[];
+
+static const char KEY_SENSOR_ORIENTATION[];
+static const char KEY_SENSOR_ORIENTATION_VALUES[];
+
+//TI extensions for zoom
+static const char ZOOM_SUPPORTED[];
+static const char ZOOM_UNSUPPORTED[];
+
+//TI extensions for camera capabilities
+static const char INITIAL_VALUES_TRUE[];
+static const char INITIAL_VALUES_FALSE[];
+
+//TI extensions for enabling/disabling measurements
+static const char MEASUREMENT_ENABLE[];
+static const char MEASUREMENT_DISABLE[];
+
+// TI extensions to add values for ManualConvergence and AutoConvergence mode
+static const char KEY_AUTOCONVERGENCE[];
+static const char KEY_AUTOCONVERGENCE_MODE[];
+static const char KEY_MANUALCONVERGENCE_VALUES[];
+
+//TI extensions for enabling/disabling GLBCE
+static const char GLBCE_ENABLE[];
+static const char GLBCE_DISABLE[];
+
+//TI extensions for enabling/disabling GBCE
+static const char GBCE_ENABLE[];
+static const char GBCE_DISABLE[];
+
+// TI extensions to add Min frame rate Values
+static const char VIDEO_MINFRAMERATE_5[];
+static const char VIDEO_MINFRAMERATE_10[];
+static const char VIDEO_MINFRAMERATE_15[];
+static const char VIDEO_MINFRAMERATE_20[];
+static const char VIDEO_MINFRAMERATE_24[];
+static const char VIDEO_MINFRAMERATE_25[];
+static const char VIDEO_MINFRAMERATE_30[];
+static const char VIDEO_MINFRAMERATE_33[];
+
+// TI extensions for Manual Gain and Manual Exposure
+static const char KEY_MANUAL_EXPOSURE_LEFT[];
+static const char KEY_MANUAL_EXPOSURE_RIGHT[];
+static const char KEY_MANUAL_EXPOSURE_MODES[];
+static const char KEY_MANUAL_GAIN_EV_RIGHT[];
+static const char KEY_MANUAL_GAIN_EV_LEFT[];
+static const char KEY_MANUAL_GAIN_ISO_RIGHT[];
+static const char KEY_MANUAL_GAIN_ISO_LEFT[];
+static const char KEY_MANUAL_GAIN_MODES[];
+
+//TI extensions for setting EXIF tags
+static const char KEY_EXIF_MODEL[];
+static const char KEY_EXIF_MAKE[];
+
+//TI extensions for additional GPS data
+static const char KEY_GPS_MAPDATUM[];
+static const char KEY_GPS_VERSION[];
+static const char KEY_GPS_DATESTAMP[];
+
+//TI extensions for enabling/disabling shutter sound
+static const char SHUTTER_ENABLE[];
+static const char SHUTTER_DISABLE[];
+
+//TI extensions for Temporal bracketing
+static const char BRACKET_ENABLE[];
+static const char BRACKET_DISABLE[];
+
+//TI extensions to Image post-processing
+static const char IPP_LDCNSF[];
+static const char IPP_LDC[];
+static const char IPP_NSF[];
+static const char IPP_NONE[];
+
+//TI extensions to camera mode
+static const char HIGH_PERFORMANCE_MODE[];
+static const char HIGH_QUALITY_MODE[];
+static const char HIGH_QUALITY_ZSL_MODE[];
+static const char VIDEO_MODE[];
+
+
+// TI extensions to standard android pixel formats
+static const char PIXEL_FORMAT_RAW[];
+static const char PIXEL_FORMAT_JPS[];
+static const char PIXEL_FORMAT_MPO[];
+static const char PIXEL_FORMAT_RAW_JPEG[];
+static const char PIXEL_FORMAT_RAW_MPO[];
+
+// TI extensions to standard android scene mode settings
+static const char SCENE_MODE_SPORT[];
+static const char SCENE_MODE_CLOSEUP[];
+static const char SCENE_MODE_AQUA[];
+static const char SCENE_MODE_SNOWBEACH[];
+static const char SCENE_MODE_MOOD[];
+static const char SCENE_MODE_NIGHT_INDOOR[];
+static const char SCENE_MODE_DOCUMENT[];
+static const char SCENE_MODE_BARCODE[];
+static const char SCENE_MODE_VIDEO_SUPER_NIGHT[];
+static const char SCENE_MODE_VIDEO_CINE[];
+static const char SCENE_MODE_VIDEO_OLD_FILM[];
+
+// TI extensions to standard android white balance settings.
+static const char WHITE_BALANCE_TUNGSTEN[];
+static const char WHITE_BALANCE_HORIZON[];
+static const char WHITE_BALANCE_SUNSET[];
+static const char WHITE_BALANCE_FACE[];
+
+// TI extensions to add exposure preset modes to android api
+static const char EXPOSURE_MODE_OFF[];
+static const char EXPOSURE_MODE_AUTO[];
+static const char EXPOSURE_MODE_NIGHT[];
+static const char EXPOSURE_MODE_BACKLIGHT[];
+static const char EXPOSURE_MODE_SPOTLIGHT[];
+static const char EXPOSURE_MODE_SPORTS[];
+static const char EXPOSURE_MODE_SNOW[];
+static const char EXPOSURE_MODE_BEACH[];
+static const char EXPOSURE_MODE_APERTURE[];
+static const char EXPOSURE_MODE_SMALL_APERTURE[];
+static const char EXPOSURE_MODE_FACE[];
+
+// TI extensions to standard android focus presets.
+static const char FOCUS_MODE_PORTRAIT[];
+static const char FOCUS_MODE_EXTENDED[];
+static const char FOCUS_MODE_FACE[];
+
+// TI extensions to add iso values
+static const char ISO_MODE_AUTO[];
+static const char ISO_MODE_100[];
+static const char ISO_MODE_200[];
+static const char ISO_MODE_400[];
+static const char ISO_MODE_800[];
+static const char ISO_MODE_1000[];
+static const char ISO_MODE_1200[];
+static const char ISO_MODE_1600[];
+
+// TI extensions to add values for effect settings.
+static const char EFFECT_NATURAL[];
+static const char EFFECT_VIVID[];
+static const char EFFECT_COLOR_SWAP[];
+static const char EFFECT_BLACKWHITE[];
+
+static const char KEY_S3D2D_PREVIEW[];
+static const char KEY_S3D2D_PREVIEW_MODE[];
+
+// TI extensions to add values for AutoConvergence settings.
+static const char AUTOCONVERGENCE_MODE_DISABLE[];
+static const char AUTOCONVERGENCE_MODE_FRAME[];
+static const char AUTOCONVERGENCE_MODE_CENTER[];
+static const char AUTOCONVERGENCE_MODE_FFT[];
+static const char AUTOCONVERGENCE_MODE_MANUAL[];
+
+
+//TI extensions for flash mode settings
+static const char FLASH_MODE_FILL_IN[];
+
+//TI extensions to add sensor orientation parameters
+static const char ORIENTATION_SENSOR_NONE[];
+static const char ORIENTATION_SENSOR_90[];
+static const char ORIENTATION_SENSOR_180[];
+static const char ORIENTATION_SENSOR_270[];
+
+
+//TI values for camera direction
+static const char FACING_FRONT[];
+static const char FACING_BACK[];
+
+};
+
+};
+
+#endif
+
diff --git a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
new file mode 100644
index 0000000..b9d3952
--- /dev/null
+++ b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef V4L_CAMERA_ADAPTER_H
+#define V4L_CAMERA_ADAPTER_H
+
+#include "CameraHal.h"
+#include "BaseCameraAdapter.h"
+#include "DebugUtils.h"
+
+namespace android {
+
+#define DEFAULT_PIXEL_FORMAT V4L2_PIX_FMT_YUYV
+#define NB_BUFFER 10
+#define DEVICE "/dev/video4"
+
+
+struct VideoInfo {
+ struct v4l2_capability cap;
+ struct v4l2_format format;
+ struct v4l2_buffer buf;
+ struct v4l2_requestbuffers rb;
+ void *mem[NB_BUFFER];
+ bool isStreaming;
+ int width;
+ int height;
+ int formatIn;
+ int framesizeIn;
+};
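A hedged sketch of how these fields might be populated at start-up, using only standard V4L2 ioctls (assumes <fcntl.h>, <sys/ioctl.h> and <linux/videodev2.h>; the actual adapter code may be organized differently).

    int fd = open(DEVICE, O_RDWR);                 // DEVICE is "/dev/video4" per the define above
    if (fd >= 0) {
        struct VideoInfo info;
        memset(&info, 0, sizeof(info));
        if (ioctl(fd, VIDIOC_QUERYCAP, &info.cap) == 0 &&
            (info.cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
            // the device can capture video; VIDIOC_S_FMT / VIDIOC_REQBUFS would follow,
            // filling info.format and info.rb respectively
        }
        close(fd);
    }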
+
+
+/**
+ * Class which completely abstracts the camera hardware interaction from the camera HAL.
+ * TODO: List here all the message types that will be supported by this class.
+ *       Need to implement the BufferProvider interface to use OMX AllocateBuffer if needed.
+ */
+class V4LCameraAdapter : public BaseCameraAdapter
+{
+public:
+
+ /*--------------------Constant declarations----------------------------------------*/
+ static const int32_t MAX_NO_BUFFERS = 20;
+
+ ///@remarks OMX Camera has six ports - buffer input, time input, preview, image, video, and meta data
+ static const int MAX_NO_PORTS = 6;
+
+ ///Five second timeout
+ static const int CAMERA_ADAPTER_TIMEOUT = 5000*1000;
+
+public:
+
+ V4LCameraAdapter();
+ ~V4LCameraAdapter();
+
+
+    ///Initializes the camera adapter and creates any resources required
+ virtual status_t initialize(CameraProperties::Properties*, int sensor_index=0);
+
+ //APIs to configure Camera adapter and get the current parameter set
+ virtual status_t setParameters(const CameraParameters& params);
+ virtual void getParameters(CameraParameters& params);
+
+ // API
+ virtual status_t UseBuffersPreview(void* bufArr, int num);
+
+ //API to flush the buffers for preview
+ status_t flushBuffers();
+
+protected:
+
+//----------Parent class method implementation------------------------------------
+ virtual status_t startPreview();
+ virtual status_t stopPreview();
+ virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable);
+ virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType);
+ virtual status_t getFrameSize(size_t &width, size_t &height);
+ virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount);
+ virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
+ virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);
+//-----------------------------------------------------------------------------
+
+
+private:
+
+ class PreviewThread : public Thread {
+ V4LCameraAdapter* mAdapter;
+ public:
+ PreviewThread(V4LCameraAdapter* hw) :
+ Thread(false), mAdapter(hw) { }
+ virtual void onFirstRef() {
+ run("CameraPreviewThread", PRIORITY_URGENT_DISPLAY);
+ }
+ virtual bool threadLoop() {
+ mAdapter->previewThread();
+ // loop until we need to quit
+ return true;
+ }
+ };
+
+ //Used for calculation of the average frame rate during preview
+ status_t recalculateFPS();
+
+ char * GetFrame(int &index);
+
+ int previewThread();
+
+public:
+
+private:
+ int mPreviewBufferCount;
+ KeyedVector<int, int> mPreviewBufs;
+ mutable Mutex mPreviewBufsLock;
+
+ CameraParameters mParams;
+
+ bool mPreviewing;
+ bool mCapturing;
+ Mutex mLock;
+
+ int mFrameCount;
+ int mLastFrameCount;
+ unsigned int mIter;
+ nsecs_t mLastFPSTime;
+
+ //variables holding the estimated framerate
+ float mFPS, mLastFPS;
+
+ int mSensorIndex;
+
+ // protected by mLock
+ sp<PreviewThread> mPreviewThread;
+
+ struct VideoInfo *mVideoInfo;
+ int mCameraHandle;
+
+
+ int nQueued;
+ int nDequeued;
+
+};
+}; //// namespace
+#endif //V4L_CAMERA_ADAPTER_H
+
diff --git a/camera/inc/VideoMetadata.h b/camera/inc/VideoMetadata.h
new file mode 100644
index 0000000..f05ee50
--- /dev/null
+++ b/camera/inc/VideoMetadata.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_METADATA_H
+#define VIDEO_METADATA_H
+
+/* This structure is used to pass the buffer offset from the Camera HAL to the encoder component
+ * for specific algorithms like VSTAB & VNF
+ */
+
+typedef struct
+{
+ int metadataBufferType;
+ void* handle;
+ int offset;
+}
+video_metadata_t;
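An illustrative fill of this record on the camera side; the variable names on the right-hand side are assumptions introduced for the example, not identifiers from this patch.

    video_metadata_t meta;
    meta.metadataBufferType = 0;              // e.g. kMetadataBufferTypeCameraSource in stagefright builds
    meta.handle             = bufferHandle;   // opaque buffer handle for the frame (assumed variable)
    meta.offset             = cropOffset;     // byte offset applied by VSTAB/VNF (assumed variable)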
+
+#endif