From 46658da7a37c43075b66575e639872b5ecce3155 Mon Sep 17 00:00:00 2001 From: Ziyann Date: Mon, 24 Nov 2014 21:31:16 +0100 Subject: tuna: add open-source domx/camera stuff Camera is still half-broken. Credits to @MWisBest Change-Id: I87a802abfacaf36ab22676f5284f0cc1996f6b03 --- camera/ANativeWindowDisplayAdapter.cpp | 1269 ++++++++ camera/Android.mk | 137 + camera/AppCallbackNotifier.cpp | 1851 +++++++++++ camera/BaseCameraAdapter.cpp | 2329 ++++++++++++++ camera/CameraHal.cpp | 3569 +++++++++++++++++++++ camera/CameraHalCommon.cpp | 121 + camera/CameraHalUtilClasses.cpp | 362 +++ camera/CameraHal_Module.cpp | 700 ++++ camera/CameraParameters.cpp | 192 ++ camera/CameraProperties.cpp | 129 + camera/Encoder_libjpeg.cpp | 472 +++ camera/MemoryManager.cpp | 228 ++ camera/NV12_resize.c | 307 ++ camera/OMXCameraAdapter/OMX3A.cpp | 1731 ++++++++++ camera/OMXCameraAdapter/OMXAlgo.cpp | 1180 +++++++ camera/OMXCameraAdapter/OMXCameraAdapter.cpp | 3713 ++++++++++++++++++++++ camera/OMXCameraAdapter/OMXCapabilities.cpp | 1279 ++++++++ camera/OMXCameraAdapter/OMXCapture.cpp | 1229 +++++++ camera/OMXCameraAdapter/OMXDefaults.cpp | 83 + camera/OMXCameraAdapter/OMXExif.cpp | 839 +++++ camera/OMXCameraAdapter/OMXFD.cpp | 490 +++ camera/OMXCameraAdapter/OMXFocus.cpp | 840 +++++ camera/OMXCameraAdapter/OMXZoom.cpp | 296 ++ camera/SensorListener.cpp | 233 ++ camera/TICameraParameters.cpp | 202 ++ camera/V4LCameraAdapter/V4LCameraAdapter.cpp | 611 ++++ camera/inc/ANativeWindowDisplayAdapter.h | 188 ++ camera/inc/BaseCameraAdapter.h | 272 ++ camera/inc/CameraHal.h | 1272 ++++++++ camera/inc/CameraProperties.h | 198 ++ camera/inc/Encoder_libjpeg.h | 209 ++ camera/inc/General3A_Settings.h | 280 ++ camera/inc/NV12_resize.h | 148 + camera/inc/OMXCameraAdapter/OMXCameraAdapter.h | 962 ++++++ camera/inc/OMXCameraAdapter/OMXSceneModeTables.h | 247 ++ camera/inc/SensorListener.h | 101 + camera/inc/TICameraParameters.h | 242 ++ camera/inc/V4LCameraAdapter/V4LCameraAdapter.h | 158 + 
camera/inc/VideoMetadata.h | 32 + 39 files changed, 28701 insertions(+) create mode 100644 camera/ANativeWindowDisplayAdapter.cpp create mode 100644 camera/Android.mk create mode 100644 camera/AppCallbackNotifier.cpp create mode 100644 camera/BaseCameraAdapter.cpp create mode 100644 camera/CameraHal.cpp create mode 100644 camera/CameraHalCommon.cpp create mode 100644 camera/CameraHalUtilClasses.cpp create mode 100644 camera/CameraHal_Module.cpp create mode 100644 camera/CameraParameters.cpp create mode 100644 camera/CameraProperties.cpp create mode 100644 camera/Encoder_libjpeg.cpp create mode 100644 camera/MemoryManager.cpp create mode 100644 camera/NV12_resize.c create mode 100644 camera/OMXCameraAdapter/OMX3A.cpp create mode 100644 camera/OMXCameraAdapter/OMXAlgo.cpp create mode 100755 camera/OMXCameraAdapter/OMXCameraAdapter.cpp create mode 100644 camera/OMXCameraAdapter/OMXCapabilities.cpp create mode 100644 camera/OMXCameraAdapter/OMXCapture.cpp create mode 100644 camera/OMXCameraAdapter/OMXDefaults.cpp create mode 100644 camera/OMXCameraAdapter/OMXExif.cpp create mode 100644 camera/OMXCameraAdapter/OMXFD.cpp create mode 100644 camera/OMXCameraAdapter/OMXFocus.cpp create mode 100644 camera/OMXCameraAdapter/OMXZoom.cpp create mode 100644 camera/SensorListener.cpp create mode 100644 camera/TICameraParameters.cpp create mode 100644 camera/V4LCameraAdapter/V4LCameraAdapter.cpp create mode 100644 camera/inc/ANativeWindowDisplayAdapter.h create mode 100644 camera/inc/BaseCameraAdapter.h create mode 100644 camera/inc/CameraHal.h create mode 100644 camera/inc/CameraProperties.h create mode 100644 camera/inc/Encoder_libjpeg.h create mode 100644 camera/inc/General3A_Settings.h create mode 100644 camera/inc/NV12_resize.h create mode 100644 camera/inc/OMXCameraAdapter/OMXCameraAdapter.h create mode 100644 camera/inc/OMXCameraAdapter/OMXSceneModeTables.h create mode 100644 camera/inc/SensorListener.h create mode 100644 camera/inc/TICameraParameters.h create mode 100644 
camera/inc/V4LCameraAdapter/V4LCameraAdapter.h create mode 100644 camera/inc/VideoMetadata.h (limited to 'camera') diff --git a/camera/ANativeWindowDisplayAdapter.cpp b/camera/ANativeWindowDisplayAdapter.cpp new file mode 100644 index 0000000..e4a70ae --- /dev/null +++ b/camera/ANativeWindowDisplayAdapter.cpp @@ -0,0 +1,1269 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + + +#define LOG_TAG "CameraHAL" + +#include "ANativeWindowDisplayAdapter.h" +#include +#include +#include +#include + +namespace android { + +///Constant declarations +///@todo Check the time units +const int ANativeWindowDisplayAdapter::DISPLAY_TIMEOUT = 1000; // seconds + +//Suspends buffers after given amount of failed dq's +const int ANativeWindowDisplayAdapter::FAILED_DQS_TO_SUSPEND = 3; + + +OMX_COLOR_FORMATTYPE toOMXPixFormat(const char* parameters_format) +{ + OMX_COLOR_FORMATTYPE pixFormat; + + if ( parameters_format != NULL ) + { + if (strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) + { + CAMHAL_LOGDA("CbYCrY format selected"); + pixFormat = OMX_COLOR_FormatCbYCrY; + } + else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) + { + CAMHAL_LOGDA("YUV420SP format selected"); + pixFormat = OMX_COLOR_FormatYUV420SemiPlanar; + } + else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) + { + 
CAMHAL_LOGDA("RGB565 format selected"); + pixFormat = OMX_COLOR_Format16bitRGB565; + } + else + { + CAMHAL_LOGDA("Invalid format, CbYCrY format selected as default"); + pixFormat = OMX_COLOR_FormatCbYCrY; + } + } + else { + CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY"); + pixFormat = OMX_COLOR_FormatCbYCrY; + } + + return pixFormat; +} + +const char* getPixFormatConstant(const char* parameters_format) +{ + const char* pixFormat; + + if ( parameters_format != NULL ) + { + if (strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) + { + CAMHAL_LOGVA("CbYCrY format selected"); + pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I; + } + else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 || + strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0) + { + // TODO(XXX): We are treating YV12 the same as YUV420SP + CAMHAL_LOGVA("YUV420SP format selected"); + pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP; + } + else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) + { + CAMHAL_LOGVA("RGB565 format selected"); + pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_RGB565; + } + else + { + CAMHAL_LOGEA("Invalid format, CbYCrY format selected as default"); + pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I; + } + } + else + { + CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY"); + pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I; + } + + return pixFormat; +} + +const size_t getBufSize(const char* parameters_format, int width, int height) +{ + int buf_size; + + if ( parameters_format != NULL ) { + if (strcmp(parameters_format, + (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) { + buf_size = width * height * 2; + } + else if((strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) || + 
(strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420P) == 0)) { + buf_size = width * height * 3 / 2; + } + else if(strcmp(parameters_format, + (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) { + buf_size = width * height * 2; + } else { + CAMHAL_LOGEA("Invalid format"); + buf_size = 0; + } + } else { + CAMHAL_LOGEA("Preview format is NULL"); + buf_size = 0; + } + + return buf_size; +} +/*--------------------ANativeWindowDisplayAdapter Class STARTS here-----------------------------*/ + + +/** + * Display Adapter class STARTS here.. + */ +ANativeWindowDisplayAdapter::ANativeWindowDisplayAdapter():mDisplayThread(NULL), + mDisplayState(ANativeWindowDisplayAdapter::DISPLAY_INIT), + mDisplayEnabled(false), + mBufferCount(0) + + + +{ + LOG_FUNCTION_NAME; + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + mShotToShot = false; + mStartCapture.tv_sec = 0; + mStartCapture.tv_usec = 0; + mStandbyToShot.tv_sec = 0; + mStandbyToShot.tv_usec = 0; + mMeasureStandby = false; +#endif + + mPixelFormat = NULL; + mBufferHandleMap = NULL; + mGrallocHandleMap = NULL; + mOffsetsMap = NULL; + mFrameProvider = NULL; + mANativeWindow = NULL; + + mFrameWidth = 0; + mFrameHeight = 0; + mPreviewWidth = 0; + mPreviewHeight = 0; + + mSuspend = false; + mFailedDQs = 0; + + mPaused = false; + mXOff = -1; + mYOff = -1; + mFirstInit = false; + + mFD = -1; + + LOG_FUNCTION_NAME_EXIT; +} + +ANativeWindowDisplayAdapter::~ANativeWindowDisplayAdapter() +{ + Semaphore sem; + TIUTILS::Message msg; + + LOG_FUNCTION_NAME; + + ///If Frame provider exists + if (mFrameProvider) { + // Unregister with the frame provider + mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES); + delete mFrameProvider; + mFrameProvider = NULL; + } + + ///The ANativeWindow object will get destroyed here + destroy(); + + ///If Display thread exists + if(mDisplayThread.get()) + { + ///Kill the display thread + sem.Create(); + msg.command = DisplayThread::DISPLAY_EXIT; + + // Send the 
semaphore to signal once the command is completed + msg.arg1 = &sem; + + ///Post the message to display thread + mDisplayThread->msgQ().put(&msg); + + ///Wait for the ACK - implies that the thread is now started and waiting for frames + sem.Wait(); + + // Exit and cleanup the thread + mDisplayThread->requestExitAndWait(); + + // Delete the display thread + mDisplayThread.clear(); + } + + LOG_FUNCTION_NAME_EXIT; + +} + +status_t ANativeWindowDisplayAdapter::initialize() +{ + LOG_FUNCTION_NAME; + + ///Create the display thread + mDisplayThread = new DisplayThread(this); + if ( !mDisplayThread.get() ) + { + CAMHAL_LOGEA("Couldn't create display thread"); + LOG_FUNCTION_NAME_EXIT; + return NO_MEMORY; + } + + ///Start the display thread + status_t ret = mDisplayThread->run("DisplayThread", PRIORITY_URGENT_DISPLAY); + if ( ret != NO_ERROR ) + { + CAMHAL_LOGEA("Couldn't run display thread"); + LOG_FUNCTION_NAME_EXIT; + return ret; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +int ANativeWindowDisplayAdapter::setPreviewWindow(preview_stream_ops_t* window) +{ + LOG_FUNCTION_NAME; + ///Note that Display Adapter cannot work without a valid window object + if ( !window) + { + CAMHAL_LOGEA("NULL window object passed to DisplayAdapter"); + LOG_FUNCTION_NAME_EXIT; + return BAD_VALUE; + } + + if ( window == mANativeWindow ) { + return ALREADY_EXISTS; + } + + ///Destroy the existing window object, if it exists + destroy(); + + ///Move to new window obj + mANativeWindow = window; + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; +} + +int ANativeWindowDisplayAdapter::setFrameProvider(FrameNotifier *frameProvider) +{ + LOG_FUNCTION_NAME; + + // Check for NULL pointer + if ( !frameProvider ) { + CAMHAL_LOGEA("NULL passed for frame provider"); + LOG_FUNCTION_NAME_EXIT; + return BAD_VALUE; + } + + //Release any previous frame providers + if ( NULL != mFrameProvider ) { + delete mFrameProvider; + } + + /** Dont do anything here, Just save the pointer for use when display is + 
actually enabled or disabled + */ + mFrameProvider = new FrameProvider(frameProvider, this, frameCallbackRelay); + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; +} + +int ANativeWindowDisplayAdapter::setErrorHandler(ErrorNotifier *errorNotifier) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( NULL == errorNotifier ) + { + CAMHAL_LOGEA("Invalid Error Notifier reference"); + ret = -EINVAL; + } + + if ( NO_ERROR == ret ) + { + mErrorNotifier = errorNotifier; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + +status_t ANativeWindowDisplayAdapter::setSnapshotTimeRef(struct timeval *refTime) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( NULL != refTime ) + { + Mutex::Autolock lock(mLock); + memcpy(&mStartCapture, refTime, sizeof(struct timeval)); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +#endif + + +int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct timeval *refTime, S3DParameters *s3dParams) +{ + Semaphore sem; + TIUTILS::Message msg; + + LOG_FUNCTION_NAME; + + if ( mDisplayEnabled ) + { + CAMHAL_LOGDA("Display is already enabled"); + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; + } + +#if 0 //TODO: s3d is not part of bringup...will reenable + if (s3dParams) + mOverlay->set_s3d_params(s3dParams->mode, s3dParams->framePacking, + s3dParams->order, s3dParams->subSampling); +#endif + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + if ( NULL != refTime ) + { + Mutex::Autolock lock(mLock); + memcpy(&mStandbyToShot, refTime, sizeof(struct timeval)); + mMeasureStandby = true; + } + +#endif + + //Send START_DISPLAY COMMAND to display thread. 
Display thread will start and then wait for a message + sem.Create(); + msg.command = DisplayThread::DISPLAY_START; + + // Send the semaphore to signal once the command is completed + msg.arg1 = &sem; + + ///Post the message to display thread + mDisplayThread->msgQ().put(&msg); + + ///Wait for the ACK - implies that the thread is now started and waiting for frames + sem.Wait(); + + // Register with the frame provider for frames + mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC); + + mDisplayEnabled = true; + mPreviewWidth = width; + mPreviewHeight = height; + + CAMHAL_LOGVB("mPreviewWidth = %d mPreviewHeight = %d", mPreviewWidth, mPreviewHeight); + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; +} + +int ANativeWindowDisplayAdapter::disableDisplay(bool cancel_buffer) +{ + status_t ret = NO_ERROR; + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + + LOG_FUNCTION_NAME; + + if(!mDisplayEnabled) + { + CAMHAL_LOGDA("Display is already disabled"); + LOG_FUNCTION_NAME_EXIT; + return ALREADY_EXISTS; + } + + // Unregister with the frame provider here + mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC); + mFrameProvider->removeFramePointers(); + + if ( NULL != mDisplayThread.get() ) + { + //Send STOP_DISPLAY COMMAND to display thread. 
Display thread will stop and dequeue all messages + // and then wait for message + Semaphore sem; + sem.Create(); + TIUTILS::Message msg; + msg.command = DisplayThread::DISPLAY_STOP; + + // Send the semaphore to signal once the command is completed + msg.arg1 = &sem; + + ///Post the message to display thread + mDisplayThread->msgQ().put(&msg); + + ///Wait for the ACK for display to be disabled + + sem.Wait(); + + } + + Mutex::Autolock lock(mLock); + { + ///Reset the display enabled flag + mDisplayEnabled = false; + + ///Reset the offset values + mXOff = -1; + mYOff = -1; + + ///Reset the frame width and height values + mFrameWidth =0; + mFrameHeight = 0; + mPreviewWidth = 0; + mPreviewHeight = 0; + + if(cancel_buffer) + { + // Return the buffers to ANativeWindow here, the mFramesWithCameraAdapterMap is also cleared inside + returnBuffersToWindow(); + } + else + { + mANativeWindow = NULL; + // Clear the frames with camera adapter map + mFramesWithCameraAdapterMap.clear(); + } + + + } + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; +} + +status_t ANativeWindowDisplayAdapter::pauseDisplay(bool pause) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + { + Mutex::Autolock lock(mLock); + mPaused = pause; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + + +void ANativeWindowDisplayAdapter::destroy() +{ + LOG_FUNCTION_NAME; + + ///Check if the display is disabled, if not disable it + if ( mDisplayEnabled ) + { + CAMHAL_LOGDA("WARNING: Calling destroy of Display adapter when display enabled. 
Disabling display.."); + disableDisplay(false); + } + + mBufferCount = 0; + + LOG_FUNCTION_NAME_EXIT; +} + +// Implementation of inherited interfaces +void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs) +{ + LOG_FUNCTION_NAME; + status_t err; + int i = -1; + const int lnumBufs = numBufs; + mBufferHandleMap = new buffer_handle_t*[lnumBufs]; + mGrallocHandleMap = new IMG_native_handle_t*[lnumBufs]; + int undequeued = 0; + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + Rect bounds; + + + if ( NULL == mANativeWindow ) { + return NULL; + } + + // Set gralloc usage bits for window. + err = mANativeWindow->set_usage(mANativeWindow, CAMHAL_GRALLOC_USAGE); + if (err != 0) { + ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err); + + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mANativeWindow = NULL; + } + + return NULL; + } + + CAMHAL_LOGDB("Number of buffers set to ANativeWindow %d", numBufs); + ///Set the number of buffers needed for camera preview + err = mANativeWindow->set_buffer_count(mANativeWindow, numBufs); + if (err != 0) { + ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), -err); + + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mANativeWindow = NULL; + } + + return NULL; + } + CAMHAL_LOGDB("Configuring %d buffers for ANativeWindow", numBufs); + mBufferCount = numBufs; + + + // Set window geometry + err = mANativeWindow->set_buffers_geometry( + mANativeWindow, + width, + height, + /*toOMXPixFormat(format)*/HAL_PIXEL_FORMAT_TI_NV12); // Gralloc only supports NV12 alloc! 
+ + if (err != 0) { + ALOGE("native_window_set_buffers_geometry failed: %s (%d)", strerror(-err), -err); + + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mANativeWindow = NULL; + } + + return NULL; + } + + ///We just return the buffers from ANativeWindow, if the width and height are same, else (vstab, vnf case) + ///re-allocate buffers using ANativeWindow and then get them + ///@todo - Re-allocate buffers for vnf and vstab using the width, height, format, numBufs etc + if ( mBufferHandleMap == NULL ) + { + CAMHAL_LOGEA("Couldn't create array for ANativeWindow buffers"); + LOG_FUNCTION_NAME_EXIT; + return NULL; + } + + mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeued); + + for ( i=0; i < mBufferCount; i++ ) + { + IMG_native_handle_t** hndl2hndl; + IMG_native_handle_t* handle; + int stride; // dummy variable to get stride + // TODO(XXX): Do we need to keep stride information in camera hal? + + err = mANativeWindow->dequeue_buffer(mANativeWindow, (buffer_handle_t**) &hndl2hndl, &stride); + + if (err != 0) { + CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err); + + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mANativeWindow = NULL; + } + + goto fail; + } + + handle = *hndl2hndl; + + mBufferHandleMap[i] = (buffer_handle_t*) hndl2hndl; + mGrallocHandleMap[i] = handle; + mFramesWithCameraAdapterMap.add((int) mGrallocHandleMap[i], i); + + bytes = getBufSize(format, width, height); + + } + + // lock the initial queueable buffers + bounds.left = 0; + bounds.top = 0; + bounds.right = width; + bounds.bottom = height; + + for( i = 0; i < mBufferCount-undequeued; i++ ) + { + void *y_uv[2]; + + mANativeWindow->lock_buffer(mANativeWindow, mBufferHandleMap[i]); + + mapper.lock((buffer_handle_t) mGrallocHandleMap[i], CAMHAL_GRALLOC_USAGE, bounds, y_uv); + mFrameProvider->addFramePointers(mGrallocHandleMap[i] , y_uv); + } + + // return the rest of the buffers back to ANativeWindow + for(i 
= (mBufferCount-undequeued); i >= 0 && i < mBufferCount; i++) + { + err = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[i]); + if (err != 0) { + CAMHAL_LOGEB("cancel_buffer failed: %s (%d)", strerror(-err), -err); + + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mANativeWindow = NULL; + } + + goto fail; + } + mFramesWithCameraAdapterMap.removeItem((int) mGrallocHandleMap[i]); + //LOCK UNLOCK TO GET YUV POINTERS + void *y_uv[2]; + mapper.lock((buffer_handle_t) mGrallocHandleMap[i], CAMHAL_GRALLOC_USAGE, bounds, y_uv); + mFrameProvider->addFramePointers(mGrallocHandleMap[i] , y_uv); + mapper.unlock((buffer_handle_t) mGrallocHandleMap[i]); + } + + mFirstInit = true; + mPixelFormat = getPixFormatConstant(format); + mFrameWidth = width; + mFrameHeight = height; + + return mGrallocHandleMap; + + fail: + // need to cancel buffers if any were dequeued + for (int start = 0; start < i && i > 0; start++) { + int err = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[start]); + if (err != 0) { + CAMHAL_LOGEB("cancelBuffer failed w/ error 0x%08x", err); + break; + } + mFramesWithCameraAdapterMap.removeItem((int) mGrallocHandleMap[start]); + } + + freeBuffer(mGrallocHandleMap); + + CAMHAL_LOGEA("Error occurred, performing cleanup"); + + if ( NULL != mErrorNotifier.get() ) + { + mErrorNotifier->errorNotify(-ENOMEM); + } + + LOG_FUNCTION_NAME_EXIT; + return NULL; + +} + +uint32_t * ANativeWindowDisplayAdapter::getOffsets() +{ + const int lnumBufs = mBufferCount; + + LOG_FUNCTION_NAME; + + // TODO(XXX): Need to remove getOffsets from the API. 
No longer needed + + if ( NULL == mANativeWindow ) + { + CAMHAL_LOGEA("mANativeWindow reference is missing"); + goto fail; + } + + if( mBufferHandleMap == NULL) + { + CAMHAL_LOGEA("Buffers not allocated yet!!"); + goto fail; + } + + if(mOffsetsMap == NULL) + { + mOffsetsMap = new uint32_t[lnumBufs]; + for(int i = 0; i < mBufferCount; i++) + { + IMG_native_handle_t* handle = (IMG_native_handle_t*) *(mBufferHandleMap[i]); + mOffsetsMap[i] = 0; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return mOffsetsMap; + + fail: + + if ( NULL != mOffsetsMap ) + { + delete [] mOffsetsMap; + mOffsetsMap = NULL; + } + + if ( NULL != mErrorNotifier.get() ) + { + mErrorNotifier->errorNotify(-ENOSYS); + } + + LOG_FUNCTION_NAME_EXIT; + + return NULL; +} + +int ANativeWindowDisplayAdapter::maxQueueableBuffers(unsigned int& queueable) +{ + LOG_FUNCTION_NAME; + int ret = NO_ERROR; + int undequeued = 0; + + if(mBufferCount == 0) + { + ret = -ENOSYS; + goto end; + } + + if(!mANativeWindow) + { + ret = -ENOSYS; + goto end; + } + + ret = mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeued); + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("get_min_undequeued_buffer_count failed: %s (%d)", strerror(-ret), -ret); + + if ( ENODEV == ret ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mANativeWindow = NULL; + } + + return -ret; + } + + queueable = mBufferCount - undequeued; + + end: + return ret; + LOG_FUNCTION_NAME_EXIT; +} + +int ANativeWindowDisplayAdapter::getFd() +{ + LOG_FUNCTION_NAME; + + if(mFD == -1) + { + IMG_native_handle_t* handle = (IMG_native_handle_t*) *(mBufferHandleMap[0]); + // TODO: should we dup the fd? not really necessary and another thing for ANativeWindow + // to manage and close... 
+ mFD = dup(handle->fd[0]); + } + + LOG_FUNCTION_NAME_EXIT; + + return mFD; + +} + +status_t ANativeWindowDisplayAdapter::returnBuffersToWindow() +{ + status_t ret = NO_ERROR; + + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + //Give the buffers back to display here - sort of free it + if (mANativeWindow) + for(unsigned int i = 0; i < mFramesWithCameraAdapterMap.size(); i++) { + int value = mFramesWithCameraAdapterMap.valueAt(i); + + // unlock buffer before giving it up + mapper.unlock((buffer_handle_t) mGrallocHandleMap[value]); + + ret = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[value]); + if ( ENODEV == ret ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mANativeWindow = NULL; + return -ret; + } else if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("cancel_buffer() failed: %s (%d)", + strerror(-ret), + -ret); + return -ret; + } + } + else + ALOGE("mANativeWindow is NULL"); + + ///Clear the frames with camera adapter map + mFramesWithCameraAdapterMap.clear(); + + return ret; + +} + +int ANativeWindowDisplayAdapter::freeBuffer(void* buf) +{ + LOG_FUNCTION_NAME; + + int *buffers = (int *) buf; + status_t ret = NO_ERROR; + + Mutex::Autolock lock(mLock); + + if((int *)mGrallocHandleMap != buffers) + { + CAMHAL_LOGEA("CameraHal passed wrong set of buffers to free!!!"); + if (mGrallocHandleMap != NULL) + delete []mGrallocHandleMap; + mGrallocHandleMap = NULL; + } + + + returnBuffersToWindow(); + + if ( NULL != buf ) + { + delete [] buffers; + mGrallocHandleMap = NULL; + } + + if( mBufferHandleMap != NULL) + { + delete [] mBufferHandleMap; + mBufferHandleMap = NULL; + } + + if ( NULL != mOffsetsMap ) + { + delete [] mOffsetsMap; + mOffsetsMap = NULL; + } + + if( mFD != -1) + { + close(mFD); // close duped handle + mFD = -1; + } + + return NO_ERROR; +} + + +bool ANativeWindowDisplayAdapter::supportsExternalBuffering() +{ + return false; +} + +int ANativeWindowDisplayAdapter::useBuffers(void *bufArr, int num) +{ + return NO_ERROR; +} + +void 
ANativeWindowDisplayAdapter::displayThread() +{ + bool shouldLive = true; + int timeout = 0; + status_t ret; + + LOG_FUNCTION_NAME; + + while(shouldLive) + { + ret = TIUTILS::MessageQueue::waitForMsg(&mDisplayThread->msgQ() + , &mDisplayQ + , NULL + , ANativeWindowDisplayAdapter::DISPLAY_TIMEOUT); + + if ( !mDisplayThread->msgQ().isEmpty() ) + { + ///Received a message from CameraHal, process it + shouldLive = processHalMsg(); + + } + else if( !mDisplayQ.isEmpty()) + { + if ( mDisplayState== ANativeWindowDisplayAdapter::DISPLAY_INIT ) + { + + ///If display adapter is not started, continue + continue; + + } + else + { + TIUTILS::Message msg; + ///Get the dummy msg from the displayQ + if(mDisplayQ.get(&msg)!=NO_ERROR) + { + CAMHAL_LOGEA("Error in getting message from display Q"); + continue; + } + + // There is a frame from ANativeWindow for us to dequeue + // We dequeue and return the frame back to Camera adapter + if(mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_STARTED) + { + handleFrameReturn(); + } + + if (mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_EXITED) + { + ///we exit the thread even though there are frames still to dequeue. 
They will be dequeued + ///in disableDisplay + shouldLive = false; + } + } + } + } + + LOG_FUNCTION_NAME_EXIT; +} + + +bool ANativeWindowDisplayAdapter::processHalMsg() +{ + TIUTILS::Message msg; + + LOG_FUNCTION_NAME; + + + mDisplayThread->msgQ().get(&msg); + bool ret = true, invalidCommand = false; + + switch ( msg.command ) + { + + case DisplayThread::DISPLAY_START: + + CAMHAL_LOGDA("Display thread received DISPLAY_START command from Camera HAL"); + mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_STARTED; + + break; + + case DisplayThread::DISPLAY_STOP: + + ///@bug There is no API to disable SF without destroying it + ///@bug Buffers might still be w/ display and will get displayed + ///@remarks Ideal seqyence should be something like this + ///mOverlay->setParameter("enabled", false); + CAMHAL_LOGDA("Display thread received DISPLAY_STOP command from Camera HAL"); + mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_STOPPED; + + break; + + case DisplayThread::DISPLAY_EXIT: + + CAMHAL_LOGDA("Display thread received DISPLAY_EXIT command from Camera HAL."); + CAMHAL_LOGDA("Stopping display thread..."); + mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_EXITED; + ///Note that the SF can have pending buffers when we disable the display + ///This is normal and the expectation is that they may not be displayed. 
+ ///This is to ensure that the user experience is not impacted + ret = false; + break; + + default: + + CAMHAL_LOGEB("Invalid Display Thread Command 0x%x.", msg.command); + invalidCommand = true; + + break; + } + + ///Signal the semaphore if it is sent as part of the message + if ( ( msg.arg1 ) && ( !invalidCommand ) ) + { + + CAMHAL_LOGDA("+Signalling display semaphore"); + Semaphore &sem = *((Semaphore*)msg.arg1); + + sem.Signal(); + + CAMHAL_LOGDA("-Signalling display semaphore"); + } + + + LOG_FUNCTION_NAME_EXIT; + return ret; +} + + +status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::DisplayFrame &dispFrame) +{ + status_t ret = NO_ERROR; + uint32_t actualFramesWithDisplay = 0; + android_native_buffer_t *buffer = NULL; + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + int i; + + ///@todo Do cropping based on the stabilized frame coordinates + ///@todo Insert logic to drop frames here based on refresh rate of + ///display or rendering rate whichever is lower + ///Queue the buffer to overlay + + if (!mGrallocHandleMap || !dispFrame.mBuffer) { + CAMHAL_LOGEA("NULL sent to PostFrame"); + return -EINVAL; + } + + for ( i = 0; i < mBufferCount; i++ ) + { + if ( ((int) dispFrame.mBuffer ) == (int)mGrallocHandleMap[i] ) + { + break; + } + } + + if ( mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_STARTED && + (!mPaused || CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType) && + !mSuspend) + { + Mutex::Autolock lock(mLock); + uint32_t xOff = (dispFrame.mOffset% PAGE_SIZE); + uint32_t yOff = (dispFrame.mOffset / PAGE_SIZE); + + // Set crop only if current x and y offsets do not match with frame offsets + if((mXOff!=xOff) || (mYOff!=yOff)) + { + CAMHAL_LOGDB("Offset %d xOff = %d, yOff = %d", dispFrame.mOffset, xOff, yOff); + uint8_t bytesPerPixel; + ///Calculate bytes per pixel based on the pixel format + if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) + { + bytesPerPixel = 2; + } + 
else if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) + { + bytesPerPixel = 2; + } + else if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) + { + bytesPerPixel = 1; + } + else + { + bytesPerPixel = 1; + } + + CAMHAL_LOGVB(" crop.left = %d crop.top = %d crop.right = %d crop.bottom = %d", + xOff/bytesPerPixel, yOff , (xOff/bytesPerPixel)+mPreviewWidth, yOff+mPreviewHeight); + // We'll ignore any errors here, if the surface is + // already invalid, we'll know soon enough. + mANativeWindow->set_crop(mANativeWindow, xOff/bytesPerPixel, yOff, + (xOff/bytesPerPixel)+mPreviewWidth, yOff+mPreviewHeight); + + ///Update the current x and y offsets + mXOff = xOff; + mYOff = yOff; + } + + // unlock buffer before sending to display + mapper.unlock((buffer_handle_t) mGrallocHandleMap[i]); + ret = mANativeWindow->enqueue_buffer(mANativeWindow, mBufferHandleMap[i]); + if (ret != 0) { + ALOGE("Surface::queueBuffer returned error %d", ret); + } + + mFramesWithCameraAdapterMap.removeItem((int) dispFrame.mBuffer); + + + // HWComposer has not minimum buffer requirement. 
We should be able to dequeue + // the buffer immediately + TIUTILS::Message msg; + mDisplayQ.put(&msg); + + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + if ( mMeasureStandby ) + { + CameraHal::PPM("Standby to first shot: Sensor Change completed - ", &mStandbyToShot); + mMeasureStandby = false; + } + else if (CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType) + { + CameraHal::PPM("Shot to snapshot: ", &mStartCapture); + mShotToShot = true; + } + else if ( mShotToShot ) + { + CameraHal::PPM("Shot to shot: ", &mStartCapture); + mShotToShot = false; + } +#endif + + } + else + { + Mutex::Autolock lock(mLock); + + // unlock buffer before giving it up + mapper.unlock((buffer_handle_t) mGrallocHandleMap[i]); + + // cancel buffer and dequeue another one + ret = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[i]); + if (ret != 0) { + ALOGE("Surface::queueBuffer returned error %d", ret); + } + + mFramesWithCameraAdapterMap.removeItem((int) dispFrame.mBuffer); + + TIUTILS::Message msg; + mDisplayQ.put(&msg); + ret = NO_ERROR; + } + + return ret; +} + + +bool ANativeWindowDisplayAdapter::handleFrameReturn() +{ + status_t err; + buffer_handle_t* buf; + int i = 0; + int stride; // dummy variable to get stride + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + Rect bounds; + void *y_uv[2]; + + // TODO(XXX): Do we need to keep stride information in camera hal? 
+ + if ( NULL == mANativeWindow ) { + return false; + } + + err = mANativeWindow->dequeue_buffer(mANativeWindow, &buf, &stride); + if (err != 0) { + CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err); + + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mANativeWindow = NULL; + } + + return false; + } + + err = mANativeWindow->lock_buffer(mANativeWindow, buf); + if (err != 0) { + CAMHAL_LOGEB("lockbuffer failed: %s (%d)", strerror(-err), -err); + + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mANativeWindow = NULL; + } + + return false; + } + + for(i = 0; i < mBufferCount; i++) + { + if (mBufferHandleMap[i] == buf) + break; + } + + // lock buffer before sending to FrameProvider for filling + bounds.left = 0; + bounds.top = 0; + bounds.right = mFrameWidth; + bounds.bottom = mFrameHeight; + + int lock_try_count = 0; + while (mapper.lock((buffer_handle_t) mGrallocHandleMap[i], CAMHAL_GRALLOC_USAGE, bounds, y_uv) < 0){ + if (++lock_try_count > LOCK_BUFFER_TRIES){ + if ( NULL != mErrorNotifier.get() ){ + mErrorNotifier->errorNotify(CAMERA_ERROR_UNKNOWN); + } + return false; + } + CAMHAL_LOGEA("Gralloc Lock FrameReturn Error: Sleeping 15ms"); + usleep(15000); + } + + mFramesWithCameraAdapterMap.add((int) mGrallocHandleMap[i], i); + + CAMHAL_LOGVB("handleFrameReturn: found graphic buffer %d of %d", i, mBufferCount-1); + mFrameProvider->returnFrame( (void*)mGrallocHandleMap[i], CameraFrame::PREVIEW_FRAME_SYNC); + return true; +} + +void ANativeWindowDisplayAdapter::frameCallbackRelay(CameraFrame* caFrame) +{ + + if ( NULL != caFrame ) + { + if ( NULL != caFrame->mCookie ) + { + ANativeWindowDisplayAdapter *da = (ANativeWindowDisplayAdapter*) caFrame->mCookie; + da->frameCallback(caFrame); + } + else + { + CAMHAL_LOGEB("Invalid Cookie in Camera Frame = %p, Cookie = %p", caFrame, caFrame->mCookie); + } + } + else + { + CAMHAL_LOGEB("Invalid Camera Frame = %p", caFrame); + } + +} + +void 
ANativeWindowDisplayAdapter::frameCallback(CameraFrame* caFrame) +{ + ///Call queueBuffer of overlay in the context of the callback thread + DisplayFrame df; + df.mBuffer = caFrame->mBuffer; + df.mType = (CameraFrame::FrameType) caFrame->mFrameType; + df.mOffset = caFrame->mOffset; + df.mWidthStride = caFrame->mAlignment; + df.mLength = caFrame->mLength; + df.mWidth = caFrame->mWidth; + df.mHeight = caFrame->mHeight; + PostFrame(df); +} + + +/*--------------------ANativeWindowDisplayAdapter Class ENDS here-----------------------------*/ + +}; + diff --git a/camera/Android.mk b/camera/Android.mk new file mode 100644 index 0000000..23571f0 --- /dev/null +++ b/camera/Android.mk @@ -0,0 +1,137 @@ +LOCAL_PATH := $(call my-dir) + +OMAP4_CAMERA_HAL_USES := OMX +# OMAP4_CAMERA_HAL_USES := USB + +OMAP4_CAMERA_HAL_SRC := \ + CameraHal_Module.cpp \ + CameraHal.cpp \ + CameraHalUtilClasses.cpp \ + AppCallbackNotifier.cpp \ + ANativeWindowDisplayAdapter.cpp \ + CameraProperties.cpp \ + MemoryManager.cpp \ + Encoder_libjpeg.cpp \ + SensorListener.cpp \ + NV12_resize.c + +OMAP4_CAMERA_COMMON_SRC := \ + CameraParameters.cpp \ + TICameraParameters.cpp \ + CameraHalCommon.cpp + +OMAP4_CAMERA_OMX_SRC := \ + BaseCameraAdapter.cpp \ + OMXCameraAdapter/OMX3A.cpp \ + OMXCameraAdapter/OMXAlgo.cpp \ + OMXCameraAdapter/OMXCameraAdapter.cpp \ + OMXCameraAdapter/OMXCapabilities.cpp \ + OMXCameraAdapter/OMXCapture.cpp \ + OMXCameraAdapter/OMXDefaults.cpp \ + OMXCameraAdapter/OMXExif.cpp \ + OMXCameraAdapter/OMXFD.cpp \ + OMXCameraAdapter/OMXFocus.cpp \ + OMXCameraAdapter/OMXZoom.cpp + +OMAP4_CAMERA_USB_SRC := \ + BaseCameraAdapter.cpp \ + V4LCameraAdapter/V4LCameraAdapter.cpp + +# +# OMX Camera HAL +# + +ifeq ($(OMAP4_CAMERA_HAL_USES),OMX) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + $(OMAP4_CAMERA_HAL_SRC) \ + $(OMAP4_CAMERA_OMX_SRC) \ + $(OMAP4_CAMERA_COMMON_SRC) + +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH)/inc/ \ + $(DEVICE_FOLDER)/hwc \ + $(DEVICE_FOLDER)/include \ + 
$(LOCAL_PATH)/inc/OMXCameraAdapter \ + $(DEVICE_FOLDER)/libtiutils \ + hardware/ti/omap4xxx/tiler \ + $(DEVICE_FOLDER)/libion_ti \ + $(DOMX_PATH)/omx_core/inc \ + $(DOMX_PATH)/mm_osal/inc \ + frameworks/base/include/media/stagefright \ + frameworks/native/include/media/hardware \ + frameworks/native/include/media/openmax \ + external/jpeg \ + external/jhead + +LOCAL_SHARED_LIBRARIES := \ + libui \ + libbinder \ + libutils \ + libcutils \ + liblog \ + libtiutils_custom \ + libmm_osal \ + libOMX_Core \ + libcamera_client \ + libgui \ + libdomx \ + libion_ti \ + libjpeg \ + libjhead + +LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER + +LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw +LOCAL_MODULE := camera.$(TARGET_BOOTLOADER_BOARD_NAME) +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) + +else +ifeq ($(OMAP4_CAMERA_HAL_USES),USB) + +# +# USB Camera Adapter +# + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + $(OMAP4_CAMERA_HAL_SRC) \ + $(OMAP4_CAMERA_USB_SRC) \ + $(OMAP4_CAMERA_COMMON_SRC) + +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH)/inc/ \ + $(DEVICE_FOLDER)/hwc \ + $(DEVICE_FOLDER)/include \ + $(LOCAL_PATH)/inc/V4LCameraAdapter \ + $(DEVICE_FOLDER)/libtiutils \ + hardware/ti/omap4xxx/tiler \ + $(DEVICE_FOLDER)/libion_ti \ + frameworks/base/include/ui \ + frameworks/base/include/utils \ + frameworks/base/include/media/stagefright/openmax + +LOCAL_SHARED_LIBRARIES := \ + libui \ + libbinder \ + libutils \ + libcutils \ + liblog \ + libtiutils_custom \ + libcamera_client \ + libion_ti + +LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER + +LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw +LOCAL_MODULE := camera.$(TARGET_BOOTLOADER_BOARD_NAME) +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) +endif +endif diff --git a/camera/AppCallbackNotifier.cpp b/camera/AppCallbackNotifier.cpp new file mode 100644 index 0000000..872006b --- /dev/null +++ b/camera/AppCallbackNotifier.cpp @@ -0,0 +1,1851 @@ +/* + * Copyright (C) 
Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + + +#define LOG_TAG "CameraHAL" + + +#include "CameraHal.h" +#include "VideoMetadata.h" +#include "Encoder_libjpeg.h" +#include +#include +#include +#include "NV12_resize.h" + +namespace android { + +const int AppCallbackNotifier::NOTIFIER_TIMEOUT = -1; +KeyedVector > gEncoderQueue; + +void AppCallbackNotifierEncoderCallback(void* main_jpeg, + void* thumb_jpeg, + CameraFrame::FrameType type, + void* cookie1, + void* cookie2, + void* cookie3, + bool canceled) +{ + if (cookie1 && !canceled) { + AppCallbackNotifier* cb = (AppCallbackNotifier*) cookie1; + cb->EncoderDoneCb(main_jpeg, thumb_jpeg, type, cookie2, cookie3); + } + + if (main_jpeg) { + free(main_jpeg); + } + + if (thumb_jpeg) { + if (((Encoder_libjpeg::params *) thumb_jpeg)->dst) { + free(((Encoder_libjpeg::params *) thumb_jpeg)->dst); + } + free(thumb_jpeg); + } +} + +/*--------------------NotificationHandler Class STARTS here-----------------------------*/ + +void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, CameraFrame::FrameType type, void* cookie1, void* cookie2) +{ + camera_memory_t* encoded_mem = NULL; + Encoder_libjpeg::params *main_param = NULL, *thumb_param = NULL; + size_t jpeg_size; + uint8_t* src = NULL; + sp encoder = NULL; + + LOG_FUNCTION_NAME; + + camera_memory_t* picture = NULL; + + { + Mutex::Autolock lock(mLock); + + if (!main_jpeg) { + goto exit; + } + + 
encoded_mem = (camera_memory_t*) cookie1; + main_param = (Encoder_libjpeg::params *) main_jpeg; + jpeg_size = main_param->jpeg_size; + src = main_param->src; + + if(encoded_mem && encoded_mem->data && (jpeg_size > 0)) { + if (cookie2) { + ExifElementsTable* exif = (ExifElementsTable*) cookie2; + Section_t* exif_section = NULL; + + exif->insertExifToJpeg((unsigned char*) encoded_mem->data, jpeg_size); + + if(thumb_jpeg) { + thumb_param = (Encoder_libjpeg::params *) thumb_jpeg; + exif->insertExifThumbnailImage((const char*)thumb_param->dst, + (int)thumb_param->jpeg_size); + } + + exif_section = FindSection(M_EXIF); + + if (exif_section) { + picture = mRequestMemory(-1, jpeg_size + exif_section->Size, 1, NULL); + if (picture && picture->data) { + exif->saveJpeg((unsigned char*) picture->data, jpeg_size + exif_section->Size); + } + } + delete exif; + cookie2 = NULL; + } else { + picture = mRequestMemory(-1, jpeg_size, 1, NULL); + if (picture && picture->data) { + memcpy(picture->data, encoded_mem->data, jpeg_size); + } + } + } + } // scope for mutex lock + + if (!mRawAvailable) { + dummyRaw(); + } else { + mRawAvailable = false; + } + + // Send the callback to the application only if the notifier is started and the message is enabled + if(picture && (mNotifierState==AppCallbackNotifier::NOTIFIER_STARTED) && + (mCameraHal->msgTypeEnabled(CAMERA_MSG_COMPRESSED_IMAGE))) + { + Mutex::Autolock lock(mBurstLock); +#if 0 //TODO: enable burst mode later + if ( mBurst ) + { + `(CAMERA_MSG_BURST_IMAGE, JPEGPictureMemBase, mCallbackCookie); + } + else +#endif + { + mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, picture, 0, NULL, mCallbackCookie); + } + } + + exit: + + if (picture) { + picture->release(picture); + } + + if (mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED) { + if (encoded_mem) { + encoded_mem->release(encoded_mem); + } + if (cookie2) { + delete (ExifElementsTable*) cookie2; + } + encoder = gEncoderQueue.valueFor(src); + if (encoder.get()) { + 
gEncoderQueue.removeItem(src); + encoder.clear(); + } + mFrameProvider->returnFrame(src, type); + } + + LOG_FUNCTION_NAME_EXIT; +} + +/** + * NotificationHandler class + */ + +///Initialization function for AppCallbackNotifier +status_t AppCallbackNotifier::initialize() +{ + LOG_FUNCTION_NAME; + + mMeasurementEnabled = false; + + ///Create the app notifier thread + mNotificationThread = new NotificationThread(this); + if(!mNotificationThread.get()) + { + CAMHAL_LOGEA("Couldn't create Notification thread"); + return NO_MEMORY; + } + + ///Start the display thread + status_t ret = mNotificationThread->run("NotificationThread", PRIORITY_URGENT_DISPLAY); + if(ret!=NO_ERROR) + { + CAMHAL_LOGEA("Couldn't run NotificationThread"); + mNotificationThread.clear(); + return ret; + } + + mUseMetaDataBufferMode = true; + mRawAvailable = false; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +void AppCallbackNotifier::setCallbacks(CameraHal* cameraHal, + camera_notify_callback notify_cb, + camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void *user) +{ + Mutex::Autolock lock(mLock); + + LOG_FUNCTION_NAME; + + mCameraHal = cameraHal; + mNotifyCb = notify_cb; + mDataCb = data_cb; + mDataCbTimestamp = data_cb_timestamp; + mRequestMemory = get_memory; + mCallbackCookie = user; + + LOG_FUNCTION_NAME_EXIT; +} + +void AppCallbackNotifier::setMeasurements(bool enable) +{ + Mutex::Autolock lock(mLock); + + LOG_FUNCTION_NAME; + + mMeasurementEnabled = enable; + + if ( enable ) + { + mFrameProvider->enableFrameNotification(CameraFrame::FRAME_DATA_SYNC); + } + + LOG_FUNCTION_NAME_EXIT; +} + + +//All sub-components of Camera HAL call this whenever any error happens +void AppCallbackNotifier::errorNotify(int error) +{ + LOG_FUNCTION_NAME; + + CAMHAL_LOGEB("AppCallbackNotifier received error %d", error); + + // If it is a fatal error abort here! 
+ if((error == CAMERA_ERROR_FATAL) || (error == CAMERA_ERROR_HARD)) { + //We kill media server if we encounter these errors as there is + //no point continuing and apps also don't handle errors other + //than media server death always. + abort(); + return; + } + + if ( ( NULL != mCameraHal ) && + ( NULL != mNotifyCb ) && + ( mCameraHal->msgTypeEnabled(CAMERA_MSG_ERROR) ) ) + { + CAMHAL_LOGEB("AppCallbackNotifier mNotifyCb %d", error); + mNotifyCb(CAMERA_MSG_ERROR, CAMERA_ERROR_UNKNOWN, 0, mCallbackCookie); + } + + LOG_FUNCTION_NAME_EXIT; +} + +bool AppCallbackNotifier::notificationThread() +{ + bool shouldLive = true; + status_t ret; + + LOG_FUNCTION_NAME; + + //CAMHAL_LOGDA("Notification Thread waiting for message"); + ret = TIUTILS::MessageQueue::waitForMsg(&mNotificationThread->msgQ(), + &mEventQ, + &mFrameQ, + AppCallbackNotifier::NOTIFIER_TIMEOUT); + + //CAMHAL_LOGDA("Notification Thread received message"); + + if (mNotificationThread->msgQ().hasMsg()) { + ///Received a message from CameraHal, process it + CAMHAL_LOGDA("Notification Thread received message from Camera HAL"); + shouldLive = processMessage(); + if(!shouldLive) { + CAMHAL_LOGDA("Notification Thread exiting."); + return shouldLive; + } + } + + if(mEventQ.hasMsg()) { + ///Received an event from one of the event providers + CAMHAL_LOGDA("Notification Thread received an event from event provider (CameraAdapter)"); + notifyEvent(); + } + + if(mFrameQ.hasMsg()) { + ///Received a frame from one of the frame providers + //CAMHAL_LOGDA("Notification Thread received a frame from frame provider (CameraAdapter)"); + notifyFrame(); + } + + LOG_FUNCTION_NAME_EXIT; + return shouldLive; +} + +void AppCallbackNotifier::notifyEvent() +{ + ///Receive and send the event notifications to app + TIUTILS::Message msg; + LOG_FUNCTION_NAME; + { + Mutex::Autolock lock(mLock); + if(!mEventQ.isEmpty()) { + mEventQ.get(&msg); + } else { + return; + } + } + bool ret = true; + CameraHalEvent *evt = NULL; + 
CameraHalEvent::FocusEventData *focusEvtData; + CameraHalEvent::ZoomEventData *zoomEvtData; + CameraHalEvent::FaceEventData faceEvtData; + + if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) + { + return; + } + + switch(msg.command) + { + case AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT: + + evt = ( CameraHalEvent * ) msg.arg1; + + if ( NULL == evt ) + { + CAMHAL_LOGEA("Invalid CameraHalEvent"); + return; + } + + switch(evt->mEventType) + { + case CameraHalEvent::EVENT_SHUTTER: + + if ( ( NULL != mCameraHal ) && + ( NULL != mNotifyCb ) && + ( mCameraHal->msgTypeEnabled(CAMERA_MSG_SHUTTER) ) ) + { + mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie); + } + mRawAvailable = false; + + break; + + case CameraHalEvent::EVENT_FOCUS_LOCKED: + case CameraHalEvent::EVENT_FOCUS_ERROR: + focusEvtData = &evt->mEventData->focusEvent; + if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_SUCCESS ) && + ( NULL != mCameraHal ) && + ( NULL != mNotifyCb ) && + ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) ) + { + mCameraHal->disableMsgType(CAMERA_MSG_FOCUS); + mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie); + } + else if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_FAIL ) && + ( NULL != mCameraHal ) && + ( NULL != mNotifyCb ) && + ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) ) + { + mCameraHal->disableMsgType(CAMERA_MSG_FOCUS); + mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie); + } + else if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_PENDING ) && + ( NULL != mCameraHal ) && + ( NULL != mNotifyCb ) && + ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS_MOVE) ) ) + { + mNotifyCb(CAMERA_MSG_FOCUS_MOVE, true, 0, mCallbackCookie); + } + else if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_DONE ) && + ( NULL != mCameraHal ) && + ( NULL != mNotifyCb ) && + ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS_MOVE) ) ) + { + mNotifyCb(CAMERA_MSG_FOCUS_MOVE, false, 0, mCallbackCookie); + } + + break; + 
+ case CameraHalEvent::EVENT_ZOOM_INDEX_REACHED: + + zoomEvtData = &evt->mEventData->zoomEvent; + + if ( ( NULL != mCameraHal ) && + ( NULL != mNotifyCb) && + ( mCameraHal->msgTypeEnabled(CAMERA_MSG_ZOOM) ) ) + { + mNotifyCb(CAMERA_MSG_ZOOM, zoomEvtData->currentZoomIndex, zoomEvtData->targetZoomIndexReached, mCallbackCookie); + } + + break; + + case CameraHalEvent::EVENT_FACE: + + faceEvtData = evt->mEventData->faceEvent; + + if ( ( NULL != mCameraHal ) && + ( NULL != mNotifyCb) && + ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA) ) ) + { + // WA for an issue inside CameraService + camera_memory_t *tmpBuffer = mRequestMemory(-1, 1, 1, NULL); + + mDataCb(CAMERA_MSG_PREVIEW_METADATA, + tmpBuffer, + 0, + faceEvtData->getFaceResult(), + mCallbackCookie); + + faceEvtData.clear(); + + if ( NULL != tmpBuffer ) { + tmpBuffer->release(tmpBuffer); + } + + } + + break; + + case CameraHalEvent::ALL_EVENTS: + break; + default: + break; + } + + break; + } + + if ( NULL != evt ) + { + delete evt; + } + + + LOG_FUNCTION_NAME_EXIT; + +} + +static void alignYV12(int width, + int height, + int &yStride, + int &uvStride, + int &ySize, + int &uvSize, + int &size) +{ + yStride = ( width + 0xF ) & ~0xF; + uvStride = ( yStride / 2 + 0xF ) & ~0xF; + ySize = yStride * height; + uvSize = uvStride * height / 2; + size = ySize + uvSize * 2; +} + +static void copy2Dto1D(void *dst, + void *src, + int width, + int height, + size_t stride, + uint32_t offset, + unsigned int bytesPerPixel, + size_t length, + const char *pixelFormat) +{ + unsigned int alignedRow, row; + unsigned char *bufferDst, *bufferSrc; + unsigned char *bufferDstEnd, *bufferSrcEnd; + uint16_t *bufferSrc_UV; + + unsigned int *y_uv = (unsigned int *)src; + + CAMHAL_LOGVB("copy2Dto1D() y= %p ; uv=%p.",y_uv[0], y_uv[1]); + CAMHAL_LOGVB("pixelFormat= %s; offset=%d", pixelFormat,offset); + + if (pixelFormat!=NULL) { + if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV422I) == 0) { + bytesPerPixel = 2; + bufferSrc = ( 
unsigned char * ) y_uv[0] + offset; + uint32_t xOff = offset % stride; + uint32_t yOff = offset / stride; + uint8_t *bufferSrcUV = ((uint8_t*)y_uv[1] + (stride/2)*yOff + xOff); + uint8_t *bufferSrcUVEven = bufferSrcUV; + + uint8_t *bufferDstY = ( uint8_t * ) dst; + uint8_t *bufferDstU = bufferDstY + 1; + uint8_t *bufferDstV = bufferDstY + 3; + + // going to convert from NV12 here and return + for ( int i = 0 ; i < height; i ++ ) { + for ( int j = 0 ; j < width / 2 ; j++ ) { + + // Y + *bufferDstY = *bufferSrc; + bufferSrc++; + bufferDstY += 2; + + *bufferDstY = *bufferSrc; + bufferSrc++; + bufferDstY += 2; + + // V + *bufferDstV = *(bufferSrcUV + 1); + bufferDstV += 4; + + // U + *bufferDstU = *bufferSrcUV; + bufferDstU += 4; + + bufferSrcUV += 2; + } + if ( i % 2 ) { + bufferSrcUV += ( stride - width); + bufferSrcUVEven = bufferSrcUV; + } else { + bufferSrcUV = bufferSrcUVEven; + } + bufferSrc += ( stride - width); + } + + return; + } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 || + strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) { + bytesPerPixel = 1; + bufferDst = ( unsigned char * ) dst; + bufferDstEnd = ( unsigned char * ) dst + width*height*bytesPerPixel; + bufferSrc = ( unsigned char * ) y_uv[0] + offset; + bufferSrcEnd = ( unsigned char * ) ( ( size_t ) y_uv[0] + length + offset); + row = width*bytesPerPixel; + alignedRow = stride-width; + int stride_bytes = stride / 8; + uint32_t xOff = offset % stride; + uint32_t yOff = offset / stride; + + // going to convert from NV12 here and return + // Step 1: Y plane: iterate through each row and copy + for ( int i = 0 ; i < height ; i++) { + memcpy(bufferDst, bufferSrc, row); + bufferSrc += stride; + bufferDst += row; + if ( ( bufferSrc > bufferSrcEnd ) || ( bufferDst > bufferDstEnd ) ) { + break; + } + } + + bufferSrc_UV = ( uint16_t * ) ((uint8_t*)y_uv[1] + (stride/2)*yOff + xOff); + + if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) { + 
uint16_t *bufferDst_UV; + + // Step 2: UV plane: convert NV12 to NV21 by swapping U & V + bufferDst_UV = (uint16_t *) (((uint8_t*)dst)+row*height); + + for (int i = 0 ; i < height/2 ; i++, bufferSrc_UV += alignedRow/2) { + int n = width; + asm volatile ( + " pld [%[src], %[src_stride], lsl #2] \n\t" + " cmp %[n], #32 \n\t" + " blt 1f \n\t" + "0: @ 32 byte swap \n\t" + " sub %[n], %[n], #32 \n\t" + " vld2.8 {q0, q1} , [%[src]]! \n\t" + " vswp q0, q1 \n\t" + " cmp %[n], #32 \n\t" + " vst2.8 {q0,q1},[%[dst]]! \n\t" + " bge 0b \n\t" + "1: @ Is there enough data? \n\t" + " cmp %[n], #16 \n\t" + " blt 3f \n\t" + "2: @ 16 byte swap \n\t" + " sub %[n], %[n], #16 \n\t" + " vld2.8 {d0, d1} , [%[src]]! \n\t" + " vswp d0, d1 \n\t" + " cmp %[n], #16 \n\t" + " vst2.8 {d0,d1},[%[dst]]! \n\t" + " bge 2b \n\t" + "3: @ Is there enough data? \n\t" + " cmp %[n], #8 \n\t" + " blt 5f \n\t" + "4: @ 8 byte swap \n\t" + " sub %[n], %[n], #8 \n\t" + " vld2.8 {d0, d1} , [%[src]]! \n\t" + " vswp d0, d1 \n\t" + " cmp %[n], #8 \n\t" + " vst2.8 {d0[0],d1[0]},[%[dst]]! \n\t" + " bge 4b \n\t" + "5: @ end \n\t" +#ifdef NEEDS_ARM_ERRATA_754319_754320 + " vmov s0,s0 @ add noop for errata item \n\t" +#endif + : [dst] "+r" (bufferDst_UV), [src] "+r" (bufferSrc_UV), [n] "+r" (n) + : [src_stride] "r" (stride_bytes) + : "cc", "memory", "q0", "q1" + ); + } + } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) { + uint16_t *bufferDst_U; + uint16_t *bufferDst_V; + + // Step 2: UV plane: convert NV12 to YV12 by de-interleaving U & V + // TODO(XXX): This version of CameraHal assumes NV12 format it set at + // camera adapter to support YV12. 
Need to address for + // USBCamera + + int yStride, uvStride, ySize, uvSize, size; + alignYV12(width, height, yStride, uvStride, ySize, uvSize, size); + + bufferDst_V = (uint16_t *) (((uint8_t*)dst) + ySize); + bufferDst_U = (uint16_t *) (((uint8_t*)dst) + ySize + uvSize); + int inc = (uvStride - width/2)/2; + + for (int i = 0 ; i < height/2 ; i++, bufferSrc_UV += alignedRow/2) { + int n = width; + asm volatile ( + " pld [%[src], %[src_stride], lsl #2] \n\t" + " cmp %[n], #32 \n\t" + " blt 1f \n\t" + "0: @ 32 byte swap \n\t" + " sub %[n], %[n], #32 \n\t" + " vld2.8 {q0, q1} , [%[src]]! \n\t" + " cmp %[n], #32 \n\t" + " vst1.8 {q1},[%[dst_v]]! \n\t" + " vst1.8 {q0},[%[dst_u]]! \n\t" + " bge 0b \n\t" + "1: @ Is there enough data? \n\t" + " cmp %[n], #16 \n\t" + " blt 3f \n\t" + "2: @ 16 byte swap \n\t" + " sub %[n], %[n], #16 \n\t" + " vld2.8 {d0, d1} , [%[src]]! \n\t" + " cmp %[n], #16 \n\t" + " vst1.8 {d1},[%[dst_v]]! \n\t" + " vst1.8 {d0},[%[dst_u]]! \n\t" + " bge 2b \n\t" + "3: @ Is there enough data? \n\t" + " cmp %[n], #8 \n\t" + " blt 5f \n\t" + "4: @ 8 byte swap \n\t" + " sub %[n], %[n], #8 \n\t" + " vld2.8 {d0, d1} , [%[src]]! \n\t" + " cmp %[n], #8 \n\t" + " vst1.8 {d1[0]},[%[dst_v]]! \n\t" + " vst1.8 {d0[0]},[%[dst_u]]! 
\n\t" + " bge 4b \n\t" + "5: @ end \n\t" +#ifdef NEEDS_ARM_ERRATA_754319_754320 + " vmov s0,s0 @ add noop for errata item \n\t" +#endif + : [dst_u] "+r" (bufferDst_U), [dst_v] "+r" (bufferDst_V), + [src] "+r" (bufferSrc_UV), [n] "+r" (n) + : [src_stride] "r" (stride_bytes) + : "cc", "memory", "q0", "q1" + ); + + bufferDst_U += inc; + bufferDst_V += inc; + } + + } + return ; + + } else if(strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_RGB565) == 0) { + bytesPerPixel = 2; + } + } + + bufferDst = ( unsigned char * ) dst; + bufferSrc = ( unsigned char * ) y_uv[0]; + row = width*bytesPerPixel; + alignedRow = ( row + ( stride -1 ) ) & ( ~ ( stride -1 ) ); + + //iterate through each row + for ( int i = 0 ; i < height ; i++, bufferSrc += alignedRow, bufferDst += row) { + memcpy(bufferDst, bufferSrc, row); + } +} + +void AppCallbackNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t msgType) +{ + camera_memory_t* picture = NULL; + void *dest = NULL, *src = NULL; + + // scope for lock + { + Mutex::Autolock lock(mLock); + + if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) { + goto exit; + } + + picture = mRequestMemory(-1, frame->mLength, 1, NULL); + + if (NULL != picture) { + dest = picture->data; + if (NULL != dest) { + src = (void *) ((unsigned int) frame->mBuffer + frame->mOffset); + memcpy(dest, src, frame->mLength); + } + } + } + + exit: + mFrameProvider->returnFrame(frame->mBuffer, (CameraFrame::FrameType) frame->mFrameType); + + if(picture) { + if((mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED) && + mCameraHal->msgTypeEnabled(msgType)) { + mDataCb(msgType, picture, 0, NULL, mCallbackCookie); + } + picture->release(picture); + } +} + +void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType) +{ + camera_memory_t* picture = NULL; + void* dest = NULL; + + // scope for lock + { + Mutex::Autolock lock(mLock); + + if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) { + goto exit; + } + + if 
(!mPreviewMemory || !frame->mBuffer) { + CAMHAL_LOGDA("Error! One of the buffer is NULL"); + goto exit; + } + + + dest = (void*) mPreviewBufs[mPreviewBufCount]; + + CAMHAL_LOGVB("%d:copy2Dto1D(%p, %p, %d, %d, %d, %d, %d,%s)", + __LINE__, + dest, + frame->mBuffer, + frame->mWidth, + frame->mHeight, + frame->mAlignment, + 2, + frame->mLength, + mPreviewPixelFormat); + + if ( NULL != dest ) { + // data sync frames don't need conversion + if (CameraFrame::FRAME_DATA_SYNC == frame->mFrameType) { + if ( (mPreviewMemory->size / MAX_BUFFERS) >= frame->mLength ) { + memcpy(dest, (void*) frame->mBuffer, frame->mLength); + } else { + memset(dest, 0, (mPreviewMemory->size / MAX_BUFFERS)); + } + } else { + if ((NULL == frame->mYuv[0]) || (NULL == frame->mYuv[1])){ + CAMHAL_LOGEA("Error! One of the YUV Pointer is NULL"); + goto exit; + } + else{ + copy2Dto1D(dest, + frame->mYuv, + frame->mWidth, + frame->mHeight, + frame->mAlignment, + frame->mOffset, + 2, + frame->mLength, + mPreviewPixelFormat); + } + } + } + } + + exit: + mFrameProvider->returnFrame(frame->mBuffer, (CameraFrame::FrameType) frame->mFrameType); + + if((mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED) && + mCameraHal->msgTypeEnabled(msgType) && + (dest != NULL)) { + mDataCb(msgType, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie); + } + + // increment for next buffer + mPreviewBufCount = (mPreviewBufCount + 1) % AppCallbackNotifier::MAX_BUFFERS; +} + +status_t AppCallbackNotifier::dummyRaw() +{ + LOG_FUNCTION_NAME; + + if ( NULL == mRequestMemory ) { + CAMHAL_LOGEA("Can't allocate memory for dummy raw callback!"); + return NO_INIT; + } + + if ( ( NULL != mCameraHal ) && + ( NULL != mDataCb) && + ( NULL != mNotifyCb ) ){ + + if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE) ) { + camera_memory_t *dummyRaw = mRequestMemory(-1, 1, 1, NULL); + + if ( NULL == dummyRaw ) { + CAMHAL_LOGEA("Dummy raw buffer allocation failed!"); + return NO_MEMORY; + } + + mDataCb(CAMERA_MSG_RAW_IMAGE, dummyRaw, 0, 
NULL, mCallbackCookie); + + dummyRaw->release(dummyRaw); + } else if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE_NOTIFY) ) { + mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; +} + +void AppCallbackNotifier::notifyFrame() +{ + ///Receive and send the frame notifications to app + TIUTILS::Message msg; + CameraFrame *frame; + MemoryHeapBase *heap; + MemoryBase *buffer = NULL; + sp memBase; + void *buf = NULL; + + LOG_FUNCTION_NAME; + + { + Mutex::Autolock lock(mLock); + if(!mFrameQ.isEmpty()) { + mFrameQ.get(&msg); + } else { + return; + } + } + + bool ret = true; + + frame = NULL; + switch(msg.command) + { + case AppCallbackNotifier::NOTIFIER_CMD_PROCESS_FRAME: + + frame = (CameraFrame *) msg.arg1; + if(!frame) + { + break; + } + + if ( (CameraFrame::RAW_FRAME == frame->mFrameType )&& + ( NULL != mCameraHal ) && + ( NULL != mDataCb) && + ( NULL != mNotifyCb ) ) + { + + if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE) ) + { +#ifdef COPY_IMAGE_BUFFER + copyAndSendPictureFrame(frame, CAMERA_MSG_RAW_IMAGE); +#else + //TODO: Find a way to map a Tiler buffer to a MemoryHeapBase +#endif + } + else { + if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE_NOTIFY) ) { + mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie); + } + mFrameProvider->returnFrame(frame->mBuffer, + (CameraFrame::FrameType) frame->mFrameType); + } + + mRawAvailable = true; + + } + else if ( (CameraFrame::IMAGE_FRAME == frame->mFrameType) && + (NULL != mCameraHal) && + (NULL != mDataCb) && + (CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG & frame->mQuirks) ) + { + + int encode_quality = 100, tn_quality = 100; + int tn_width, tn_height; + unsigned int current_snapshot = 0; + Encoder_libjpeg::params *main_jpeg = NULL, *tn_jpeg = NULL; + void* exif_data = NULL; + camera_memory_t* raw_picture = mRequestMemory(-1, frame->mLength, 1, NULL); + + if(raw_picture) { + buf = raw_picture->data; + } + + CameraParameters parameters; 
+ char *params = mCameraHal->getParameters(); + const String8 strParams(params); + parameters.unflatten(strParams); + + encode_quality = parameters.getInt(CameraParameters::KEY_JPEG_QUALITY); + if (encode_quality < 0 || encode_quality > 100) { + encode_quality = 100; + } + + tn_quality = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY); + if (tn_quality < 0 || tn_quality > 100) { + tn_quality = 100; + } + + if (CameraFrame::HAS_EXIF_DATA & frame->mQuirks) { + exif_data = frame->mCookie2; + } + + main_jpeg = (Encoder_libjpeg::params*) + malloc(sizeof(Encoder_libjpeg::params)); + + // Video snapshot with LDCNSF on adds a few bytes start offset + // and a few bytes on every line. They must be skipped. + int rightCrop = frame->mAlignment/2 - frame->mWidth; + + CAMHAL_LOGDB("Video snapshot right crop = %d", rightCrop); + CAMHAL_LOGDB("Video snapshot offset = %d", frame->mOffset); + + if (main_jpeg) { + main_jpeg->src = (uint8_t*) frame->mBuffer; + main_jpeg->src_size = frame->mLength; + main_jpeg->dst = (uint8_t*) buf; + main_jpeg->dst_size = frame->mLength; + main_jpeg->quality = encode_quality; + main_jpeg->in_width = frame->mAlignment/2; // use stride here + main_jpeg->in_height = frame->mHeight; + main_jpeg->out_width = frame->mAlignment/2; + main_jpeg->out_height = frame->mHeight; + main_jpeg->right_crop = rightCrop; + main_jpeg->start_offset = frame->mOffset; + main_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV422I; + } + + tn_width = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH); + tn_height = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT); + + if ((tn_width > 0) && (tn_height > 0)) { + tn_jpeg = (Encoder_libjpeg::params*) + malloc(sizeof(Encoder_libjpeg::params)); + // if malloc fails just keep going and encode main jpeg + if (!tn_jpeg) { + tn_jpeg = NULL; + } + } + + if (tn_jpeg) { + int width, height; + parameters.getPreviewSize(&width,&height); + current_snapshot = (mPreviewBufCount + MAX_BUFFERS - 1) % 
MAX_BUFFERS; + tn_jpeg->src = (uint8_t*) mPreviewBufs[current_snapshot]; + tn_jpeg->src_size = mPreviewMemory->size / MAX_BUFFERS; + tn_jpeg->dst = (uint8_t*) malloc(tn_jpeg->src_size); + tn_jpeg->dst_size = tn_jpeg->src_size; + tn_jpeg->quality = tn_quality; + tn_jpeg->in_width = width; + tn_jpeg->in_height = height; + tn_jpeg->out_width = tn_width; + tn_jpeg->out_height = tn_height; + tn_jpeg->right_crop = 0; + tn_jpeg->start_offset = 0; + tn_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV420SP;; + } + + sp encoder = new Encoder_libjpeg(main_jpeg, + tn_jpeg, + AppCallbackNotifierEncoderCallback, + (CameraFrame::FrameType)frame->mFrameType, + this, + raw_picture, + exif_data); + gEncoderQueue.add(frame->mBuffer, encoder); + encoder->run(); + encoder.clear(); + if (params != NULL) + { + mCameraHal->putParameters(params); + } + } + else if ( ( CameraFrame::IMAGE_FRAME == frame->mFrameType ) && + ( NULL != mCameraHal ) && + ( NULL != mDataCb) ) + { + + // CTS, MTS requirements: Every 'takePicture()' call + // who registers a raw callback should receive one + // as well. This is not always the case with + // CameraAdapters though. 
+ if (!mRawAvailable) { + dummyRaw(); + } else { + mRawAvailable = false; + } + +#ifdef COPY_IMAGE_BUFFER + { + Mutex::Autolock lock(mBurstLock); +#if 0 //TODO: enable burst mode later + if ( mBurst ) + { + `(CAMERA_MSG_BURST_IMAGE, JPEGPictureMemBase, mCallbackCookie); + } + else +#endif + { + copyAndSendPictureFrame(frame, CAMERA_MSG_COMPRESSED_IMAGE); + } + } +#else + //TODO: Find a way to map a Tiler buffer to a MemoryHeapBase +#endif + } + else if ( ( CameraFrame::VIDEO_FRAME_SYNC == frame->mFrameType ) && + ( NULL != mCameraHal ) && + ( NULL != mDataCb) && + ( mCameraHal->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME) ) ) + { + mRecordingLock.lock(); + if(mRecording) + { + if(mUseMetaDataBufferMode) + { + camera_memory_t *videoMedatadaBufferMemory = + (camera_memory_t *) mVideoMetadataBufferMemoryMap.valueFor((uint32_t) frame->mBuffer); + video_metadata_t *videoMetadataBuffer = (video_metadata_t *) videoMedatadaBufferMemory->data; + + if( (NULL == videoMedatadaBufferMemory) || (NULL == videoMetadataBuffer) || (NULL == frame->mBuffer) ) + { + CAMHAL_LOGEA("Error! 
One of the video buffers is NULL"); + break; + } + + if ( mUseVideoBuffers ) + { + int vBuf = mVideoMap.valueFor((uint32_t) frame->mBuffer); + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + Rect bounds; + bounds.left = 0; + bounds.top = 0; + bounds.right = mVideoWidth; + bounds.bottom = mVideoHeight; + + void *y_uv[2]; + mapper.lock((buffer_handle_t)vBuf, CAMHAL_GRALLOC_USAGE, bounds, y_uv); + + structConvImage input = {(int)frame->mWidth, + (int)frame->mHeight, + 4096, + IC_FORMAT_YCbCr420_lp, + (mmByte *)frame->mYuv[0], + (mmByte *)frame->mYuv[1], + (int)frame->mOffset}; + + structConvImage output = {mVideoWidth, + mVideoHeight, + 4096, + IC_FORMAT_YCbCr420_lp, + (mmByte *)y_uv[0], + (mmByte *)y_uv[1], + 0}; + + VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0); + mapper.unlock((buffer_handle_t)vBuf); + videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource; + videoMetadataBuffer->handle = (void *)vBuf; + videoMetadataBuffer->offset = 0; + } + else + { + videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource; + videoMetadataBuffer->handle = frame->mBuffer; + videoMetadataBuffer->offset = frame->mOffset; + } + + CAMHAL_LOGVB("mDataCbTimestamp : frame->mBuffer=0x%x, videoMetadataBuffer=0x%x, videoMedatadaBufferMemory=0x%x", + frame->mBuffer, videoMetadataBuffer, videoMedatadaBufferMemory); + + mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME, + videoMedatadaBufferMemory, 0, mCallbackCookie); + } + else + { + //TODO: Need to revisit this, should ideally be mapping the TILER buffer using mRequestMemory + camera_memory_t* fakebuf = mRequestMemory(-1, sizeof(buffer_handle_t), 1, NULL); + if( (NULL == fakebuf) || ( NULL == fakebuf->data) || ( NULL == frame->mBuffer)) + { + CAMHAL_LOGEA("Error! 
One of the video buffers is NULL"); + break; + } + + *reinterpret_cast(fakebuf->data) = reinterpret_cast(frame->mBuffer); + mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME, fakebuf, 0, mCallbackCookie); + fakebuf->release(fakebuf); + } + } + mRecordingLock.unlock(); + + } + else if(( CameraFrame::SNAPSHOT_FRAME == frame->mFrameType ) && + ( NULL != mCameraHal ) && + ( NULL != mDataCb) && + ( NULL != mNotifyCb)) { + //When enabled, measurement data is sent instead of video data + if ( !mMeasurementEnabled ) { + copyAndSendPreviewFrame(frame, CAMERA_MSG_POSTVIEW_FRAME); + } else { + mFrameProvider->returnFrame(frame->mBuffer, + (CameraFrame::FrameType) frame->mFrameType); + } + } + else if ( ( CameraFrame::PREVIEW_FRAME_SYNC== frame->mFrameType ) && + ( NULL != mCameraHal ) && + ( NULL != mDataCb) && + ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) ) { + //When enabled, measurement data is sent instead of video data + if ( !mMeasurementEnabled ) { + copyAndSendPreviewFrame(frame, CAMERA_MSG_PREVIEW_FRAME); + } else { + mFrameProvider->returnFrame(frame->mBuffer, + (CameraFrame::FrameType) frame->mFrameType); + } + } + else if ( ( CameraFrame::FRAME_DATA_SYNC == frame->mFrameType ) && + ( NULL != mCameraHal ) && + ( NULL != mDataCb) && + ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) ) { + copyAndSendPreviewFrame(frame, CAMERA_MSG_PREVIEW_FRAME); + } else { + mFrameProvider->returnFrame(frame->mBuffer, + ( CameraFrame::FrameType ) frame->mFrameType); + CAMHAL_LOGDB("Frame type 0x%x is still unsupported!", frame->mFrameType); + } + + break; + + default: + + break; + + }; + +exit: + + if ( NULL != frame ) + { + delete frame; + } + + LOG_FUNCTION_NAME_EXIT; +} + +void AppCallbackNotifier::frameCallbackRelay(CameraFrame* caFrame) +{ + LOG_FUNCTION_NAME; + AppCallbackNotifier *appcbn = (AppCallbackNotifier*) (caFrame->mCookie); + appcbn->frameCallback(caFrame); + LOG_FUNCTION_NAME_EXIT; +} + +void AppCallbackNotifier::frameCallback(CameraFrame* 
caFrame) +{ + ///Post the event to the event queue of AppCallbackNotifier + TIUTILS::Message msg; + CameraFrame *frame; + + LOG_FUNCTION_NAME; + + if ( NULL != caFrame ) + { + + frame = new CameraFrame(*caFrame); + if ( NULL != frame ) + { + msg.command = AppCallbackNotifier::NOTIFIER_CMD_PROCESS_FRAME; + msg.arg1 = frame; + mFrameQ.put(&msg); + } + else + { + CAMHAL_LOGEA("Not enough resources to allocate CameraFrame"); + } + + } + + LOG_FUNCTION_NAME_EXIT; +} + +void AppCallbackNotifier::flushAndReturnFrames() +{ + TIUTILS::Message msg; + CameraFrame *frame; + + Mutex::Autolock lock(mLock); + while (!mFrameQ.isEmpty()) { + mFrameQ.get(&msg); + frame = (CameraFrame*) msg.arg1; + if (frame) { + mFrameProvider->returnFrame(frame->mBuffer, + (CameraFrame::FrameType) frame->mFrameType); + } + } + + LOG_FUNCTION_NAME_EXIT; +} + +void AppCallbackNotifier::eventCallbackRelay(CameraHalEvent* chEvt) +{ + LOG_FUNCTION_NAME; + AppCallbackNotifier *appcbn = (AppCallbackNotifier*) (chEvt->mCookie); + appcbn->eventCallback(chEvt); + LOG_FUNCTION_NAME_EXIT; +} + +void AppCallbackNotifier::eventCallback(CameraHalEvent* chEvt) +{ + + ///Post the event to the event queue of AppCallbackNotifier + TIUTILS::Message msg; + CameraHalEvent *event; + + + LOG_FUNCTION_NAME; + + if ( NULL != chEvt ) + { + + event = new CameraHalEvent(*chEvt); + if ( NULL != event ) + { + msg.command = AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT; + msg.arg1 = event; + { + Mutex::Autolock lock(mLock); + mEventQ.put(&msg); + } + } + else + { + CAMHAL_LOGEA("Not enough resources to allocate CameraHalEvent"); + } + + } + + LOG_FUNCTION_NAME_EXIT; +} + + +void AppCallbackNotifier::flushEventQueue() +{ + + { + Mutex::Autolock lock(mLock); + mEventQ.clear(); + } +} + + +bool AppCallbackNotifier::processMessage() +{ + ///Retrieve the command from the command queue and process it + TIUTILS::Message msg; + + LOG_FUNCTION_NAME; + + CAMHAL_LOGDA("+Msg get..."); + mNotificationThread->msgQ().get(&msg); + 
CAMHAL_LOGDA("-Msg get..."); + bool ret = true; + + switch(msg.command) + { + case NotificationThread::NOTIFIER_EXIT: + { + CAMHAL_LOGDA("Received NOTIFIER_EXIT command from Camera HAL"); + mNotifierState = AppCallbackNotifier::NOTIFIER_EXITED; + ret = false; + break; + } + default: + { + CAMHAL_LOGEA("Error: ProcessMsg() command from Camera HAL"); + break; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; + + +} + +AppCallbackNotifier::~AppCallbackNotifier() +{ + LOG_FUNCTION_NAME; + + ///Stop app callback notifier if not already stopped + stop(); + + ///Unregister with the frame provider + if ( NULL != mFrameProvider ) + { + mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES); + } + + //unregister with the event provider + if ( NULL != mEventProvider ) + { + mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS); + } + + TIUTILS::Message msg = {0,0,0,0,0,0}; + msg.command = NotificationThread::NOTIFIER_EXIT; + + ///Post the message to display thread + mNotificationThread->msgQ().put(&msg); + + //Exit and cleanup the thread + mNotificationThread->requestExit(); + mNotificationThread->join(); + + //Delete the display thread + mNotificationThread.clear(); + + + ///Free the event and frame providers + if ( NULL != mEventProvider ) + { + ///Deleting the event provider + CAMHAL_LOGDA("Stopping Event Provider"); + delete mEventProvider; + mEventProvider = NULL; + } + + if ( NULL != mFrameProvider ) + { + ///Deleting the frame provider + CAMHAL_LOGDA("Stopping Frame Provider"); + delete mFrameProvider; + mFrameProvider = NULL; + } + + releaseSharedVideoBuffers(); + + LOG_FUNCTION_NAME_EXIT; +} + +//Free all video heaps and buffers +void AppCallbackNotifier::releaseSharedVideoBuffers() +{ + LOG_FUNCTION_NAME; + + if(mUseMetaDataBufferMode) + { + camera_memory_t* videoMedatadaBufferMemory; + for (unsigned int i = 0; i < mVideoMetadataBufferMemoryMap.size(); i++) + { + videoMedatadaBufferMemory = (camera_memory_t*) 
mVideoMetadataBufferMemoryMap.valueAt(i); + if(NULL != videoMedatadaBufferMemory) + { + videoMedatadaBufferMemory->release(videoMedatadaBufferMemory); + CAMHAL_LOGDB("Released videoMedatadaBufferMemory=0x%x", videoMedatadaBufferMemory); + } + } + + mVideoMetadataBufferMemoryMap.clear(); + mVideoMetadataBufferReverseMap.clear(); + if (mUseVideoBuffers) + { + mVideoMap.clear(); + } + } + + LOG_FUNCTION_NAME_EXIT; +} + +void AppCallbackNotifier::setEventProvider(int32_t eventMask, MessageNotifier * eventNotifier) +{ + + LOG_FUNCTION_NAME; + ///@remarks There is no NULL check here. We will check + ///for NULL when we get start command from CameraHal + ///@Remarks Currently only one event provider (CameraAdapter) is supported + ///@todo Have an array of event providers for each event bitmask + mEventProvider = new EventProvider(eventNotifier, this, eventCallbackRelay); + if ( NULL == mEventProvider ) + { + CAMHAL_LOGEA("Error in creating EventProvider"); + } + else + { + mEventProvider->enableEventNotification(eventMask); + } + + LOG_FUNCTION_NAME_EXIT; +} + +void AppCallbackNotifier::setFrameProvider(FrameNotifier *frameNotifier) +{ + LOG_FUNCTION_NAME; + ///@remarks There is no NULL check here. 
We will check + ///for NULL when we get the start command from CameraAdapter + mFrameProvider = new FrameProvider(frameNotifier, this, frameCallbackRelay); + if ( NULL == mFrameProvider ) + { + CAMHAL_LOGEA("Error in creating FrameProvider"); + } + else + { + //Register only for captured images and RAW for now + //TODO: Register for and handle all types of frames + mFrameProvider->enableFrameNotification(CameraFrame::IMAGE_FRAME); + mFrameProvider->enableFrameNotification(CameraFrame::RAW_FRAME); + } + + LOG_FUNCTION_NAME_EXIT; +} + +status_t AppCallbackNotifier::startPreviewCallbacks(CameraParameters ¶ms, void *buffers, uint32_t *offsets, int fd, size_t length, size_t count) +{ + sp heap; + sp buffer; + unsigned int *bufArr; + int size = 0; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mLock); + + if ( NULL == mFrameProvider ) + { + CAMHAL_LOGEA("Trying to start video recording without FrameProvider"); + return -EINVAL; + } + + if ( mPreviewing ) + { + CAMHAL_LOGDA("+Already previewing"); + return NO_INIT; + } + + int w,h; + ///Get preview size + params.getPreviewSize(&w, &h); + + //Get the preview pixel format + mPreviewPixelFormat = params.getPreviewFormat(); + + if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) + { + size = w*h*2; + mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV422I; + } + else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ) + { + size = (w*h*3)/2; + mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV420SP; + } + else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) + { + size = w*h*2; + mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_RGB565; + } + else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0) + { + int yStride, uvStride, ySize, uvSize; + alignYV12(w, h, yStride, uvStride, ySize, uvSize, size); + mPreviewPixelFormat = 
CameraParameters::PIXEL_FORMAT_YUV420P; + } + + mPreviewMemory = mRequestMemory(-1, size, AppCallbackNotifier::MAX_BUFFERS, NULL); + if (!mPreviewMemory) { + return NO_MEMORY; + } + + for (int i=0; i < AppCallbackNotifier::MAX_BUFFERS; i++) { + mPreviewBufs[i] = (unsigned char*) mPreviewMemory->data + (i*size); + } + + if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME ) ) { + mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC); + } + + mPreviewBufCount = 0; + + mPreviewing = true; + + LOG_FUNCTION_NAME; + + return NO_ERROR; +} + +void AppCallbackNotifier::setBurst(bool burst) +{ + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mBurstLock); + + mBurst = burst; + + LOG_FUNCTION_NAME_EXIT; +} + +void AppCallbackNotifier::useVideoBuffers(bool useVideoBuffers) +{ + LOG_FUNCTION_NAME; + + mUseVideoBuffers = useVideoBuffers; + + LOG_FUNCTION_NAME_EXIT; +} + +bool AppCallbackNotifier::getUesVideoBuffers() +{ + return mUseVideoBuffers; +} + +void AppCallbackNotifier::setVideoRes(int width, int height) +{ + LOG_FUNCTION_NAME; + + mVideoWidth = width; + mVideoHeight = height; + + LOG_FUNCTION_NAME_EXIT; +} + +status_t AppCallbackNotifier::stopPreviewCallbacks() +{ + sp heap; + sp buffer; + + LOG_FUNCTION_NAME; + + if ( NULL == mFrameProvider ) + { + CAMHAL_LOGEA("Trying to stop preview callbacks without FrameProvider"); + return -EINVAL; + } + + if ( !mPreviewing ) + { + return NO_INIT; + } + + mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC); + + { + Mutex::Autolock lock(mLock); + mPreviewMemory->release(mPreviewMemory); + } + + mPreviewing = false; + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; + +} + +status_t AppCallbackNotifier::useMetaDataBufferMode(bool enable) +{ + mUseMetaDataBufferMode = enable; + + return NO_ERROR; +} + + +status_t AppCallbackNotifier::startRecording() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mRecordingLock); + + if ( NULL == mFrameProvider ) + { + 
CAMHAL_LOGEA("Trying to start video recording without FrameProvider"); + ret = -1; + } + + if(mRecording) + { + return NO_INIT; + } + + if ( NO_ERROR == ret ) + { + mFrameProvider->enableFrameNotification(CameraFrame::VIDEO_FRAME_SYNC); + } + + mRecording = true; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +//Allocate metadata buffers for video recording +status_t AppCallbackNotifier::initSharedVideoBuffers(void *buffers, uint32_t *offsets, int fd, size_t length, size_t count, void *vidBufs) +{ + status_t ret = NO_ERROR; + LOG_FUNCTION_NAME; + + if(mUseMetaDataBufferMode) + { + uint32_t *bufArr = NULL; + camera_memory_t* videoMedatadaBufferMemory = NULL; + + if(NULL == buffers) + { + CAMHAL_LOGEA("Error! Video buffers are NULL"); + return BAD_VALUE; + } + bufArr = (uint32_t *) buffers; + + for (uint32_t i = 0; i < count; i++) + { + videoMedatadaBufferMemory = mRequestMemory(-1, sizeof(video_metadata_t), 1, NULL); + if((NULL == videoMedatadaBufferMemory) || (NULL == videoMedatadaBufferMemory->data)) + { + CAMHAL_LOGEA("Error! 
Could not allocate memory for Video Metadata Buffers"); + return NO_MEMORY; + } + + mVideoMetadataBufferMemoryMap.add(bufArr[i], (uint32_t)(videoMedatadaBufferMemory)); + mVideoMetadataBufferReverseMap.add((uint32_t)(videoMedatadaBufferMemory->data), bufArr[i]); + CAMHAL_LOGDB("bufArr[%d]=0x%x, videoMedatadaBufferMemory=0x%x, videoMedatadaBufferMemory->data=0x%x", + i, bufArr[i], videoMedatadaBufferMemory, videoMedatadaBufferMemory->data); + + if (vidBufs != NULL) + { + uint32_t *vBufArr = (uint32_t *) vidBufs; + mVideoMap.add(bufArr[i], vBufArr[i]); + CAMHAL_LOGVB("bufArr[%d]=0x%x, vBuffArr[%d]=0x%x", i, bufArr[i], i, vBufArr[i]); + } + } + } + +exit: + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t AppCallbackNotifier::stopRecording() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mRecordingLock); + + if ( NULL == mFrameProvider ) + { + CAMHAL_LOGEA("Trying to stop video recording without FrameProvider"); + ret = -1; + } + + if(!mRecording) + { + return NO_INIT; + } + + if ( NO_ERROR == ret ) + { + mFrameProvider->disableFrameNotification(CameraFrame::VIDEO_FRAME_SYNC); + } + + ///Release the shared video buffers + releaseSharedVideoBuffers(); + + mRecording = false; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t AppCallbackNotifier::releaseRecordingFrame(const void* mem) +{ + status_t ret = NO_ERROR; + void *frame = NULL; + + LOG_FUNCTION_NAME; + if ( NULL == mFrameProvider ) + { + CAMHAL_LOGEA("Trying to stop video recording without FrameProvider"); + ret = -1; + } + + if ( NULL == mem ) + { + CAMHAL_LOGEA("Video Frame released is invalid"); + ret = -1; + } + + if( NO_ERROR != ret ) + { + return ret; + } + + if(mUseMetaDataBufferMode) + { + video_metadata_t *videoMetadataBuffer = (video_metadata_t *) mem ; + frame = (void*) mVideoMetadataBufferReverseMap.valueFor((uint32_t) videoMetadataBuffer); + CAMHAL_LOGVB("Releasing frame with videoMetadataBuffer=0x%x, videoMetadataBuffer->handle=0x%x & frame 
handle=0x%x\n", + videoMetadataBuffer, videoMetadataBuffer->handle, frame); + } + else + { + frame = (void*)(*((uint32_t *)mem)); + } + + if ( NO_ERROR == ret ) + { + ret = mFrameProvider->returnFrame(frame, CameraFrame::VIDEO_FRAME_SYNC); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t AppCallbackNotifier::enableMsgType(int32_t msgType) +{ + if( msgType & (CAMERA_MSG_POSTVIEW_FRAME | CAMERA_MSG_PREVIEW_FRAME) ) { + mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC); + } + + return NO_ERROR; +} + +status_t AppCallbackNotifier::disableMsgType(int32_t msgType) +{ + if( msgType & (CAMERA_MSG_PREVIEW_FRAME | CAMERA_MSG_POSTVIEW_FRAME) ) { + mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC); + } + + return NO_ERROR; + +} + +status_t AppCallbackNotifier::start() +{ + LOG_FUNCTION_NAME; + if(mNotifierState==AppCallbackNotifier::NOTIFIER_STARTED) + { + CAMHAL_LOGDA("AppCallbackNotifier already running"); + LOG_FUNCTION_NAME_EXIT; + return ALREADY_EXISTS; + } + + ///Check whether initial conditions are met for us to start + ///A frame provider should be available, if not return error + if(!mFrameProvider) + { + ///AppCallbackNotifier not properly initialized + CAMHAL_LOGEA("AppCallbackNotifier not properly initialized - Frame provider is NULL"); + LOG_FUNCTION_NAME_EXIT; + return NO_INIT; + } + + ///At least one event notifier should be available, if not return error + ///@todo Modify here when there is an array of event providers + if(!mEventProvider) + { + CAMHAL_LOGEA("AppCallbackNotifier not properly initialized - Event provider is NULL"); + LOG_FUNCTION_NAME_EXIT; + ///AppCallbackNotifier not properly initialized + return NO_INIT; + } + + mNotifierState = AppCallbackNotifier::NOTIFIER_STARTED; + CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STARTED \n"); + + gEncoderQueue.clear(); + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; + +} + +status_t AppCallbackNotifier::stop() +{ + LOG_FUNCTION_NAME; + + 
if(mNotifierState!=AppCallbackNotifier::NOTIFIER_STARTED) + { + CAMHAL_LOGDA("AppCallbackNotifier already in stopped state"); + LOG_FUNCTION_NAME_EXIT; + return ALREADY_EXISTS; + } + { + Mutex::Autolock lock(mLock); + + mNotifierState = AppCallbackNotifier::NOTIFIER_STOPPED; + CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STOPPED \n"); + } + + while(!gEncoderQueue.isEmpty()) { + sp encoder = gEncoderQueue.valueAt(0); + camera_memory_t* encoded_mem = NULL; + ExifElementsTable* exif = NULL; + + if(encoder.get()) { + encoder->cancel(); + + encoder->getCookies(NULL, (void**) &encoded_mem, (void**) &exif); + if (encoded_mem) { + encoded_mem->release(encoded_mem); + } + if (exif) { + delete exif; + } + + encoder.clear(); + } + gEncoderQueue.removeItemsAt(0); + } + + LOG_FUNCTION_NAME_EXIT; + return NO_ERROR; +} + + +/*--------------------NotificationHandler Class ENDS here-----------------------------*/ + + + +}; diff --git a/camera/BaseCameraAdapter.cpp b/camera/BaseCameraAdapter.cpp new file mode 100644 index 0000000..309e547 --- /dev/null +++ b/camera/BaseCameraAdapter.cpp @@ -0,0 +1,2329 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + + +#define LOG_TAG "CameraHAL" + +#include "BaseCameraAdapter.h" + +namespace android { + +/*--------------------Camera Adapter Class STARTS here-----------------------------*/ + +BaseCameraAdapter::BaseCameraAdapter() +{ + mReleaseImageBuffersCallback = NULL; + mEndImageCaptureCallback = NULL; + mErrorNotifier = NULL; + mEndCaptureData = NULL; + mReleaseData = NULL; + mRecording = false; + + mPreviewBuffers = NULL; + mPreviewBufferCount = 0; + mPreviewBuffersLength = 0; + + mVideoBuffers = NULL; + mVideoBuffersCount = 0; + mVideoBuffersLength = 0; + + mCaptureBuffers = NULL; + mCaptureBuffersCount = 0; + mCaptureBuffersLength = 0; + + mPreviewDataBuffers = NULL; + mPreviewDataBuffersCount = 0; + mPreviewDataBuffersLength = 0; + + mAdapterState = INTIALIZED_STATE; + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + mStartFocus.tv_sec = 0; + mStartFocus.tv_usec = 0; + mStartCapture.tv_sec = 0; + mStartCapture.tv_usec = 0; +#endif + +} + +BaseCameraAdapter::~BaseCameraAdapter() +{ + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mSubscriberLock); + + mFrameSubscribers.clear(); + mImageSubscribers.clear(); + mRawSubscribers.clear(); + mVideoSubscribers.clear(); + mFocusSubscribers.clear(); + mShutterSubscribers.clear(); + mZoomSubscribers.clear(); + mFaceSubscribers.clear(); + + LOG_FUNCTION_NAME_EXIT; +} + +status_t BaseCameraAdapter::registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + mReleaseImageBuffersCallback = callback; + mReleaseData = user_data; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::registerEndCaptureCallback(end_image_capture_callback callback, void *user_data) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + mEndImageCaptureCallback= callback; + mEndCaptureData = user_data; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::setErrorHandler(ErrorNotifier *errorNotifier) +{ + status_t 
ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( NULL == errorNotifier ) + { + CAMHAL_LOGEA("Invalid Error Notifier reference"); + ret = -EINVAL; + } + + if ( NO_ERROR == ret ) + { + mErrorNotifier = errorNotifier; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +void BaseCameraAdapter::enableMsgType(int32_t msgs, frame_callback callback, event_callback eventCb, void* cookie) +{ + Mutex::Autolock lock(mSubscriberLock); + + LOG_FUNCTION_NAME; + + if ( CameraFrame::PREVIEW_FRAME_SYNC == msgs ) + { + mFrameSubscribers.add((int) cookie, callback); + } + else if ( CameraFrame::FRAME_DATA_SYNC == msgs ) + { + mFrameDataSubscribers.add((int) cookie, callback); + } + else if ( CameraFrame::IMAGE_FRAME == msgs) + { + mImageSubscribers.add((int) cookie, callback); + } + else if ( CameraFrame::RAW_FRAME == msgs) + { + mRawSubscribers.add((int) cookie, callback); + } + else if ( CameraFrame::VIDEO_FRAME_SYNC == msgs) + { + mVideoSubscribers.add((int) cookie, callback); + } + else if ( CameraHalEvent::ALL_EVENTS == msgs) + { + mFocusSubscribers.add((int) cookie, eventCb); + mShutterSubscribers.add((int) cookie, eventCb); + mZoomSubscribers.add((int) cookie, eventCb); + mFaceSubscribers.add((int) cookie, eventCb); + } + else + { + CAMHAL_LOGEA("Message type subscription no supported yet!"); + } + + LOG_FUNCTION_NAME_EXIT; +} + +void BaseCameraAdapter::disableMsgType(int32_t msgs, void* cookie) +{ + Mutex::Autolock lock(mSubscriberLock); + + LOG_FUNCTION_NAME; + + if ( CameraFrame::PREVIEW_FRAME_SYNC == msgs ) + { + mFrameSubscribers.removeItem((int) cookie); + } + else if ( CameraFrame::FRAME_DATA_SYNC == msgs ) + { + mFrameDataSubscribers.removeItem((int) cookie); + } + else if ( CameraFrame::IMAGE_FRAME == msgs) + { + mImageSubscribers.removeItem((int) cookie); + } + else if ( CameraFrame::RAW_FRAME == msgs) + { + mRawSubscribers.removeItem((int) cookie); + } + else if ( CameraFrame::VIDEO_FRAME_SYNC == msgs) + { + mVideoSubscribers.removeItem((int) cookie); + } + else if 
( CameraFrame::ALL_FRAMES == msgs ) + { + mFrameSubscribers.removeItem((int) cookie); + mFrameDataSubscribers.removeItem((int) cookie); + mImageSubscribers.removeItem((int) cookie); + mRawSubscribers.removeItem((int) cookie); + mVideoSubscribers.removeItem((int) cookie); + } + else if ( CameraHalEvent::ALL_EVENTS == msgs) + { + //Subscribe only for focus + //TODO: Process case by case + mFocusSubscribers.removeItem((int) cookie); + mShutterSubscribers.removeItem((int) cookie); + mZoomSubscribers.removeItem((int) cookie); + mFaceSubscribers.removeItem((int) cookie); + } + else + { + CAMHAL_LOGEB("Message type 0x%x subscription no supported yet!", msgs); + } + + LOG_FUNCTION_NAME_EXIT; +} + +void BaseCameraAdapter::addFramePointers(void *frameBuf, void *buf) +{ + unsigned int *pBuf = (unsigned int *)buf; + Mutex::Autolock lock(mSubscriberLock); + + if ((frameBuf != NULL) && ( pBuf != NULL) ) + { + CameraFrame *frame = new CameraFrame; + frame->mBuffer = frameBuf; + frame->mYuv[0] = pBuf[0]; + frame->mYuv[1] = pBuf[1]; + mFrameQueue.add(frameBuf, frame); + + CAMHAL_LOGVB("Adding Frame=0x%x Y=0x%x UV=0x%x", frame->mBuffer, frame->mYuv[0], frame->mYuv[1]); + } +} + +void BaseCameraAdapter::removeFramePointers() +{ + Mutex::Autolock lock(mSubscriberLock); + + int size = mFrameQueue.size(); + CAMHAL_LOGVB("Removing %d Frames = ", size); + for (int i = 0; i < size; i++) + { + CameraFrame *frame = (CameraFrame *)mFrameQueue.valueAt(i); + CAMHAL_LOGVB("Free Frame=0x%x Y=0x%x UV=0x%x", frame->mBuffer, frame->mYuv[0], frame->mYuv[1]); + delete frame; + } + mFrameQueue.clear(); +} + +void BaseCameraAdapter::returnFrame(void* frameBuf, CameraFrame::FrameType frameType) +{ + status_t res = NO_ERROR; + size_t subscriberCount = 0; + int refCount = -1; + + if ( NULL == frameBuf ) + { + CAMHAL_LOGEA("Invalid frameBuf"); + return; + } + + if ( NO_ERROR == res) + { + Mutex::Autolock lock(mReturnFrameLock); + + refCount = getFrameRefCount(frameBuf, frameType); + + if(frameType == 
CameraFrame::PREVIEW_FRAME_SYNC) + { + mFramesWithDisplay--; + } + else if(frameType == CameraFrame::VIDEO_FRAME_SYNC) + { + mFramesWithEncoder--; + } + + if ( 0 < refCount ) + { + + refCount--; + setFrameRefCount(frameBuf, frameType, refCount); + + + if ( mRecording && (CameraFrame::VIDEO_FRAME_SYNC == frameType) ) { + refCount += getFrameRefCount(frameBuf, CameraFrame::PREVIEW_FRAME_SYNC); + } else if ( mRecording && (CameraFrame::PREVIEW_FRAME_SYNC == frameType) ) { + refCount += getFrameRefCount(frameBuf, CameraFrame::VIDEO_FRAME_SYNC); + } else if ( mRecording && (CameraFrame::SNAPSHOT_FRAME == frameType) ) { + refCount += getFrameRefCount(frameBuf, CameraFrame::VIDEO_FRAME_SYNC); + } + + + } + else + { + CAMHAL_LOGDA("Frame returned when ref count is already zero!!"); + return; + } + } + + CAMHAL_LOGVB("REFCOUNT 0x%x %d", frameBuf, refCount); + + if ( NO_ERROR == res ) + { + //check if someone is holding this buffer + if ( 0 == refCount ) + { +#ifdef DEBUG_LOG + if(mBuffersWithDucati.indexOfKey((int)frameBuf)>=0) + { + ALOGE("Buffer already with Ducati!! 
0x%x", frameBuf); + for(int i=0;imBuffers; + mPreviewBuffersLength = desc->mLength; + mPreviewBuffersAvailable.clear(); + for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ ) + { + mPreviewBuffersAvailable.add(mPreviewBuffers[i], 0); + } + // initial ref count for undeqeueued buffers is 1 since buffer provider + // is still holding on to it + for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ ) + { + mPreviewBuffersAvailable.add(mPreviewBuffers[i], 1); + } + } + + if ( NULL != desc ) + { + ret = useBuffers(CameraAdapter::CAMERA_PREVIEW, + desc->mBuffers, + desc->mCount, + desc->mLength, + desc->mMaxQueueable); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + case CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA: + CAMHAL_LOGDA("Use buffers for preview data"); + desc = ( BuffersDescriptor * ) value1; + + if ( NULL == desc ) + { + CAMHAL_LOGEA("Invalid preview data buffers!"); + return -EINVAL; + } + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + Mutex::Autolock lock(mPreviewDataBufferLock); + mPreviewDataBuffers = (int *) desc->mBuffers; + mPreviewDataBuffersLength = desc->mLength; + mPreviewDataBuffersAvailable.clear(); + for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ ) + { + mPreviewDataBuffersAvailable.add(mPreviewDataBuffers[i], 0); + } + // initial ref count for undeqeueued buffers is 1 since buffer provider + // is still holding on to it + for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ ) + { + mPreviewDataBuffersAvailable.add(mPreviewDataBuffers[i], 1); + } + } + + if ( NULL != desc ) + { + ret = useBuffers(CameraAdapter::CAMERA_MEASUREMENT, + desc->mBuffers, + desc->mCount, + desc->mLength, + desc->mMaxQueueable); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + case CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE: + CAMHAL_LOGDA("Use 
buffers for image capture"); + desc = ( BuffersDescriptor * ) value1; + + if ( NULL == desc ) + { + CAMHAL_LOGEA("Invalid capture buffers!"); + return -EINVAL; + } + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + Mutex::Autolock lock(mCaptureBufferLock); + mCaptureBuffers = (int *) desc->mBuffers; + mCaptureBuffersLength = desc->mLength; + mCaptureBuffersAvailable.clear(); + for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ ) + { + mCaptureBuffersAvailable.add(mCaptureBuffers[i], 0); + } + // initial ref count for undeqeueued buffers is 1 since buffer provider + // is still holding on to it + for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ ) + { + mCaptureBuffersAvailable.add(mCaptureBuffers[i], 1); + } + } + + if ( NULL != desc ) + { + ret = useBuffers(CameraAdapter::CAMERA_IMAGE_CAPTURE, + desc->mBuffers, + desc->mCount, + desc->mLength, + desc->mMaxQueueable); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + case CameraAdapter::CAMERA_START_SMOOTH_ZOOM: + { + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = startSmoothZoom(value1); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + } + + case CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM: + { + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = stopSmoothZoom(); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + } + + case CameraAdapter::CAMERA_START_PREVIEW: + { + + CAMHAL_LOGDA("Start Preview"); + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = startPreview(); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + } + + case 
CameraAdapter::CAMERA_STOP_PREVIEW: + { + + CAMHAL_LOGDA("Stop Preview"); + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = stopPreview(); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + } + + case CameraAdapter::CAMERA_START_VIDEO: + { + + CAMHAL_LOGDA("Start video recording"); + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = startVideoCapture(); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + } + + case CameraAdapter::CAMERA_STOP_VIDEO: + { + + CAMHAL_LOGDA("Stop video recording"); + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = stopVideoCapture(); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + } + + case CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS: + { + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = flushBuffers(); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + } + + case CameraAdapter::CAMERA_START_IMAGE_CAPTURE: + { + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + refTimestamp = ( struct timeval * ) value1; + if ( NULL != refTimestamp ) + { + memcpy( &mStartCapture, refTimestamp, sizeof( struct timeval )); + } + +#endif + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = takePicture(); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + } + + case CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE: + { + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = stopImageCapture(); + } + + if ( ret == NO_ERROR ) + { 
+ ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + } + + case CameraAdapter::CAMERA_START_BRACKET_CAPTURE: + { + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + refTimestamp = ( struct timeval * ) value2; + if ( NULL != refTimestamp ) + { + memcpy( &mStartCapture, refTimestamp, sizeof( struct timeval )); + } + +#endif + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = startBracketing(value1); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + } + + case CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE: + { + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = stopBracketing(); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + } + + case CameraAdapter::CAMERA_PERFORM_AUTOFOCUS: + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + refTimestamp = ( struct timeval * ) value1; + if ( NULL != refTimestamp ) + { + memcpy( &mStartFocus, refTimestamp, sizeof( struct timeval )); + } + +#endif + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = autoFocus(); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + case CameraAdapter::CAMERA_CANCEL_AUTOFOCUS: + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + ret = cancelAutoFocus(); + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + case CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW: + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + frame = ( CameraFrame * ) value1; + + if ( NULL != frame ) + { + ret = getFrameSize(frame->mWidth, frame->mHeight); + } + else + { + ret = -EINVAL; + } + } + + 
if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + case CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE: + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + frame = ( CameraFrame * ) value1; + + if ( NULL != frame ) + { + ret = getPictureBufferSize(frame->mLength, value2); + } + else + { + ret = -EINVAL; + } + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + case CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA: + + if ( ret == NO_ERROR ) + { + ret = setState(operation); + } + + if ( ret == NO_ERROR ) + { + frame = ( CameraFrame * ) value1; + + if ( NULL != frame ) + { + ret = getFrameDataSize(frame->mLength, value2); + } + else + { + ret = -EINVAL; + } + } + + if ( ret == NO_ERROR ) + { + ret = commitState(); + } + else + { + ret |= rollbackState(); + } + + break; + + case CameraAdapter::CAMERA_START_FD: + + ret = startFaceDetection(); + + break; + + case CameraAdapter::CAMERA_STOP_FD: + + ret = stopFaceDetection(); + + break; + + case CameraAdapter::CAMERA_SWITCH_TO_EXECUTING: + ret = switchToExecuting(); + break; + + default: + CAMHAL_LOGEB("Command 0x%x unsupported!", operation); + break; + }; + + LOG_FUNCTION_NAME_EXIT; + return ret; +} + +status_t BaseCameraAdapter::notifyFocusSubscribers(CameraHalEvent::FocusStatus status) +{ + event_callback eventCb; + CameraHalEvent focusEvent; + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( mFocusSubscribers.size() == 0 ) { + CAMHAL_LOGDA("No Focus Subscribers!"); + return NO_INIT; + } + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + if (status == CameraHalEvent::FOCUS_STATUS_PENDING) { + gettimeofday(&mStartFocus, NULL); + } else { + //dump the AF latency + CameraHal::PPM("Focus finished in: ", &mStartFocus); + } +#endif + + focusEvent.mEventData = new CameraHalEvent::CameraHalEventData(); + if ( NULL == 
focusEvent.mEventData.get() ) { + return -ENOMEM; + } + + focusEvent.mEventType = CameraHalEvent::EVENT_FOCUS_LOCKED; + focusEvent.mEventData->focusEvent.focusStatus = status; + + for (unsigned int i = 0 ; i < mFocusSubscribers.size(); i++ ) + { + focusEvent.mCookie = (void *) mFocusSubscribers.keyAt(i); + eventCb = (event_callback) mFocusSubscribers.valueAt(i); + eventCb ( &focusEvent ); + } + + focusEvent.mEventData.clear(); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::notifyShutterSubscribers() +{ + CameraHalEvent shutterEvent; + event_callback eventCb; + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( mShutterSubscribers.size() == 0 ) + { + CAMHAL_LOGEA("No shutter Subscribers!"); + return NO_INIT; + } + + shutterEvent.mEventData = new CameraHalEvent::CameraHalEventData(); + if ( NULL == shutterEvent.mEventData.get() ) { + return -ENOMEM; + } + + shutterEvent.mEventType = CameraHalEvent::EVENT_SHUTTER; + shutterEvent.mEventData->shutterEvent.shutterClosed = true; + + for (unsigned int i = 0 ; i < mShutterSubscribers.size() ; i++ ) { + shutterEvent.mCookie = ( void * ) mShutterSubscribers.keyAt(i); + eventCb = ( event_callback ) mShutterSubscribers.valueAt(i); + + CAMHAL_LOGDA("Sending shutter callback"); + + eventCb ( &shutterEvent ); + } + + shutterEvent.mEventData.clear(); + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t BaseCameraAdapter::notifyZoomSubscribers(int zoomIdx, bool targetReached) +{ + event_callback eventCb; + CameraHalEvent zoomEvent; + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( mZoomSubscribers.size() == 0 ) { + CAMHAL_LOGDA("No zoom Subscribers!"); + return NO_INIT; + } + + zoomEvent.mEventData = new CameraHalEvent::CameraHalEventData(); + if ( NULL == zoomEvent.mEventData.get() ) { + return -ENOMEM; + } + + zoomEvent.mEventType = CameraHalEvent::EVENT_ZOOM_INDEX_REACHED; + zoomEvent.mEventData->zoomEvent.currentZoomIndex = zoomIdx; + 
zoomEvent.mEventData->zoomEvent.targetZoomIndexReached = targetReached; + + for (unsigned int i = 0 ; i < mZoomSubscribers.size(); i++ ) { + zoomEvent.mCookie = (void *) mZoomSubscribers.keyAt(i); + eventCb = (event_callback) mZoomSubscribers.valueAt(i); + + eventCb ( &zoomEvent ); + } + + zoomEvent.mEventData.clear(); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::notifyFaceSubscribers(sp &faces) +{ + event_callback eventCb; + CameraHalEvent faceEvent; + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( mFaceSubscribers.size() == 0 ) { + CAMHAL_LOGDA("No face detection subscribers!"); + return NO_INIT; + } + + faceEvent.mEventData = new CameraHalEvent::CameraHalEventData(); + if ( NULL == faceEvent.mEventData.get() ) { + return -ENOMEM; + } + + faceEvent.mEventType = CameraHalEvent::EVENT_FACE; + faceEvent.mEventData->faceEvent = faces; + + for (unsigned int i = 0 ; i < mFaceSubscribers.size(); i++ ) { + faceEvent.mCookie = (void *) mFaceSubscribers.keyAt(i); + eventCb = (event_callback) mFaceSubscribers.valueAt(i); + + eventCb ( &faceEvent ); + } + + faceEvent.mEventData.clear(); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::sendFrameToSubscribers(CameraFrame *frame) +{ + status_t ret = NO_ERROR; + unsigned int mask; + + if ( NULL == frame ) + { + CAMHAL_LOGEA("Invalid CameraFrame"); + return -EINVAL; + } + + for( mask = 1; mask < CameraFrame::ALL_FRAMES; mask <<= 1){ + if( mask & frame->mFrameMask ){ + switch( mask ){ + + case CameraFrame::IMAGE_FRAME: + { +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + CameraHal::PPM("Shot to Jpeg: ", &mStartCapture); +#endif + ret = __sendFrameToSubscribers(frame, &mImageSubscribers, CameraFrame::IMAGE_FRAME); + } + break; + case CameraFrame::RAW_FRAME: + { + ret = __sendFrameToSubscribers(frame, &mRawSubscribers, CameraFrame::RAW_FRAME); + } + break; + case CameraFrame::PREVIEW_FRAME_SYNC: + { + ret = __sendFrameToSubscribers(frame, 
&mFrameSubscribers, CameraFrame::PREVIEW_FRAME_SYNC); + } + break; + case CameraFrame::SNAPSHOT_FRAME: + { + ret = __sendFrameToSubscribers(frame, &mFrameSubscribers, CameraFrame::SNAPSHOT_FRAME); + } + break; + case CameraFrame::VIDEO_FRAME_SYNC: + { + ret = __sendFrameToSubscribers(frame, &mVideoSubscribers, CameraFrame::VIDEO_FRAME_SYNC); + } + break; + case CameraFrame::FRAME_DATA_SYNC: + { + ret = __sendFrameToSubscribers(frame, &mFrameDataSubscribers, CameraFrame::FRAME_DATA_SYNC); + } + break; + default: + CAMHAL_LOGEB("FRAMETYPE NOT SUPPORTED 0x%x", mask); + break; + }//SWITCH + frame->mFrameMask &= ~mask; + + if (ret != NO_ERROR) { + goto EXIT; + } + }//IF + }//FOR + + EXIT: + return ret; +} + +status_t BaseCameraAdapter::__sendFrameToSubscribers(CameraFrame* frame, + KeyedVector *subscribers, + CameraFrame::FrameType frameType) +{ + size_t refCount = 0; + status_t ret = NO_ERROR; + frame_callback callback = NULL; + + frame->mFrameType = frameType; + + if ( (frameType == CameraFrame::PREVIEW_FRAME_SYNC) || + (frameType == CameraFrame::VIDEO_FRAME_SYNC) || + (frameType == CameraFrame::SNAPSHOT_FRAME) ){ + if (mFrameQueue.size() > 0){ + CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(frame->mBuffer); + frame->mYuv[0] = lframe->mYuv[0]; + frame->mYuv[1] = lframe->mYuv[1]; + } + else{ + CAMHAL_LOGDA("Empty Frame Queue"); + return -EINVAL; + } + } + + if (NULL != subscribers) { + refCount = getFrameRefCount(frame->mBuffer, frameType); + + if (refCount == 0) { + CAMHAL_LOGDA("Invalid ref count of 0"); + return -EINVAL; + } + + if (refCount > subscribers->size()) { + CAMHAL_LOGEB("Invalid ref count for frame type: 0x%x", frameType); + return -EINVAL; + } + + CAMHAL_LOGVB("Type of Frame: 0x%x address: 0x%x refCount start %d", + frame->mFrameType, + ( uint32_t ) frame->mBuffer, + refCount); + + for ( unsigned int i = 0 ; i < refCount; i++ ) { + frame->mCookie = ( void * ) subscribers->keyAt(i); + callback = (frame_callback) subscribers->valueAt(i); + + if 
(!callback) { + CAMHAL_LOGEB("callback not set for frame type: 0x%x", frameType); + return -EINVAL; + } + + callback(frame); + } + } else { + CAMHAL_LOGEA("Subscribers is null??"); + return -EINVAL; + } + + return ret; +} + +int BaseCameraAdapter::setInitFrameRefCount(void* buf, unsigned int mask) +{ + int ret = NO_ERROR; + unsigned int lmask; + + LOG_FUNCTION_NAME; + + if (buf == NULL) + { + return -EINVAL; + } + + for( lmask = 1; lmask < CameraFrame::ALL_FRAMES; lmask <<= 1){ + if( lmask & mask ){ + switch( lmask ){ + + case CameraFrame::IMAGE_FRAME: + { + setFrameRefCount(buf, CameraFrame::IMAGE_FRAME, (int) mImageSubscribers.size()); + } + break; + case CameraFrame::RAW_FRAME: + { + setFrameRefCount(buf, CameraFrame::RAW_FRAME, mRawSubscribers.size()); + } + break; + case CameraFrame::PREVIEW_FRAME_SYNC: + { + setFrameRefCount(buf, CameraFrame::PREVIEW_FRAME_SYNC, mFrameSubscribers.size()); + } + break; + case CameraFrame::SNAPSHOT_FRAME: + { + setFrameRefCount(buf, CameraFrame::SNAPSHOT_FRAME, mFrameSubscribers.size()); + } + break; + case CameraFrame::VIDEO_FRAME_SYNC: + { + setFrameRefCount(buf,CameraFrame::VIDEO_FRAME_SYNC, mVideoSubscribers.size()); + } + break; + case CameraFrame::FRAME_DATA_SYNC: + { + setFrameRefCount(buf, CameraFrame::FRAME_DATA_SYNC, mFrameDataSubscribers.size()); + } + break; + default: + CAMHAL_LOGEB("FRAMETYPE NOT SUPPORTED 0x%x", lmask); + break; + }//SWITCH + mask &= ~lmask; + }//IF + }//FOR + LOG_FUNCTION_NAME_EXIT; + return ret; +} + +int BaseCameraAdapter::getFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType) +{ + int res = -1; + + LOG_FUNCTION_NAME; + + switch ( frameType ) + { + case CameraFrame::IMAGE_FRAME: + case CameraFrame::RAW_FRAME: + { + Mutex::Autolock lock(mCaptureBufferLock); + res = mCaptureBuffersAvailable.valueFor( ( unsigned int ) frameBuf ); + } + break; + case CameraFrame::PREVIEW_FRAME_SYNC: + case CameraFrame::SNAPSHOT_FRAME: + { + Mutex::Autolock lock(mPreviewBufferLock); + res = 
mPreviewBuffersAvailable.valueFor( ( unsigned int ) frameBuf ); + } + break; + case CameraFrame::FRAME_DATA_SYNC: + { + Mutex::Autolock lock(mPreviewDataBufferLock); + res = mPreviewDataBuffersAvailable.valueFor( ( unsigned int ) frameBuf ); + } + break; + case CameraFrame::VIDEO_FRAME_SYNC: + { + Mutex::Autolock lock(mVideoBufferLock); + res = mVideoBuffersAvailable.valueFor( ( unsigned int ) frameBuf ); + } + break; + default: + break; + }; + + LOG_FUNCTION_NAME_EXIT; + + return res; +} + +void BaseCameraAdapter::setFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType, int refCount) +{ + + LOG_FUNCTION_NAME; + + switch ( frameType ) + { + case CameraFrame::IMAGE_FRAME: + case CameraFrame::RAW_FRAME: + { + Mutex::Autolock lock(mCaptureBufferLock); + mCaptureBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount); + } + break; + case CameraFrame::PREVIEW_FRAME_SYNC: + case CameraFrame::SNAPSHOT_FRAME: + { + Mutex::Autolock lock(mPreviewBufferLock); + mPreviewBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount); + } + break; + case CameraFrame::FRAME_DATA_SYNC: + { + Mutex::Autolock lock(mPreviewDataBufferLock); + mPreviewDataBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount); + } + break; + case CameraFrame::VIDEO_FRAME_SYNC: + { + Mutex::Autolock lock(mVideoBufferLock); + mVideoBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount); + } + break; + default: + break; + }; + + LOG_FUNCTION_NAME_EXIT; + +} + +status_t BaseCameraAdapter::startVideoCapture() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mVideoBufferLock); + + //If the capture is already ongoing, return from here. 
+ if ( mRecording ) + { + ret = NO_INIT; + } + + + if ( NO_ERROR == ret ) + { + + mVideoBuffersAvailable.clear(); + + for ( unsigned int i = 0 ; i < mPreviewBuffersAvailable.size() ; i++ ) + { + mVideoBuffersAvailable.add(mPreviewBuffersAvailable.keyAt(i), 0); + } + + mRecording = true; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::stopVideoCapture() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( !mRecording ) + { + ret = NO_INIT; + } + + if ( NO_ERROR == ret ) + { + for ( unsigned int i = 0 ; i < mVideoBuffersAvailable.size() ; i++ ) + { + void *frameBuf = ( void * ) mVideoBuffersAvailable.keyAt(i); + if( getFrameRefCount(frameBuf, CameraFrame::VIDEO_FRAME_SYNC) > 0) + { + returnFrame(frameBuf, CameraFrame::VIDEO_FRAME_SYNC); + } + } + + mRecording = false; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +//-----------------Stub implementation of the interface ------------------------------ + +status_t BaseCameraAdapter::takePicture() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::stopImageCapture() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::startBracketing(int range) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::stopBracketing() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::autoFocus() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + notifyFocusSubscribers(CameraHalEvent::FOCUS_STATUS_FAIL); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::cancelAutoFocus() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::startSmoothZoom(int targetIdx) +{ + status_t ret = 
NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::stopSmoothZoom() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::startPreview() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::stopPreview() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::getFrameSize(size_t &width, size_t &height) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::startFaceDetection() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::stopFaceDetection() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::switchToExecuting() +{ + status_t ret = NO_ERROR; + LOG_FUNCTION_NAME; + LOG_FUNCTION_NAME_EXIT; + return ret; +} + +status_t BaseCameraAdapter::setState(CameraCommands 
operation) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + mLock.lock(); + + switch ( mAdapterState ) + { + + case INTIALIZED_STATE: + + switch ( operation ) + { + + case CAMERA_USE_BUFFERS_PREVIEW: + CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->LOADED_PREVIEW_STATE event = 0x%x", + operation); + mNextState = LOADED_PREVIEW_STATE; + break; + + //These events don't change the current state + case CAMERA_QUERY_RESOLUTION_PREVIEW: + case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE: + case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA: + CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->INTIALIZED_STATE event = 0x%x", + operation); + mNextState = INTIALIZED_STATE; + break; + + case CAMERA_CANCEL_AUTOFOCUS: + case CAMERA_STOP_BRACKET_CAPTURE: + case CAMERA_STOP_IMAGE_CAPTURE: + ret = INVALID_OPERATION; + break; + + default: + CAMHAL_LOGEB("Adapter state switch INTIALIZED_STATE Invalid Op! event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case LOADED_PREVIEW_STATE: + + switch ( operation ) + { + + case CAMERA_START_PREVIEW: + CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->PREVIEW_STATE event = 0x%x", + operation); + mNextState = PREVIEW_STATE; + break; + + case CAMERA_STOP_PREVIEW: + CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->INTIALIZED_STATE event = 0x%x", + operation); + mNextState = INTIALIZED_STATE; + break; + + //These events don't change the current state + case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE: + case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA: + case CAMERA_USE_BUFFERS_PREVIEW_DATA: + CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->LOADED_PREVIEW_STATE event = 0x%x", + operation); + mNextState = LOADED_PREVIEW_STATE; + break; + + default: + CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW Invalid Op! 
event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case PREVIEW_STATE: + + switch ( operation ) + { + + case CAMERA_STOP_PREVIEW: + CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->INTIALIZED_STATE event = 0x%x", + operation); + mNextState = INTIALIZED_STATE; + break; + + case CAMERA_PERFORM_AUTOFOCUS: + CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->AF_STATE event = 0x%x", + operation); + mNextState = AF_STATE; + break; + + case CAMERA_START_SMOOTH_ZOOM: + CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->ZOOM_STATE event = 0x%x", + operation); + mNextState = ZOOM_STATE; + break; + + case CAMERA_USE_BUFFERS_IMAGE_CAPTURE: + CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->LOADED_CAPTURE_STATE event = 0x%x", + operation); + mNextState = LOADED_CAPTURE_STATE; + break; + + case CAMERA_START_VIDEO: + CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->VIDEO_STATE event = 0x%x", + operation); + mNextState = VIDEO_STATE; + break; + + case CAMERA_CANCEL_AUTOFOCUS: + case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE: + case CAMERA_STOP_SMOOTH_ZOOM: + CAMHAL_LOGDB("Adapter state switch PREVIEW_ACTIVE->PREVIEW_ACTIVE event = 0x%x", + operation); + mNextState = PREVIEW_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch PREVIEW_ACTIVE Invalid Op! event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case LOADED_CAPTURE_STATE: + + switch ( operation ) + { + + case CAMERA_START_IMAGE_CAPTURE: + CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = 0x%x", + operation); + mNextState = CAPTURE_STATE; + break; + + case CAMERA_START_BRACKET_CAPTURE: + CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->BRACKETING_STATE event = 0x%x", + operation); + mNextState = BRACKETING_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! 
event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case CAPTURE_STATE: + + switch ( operation ) + { + case CAMERA_STOP_IMAGE_CAPTURE: + case CAMERA_STOP_BRACKET_CAPTURE: + CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = 0x%x", + operation); + mNextState = PREVIEW_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case BRACKETING_STATE: + + switch ( operation ) + { + + case CAMERA_STOP_IMAGE_CAPTURE: + case CAMERA_STOP_BRACKET_CAPTURE: + CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->PREVIEW_STATE event = 0x%x", + operation); + mNextState = PREVIEW_STATE; + break; + + case CAMERA_START_IMAGE_CAPTURE: + CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->CAPTURE_STATE event = 0x%x", + operation); + mNextState = CAPTURE_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch BRACKETING_STATE Invalid Op! event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case AF_STATE: + + switch ( operation ) + { + + case CAMERA_CANCEL_AUTOFOCUS: + CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = 0x%x", + operation); + mNextState = PREVIEW_STATE; + break; + + case CAMERA_START_IMAGE_CAPTURE: + CAMHAL_LOGDB("Adapter state switch AF_STATE->CAPTURE_STATE event = 0x%x", + operation); + mNextState = CAPTURE_STATE; + break; + + case CAMERA_START_SMOOTH_ZOOM: + CAMHAL_LOGDB("Adapter state switch AF_STATE->AF_ZOOM_STATE event = 0x%x", + operation); + mNextState = AF_ZOOM_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch AF_STATE Invalid Op! 
event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case ZOOM_STATE: + + switch ( operation ) + { + + case CAMERA_CANCEL_AUTOFOCUS: + CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = 0x%x", + operation); + mNextState = ZOOM_STATE; + break; + + case CAMERA_STOP_SMOOTH_ZOOM: + CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->PREVIEW_STATE event = 0x%x", + operation); + mNextState = PREVIEW_STATE; + break; + + case CAMERA_PERFORM_AUTOFOCUS: + CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->AF_ZOOM_STATE event = 0x%x", + operation); + mNextState = AF_ZOOM_STATE; + break; + + case CAMERA_START_VIDEO: + CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->VIDEO_ZOOM_STATE event = 0x%x", + operation); + mNextState = VIDEO_ZOOM_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch ZOOM_STATE Invalid Op! event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case VIDEO_STATE: + + switch ( operation ) + { + + case CAMERA_STOP_VIDEO: + CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->PREVIEW_STATE event = 0x%x", + operation); + mNextState = PREVIEW_STATE; + break; + + case CAMERA_PERFORM_AUTOFOCUS: + CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_AF_STATE event = 0x%x", + operation); + mNextState = VIDEO_AF_STATE; + break; + + case CAMERA_START_SMOOTH_ZOOM: + CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_ZOOM_STATE event = 0x%x", + operation); + mNextState = VIDEO_ZOOM_STATE; + break; + + case CAMERA_USE_BUFFERS_IMAGE_CAPTURE: + CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_LOADED_CAPTURE_STATE event = 0x%x", + operation); + mNextState = VIDEO_LOADED_CAPTURE_STATE; + break; + + case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE: + CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_STATE event = 0x%x", + operation); + mNextState = VIDEO_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch VIDEO_STATE Invalid Op! 
event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case VIDEO_AF_STATE: + + switch ( operation ) + { + + case CAMERA_CANCEL_AUTOFOCUS: + CAMHAL_LOGDB("Adapter state switch VIDEO_AF_STATE->VIDEO_STATE event = 0x%x", + operation); + mNextState = VIDEO_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch VIDEO_AF_STATE Invalid Op! event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case VIDEO_LOADED_CAPTURE_STATE: + + switch ( operation ) + { + + case CAMERA_START_IMAGE_CAPTURE: + CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = 0x%x", + operation); + mNextState = VIDEO_CAPTURE_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case VIDEO_CAPTURE_STATE: + + switch ( operation ) + { + case CAMERA_STOP_IMAGE_CAPTURE: + CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = 0x%x", + operation); + mNextState = VIDEO_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case AF_ZOOM_STATE: + + switch ( operation ) + { + + case CAMERA_STOP_SMOOTH_ZOOM: + CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->AF_STATE event = 0x%x", + operation); + mNextState = AF_STATE; + break; + + case CAMERA_CANCEL_AUTOFOCUS: + CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->ZOOM_STATE event = 0x%x", + operation); + mNextState = ZOOM_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch AF_ZOOM_STATE Invalid Op! 
event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case VIDEO_ZOOM_STATE: + + switch ( operation ) + { + + case CAMERA_STOP_SMOOTH_ZOOM: + CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->VIDEO_STATE event = 0x%x", + operation); + mNextState = VIDEO_STATE; + break; + + case CAMERA_STOP_VIDEO: + CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->ZOOM_STATE event = 0x%x", + operation); + mNextState = ZOOM_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch VIDEO_ZOOM_STATE Invalid Op! event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + case BRACKETING_ZOOM_STATE: + + switch ( operation ) + { + + case CAMERA_STOP_SMOOTH_ZOOM: + CAMHAL_LOGDB("Adapter state switch BRACKETING_ZOOM_STATE->BRACKETING_STATE event = 0x%x", + operation); + mNextState = BRACKETING_STATE; + break; + + default: + CAMHAL_LOGEB("Adapter state switch BRACKETING_ZOOM_STATE Invalid Op! event = 0x%x", + operation); + ret = INVALID_OPERATION; + break; + + } + + break; + + default: + CAMHAL_LOGEA("Invalid Adapter state!"); + ret = INVALID_OPERATION; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::rollbackToInitializedState() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + while ((getState() != INTIALIZED_STATE) && (ret == NO_ERROR)) { + ret = rollbackToPreviousState(); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::rollbackToPreviousState() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + CameraAdapter::AdapterState currentState = getState(); + + switch (currentState) { + case INTIALIZED_STATE: + return NO_ERROR; + + case PREVIEW_STATE: + ret = sendCommand(CAMERA_STOP_PREVIEW); + break; + + case CAPTURE_STATE: + ret = sendCommand(CAMERA_STOP_IMAGE_CAPTURE); + break; + + case BRACKETING_STATE: + ret = sendCommand(CAMERA_STOP_BRACKET_CAPTURE); + break; + + case AF_STATE: + ret = sendCommand(CAMERA_CANCEL_AUTOFOCUS); + break; + + case 
ZOOM_STATE: + ret = sendCommand(CAMERA_STOP_SMOOTH_ZOOM); + break; + + case VIDEO_STATE: + ret = sendCommand(CAMERA_STOP_VIDEO); + break; + + case VIDEO_AF_STATE: + ret = sendCommand(CAMERA_CANCEL_AUTOFOCUS); + break; + + case VIDEO_CAPTURE_STATE: + ret = sendCommand(CAMERA_STOP_IMAGE_CAPTURE); + break; + + case AF_ZOOM_STATE: + ret = sendCommand(CAMERA_STOP_SMOOTH_ZOOM); + break; + + case VIDEO_ZOOM_STATE: + ret = sendCommand(CAMERA_STOP_SMOOTH_ZOOM); + break; + + case BRACKETING_ZOOM_STATE: + ret = sendCommand(CAMERA_STOP_SMOOTH_ZOOM); + break; + + default: + CAMHAL_LOGEA("Invalid Adapter state!"); + ret = INVALID_OPERATION; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +//State transition finished successfully. +//Commit the state and unlock the adapter state. +status_t BaseCameraAdapter::commitState() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + mAdapterState = mNextState; + + mLock.unlock(); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::rollbackState() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + mNextState = mAdapterState; + + mLock.unlock(); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +// getNextState() and getState() +// publicly exposed functions to retrieve the adapter states +// please notice that these functions are locked +CameraAdapter::AdapterState BaseCameraAdapter::getState() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mLock); + + LOG_FUNCTION_NAME_EXIT; + + return mAdapterState; +} + +CameraAdapter::AdapterState BaseCameraAdapter::getNextState() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mLock); + + LOG_FUNCTION_NAME_EXIT; + + return mNextState; +} + +// getNextState() and getState() +// internal protected functions to retrieve the adapter states +// please notice that these functions are NOT locked to help +// internal functions query state in the middle of state +// transition +status_t 
BaseCameraAdapter::getState(AdapterState &state) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + state = mAdapterState; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t BaseCameraAdapter::getNextState(AdapterState &state) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + state = mNextState; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +void BaseCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt) +{ + LOG_FUNCTION_NAME; + LOG_FUNCTION_NAME_EXIT; +} +//----------------------------------------------------------------------------- + + + +}; + +/*--------------------Camera Adapter Class ENDS here-----------------------------*/ + diff --git a/camera/CameraHal.cpp b/camera/CameraHal.cpp new file mode 100644 index 0000000..20e05c4 --- /dev/null +++ b/camera/CameraHal.cpp @@ -0,0 +1,3569 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file CameraHal.cpp +* +* This file maps the Camera Hardware Interface to V4L2. 
+* +*/ + +#define LOG_TAG "CameraHAL" + +#include "CameraHal.h" +#include "ANativeWindowDisplayAdapter.h" +#include "TICameraParameters.h" +#include "CameraProperties.h" +#include + +#include +#include + +namespace android { + +extern "C" CameraAdapter* CameraAdapter_Factory(size_t); + +/*****************************************************************************/ + +////Constant definitions and declarations +////@todo Have a CameraProperties class to store these parameters as constants for every camera +//// Currently, they are hard-coded + +const int CameraHal::NO_BUFFERS_PREVIEW = MAX_CAMERA_BUFFERS; +const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE = 2; + +const uint32_t MessageNotifier::EVENT_BIT_FIELD_POSITION = 0; +const uint32_t MessageNotifier::FRAME_BIT_FIELD_POSITION = 0; + +/******************************************************************************/ + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + +struct timeval CameraHal::mStartPreview; +struct timeval CameraHal::mStartFocus; +struct timeval CameraHal::mStartCapture; + +#endif + +static void orientation_cb(uint32_t orientation, uint32_t tilt, void* cookie) { + CameraHal *camera = NULL; + + if (cookie) { + camera = (CameraHal*) cookie; + camera->onOrientationEvent(orientation, tilt); + } + +} +/*-------------Camera Hal Interface Method definitions STARTS here--------------------*/ + +/** + Callback function to receive orientation events from SensorListener + */ +void CameraHal::onOrientationEvent(uint32_t orientation, uint32_t tilt) { + LOG_FUNCTION_NAME; + + if ( NULL != mCameraAdapter ) { + mCameraAdapter->onOrientationEvent(orientation, tilt); + } + + LOG_FUNCTION_NAME_EXIT; +} + +/** + @brief Set the notification and data callbacks + + @param[in] notify_cb Notify callback for notifying the app about events and errors + @param[in] data_cb Buffer callback for sending the preview/raw frames to the app + @param[in] data_cb_timestamp Buffer callback for sending the video frames w/ timestamp + 
@param[in] user Callback cookie + @return none + + */ +void CameraHal::setCallbacks(camera_notify_callback notify_cb, + camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void *user) +{ + LOG_FUNCTION_NAME; + + if ( NULL != mAppCallbackNotifier.get() ) + { + mAppCallbackNotifier->setCallbacks(this, + notify_cb, + data_cb, + data_cb_timestamp, + get_memory, + user); + } + + LOG_FUNCTION_NAME_EXIT; +} + +/** + @brief Enable a message, or set of messages. + + @param[in] msgtype Bitmask of the messages to enable (defined in include/ui/Camera.h) + @return none + + */ +void CameraHal::enableMsgType(int32_t msgType) +{ + LOG_FUNCTION_NAME; + + if ( ( msgType & CAMERA_MSG_SHUTTER ) && ( !mShutterEnabled ) ) + { + msgType &= ~CAMERA_MSG_SHUTTER; + } + + // ignoring enable focus message from camera service + // we will enable internally in autoFocus call + msgType &= ~(CAMERA_MSG_FOCUS | CAMERA_MSG_FOCUS_MOVE); + + { + Mutex::Autolock lock(mLock); + mMsgEnabled |= msgType; + } + + if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) + { + if(mDisplayPaused) + { + CAMHAL_LOGDA("Preview currently paused...will enable preview callback when restarted"); + msgType &= ~CAMERA_MSG_PREVIEW_FRAME; + }else + { + CAMHAL_LOGDA("Enabling Preview Callback"); + } + } + else + { + CAMHAL_LOGDB("Preview callback not enabled %x", msgType); + } + + + ///Configure app callback notifier with the message callback required + mAppCallbackNotifier->enableMsgType (msgType); + + LOG_FUNCTION_NAME_EXIT; +} + +/** + @brief Disable a message, or set of messages. 
+ + @param[in] msgtype Bitmask of the messages to disable (defined in include/ui/Camera.h) + @return none + + */ +void CameraHal::disableMsgType(int32_t msgType) +{ + LOG_FUNCTION_NAME; + + { + Mutex::Autolock lock(mLock); + mMsgEnabled &= ~msgType; + } + + if( msgType & CAMERA_MSG_PREVIEW_FRAME) + { + CAMHAL_LOGDA("Disabling Preview Callback"); + } + + ///Configure app callback notifier + mAppCallbackNotifier->disableMsgType (msgType); + + LOG_FUNCTION_NAME_EXIT; +} + +/** + @brief Query whether a message, or a set of messages, is enabled. + + Note that this is operates as an AND, if any of the messages queried are off, this will + return false. + + @param[in] msgtype Bitmask of the messages to query (defined in include/ui/Camera.h) + @return true If all message types are enabled + false If any message type + + */ +int CameraHal::msgTypeEnabled(int32_t msgType) +{ + LOG_FUNCTION_NAME; + Mutex::Autolock lock(mLock); + LOG_FUNCTION_NAME_EXIT; + return (mMsgEnabled & msgType); +} + +/** + @brief Set the camera parameters. + + @param[in] params Camera parameters to configure the camera + @return NO_ERROR + @todo Define error codes + + */ +int CameraHal::setParameters(const char* parameters) +{ + + LOG_FUNCTION_NAME; + + CameraParameters params; + + String8 str_params(parameters); + params.unflatten(str_params); + + LOG_FUNCTION_NAME_EXIT; + + return setParameters(params); +} + +/** + @brief Set the camera parameters. 
+ + @param[in] params Camera parameters to configure the camera + @return NO_ERROR + @todo Define error codes + + */ +int CameraHal::setParameters(const CameraParameters& params) +{ + + LOG_FUNCTION_NAME; + + int w, h; + int w_orig, h_orig; + int framerate,minframerate; + int maxFPS, minFPS; + const char *valstr = NULL; + int varint = 0; + status_t ret = NO_ERROR; + CameraParameters oldParams = mParameters; + // Needed for KEY_RECORDING_HINT + bool restartPreviewRequired = false; + bool updateRequired = false; + bool videoMode = false; + + { + Mutex::Autolock lock(mLock); + + ///Ensure that preview is not enabled when the below parameters are changed. + if(!previewEnabled()) + { + + CAMHAL_LOGDB("PreviewFormat %s", params.getPreviewFormat()); + + if ((valstr = params.getPreviewFormat()) != NULL) { + if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS))) { + mParameters.setPreviewFormat(valstr); + } else { + CAMHAL_LOGEB("Invalid preview format.Supported: %s", mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS)); + return BAD_VALUE; + } + } + + varint = params.getInt(TICameraParameters::KEY_VNF); + valstr = params.get(TICameraParameters::KEY_VNF); + if ( valstr != NULL ) { + if ( ( varint == 0 ) || ( varint == 1 ) ) { + CAMHAL_LOGDB("VNF set %s", valstr); + mParameters.set(TICameraParameters::KEY_VNF, varint); + } else { + CAMHAL_LOGEB("ERROR: Invalid VNF: %s", valstr); + return BAD_VALUE; + } + } + + if ((valstr = params.get(CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL) { + // make sure we support vstab...if we don't and application is trying to set + // vstab then return an error + if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED), + CameraParameters::TRUE) == 0) { + CAMHAL_LOGDB("VSTAB %s",valstr); + mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION, valstr); + } else if (strcmp(valstr, CameraParameters::TRUE) == 0) { + CAMHAL_LOGEB("ERROR: Invalid VSTAB: %s", valstr); 
+ return BAD_VALUE; + } else { + mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION, + CameraParameters::FALSE); + } + } + + + + if( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL) + { + CAMHAL_LOGDB("Capture mode set %s", valstr); + mParameters.set(TICameraParameters::KEY_CAP_MODE, valstr); + } + + if ((valstr = params.get(TICameraParameters::KEY_IPP)) != NULL) { + if (isParameterValid(valstr,mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES))) { + CAMHAL_LOGDB("IPP mode set %s", valstr); + mParameters.set(TICameraParameters::KEY_IPP, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid IPP mode: %s", valstr); + return BAD_VALUE; + } + } + +#ifdef OMAP_ENHANCEMENT + + if((valstr = params.get(TICameraParameters::KEY_S3D2D_PREVIEW)) != NULL) + { + CAMHAL_LOGDB("Stereo 3D->2D Preview mode is %s", params.get(TICameraParameters::KEY_S3D2D_PREVIEW)); + mParameters.set(TICameraParameters::KEY_S3D2D_PREVIEW, valstr); + } + + if((valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE)) != NULL) + { + CAMHAL_LOGDB("AutoConvergence mode is %s", params.get(TICameraParameters::KEY_AUTOCONVERGENCE)); + mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE, valstr); + } +#endif + + } + + params.getPreviewSize(&w, &h); + if (w == -1 && h == -1) { + CAMHAL_LOGEA("Unable to get preview size"); + return BAD_VALUE; + } + + int oldWidth, oldHeight; + mParameters.getPreviewSize(&oldWidth, &oldHeight); + +#ifdef OMAP_ENHANCEMENT + + int orientation =0; + if((valstr = params.get(TICameraParameters::KEY_SENSOR_ORIENTATION)) != NULL) + { + CAMHAL_LOGDB("Sensor Orientation is set to %s", params.get(TICameraParameters::KEY_SENSOR_ORIENTATION)); + mParameters.set(TICameraParameters::KEY_SENSOR_ORIENTATION, valstr); + orientation = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION); + } + + if(orientation ==90 || orientation ==270) + { + if ( !isResolutionValid(h,w, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES))) + { + 
CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h); + return BAD_VALUE; + } + else + { + mParameters.setPreviewSize(w, h); + mVideoWidth = w; + mVideoHeight = h; + } + } + else + { + if ( !isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES))) + { + CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h); + return BAD_VALUE; + } + else + { + mParameters.setPreviewSize(w, h); + } + } + + +#else + + if ( !isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES))) { + CAMHAL_LOGEB("Invalid preview resolution %d x %d", w, h); + return BAD_VALUE; + } else { + mParameters.setPreviewSize(w, h); + } + +#endif + + if ( ( oldWidth != w ) || ( oldHeight != h ) ) { + restartPreviewRequired |= true; + } + + CAMHAL_LOGDB("PreviewResolution by App %d x %d", w, h); + + // Handle RECORDING_HINT to Set/Reset Video Mode Parameters + valstr = params.get(CameraParameters::KEY_RECORDING_HINT); + if(valstr != NULL) + { + if(strcmp(valstr, CameraParameters::TRUE) == 0) + { + CAMHAL_LOGDB("Recording Hint is set to %s", valstr); + mParameters.set(CameraParameters::KEY_RECORDING_HINT, valstr); + videoMode = true; + int w, h; + + params.getPreviewSize(&w, &h); + CAMHAL_LOGVB("%s Preview Width=%d Height=%d\n", __FUNCTION__, w, h); + //HACK FOR MMS + mVideoWidth = w; + mVideoHeight = h; + CAMHAL_LOGVB("%s Video Width=%d Height=%d\n", __FUNCTION__, mVideoWidth, mVideoHeight); + + setPreferredPreviewRes(w, h); + mParameters.getPreviewSize(&w, &h); + CAMHAL_LOGVB("%s Preview Width=%d Height=%d\n", __FUNCTION__, w, h); + //Avoid restarting preview for MMS HACK + if ((w != mVideoWidth) && (h != mVideoHeight)) + { + restartPreviewRequired = false; + } + + restartPreviewRequired |= setVideoModeParameters(params); + } + else if(strcmp(valstr, CameraParameters::FALSE) == 0) + { + CAMHAL_LOGDB("Recording Hint is set to %s", valstr); + mParameters.set(CameraParameters::KEY_RECORDING_HINT, valstr); + restartPreviewRequired |= 
resetVideoModeParameters(); + params.getPreviewSize(&mVideoWidth, &mVideoHeight); + } + else + { + CAMHAL_LOGEA("Invalid RECORDING_HINT"); + return BAD_VALUE; + } + } + else + { + // This check is required in following case. + // If VideoRecording activity sets KEY_RECORDING_HINT to TRUE and + // ImageCapture activity doesnot set KEY_RECORDING_HINT to FALSE (i.e. simply NULL), + // then Video Mode parameters may remain present in ImageCapture activity as well. + CAMHAL_LOGDA("Recording Hint is set to NULL"); + mParameters.set(CameraParameters::KEY_RECORDING_HINT, ""); + restartPreviewRequired |= resetVideoModeParameters(); + params.getPreviewSize(&mVideoWidth, &mVideoHeight); + } + + if ((valstr = params.get(CameraParameters::KEY_FOCUS_MODE)) != NULL) { + if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES))) { + CAMHAL_LOGDB("Focus mode set %s", valstr); + + // we need to take a decision on the capture mode based on whether CAF picture or + // video is chosen so the behavior of each is consistent to the application + if(strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0){ + restartPreviewRequired |= resetVideoModeParameters(); + } else if (strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) == 0){ + restartPreviewRequired |= setVideoModeParameters(params); + } + + mParameters.set(CameraParameters::KEY_FOCUS_MODE, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid FOCUS mode = %s", valstr); + return BAD_VALUE; + } + } + + ///Below parameters can be changed when the preview is running + if ( (valstr = params.getPictureFormat()) != NULL ) { + if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS))) { + mParameters.setPictureFormat(valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid picture format: %s",valstr); + return BAD_VALUE; + } + } + + params.getPictureSize(&w, &h); + if ( isResolutionValid(w, h, 
mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES))) { + mParameters.setPictureSize(w, h); + } else { + CAMHAL_LOGEB("ERROR: Invalid picture resolution %dx%d", w, h); + return BAD_VALUE; + } + + CAMHAL_LOGDB("Picture Size by App %d x %d", w, h); + +#ifdef OMAP_ENHANCEMENT + + if ((valstr = params.get(TICameraParameters::KEY_BURST)) != NULL) { + if (params.getInt(TICameraParameters::KEY_BURST) >=0) { + CAMHAL_LOGDB("Burst set %s", valstr); + mParameters.set(TICameraParameters::KEY_BURST, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid Burst value: %s",valstr); + return BAD_VALUE; + } + } + +#endif + + framerate = params.getPreviewFrameRate(); + valstr = params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE); + CAMHAL_LOGDB("FRAMERATE %d", framerate); + + CAMHAL_LOGVB("Passed FRR: %s, Supported FRR %s", valstr + , mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED)); + CAMHAL_LOGVB("Passed FR: %d, Supported FR %s", framerate + , mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES)); + + + //Perform parameter validation + if(!isParameterValid(valstr + , mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED)) + || !isParameterValid(framerate, + mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES))) + { + CAMHAL_LOGEA("Invalid frame rate range or frame rate"); + return BAD_VALUE; + } + + // Variable framerate ranges have higher priority over + // deprecated constant FPS. "KEY_PREVIEW_FPS_RANGE" should + // be cleared by the client in order for constant FPS to get + // applied. 
+ if ( strcmp(valstr, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE)) != 0) + { + // APP wants to set FPS range + //Set framerate = MAXFPS + CAMHAL_LOGDA("APP IS CHANGING FRAME RATE RANGE"); + params.getPreviewFpsRange(&minFPS, &maxFPS); + + if ( ( 0 > minFPS ) || ( 0 > maxFPS ) ) + { + CAMHAL_LOGEA("ERROR: FPS Range is negative!"); + return BAD_VALUE; + } + + framerate = maxFPS /CameraHal::VFR_SCALE; + + } + else + { + if ( framerate != atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE)) ) + { + + selectFPSRange(framerate, &minFPS, &maxFPS); + CAMHAL_LOGDB("Select FPS Range %d %d", minFPS, maxFPS); + } + else + { + if (videoMode) { + valstr = mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_VIDEO); + CameraParameters temp; + temp.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr); + temp.getPreviewFpsRange(&minFPS, &maxFPS); + } + else { + valstr = mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_IMAGE); + CameraParameters temp; + temp.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr); + temp.getPreviewFpsRange(&minFPS, &maxFPS); + } + + framerate = maxFPS / CameraHal::VFR_SCALE; + } + + } + + CAMHAL_LOGDB("FPS Range = %s", valstr); + CAMHAL_LOGDB("DEFAULT FPS Range = %s", mCameraProperties->get(CameraProperties::FRAMERATE_RANGE)); + + minFPS /= CameraHal::VFR_SCALE; + maxFPS /= CameraHal::VFR_SCALE; + + if ( ( 0 == minFPS ) || ( 0 == maxFPS ) ) + { + CAMHAL_LOGEA("ERROR: FPS Range is invalid!"); + return BAD_VALUE; + } + + if ( maxFPS < minFPS ) + { + CAMHAL_LOGEA("ERROR: Max FPS is smaller than Min FPS!"); + return BAD_VALUE; + } + CAMHAL_LOGDB("SET FRAMERATE %d", framerate); + mParameters.setPreviewFrameRate(framerate); + mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE)); + + CAMHAL_LOGDB("FPS Range [%d, %d]", minFPS, maxFPS); + mParameters.set(TICameraParameters::KEY_MINFRAMERATE, minFPS); + mParameters.set(TICameraParameters::KEY_MAXFRAMERATE, maxFPS); + 
+ if( ( valstr = params.get(TICameraParameters::KEY_GBCE) ) != NULL ) + { + CAMHAL_LOGDB("GBCE Value = %s", valstr); + mParameters.set(TICameraParameters::KEY_GBCE, valstr); + } + + if( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL ) + { + CAMHAL_LOGDB("GLBCE Value = %s", valstr); + mParameters.set(TICameraParameters::KEY_GLBCE, valstr); + } + +#ifdef OMAP_ENHANCEMENT + + ///Update the current parameter set + if( (valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE)) != NULL) + { + CAMHAL_LOGDB("AutoConvergence Mode is set = %s", params.get(TICameraParameters::KEY_AUTOCONVERGENCE)); + mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE, valstr); + } + + if( (valstr = params.get(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES)) !=NULL ) + { + CAMHAL_LOGDB("ManualConvergence Value = %s", params.get(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES)); + mParameters.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, valstr); + } + + if ((valstr = params.get(TICameraParameters::KEY_EXPOSURE_MODE)) != NULL) { + if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES))) { + CAMHAL_LOGDB("Exposure set = %s", valstr); + mParameters.set(TICameraParameters::KEY_EXPOSURE_MODE, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid Exposure = %s", valstr); + return BAD_VALUE; + } + } + +#endif + + if ((valstr = params.get(CameraParameters::KEY_WHITE_BALANCE)) != NULL) { + if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE))) { + CAMHAL_LOGDB("White balance set %s", valstr); + mParameters.set(CameraParameters::KEY_WHITE_BALANCE, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid white balance = %s", valstr); + return BAD_VALUE; + } + } + +#ifdef OMAP_ENHANCEMENT + + if ((valstr = params.get(TICameraParameters::KEY_CONTRAST)) != NULL) { + if (params.getInt(TICameraParameters::KEY_CONTRAST) >= 0 ) { + CAMHAL_LOGDB("Contrast set %s", valstr); + 
mParameters.set(TICameraParameters::KEY_CONTRAST, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid Contrast = %s", valstr); + return BAD_VALUE; + } + } + + if ((valstr =params.get(TICameraParameters::KEY_SHARPNESS)) != NULL) { + if (params.getInt(TICameraParameters::KEY_SHARPNESS) >= 0 ) { + CAMHAL_LOGDB("Sharpness set %s", valstr); + mParameters.set(TICameraParameters::KEY_SHARPNESS, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid Sharpness = %s", valstr); + return BAD_VALUE; + } + } + + if ((valstr = params.get(TICameraParameters::KEY_SATURATION)) != NULL) { + if (params.getInt(TICameraParameters::KEY_SATURATION) >= 0 ) { + CAMHAL_LOGDB("Saturation set %s", valstr); + mParameters.set(TICameraParameters::KEY_SATURATION, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid Saturation = %s", valstr); + return BAD_VALUE; + } + } + + if ((valstr = params.get(TICameraParameters::KEY_BRIGHTNESS)) != NULL) { + if (params.getInt(TICameraParameters::KEY_BRIGHTNESS) >= 0 ) { + CAMHAL_LOGDB("Brightness set %s", valstr); + mParameters.set(TICameraParameters::KEY_BRIGHTNESS, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid Brightness = %s", valstr); + return BAD_VALUE; + } + } + +#endif + + if ((valstr = params.get(CameraParameters::KEY_ANTIBANDING)) != NULL) { + if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING))) { + CAMHAL_LOGDB("Antibanding set %s", valstr); + mParameters.set(CameraParameters::KEY_ANTIBANDING, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid Antibanding = %s", valstr); + return BAD_VALUE; + } + } + +#ifdef OMAP_ENHANCEMENT + + if ((valstr = params.get(TICameraParameters::KEY_ISO)) != NULL) { + if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES))) { + CAMHAL_LOGDB("ISO set %s", valstr); + mParameters.set(TICameraParameters::KEY_ISO, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid ISO = %s", valstr); + return BAD_VALUE; + } + } + +#endif + + if( (valstr = 
params.get(CameraParameters::KEY_FOCUS_AREAS)) != NULL ) + { + CAMHAL_LOGDB("Focus areas position set %s",valstr); + mParameters.set(CameraParameters::KEY_FOCUS_AREAS, valstr); + } + +#ifdef OMAP_ENHANCEMENT + + if( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL ) + { + CAMHAL_LOGDB("Measurements set to %s", params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)); + mParameters.set(TICameraParameters::KEY_MEASUREMENT_ENABLE, valstr); + + if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_ENABLE) == 0) + { + mMeasurementEnabled = true; + } + else if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_DISABLE) == 0) + { + mMeasurementEnabled = false; + } + else + { + mMeasurementEnabled = false; + } + + } + +#endif + + if( (valstr = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION)) != NULL) + { + CAMHAL_LOGDB("Exposure compensation set %s", valstr); + mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, valstr); + } + + if ((valstr = params.get(CameraParameters::KEY_SCENE_MODE)) != NULL) { + if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES))) { + CAMHAL_LOGDB("Scene mode set %s", valstr); + doesSetParameterNeedUpdate(valstr, + mParameters.get(CameraParameters::KEY_SCENE_MODE), + updateRequired); + mParameters.set(CameraParameters::KEY_SCENE_MODE, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid Scene mode = %s", valstr); + return BAD_VALUE; + } + } + + if ((valstr = params.get(CameraParameters::KEY_FLASH_MODE)) != NULL) { + if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES))) { + CAMHAL_LOGDB("Flash mode set %s", valstr); + mParameters.set(CameraParameters::KEY_FLASH_MODE, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid Flash mode = %s", valstr); + return BAD_VALUE; + } + } + + if ((valstr = params.get(CameraParameters::KEY_EFFECT)) != NULL) { + if (isParameterValid(valstr, 
mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS))) { + CAMHAL_LOGDB("Effect set %s", valstr); + mParameters.set(CameraParameters::KEY_EFFECT, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid Effect = %s", valstr); + return BAD_VALUE; + } + } + + varint = params.getInt(CameraParameters::KEY_ROTATION); + if( varint >=0 ) + { + CAMHAL_LOGDB("Rotation set %d", varint); + mParameters.set(CameraParameters::KEY_ROTATION, varint); + } + + varint = params.getInt(CameraParameters::KEY_JPEG_QUALITY); + if( varint >= 0 ) + { + CAMHAL_LOGDB("Jpeg quality set %d", varint); + mParameters.set(CameraParameters::KEY_JPEG_QUALITY, varint); + } + + varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH); + if( varint >=0 ) + { + CAMHAL_LOGDB("Thumbnail width set %d", varint); + mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, varint); + } + + varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT); + if( varint >=0 ) + { + CAMHAL_LOGDB("Thumbnail width set %d", varint); + mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, varint); + } + + varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY); + if( varint >=0 ) + { + CAMHAL_LOGDB("Thumbnail quality set %d", varint); + mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, varint); + } + + if( (valstr = params.get(CameraParameters::KEY_GPS_LATITUDE)) != NULL ) + { + CAMHAL_LOGDB("GPS latitude set %s", valstr); + mParameters.set(CameraParameters::KEY_GPS_LATITUDE, valstr); + }else{ + mParameters.remove(CameraParameters::KEY_GPS_LATITUDE); + } + + if( (valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE)) != NULL ) + { + CAMHAL_LOGDB("GPS longitude set %s", valstr); + mParameters.set(CameraParameters::KEY_GPS_LONGITUDE, valstr); + }else{ + mParameters.remove(CameraParameters::KEY_GPS_LONGITUDE); + } + + if( (valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE)) != NULL ) + { + CAMHAL_LOGDB("GPS altitude set %s", valstr); + 
mParameters.set(CameraParameters::KEY_GPS_ALTITUDE, valstr); + }else{ + mParameters.remove(CameraParameters::KEY_GPS_ALTITUDE); + } + + if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL ) + { + CAMHAL_LOGDB("GPS timestamp set %s", valstr); + mParameters.set(CameraParameters::KEY_GPS_TIMESTAMP, valstr); + }else{ + mParameters.remove(CameraParameters::KEY_GPS_TIMESTAMP); + } + + if( (valstr = params.get(TICameraParameters::KEY_GPS_DATESTAMP)) != NULL ) + { + CAMHAL_LOGDB("GPS datestamp set %s", valstr); + mParameters.set(TICameraParameters::KEY_GPS_DATESTAMP, valstr); + }else{ + mParameters.remove(TICameraParameters::KEY_GPS_DATESTAMP); + } + + if( (valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD)) != NULL ) + { + CAMHAL_LOGDB("GPS processing method set %s", valstr); + mParameters.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, valstr); + }else{ + mParameters.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD); + } + + if( (valstr = params.get(TICameraParameters::KEY_GPS_MAPDATUM )) != NULL ) + { + CAMHAL_LOGDB("GPS MAPDATUM set %s", valstr); + mParameters.set(TICameraParameters::KEY_GPS_MAPDATUM, valstr); + }else{ + mParameters.remove(TICameraParameters::KEY_GPS_MAPDATUM); + } + + if( (valstr = params.get(TICameraParameters::KEY_GPS_VERSION)) != NULL ) + { + CAMHAL_LOGDB("GPS MAPDATUM set %s", valstr); + mParameters.set(TICameraParameters::KEY_GPS_VERSION, valstr); + }else{ + mParameters.remove(TICameraParameters::KEY_GPS_VERSION); + } + + if( (valstr = params.get(TICameraParameters::KEY_EXIF_MODEL)) != NULL ) + { + CAMHAL_LOGDB("EXIF Model set %s", valstr); + mParameters.set(TICameraParameters::KEY_EXIF_MODEL, valstr); + } + + if( (valstr = params.get(TICameraParameters::KEY_EXIF_MAKE)) != NULL ) + { + CAMHAL_LOGDB("EXIF Make set %s", valstr); + mParameters.set(TICameraParameters::KEY_EXIF_MAKE, valstr); + } + +#ifdef OMAP_ENHANCEMENT + + if( (valstr = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE)) != NULL ) + { + 
CAMHAL_LOGDB("Exposure Bracketing set %s", params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE)); + mParameters.set(TICameraParameters::KEY_EXP_BRACKETING_RANGE, valstr); + } + else + { + mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE); + } + +#endif + + valstr = params.get(CameraParameters::KEY_ZOOM); + varint = params.getInt(CameraParameters::KEY_ZOOM); + if ( valstr != NULL ) { + if ( ( varint >= 0 ) && ( varint <= mMaxZoomSupported ) ) { + CAMHAL_LOGDB("Zoom set %s", valstr); + doesSetParameterNeedUpdate(valstr, + mParameters.get(CameraParameters::KEY_ZOOM), + updateRequired); + mParameters.set(CameraParameters::KEY_ZOOM, valstr); + } else { + CAMHAL_LOGEB("ERROR: Invalid Zoom: %s", valstr); + return BAD_VALUE; + } + } + + if( (valstr = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK)) != NULL ) + { + CAMHAL_LOGDB("Auto Exposure Lock set %s", valstr); + doesSetParameterNeedUpdate(valstr, + mParameters.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK), + updateRequired); + mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, valstr); + } + + if( (valstr = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)) != NULL ) + { + CAMHAL_LOGDB("Auto WhiteBalance Lock set %s", valstr); + doesSetParameterNeedUpdate(valstr, + mParameters.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK), + updateRequired); + mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, valstr); + } + if( (valstr = params.get(CameraParameters::KEY_METERING_AREAS)) != NULL ) + { + CAMHAL_LOGDB("Metering areas position set %s", valstr); + mParameters.set(CameraParameters::KEY_METERING_AREAS, valstr); + } + + // Only send parameters to adapter if preview is already + // enabled or doesSetParameterNeedUpdate says so. 
Initial setParameters to camera adapter, + // will be called in startPreview() + // TODO(XXX): Need to identify other parameters that need update from camera adapter + if ( (NULL != mCameraAdapter) && (mPreviewEnabled || updateRequired) ) { + ret |= mCameraAdapter->setParameters(mParameters); + } + +#ifdef OMAP_ENHANCEMENT + + if( NULL != params.get(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS) ) + { + int posBracketRange = params.getInt(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS); + if ( 0 < posBracketRange ) + { + mBracketRangePositive = posBracketRange; + } + } + CAMHAL_LOGDB("Positive bracketing range %d", mBracketRangePositive); + + + if( NULL != params.get(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG) ) + { + int negBracketRange = params.getInt(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG); + if ( 0 < negBracketRange ) + { + mBracketRangeNegative = negBracketRange; + } + } + CAMHAL_LOGDB("Negative bracketing range %d", mBracketRangeNegative); + + if( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL) && + ( strcmp(valstr, TICameraParameters::BRACKET_ENABLE) == 0 )) + { + if ( !mBracketingEnabled ) + { + CAMHAL_LOGDA("Enabling bracketing"); + mBracketingEnabled = true; + + //Wait for AF events to enable bracketing + if ( NULL != mCameraAdapter ) + { + setEventProvider( CameraHalEvent::ALL_EVENTS, mCameraAdapter ); + } + } + else + { + CAMHAL_LOGDA("Bracketing already enabled"); + } + } + else if ( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL ) && + ( strcmp(valstr, TICameraParameters::BRACKET_DISABLE) == 0 )) + { + CAMHAL_LOGDA("Disabling bracketing"); + + mBracketingEnabled = false; + stopImageBracketing(); + + //Remove AF events subscription + if ( NULL != mEventProvider ) + { + mEventProvider->disableEventNotification( CameraHalEvent::ALL_EVENTS ); + delete mEventProvider; + mEventProvider = NULL; + } + + } + + if( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL 
) && + ( strcmp(valstr, TICameraParameters::SHUTTER_ENABLE) == 0 )) + { + CAMHAL_LOGDA("Enabling shutter sound"); + + mShutterEnabled = true; + mMsgEnabled |= CAMERA_MSG_SHUTTER; + mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr); + } + else if ( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) && + ( strcmp(valstr, TICameraParameters::SHUTTER_DISABLE) == 0 )) + { + CAMHAL_LOGDA("Disabling shutter sound"); + + mShutterEnabled = false; + mMsgEnabled &= ~CAMERA_MSG_SHUTTER; + mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr); + } + +#endif + + } + + //On fail restore old parameters + if ( NO_ERROR != ret ) { + mParameters = oldParams; + } + + // Restart Preview if needed by KEY_RECODING_HINT only if preview is already running. + // If preview is not started yet, Video Mode parameters will take effect on next startPreview() + if (restartPreviewRequired && previewEnabled() && !mRecordingEnabled) { + CAMHAL_LOGDA("Restarting Preview"); + ret = restartPreview(); + } else if (restartPreviewRequired && !previewEnabled() && + mDisplayPaused && !mRecordingEnabled) { + CAMHAL_LOGDA("Stopping Preview"); + forceStopPreview(); + } + + if (ret != NO_ERROR) + { + CAMHAL_LOGEA("Failed to restart Preview"); + return ret; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t CameraHal::allocPreviewBufs(int width, int height, const char* previewFormat, + unsigned int buffercount, unsigned int &max_queueable) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if(mDisplayAdapter.get() == NULL) + { + // Memory allocation of preview buffers is now placed in gralloc + // CameraHal should not allocate preview buffers without DisplayAdapter + return NO_MEMORY; + } + + if(!mPreviewBufs) + { + ///@todo Pluralise the name of this method to allocateBuffers + mPreviewLength = 0; + mPreviewBufs = (int32_t *) mDisplayAdapter->allocateBuffer(width, height, + previewFormat, + mPreviewLength, + buffercount); + + if (NULL == 
mPreviewBufs ) { + CAMHAL_LOGEA("Couldn't allocate preview buffers"); + return NO_MEMORY; + } + + mPreviewOffsets = (uint32_t *) mDisplayAdapter->getOffsets(); + if ( NULL == mPreviewOffsets ) { + CAMHAL_LOGEA("Buffer mapping failed"); + return BAD_VALUE; + } + + mPreviewFd = mDisplayAdapter->getFd(); + if ( -1 == mPreviewFd ) { + CAMHAL_LOGEA("Invalid handle"); + return BAD_VALUE; + } + + mBufProvider = (BufferProvider*) mDisplayAdapter.get(); + + ret = mDisplayAdapter->maxQueueableBuffers(max_queueable); + if (ret != NO_ERROR) { + return ret; + } + + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; + +} + +status_t CameraHal::freePreviewBufs() +{ + status_t ret = NO_ERROR; + LOG_FUNCTION_NAME; + + CAMHAL_LOGDB("mPreviewBufs = 0x%x", (unsigned int)mPreviewBufs); + if(mPreviewBufs) + { + ///@todo Pluralise the name of this method to freeBuffers + ret = mBufProvider->freeBuffer(mPreviewBufs); + mPreviewBufs = NULL; + LOG_FUNCTION_NAME_EXIT; + return ret; + } + LOG_FUNCTION_NAME_EXIT; + return ret; +} + + +status_t CameraHal::allocPreviewDataBufs(size_t size, size_t bufferCount) +{ + status_t ret = NO_ERROR; + int bytes; + + LOG_FUNCTION_NAME; + + bytes = size; + + if ( NO_ERROR == ret ) + { + if( NULL != mPreviewDataBufs ) + { + ret = freePreviewDataBufs(); + } + } + + if ( NO_ERROR == ret ) + { + bytes = ((bytes+4095)/4096)*4096; + mPreviewDataBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, NULL, bytes, bufferCount); + + CAMHAL_LOGDB("Size of Preview data buffer = %d", bytes); + if( NULL == mPreviewDataBufs ) + { + CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager"); + ret = -NO_MEMORY; + } + else + { + bytes = size; + } + } + + if ( NO_ERROR == ret ) + { + mPreviewDataFd = mMemoryManager->getFd(); + mPreviewDataLength = bytes; + mPreviewDataOffsets = mMemoryManager->getOffsets(); + } + else + { + mPreviewDataFd = -1; + mPreviewDataLength = 0; + mPreviewDataOffsets = NULL; + } + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t 
CameraHal::freePreviewDataBufs() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( NO_ERROR == ret ) + { + + if( NULL != mPreviewDataBufs ) + { + + ///@todo Pluralise the name of this method to freeBuffers + ret = mMemoryManager->freeBuffer(mPreviewDataBufs); + mPreviewDataBufs = NULL; + + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size_t size, const char* previewFormat, unsigned int bufferCount) +{ + status_t ret = NO_ERROR; + int bytes; + + LOG_FUNCTION_NAME; + + bytes = size; + + // allocate image buffers only if not already allocated + if(NULL != mImageBufs) { + return NO_ERROR; + } + + if ( NO_ERROR == ret ) + { + bytes = ((bytes+4095)/4096)*4096; + mImageBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, previewFormat, bytes, bufferCount); + + CAMHAL_LOGDB("Size of Image cap buffer = %d", bytes); + if( NULL == mImageBufs ) + { + CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager"); + ret = -NO_MEMORY; + } + else + { + bytes = size; + } + } + + if ( NO_ERROR == ret ) + { + mImageFd = mMemoryManager->getFd(); + mImageLength = bytes; + mImageOffsets = mMemoryManager->getOffsets(); + } + else + { + mImageFd = -1; + mImageLength = 0; + mImageOffsets = NULL; + } + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t CameraHal::allocVideoBufs(uint32_t width, uint32_t height, uint32_t bufferCount) +{ + status_t ret = NO_ERROR; + LOG_FUNCTION_NAME; + + if( NULL != mVideoBufs ){ + ret = freeVideoBufs(mVideoBufs); + mVideoBufs = NULL; + } + + if ( NO_ERROR == ret ){ + int32_t stride; + buffer_handle_t *bufsArr = new buffer_handle_t [bufferCount]; + + if (bufsArr != NULL){ + for (int i = 0; i< bufferCount; i++){ + GraphicBufferAllocator &GrallocAlloc = GraphicBufferAllocator::get(); + buffer_handle_t buf; + ret = GrallocAlloc.alloc(width, height, HAL_PIXEL_FORMAT_NV12, CAMHAL_GRALLOC_USAGE, &buf, &stride); + if (ret != NO_ERROR){ + 
CAMHAL_LOGEA("Couldn't allocate video buffers using Gralloc"); + ret = -NO_MEMORY; + for (int j=0; j< i; j++){ + buf = (buffer_handle_t)bufsArr[j]; + CAMHAL_LOGEB("Freeing Gralloc Buffer 0x%x", buf); + GrallocAlloc.free(buf); + } + delete [] bufsArr; + goto exit; + } + bufsArr[i] = buf; + CAMHAL_LOGVB("*** Gralloc Handle =0x%x ***", buf); + } + + mVideoBufs = (int32_t *)bufsArr; + } + else{ + CAMHAL_LOGEA("Couldn't allocate video buffers "); + ret = -NO_MEMORY; + } + } + + exit: + LOG_FUNCTION_NAME; + + return ret; +} + +void endImageCapture( void *userData) +{ + LOG_FUNCTION_NAME; + + if ( NULL != userData ) + { + CameraHal *c = reinterpret_cast(userData); + c->signalEndImageCapture(); + } + + LOG_FUNCTION_NAME_EXIT; +} + +void releaseImageBuffers(void *userData) +{ + LOG_FUNCTION_NAME; + + if (NULL != userData) { + CameraHal *c = reinterpret_cast(userData); + c->freeImageBufs(); + } + + LOG_FUNCTION_NAME_EXIT; +} + +status_t CameraHal::signalEndImageCapture() +{ + status_t ret = NO_ERROR; + int w,h; + CameraParameters adapterParams = mParameters; + Mutex::Autolock lock(mLock); + + LOG_FUNCTION_NAME; + + if ( mBracketingRunning ) { + stopImageBracketing(); + } else { + mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t CameraHal::freeImageBufs() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( NO_ERROR == ret ) + { + + if( NULL != mImageBufs ) + { + + ///@todo Pluralise the name of this method to freeBuffers + ret = mMemoryManager->freeBuffer(mImageBufs); + mImageBufs = NULL; + + } + else + { + ret = -EINVAL; + } + + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t CameraHal::freeVideoBufs(void *bufs) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + buffer_handle_t *pBuf = (buffer_handle_t*)bufs; + int count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS)); + if(pBuf == NULL) + { + CAMHAL_LOGEA("NULL pointer passed to 
freeVideoBuffer"); + LOG_FUNCTION_NAME_EXIT; + return BAD_VALUE; + } + + GraphicBufferAllocator &GrallocAlloc = GraphicBufferAllocator::get(); + + for(int i = 0; i < count; i++){ + buffer_handle_t ptr = *pBuf++; + CAMHAL_LOGVB("Free Video Gralloc Handle 0x%x", ptr); + GrallocAlloc.free(ptr); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/** + @brief Start preview mode. + + @param none + @return NO_ERROR Camera switched to VF mode + @todo Update function header with the different errors that are possible + + */ +status_t CameraHal::startPreview() +{ + + status_t ret = NO_ERROR; + CameraAdapter::BuffersDescriptor desc; + CameraFrame frame; + const char *valstr = NULL; + unsigned int required_buffer_count; + unsigned int max_queueble_buffers; + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + gettimeofday(&mStartPreview, NULL); +#endif + + LOG_FUNCTION_NAME; + + if ( mPreviewEnabled ){ + CAMHAL_LOGDA("Preview already running"); + LOG_FUNCTION_NAME_EXIT; + return ALREADY_EXISTS; + } + + if ( NULL != mCameraAdapter ) { + ret = mCameraAdapter->setParameters(mParameters); + } + + if ((mPreviewStartInProgress == false) && (mDisplayPaused == false)){ + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW,( int ) &frame); + if ( NO_ERROR != ret ){ + CAMHAL_LOGEB("Error: CAMERA_QUERY_RESOLUTION_PREVIEW %d", ret); + return ret; + } + + ///Update the current preview width and height + mPreviewWidth = frame.mWidth; + mPreviewHeight = frame.mHeight; + //Update the padded width and height - required for VNF and VSTAB + mParameters.set(TICameraParameters::KEY_PADDED_WIDTH, mPreviewWidth); + mParameters.set(TICameraParameters::KEY_PADDED_HEIGHT, mPreviewHeight); + + } + + ///If we don't have the preview callback enabled and display adapter, + if(!mSetPreviewWindowCalled || (mDisplayAdapter.get() == NULL)){ + CAMHAL_LOGDA("Preview not started. 
Preview in progress flag set"); + mPreviewStartInProgress = true; + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_SWITCH_TO_EXECUTING); + if ( NO_ERROR != ret ){ + CAMHAL_LOGEB("Error: CAMERA_SWITCH_TO_EXECUTING %d", ret); + return ret; + } + return NO_ERROR; + } + + if( (mDisplayAdapter.get() != NULL) && ( !mPreviewEnabled ) && ( mDisplayPaused ) ) + { + CAMHAL_LOGDA("Preview is in paused state"); + + mDisplayPaused = false; + mPreviewEnabled = true; + if ( NO_ERROR == ret ) + { + ret = mDisplayAdapter->pauseDisplay(mDisplayPaused); + + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("Display adapter resume failed %x", ret); + } + } + //restart preview callbacks + if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) + { + mAppCallbackNotifier->enableMsgType (CAMERA_MSG_PREVIEW_FRAME); + } + return ret; + } + + + required_buffer_count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS)); + + ///Allocate the preview buffers + ret = allocPreviewBufs(mPreviewWidth, mPreviewHeight, mParameters.getPreviewFormat(), required_buffer_count, max_queueble_buffers); + + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEA("Couldn't allocate buffers for Preview"); + goto error; + } + + if ( mMeasurementEnabled ) + { + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA, + ( int ) &frame, + required_buffer_count); + if ( NO_ERROR != ret ) + { + return ret; + } + + ///Allocate the preview data buffers + ret = allocPreviewDataBufs(frame.mLength, required_buffer_count); + if ( NO_ERROR != ret ) { + CAMHAL_LOGEA("Couldn't allocate preview data buffers"); + goto error; + } + + if ( NO_ERROR == ret ) + { + desc.mBuffers = mPreviewDataBufs; + desc.mOffsets = mPreviewDataOffsets; + desc.mFd = mPreviewDataFd; + desc.mLength = mPreviewDataLength; + desc.mCount = ( size_t ) required_buffer_count; + desc.mMaxQueueable = (size_t) required_buffer_count; + + mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA, + ( int ) &desc); + } 
+ + } + + ///Pass the buffers to Camera Adapter + desc.mBuffers = mPreviewBufs; + desc.mOffsets = mPreviewOffsets; + desc.mFd = mPreviewFd; + desc.mLength = mPreviewLength; + desc.mCount = ( size_t ) required_buffer_count; + desc.mMaxQueueable = (size_t) max_queueble_buffers; + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW, + ( int ) &desc); + + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("Failed to register preview buffers: 0x%x", ret); + freePreviewBufs(); + return ret; + } + + mAppCallbackNotifier->startPreviewCallbacks(mParameters, mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, required_buffer_count); + + ///Start the callback notifier + ret = mAppCallbackNotifier->start(); + + if( ALREADY_EXISTS == ret ) + { + //Already running, do nothing + CAMHAL_LOGDA("AppCallbackNotifier already running"); + ret = NO_ERROR; + } + else if ( NO_ERROR == ret ) { + CAMHAL_LOGDA("Started AppCallbackNotifier.."); + mAppCallbackNotifier->setMeasurements(mMeasurementEnabled); + } + else + { + CAMHAL_LOGDA("Couldn't start AppCallbackNotifier"); + goto error; + } + + ///Enable the display adapter if present, actual overlay enable happens when we post the buffer + if(mDisplayAdapter.get() != NULL) + { + CAMHAL_LOGDA("Enabling display"); + bool isS3d = false; + DisplayAdapter::S3DParameters s3dParams; + int width, height; + mParameters.getPreviewSize(&width, &height); +#if 0 //TODO: s3d is not part of bringup...will reenable + if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_SUPPORTED)) != NULL) { + isS3d = (strcmp(valstr, "true") == 0); + } + if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D2D_PREVIEW)) != NULL) { + if (strcmp(valstr, "off") == 0) + { + CAMHAL_LOGEA("STEREO 3D->2D PREVIEW MODE IS OFF"); + //TODO: obtain the frame packing configuration from camera or user settings + //once side by side configuration is supported + s3dParams.mode = OVERLAY_S3D_MODE_ON; + s3dParams.framePacking = OVERLAY_S3D_FORMAT_OVERUNDER; 
+ s3dParams.order = OVERLAY_S3D_ORDER_LF; + s3dParams.subSampling = OVERLAY_S3D_SS_NONE; + } + else + { + CAMHAL_LOGEA("STEREO 3D->2D PREVIEW MODE IS ON"); + s3dParams.mode = OVERLAY_S3D_MODE_OFF; + s3dParams.framePacking = OVERLAY_S3D_FORMAT_OVERUNDER; + s3dParams.order = OVERLAY_S3D_ORDER_LF; + s3dParams.subSampling = OVERLAY_S3D_SS_NONE; + } + } +#endif //if 0 + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + ret = mDisplayAdapter->enableDisplay(width, height, &mStartPreview, isS3d ? &s3dParams : NULL); + +#else + + ret = mDisplayAdapter->enableDisplay(width, height, NULL, isS3d ? &s3dParams : NULL); + +#endif + + if ( ret != NO_ERROR ) + { + CAMHAL_LOGEA("Couldn't enable display"); + goto error; + } + + } + + ///Send START_PREVIEW command to adapter + CAMHAL_LOGDA("Starting CameraAdapter preview mode"); + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW); + + if(ret!=NO_ERROR) + { + CAMHAL_LOGEA("Couldn't start preview w/ CameraAdapter"); + goto error; + } + CAMHAL_LOGDA("Started preview"); + + mPreviewEnabled = true; + mPreviewStartInProgress = false; + return ret; + + error: + + CAMHAL_LOGEA("Performing cleanup after error"); + + //Do all the cleanup + freePreviewBufs(); + mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_PREVIEW); + if(mDisplayAdapter.get() != NULL) + { + mDisplayAdapter->disableDisplay(false); + } + mAppCallbackNotifier->stop(); + mPreviewStartInProgress = false; + mPreviewEnabled = false; + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/** + @brief Sets ANativeWindow object. + + Preview buffers provided to CameraHal via this object. DisplayAdapter will be interfacing with it + to render buffers to display. + + @param[in] window The ANativeWindow object created by Surface flinger + @return NO_ERROR If the ANativeWindow object passes validation criteria + @todo Define validation criteria for ANativeWindow object. 
Define error codes for scenarios + + */ +status_t CameraHal::setPreviewWindow(struct preview_stream_ops *window) +{ + status_t ret = NO_ERROR; + CameraAdapter::BuffersDescriptor desc; + + LOG_FUNCTION_NAME; + mSetPreviewWindowCalled = true; + + ///If the Camera service passes a null window, we destroy existing window and free the DisplayAdapter + if(!window) + { + if(mDisplayAdapter.get() != NULL) + { + ///NULL window passed, destroy the display adapter if present + CAMHAL_LOGDA("NULL window passed, destroying display adapter"); + mDisplayAdapter.clear(); + ///@remarks If there was a window previously existing, we usually expect another valid window to be passed by the client + ///@remarks so, we will wait until it passes a valid window to begin the preview again + mSetPreviewWindowCalled = false; + } + CAMHAL_LOGDA("NULL ANativeWindow passed to setPreviewWindow"); + return NO_ERROR; + }else if(mDisplayAdapter.get() == NULL) + { + // Need to create the display adapter since it has not been created + // Create display adapter + mDisplayAdapter = new ANativeWindowDisplayAdapter(); + ret = NO_ERROR; + if(!mDisplayAdapter.get() || ((ret=mDisplayAdapter->initialize())!=NO_ERROR)) + { + if(ret!=NO_ERROR) + { + mDisplayAdapter.clear(); + CAMHAL_LOGEA("DisplayAdapter initialize failed"); + LOG_FUNCTION_NAME_EXIT; + return ret; + } + else + { + CAMHAL_LOGEA("Couldn't create DisplayAdapter"); + LOG_FUNCTION_NAME_EXIT; + return NO_MEMORY; + } + } + + // DisplayAdapter needs to know where to get the CameraFrames from inorder to display + // Since CameraAdapter is the one that provides the frames, set it as the frame provider for DisplayAdapter + mDisplayAdapter->setFrameProvider(mCameraAdapter); + + // Any dynamic errors that happen during the camera use case has to be propagated back to the application + // via CAMERA_MSG_ERROR. 
AppCallbackNotifier is the class that notifies such errors to the application + // Set it as the error handler for the DisplayAdapter + mDisplayAdapter->setErrorHandler(mAppCallbackNotifier.get()); + + // Update the display adapter with the new window that is passed from CameraService + ret = mDisplayAdapter->setPreviewWindow(window); + if(ret!=NO_ERROR) + { + CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret); + } + + if(mPreviewStartInProgress) + { + CAMHAL_LOGDA("setPreviewWindow called when preview running"); + // Start the preview since the window is now available + ret = startPreview(); + } + } else { + // Update the display adapter with the new window that is passed from CameraService + ret = mDisplayAdapter->setPreviewWindow(window); + if ( (NO_ERROR == ret) && previewEnabled() ) { + restartPreview(); + } else if (ret == ALREADY_EXISTS) { + // ALREADY_EXISTS should be treated as a noop in this case + ret = NO_ERROR; + } + } + LOG_FUNCTION_NAME_EXIT; + + return ret; + +} + + +/** + @brief Stop a previously started preview. + + @param none + @return none + + */ +void CameraHal::stopPreview() +{ + LOG_FUNCTION_NAME; + + if( (!previewEnabled() && !mDisplayPaused) || mRecordingEnabled) + { + LOG_FUNCTION_NAME_EXIT; + return; + } + + bool imageCaptureRunning = (mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE) && + (mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE); + if(mDisplayPaused && !imageCaptureRunning) + { + // Display is paused, which essentially means there is no preview active. + // Note: this is done so that when stopPreview is called by client after + // an image capture, we do not de-initialize the camera adapter and + // restart over again. + + return; + } + + forceStopPreview(); + + // Reset Capture-Mode to default, so that when we switch from VideoRecording + // to ImageCapture, CAPTURE_MODE is not left to VIDEO_MODE. 
+ CAMHAL_LOGDA("Resetting Capture-Mode to default"); + mParameters.set(TICameraParameters::KEY_CAP_MODE, ""); + + LOG_FUNCTION_NAME_EXIT; +} + +/** + @brief Returns true if preview is enabled + + @param none + @return true If preview is running currently + false If preview has been stopped + + */ +bool CameraHal::previewEnabled() +{ + LOG_FUNCTION_NAME; + + return (mPreviewEnabled || mPreviewStartInProgress); +} + +/** + @brief Start record mode. + + When a record image is available a CAMERA_MSG_VIDEO_FRAME message is sent with + the corresponding frame. Every record frame must be released by calling + releaseRecordingFrame(). + + @param none + @return NO_ERROR If recording could be started without any issues + @todo Update the header with possible error values in failure scenarios + + */ +status_t CameraHal::startRecording( ) +{ + int w, h; + const char *valstr = NULL; + bool restartPreviewRequired = false; + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + gettimeofday(&mStartPreview, NULL); + +#endif + + if(!previewEnabled()) + { + return NO_INIT; + } + + // set internal recording hint in case camera adapter needs to make some + // decisions....(will only be sent to camera adapter if camera restart is required) + mParameters.set(TICameraParameters::KEY_RECORDING_HINT, CameraParameters::TRUE); + + // if application starts recording in continuous focus picture mode... 
+ // then we need to force default capture mode (as opposed to video mode) + if ( ((valstr = mParameters.get(CameraParameters::KEY_FOCUS_MODE)) != NULL) && + (strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0) ){ + restartPreviewRequired = resetVideoModeParameters(); + } + + // only need to check recording hint if preview restart is not already needed + valstr = mParameters.get(CameraParameters::KEY_RECORDING_HINT); + if ( !restartPreviewRequired && + (!valstr || (valstr && (strcmp(valstr, CameraParameters::TRUE) != 0))) ) { + restartPreviewRequired = setVideoModeParameters(mParameters); + } + + if (restartPreviewRequired) { + ret = restartPreview(); + } + + if ( NO_ERROR == ret ) + { + int count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS)); + mParameters.getPreviewSize(&w, &h); + CAMHAL_LOGDB("%s Video Width=%d Height=%d", __FUNCTION__, mVideoWidth, mVideoHeight); + + if ((w != mVideoWidth) && (h != mVideoHeight)) + { + ret = allocVideoBufs(mVideoWidth, mVideoHeight, count); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret); + mParameters.remove(TICameraParameters::KEY_RECORDING_HINT); + return ret; + } + + mAppCallbackNotifier->useVideoBuffers(true); + mAppCallbackNotifier->setVideoRes(mVideoWidth, mVideoHeight); + ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, count, mVideoBufs); + } + else + { + mAppCallbackNotifier->useVideoBuffers(false); + mAppCallbackNotifier->setVideoRes(mPreviewWidth, mPreviewHeight); + ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, count, NULL); + } + } + + if ( NO_ERROR == ret ) + { + ret = mAppCallbackNotifier->startRecording(); + } + + if ( NO_ERROR == ret ) + { + ///Buffers for video capture (if different from preview) are expected to be allocated within CameraAdapter + ret = 
mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_VIDEO); + } + + if ( NO_ERROR == ret ) + { + mRecordingEnabled = true; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/** + @brief Set the camera parameters specific to Video Recording. + + This function checks for the camera parameters which have to be set for recording. + Video Recording needs CAPTURE_MODE to be VIDEO_MODE. This function sets it. + This function also enables Video Recording specific functions like VSTAB & VNF. + + @param none + @return true if preview needs to be restarted for VIDEO_MODE parameters to take effect. + @todo Modify the policies for enabling VSTAB & VNF usecase based later. + + */ +bool CameraHal::setVideoModeParameters(const CameraParameters& params) +{ + const char *valstr = NULL; + const char *valstrRemote = NULL; + bool restartPreviewRequired = false; + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + // Set CAPTURE_MODE to VIDEO_MODE, if not set already and Restart Preview + valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE); + if ( (valstr == NULL) || + ( (valstr != NULL) && (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) != 0) ) ) + { + CAMHAL_LOGDA("Set CAPTURE_MODE to VIDEO_MODE"); + mParameters.set(TICameraParameters::KEY_CAP_MODE, (const char *) TICameraParameters::VIDEO_MODE); + restartPreviewRequired = true; + } + + // Check if CAPTURE_MODE is VIDEO_MODE, since VSTAB & VNF work only in VIDEO_MODE. + valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE); + if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0) { + valstrRemote = params.get(CameraParameters::KEY_VIDEO_STABILIZATION); + // set VSTAB. 
restart is required if vstab value has changed + if ( valstrRemote != NULL) { + // make sure we support vstab + if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED), + CameraParameters::TRUE) == 0) { + valstr = mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION); + // vstab value has changed + if ((valstr != NULL) && + strcmp(valstr, valstrRemote) != 0) { + restartPreviewRequired = true; + } + mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION, valstrRemote); + } + } else if (mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION)) { + // vstab was configured but now unset + restartPreviewRequired = true; + mParameters.remove(CameraParameters::KEY_VIDEO_STABILIZATION); + } + + // Set VNF + valstrRemote = params.get(TICameraParameters::KEY_VNF); + if ( valstrRemote == NULL) { + CAMHAL_LOGDA("Enable VNF"); + mParameters.set(TICameraParameters::KEY_VNF, "1"); + restartPreviewRequired = true; + } else { + valstr = mParameters.get(TICameraParameters::KEY_VNF); + if (valstr && strcmp(valstr, valstrRemote) != 0) { + restartPreviewRequired = true; + } + mParameters.set(TICameraParameters::KEY_VNF, valstrRemote); + } + + // For VSTAB alone for 1080p resolution, padded width goes > 2048, which cannot be rendered by GPU. + // In such case, there is support in Ducati for combination of VSTAB & VNF requiring padded width < 2048. + // So we are forcefully enabling VNF, if VSTAB is enabled for 1080p resolution. + valstr = mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION); + if (valstr && (strcmp(valstr, CameraParameters::TRUE) == 0) && (mPreviewWidth == 1920)) { + CAMHAL_LOGDA("Force Enable VNF for 1080p"); + mParameters.set(TICameraParameters::KEY_VNF, "1"); + restartPreviewRequired = true; + } + } + LOG_FUNCTION_NAME_EXIT; + + return restartPreviewRequired; +} + +/** + @brief Reset the camera parameters specific to Video Recording. + + This function resets CAPTURE_MODE and disables Recording specific functions like VSTAB & VNF. 
+ + @param none + @return true if preview needs to be restarted for VIDEO_MODE parameters to take effect. + + */ +bool CameraHal::resetVideoModeParameters() +{ + const char *valstr = NULL; + bool restartPreviewRequired = false; + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + // ignore this if we are already recording + if (mRecordingEnabled) { + return false; + } + + // Set CAPTURE_MODE to VIDEO_MODE, if not set already and Restart Preview + valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE); + if ((valstr != NULL) && (strcmp(valstr, TICameraParameters::VIDEO_MODE) == 0)) { + CAMHAL_LOGDA("Reset Capture-Mode to default"); + mParameters.set(TICameraParameters::KEY_CAP_MODE, ""); + restartPreviewRequired = true; + } + + LOG_FUNCTION_NAME_EXIT; + + return restartPreviewRequired; +} + +/** + @brief Restart the preview with setParameter. + + This function restarts preview, for some VIDEO_MODE parameters to take effect. + + @param none + @return NO_ERROR If recording parameters could be set without any issues + + */ +status_t CameraHal::restartPreview() +{ + const char *valstr = NULL; + char tmpvalstr[30]; + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + // Retain CAPTURE_MODE before calling stopPreview(), since it is reset in stopPreview(). + tmpvalstr[0] = 0; + valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE); + if(valstr != NULL) + { + if(sizeof(tmpvalstr) < (strlen(valstr)+1)) + { + return -EINVAL; + } + + strncpy(tmpvalstr, valstr, sizeof(tmpvalstr)); + tmpvalstr[sizeof(tmpvalstr)-1] = 0; + } + + forceStopPreview(); + + { + Mutex::Autolock lock(mLock); + mParameters.set(TICameraParameters::KEY_CAP_MODE, tmpvalstr); + mCameraAdapter->setParameters(mParameters); + } + + ret = startPreview(); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/** + @brief Stop a previously started recording. 
+ + @param none + @return none + + */ +void CameraHal::stopRecording() +{ + CameraAdapter::AdapterState currentState; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mLock); + + if (!mRecordingEnabled ) + { + return; + } + + currentState = mCameraAdapter->getState(); + if (currentState == CameraAdapter::VIDEO_CAPTURE_STATE) { + mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE); + } + + mAppCallbackNotifier->stopRecording(); + + mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_VIDEO); + + mRecordingEnabled = false; + + if ( mAppCallbackNotifier->getUesVideoBuffers() ){ + freeVideoBufs(mVideoBufs); + if (mVideoBufs){ + CAMHAL_LOGVB(" FREEING mVideoBufs 0x%x", mVideoBufs); + delete [] mVideoBufs; + } + mVideoBufs = NULL; + } + + // reset internal recording hint in case camera adapter needs to make some + // decisions....(will only be sent to camera adapter if camera restart is required) + mParameters.remove(TICameraParameters::KEY_RECORDING_HINT); + + LOG_FUNCTION_NAME_EXIT; +} + +/** + @brief Returns true if recording is enabled. + + @param none + @return true If recording is currently running + false If recording has been stopped + + */ +int CameraHal::recordingEnabled() +{ + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return mRecordingEnabled; +} + +/** + @brief Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME. + + @param[in] mem MemoryBase pointer to the frame being released. Must be one of the buffers + previously given by CameraHal + @return none + + */ +void CameraHal::releaseRecordingFrame(const void* mem) +{ + LOG_FUNCTION_NAME; + + //CAMHAL_LOGDB(" 0x%x", mem->pointer()); + + if ( ( mRecordingEnabled ) && mem != NULL) + { + mAppCallbackNotifier->releaseRecordingFrame(mem); + } + + LOG_FUNCTION_NAME_EXIT; + + return; +} + +/** + @brief Start auto focus + + This call asynchronous. + The notification callback routine is called with CAMERA_MSG_FOCUS once when + focusing is complete. 
autoFocus() will be called again if another auto focus is + needed. + + @param none + @return NO_ERROR + @todo Define the error codes if the focus is not locked + + */ +status_t CameraHal::autoFocus() +{ + status_t ret = NO_ERROR; + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + gettimeofday(&mStartFocus, NULL); + +#endif + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mLock); + + mMsgEnabled |= CAMERA_MSG_FOCUS; + + if ( NULL == mCameraAdapter ) + { + ret = -1; + goto EXIT; + } + + CameraAdapter::AdapterState state; + ret = mCameraAdapter->getState(state); + if (ret != NO_ERROR) + { + goto EXIT; + } + + if (state == CameraAdapter::AF_STATE) + { + CAMHAL_LOGI("Ignoring start-AF (already in progress)"); + goto EXIT; + } + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + //pass the autoFocus timestamp along with the command to camera adapter + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_PERFORM_AUTOFOCUS, ( int ) &mStartFocus); + +#else + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_PERFORM_AUTOFOCUS); + +#endif + +EXIT: + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/** + @brief Cancels auto-focus function. + + If the auto-focus is still in progress, this function will cancel it. + Whether the auto-focus is in progress or not, this function will return the + focus position to the default. If the camera does not support auto-focus, this is a no-op. 
+ + + @param none + @return NO_ERROR If the cancel succeeded + @todo Define error codes if cancel didnt succeed + + */ +status_t CameraHal::cancelAutoFocus() +{ + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mLock); + CameraParameters adapterParams = mParameters; + mMsgEnabled &= ~CAMERA_MSG_FOCUS; + + if( NULL != mCameraAdapter ) + { + adapterParams.set(TICameraParameters::KEY_AUTO_FOCUS_LOCK, CameraParameters::FALSE); + mCameraAdapter->setParameters(adapterParams); + mCameraAdapter->sendCommand(CameraAdapter::CAMERA_CANCEL_AUTOFOCUS); + mAppCallbackNotifier->flushEventQueue(); + } + + LOG_FUNCTION_NAME_EXIT; + return NO_ERROR; +} + +void CameraHal::setEventProvider(int32_t eventMask, MessageNotifier * eventNotifier) +{ + + LOG_FUNCTION_NAME; + + if ( NULL != mEventProvider ) + { + mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS); + delete mEventProvider; + mEventProvider = NULL; + } + + mEventProvider = new EventProvider(eventNotifier, this, eventCallbackRelay); + if ( NULL == mEventProvider ) + { + CAMHAL_LOGEA("Error in creating EventProvider"); + } + else + { + mEventProvider->enableEventNotification(eventMask); + } + + LOG_FUNCTION_NAME_EXIT; +} + +void CameraHal::eventCallbackRelay(CameraHalEvent* event) +{ + LOG_FUNCTION_NAME; + + CameraHal *appcbn = ( CameraHal * ) (event->mCookie); + appcbn->eventCallback(event ); + + LOG_FUNCTION_NAME_EXIT; +} + +void CameraHal::eventCallback(CameraHalEvent* event) +{ + LOG_FUNCTION_NAME; + + if ( NULL != event ) + { + switch( event->mEventType ) + { + case CameraHalEvent::EVENT_FOCUS_LOCKED: + case CameraHalEvent::EVENT_FOCUS_ERROR: + { + if ( mBracketingEnabled ) + { + startImageBracketing(); + } + break; + } + default: + { + break; + } + }; + } + + LOG_FUNCTION_NAME_EXIT; +} + +status_t CameraHal::startImageBracketing() +{ + status_t ret = NO_ERROR; + CameraFrame frame; + CameraAdapter::BuffersDescriptor desc; + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + gettimeofday(&mStartCapture, 
NULL); + +#endif + + LOG_FUNCTION_NAME; + + if(!previewEnabled() && !mDisplayPaused) + { + LOG_FUNCTION_NAME_EXIT; + return NO_INIT; + } + + if ( !mBracketingEnabled ) + { + return ret; + } + + if ( NO_ERROR == ret ) + { + mBracketingRunning = true; + } + + if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) ) + { + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE, + ( int ) &frame, + ( mBracketRangeNegative + 1 )); + + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x", ret); + } + } + + if ( NO_ERROR == ret ) + { + if ( NULL != mAppCallbackNotifier.get() ) + { + mAppCallbackNotifier->setBurst(true); + } + } + + if ( NO_ERROR == ret ) + { + mParameters.getPictureSize(( int * ) &frame.mWidth, + ( int * ) &frame.mHeight); + + ret = allocImageBufs(frame.mWidth, + frame.mHeight, + frame.mLength, + mParameters.getPictureFormat(), + ( mBracketRangeNegative + 1 )); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret); + } + } + + if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) ) + { + + desc.mBuffers = mImageBufs; + desc.mOffsets = mImageOffsets; + desc.mFd = mImageFd; + desc.mLength = mImageLength; + desc.mCount = ( size_t ) ( mBracketRangeNegative + 1 ); + desc.mMaxQueueable = ( size_t ) ( mBracketRangeNegative + 1 ); + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE, + ( int ) &desc); + + if ( NO_ERROR == ret ) + { + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + //pass capture timestamp along with the camera adapter command + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_BRACKET_CAPTURE, ( mBracketRangePositive + 1 ), (int) &mStartCapture); + +#else + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_BRACKET_CAPTURE, ( mBracketRangePositive + 1 )); + +#endif + + } + } + + return ret; +} + +status_t CameraHal::stopImageBracketing() +{ + status_t ret = NO_ERROR; + + 
LOG_FUNCTION_NAME; + + if( !previewEnabled() ) + { + return NO_INIT; + } + + mBracketingRunning = false; + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/** + @brief Take a picture. + + @param none + @return NO_ERROR If able to switch to image capture + @todo Define error codes if unable to switch to image capture + + */ +status_t CameraHal::takePicture( ) +{ + status_t ret = NO_ERROR; + CameraFrame frame; + CameraAdapter::BuffersDescriptor desc; + int burst; + const char *valstr = NULL; + unsigned int bufferCount = 1; + + Mutex::Autolock lock(mLock); + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + gettimeofday(&mStartCapture, NULL); + +#endif + + LOG_FUNCTION_NAME; + + if(!previewEnabled() && !mDisplayPaused) + { + LOG_FUNCTION_NAME_EXIT; + CAMHAL_LOGEA("Preview not started..."); + return NO_INIT; + } + + // return error if we are already capturing + if ( (mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE && + mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE) || + (mCameraAdapter->getState() == CameraAdapter::VIDEO_CAPTURE_STATE && + mCameraAdapter->getNextState() != CameraAdapter::VIDEO_STATE) ) { + CAMHAL_LOGEA("Already capturing an image..."); + return NO_INIT; + } + + // we only support video snapshot if we are in video mode (recording hint is set) + valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE); + if ( (mCameraAdapter->getState() == CameraAdapter::VIDEO_STATE) && + (valstr && strcmp(valstr, TICameraParameters::VIDEO_MODE)) ) { + CAMHAL_LOGEA("Trying to capture while recording without recording hint set..."); + return INVALID_OPERATION; + } + + if ( !mBracketingRunning ) + { + + if ( NO_ERROR == ret ) + { + burst = mParameters.getInt(TICameraParameters::KEY_BURST); + } + + //Allocate all buffers only in burst capture case + if ( burst > 1 ) + { + bufferCount = CameraHal::NO_BUFFERS_IMAGE_CAPTURE; + if ( NULL != 
mAppCallbackNotifier.get() ) + { + mAppCallbackNotifier->setBurst(true); + } + } + else + { + if ( NULL != mAppCallbackNotifier.get() ) + { + mAppCallbackNotifier->setBurst(false); + } + } + + // pause preview during normal image capture + // do not pause preview if recording (video state) + if (NO_ERROR == ret && + NULL != mDisplayAdapter.get() && + burst < 1) { + if (mCameraAdapter->getState() != CameraAdapter::VIDEO_STATE) { + mDisplayPaused = true; + mPreviewEnabled = false; + ret = mDisplayAdapter->pauseDisplay(mDisplayPaused); + // since preview is paused we should stop sending preview frames too + if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) { + mAppCallbackNotifier->disableMsgType (CAMERA_MSG_PREVIEW_FRAME); + } + } + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + mDisplayAdapter->setSnapshotTimeRef(&mStartCapture); +#endif + } + + // if we taking video snapshot... + if ((NO_ERROR == ret) && (mCameraAdapter->getState() == CameraAdapter::VIDEO_STATE)) { + // enable post view frames if not already enabled so we can internally + // save snapshot frames for generating thumbnail + if((mMsgEnabled & CAMERA_MSG_POSTVIEW_FRAME) == 0) { + mAppCallbackNotifier->enableMsgType(CAMERA_MSG_POSTVIEW_FRAME); + } + } + + if ( (NO_ERROR == ret) && (NULL != mCameraAdapter) ) + { + if ( NO_ERROR == ret ) + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE, + ( int ) &frame, + bufferCount); + + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x", ret); + } + } + + if ( NO_ERROR == ret ) + { + mParameters.getPictureSize(( int * ) &frame.mWidth, + ( int * ) &frame.mHeight); + + ret = allocImageBufs(frame.mWidth, + frame.mHeight, + frame.mLength, + mParameters.getPictureFormat(), + bufferCount); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret); + } + } + + if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) ) + { + desc.mBuffers = mImageBufs; + 
desc.mOffsets = mImageOffsets; + desc.mFd = mImageFd; + desc.mLength = mImageLength; + desc.mCount = ( size_t ) bufferCount; + desc.mMaxQueueable = ( size_t ) bufferCount; + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE, + ( int ) &desc); + } + } + + if ( ( NO_ERROR == ret ) && ( NULL != mCameraAdapter ) ) + { + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + //pass capture timestamp along with the camera adapter command + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE, (int) &mStartCapture); + +#else + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE); + +#endif + + } + + return ret; +} + +/** + @brief Cancel a picture that was started with takePicture. + + Calling this method when no picture is being taken is a no-op. + + @param none + @return NO_ERROR If cancel succeeded. Cancel can succeed if image callback is not sent + @todo Define error codes + + */ +status_t CameraHal::cancelPicture( ) +{ + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mLock); + + mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE); + + return NO_ERROR; +} + +/** + @brief Return the camera parameters. 
+ + @param none + @return Currently configured camera parameters + + */ +char* CameraHal::getParameters() +{ + String8 params_str8; + char* params_string; + const char * valstr = NULL; + + LOG_FUNCTION_NAME; + + if( NULL != mCameraAdapter ) + { + mCameraAdapter->getParameters(mParameters); + } + + CameraParameters mParams = mParameters; + + // Handle RECORDING_HINT to Set/Reset Video Mode Parameters + valstr = mParameters.get(CameraParameters::KEY_RECORDING_HINT); + if(valstr != NULL) + { + if(strcmp(valstr, CameraParameters::TRUE) == 0) + { + //HACK FOR MMS MODE + resetPreviewRes(&mParams, mVideoWidth, mVideoHeight); + } + } + + // do not send internal parameters to upper layers + mParams.remove(TICameraParameters::KEY_RECORDING_HINT); + mParams.remove(TICameraParameters::KEY_AUTO_FOCUS_LOCK); + + params_str8 = mParams.flatten(); + + // camera service frees this string... + params_string = (char*) malloc(sizeof(char) * (params_str8.length()+1)); + strcpy(params_string, params_str8.string()); + + LOG_FUNCTION_NAME_EXIT; + + ///Return the current set of parameters + + return params_string; +} + +void CameraHal::putParameters(char *parms) +{ + free(parms); +} + +/** + @brief Send command to camera driver. 
+ + @param none + @return NO_ERROR If the command succeeds + @todo Define the error codes that this function can return + + */ +status_t CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + + if ( ( NO_ERROR == ret ) && ( NULL == mCameraAdapter ) ) + { + CAMHAL_LOGEA("No CameraAdapter instance"); + return -EINVAL; + } + + /////////////////////////////////////////////////////// + // Following commands do NOT need preview to be started + /////////////////////////////////////////////////////// + switch(cmd) { + case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG: + bool enable = static_cast(arg1); + Mutex::Autolock lock(mLock); + if (enable) { + mMsgEnabled |= CAMERA_MSG_FOCUS_MOVE; + } else { + mMsgEnabled &= ~CAMERA_MSG_FOCUS_MOVE; + } + return NO_ERROR; + break; + } + + if ( ( NO_ERROR == ret ) && ( !previewEnabled() )) + { + CAMHAL_LOGEA("Preview is not running"); + ret = -EINVAL; + } + + /////////////////////////////////////////////////////// + // Following commands NEED preview to be started + /////////////////////////////////////////////////////// + + if ( NO_ERROR == ret ) + { + switch(cmd) + { + case CAMERA_CMD_START_SMOOTH_ZOOM: + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_SMOOTH_ZOOM, arg1); + + break; + case CAMERA_CMD_STOP_SMOOTH_ZOOM: + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM); + break; + + case CAMERA_CMD_START_FACE_DETECTION: + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_FD); + + break; + + case CAMERA_CMD_STOP_FACE_DETECTION: + + ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_FD); + + break; + + default: + break; + }; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/** + @brief Release the hardware resources owned by this object. + + Note that this is *not* done in the destructor. 
+ + @param none + @return none + + */ +void CameraHal::release() +{ + LOG_FUNCTION_NAME; + ///@todo Investigate on how release is used by CameraService. Vaguely remember that this is called + ///just before CameraHal object destruction + deinitialize(); + LOG_FUNCTION_NAME_EXIT; +} + + +/** + @brief Dump state of the camera hardware + + @param[in] fd File descriptor + @param[in] args Arguments + @return NO_ERROR Dump succeeded + @todo Error codes for dump fail + + */ +status_t CameraHal::dump(int fd) const +{ + LOG_FUNCTION_NAME; + ///Implement this method when the h/w dump function is supported on Ducati side + return NO_ERROR; +} + +/*-------------Camera Hal Interface Method definitions ENDS here--------------------*/ + + + + +/*-------------Camera Hal Internal Method definitions STARTS here--------------------*/ + +/** + @brief Constructor of CameraHal + + Member variables are initialized here. No allocations should be done here as we + don't use c++ exceptions in the code. + + */ +CameraHal::CameraHal(int cameraId) +{ + LOG_FUNCTION_NAME; + + ///Initialize all the member variables to their defaults + mPreviewEnabled = false; + mPreviewBufs = NULL; + mImageBufs = NULL; + mBufProvider = NULL; + mPreviewStartInProgress = false; + mVideoBufs = NULL; + mVideoBufProvider = NULL; + mRecordingEnabled = false; + mDisplayPaused = false; + mSetPreviewWindowCalled = false; + mMsgEnabled = 0; + mAppCallbackNotifier = NULL; + mMemoryManager = NULL; + mCameraAdapter = NULL; + mBracketingEnabled = false; + mBracketingRunning = false; + mEventProvider = NULL; + mBracketRangePositive = 1; + mBracketRangeNegative = 1; + mMaxZoomSupported = 0; + mShutterEnabled = true; + mMeasurementEnabled = false; + mPreviewDataBufs = NULL; + mCameraProperties = NULL; + mCurrentTime = 0; + mFalsePreview = 0; + mImageOffsets = NULL; + mImageLength = 0; + mImageFd = 0; + mVideoOffsets = NULL; + mVideoFd = 0; + mVideoLength = 0; + mPreviewDataOffsets = NULL; + mPreviewDataFd = 0; + 
mPreviewDataLength = 0; + mPreviewFd = 0; + mPreviewWidth = 0; + mPreviewHeight = 0; + mPreviewLength = 0; + mPreviewOffsets = NULL; + mPreviewRunning = 0; + mPreviewStateOld = 0; + mRecordingEnabled = 0; + mRecordEnabled = 0; + mSensorListener = NULL; + mVideoWidth = 0; + mVideoHeight = 0; + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + //Initialize the CameraHAL constructor timestamp, which is used in the + // PPM() method as time reference if the user does not supply one. + gettimeofday(&ppm_start, NULL); + +#endif + + mCameraIndex = cameraId; + + LOG_FUNCTION_NAME_EXIT; +} + +/** + @brief Destructor of CameraHal + + This function simply calls deinitialize() to free up memory allocate during construct + phase + */ +CameraHal::~CameraHal() +{ + LOG_FUNCTION_NAME; + + ///Call de-initialize here once more - it is the last chance for us to relinquish all the h/w and s/w resources + deinitialize(); + + if ( NULL != mEventProvider ) + { + mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS); + delete mEventProvider; + mEventProvider = NULL; + } + + /// Free the callback notifier + mAppCallbackNotifier.clear(); + + /// Free the display adapter + mDisplayAdapter.clear(); + + if ( NULL != mCameraAdapter ) { + int strongCount = mCameraAdapter->getStrongCount(); + + mCameraAdapter->decStrong(mCameraAdapter); + + mCameraAdapter = NULL; + } + + freeImageBufs(); + + /// Free the memory manager + mMemoryManager.clear(); + + LOG_FUNCTION_NAME_EXIT; +} + +/** + @brief Initialize the Camera HAL + + Creates CameraAdapter, AppCallbackNotifier, DisplayAdapter and MemoryManager + + @param None + @return NO_ERROR - On success + NO_MEMORY - On failure to allocate memory for any of the objects + @remarks Camera Hal internal function + + */ + +status_t CameraHal::initialize(CameraProperties::Properties* properties) +{ + LOG_FUNCTION_NAME; + + int sensor_index = 0; + + ///Initialize the event mask used for registering an event provider for AppCallbackNotifier + 
///Currently, registering all events as to be coming from CameraAdapter + int32_t eventMask = CameraHalEvent::ALL_EVENTS; + + // Get my camera properties + mCameraProperties = properties; + + if(!mCameraProperties) + { + goto fail_loop; + } + + // Dump the properties of this Camera + // will only print if DEBUG macro is defined + mCameraProperties->dump(); + + if (strcmp(CameraProperties::DEFAULT_VALUE, mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX)) != 0 ) + { + sensor_index = atoi(mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX)); + } + + CAMHAL_LOGDB("Sensor index %d", sensor_index); + + mCameraAdapter = CameraAdapter_Factory(sensor_index); + if ( ( NULL == mCameraAdapter ) || (mCameraAdapter->initialize(properties)!=NO_ERROR)) + { + CAMHAL_LOGEA("Unable to create or initialize CameraAdapter"); + mCameraAdapter = NULL; + goto fail_loop; + } + + mCameraAdapter->incStrong(mCameraAdapter); + mCameraAdapter->registerImageReleaseCallback(releaseImageBuffers, (void *) this); + mCameraAdapter->registerEndCaptureCallback(endImageCapture, (void *)this); + + if(!mAppCallbackNotifier.get()) + { + /// Create the callback notifier + mAppCallbackNotifier = new AppCallbackNotifier(); + if( ( NULL == mAppCallbackNotifier.get() ) || ( mAppCallbackNotifier->initialize() != NO_ERROR)) + { + CAMHAL_LOGEA("Unable to create or initialize AppCallbackNotifier"); + goto fail_loop; + } + } + + if(!mMemoryManager.get()) + { + /// Create Memory Manager + mMemoryManager = new MemoryManager(); + if( ( NULL == mMemoryManager.get() ) || ( mMemoryManager->initialize() != NO_ERROR)) + { + CAMHAL_LOGEA("Unable to create or initialize MemoryManager"); + goto fail_loop; + } + } + + ///Setup the class dependencies... + + ///AppCallbackNotifier has to know where to get the Camera frames and the events like auto focus lock etc from. 
+ ///CameraAdapter is the one which provides those events + ///Set it as the frame and event providers for AppCallbackNotifier + ///@remarks setEventProvider API takes in a bit mask of events for registering a provider for the different events + /// That way, if events can come from DisplayAdapter in future, we will be able to add it as provider + /// for any event + mAppCallbackNotifier->setEventProvider(eventMask, mCameraAdapter); + mAppCallbackNotifier->setFrameProvider(mCameraAdapter); + + ///Any dynamic errors that happen during the camera use case has to be propagated back to the application + ///via CAMERA_MSG_ERROR. AppCallbackNotifier is the class that notifies such errors to the application + ///Set it as the error handler for CameraAdapter + mCameraAdapter->setErrorHandler(mAppCallbackNotifier.get()); + + ///Start the callback notifier + if(mAppCallbackNotifier->start() != NO_ERROR) + { + CAMHAL_LOGEA("Couldn't start AppCallbackNotifier"); + goto fail_loop; + } + + CAMHAL_LOGDA("Started AppCallbackNotifier.."); + mAppCallbackNotifier->setMeasurements(mMeasurementEnabled); + + ///Initialize default parameters + initDefaultParameters(); + + + if ( setParameters(mParameters) != NO_ERROR ) + { + CAMHAL_LOGEA("Failed to set default parameters?!"); + } + + // register for sensor events + mSensorListener = new SensorListener(); + if (mSensorListener.get()) { + if (mSensorListener->initialize() == NO_ERROR) { + mSensorListener->setCallbacks(orientation_cb, this); + mSensorListener->enableSensor(SensorListener::SENSOR_ORIENTATION); + } else { + CAMHAL_LOGEA("Error initializing SensorListener. 
not fatal, continuing"); + mSensorListener.clear(); + mSensorListener = NULL; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; + + fail_loop: + + ///Free up the resources because we failed somewhere up + deinitialize(); + LOG_FUNCTION_NAME_EXIT; + + return NO_MEMORY; + +} + +bool CameraHal::isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions) +{ + bool ret = true; + status_t status = NO_ERROR; + char tmpBuffer[PARAM_BUFFER + 1]; + char *pos = NULL; + + LOG_FUNCTION_NAME; + + if ( NULL == supportedResolutions ) + { + CAMHAL_LOGEA("Invalid supported resolutions string"); + ret = false; + goto exit; + } + + status = snprintf(tmpBuffer, PARAM_BUFFER, "%dx%d", width, height); + if ( 0 > status ) + { + CAMHAL_LOGEA("Error encountered while generating validation string"); + ret = false; + goto exit; + } + + pos = strstr(supportedResolutions, tmpBuffer); + if ( NULL == pos ) + { + ret = false; + } + else + { + ret = true; + } + +exit: + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +bool CameraHal::isParameterValid(const char *param, const char *supportedParams) +{ + bool ret = true; + char *pos = NULL; + + LOG_FUNCTION_NAME; + + if ( NULL == supportedParams ) + { + CAMHAL_LOGEA("Invalid supported parameters string"); + ret = false; + goto exit; + } + + if ( NULL == param ) + { + CAMHAL_LOGEA("Invalid parameter string"); + ret = false; + goto exit; + } + + pos = strstr(supportedParams, param); + if ( NULL == pos ) + { + ret = false; + } + else + { + ret = true; + } + +exit: + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +bool CameraHal::isParameterValid(int param, const char *supportedParams) +{ + bool ret = true; + char *pos = NULL; + status_t status; + char tmpBuffer[PARAM_BUFFER + 1]; + + LOG_FUNCTION_NAME; + + if ( NULL == supportedParams ) + { + CAMHAL_LOGEA("Invalid supported parameters string"); + ret = false; + goto exit; + } + + status = snprintf(tmpBuffer, PARAM_BUFFER, "%d", param); + if ( 0 > status ) + { + 
CAMHAL_LOGEA("Error encountered while generating validation string"); + ret = false; + goto exit; + } + + pos = strstr(supportedParams, tmpBuffer); + if ( NULL == pos ) + { + ret = false; + } + else + { + ret = true; + } + +exit: + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t CameraHal::doesSetParameterNeedUpdate(const char* new_param, const char* old_param, bool& update) { + if (!new_param || !old_param) { + return -EINVAL; + } + + // if params mismatch we should update parameters for camera adapter + if ((strcmp(new_param, old_param) != 0)) { + update = true; + } + + return NO_ERROR; +} + +status_t CameraHal::parseResolution(const char *resStr, int &width, int &height) +{ + status_t ret = NO_ERROR; + char *ctx, *pWidth, *pHeight; + const char *sep = "x"; + char *tmp = NULL; + + LOG_FUNCTION_NAME; + + if ( NULL == resStr ) + { + return -EINVAL; + } + + //This fixes "Invalid input resolution" + char *resStr_copy = (char *)malloc(strlen(resStr) + 1); + if ( NULL!=resStr_copy ) { + if ( NO_ERROR == ret ) + { + strcpy(resStr_copy, resStr); + pWidth = strtok_r( (char *) resStr_copy, sep, &ctx); + + if ( NULL != pWidth ) + { + width = atoi(pWidth); + } + else + { + CAMHAL_LOGEB("Invalid input resolution %s", resStr); + ret = -EINVAL; + } + } + + if ( NO_ERROR == ret ) + { + pHeight = strtok_r(NULL, sep, &ctx); + + if ( NULL != pHeight ) + { + height = atoi(pHeight); + } + else + { + CAMHAL_LOGEB("Invalid input resolution %s", resStr); + ret = -EINVAL; + } + } + + free(resStr_copy); + resStr_copy = NULL; + } + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +void CameraHal::insertSupportedParams() +{ + char tmpBuffer[PARAM_BUFFER + 1]; + + LOG_FUNCTION_NAME; + + CameraParameters &p = mParameters; + + ///Set the name of the camera + p.set(TICameraParameters::KEY_CAMERA_NAME, mCameraProperties->get(CameraProperties::CAMERA_NAME)); + + mMaxZoomSupported = atoi(mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES)); + + 
p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES)); + p.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS)); + p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)); + p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS)); + p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES)); + p.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_THUMBNAIL_SIZES)); + p.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE)); + p.set(CameraParameters::KEY_SUPPORTED_EFFECTS, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS)); + p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES)); + p.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES)); + p.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES)); + p.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING)); + p.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MAX)); + p.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MIN)); + p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_EV_STEP)); + p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES)); + 
p.set(TICameraParameters::KEY_SUPPORTED_EXPOSURE, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES)); + p.set(TICameraParameters::KEY_SUPPORTED_ISO_VALUES, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES)); + p.set(CameraParameters::KEY_ZOOM_RATIOS, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_RATIOS)); + p.set(CameraParameters::KEY_MAX_ZOOM, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES)); + p.set(CameraParameters::KEY_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::ZOOM_SUPPORTED)); + p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::SMOOTH_ZOOM_SUPPORTED)); + p.set(TICameraParameters::KEY_SUPPORTED_IPP, mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES)); + p.set(TICameraParameters::KEY_S3D_SUPPORTED,mCameraProperties->get(CameraProperties::S3D_SUPPORTED)); + p.set(TICameraParameters::KEY_S3D2D_PREVIEW_MODE,mCameraProperties->get(CameraProperties::S3D2D_PREVIEW_MODES)); + p.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE)); + p.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, mCameraProperties->get(CameraProperties::MANUALCONVERGENCE_VALUES)); + p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED)); + p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED)); + p.set(TICameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION)); + p.set(TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION_VALUES)); + p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED)); + p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, 
mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED)); + p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, mCameraProperties->get(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED)); + + LOG_FUNCTION_NAME_EXIT; + +} + +void CameraHal::initDefaultParameters() +{ + //Purpose of this function is to initialize the default current and supported parameters for the currently + //selected camera. + + CameraParameters &p = mParameters; + int currentRevision, adapterRevision; + status_t ret = NO_ERROR; + int width, height; + + LOG_FUNCTION_NAME; + + ret = parseResolution(mCameraProperties->get(CameraProperties::PREVIEW_SIZE), width, height); + + if ( NO_ERROR == ret ) + { + p.setPreviewSize(width, height); + } + else + { + p.setPreviewSize(MIN_WIDTH, MIN_HEIGHT); + } + + ret = parseResolution(mCameraProperties->get(CameraProperties::PICTURE_SIZE), width, height); + + if ( NO_ERROR == ret ) + { + p.setPictureSize(width, height); + } + else + { + p.setPictureSize(PICTURE_WIDTH, PICTURE_HEIGHT); + } + + ret = parseResolution(mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_SIZE), width, height); + + if ( NO_ERROR == ret ) + { + p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width); + p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height); + } + else + { + p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, MIN_WIDTH); + p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, MIN_HEIGHT); + } + + insertSupportedParams(); + + //Insert default values + p.setPreviewFrameRate(atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE))); + p.setPreviewFormat(mCameraProperties->get(CameraProperties::PREVIEW_FORMAT)); + p.setPictureFormat(mCameraProperties->get(CameraProperties::PICTURE_FORMAT)); + p.set(CameraParameters::KEY_JPEG_QUALITY, mCameraProperties->get(CameraProperties::JPEG_QUALITY)); + p.set(CameraParameters::KEY_WHITE_BALANCE, mCameraProperties->get(CameraProperties::WHITEBALANCE)); + p.set(CameraParameters::KEY_EFFECT, 
mCameraProperties->get(CameraProperties::EFFECT)); + p.set(CameraParameters::KEY_ANTIBANDING, mCameraProperties->get(CameraProperties::ANTIBANDING)); + p.set(CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE)); + p.set(CameraParameters::KEY_FOCUS_MODE, mCameraProperties->get(CameraProperties::FOCUS_MODE)); + p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::EV_COMPENSATION)); + p.set(CameraParameters::KEY_SCENE_MODE, mCameraProperties->get(CameraProperties::SCENE_MODE)); + p.set(CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE)); + p.set(CameraParameters::KEY_ZOOM, mCameraProperties->get(CameraProperties::ZOOM)); + p.set(TICameraParameters::KEY_CONTRAST, mCameraProperties->get(CameraProperties::CONTRAST)); + p.set(TICameraParameters::KEY_SATURATION, mCameraProperties->get(CameraProperties::SATURATION)); + p.set(TICameraParameters::KEY_BRIGHTNESS, mCameraProperties->get(CameraProperties::BRIGHTNESS)); + p.set(TICameraParameters::KEY_SHARPNESS, mCameraProperties->get(CameraProperties::SHARPNESS)); + p.set(TICameraParameters::KEY_EXPOSURE_MODE, mCameraProperties->get(CameraProperties::EXPOSURE_MODE)); + p.set(TICameraParameters::KEY_ISO, mCameraProperties->get(CameraProperties::ISO_MODE)); + p.set(TICameraParameters::KEY_IPP, mCameraProperties->get(CameraProperties::IPP)); + p.set(TICameraParameters::KEY_GBCE, mCameraProperties->get(CameraProperties::GBCE)); + p.set(TICameraParameters::KEY_S3D2D_PREVIEW, mCameraProperties->get(CameraProperties::S3D2D_PREVIEW)); + p.set(TICameraParameters::KEY_AUTOCONVERGENCE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE)); + p.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, mCameraProperties->get(CameraProperties::MANUALCONVERGENCE_VALUES)); + p.set(CameraParameters::KEY_VIDEO_STABILIZATION, mCameraProperties->get(CameraProperties::VSTAB)); + p.set(CameraParameters::KEY_FOCAL_LENGTH, 
mCameraProperties->get(CameraProperties::FOCAL_LENGTH)); + p.set(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::HOR_ANGLE)); + p.set(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::VER_ANGLE)); + p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,mCameraProperties->get(CameraProperties::FRAMERATE_RANGE)); + p.set(TICameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION)); + p.set(TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION_VALUES)); + p.set(TICameraParameters::KEY_EXIF_MAKE, mCameraProperties->get(CameraProperties::EXIF_MAKE)); + p.set(TICameraParameters::KEY_EXIF_MODEL, mCameraProperties->get(CameraProperties::EXIF_MODEL)); + p.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_QUALITY)); + p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar"); + p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, mCameraProperties->get(CameraProperties::MAX_FD_HW_FACES)); + p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, mCameraProperties->get(CameraProperties::MAX_FD_SW_FACES)); + + // Only one area a.k.a Touch AF for now. + // TODO: Add support for multiple focus areas. + p.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, mCameraProperties->get(CameraProperties::MAX_FOCUS_AREAS)); + p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK)); + p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK)); + p.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, mCameraProperties->get(CameraProperties::MAX_NUM_METERING_AREAS)); + + LOG_FUNCTION_NAME_EXIT; +} + +/** + @brief Stop a previously started preview. 
+ @param none
+ @return none
+
+ */
+void CameraHal::forceStopPreview()
+{
+    LOG_FUNCTION_NAME;
+
+    // Teardown order matters: stop bracketing, mute the display sink, stop
+    // callbacks, then roll the adapter state machine back.
+
+    // stop bracketing if it is running
+    stopImageBracketing();
+
+    if(mDisplayAdapter.get() != NULL) {
+        ///Stop the buffer display first
+        mDisplayAdapter->disableDisplay();
+    }
+
+    if(mAppCallbackNotifier.get() != NULL) {
+        //Stop the callback sending
+        mAppCallbackNotifier->stop();
+        // Return any frames still held by the app layer before freeing them.
+        mAppCallbackNotifier->flushAndReturnFrames();
+        mAppCallbackNotifier->stopPreviewCallbacks();
+    }
+
+    if ( NULL != mCameraAdapter ) {
+        // only need to send these control commands to state machine if we are
+        // passed the LOADED_PREVIEW_STATE
+        if (mCameraAdapter->getState() > CameraAdapter::LOADED_PREVIEW_STATE) {
+           // according to javadoc...FD should be stopped in stopPreview
+           // and application needs to call startFaceDetection again
+           // to restart FD
+           mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_FD);
+        }
+
+        // Rolls the adapter back to INITIALIZED, releasing its buffers.
+        mCameraAdapter->rollbackToInitializedState();
+
+    }
+
+    // Safe to free preview buffers only after the adapter rollback above.
+    freePreviewBufs();
+    freePreviewDataBufs();
+
+    mPreviewEnabled = false;
+    mDisplayPaused = false;
+    mPreviewStartInProgress = false;
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+  @brief Deallocates memory for all the resources held by Camera HAL.
+
+   Frees the following objects- CameraAdapter, AppCallbackNotifier, DisplayAdapter,
+   and Memory Manager
+
+   @param none
+   @return none
+
+ */
+void CameraHal::deinitialize()
+{
+    LOG_FUNCTION_NAME;
+
+    if ( mPreviewEnabled || mDisplayPaused ) {
+        forceStopPreview();
+    }
+
+    mSetPreviewWindowCalled = false;
+
+    if (mSensorListener.get()) {
+        mSensorListener->disableSensor(SensorListener::SENSOR_ORIENTATION);
+        mSensorListener.clear();
+        mSensorListener = NULL;
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+}
+
+status_t CameraHal::storeMetaDataInBuffers(bool enable)
+{
+    LOG_FUNCTION_NAME;
+
+    status_t ret = mAppCallbackNotifier->useMetaDataBufferMode(enable); // capture result first;
+    LOG_FUNCTION_NAME_EXIT;                                            // previously this trace was unreachable
+    return ret;                                                        // (it sat after an early return)
+}
+
+void CameraHal::selectFPSRange(int framerate, int *min_fps, int *max_fps)
+{
+    char * ptr;
+    char supported[MAX_PROP_VALUE_LENGTH];
+    int fpsrangeArray[2];
+    int i = 0;
+
+    LOG_FUNCTION_NAME;
+    strncpy(supported, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED), sizeof(supported) - 1); // bound by the DESTINATION size; strlen(src)+1 could overflow 'supported'
+    supported[sizeof(supported) - 1] = '\0'; // strncpy does not terminate when the source is longer than the bound
+
+    ptr = strtok (supported," (,)");
+
+    while (ptr != NULL)
+    {
+        fpsrangeArray[i]= atoi(ptr)/CameraHal::VFR_SCALE;
+        if (i == 1)
+        {
+            if (framerate == fpsrangeArray[i])
+            {
+                CAMHAL_LOGDB("SETTING FPS RANGE min = %d max = %d \n", fpsrangeArray[0], fpsrangeArray[1]);
+                *min_fps = fpsrangeArray[0]*CameraHal::VFR_SCALE;
+                *max_fps = fpsrangeArray[1]*CameraHal::VFR_SCALE;
+                break;
+            }
+        }
+        ptr = strtok (NULL, " (,)");
+        i++;
+        i%=2;
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+}
+
+void CameraHal::setPreferredPreviewRes(int width, int height)
+{
+    LOG_FUNCTION_NAME;
+
+    if ( (width == 320) && (height == 240)){
+        mParameters.setPreviewSize(640,480);
+    }
+    if ( (width == 176) && (height == 144)){
+        mParameters.setPreviewSize(704,576);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+void CameraHal::resetPreviewRes(CameraParameters *mParams, int width, int height)
+{
+    LOG_FUNCTION_NAME;
+
+    if ( (width <= 320) && (height <= 240)){
+        mParams->setPreviewSize(mVideoWidth, mVideoHeight);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+}
+
+};
+
+
diff --git a/camera/CameraHalCommon.cpp b/camera/CameraHalCommon.cpp
new file mode 100644
index 0000000..7e81a09
--- /dev/null
+++ b/camera/CameraHalCommon.cpp
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "CameraHal.h"
+
+namespace android {
+
+const char CameraHal::PARAMS_DELIMITER []= ",";
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+struct timeval CameraHal::ppm_start;
+
+#endif
+
+#if PPM_INSTRUMENTATION
+
+/**
+   @brief PPM instrumentation
+
+   Dumps the current time offset. The time reference point
+   lies within the CameraHAL constructor.
+
+   @param str - log message
+   @return none
+
+ */
+void CameraHal::PPM(const char* str){
+    struct timeval ppm;
+
+    gettimeofday(&ppm, NULL);
+    ppm.tv_sec = ppm.tv_sec - ppm_start.tv_sec;
+    ppm.tv_sec = ppm.tv_sec * 1000000;
+    ppm.tv_sec = ppm.tv_sec + ppm.tv_usec - ppm_start.tv_usec;
+
+    ALOGD("PPM: %s :%ld.%ld ms", str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ));
+}
+
+#elif PPM_INSTRUMENTATION_ABS
+
+/**
+   @brief PPM instrumentation
+
+   Dumps the current time offset. The time reference point
+   lies within the CameraHAL constructor. This implementation
+   will also dump the absolute timestamp, which is useful when
+   post calculation is done with data coming from the upper
+   layers (Camera application etc.)
+
+   @param str - log message
+   @return none
+
+ */
+void CameraHal::PPM(const char* str){
+    struct timeval ppm;
+
+    unsigned long long elapsed, absolute;
+    gettimeofday(&ppm, NULL);
+    elapsed = ppm.tv_sec - ppm_start.tv_sec;
+    elapsed *= 1000000;
+    elapsed += ppm.tv_usec - ppm_start.tv_usec;
+    absolute = ppm.tv_sec;
+    absolute *= 1000;
+    absolute += ppm.tv_usec /1000;
+
+    ALOGD("PPM: %s :%llu.%llu ms : %llu ms", str, ( elapsed /1000 ), ( elapsed % 1000 ), absolute);
+}
+
+#endif
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+/**
+   @brief PPM instrumentation
+
+   Calculates and dumps the elapsed time using 'ppm_first' as
+   reference.
+
+   @param str - log message
+   @return none
+
+ */
+void CameraHal::PPM(const char* str, struct timeval* ppm_first, ...){
+    char temp_str[256];
+    struct timeval ppm;
+    unsigned long long absolute;
+    va_list args;
+
+    va_start(args, ppm_first);
+    vsnprintf(temp_str, sizeof(temp_str), str, args); /* bounded: vsprintf could overflow temp_str on a long expansion */
+    gettimeofday(&ppm, NULL);
+    absolute = ppm.tv_sec;
+    absolute *= 1000;
+    absolute += ppm.tv_usec /1000;
+    ppm.tv_sec = ppm.tv_sec - ppm_first->tv_sec;
+    ppm.tv_sec = ppm.tv_sec * 1000000;
+    ppm.tv_sec = ppm.tv_sec + ppm.tv_usec - ppm_first->tv_usec;
+
+    ALOGD("PPM: %s :%ld.%ld ms : %llu ms", temp_str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ), absolute);
+
+    va_end(args);
+}
+
+#endif
+
+};
+
+
diff --git a/camera/CameraHalUtilClasses.cpp b/camera/CameraHalUtilClasses.cpp
new file mode 100644
index 0000000..c74ca60
--- /dev/null
+++ b/camera/CameraHalUtilClasses.cpp
@@ -0,0 +1,362 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file CameraHalUtilClasses.cpp +* +* This file maps the CameraHardwareInterface to the Camera interfaces on OMAP4 (mainly OMX). +* +*/ + +#define LOG_TAG "CameraHAL" + + +#include "CameraHal.h" + +namespace android { + +/*--------------------FrameProvider Class STARTS here-----------------------------*/ + +int FrameProvider::enableFrameNotification(int32_t frameTypes) +{ + LOG_FUNCTION_NAME; + status_t ret = NO_ERROR; + + ///Enable the frame notification to CameraAdapter (which implements FrameNotifier interface) + mFrameNotifier->enableMsgType(frameTypes<disableMsgType(frameTypes<returnFrame(frameBuf, frameType); + + return ret; +} + +void FrameProvider::addFramePointers(void *frameBuf, void *buf) +{ + mFrameNotifier->addFramePointers(frameBuf, buf); + return; +} + +void FrameProvider::removeFramePointers() +{ + mFrameNotifier->removeFramePointers(); + return; +} + +/*--------------------FrameProvider Class ENDS here-----------------------------*/ + +/*--------------------EventProvider Class STARTS here-----------------------------*/ + +int EventProvider::enableEventNotification(int32_t frameTypes) +{ + LOG_FUNCTION_NAME; + status_t ret = NO_ERROR; + + ///Enable the frame notification to CameraAdapter (which implements FrameNotifier interface) + mEventNotifier->enableMsgType(frameTypes<disableMsgType(frameTypes< weight ) || ( CameraArea::WEIGHT_MAX < weight ) ) { + CAMHAL_LOGEB("Camera area weight is invalid %d", weight); + return -EINVAL; + } + + if ( ( CameraArea::TOP > top ) || ( CameraArea::BOTTOM < top ) ) { + 
CAMHAL_LOGEB("Camera area top coordinate is invalid %d", top ); + return -EINVAL; + } + + if ( ( CameraArea::TOP > bottom ) || ( CameraArea::BOTTOM < bottom ) ) { + CAMHAL_LOGEB("Camera area bottom coordinate is invalid %d", bottom ); + return -EINVAL; + } + + if ( ( CameraArea::LEFT > left ) || ( CameraArea::RIGHT < left ) ) { + CAMHAL_LOGEB("Camera area left coordinate is invalid %d", left ); + return -EINVAL; + } + + if ( ( CameraArea::LEFT > right ) || ( CameraArea::RIGHT < right ) ) { + CAMHAL_LOGEB("Camera area right coordinate is invalid %d", right ); + return -EINVAL; + } + + if ( left >= right ) { + CAMHAL_LOGEA("Camera area left larger than right"); + return -EINVAL; + } + + if ( top >= bottom ) { + CAMHAL_LOGEA("Camera area top larger than bottom"); + return -EINVAL; + } + + return NO_ERROR; +} + +status_t CameraArea::parseAreas(const char *area, + uint32_t areaLength, + Vector< sp > &areas) +{ + status_t ret = NO_ERROR; + char *ctx; + char *pArea = NULL; + char *pStart = NULL; + char *pEnd = NULL; + const char *startToken = "("; + const char endToken = ')'; + const char sep = ','; + int32_t top, left, bottom, right, weight; + char *tmpBuffer = NULL; + sp currentArea; + + LOG_FUNCTION_NAME + + if ( ( NULL == area ) || + ( 0 >= areaLength ) ) + { + return -EINVAL; + } + + tmpBuffer = ( char * ) malloc(areaLength); + if ( NULL == tmpBuffer ) + { + return -ENOMEM; + } + + memcpy(tmpBuffer, area, areaLength); + + pArea = strtok_r(tmpBuffer, startToken, &ctx); + + do + { + + pStart = pArea; + if ( NULL == pStart ) + { + CAMHAL_LOGEA("Parsing of the left area coordinate failed!"); + ret = -EINVAL; + break; + } + else + { + left = static_cast(strtol(pStart, &pEnd, 10)); + } + + if ( sep != *pEnd ) + { + CAMHAL_LOGEA("Parsing of the top area coordinate failed!"); + ret = -EINVAL; + break; + } + else + { + top = static_cast(strtol(pEnd+1, &pEnd, 10)); + } + + if ( sep != *pEnd ) + { + CAMHAL_LOGEA("Parsing of the right area coordinate failed!"); + ret = -EINVAL; 
+ break; + } + else + { + right = static_cast(strtol(pEnd+1, &pEnd, 10)); + } + + if ( sep != *pEnd ) + { + CAMHAL_LOGEA("Parsing of the bottom area coordinate failed!"); + ret = -EINVAL; + break; + } + else + { + bottom = static_cast(strtol(pEnd+1, &pEnd, 10)); + } + + if ( sep != *pEnd ) + { + CAMHAL_LOGEA("Parsing of the weight area coordinate failed!"); + ret = -EINVAL; + break; + } + else + { + weight = static_cast(strtol(pEnd+1, &pEnd, 10)); + } + + if ( endToken != *pEnd ) + { + CAMHAL_LOGEA("Malformed area!"); + ret = -EINVAL; + break; + } + + ret = checkArea(top, left, bottom, right, weight); + if ( NO_ERROR != ret ) { + break; + } + + currentArea = new CameraArea(top, left, bottom, right, weight); + CAMHAL_LOGDB("Area parsed [%dx%d, %dx%d] %d", + ( int ) top, + ( int ) left, + ( int ) bottom, + ( int ) right, + ( int ) weight); + if ( NULL != currentArea.get() ) + { + areas.add(currentArea); + } + else + { + ret = -ENOMEM; + break; + } + + pArea = strtok_r(NULL, startToken, &ctx); + + } + while ( NULL != pArea ); + + if ( NULL != tmpBuffer ) + { + free(tmpBuffer); + } + + LOG_FUNCTION_NAME_EXIT + + return ret; +} + +bool CameraArea::areAreasDifferent(Vector< sp > &area1, + Vector< sp > &area2) { + if (area1.size() != area2.size()) { + return true; + } + + // not going to care about sorting order for now + for (int i = 0; i < area1.size(); i++) { + if (!area1.itemAt(i)->compare(area2.itemAt(i))) { + return true; + } + } + + return false; +} + +bool CameraArea::compare(const sp &area) { + return ((mTop == area->mTop) && (mLeft == area->mLeft) && + (mBottom == area->mBottom) && (mRight == area->mRight) && + (mWeight == area->mWeight)); +} + + +/*--------------------CameraArea Class ENDS here-----------------------------*/ + +}; diff --git a/camera/CameraHal_Module.cpp b/camera/CameraHal_Module.cpp new file mode 100644 index 0000000..cb4e804 --- /dev/null +++ b/camera/CameraHal_Module.cpp @@ -0,0 +1,700 @@ +/* + * Copyright (C) Texas Instruments - 
http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file CameraHal.cpp +* +* This file maps the Camera Hardware Interface to V4L2. +* +*/ + +#define LOG_TAG "CameraHAL" + +#include + +#include "CameraHal.h" +#include "CameraProperties.h" +#include "TICameraParameters.h" + + +static android::CameraProperties gCameraProperties; +static android::CameraHal* gCameraHals[MAX_CAMERAS_SUPPORTED]; +static unsigned int gCamerasOpen = 0; +static android::Mutex gCameraHalDeviceLock; + +static int camera_device_open(const hw_module_t* module, const char* name, + hw_device_t** device); +static int camera_device_close(hw_device_t* device); +static int camera_get_number_of_cameras(void); +static int camera_get_camera_info(int camera_id, struct camera_info *info); + +static struct hw_module_methods_t camera_module_methods = { + open: camera_device_open +}; + +camera_module_t HAL_MODULE_INFO_SYM = { + common: { + tag: HARDWARE_MODULE_TAG, + version_major: 1, + version_minor: 0, + id: CAMERA_HARDWARE_MODULE_ID, + name: "TI OMAP CameraHal Module", + author: "TI", + methods: &camera_module_methods, + dso: NULL, /* remove compilation warnings */ + reserved: {0}, /* remove compilation warnings */ + }, + get_number_of_cameras: camera_get_number_of_cameras, + get_camera_info: camera_get_camera_info, +}; + +typedef struct ti_camera_device { + camera_device_t base; + /* TI specific "private" data can go here (base.priv) */ + int cameraid; +} 
ti_camera_device_t; + + +/******************************************************************* + * implementation of camera_device_ops functions + *******************************************************************/ + +int camera_set_preview_window(struct camera_device * device, + struct preview_stream_ops *window) +{ + int rv = -EINVAL; + ti_camera_device_t* ti_dev = NULL; + + ALOGV("%s", __FUNCTION__); + + if(!device) + return rv; + + ti_dev = (ti_camera_device_t*) device; + + rv = gCameraHals[ti_dev->cameraid]->setPreviewWindow(window); + + return rv; +} + +void camera_set_callbacks(struct camera_device * device, + camera_notify_callback notify_cb, + camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void *user) +{ + ti_camera_device_t* ti_dev = NULL; + + ALOGV("%s", __FUNCTION__); + + if(!device) + return; + + ti_dev = (ti_camera_device_t*) device; + + gCameraHals[ti_dev->cameraid]->setCallbacks(notify_cb, data_cb, data_cb_timestamp, get_memory, user); +} + +void camera_enable_msg_type(struct camera_device * device, int32_t msg_type) +{ + ti_camera_device_t* ti_dev = NULL; + + ALOGV("%s", __FUNCTION__); + + if(!device) + return; + + ti_dev = (ti_camera_device_t*) device; + + gCameraHals[ti_dev->cameraid]->enableMsgType(msg_type); +} + +void camera_disable_msg_type(struct camera_device * device, int32_t msg_type) +{ + ti_camera_device_t* ti_dev = NULL; + + ALOGV("%s", __FUNCTION__); + + if(!device) + return; + + ti_dev = (ti_camera_device_t*) device; + + gCameraHals[ti_dev->cameraid]->disableMsgType(msg_type); +} + +int camera_msg_type_enabled(struct camera_device * device, int32_t msg_type) +{ + ti_camera_device_t* ti_dev = NULL; + + ALOGV("%s", __FUNCTION__); + + if(!device) + return 0; + + ti_dev = (ti_camera_device_t*) device; + + return gCameraHals[ti_dev->cameraid]->msgTypeEnabled(msg_type); +} + +int camera_start_preview(struct camera_device * device) +{ + int rv = -EINVAL; + 
ti_camera_device_t* ti_dev = NULL; + + ALOGV("%s", __FUNCTION__); + + if(!device) + return rv; + + ti_dev = (ti_camera_device_t*) device; + + rv = gCameraHals[ti_dev->cameraid]->startPreview(); + + return rv; +} + +void camera_stop_preview(struct camera_device * device) +{ + ti_camera_device_t* ti_dev = NULL; + + ALOGV("%s", __FUNCTION__); + + if(!device) + return; + + ti_dev = (ti_camera_device_t*) device; + + gCameraHals[ti_dev->cameraid]->stopPreview(); +} + +int camera_preview_enabled(struct camera_device * device) +{ + int rv = -EINVAL; + ti_camera_device_t* ti_dev = NULL; + + ALOGV("%s", __FUNCTION__); + + if(!device) + return rv; + + ti_dev = (ti_camera_device_t*) device; + + rv = gCameraHals[ti_dev->cameraid]->previewEnabled(); + return rv; +} + +int camera_store_meta_data_in_buffers(struct camera_device * device, int enable) +{ + int rv = -EINVAL; + ti_camera_device_t* ti_dev = NULL; + + ALOGV("%s", __FUNCTION__); + + if(!device) + return rv; + + ti_dev = (ti_camera_device_t*) device; + + // TODO: meta data buffer not current supported + rv = gCameraHals[ti_dev->cameraid]->storeMetaDataInBuffers(enable); + return rv; + //return enable ? 
android::INVALID_OPERATION: android::OK;
+}
+
+/* The wrappers below all follow the same pattern: validate the device
+ * pointer, recover the TI device struct, and forward to the CameraHal
+ * instance registered for that camera id. */
+
+int camera_start_recording(struct camera_device * device)
+{
+    int rv = -EINVAL;
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return rv;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    rv = gCameraHals[ti_dev->cameraid]->startRecording();
+    return rv;
+}
+
+void camera_stop_recording(struct camera_device * device)
+{
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    gCameraHals[ti_dev->cameraid]->stopRecording();
+}
+
+int camera_recording_enabled(struct camera_device * device)
+{
+    int rv = -EINVAL;
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return rv;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    rv = gCameraHals[ti_dev->cameraid]->recordingEnabled();
+    return rv;
+}
+
+void camera_release_recording_frame(struct camera_device * device,
+                const void *opaque)
+{
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    gCameraHals[ti_dev->cameraid]->releaseRecordingFrame(opaque);
+}
+
+int camera_auto_focus(struct camera_device * device)
+{
+    int rv = -EINVAL;
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return rv;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    rv = gCameraHals[ti_dev->cameraid]->autoFocus();
+    return rv;
+}
+
+int camera_cancel_auto_focus(struct camera_device * device)
+{
+    int rv = -EINVAL;
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return rv;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    rv = gCameraHals[ti_dev->cameraid]->cancelAutoFocus();
+    return rv;
+}
+
+int camera_take_picture(struct camera_device * device)
+{
+    int rv = -EINVAL;
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return rv;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    rv = gCameraHals[ti_dev->cameraid]->takePicture();
+    return rv;
+}
+
+int camera_cancel_picture(struct camera_device * device)
+{
+    int rv = -EINVAL;
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return rv;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    rv = gCameraHals[ti_dev->cameraid]->cancelPicture();
+    return rv;
+}
+
+int camera_set_parameters(struct camera_device * device, const char *params)
+{
+    int rv = -EINVAL;
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return rv;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    rv = gCameraHals[ti_dev->cameraid]->setParameters(params);
+    return rv;
+}
+
+char* camera_get_parameters(struct camera_device * device)
+{
+    char* param = NULL;
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return NULL;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    param = gCameraHals[ti_dev->cameraid]->getParameters();
+
+    return param;
+}
+
+static void camera_put_parameters(struct camera_device *device, char *parms)
+{
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    gCameraHals[ti_dev->cameraid]->putParameters(parms);
+}
+
+int camera_send_command(struct camera_device * device,
+            int32_t cmd, int32_t arg1, int32_t arg2)
+{
+    int rv = -EINVAL;
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return rv;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    rv = gCameraHals[ti_dev->cameraid]->sendCommand(cmd, arg1, arg2);
+    return rv;
+}
+
+void camera_release(struct camera_device * device)
+{
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    if(!device)
+        return;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    gCameraHals[ti_dev->cameraid]->release();
+}
+
+int camera_dump(struct camera_device * device, int fd)
+{
+    int rv = -EINVAL;
+    ti_camera_device_t* ti_dev = NULL;
+
+    if(!device)
+        return rv;
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    rv = gCameraHals[ti_dev->cameraid]->dump(fd);
+    return rv;
+}
+
+extern "C" void heaptracker_free_leaked_memory(void);
+
+int camera_device_close(hw_device_t* device)
+{
+    int ret = 0;
+    ti_camera_device_t* ti_dev = NULL;
+
+    ALOGV("%s", __FUNCTION__);
+
+    android::Mutex::Autolock lock(gCameraHalDeviceLock);
+
+    if (!device) {
+        ret = -EINVAL;
+        goto done;
+    }
+
+    ti_dev = (ti_camera_device_t*) device;
+
+    if (ti_dev) {
+        if (gCameraHals[ti_dev->cameraid]) {
+            delete gCameraHals[ti_dev->cameraid];
+            gCameraHals[ti_dev->cameraid] = NULL;
+            gCamerasOpen--;
+        }
+
+        if (ti_dev->base.ops) {
+            free(ti_dev->base.ops);
+        }
+        free(ti_dev);
+    }
+done:
+#ifdef HEAPTRACKER
+    heaptracker_free_leaked_memory();
+#endif
+    return ret;
+}
+
+/*******************************************************************
+ * implementation of camera_module functions
+ *******************************************************************/
+
+/* open device handle to one of the cameras
+ *
+ * assume camera service will keep singleton of each camera
+ * so this function will always only be called once per camera instance
+ */
+
+int camera_device_open(const hw_module_t* module, const char* name,
+                hw_device_t** device)
+{
+    int rv = 0;
+    int num_cameras = 0;
+    int cameraid;
+    ti_camera_device_t* camera_device = NULL;
+    camera_device_ops_t* camera_ops = NULL;
+    android::CameraHal* camera = NULL;
+    android::CameraProperties::Properties* properties = NULL;
+
+    android::Mutex::Autolock lock(gCameraHalDeviceLock);
+
+    CAMHAL_LOGI("camera_device open");
+
+    if (name != NULL) {
+        cameraid = atoi(name);
+        num_cameras = gCameraProperties.camerasSupported();
+
+        if(cameraid >= num_cameras) // ids are zero-based; '>' was an off-by-one letting id==count through
+        {
+            ALOGE("camera service provided cameraid out of bounds, "
+                    "cameraid = %d, num supported = %d",
+                    cameraid, num_cameras);
+            rv = -EINVAL;
+            goto fail;
+        }
+
+        if(gCamerasOpen >= MAX_SIMUL_CAMERAS_SUPPORTED)
+        {
+            ALOGE("maximum number of cameras already open");
+            rv = -ENOMEM;
+            goto fail;
+        }
+
+        camera_device = (ti_camera_device_t*)malloc(sizeof(*camera_device));
+        if(!camera_device)
+        {
+            ALOGE("camera_device allocation fail");
+            rv = -ENOMEM;
+            goto fail;
+        }
+
+        camera_ops = (camera_device_ops_t*)malloc(sizeof(*camera_ops));
+        if(!camera_ops)
+        {
+            ALOGE("camera_ops allocation fail");
+            rv = -ENOMEM;
+            goto fail;
+        }
+
+        memset(camera_device, 0, sizeof(*camera_device));
+        memset(camera_ops, 0, sizeof(*camera_ops));
+
+        camera_device->base.common.tag = HARDWARE_DEVICE_TAG;
+        camera_device->base.common.version = 0;
+        camera_device->base.common.module = (hw_module_t *)(module);
+        camera_device->base.common.close = camera_device_close;
+        camera_device->base.ops = camera_ops;
+
+        camera_ops->set_preview_window = camera_set_preview_window;
+        camera_ops->set_callbacks = camera_set_callbacks;
+        camera_ops->enable_msg_type = camera_enable_msg_type;
+        camera_ops->disable_msg_type = camera_disable_msg_type;
+        camera_ops->msg_type_enabled = camera_msg_type_enabled;
+        camera_ops->start_preview = camera_start_preview;
+        camera_ops->stop_preview = camera_stop_preview;
+        camera_ops->preview_enabled = camera_preview_enabled;
+        camera_ops->store_meta_data_in_buffers = camera_store_meta_data_in_buffers;
+        camera_ops->start_recording = camera_start_recording;
+        camera_ops->stop_recording = camera_stop_recording;
+        camera_ops->recording_enabled = camera_recording_enabled;
+        camera_ops->release_recording_frame = camera_release_recording_frame;
+        camera_ops->auto_focus = camera_auto_focus;
+        camera_ops->cancel_auto_focus = camera_cancel_auto_focus;
+        camera_ops->take_picture = camera_take_picture;
+        camera_ops->cancel_picture = camera_cancel_picture;
+        camera_ops->set_parameters = camera_set_parameters;
+        camera_ops->get_parameters = camera_get_parameters;
+        camera_ops->put_parameters = camera_put_parameters;
+        camera_ops->send_command = camera_send_command;
+        camera_ops->release = camera_release;
+        camera_ops->dump = camera_dump;
+
+        *device = &camera_device->base.common;
+
+        // -------- TI specific stuff --------
+
+        camera_device->cameraid = cameraid;
+
+        if(gCameraProperties.getProperties(cameraid, &properties) < 0)
+        {
+            ALOGE("Couldn't get camera properties");
+            rv = -ENOMEM;
+            goto fail;
+        }
+
+        camera = new android::CameraHal(cameraid);
+
+        if(!camera)
+        {
+            ALOGE("Couldn't create instance of CameraHal class");
+            rv = -ENOMEM;
+            goto fail;
+        }
+
+        if(properties && (camera->initialize(properties) != android::NO_ERROR))
+        {
+            ALOGE("Couldn't initialize camera instance");
+            rv = -ENODEV;
+            goto fail;
+        }
+
+        gCameraHals[cameraid] = camera;
+        gCamerasOpen++;
+    }
+
+    return rv;
+
+fail:
+    if(camera_device) {
+        free(camera_device);
+        camera_device = NULL;
+    }
+    if(camera_ops) {
+        free(camera_ops);
+        camera_ops = NULL;
+    }
+    if(camera) {
+        delete camera;
+        camera = NULL;
+    }
+    *device = NULL;
+    return rv;
+}
+
+int camera_get_number_of_cameras(void)
+{
+    int num_cameras = MAX_CAMERAS_SUPPORTED;
+
+    // TODO(XXX): Ducati is not loaded yet when camera service gets here
+    // Lets revisit this later to see if we can somehow get this working
+#if 0
+    // this going to be the first call from camera service
+    // initialize camera properties here...
+    if(gCameraProperties.initialize() != android::NO_ERROR)
+    {
+        CAMHAL_LOGEA("Unable to create or initialize CameraProperties");
+        return NULL;
+    }
+
+    num_cameras = gCameraProperties.camerasSupported();
+#endif
+
+    return num_cameras;
+}
+
+int camera_get_camera_info(int camera_id, struct camera_info *info)
+{
+    int rv = 0;
+    int face_value = CAMERA_FACING_BACK;
+    int orientation = 0;
+    const char *valstr = NULL;
+    android::CameraProperties::Properties* properties = NULL;
+
+    // this going to be the first call from camera service
+    // initialize camera properties here...
+    if(gCameraProperties.initialize() != android::NO_ERROR)
+    {
+        CAMHAL_LOGEA("Unable to create or initialize CameraProperties");
+        return -EINVAL; // was 'return NULL' (== 0): reported success with 'info' left unfilled
+    }
+
+    //Get camera properties for camera index
+    if(gCameraProperties.getProperties(camera_id, &properties) < 0)
+    {
+        ALOGE("Couldn't get camera properties");
+        rv = -EINVAL;
+        goto end;
+    }
+
+    if(properties)
+    {
+        valstr = properties->get(android::CameraProperties::FACING_INDEX);
+        if(valstr != NULL)
+        {
+            if (strcmp(valstr, (const char *) android::TICameraParameters::FACING_FRONT) == 0)
+            {
+                face_value = CAMERA_FACING_FRONT;
+            }
+            else if (strcmp(valstr, (const char *) android::TICameraParameters::FACING_BACK) == 0)
+            {
+                face_value = CAMERA_FACING_BACK;
+            }
+        }
+
+        valstr = properties->get(android::CameraProperties::ORIENTATION_INDEX);
+        if(valstr != NULL)
+        {
+            orientation = atoi(valstr);
+        }
+    }
+    else
+    {
+        CAMHAL_LOGEB("getProperties() returned a NULL property set for Camera id %d", camera_id);
+    }
+
+    info->facing = face_value;
+    info->orientation = orientation;
+
+end:
+    return rv;
+}
+
+
+
+
+
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
new file mode 100644
index 0000000..527b7c2
--- /dev/null
+++ b/camera/CameraParameters.cpp
@@ -0,0 +1,192 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +/** +* @file CameraProperties.cpp +* +* This file maps the CameraHardwareInterface to the Camera interfaces on OMAP4 (mainly OMX). +* +*/ + +#include "CameraHal.h" +#include "CameraProperties.h" + +namespace android { + +const char CameraProperties::INVALID[]="prop-invalid-key"; +const char CameraProperties::CAMERA_NAME[]="prop-camera-name"; +const char CameraProperties::CAMERA_SENSOR_INDEX[]="prop-sensor-index"; +const char CameraProperties::ORIENTATION_INDEX[]="prop-orientation"; +const char CameraProperties::FACING_INDEX[]="prop-facing"; +const char CameraProperties::S3D_SUPPORTED[]="prop-s3d-supported"; +const char CameraProperties::SUPPORTED_PREVIEW_SIZES[] = "prop-preview-size-values"; +const char CameraProperties::SUPPORTED_PREVIEW_FORMATS[] = "prop-preview-format-values"; +const char CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES[] = "prop-preview-frame-rate-values"; +const char CameraProperties::SUPPORTED_PICTURE_SIZES[] = "prop-picture-size-values"; +const char CameraProperties::SUPPORTED_PICTURE_FORMATS[] = "prop-picture-format-values"; +const char CameraProperties::SUPPORTED_THUMBNAIL_SIZES[] = "prop-jpeg-thumbnail-size-values"; +const char CameraProperties::SUPPORTED_WHITE_BALANCE[] = "prop-whitebalance-values"; +const char CameraProperties::SUPPORTED_EFFECTS[] = "prop-effect-values"; +const char CameraProperties::SUPPORTED_ANTIBANDING[] = "prop-antibanding-values"; +const char CameraProperties::SUPPORTED_EXPOSURE_MODES[] = "prop-exposure-mode-values"; +const char CameraProperties::SUPPORTED_EV_MAX[] = "prop-ev-compensation-max"; +const char CameraProperties::SUPPORTED_EV_MIN[] = "prop-ev-compensation-min"; +const char CameraProperties::SUPPORTED_EV_STEP[] = "prop-ev-compensation-step"; +const char CameraProperties::SUPPORTED_ISO_VALUES[] = "prop-iso-mode-values"; +const char CameraProperties::SUPPORTED_SCENE_MODES[] = "prop-scene-mode-values"; +const char CameraProperties::SUPPORTED_FLASH_MODES[] = "prop-flash-mode-values"; +const char 
CameraProperties::SUPPORTED_FOCUS_MODES[] = "prop-focus-mode-values"; +const char CameraProperties::REQUIRED_PREVIEW_BUFS[] = "prop-required-preview-bufs"; +const char CameraProperties::REQUIRED_IMAGE_BUFS[] = "prop-required-image-bufs"; +const char CameraProperties::SUPPORTED_ZOOM_RATIOS[] = "prop-zoom-ratios"; +const char CameraProperties::SUPPORTED_ZOOM_STAGES[] = "prop-zoom-stages"; +const char CameraProperties::SUPPORTED_IPP_MODES[] = "prop-ipp-values"; +const char CameraProperties::SMOOTH_ZOOM_SUPPORTED[] = "prop-smooth-zoom-supported"; +const char CameraProperties::ZOOM_SUPPORTED[] = "prop-zoom-supported"; +const char CameraProperties::PREVIEW_SIZE[] = "prop-preview-size-default"; +const char CameraProperties::PREVIEW_FORMAT[] = "prop-preview-format-default"; +const char CameraProperties::PREVIEW_FRAME_RATE[] = "prop-preview-frame-rate-default"; +const char CameraProperties::ZOOM[] = "prop-zoom-default"; +const char CameraProperties::PICTURE_SIZE[] = "prop-picture-size-default"; +const char CameraProperties::PICTURE_FORMAT[] = "prop-picture-format-default"; +const char CameraProperties::JPEG_THUMBNAIL_SIZE[] = "prop-jpeg-thumbnail-size-default"; +const char CameraProperties::WHITEBALANCE[] = "prop-whitebalance-default"; +const char CameraProperties::EFFECT[] = "prop-effect-default"; +const char CameraProperties::ANTIBANDING[] = "prop-antibanding-default"; +const char CameraProperties::EXPOSURE_MODE[] = "prop-exposure-mode-default"; +const char CameraProperties::EV_COMPENSATION[] = "prop-ev-compensation-default"; +const char CameraProperties::ISO_MODE[] = "prop-iso-mode-default"; +const char CameraProperties::FOCUS_MODE[] = "prop-focus-mode-default"; +const char CameraProperties::SCENE_MODE[] = "prop-scene-mode-default"; +const char CameraProperties::FLASH_MODE[] = "prop-flash-mode-default"; +const char CameraProperties::JPEG_QUALITY[] = "prop-jpeg-quality-default"; +const char CameraProperties::CONTRAST[] = "prop-contrast-default"; +const char 
CameraProperties::BRIGHTNESS[] = "prop-brightness-default"; +const char CameraProperties::SATURATION[] = "prop-saturation-default"; +const char CameraProperties::SHARPNESS[] = "prop-sharpness-default"; +const char CameraProperties::IPP[] = "prop-ipp-default"; +const char CameraProperties::GBCE[] = "prop-gbce-default"; +const char CameraProperties::S3D2D_PREVIEW[] = "prop-s3d2d-preview"; +const char CameraProperties::S3D2D_PREVIEW_MODES[] = "prop-s3d2d-preview-values"; +const char CameraProperties::AUTOCONVERGENCE[] = "prop-auto-convergence"; +const char CameraProperties::AUTOCONVERGENCE_MODE[] = "prop-auto-convergence-mode"; +const char CameraProperties::MANUALCONVERGENCE_VALUES[] = "prop-manual-convergence-values"; +const char CameraProperties::VSTAB[] = "prop-vstab-default"; +const char CameraProperties::VSTAB_SUPPORTED[] = "prop-vstab-supported"; +const char CameraProperties::REVISION[] = "prop-revision"; +const char CameraProperties::FOCAL_LENGTH[] = "prop-focal-length"; +const char CameraProperties::HOR_ANGLE[] = "prop-horizontal-angle"; +const char CameraProperties::VER_ANGLE[] = "prop-vertical-angle"; +const char CameraProperties::FRAMERATE_RANGE[] = "prop-framerate-range-default"; +const char CameraProperties::FRAMERATE_RANGE_IMAGE[] = "prop-framerate-range-image-default"; +const char CameraProperties::FRAMERATE_RANGE_VIDEO[]="prop-framerate-range-video-default"; +const char CameraProperties::FRAMERATE_RANGE_SUPPORTED[]="prop-framerate-range-values"; +const char CameraProperties::SENSOR_ORIENTATION[]= "sensor-orientation"; +const char CameraProperties::SENSOR_ORIENTATION_VALUES[]= "sensor-orientation-values"; +const char CameraProperties::EXIF_MAKE[] = "prop-exif-make"; +const char CameraProperties::EXIF_MODEL[] = "prop-exif-model"; +const char CameraProperties::JPEG_THUMBNAIL_QUALITY[] = "prop-jpeg-thumbnail-quality-default"; +const char CameraProperties::MAX_FOCUS_AREAS[] = "prop-max-focus-areas"; +const char CameraProperties::MAX_FD_HW_FACES[] = 
"prop-max-fd-hw-faces"; +const char CameraProperties::MAX_FD_SW_FACES[] = "prop-max-fd-sw-faces"; +const char CameraProperties::AUTO_EXPOSURE_LOCK[] = "prop-auto-exposure-lock"; +const char CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED[] = "prop-auto-exposure-lock-supported"; +const char CameraProperties::AUTO_WHITEBALANCE_LOCK[] = "prop-auto-whitebalance-lock"; +const char CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED[] = "prop-auto-whitebalance-lock-supported"; +const char CameraProperties::MAX_NUM_METERING_AREAS[] = "prop-max-num-metering-areas"; +const char CameraProperties::METERING_AREAS[] = "prop-metering-areas"; +const char CameraProperties::VIDEO_SNAPSHOT_SUPPORTED[] = "prop-video-snapshot-supported"; +const char CameraProperties::VIDEO_SIZE[] = "video-size"; +const char CameraProperties::SUPPORTED_VIDEO_SIZES[] = "video-size-values"; +const char CameraProperties::PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "preferred-preview-size-for-video"; + + +const char CameraProperties::DEFAULT_VALUE[] = ""; + +const char CameraProperties::PARAMS_DELIMITER []= ","; + +// Returns the properties class for a specific Camera +// Each value is indexed by the CameraProperties::CameraPropertyIndex enum +int CameraProperties::getProperties(int cameraIndex, CameraProperties::Properties** properties) +{ + LOG_FUNCTION_NAME; + + if((unsigned int)cameraIndex >= mCamerasSupported) + { + LOG_FUNCTION_NAME_EXIT; + return -EINVAL; + } + + *properties = mCameraProps+cameraIndex; + + LOG_FUNCTION_NAME_EXIT; + return 0; +} + +ssize_t CameraProperties::Properties::set(const char *prop, const char *value) +{ + if(!prop) + return -EINVAL; + if(!value) + value = DEFAULT_VALUE; + + return mProperties->replaceValueFor(String8(prop), String8(value)); +} + +ssize_t CameraProperties::Properties::set(const char *prop, int value) +{ + char s_val[30]; + + sprintf(s_val, "%d", value); + + return set(prop, s_val); +} + +const char* CameraProperties::Properties::get(const char * prop) +{ + String8 
value = mProperties->valueFor(String8(prop)); + return value.string(); +} + +void CameraProperties::Properties::dump() +{ + for (size_t i = 0; i < mProperties->size(); i++) + { + CAMHAL_LOGDB("%s = %s\n", + mProperties->keyAt(i).string(), + mProperties->valueAt(i).string()); + } +} + +const char* CameraProperties::Properties::keyAt(unsigned int index) +{ + if(index < mProperties->size()) + { + return mProperties->keyAt(index).string(); + } + return NULL; +} + +const char* CameraProperties::Properties::valueAt(unsigned int index) +{ + if(index < mProperties->size()) + { + return mProperties->valueAt(index).string(); + } + return NULL; +} + +}; diff --git a/camera/CameraProperties.cpp b/camera/CameraProperties.cpp new file mode 100644 index 0000000..5d3ff20 --- /dev/null +++ b/camera/CameraProperties.cpp @@ -0,0 +1,129 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file CameraProperties.cpp +* +* This file maps the CameraHardwareInterface to the Camera interfaces on OMAP4 (mainly OMX). 
+* +*/ + +//#include "CameraHal.h" +#include + +#include "DebugUtils.h" +#include "CameraProperties.h" + +#define CAMERA_ROOT "CameraRoot" +#define CAMERA_INSTANCE "CameraInstance" + +namespace android { + +// lower entries have higher priority +static const char* g_camera_adapters[] = { +#ifdef OMAP4_SUPPORT_OMX_CAMERA_ADAPTER + "libomxcameraadapter.so", +#endif +#ifdef OMAP4_SUPPORT_USB_CAMERA_ADAPTER + "libusbcameraadapter.so" +#endif +}; + +/********************************************************* + CameraProperties - public function implemetation +**********************************************************/ + +CameraProperties::CameraProperties() : mCamerasSupported(0) +{ + LOG_FUNCTION_NAME; + + mCamerasSupported = 0; + mInitialized = 0; + + LOG_FUNCTION_NAME_EXIT; +} + +CameraProperties::~CameraProperties() +{ + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; +} + + +// Initializes the CameraProperties class +status_t CameraProperties::initialize() +{ + LOG_FUNCTION_NAME; + + status_t ret; + + Mutex::Autolock lock(mLock); + + if(mInitialized) + return NO_ERROR; + + ret = loadProperties(); + + mInitialized = 1; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array, + const unsigned int starting_camera, + const unsigned int max_camera); + +///Loads all the Camera related properties +status_t CameraProperties::loadProperties() +{ + LOG_FUNCTION_NAME; + + status_t ret = NO_ERROR; + + // adapter updates capabilities and we update camera count + mCamerasSupported = CameraAdapter_Capabilities(mCameraProps, mCamerasSupported, MAX_CAMERAS_SUPPORTED); + + if((int)mCamerasSupported < 0) { + ALOGE("error while getting capabilities"); + ret = UNKNOWN_ERROR; + } else if (mCamerasSupported > MAX_CAMERAS_SUPPORTED) { + ALOGE("returned too many adapaters"); + ret = UNKNOWN_ERROR; + } else { + ALOGE("num_cameras = %d", mCamerasSupported); + + for (unsigned int i = 0; i < mCamerasSupported; 
i++) { + mCameraProps[i].set(CAMERA_SENSOR_INDEX, i); + mCameraProps[i].dump(); + } + } + + ALOGV("mCamerasSupported = %d", mCamerasSupported); + LOG_FUNCTION_NAME_EXIT; + return ret; +} + +// Returns the number of Cameras found +int CameraProperties::camerasSupported() +{ + LOG_FUNCTION_NAME; + return mCamerasSupported; +} + +}; diff --git a/camera/Encoder_libjpeg.cpp b/camera/Encoder_libjpeg.cpp new file mode 100644 index 0000000..6e99a25 --- /dev/null +++ b/camera/Encoder_libjpeg.cpp @@ -0,0 +1,472 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file Encoder_libjpeg.cpp +* +* This file encodes a YUV422I buffer to a jpeg +* TODO(XXX): Need to support formats other than yuv422i +* Change interface to pre/post-proc algo framework +* +*/ + +#define LOG_TAG "CameraHAL" + +#include "CameraHal.h" +#include "Encoder_libjpeg.h" +#include "NV12_resize.h" + +#include +#include +#include +#include +#include +#include +#include +#include + +extern "C" { + #include "jpeglib.h" + #include "jerror.h" +} + +#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0])) +#define MIN(x,y) ((x < y) ? 
x : y)

namespace android {
// Maps an integer rotation to the EXIF orientation tag value (string form).
struct integer_string_pair {
    unsigned int integer;
    const char* string;
};

// (sic: "degress" typo is in the identifier; not renamed here.)
static integer_string_pair degress_to_exif_lut [] = {
    // degrees, exif_orientation
    {0,   "1"},
    {90,  "6"},
    {180, "3"},
    {270, "8"},
};

// In-memory libjpeg destination: writes compressed output straight into a
// caller-supplied buffer instead of a stdio stream.
struct libjpeg_destination_mgr : jpeg_destination_mgr {
    libjpeg_destination_mgr(uint8_t* input, int size);

    uint8_t* buf;       // caller-owned output buffer
    int bufsize;        // capacity of buf
    size_t jpegsize;    // bytes actually produced (set by term_destination)
};

// libjpeg callback: (re)points the destination at the start of our buffer.
static void libjpeg_init_destination (j_compress_ptr cinfo) {
    libjpeg_destination_mgr* dest = (libjpeg_destination_mgr*)cinfo->dest;

    dest->next_output_byte = dest->buf;
    dest->free_in_buffer = dest->bufsize;
    dest->jpegsize = 0;
}

// libjpeg callback invoked when the buffer fills. Rewinding to the start
// silently discards earlier output -- output larger than bufsize is
// corrupted rather than failed (the "// ?" below is the original author's
// own doubt about this).
static boolean libjpeg_empty_output_buffer(j_compress_ptr cinfo) {
    libjpeg_destination_mgr* dest = (libjpeg_destination_mgr*)cinfo->dest;

    dest->next_output_byte = dest->buf;
    dest->free_in_buffer = dest->bufsize;
    return TRUE; // ?
}

// libjpeg callback at compression end: records final byte count.
static void libjpeg_term_destination (j_compress_ptr cinfo) {
    libjpeg_destination_mgr* dest = (libjpeg_destination_mgr*)cinfo->dest;
    dest->jpegsize = dest->bufsize - dest->free_in_buffer;
}

libjpeg_destination_mgr::libjpeg_destination_mgr(uint8_t* input, int size) {
    this->init_destination = libjpeg_init_destination;
    this->empty_output_buffer = libjpeg_empty_output_buffer;
    this->term_destination = libjpeg_term_destination;

    this->buf = input;
    this->bufsize = size;

    jpegsize = 0;
}

/* private static functions */
// Expands one row of NV21 (interleaved VU at half horizontal resolution)
// into packed per-pixel Y,U,V triplets. uv advances every second pixel.
static void nv21_to_yuv(uint8_t* dst, uint8_t* y, uint8_t* uv, int width) {
    if (!dst || !y || !uv) {
        return;
    }

    while ((width--) > 0) {
        uint8_t y0 = y[0];
        uint8_t v0 = uv[0];      // NV21 stores V first...
        uint8_t u0 = *(uv+1);    // ...then U
        dst[0] = y0;
        dst[1] = u0;
        dst[2] = v0;
        dst += 3;
        y++;
        if(!(width % 2)) uv+=2;
    }
}

// Expands one row of UYVY (one uint32 = U0 Y0 V0 Y1, two pixels) into packed
// Y,U,V triplets. Widths that are a multiple of 16 take the NEON fast path;
// other even widths use the scalar loop. Odd widths are rejected.
static void uyvy_to_yuv(uint8_t* dst, uint32_t* src, int width) {
    if (!dst || !src) {
        return;
    }

    if (width % 2) {
        return; // not supporting odd widths
    }

    // currently, neon routine only supports multiple of 16 width
    if (width % 16) {
        // scalar path: unpack two pixels per 32-bit word, duplicating the
        // shared chroma pair for both pixels
        while ((width-=2) >= 0) {
            uint8_t u0 = (src[0] >> 0) & 0xFF;
            uint8_t y0 = (src[0] >> 8) & 0xFF;
            uint8_t v0 = (src[0] >> 16) & 0xFF;
            uint8_t y1 = (src[0] >> 24) & 0xFF;
            dst[0] = y0;
            dst[1] = u0;
            dst[2] = v0;
            dst[3] = y1;
            dst[4] = u0;
            dst[5] = v0;
            dst += 6;
            src++;
        }
    } else {
        // NEON path: 16 pixels per iteration; deinterleaves UV, duplicates
        // each chroma sample for its pixel pair, and stores interleaved YUV
        // with vst3. Do not reorder -- register roles are fixed by hand.
        int n = width;
        asm volatile (
        "   pld [%[src], %[src_stride], lsl #2]                         \n\t"
        "   cmp %[n], #16                                               \n\t"
        "   blt 5f                                                      \n\t"
        "0: @ 16 pixel swap                                             \n\t"
        "   vld2.8  {q0, q1} , [%[src]]! @ q0 = uv q1 = y               \n\t"
        "   vuzp.8 q0, q2                @ d1 = u d5 = v                \n\t"
        "   vmov d1, d0                  @ q0 = u0u1u2..u0u1u2...       \n\t"
        "   vmov d5, d4                  @ q2 = v0v1v2..v0v1v2...       \n\t"
        "   vzip.8 d0, d1                @ q0 = u0u0u1u1u2u2...         \n\t"
        "   vzip.8 d4, d5                @ q2 = v0v0v1v1v2v2...         \n\t"
        "   vswp q0, q1                  @ now q0 = y q1 = u q2 = v     \n\t"
        "   vst3.8  {d0,d2,d4},[%[dst]]!                                \n\t"
        "   vst3.8  {d1,d3,d5},[%[dst]]!                                \n\t"
        "   sub %[n], %[n], #16                                         \n\t"
        "   cmp %[n], #16                                               \n\t"
        "   bge 0b                                                      \n\t"
        "5: @ end                                                       \n\t"
#ifdef NEEDS_ARM_ERRATA_754319_754320
        "   vmov s0,s0 @ add noop for errata item                       \n\t"
#endif
        : [dst] "+r" (dst), [src] "+r" (src), [n] "+r" (n)
        : [src_stride] "r" (width)
        : "cc", "memory", "q0", "q1", "q2"
        );
    }
}

// Scales an NV12 frame from params->in_* to params->out_* dimensions into
// dst_buffer, using the NV12_resize.c bilinear resizer. Assumes the chroma
// plane immediately follows the luma plane (stride == width).
static void resize_nv12(Encoder_libjpeg::params* params, uint8_t* dst_buffer) {
    structConvImage o_img_ptr, i_img_ptr;

    if (!params || !dst_buffer) {
        return;
    }

    //input
    i_img_ptr.uWidth =  params->in_width;
    i_img_ptr.uStride =  i_img_ptr.uWidth;
    i_img_ptr.uHeight =  params->in_height;
    i_img_ptr.eFormat = IC_FORMAT_YCbCr420_lp;
    i_img_ptr.imgPtr = (uint8_t*) params->src;
    i_img_ptr.clrPtr = i_img_ptr.imgPtr + (i_img_ptr.uWidth * i_img_ptr.uHeight);
    i_img_ptr.uOffset = 0;

    //ouput
    o_img_ptr.uWidth = params->out_width;
    o_img_ptr.uStride = o_img_ptr.uWidth;
    o_img_ptr.uHeight = params->out_height;
    o_img_ptr.eFormat = IC_FORMAT_YCbCr420_lp;
    o_img_ptr.imgPtr = dst_buffer;
    o_img_ptr.clrPtr = o_img_ptr.imgPtr + (o_img_ptr.uWidth * o_img_ptr.uHeight);
o_img_ptr.uOffset = 0; + + VT_resizeFrame_Video_opt2_lp(&i_img_ptr, &o_img_ptr, NULL, 0); +} + +/* public static functions */ +const char* ExifElementsTable::degreesToExifOrientation(unsigned int degrees) { + for (unsigned int i = 0; i < ARRAY_SIZE(degress_to_exif_lut); i++) { + if (degrees == degress_to_exif_lut[i].integer) { + return degress_to_exif_lut[i].string; + } + } + return NULL; +} + +void ExifElementsTable::stringToRational(const char* str, unsigned int* num, unsigned int* den) { + int len; + char * tempVal = NULL; + + if (str != NULL) { + len = strlen(str); + tempVal = (char*) malloc( sizeof(char) * (len + 1)); + } + + if (tempVal != NULL) { + // convert the decimal string into a rational + size_t den_len; + char *ctx; + unsigned int numerator = 0; + unsigned int denominator = 0; + char* temp = NULL; + + memset(tempVal, '\0', len + 1); + strncpy(tempVal, str, len); + temp = strtok_r(tempVal, ".", &ctx); + + if (temp != NULL) + numerator = atoi(temp); + + if (!numerator) + numerator = 1; + + temp = strtok_r(NULL, ".", &ctx); + if (temp != NULL) { + den_len = strlen(temp); + if(HUGE_VAL == den_len ) { + den_len = 0; + } + + denominator = static_cast(pow(10, den_len)); + numerator = numerator * denominator + atoi(temp); + } else { + denominator = 1; + } + + free(tempVal); + + *num = numerator; + *den = denominator; + } +} + +bool ExifElementsTable::isAsciiTag(const char* tag) { + // TODO(XXX): Add tags as necessary + return (strcmp(tag, TAG_GPS_PROCESSING_METHOD) == 0); +} + +void ExifElementsTable::insertExifToJpeg(unsigned char* jpeg, size_t jpeg_size) { + ReadMode_t read_mode = (ReadMode_t)(READ_METADATA | READ_IMAGE); + + ResetJpgfile(); + if (ReadJpegSectionsFromBuffer(jpeg, jpeg_size, read_mode)) { + jpeg_opened = true; + create_EXIF(table, exif_tag_count, gps_tag_count, has_datetime_tag); + } +} + +status_t ExifElementsTable::insertExifThumbnailImage(const char* thumb, int len) { + status_t ret = NO_ERROR; + + if ((len > 0) && jpeg_opened) { + ret = 
ReplaceThumbnailFromBuffer(thumb, len) ? NO_ERROR : UNKNOWN_ERROR; + CAMHAL_LOGDB("insertExifThumbnailImage. ReplaceThumbnail(). ret=%d", ret); + } + + return ret; +} + +void ExifElementsTable::saveJpeg(unsigned char* jpeg, size_t jpeg_size) { + if (jpeg_opened) { + WriteJpegToBuffer(jpeg, jpeg_size); + DiscardData(); + jpeg_opened = false; + } +} + +/* public functions */ +ExifElementsTable::~ExifElementsTable() { + int num_elements = gps_tag_count + exif_tag_count; + + for (int i = 0; i < num_elements; i++) { + if (table[i].Value) { + free(table[i].Value); + } + } + + if (jpeg_opened) { + DiscardData(); + } +} + +status_t ExifElementsTable::insertElement(const char* tag, const char* value) { + unsigned int value_length = 0; + status_t ret = NO_ERROR; + + if (!value || !tag) { + return -EINVAL; + } + + if (position >= MAX_EXIF_TAGS_SUPPORTED) { + CAMHAL_LOGEA("Max number of EXIF elements already inserted"); + return NO_MEMORY; + } + + if (isAsciiTag(tag)) { + value_length = sizeof(ExifAsciiPrefix) + strlen(value + sizeof(ExifAsciiPrefix)); + } else { + value_length = strlen(value); + } + + if (IsGpsTag(tag)) { + table[position].GpsTag = TRUE; + table[position].Tag = GpsTagNameToValue(tag); + gps_tag_count++; + } else { + table[position].GpsTag = FALSE; + table[position].Tag = TagNameToValue(tag); + exif_tag_count++; + + if (strcmp(tag, TAG_DATETIME) == 0) { + has_datetime_tag = true; + } + } + + table[position].DataLength = 0; + table[position].Value = (char*) malloc(sizeof(char) * (value_length + 1)); + + if (table[position].Value) { + memcpy(table[position].Value, value, value_length + 1); + table[position].DataLength = value_length + 1; + } + + position++; + return ret; +} + +/* private member functions */ +size_t Encoder_libjpeg::encode(params* input) { + jpeg_compress_struct cinfo; + jpeg_error_mgr jerr; + jpeg_destination_mgr jdest; + uint8_t* src = NULL, *resize_src = NULL; + uint8_t* row_tmp = NULL; + uint8_t* row_src = NULL; + uint8_t* row_uv = NULL; // 
used only for NV12 + int out_width = 0, in_width = 0; + int out_height = 0, in_height = 0; + int bpp = 2; // for uyvy + int right_crop = 0, start_offset = 0; + + if (!input) { + return 0; + } + + out_width = input->out_width; + in_width = input->in_width; + out_height = input->out_height; + in_height = input->in_height; + right_crop = input->right_crop; + start_offset = input->start_offset; + src = input->src; + input->jpeg_size = 0; + + libjpeg_destination_mgr dest_mgr(input->dst, input->dst_size); + + // param check... + if ((in_width < 2) || (out_width < 2) || (in_height < 2) || (out_height < 2) || + (src == NULL) || (input->dst == NULL) || (input->quality < 1) || (input->src_size < 1) || + (input->dst_size < 1) || (input->format == NULL)) { + goto exit; + } + + if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) { + bpp = 1; + if ((in_width != out_width) || (in_height != out_height)) { + resize_src = (uint8_t*) malloc(input->dst_size); + resize_nv12(input, resize_src); + if (resize_src) src = resize_src; + } + } else if ((in_width != out_width) || (in_height != out_height)) { + CAMHAL_LOGEB("Encoder: resizing is not supported for this format: %s", input->format); + goto exit; + } else if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV422I)) { + // we currently only support yuv422i and yuv420sp + CAMHAL_LOGEB("Encoder: format not supported: %s", input->format); + goto exit; + } + + cinfo.err = jpeg_std_error(&jerr); + + jpeg_create_compress(&cinfo); + + CAMHAL_LOGDB("encoding... 
\n\t" + "width: %d \n\t" + "height:%d \n\t" + "dest %p \n\t" + "dest size:%d \n\t" + "mSrc %p", + out_width, out_height, input->dst, + input->dst_size, src); + + cinfo.dest = &dest_mgr; + cinfo.image_width = out_width - right_crop; + cinfo.image_height = out_height; + cinfo.input_components = 3; + cinfo.in_color_space = JCS_YCbCr; + cinfo.input_gamma = 1; + + jpeg_set_defaults(&cinfo); + jpeg_set_quality(&cinfo, input->quality, TRUE); + cinfo.dct_method = JDCT_IFAST; + + jpeg_start_compress(&cinfo, TRUE); + + row_tmp = (uint8_t*)malloc((out_width - right_crop) * 3); + row_src = src + start_offset; + row_uv = src + out_width * out_height * bpp; + + while ((cinfo.next_scanline < cinfo.image_height) && !mCancelEncoding) { + JSAMPROW row[1]; /* pointer to JSAMPLE row[s] */ + + // convert input yuv format to yuv444 + if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) { + nv21_to_yuv(row_tmp, row_src, row_uv, out_width - right_crop); + } else { + uyvy_to_yuv(row_tmp, (uint32_t*)row_src, out_width - right_crop); + } + + row[0] = row_tmp; + jpeg_write_scanlines(&cinfo, row, 1); + row_src = row_src + out_width*bpp; + + // move uv row if input format needs it + if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) { + if (!(cinfo.next_scanline % 2)) + row_uv = row_uv + out_width * bpp; + } + } + + // no need to finish encoding routine if we are prematurely stopping + // we will end up crashing in dest_mgr since data is incomplete + if (!mCancelEncoding) + jpeg_finish_compress(&cinfo); + jpeg_destroy_compress(&cinfo); + + if (resize_src) free(resize_src); + if (row_tmp) free(row_tmp); + + exit: + input->jpeg_size = dest_mgr.jpegsize; + return dest_mgr.jpegsize; +} + +} // namespace android diff --git a/camera/MemoryManager.cpp b/camera/MemoryManager.cpp new file mode 100644 index 0000000..e3b642c --- /dev/null +++ b/camera/MemoryManager.cpp @@ -0,0 +1,228 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed 
under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */



#define LOG_TAG "CameraHAL"


#include "CameraHal.h"
#include "TICameraParameters.h"

extern "C" {

// NOTE(review): include target lost in patch transcription (likely
// <ion/ion.h> or the TI ion header) -- restore before applying. TODO confirm.
#include

//#include
//#include


};

namespace android {

///@todo Move these constants to a common header file, preferably in tiler.h
#define STRIDE_8BIT (4 * 1024)
#define STRIDE_16BIT (4 * 1024)

#define ALLOCATION_2D 2

///Utility Macro Declarations

/*--------------------MemoryManager Class STARTS here-----------------------------*/
// Allocates numBufs ion carveout buffers of 'bytes' each, mmaps them into
// this process, and returns a NULL-terminated array of the mapped addresses
// (caller frees with freeBuffer). width/height/format are currently unused
// (2D tiler allocation is not implemented). Returns NULL on failure after
// notifying mErrorNotifier.
void* MemoryManager::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs)
{
    LOG_FUNCTION_NAME;

    // Lazily open the ion device on first allocation.
    if(mIonFd < 0)
        {
        mIonFd = ion_open();
        if(mIonFd < 0)
            {
            CAMHAL_LOGEA("ion_open failed!!!");
            return NULL;
            }
        }

    ///We allocate numBufs+1 because the last entry will be marked NULL to indicate end of array, which is used when freeing
    ///the buffers
    const uint numArrayEntriesC = (uint)(numBufs+1);

    ///Allocate a buffer array
    uint32_t *bufsArr = new uint32_t [numArrayEntriesC];
    if(!bufsArr)
        {
        CAMHAL_LOGEB("Allocation failed when creating buffers array of %d uint32_t elements", numArrayEntriesC);
        goto error;
        }

    ///Initialize the array with zeros - this will help us while freeing the array in case of error
    ///If a value of an array element is NULL, it means we didnt allocate it
    memset(bufsArr, 0, sizeof(*bufsArr) * numArrayEntriesC);

    //2D Allocations are not supported currently
    if(bytes != 0)
        {
        struct ion_handle *handle;
        int mmap_fd;

        ///1D buffers
        for (int i = 0; i < numBufs; i++)
            {
            int ret = ion_alloc(mIonFd, bytes, 0, 1 << ION_HEAP_TYPE_CARVEOUT, &handle);
            if(ret < 0)
                {
                CAMHAL_LOGEB("ion_alloc resulted in error %d", ret);
                goto error;
                }

            // NOTE(review): %x with a pointer argument relies on 32-bit
            // pointers; would be %p on a 64-bit build.
            CAMHAL_LOGDB("Before mapping, handle = %x, nSize = %d", handle, bytes);
            if ((ret = ion_map(mIonFd, handle, bytes, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
                          (unsigned char**)&bufsArr[i], &mmap_fd)) < 0)
                {
                CAMHAL_LOGEB("Userspace mapping of ION buffers returned error %d", ret);
                ion_free(mIonFd, handle);
                goto error;
                }

            // Track handle, map fd and length per mapped address so
            // freeBuffer can unmap/close/free symmetrically.
            mIonHandleMap.add(bufsArr[i], (unsigned int)handle);
            mIonFdMap.add(bufsArr[i], (unsigned int) mmap_fd);
            mIonBufLength.add(bufsArr[i], (unsigned int) bytes);
            }

        }
    else // If bytes is not zero, then it is a 2-D tiler buffer request
        {
        }

    LOG_FUNCTION_NAME_EXIT;

    return (void*)bufsArr;

error:
    ALOGE("Freeing buffers already allocated after error occurred");
    if(bufsArr)
        freeBuffer(bufsArr);

    if ( NULL != mErrorNotifier.get() )
        {
        mErrorNotifier->errorNotify(-ENOMEM);
        }

    if (mIonFd >= 0)
        {
        ion_close(mIonFd);
        mIonFd = -1;
        }

    LOG_FUNCTION_NAME_EXIT;
    return NULL;
}

//TODO: Get needed data to map tiler buffers
//Return dummy data for now
uint32_t * MemoryManager::getOffsets()
{
    LOG_FUNCTION_NAME;

    LOG_FUNCTION_NAME_EXIT;

    return NULL;
}

// Dummy: tiler fd export is not implemented; always returns -1.
int MemoryManager::getFd()
{
    LOG_FUNCTION_NAME;

    LOG_FUNCTION_NAME_EXIT;

    return -1;
}

// Releases a buffer array returned by allocateBuffer: for each non-NULL
// entry, unmaps the memory, closes the map fd, frees the ion handle and
// drops the bookkeeping entries; then deletes the array itself. Closes the
// ion device once no tracked buffers remain.
int MemoryManager::freeBuffer(void* buf)
{
    status_t ret = NO_ERROR;
    LOG_FUNCTION_NAME;

    uint32_t *bufEntry = (uint32_t*)buf;

    if(!bufEntry)
        {
        CAMHAL_LOGEA("NULL pointer passed to freebuffer");
        LOG_FUNCTION_NAME_EXIT;
        return BAD_VALUE;
        }

    // The array is NULL-terminated by construction in allocateBuffer.
    while(*bufEntry)
        {
        unsigned int ptr = (unsigned int) *bufEntry++;
        if(mIonBufLength.valueFor(ptr))
            {
            munmap((void *)ptr, mIonBufLength.valueFor(ptr));
            close(mIonFdMap.valueFor(ptr));
            ion_free(mIonFd, (ion_handle*)mIonHandleMap.valueFor(ptr));
            mIonHandleMap.removeItem(ptr);
            mIonBufLength.removeItem(ptr);
            mIonFdMap.removeItem(ptr);
            }
        else
            {
            CAMHAL_LOGEA("Not a valid Memory Manager buffer");
            }
        }

    ///@todo Check if this way of deleting array is correct, else use malloc/free
    uint32_t * bufArr = (uint32_t*)buf;
    delete [] bufArr;

    if(mIonBufLength.size() == 0)
        {
        if(mIonFd >= 0)
            {
            ion_close(mIonFd);
            mIonFd = -1;
            }
        }
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}

// Registers the sink notified (with -ENOMEM) when allocation fails.
// Rejects a NULL notifier with -EINVAL.
status_t MemoryManager::setErrorHandler(ErrorNotifier *errorNotifier)
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( NULL == errorNotifier )
        {
        CAMHAL_LOGEA("Invalid Error Notifier reference");
        ret = -EINVAL;
        }

    if ( NO_ERROR == ret )
        {
        mErrorNotifier = errorNotifier;
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}

};


/*--------------------MemoryManager Class ENDS here-----------------------------*/
diff --git a/camera/NV12_resize.c b/camera/NV12_resize.c
new file mode 100644
index 0000000..7f92fb2
--- /dev/null
+++ b/camera/NV12_resize.c
@@ -0,0 +1,307 @@
#include "NV12_resize.h"

//#define LOG_NDEBUG 0
#define LOG_NIDEBUG 0
#define LOG_NDDEBUG 0

#define LOG_TAG "NV12_resize"
#define STRIDE 4096
// NOTE(review): include target lost in patch transcription (likely
// <utils/Log.h>) -- restore before applying. TODO confirm.
#include

/*==========================================================================
* Function Name  : VT_resizeFrame_Video_opt2_lp
*
* Description    : Resize a yuv frame.
*
* Input(s)       : input_img_ptr        -> Input Image Structure
*                : output_img_ptr       -> Output Image Structure
*                : cropout              -> crop structure
*
* Value Returned : mmBool               -> FALSE on error TRUE on success
* NOTE:
*            Not tested for crop functionality.
*            faster version.
+============================================================================*/ +mmBool +VT_resizeFrame_Video_opt2_lp +( + structConvImage* i_img_ptr, /* Points to the input image */ + structConvImage* o_img_ptr, /* Points to the output image */ + IC_rect_type* cropout, /* how much to resize to in final image */ + mmUint16 dummy /* Transparent pixel value */ + ) +{ + ALOGV("VT_resizeFrame_Video_opt2_lp+"); + + mmUint16 row,col; + mmUint32 resizeFactorX; + mmUint32 resizeFactorY; + + + mmUint16 x, y; + + mmUchar* ptr8; + mmUchar *ptr8Cb, *ptr8Cr; + + + mmUint16 xf, yf; + mmUchar* inImgPtrY; + mmUchar* inImgPtrU; + mmUchar* inImgPtrV; + mmUint32 cox, coy, codx, cody; + mmUint16 idx,idy, idxC; + + if(i_img_ptr->uWidth == o_img_ptr->uWidth) + { + if(i_img_ptr->uHeight == o_img_ptr->uHeight) + { + ALOGV("************************f(i_img_ptr->uHeight == o_img_ptr->uHeight) are same *********************\n"); + ALOGV("************************(i_img_ptr->width == %d" , i_img_ptr->uWidth ); + ALOGV("************************(i_img_ptr->uHeight == %d" , i_img_ptr->uHeight ); + ALOGV("************************(o_img_ptr->width == %d" ,o_img_ptr->uWidth ); + ALOGV("************************(o_img_ptr->uHeight == %d" , o_img_ptr->uHeight ); + } + } + + if (!i_img_ptr || !i_img_ptr->imgPtr || + !o_img_ptr || !o_img_ptr->imgPtr) + { + ALOGE("Image Point NULL"); + ALOGV("VT_resizeFrame_Video_opt2_lp-"); + return FALSE; + } + + inImgPtrY = (mmUchar *) i_img_ptr->imgPtr + i_img_ptr->uOffset; + inImgPtrU = (mmUchar *) i_img_ptr->clrPtr + i_img_ptr->uOffset/2; + inImgPtrV = (mmUchar*)inImgPtrU + 1; + + if (cropout == NULL) + { + cox = 0; + coy = 0; + codx = o_img_ptr->uWidth; + cody = o_img_ptr->uHeight; + } + else + { + cox = cropout->x; + coy = cropout->y; + codx = cropout->uWidth; + cody = cropout->uHeight; + } + idx = i_img_ptr->uWidth; + idy = i_img_ptr->uHeight; + + /* make sure valid input size */ + if (idx < 1 || idy < 1 || i_img_ptr->uStride < 1) + { + ALOGE("idx or idy less then 
1 idx = %d idy = %d stride = %d", idx, idy, i_img_ptr->uStride); + ALOGV("VT_resizeFrame_Video_opt2_lp-"); + return FALSE; + } + + resizeFactorX = ((idx-1)<<9) / codx; + resizeFactorY = ((idy-1)<<9) / cody; + + if(i_img_ptr->eFormat == IC_FORMAT_YCbCr420_lp && + o_img_ptr->eFormat == IC_FORMAT_YCbCr420_lp) + { + ptr8 = (mmUchar*)o_img_ptr->imgPtr + cox + coy*o_img_ptr->uWidth; + + + ////////////////////////////for Y////////////////////////// + for (row=0; row < cody; row++) + { + mmUchar *pu8Yrow1 = NULL; + mmUchar *pu8Yrow2 = NULL; + y = (mmUint16) ((mmUint32) (row*resizeFactorY) >> 9); + yf = (mmUchar) ((mmUint32)((row*resizeFactorY) >> 6) & 0x7); + pu8Yrow1 = inImgPtrY + (y) * i_img_ptr->uStride; + pu8Yrow2 = pu8Yrow1 + i_img_ptr->uStride; + + for (col=0; col < codx; col++) + { + mmUchar in11, in12, in21, in22; + mmUchar *pu8ptr1 = NULL; + mmUchar *pu8ptr2 = NULL; + mmUchar w; + mmUint16 accum_1; + //mmUint32 accum_W; + + + + x = (mmUint16) ((mmUint32) (col*resizeFactorX) >> 9); + xf = (mmUchar) ((mmUint32) ((col*resizeFactorX) >> 6) & 0x7); + + + //accum_W = 0; + accum_1 = 0; + + pu8ptr1 = pu8Yrow1 + (x); + pu8ptr2 = pu8Yrow2 + (x); + + /* A pixel */ + //in = *(inImgPtrY + (y)*idx + (x)); + in11 = *(pu8ptr1); + + w = bWeights[xf][yf][0]; + accum_1 = (w * in11); + //accum_W += (w); + + /* B pixel */ + //in = *(inImgPtrY + (y)*idx + (x+1)); + in12 = *(pu8ptr1+1); + w = bWeights[xf][yf][1]; + accum_1 += (w * in12); + //accum_W += (w); + + /* C pixel */ + //in = *(inImgPtrY + (y+1)*idx + (x)); + in21 = *(pu8ptr2); + w = bWeights[xf][yf][3]; + accum_1 += (w * in21); + //accum_W += (w); + + /* D pixel */ + //in = *(inImgPtrY + (y+1)*idx + (x+1)); + in22 = *(pu8ptr2+1); + w = bWeights[xf][yf][2]; + accum_1 += (w * in22); + //accum_W += (w); + + /* divide by sum of the weights */ + //accum_1 /= (accum_W); + //accum_1 = (accum_1/64); + accum_1 = (accum_1>>6); + *ptr8 = (mmUchar)accum_1 ; + + + ptr8++; + } + ptr8 = ptr8 + (o_img_ptr->uStride - codx); + } + 
////////////////////////////for Y////////////////////////// + + ///////////////////////////////for Cb-Cr////////////////////// + + ptr8Cb = (mmUchar*)o_img_ptr->clrPtr + cox + coy*o_img_ptr->uWidth; + + ptr8Cr = (mmUchar*)(ptr8Cb+1); + + idxC = (idx>>1); + for (row=0; row < (((cody)>>1)); row++) + { + mmUchar *pu8Cbr1 = NULL; + mmUchar *pu8Cbr2 = NULL; + mmUchar *pu8Crr1 = NULL; + mmUchar *pu8Crr2 = NULL; + + y = (mmUint16) ((mmUint32) (row*resizeFactorY) >> 9); + yf = (mmUchar) ((mmUint32)((row*resizeFactorY) >> 6) & 0x7); + + pu8Cbr1 = inImgPtrU + (y) * i_img_ptr->uStride; + pu8Cbr2 = pu8Cbr1 + i_img_ptr->uStride; + pu8Crr1 = inImgPtrV + (y) * i_img_ptr->uStride; + pu8Crr2 = pu8Crr1 + i_img_ptr->uStride; + + for (col=0; col < (((codx)>>1)); col++) + { + mmUchar in11, in12, in21, in22; + mmUchar *pu8Cbc1 = NULL; + mmUchar *pu8Cbc2 = NULL; + mmUchar *pu8Crc1 = NULL; + mmUchar *pu8Crc2 = NULL; + + mmUchar w; + mmUint16 accum_1Cb, accum_1Cr; + //mmUint32 accum_WCb, accum_WCr; + + + x = (mmUint16) ((mmUint32) (col*resizeFactorX) >> 9); + xf = (mmUchar) ((mmUint32) ((col*resizeFactorX) >> 6) & 0x7); + + + //accum_WCb = accum_WCr = 0; + accum_1Cb = accum_1Cr = 0; + + pu8Cbc1 = pu8Cbr1 + (x*2); + pu8Cbc2 = pu8Cbr2 + (x*2); + pu8Crc1 = pu8Crr1 + (x*2); + pu8Crc2 = pu8Crr2 + (x*2); + + + + /* A pixel */ + w = bWeights[xf][yf][0]; + + in11 = *(pu8Cbc1); + accum_1Cb = (w * in11); + // accum_WCb += (w); + + in11 = *(pu8Crc1); + accum_1Cr = (w * in11); + //accum_WCr += (w); + + /* B pixel */ + w = bWeights[xf][yf][1]; + + in12 = *(pu8Cbc1+2); + accum_1Cb += (w * in12); + //accum_WCb += (w); + + in12 = *(pu8Crc1+2); + accum_1Cr += (w * in12); + //accum_WCr += (w); + + /* C pixel */ + w = bWeights[xf][yf][3]; + + in21 = *(pu8Cbc2); + accum_1Cb += (w * in21); + //accum_WCb += (w); + + in21 = *(pu8Crc2); + accum_1Cr += (w * in21); + //accum_WCr += (w); + + /* D pixel */ + w = bWeights[xf][yf][2]; + + in22 = *(pu8Cbc2+2); + accum_1Cb += (w * in22); + //accum_WCb += (w); + + in22 = 
*(pu8Crc2+2); + accum_1Cr += (w * in22); + //accum_WCr += (w); + + /* divide by sum of the weights */ + //accum_1Cb /= (accum_WCb); + accum_1Cb = (accum_1Cb>>6); + *ptr8Cb = (mmUchar)accum_1Cb ; + + + accum_1Cr = (accum_1Cr >> 6); + *ptr8Cr = (mmUchar)accum_1Cr ; + + ptr8Cb++; + ptr8Cr++; + + ptr8Cb++; + ptr8Cr++; + } + ptr8Cb = ptr8Cb + (o_img_ptr->uStride-codx); + ptr8Cr = ptr8Cr + (o_img_ptr->uStride-codx); + } + ///////////////////For Cb- Cr//////////////////////////////////////// + } + else + { + ALOGE("eFormat not supported"); + ALOGV("VT_resizeFrame_Video_opt2_lp-"); + return FALSE; + } + ALOGV("success"); + ALOGV("VT_resizeFrame_Video_opt2_lp-"); + return TRUE; +} diff --git a/camera/OMXCameraAdapter/OMX3A.cpp b/camera/OMXCameraAdapter/OMX3A.cpp new file mode 100644 index 0000000..7ae50e4 --- /dev/null +++ b/camera/OMXCameraAdapter/OMX3A.cpp @@ -0,0 +1,1731 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file OMX3A.cpp +* +* This file contains functionality for handling 3A configurations. 
+* +*/ + +#undef LOG_TAG + +#define LOG_TAG "CameraHAL" + +#include "CameraHal.h" +#include "OMXCameraAdapter.h" +#include "ErrorUtils.h" + +#include + +#undef TRUE +#undef FALSE +#define TRUE "true" +#define FALSE "false" + +#define METERING_AREAS_RANGE 0xFF + +namespace android { +const SceneModesEntry* OMXCameraAdapter::getSceneModeEntry(const char* name, + OMX_SCENEMODETYPE scene) { + const SceneModesEntry* cameraLUT = NULL; + const SceneModesEntry* entry = NULL; + unsigned int numEntries = 0; + + // 1. Find camera's scene mode LUT + for (unsigned int i = 0; i < ARRAY_SIZE(CameraToSensorModesLUT); i++) { + if (strcmp(CameraToSensorModesLUT[i].name, name) == 0) { + cameraLUT = CameraToSensorModesLUT[i].Table; + numEntries = CameraToSensorModesLUT[i].size; + break; + } + } + + // 2. Find scene mode entry in table + if (!cameraLUT) { + goto EXIT; + } + + for (unsigned int i = 0; i < numEntries; i++) { + if(cameraLUT[i].scene == scene) { + entry = cameraLUT + i; + break; + } + } + EXIT: + return entry; +} + +status_t OMXCameraAdapter::setParameters3A(const CameraParameters ¶ms, + BaseCameraAdapter::AdapterState state) +{ + status_t ret = NO_ERROR; + int mode = 0; + const char *str = NULL; + int varint = 0; + BaseCameraAdapter::AdapterState nextState; + BaseCameraAdapter::getNextState(nextState); + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(m3ASettingsUpdateLock); + + str = params.get(CameraParameters::KEY_SCENE_MODE); + mode = getLUTvalue_HALtoOMX( str, SceneLUT); + if ( mFirstTimeInit || ((str != NULL) && ( mParameters3A.SceneMode != mode )) ) { + if ( 0 <= mode ) { + mParameters3A.SceneMode = mode; + if ((mode == OMX_Manual) && (mFirstTimeInit == false)){//Auto mode + mFirstTimeInit = true; + } + if ((mode != OMX_Manual) && + (state & PREVIEW_ACTIVE) && !(nextState & CAPTURE_ACTIVE)) { + // if setting preset scene mode, previewing, and not in the middle of capture + // set preset scene mode immediately instead of in next FBD + // for feedback params to work 
properly since they need to be read + // by application in subsequent getParameters() + ret |= setScene(mParameters3A); + // re-apply EV compensation after setting scene mode since it probably reset it + if(mParameters3A.EVCompensation) { + setEVCompensation(mParameters3A); + } + return ret; + } else { + mPending3Asettings |= SetSceneMode; + } + } else { + mParameters3A.SceneMode = OMX_Manual; + } + CAMHAL_LOGVB("SceneMode %d", mParameters3A.SceneMode); + } + +#ifdef OMAP_ENHANCEMENT + + str = params.get(TICameraParameters::KEY_EXPOSURE_MODE); + mode = getLUTvalue_HALtoOMX( str, ExpLUT); + if ( ( str != NULL ) && ( mParameters3A.Exposure != mode )) + { + mParameters3A.Exposure = mode; + CAMHAL_LOGDB("Exposure mode %d", mode); + if ( 0 <= mParameters3A.Exposure ) + { + mPending3Asettings |= SetExpMode; + } + } + +#endif + + str = params.get(CameraParameters::KEY_WHITE_BALANCE); + mode = getLUTvalue_HALtoOMX( str, WBalLUT); + if (mFirstTimeInit || ((str != NULL) && (mode != mParameters3A.WhiteBallance))) + { + mParameters3A.WhiteBallance = mode; + CAMHAL_LOGDB("Whitebalance mode %d", mode); + if ( 0 <= mParameters3A.WhiteBallance ) + { + mPending3Asettings |= SetWhiteBallance; + } + } + +#ifdef OMAP_ENHANCEMENT + + varint = params.getInt(TICameraParameters::KEY_CONTRAST); + if ( 0 <= varint ) + { + if ( mFirstTimeInit || + ( (mParameters3A.Contrast + CONTRAST_OFFSET) != varint ) ) + { + mParameters3A.Contrast = varint - CONTRAST_OFFSET; + CAMHAL_LOGDB("Contrast %d", mParameters3A.Contrast); + mPending3Asettings |= SetContrast; + } + } + + varint = params.getInt(TICameraParameters::KEY_SHARPNESS); + if ( 0 <= varint ) + { + if ( mFirstTimeInit || + ((mParameters3A.Sharpness + SHARPNESS_OFFSET) != varint )) + { + mParameters3A.Sharpness = varint - SHARPNESS_OFFSET; + CAMHAL_LOGDB("Sharpness %d", mParameters3A.Sharpness); + mPending3Asettings |= SetSharpness; + } + } + + varint = params.getInt(TICameraParameters::KEY_SATURATION); + if ( 0 <= varint ) + { + if ( 
mFirstTimeInit || + ((mParameters3A.Saturation + SATURATION_OFFSET) != varint ) ) + { + mParameters3A.Saturation = varint - SATURATION_OFFSET; + CAMHAL_LOGDB("Saturation %d", mParameters3A.Saturation); + mPending3Asettings |= SetSaturation; + } + } + + varint = params.getInt(TICameraParameters::KEY_BRIGHTNESS); + if ( 0 <= varint ) + { + if ( mFirstTimeInit || + (( mParameters3A.Brightness != varint )) ) + { + mParameters3A.Brightness = (unsigned) varint; + CAMHAL_LOGDB("Brightness %d", mParameters3A.Brightness); + mPending3Asettings |= SetBrightness; + } + } + +#endif + + str = params.get(CameraParameters::KEY_ANTIBANDING); + mode = getLUTvalue_HALtoOMX(str,FlickerLUT); + if ( mFirstTimeInit || ( ( str != NULL ) && ( mParameters3A.Flicker != mode ) )) + { + mParameters3A.Flicker = mode; + CAMHAL_LOGDB("Flicker %d", mParameters3A.Flicker); + if ( 0 <= mParameters3A.Flicker ) + { + mPending3Asettings |= SetFlicker; + } + } + +#ifdef OMAP_ENHANCEMENT + + str = params.get(TICameraParameters::KEY_ISO); + mode = getLUTvalue_HALtoOMX(str, IsoLUT); + CAMHAL_LOGVB("ISO mode arrived in HAL : %s", str); + if ( mFirstTimeInit || ( ( str != NULL ) && ( mParameters3A.ISO != mode )) ) + { + mParameters3A.ISO = mode; + CAMHAL_LOGDB("ISO %d", mParameters3A.ISO); + if ( 0 <= mParameters3A.ISO ) + { + mPending3Asettings |= SetISO; + } + } + +#endif + + str = params.get(CameraParameters::KEY_FOCUS_MODE); + mode = getLUTvalue_HALtoOMX(str, FocusLUT); + if ( (mFirstTimeInit || ((str != NULL) && (mParameters3A.Focus != mode)))) + { + mPending3Asettings |= SetFocus; + + mParameters3A.Focus = mode; + + // if focus mode is set to infinity...update focus distance immediately + if (mode == OMX_IMAGE_FocusControlAutoInfinity) { + updateFocusDistances(mParameters); + } + + CAMHAL_LOGDB("Focus %x", mParameters3A.Focus); + } + + str = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION); + varint = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION); + if ( mFirstTimeInit || + (( str 
!= NULL ) && + (mParameters3A.EVCompensation != varint ))) + { + CAMHAL_LOGDB("Setting EV Compensation to %d", varint); + + mParameters3A.EVCompensation = varint; + mPending3Asettings |= SetEVCompensation; + } + + str = params.get(CameraParameters::KEY_FLASH_MODE); + mode = getLUTvalue_HALtoOMX( str, FlashLUT); + if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.FlashMode != mode )) ) + { + if ( 0 <= mode ) + { + mParameters3A.FlashMode = mode; + mPending3Asettings |= SetFlash; + } + else + { + mParameters3A.FlashMode = OMX_IMAGE_FlashControlAuto; + } + } + + CAMHAL_LOGVB("Flash Setting %s", str); + CAMHAL_LOGVB("FlashMode %d", mParameters3A.FlashMode); + + str = params.get(CameraParameters::KEY_EFFECT); + mode = getLUTvalue_HALtoOMX( str, EffLUT); + if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.Effect != mode )) ) + { + mParameters3A.Effect = mode; + CAMHAL_LOGDB("Effect %d", mParameters3A.Effect); + if ( 0 <= mParameters3A.Effect ) + { + mPending3Asettings |= SetEffect; + } + } + + str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED); + if ( (str != NULL) && (!strcmp(str, "true")) ) + { + OMX_BOOL lock = OMX_FALSE; + mUserSetExpLock = OMX_FALSE; + str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK); + if (str && ((strcmp(str, "true")) == 0)) + { + CAMHAL_LOGVA("Locking Exposure"); + lock = OMX_TRUE; + mUserSetExpLock = OMX_TRUE; + } + else + { + CAMHAL_LOGVA("UnLocking Exposure"); + } + + if (mParameters3A.ExposureLock != lock) + { + mParameters3A.ExposureLock = lock; + CAMHAL_LOGDB("ExposureLock %d", lock); + mPending3Asettings |= SetExpLock; + } + } + + str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED); + if ( (str != NULL) && (!strcmp(str, "true")) ) + { + OMX_BOOL lock = OMX_FALSE; + mUserSetWbLock = OMX_FALSE; + str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK); + if (str && ((strcmp(str, "true")) == 0)) + { + CAMHAL_LOGVA("Locking WhiteBalance"); + lock = OMX_TRUE; + 
mUserSetWbLock = OMX_TRUE; + } + else + { + CAMHAL_LOGVA("UnLocking WhiteBalance"); + } + if (mParameters3A.WhiteBalanceLock != lock) + { + mParameters3A.WhiteBalanceLock = lock; + CAMHAL_LOGDB("WhiteBalanceLock %d", lock); + mPending3Asettings |= SetWBLock; + } + } + + str = params.get(TICameraParameters::KEY_AUTO_FOCUS_LOCK); + if (str && (strcmp(str, TRUE) == 0) && (mParameters3A.FocusLock != OMX_TRUE)) { + CAMHAL_LOGVA("Locking Focus"); + mParameters3A.FocusLock = OMX_TRUE; + setFocusLock(mParameters3A); + } else if (str && (strcmp(str, FALSE) == 0) && (mParameters3A.FocusLock != OMX_FALSE)) { + CAMHAL_LOGVA("UnLocking Focus"); + mParameters3A.FocusLock = OMX_FALSE; + setFocusLock(mParameters3A); + } + + str = params.get(CameraParameters::KEY_METERING_AREAS); + if ( (str != NULL) ) { + size_t MAX_METERING_AREAS; + Vector< sp > tempAreas; + + MAX_METERING_AREAS = atoi(params.get(CameraParameters::KEY_MAX_NUM_METERING_AREAS)); + + Mutex::Autolock lock(mMeteringAreasLock); + + ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas); + + CAMHAL_LOGVB("areAreasDifferent? 
= %d", + CameraArea::areAreasDifferent(mMeteringAreas, tempAreas)); + + if ( (NO_ERROR == ret) && CameraArea::areAreasDifferent(mMeteringAreas, tempAreas) ) { + mMeteringAreas.clear(); + mMeteringAreas = tempAreas; + + if ( MAX_METERING_AREAS >= mMeteringAreas.size() ) { + CAMHAL_LOGDB("Setting Metering Areas %s", + params.get(CameraParameters::KEY_METERING_AREAS)); + + mPending3Asettings |= SetMeteringAreas; + } else { + CAMHAL_LOGEB("Metering areas supported %d, metering areas set %d", + MAX_METERING_AREAS, mMeteringAreas.size()); + ret = -EINVAL; + } + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +int OMXCameraAdapter::getLUTvalue_HALtoOMX(const char * HalValue, LUTtype LUT) +{ + int LUTsize = LUT.size; + if( HalValue ) + for(int i = 0; i < LUTsize; i++) + if( 0 == strcmp(LUT.Table[i].userDefinition, HalValue) ) + return LUT.Table[i].omxDefinition; + + return -ENOENT; +} + +const char* OMXCameraAdapter::getLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT) +{ + int LUTsize = LUT.size; + for(int i = 0; i < LUTsize; i++) + if( LUT.Table[i].omxDefinition == OMXValue ) + return LUT.Table[i].userDefinition; + + return NULL; +} + +status_t OMXCameraAdapter::init3AParams(Gen3A_settings &Gen3A) +{ + LOG_FUNCTION_NAME; + + Gen3A.Effect = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EFFECT, EffLUT); + Gen3A.FlashMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FLASH_MODE, FlashLUT); + Gen3A.SceneMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_SCENE_MODE, SceneLUT); + Gen3A.EVCompensation = atoi(OMXCameraAdapter::DEFAULT_EV_COMPENSATION); + Gen3A.Focus = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FOCUS_MODE, FocusLUT); + Gen3A.ISO = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ISO_MODE, IsoLUT); + Gen3A.Flicker = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ANTIBANDING, FlickerLUT); + Gen3A.Brightness = atoi(OMXCameraAdapter::DEFAULT_BRIGHTNESS); + Gen3A.Saturation = atoi(OMXCameraAdapter::DEFAULT_SATURATION) - SATURATION_OFFSET; + Gen3A.Sharpness 
= atoi(OMXCameraAdapter::DEFAULT_SHARPNESS) - SHARPNESS_OFFSET; + Gen3A.Contrast = atoi(OMXCameraAdapter::DEFAULT_CONTRAST) - CONTRAST_OFFSET; + Gen3A.WhiteBallance = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_WB, WBalLUT); + Gen3A.Exposure = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EXPOSURE_MODE, ExpLUT); + Gen3A.ExposureLock = OMX_FALSE; + Gen3A.FocusLock = OMX_FALSE; + Gen3A.WhiteBalanceLock = OMX_FALSE; + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; +} + +status_t OMXCameraAdapter::setExposureMode(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_EXPOSURECONTROLTYPE exp; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&exp, OMX_CONFIG_EXPOSURECONTROLTYPE); + exp.nPortIndex = OMX_ALL; + exp.eExposureControl = (OMX_EXPOSURECONTROLTYPE)Gen3A.Exposure; + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonExposure, + &exp); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring exposure mode 0x%x", eError); + } + else + { + CAMHAL_LOGDA("Camera exposure mode configured successfully"); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +static bool isFlashDisabled() { +#if (PROPERTY_VALUE_MAX < 5) +#error "PROPERTY_VALUE_MAX must be at least 5" +#endif + + // Ignore flash_off system property for user build. 
+ char buildType[PROPERTY_VALUE_MAX]; + if (property_get("ro.build.type", buildType, NULL) && + !strcasecmp(buildType, "user")) { + return false; + } + + char value[PROPERTY_VALUE_MAX]; + if (property_get("camera.flash_off", value, NULL) && + (!strcasecmp(value, "true") || !strcasecmp(value, "1"))) { + ALOGW("flash is disabled for testing purpose"); + return true; + } + + return false; +} + +status_t OMXCameraAdapter::setFlashMode(Gen3A_settings& Gen3A) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_PARAM_FLASHCONTROLTYPE flash; + OMX_CONFIG_FOCUSASSISTTYPE focusAssist; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&flash, OMX_IMAGE_PARAM_FLASHCONTROLTYPE); + flash.nPortIndex = OMX_ALL; + + if (isFlashDisabled()) { + flash.eFlashControl = ( OMX_IMAGE_FLASHCONTROLTYPE ) OMX_IMAGE_FlashControlOff; + } else { + flash.eFlashControl = ( OMX_IMAGE_FLASHCONTROLTYPE ) Gen3A.FlashMode; + } + + CAMHAL_LOGDB("Configuring flash mode 0x%x", flash.eFlashControl); + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE) OMX_IndexConfigFlashControl, + &flash); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring flash mode 0x%x", eError); + } + else + { + CAMHAL_LOGDA("Camera flash mode configured successfully"); + } + + if ( OMX_ErrorNone == eError ) + { + OMX_INIT_STRUCT_PTR (&focusAssist, OMX_CONFIG_FOCUSASSISTTYPE); + focusAssist.nPortIndex = OMX_ALL; + if ( flash.eFlashControl == OMX_IMAGE_FlashControlOff ) + { + focusAssist.bFocusAssist = OMX_FALSE; + } + else + { + focusAssist.bFocusAssist = OMX_TRUE; + } + + CAMHAL_LOGDB("Configuring AF Assist mode 0x%x", focusAssist.bFocusAssist); + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE) OMX_IndexConfigFocusAssist, + &focusAssist); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error 
while configuring AF Assist mode 0x%x", eError); + } + else + { + CAMHAL_LOGDA("Camera AF Assist mode configured successfully"); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::getFlashMode(Gen3A_settings& Gen3A) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_PARAM_FLASHCONTROLTYPE flash; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&flash, OMX_IMAGE_PARAM_FLASHCONTROLTYPE); + flash.nPortIndex = OMX_ALL; + + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE) OMX_IndexConfigFlashControl, + &flash); + + if ( OMX_ErrorNone != eError ) { + CAMHAL_LOGEB("Error while getting flash mode 0x%x", eError); + } else { + Gen3A.FlashMode = flash.eFlashControl; + CAMHAL_LOGDB("Gen3A.FlashMode 0x%x", Gen3A.FlashMode); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setFocusMode(Gen3A_settings& Gen3A) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focus; + size_t top, left, width, height, weight; + OMX_CONFIG_BOOLEANTYPE bOMX; + + LOG_FUNCTION_NAME; + + BaseCameraAdapter::AdapterState state; + BaseCameraAdapter::getState(state); + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + + ///Face detection takes precedence over touch AF + if ( mFaceDetectionRunning ) + { + //Disable region priority first + setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false); + + //Enable face algorithm priority for focus + setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , true); + + //Do normal focus afterwards + ////FIXME: Check if the extended focus control is needed? 
this overrides caf + //focusControl.eFocusControl = ( OMX_IMAGE_FOCUSCONTROLTYPE ) OMX_IMAGE_FocusControlExtended; + } + else if ( (!mFocusAreas.isEmpty()) && (!mFocusAreas.itemAt(0)->isZeroArea()) ) + { + + //Disable face priority first + setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, false); + + //Enable region algorithm priority + setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, true); + + + //Do normal focus afterwards + //FIXME: Check if the extended focus control is needed? this overrides caf + //focus.eFocusControl = ( OMX_IMAGE_FOCUSCONTROLTYPE ) OMX_IMAGE_FocusControlExtended; + + } + else + { + + //Disable both region and face priority + setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false); + + setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, false); + + } + + if ( NO_ERROR == ret && ((state & AF_ACTIVE) == 0) ) + { + OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE); + + if ( Gen3A.Focus == OMX_IMAGE_FocusControlAutoInfinity) + { + // Don't lock at infinity, otherwise the AF cannot drive + // the lens at infinity position + if( set3ALock(mUserSetExpLock, mUserSetWbLock, OMX_FALSE) != NO_ERROR) + { + CAMHAL_LOGEA("Error Applying 3A locks"); + } else { + CAMHAL_LOGDA("Focus locked. 
Applied focus locks successfully"); + } + } + if ( Gen3A.Focus == OMX_IMAGE_FocusControlAuto || + Gen3A.Focus == OMX_IMAGE_FocusControlAutoInfinity) + { + // Run focus scanning if switching to continuous infinity focus mode + bOMX.bEnabled = OMX_TRUE; + } + else + { + bOMX.bEnabled = OMX_FALSE; + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_TI_IndexConfigAutofocusEnable, + &bOMX); + + OMX_INIT_STRUCT_PTR (&focus, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE); + focus.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + focus.eFocusControl = (OMX_IMAGE_FOCUSCONTROLTYPE)Gen3A.Focus; + + CAMHAL_LOGDB("Configuring focus mode 0x%x", focus.eFocusControl); + eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, OMX_IndexConfigFocusControl, &focus); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring focus mode 0x%x", eError); + } + else + { + CAMHAL_LOGDA("Camera focus mode configured successfully"); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::getFocusMode(Gen3A_settings& Gen3A) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focus; + size_t top, left, width, height, weight; + + LOG_FUNCTION_NAME; + + if (OMX_StateInvalid == mComponentState) { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&focus, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE); + focus.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigFocusControl, &focus); + + if (OMX_ErrorNone != eError) { + CAMHAL_LOGEB("Error while configuring focus mode 0x%x", eError); + } else { + Gen3A.Focus = focus.eFocusControl; + CAMHAL_LOGDB("Gen3A.Focus 0x%x", Gen3A.Focus); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t 
OMXCameraAdapter::setScene(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_SCENEMODETYPE scene; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&scene, OMX_CONFIG_SCENEMODETYPE); + scene.nPortIndex = OMX_ALL; + scene.eSceneMode = ( OMX_SCENEMODETYPE ) Gen3A.SceneMode; + + CAMHAL_LOGDB("Configuring scene mode 0x%x", scene.eSceneMode); + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSceneMode, + &scene); + + if (OMX_ErrorNone != eError) { + CAMHAL_LOGEB("Error while configuring scene mode 0x%x", eError); + } else { + CAMHAL_LOGDA("Camera scene configured successfully"); + if (Gen3A.SceneMode != OMX_Manual) { + // Get preset scene mode feedback + getFocusMode(Gen3A); + getFlashMode(Gen3A); + getWBMode(Gen3A); + + // TODO(XXX): Re-enable these for mainline + // getSharpness(Gen3A); + // getSaturation(Gen3A); + // getISO(Gen3A); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setEVCompensation(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_EXPOSUREVALUETYPE expValues; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&expValues, OMX_CONFIG_EXPOSUREVALUETYPE); + expValues.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + + OMX_GetConfig( mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonExposureValue, + &expValues); + CAMHAL_LOGDB("old EV Compensation for OMX = 0x%x", (int)expValues.xEVCompensation); + CAMHAL_LOGDB("EV Compensation for HAL = %d", Gen3A.EVCompensation); + + expValues.xEVCompensation = ( Gen3A.EVCompensation * ( 1 << Q16_OFFSET ) ) / 10; + eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, + 
OMX_IndexConfigCommonExposureValue, + &expValues); + CAMHAL_LOGDB("new EV Compensation for OMX = 0x%x", (int)expValues.xEVCompensation); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring EV Compensation 0x%x error = 0x%x", + ( unsigned int ) expValues.xEVCompensation, + eError); + } + else + { + CAMHAL_LOGDB("EV Compensation 0x%x configured successfully", + ( unsigned int ) expValues.xEVCompensation); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::getEVCompensation(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_EXPOSUREVALUETYPE expValues; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&expValues, OMX_CONFIG_EXPOSUREVALUETYPE); + expValues.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + + OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonExposureValue, + &expValues); + + if ( OMX_ErrorNone != eError ) { + CAMHAL_LOGEB("Error while getting EV Compensation error = 0x%x", eError); + } else { + Gen3A.EVCompensation = (10 * expValues.xEVCompensation) / (1 << Q16_OFFSET); + CAMHAL_LOGDB("Gen3A.EVCompensation 0x%x", Gen3A.EVCompensation); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setWBMode(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_WHITEBALCONTROLTYPE wb; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&wb, OMX_CONFIG_WHITEBALCONTROLTYPE); + wb.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + wb.eWhiteBalControl = ( OMX_WHITEBALCONTROLTYPE ) Gen3A.WhiteBallance; + + if ( WB_FACE_PRIORITY == Gen3A.WhiteBallance ) + { + //Disable Region priority and enable 
Face priority + setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false); + setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, true); + + //Then set the mode to auto + wb.eWhiteBalControl = OMX_WhiteBalControlAuto; + } + else + { + //Disable Face and Region priority + setAlgoPriority(FACE_PRIORITY, WHITE_BALANCE_ALGO, false); + setAlgoPriority(REGION_PRIORITY, WHITE_BALANCE_ALGO, false); + } + + eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonWhiteBalance, + &wb); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring Whitebalance mode 0x%x error = 0x%x", + ( unsigned int ) wb.eWhiteBalControl, + eError); + } + else + { + CAMHAL_LOGDB("Whitebalance mode 0x%x configured successfully", + ( unsigned int ) wb.eWhiteBalControl); + } + + LOG_FUNCTION_NAME_EXIT; + + return eError; +} + +status_t OMXCameraAdapter::getWBMode(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_WHITEBALCONTROLTYPE wb; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&wb, OMX_CONFIG_WHITEBALCONTROLTYPE); + wb.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonWhiteBalance, + &wb); + + if (OMX_ErrorNone != eError) { + CAMHAL_LOGEB("Error while getting Whitebalance mode error = 0x%x", eError); + } else { + Gen3A.WhiteBallance = wb.eWhiteBalControl; + CAMHAL_LOGDB("Gen3A.WhiteBallance 0x%x", Gen3A.WhiteBallance); + } + + LOG_FUNCTION_NAME_EXIT; + + return eError; +} + +status_t OMXCameraAdapter::setFlicker(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_FLICKERCANCELTYPE flicker; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&flicker, 
OMX_CONFIG_FLICKERCANCELTYPE); + flicker.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + flicker.eFlickerCancel = (OMX_COMMONFLICKERCANCELTYPE)Gen3A.Flicker; + + eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_IndexConfigFlickerCancel, + &flicker ); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring Flicker mode 0x%x error = 0x%x", + ( unsigned int ) flicker.eFlickerCancel, + eError); + } + else + { + CAMHAL_LOGDB("Flicker mode 0x%x configured successfully", + ( unsigned int ) flicker.eFlickerCancel); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setBrightness(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_BRIGHTNESSTYPE brightness; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&brightness, OMX_CONFIG_BRIGHTNESSTYPE); + brightness.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + brightness.nBrightness = Gen3A.Brightness; + + eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonBrightness, + &brightness); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring Brightness 0x%x error = 0x%x", + ( unsigned int ) brightness.nBrightness, + eError); + } + else + { + CAMHAL_LOGDB("Brightness 0x%x configured successfully", + ( unsigned int ) brightness.nBrightness); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setContrast(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_CONTRASTTYPE contrast; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&contrast, OMX_CONFIG_CONTRASTTYPE); + contrast.nPortIndex = 
mCameraAdapterParameters.mPrevPortIndex; + contrast.nContrast = Gen3A.Contrast; + + eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonContrast, + &contrast); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring Contrast 0x%x error = 0x%x", + ( unsigned int ) contrast.nContrast, + eError); + } + else + { + CAMHAL_LOGDB("Contrast 0x%x configured successfully", + ( unsigned int ) contrast.nContrast); + } + + LOG_FUNCTION_NAME_EXIT; + + return eError; +} + +status_t OMXCameraAdapter::setSharpness(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE procSharpness; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&procSharpness, OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE); + procSharpness.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + procSharpness.nLevel = Gen3A.Sharpness; + + if( procSharpness.nLevel == 0 ) + { + procSharpness.bAuto = OMX_TRUE; + } + else + { + procSharpness.bAuto = OMX_FALSE; + } + + eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_IndexConfigSharpeningLevel, + &procSharpness); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring Sharpness 0x%x error = 0x%x", + ( unsigned int ) procSharpness.nLevel, + eError); + } + else + { + CAMHAL_LOGDB("Sharpness 0x%x configured successfully", + ( unsigned int ) procSharpness.nLevel); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::getSharpness(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE procSharpness; + + LOG_FUNCTION_NAME; + + if (OMX_StateInvalid == mComponentState) { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&procSharpness, 
OMX_IMAGE_CONFIG_PROCESSINGLEVELTYPE); + procSharpness.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_IndexConfigSharpeningLevel, + &procSharpness); + + if (OMX_ErrorNone != eError) { + CAMHAL_LOGEB("Error while configuring Sharpness error = 0x%x", eError); + } else { + Gen3A.Sharpness = procSharpness.nLevel; + CAMHAL_LOGDB("Gen3A.Sharpness 0x%x", Gen3A.Sharpness); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setSaturation(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_SATURATIONTYPE saturation; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&saturation, OMX_CONFIG_SATURATIONTYPE); + saturation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + saturation.nSaturation = Gen3A.Saturation; + + eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonSaturation, + &saturation); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring Saturation 0x%x error = 0x%x", + ( unsigned int ) saturation.nSaturation, + eError); + } + else + { + CAMHAL_LOGDB("Saturation 0x%x configured successfully", + ( unsigned int ) saturation.nSaturation); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::getSaturation(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_SATURATIONTYPE saturation; + + LOG_FUNCTION_NAME; + + if (OMX_StateInvalid == mComponentState) { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&saturation, OMX_CONFIG_SATURATIONTYPE); + saturation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + + eError = OMX_GetConfig( mCameraAdapterParameters.mHandleComp, + 
OMX_IndexConfigCommonSaturation, + &saturation); + + if (OMX_ErrorNone != eError) { + CAMHAL_LOGEB("Error while getting Saturation error = 0x%x", eError); + } else { + Gen3A.Saturation = saturation.nSaturation; + CAMHAL_LOGDB("Gen3A.Saturation 0x%x", Gen3A.Saturation); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setISO(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_EXPOSUREVALUETYPE expValues; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&expValues, OMX_CONFIG_EXPOSUREVALUETYPE); + expValues.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + + OMX_GetConfig( mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonExposureValue, + &expValues); + + if( 0 == Gen3A.ISO ) + { + expValues.bAutoSensitivity = OMX_TRUE; + } + else + { + expValues.bAutoSensitivity = OMX_FALSE; + expValues.nSensitivity = Gen3A.ISO; + } + + eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonExposureValue, + &expValues); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring ISO 0x%x error = 0x%x", + ( unsigned int ) expValues.nSensitivity, + eError); + } + else + { + CAMHAL_LOGDB("ISO 0x%x configured successfully", + ( unsigned int ) expValues.nSensitivity); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::getISO(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_EXPOSUREVALUETYPE expValues; + + LOG_FUNCTION_NAME; + + if (OMX_StateInvalid == mComponentState) { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&expValues, OMX_CONFIG_EXPOSUREVALUETYPE); + expValues.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + + OMX_GetConfig( 
mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonExposureValue, + &expValues); + + if (OMX_ErrorNone != eError) { + CAMHAL_LOGEB("Error while getting ISO error = 0x%x", eError); + } else { + Gen3A.ISO = expValues.nSensitivity; + CAMHAL_LOGDB("Gen3A.ISO %d", Gen3A.ISO); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setEffect(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_IMAGEFILTERTYPE effect; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&effect, OMX_CONFIG_IMAGEFILTERTYPE); + effect.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + effect.eImageFilter = (OMX_IMAGEFILTERTYPE ) Gen3A.Effect; + + eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonImageFilter, + &effect); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring Effect 0x%x error = 0x%x", + ( unsigned int ) effect.eImageFilter, + eError); + } + else + { + CAMHAL_LOGDB("Effect 0x%x configured successfully", + ( unsigned int ) effect.eImageFilter); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setWhiteBalanceLock(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_CONFIG_LOCKTYPE lock; + + LOG_FUNCTION_NAME + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE); + lock.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + lock.bLock = Gen3A.WhiteBalanceLock; + eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_IndexConfigImageWhiteBalanceLock, + &lock); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring WhiteBalance Lock error = 
0x%x", eError); + } + else + { + CAMHAL_LOGDB("WhiteBalance Lock configured successfully %d ", lock.bLock); + } + LOG_FUNCTION_NAME_EXIT + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setExposureLock(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_CONFIG_LOCKTYPE lock; + + LOG_FUNCTION_NAME + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE); + lock.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + lock.bLock = Gen3A.ExposureLock; + eError = OMX_SetConfig( mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_IndexConfigImageExposureLock, + &lock); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring Exposure Lock error = 0x%x", eError); + } + else + { + CAMHAL_LOGDB("Exposure Lock configured successfully %d ", lock.bLock); + } + LOG_FUNCTION_NAME_EXIT + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setFocusLock(Gen3A_settings& Gen3A) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_CONFIG_LOCKTYPE lock; + + LOG_FUNCTION_NAME + + if ( OMX_StateInvalid == mComponentState ) { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE); + lock.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + + lock.bLock = Gen3A.FocusLock; + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_IndexConfigImageFocusLock, + &lock); + + if ( OMX_ErrorNone != eError ) { + CAMHAL_LOGEB("Error while configuring Focus Lock error = 0x%x", eError); + } else { + CAMHAL_LOGDB("Focus Lock configured successfully %d ", lock.bLock); + } + + LOG_FUNCTION_NAME_EXIT + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_BOOL toggleFocus) +{ + 
OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_CONFIG_LOCKTYPE lock; + + LOG_FUNCTION_NAME + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE); + lock.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + + mParameters3A.ExposureLock = toggleExp; + mParameters3A.FocusLock = toggleFocus; + mParameters3A.WhiteBalanceLock = toggleWb; + + eError = OMX_GetConfig( mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_IndexConfigImageExposureLock, + &lock); + + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error GetConfig Exposure Lock error = 0x%x", eError); + goto EXIT; + } + else + { + const char *lock_state_exp = toggleExp ? TRUE : FALSE; + CAMHAL_LOGDA("Exposure Lock GetConfig successfull"); + + /* Apply locks only when not applied already */ + if ( lock.bLock != toggleExp ) + { + setExposureLock(mParameters3A); + } + + mParams.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, lock_state_exp); + } + + OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE); + lock.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + eError = OMX_GetConfig( mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_IndexConfigImageFocusLock, + &lock); + + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error GetConfig Focus Lock error = 0x%x", eError); + goto EXIT; + } + else + { + CAMHAL_LOGDB("Focus Lock GetConfig successfull bLock(%d)", lock.bLock); + + /* Apply locks only when not applied already */ + if ( lock.bLock != toggleFocus ) + { + setFocusLock(mParameters3A); + } + } + + OMX_INIT_STRUCT_PTR (&lock, OMX_IMAGE_CONFIG_LOCKTYPE); + lock.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + eError = OMX_GetConfig( mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_IndexConfigImageWhiteBalanceLock, + &lock); + + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error GetConfig WhiteBalance Lock error = 0x%x", eError); + 
goto EXIT; + } + else + { + const char *lock_state_wb = toggleWb ? TRUE : FALSE; + CAMHAL_LOGDA("WhiteBalance Lock GetConfig successfull"); + + /* Apply locks only when not applied already */ + if ( lock.bLock != toggleWb ) + { + setWhiteBalanceLock(mParameters3A); + } + + mParams.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, lock_state_wb); + } + EXIT: + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + + OMX_ALGOAREASTYPE **meteringAreas; + OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer; + MemoryManager memMgr; + int areasSize = 0; + + LOG_FUNCTION_NAME + + Mutex::Autolock lock(mMeteringAreasLock); + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096; + meteringAreas = (OMX_ALGOAREASTYPE**) memMgr.allocateBuffer(0, 0, NULL, areasSize, 1); + + OMXCameraPortParameters * mPreviewData = NULL; + mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]; + + if (!meteringAreas) + { + CAMHAL_LOGEB("Error allocating buffer for metering areas %d", eError); + return -ENOMEM; + } + + OMX_INIT_STRUCT_PTR (meteringAreas[0], OMX_ALGOAREASTYPE); + + meteringAreas[0]->nPortIndex = OMX_ALL; + meteringAreas[0]->nNumAreas = mMeteringAreas.size(); + meteringAreas[0]->nAlgoAreaPurpose = OMX_AlgoAreaExposure; + + for ( unsigned int n = 0; n < mMeteringAreas.size(); n++) + { + // transform the coordinates to 3A-type coordinates + mMeteringAreas.itemAt(n)->transfrom(mPreviewData->mWidth, + mPreviewData->mHeight, + meteringAreas[0]->tAlgoAreas[n].nTop, + meteringAreas[0]->tAlgoAreas[n].nLeft, + meteringAreas[0]->tAlgoAreas[n].nWidth, + meteringAreas[0]->tAlgoAreas[n].nHeight); + + meteringAreas[0]->tAlgoAreas[n].nLeft = + ( meteringAreas[0]->tAlgoAreas[n].nLeft * 
METERING_AREAS_RANGE ) / mPreviewData->mWidth; + meteringAreas[0]->tAlgoAreas[n].nTop = + ( meteringAreas[0]->tAlgoAreas[n].nTop* METERING_AREAS_RANGE ) / mPreviewData->mHeight; + meteringAreas[0]->tAlgoAreas[n].nWidth = + ( meteringAreas[0]->tAlgoAreas[n].nWidth * METERING_AREAS_RANGE ) / mPreviewData->mWidth; + meteringAreas[0]->tAlgoAreas[n].nHeight = + ( meteringAreas[0]->tAlgoAreas[n].nHeight * METERING_AREAS_RANGE ) / mPreviewData->mHeight; + + meteringAreas[0]->tAlgoAreas[n].nPriority = mMeteringAreas.itemAt(n)->getWeight(); + + CAMHAL_LOGDB("Metering area %d : top = %d left = %d width = %d height = %d prio = %d", + n, (int)meteringAreas[0]->tAlgoAreas[n].nTop, (int)meteringAreas[0]->tAlgoAreas[n].nLeft, + (int)meteringAreas[0]->tAlgoAreas[n].nWidth, (int)meteringAreas[0]->tAlgoAreas[n].nHeight, + (int)meteringAreas[0]->tAlgoAreas[n].nPriority); + + } + + OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER); + + sharedBuffer.nPortIndex = OMX_ALL; + sharedBuffer.nSharedBuffSize = areasSize; + sharedBuffer.pSharedBuff = (OMX_U8 *) meteringAreas[0]; + + if ( NULL == sharedBuffer.pSharedBuff ) + { + CAMHAL_LOGEA("No resources to allocate OMX shared buffer"); + ret = -ENOMEM; + goto EXIT; + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE) OMX_TI_IndexConfigAlgoAreas, &sharedBuffer); + + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while setting Focus Areas configuration 0x%x", eError); + ret = -EINVAL; + } + else + { + CAMHAL_LOGDA("Metering Areas SetConfig successfull."); + } + + EXIT: + if (NULL != meteringAreas) + { + memMgr.freeBuffer((void*) meteringAreas); + meteringAreas = NULL; + } + + return ret; +} + +status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A ) +{ + status_t ret = NO_ERROR; + unsigned int currSett; // 32 bit + int portIndex; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(m3ASettingsUpdateLock); + + /* + * Scenes have a priority during the process + * of applying 3A 
related parameters. + * They can override pretty much all other 3A + * settings and similarly get overridden when + * for instance the focus mode gets switched. + * There is only one exception to this rule, + * the manual a.k.a. auto scene. + */ + if (SetSceneMode & mPending3Asettings) { + mPending3Asettings &= ~SetSceneMode; + ret |= setScene(Gen3A); + // re-apply EV compensation after setting scene mode since it probably reset it + if(Gen3A.EVCompensation) { + setEVCompensation(Gen3A); + } + return ret; + } else if (OMX_Manual != Gen3A.SceneMode) { + // only certain settings are allowed when scene mode is set + mPending3Asettings &= (SetEVCompensation | SetFocus | SetWBLock | + SetExpLock | SetWhiteBallance | SetFlash); + if ( mPending3Asettings == 0 ) return NO_ERROR; + } + + for( currSett = 1; currSett < E3aSettingMax; currSett <<= 1) + { + if( currSett & mPending3Asettings ) + { + switch( currSett ) + { + case SetEVCompensation: + { + ret |= setEVCompensation(Gen3A); + break; + } + + case SetWhiteBallance: + { + ret |= setWBMode(Gen3A); + break; + } + + case SetFlicker: + { + ret |= setFlicker(Gen3A); + break; + } + + case SetBrightness: + { + ret |= setBrightness(Gen3A); + break; + } + + case SetContrast: + { + ret |= setContrast(Gen3A); + break; + } + + case SetSharpness: + { + ret |= setSharpness(Gen3A); + break; + } + + case SetSaturation: + { + ret |= setSaturation(Gen3A); + break; + } + + case SetISO: + { + ret |= setISO(Gen3A); + break; + } + + case SetEffect: + { + ret |= setEffect(Gen3A); + break; + } + + case SetFocus: + { + ret |= setFocusMode(Gen3A); + break; + } + + case SetExpMode: + { + ret |= setExposureMode(Gen3A); + break; + } + + case SetFlash: + { + ret |= setFlashMode(Gen3A); + break; + } + + case SetExpLock: + { + ret |= setExposureLock(Gen3A); + break; + } + + case SetWBLock: + { + ret |= setWhiteBalanceLock(Gen3A); + break; + } + case SetMeteringAreas: + { + ret |= setMeteringAreas(Gen3A); + } + break; + default: + CAMHAL_LOGEB("this 
setting (0x%x) is still not supported in CameraAdapter ", + currSett); + break; + } + mPending3Asettings &= ~currSett; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +}; diff --git a/camera/OMXCameraAdapter/OMXAlgo.cpp b/camera/OMXCameraAdapter/OMXAlgo.cpp new file mode 100644 index 0000000..12b9058 --- /dev/null +++ b/camera/OMXCameraAdapter/OMXAlgo.cpp @@ -0,0 +1,1180 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file OMXAlgo.cpp +* +* This file contains functionality for handling algorithm configurations. 
+* +*/ + +#undef LOG_TAG + +#define LOG_TAG "CameraHAL" + +#include "CameraHal.h" +#include "OMXCameraAdapter.h" +#include "ErrorUtils.h" + +#undef TRUE + +namespace android { + +status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters ¶ms, + BaseCameraAdapter::AdapterState state) +{ + status_t ret = NO_ERROR; + const char *valstr = NULL; + const char *oldstr = NULL; + + LOG_FUNCTION_NAME; + + CaptureMode capMode; + CAMHAL_LOGDB("Capture mode %s", params.get(TICameraParameters::KEY_CAP_MODE)); + if ( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL ) + { + if (strcmp(valstr, (const char *) TICameraParameters::HIGH_PERFORMANCE_MODE) == 0) + { + capMode = OMXCameraAdapter::HIGH_SPEED; + } + else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_MODE) == 0) + { + capMode = OMXCameraAdapter::HIGH_QUALITY; + } + else if (strcmp(valstr, (const char *) TICameraParameters::HIGH_QUALITY_ZSL_MODE) == 0) + { + capMode = OMXCameraAdapter::HIGH_QUALITY_ZSL; + } + else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0) + { + capMode = OMXCameraAdapter::VIDEO_MODE; + } + else + { + capMode = OMXCameraAdapter::HIGH_QUALITY; + } + } + else + { + capMode = OMXCameraAdapter::HIGH_QUALITY_ZSL; + + } + + if ( mCapMode != capMode ) + { + mCapMode = capMode; + mOMXStateSwitch = true; + } + + CAMHAL_LOGDB("Capture Mode set %d", mCapMode); + + /// Configure IPP, LDCNSF, GBCE and GLBCE only in HQ mode + IPPMode ipp; + if((mCapMode == OMXCameraAdapter::HIGH_QUALITY) || (mCapMode == OMXCameraAdapter::HIGH_QUALITY_ZSL) + || (mCapMode == OMXCameraAdapter::VIDEO_MODE) ) + { + if ( (valstr = params.get(TICameraParameters::KEY_IPP)) != NULL ) + { + if (strcmp(valstr, (const char *) TICameraParameters::IPP_LDCNSF) == 0) + { + ipp = OMXCameraAdapter::IPP_LDCNSF; + } + else if (strcmp(valstr, (const char *) TICameraParameters::IPP_LDC) == 0) + { + ipp = OMXCameraAdapter::IPP_LDC; + } + else if (strcmp(valstr, (const char *) 
TICameraParameters::IPP_NSF) == 0) + { + ipp = OMXCameraAdapter::IPP_NSF; + } + else if (strcmp(valstr, (const char *) TICameraParameters::IPP_NONE) == 0) + { + ipp = OMXCameraAdapter::IPP_NONE; + } + else + { + ipp = OMXCameraAdapter::IPP_NONE; + } + } + else + { + ipp = OMXCameraAdapter::IPP_NONE; + } + + CAMHAL_LOGVB("IPP Mode set %d", ipp); + + if (((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) ) + { + // Configure GBCE only if the setting has changed since last time + oldstr = mParams.get(TICameraParameters::KEY_GBCE); + bool cmpRes = true; + if ( NULL != oldstr ) + { + cmpRes = strcmp(valstr, oldstr) != 0; + } + else + { + cmpRes = true; + } + + + if( cmpRes ) + { + if (strcmp(valstr, ( const char * ) TICameraParameters::GBCE_ENABLE ) == 0) + { + setGBCE(OMXCameraAdapter::BRIGHTNESS_ON); + } + else if (strcmp(valstr, ( const char * ) TICameraParameters::GBCE_DISABLE ) == 0) + { + setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF); + } + else + { + setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF); + } + } + } + else if(mParams.get(TICameraParameters::KEY_GBCE) || mFirstTimeInit) + { + //Disable GBCE by default + setGBCE(OMXCameraAdapter::BRIGHTNESS_OFF); + } + + if ( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL ) + { + // Configure GLBCE only if the setting has changed since last time + + oldstr = mParams.get(TICameraParameters::KEY_GLBCE); + bool cmpRes = true; + if ( NULL != oldstr ) + { + cmpRes = strcmp(valstr, oldstr) != 0; + } + else + { + cmpRes = true; + } + + + if( cmpRes ) + { + if (strcmp(valstr, ( const char * ) TICameraParameters::GLBCE_ENABLE ) == 0) + { + setGLBCE(OMXCameraAdapter::BRIGHTNESS_ON); + } + else if (strcmp(valstr, ( const char * ) TICameraParameters::GLBCE_DISABLE ) == 0) + { + setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF); + } + else + { + setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF); + } + } + } + else if(mParams.get(TICameraParameters::KEY_GLBCE) || mFirstTimeInit) + { + //Disable GLBCE by default + 
setGLBCE(OMXCameraAdapter::BRIGHTNESS_OFF); + } + } + else + { + ipp = OMXCameraAdapter::IPP_NONE; + } + + if ( mIPP != ipp ) + { + mIPP = ipp; + mOMXStateSwitch = true; + } + + ///Set VNF Configuration + bool vnfEnabled = false; + if ( params.getInt(TICameraParameters::KEY_VNF) > 0 ) + { + CAMHAL_LOGDA("VNF Enabled"); + vnfEnabled = true; + } + else + { + CAMHAL_LOGDA("VNF Disabled"); + vnfEnabled = false; + } + + if ( mVnfEnabled != vnfEnabled ) + { + mVnfEnabled = vnfEnabled; + mOMXStateSwitch = true; + } + + ///Set VSTAB Configuration + bool vstabEnabled = false; + valstr = params.get(CameraParameters::KEY_VIDEO_STABILIZATION); + if (valstr && strcmp(valstr, CameraParameters::TRUE) == 0) { + CAMHAL_LOGDA("VSTAB Enabled"); + vstabEnabled = true; + } + else + { + CAMHAL_LOGDA("VSTAB Disabled"); + vstabEnabled = false; + } + + if ( mVstabEnabled != vstabEnabled ) + { + mVstabEnabled = vstabEnabled; + mOMXStateSwitch = true; + } + + //A work-around for a failing call to OMX flush buffers + if ( ( capMode = OMXCameraAdapter::VIDEO_MODE ) && + ( mVstabEnabled ) ) + { + mOMXStateSwitch = true; + } + +#ifdef OMAP_ENHANCEMENT + + //Set Auto Convergence Mode + valstr = params.get((const char *) TICameraParameters::KEY_AUTOCONVERGENCE); + if ( valstr != NULL ) + { + // Set ManualConvergence default value + OMX_S32 manualconvergence = -30; + if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE) == 0 ) + { + setAutoConvergence(OMX_TI_AutoConvergenceModeDisable, manualconvergence); + } + else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_FRAME) == 0 ) + { + setAutoConvergence(OMX_TI_AutoConvergenceModeFrame, manualconvergence); + } + else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_CENTER) == 0 ) + { + setAutoConvergence(OMX_TI_AutoConvergenceModeCenter, manualconvergence); + } + else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_FFT) == 0 ) + { + 
setAutoConvergence(OMX_TI_AutoConvergenceModeFocusFaceTouch, manualconvergence); + } + else if ( strcmp (valstr, (const char *) TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL) == 0 ) + { + manualconvergence = (OMX_S32)params.getInt(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES); + setAutoConvergence(OMX_TI_AutoConvergenceModeManual, manualconvergence); + } + CAMHAL_LOGVB("AutoConvergenceMode %s, value = %d", valstr, (int) manualconvergence); + } + +#endif + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +// Get AutoConvergence +status_t OMXCameraAdapter::getAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE *pACMode, + OMX_S32 *pManualConverence) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_TI_CONFIG_CONVERGENCETYPE ACParams; + + ACParams.nSize = sizeof(OMX_TI_CONFIG_CONVERGENCETYPE); + ACParams.nVersion = mLocalVersionParam; + ACParams.nPortIndex = OMX_ALL; + + LOG_FUNCTION_NAME; + + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence, + &ACParams); + if ( eError != OMX_ErrorNone ) + { + CAMHAL_LOGEB("Error while getting AutoConvergence 0x%x", eError); + ret = -EINVAL; + } + else + { + *pManualConverence = ACParams.nManualConverence; + *pACMode = ACParams.eACMode; + CAMHAL_LOGDA("AutoConvergence got successfully"); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +// Set AutoConvergence +status_t OMXCameraAdapter::setAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE pACMode, + OMX_S32 pManualConverence) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_TI_CONFIG_CONVERGENCETYPE ACParams; + + LOG_FUNCTION_NAME; + + ACParams.nSize = sizeof(OMX_TI_CONFIG_CONVERGENCETYPE); + ACParams.nVersion = mLocalVersionParam; + ACParams.nPortIndex = OMX_ALL; + ACParams.nManualConverence = pManualConverence; + ACParams.eACMode = pACMode; + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence, + &ACParams); 
+ if ( eError != OMX_ErrorNone ) + { + CAMHAL_LOGEB("Error while setting AutoConvergence 0x%x", eError); + ret = -EINVAL; + } + else + { + CAMHAL_LOGDA("AutoConvergence applied successfully"); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::enableVideoNoiseFilter(bool enable) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_PARAM_VIDEONOISEFILTERTYPE vnfCfg; + + + LOG_FUNCTION_NAME; + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR (&vnfCfg, OMX_PARAM_VIDEONOISEFILTERTYPE); + + if ( enable ) + { + CAMHAL_LOGDA("VNF is enabled"); + vnfCfg.eMode = OMX_VideoNoiseFilterModeOn; + } + else + { + CAMHAL_LOGDA("VNF is disabled"); + vnfCfg.eMode = OMX_VideoNoiseFilterModeOff; + } + + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_IndexParamVideoNoiseFilter, + &vnfCfg); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring video noise filter 0x%x", eError); + ret = -1; + } + else + { + CAMHAL_LOGDA("Video noise filter is configured successfully"); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::enableVideoStabilization(bool enable) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_FRAMESTABTYPE frameStabCfg; + + + LOG_FUNCTION_NAME; + + if ( NO_ERROR == ret ) + { + OMX_CONFIG_BOOLEANTYPE vstabp; + OMX_INIT_STRUCT_PTR (&vstabp, OMX_CONFIG_BOOLEANTYPE); + if(enable) + { + vstabp.bEnabled = OMX_TRUE; + } + else + { + vstabp.bEnabled = OMX_FALSE; + } + + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_IndexParamFrameStabilisation, + &vstabp); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring video stabilization param 0x%x", eError); + ret = -1; + } + else + { + CAMHAL_LOGDA("Video stabilization param configured successfully"); + } + + } + + if ( NO_ERROR == ret ) + { + + OMX_INIT_STRUCT_PTR (&frameStabCfg, 
OMX_CONFIG_FRAMESTABTYPE); + + + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_IndexConfigCommonFrameStabilisation, + &frameStabCfg); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while getting video stabilization mode 0x%x", + (unsigned int)eError); + ret = -1; + } + + CAMHAL_LOGDB("VSTAB Port Index = %d", (int)frameStabCfg.nPortIndex); + + frameStabCfg.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + if ( enable ) + { + CAMHAL_LOGDA("VSTAB is enabled"); + frameStabCfg.bStab = OMX_TRUE; + } + else + { + CAMHAL_LOGDA("VSTAB is disabled"); + frameStabCfg.bStab = OMX_FALSE; + + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_IndexConfigCommonFrameStabilisation, + &frameStabCfg); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring video stabilization mode 0x%x", eError); + ret = -1; + } + else + { + CAMHAL_LOGDA("Video stabilization mode configured successfully"); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::setGBCE(OMXCameraAdapter::BrightnessMode mode) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE bControl; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = -EINVAL; + } + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR (&bControl, OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE); + + bControl.nPortIndex = OMX_ALL; + + switch ( mode ) + { + case OMXCameraAdapter::BRIGHTNESS_ON: + { + bControl.eControl = OMX_TI_BceModeOn; + break; + } + case OMXCameraAdapter::BRIGHTNESS_AUTO: + { + bControl.eControl = OMX_TI_BceModeAuto; + break; + } + case OMXCameraAdapter::BRIGHTNESS_OFF: + default: + { + bControl.eControl = OMX_TI_BceModeOff; + break; + } + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) 
OMX_TI_IndexConfigGlobalBrightnessContrastEnhance, + &bControl); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while setting GBCE 0x%x", eError); + } + else + { + CAMHAL_LOGDB("GBCE configured successfully 0x%x", mode); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::setGLBCE(OMXCameraAdapter::BrightnessMode mode) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE bControl; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = -EINVAL; + } + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR (&bControl, OMX_TI_CONFIG_LOCAL_AND_GLOBAL_BRIGHTNESSCONTRASTTYPE); + bControl.nPortIndex = OMX_ALL; + + switch ( mode ) + { + case OMXCameraAdapter::BRIGHTNESS_ON: + { + bControl.eControl = OMX_TI_BceModeOn; + break; + } + case OMXCameraAdapter::BRIGHTNESS_AUTO: + { + bControl.eControl = OMX_TI_BceModeAuto; + break; + } + case OMXCameraAdapter::BRIGHTNESS_OFF: + default: + { + bControl.eControl = OMX_TI_BceModeOff; + break; + } + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_TI_IndexConfigLocalBrightnessContrastEnhance, + &bControl); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configure GLBCE 0x%x", eError); + } + else + { + CAMHAL_LOGDA("GLBCE configured successfully"); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_CAMOPERATINGMODETYPE camMode; + OMX_TI_PARAM_ZSLHISTORYLENTYPE zslHistoryLen; + OMX_CONFIG_BOOLEANTYPE bCAC; + + LOG_FUNCTION_NAME; + + //ZSL have 4 buffers history by default + OMX_INIT_STRUCT_PTR (&zslHistoryLen, OMX_TI_PARAM_ZSLHISTORYLENTYPE); + zslHistoryLen.nHistoryLen = 4; + + //CAC is disabled by default + 
OMX_INIT_STRUCT_PTR (&bCAC, OMX_CONFIG_BOOLEANTYPE); + bCAC.bEnabled = OMX_FALSE; + + if ( NO_ERROR == ret ) + { + + OMX_INIT_STRUCT_PTR (&camMode, OMX_CONFIG_CAMOPERATINGMODETYPE); + if ( mSensorIndex == OMX_TI_StereoSensor ) + { + CAMHAL_LOGDA("Camera mode: STEREO"); + camMode.eCamOperatingMode = OMX_CaptureStereoImageCapture; + } + else if ( OMXCameraAdapter::HIGH_SPEED == mode ) + { + CAMHAL_LOGDA("Camera mode: HIGH SPEED"); + camMode.eCamOperatingMode = OMX_CaptureImageHighSpeedTemporalBracketing; + } + else if( OMXCameraAdapter::HIGH_QUALITY == mode ) + { + CAMHAL_LOGDA("Camera mode: HIGH QUALITY"); + camMode.eCamOperatingMode = OMX_CaptureImageProfileBase; + } + else if( OMXCameraAdapter::HIGH_QUALITY_ZSL== mode ) + { + const char* valstr = NULL; + CAMHAL_LOGDA("Camera mode: HIGH QUALITY_ZSL"); + camMode.eCamOperatingMode = OMX_TI_CaptureImageProfileZeroShutterLag; + + if ( !mIternalRecordingHint ) { + zslHistoryLen.nHistoryLen = 5; + } + + } + else if( OMXCameraAdapter::VIDEO_MODE == mode ) + { + CAMHAL_LOGDA("Camera mode: VIDEO MODE"); + camMode.eCamOperatingMode = OMX_CaptureVideo; + } + else + { + CAMHAL_LOGEA("Camera mode: INVALID mode passed!"); + return BAD_VALUE; + } + + if( NO_ERROR == ret ) + { + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_TI_IndexParamZslHistoryLen, + &zslHistoryLen); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring ZSL History len 0x%x", eError); + // Don't return status for now + // as high history values might lead + // to errors on some platforms. 
+ // ret = ErrorUtils::omxToAndroidError(eError); + } + else + { + CAMHAL_LOGDA("ZSL History len configured successfully"); + } + } + + if( NO_ERROR == ret ) + { + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_IndexCameraOperatingMode, + &camMode); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring camera mode 0x%x", eError); + ret = ErrorUtils::omxToAndroidError(eError); + } + else + { + CAMHAL_LOGDA("Camera mode configured successfully"); + } + } + + if( NO_ERROR == ret ) + { + //Configure CAC + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_IndexConfigChromaticAberrationCorrection, + &bCAC); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring CAC 0x%x", eError); + ret = ErrorUtils::omxToAndroidError(eError); + } + else + { + CAMHAL_LOGDA("CAC configured successfully"); + } + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::setLDC(OMXCameraAdapter::IPPMode mode) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_BOOLEANTYPE bOMX; + + LOG_FUNCTION_NAME; + + if ( OMX_StateLoaded != mComponentState ) + { + CAMHAL_LOGEA("OMX component is not in loaded state"); + ret = -EINVAL; + } + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE); + + switch ( mode ) + { + case OMXCameraAdapter::IPP_LDCNSF: + case OMXCameraAdapter::IPP_LDC: + { + bOMX.bEnabled = OMX_TRUE; + break; + } + case OMXCameraAdapter::IPP_NONE: + case OMXCameraAdapter::IPP_NSF: + default: + { + bOMX.bEnabled = OMX_FALSE; + break; + } + } + + CAMHAL_LOGVB("Configuring LDC mode 0x%x", bOMX.bEnabled); + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_IndexParamLensDistortionCorrection, + &bOMX); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEA("Error while setting LDC"); + ret = -1; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + 
+status_t OMXCameraAdapter::setNSF(OMXCameraAdapter::IPPMode mode) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_PARAM_ISONOISEFILTERTYPE nsf; + + LOG_FUNCTION_NAME; + + if ( OMX_StateLoaded != mComponentState ) + { + CAMHAL_LOGEA("OMX component is not in loaded state"); + ret = -EINVAL; + } + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR (&nsf, OMX_PARAM_ISONOISEFILTERTYPE); + nsf.nPortIndex = OMX_ALL; + + switch ( mode ) + { + case OMXCameraAdapter::IPP_LDCNSF: + case OMXCameraAdapter::IPP_NSF: + { + nsf.eMode = OMX_ISONoiseFilterModeOn; + break; + } + case OMXCameraAdapter::IPP_LDC: + case OMXCameraAdapter::IPP_NONE: + default: + { + nsf.eMode = OMX_ISONoiseFilterModeOff; + break; + } + } + + CAMHAL_LOGVB("Configuring NSF mode 0x%x", nsf.eMode); + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_IndexParamHighISONoiseFiler, + &nsf); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEA("Error while setting NSF"); + ret = -1; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::setImageQuality(unsigned int quality) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_PARAM_QFACTORTYPE jpegQualityConf; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = -EINVAL; + } + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT(jpegQualityConf, OMX_IMAGE_PARAM_QFACTORTYPE); + jpegQualityConf.nQFactor = quality; + jpegQualityConf.nPortIndex = mCameraAdapterParameters.mImagePortIndex; + + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, + OMX_IndexParamQFactor, + &jpegQualityConf); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring jpeg Quality 0x%x", eError); + ret = -1; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::setThumbnailParams(unsigned int width, + unsigned int height, + unsigned int 
quality) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_PARAM_THUMBNAILTYPE thumbConf; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = -EINVAL; + } + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT(thumbConf, OMX_PARAM_THUMBNAILTYPE); + thumbConf.nPortIndex = mCameraAdapterParameters.mImagePortIndex; + + eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_IndexParamThumbnail, + &thumbConf); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while retrieving thumbnail size 0x%x", eError); + ret = -1; + } + + //CTS Requirement: width or height equal to zero should + //result in absent EXIF thumbnail + if ( ( 0 == width ) || ( 0 == height ) ) + { + thumbConf.nWidth = mThumbRes[0].width; + thumbConf.nHeight = mThumbRes[0].height; + thumbConf.eCompressionFormat = OMX_IMAGE_CodingUnused; + } + else + { + thumbConf.nWidth = width; + thumbConf.nHeight = height; + thumbConf.nQuality = quality; + thumbConf.eCompressionFormat = OMX_IMAGE_CodingJPEG; + } + + CAMHAL_LOGDB("Thumbnail width = %d, Thumbnail Height = %d", width, height); + + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_IndexParamThumbnail, + &thumbConf); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring thumbnail size 0x%x", eError); + ret = -1; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::setAlgoPriority(AlgoPriority priority, + Algorithm3A algo, + bool enable) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + if ( FACE_PRIORITY == priority ) { + + if ( algo & WHITE_BALANCE_ALGO ) { + if ( enable ) { + mFacePriority.bAwbFaceEnable = OMX_TRUE; + } else { + mFacePriority.bAwbFaceEnable = OMX_FALSE; + } + } + + 
if ( algo & EXPOSURE_ALGO ) { + if ( enable ) { + mFacePriority.bAeFaceEnable = OMX_TRUE; + } else { + mFacePriority.bAeFaceEnable = OMX_FALSE; + } + } + + if ( algo & FOCUS_ALGO ) { + if ( enable ) { + mFacePriority.bAfFaceEnable = OMX_TRUE; + } else { + mFacePriority.bAfFaceEnable = OMX_FALSE; + } + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_TI_IndexConfigFacePriority3a, + &mFacePriority); + if ( OMX_ErrorNone != eError ) { + CAMHAL_LOGEB("Error while configuring face priority 0x%x", eError); + } else { + CAMHAL_LOGDB("Face priority for algorithms set successfully 0x%x, 0x%x, 0x%x", + mFacePriority.bAfFaceEnable, + mFacePriority.bAeFaceEnable, + mFacePriority.bAwbFaceEnable); + } + + } else if ( REGION_PRIORITY == priority ) { + + if ( algo & WHITE_BALANCE_ALGO ) { + if ( enable ) { + mRegionPriority.bAwbRegionEnable= OMX_TRUE; + } else { + mRegionPriority.bAwbRegionEnable = OMX_FALSE; + } + } + + if ( algo & EXPOSURE_ALGO ) { + if ( enable ) { + mRegionPriority.bAeRegionEnable = OMX_TRUE; + } else { + mRegionPriority.bAeRegionEnable = OMX_FALSE; + } + } + + if ( algo & FOCUS_ALGO ) { + if ( enable ) { + mRegionPriority.bAfRegionEnable = OMX_TRUE; + } else { + mRegionPriority.bAfRegionEnable = OMX_FALSE; + } + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_TI_IndexConfigRegionPriority3a, + &mRegionPriority); + if ( OMX_ErrorNone != eError ) { + CAMHAL_LOGEB("Error while configuring region priority 0x%x", eError); + } else { + CAMHAL_LOGDB("Region priority for algorithms set successfully 0x%x, 0x%x, 0x%x", + mRegionPriority.bAfRegionEnable, + mRegionPriority.bAeRegionEnable, + mRegionPriority.bAwbRegionEnable); + } + + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::setPictureRotation(unsigned int degree) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_ROTATIONTYPE rotation; 
+ + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = -1; + } + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT(rotation, OMX_CONFIG_ROTATIONTYPE); + rotation.nRotation = degree; + rotation.nPortIndex = mCameraAdapterParameters.mImagePortIndex; + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonRotate, + &rotation); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring rotation 0x%x", eError); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::setSensorOrientation(unsigned int degree) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_ROTATIONTYPE sensorOrientation; + int tmpHeight, tmpWidth; + OMXCameraPortParameters *mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]; + + LOG_FUNCTION_NAME; + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = -1; + } + + /* Set Temproary Port resolution. + * For resolution with height > 1008,resolution cannot be set without configuring orientation. + * So we first set a temp resolution. 
We have used VGA + */ + tmpHeight = mPreviewData->mHeight; + tmpWidth = mPreviewData->mWidth; + mPreviewData->mWidth = 640; + mPreviewData->mHeight = 480; + ret = setFormat(OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, *mPreviewData); + if ( ret != NO_ERROR ) + { + CAMHAL_LOGEB("setFormat() failed %d", ret); + } + + /* Now set Required Orientation*/ + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT(sensorOrientation, OMX_CONFIG_ROTATIONTYPE); + sensorOrientation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonRotate, + &sensorOrientation); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while Reading Sensor Orientation : 0x%x", eError); + } + CAMHAL_LOGVB(" Currently Sensor Orientation is set to : %d", + ( unsigned int ) sensorOrientation.nRotation); + sensorOrientation.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + sensorOrientation.nRotation = degree; + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonRotate, + &sensorOrientation); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring rotation 0x%x", eError); + } + CAMHAL_LOGVA(" Read the Parameters that are set"); + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonRotate, + &sensorOrientation); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while Reading Sensor Orientation : 0x%x", eError); + } + CAMHAL_LOGVB(" Currently Sensor Orientation is set to : %d", + ( unsigned int ) sensorOrientation.nRotation); + CAMHAL_LOGVB(" Sensor Configured for Port : %d", + ( unsigned int ) sensorOrientation.nPortIndex); + } + + /* Now set the required resolution as requested */ + + mPreviewData->mWidth = tmpWidth; + mPreviewData->mHeight = tmpHeight; + if ( NO_ERROR == ret ) + { + ret = setFormat (mCameraAdapterParameters.mPrevPortIndex, + mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]); + if 
( NO_ERROR != ret ) + { + CAMHAL_LOGEB("setFormat() failed %d", ret); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::setVFramerate(OMX_U32 minFrameRate, OMX_U32 maxFrameRate) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_TI_CONFIG_VARFRMRANGETYPE vfr; + OMXCameraPortParameters * mPreviewData = + &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = -EINVAL; + } + + // The port framerate should never be smaller + // than max framerate. + if ( mPreviewData->mFrameRate < maxFrameRate ) { + return NO_INIT; + } + + if ( NO_ERROR == ret ) { + OMX_INIT_STRUCT_PTR (&vfr, OMX_TI_CONFIG_VARFRMRANGETYPE); + + vfr.xMin = minFrameRate<<16; + vfr.xMax = maxFrameRate<<16; + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_TI_IndexConfigVarFrmRange, + &vfr); + if(OMX_ErrorNone != eError) { + CAMHAL_LOGEB("Error while setting VFR min = %d, max = %d, error = 0x%x", + ( unsigned int ) minFrameRate, + ( unsigned int ) maxFrameRate, + eError); + ret = -1; + } else { + CAMHAL_LOGDB("VFR Configured Successfully [%d:%d]", + ( unsigned int ) minFrameRate, + ( unsigned int ) maxFrameRate); + } + } + + return ret; + } + +}; diff --git a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp new file mode 100755 index 0000000..49f9f8c --- /dev/null +++ b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp @@ -0,0 +1,3713 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file OMXCameraAdapter.cpp +* +* This file maps the Camera Hardware Interface to OMX. +* +*/ + +#include "CameraHal.h" +#include "OMXCameraAdapter.h" +#include "ErrorUtils.h" +#include "TICameraParameters.h" +#include +#include + +#include +#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false )) +static int mDebugFps = 0; +static int mDebugFcs = 0; + +#undef TRUE +#undef FALSE + +#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);} + +namespace android { + +#undef LOG_TAG +///Maintain a separate tag for OMXCameraAdapter logs to isolate issues OMX specific +#define LOG_TAG "CameraHAL" + +//frames skipped before recalculating the framerate +#define FPS_PERIOD 30 + +Mutex gAdapterLock; +/*--------------------Camera Adapter Class STARTS here-----------------------------*/ + +status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps) +{ + LOG_FUNCTION_NAME; + + char value[PROPERTY_VALUE_MAX]; + property_get("debug.camera.showfps", value, "0"); + mDebugFps = atoi(value); + property_get("debug.camera.framecounts", value, "0"); + mDebugFcs = atoi(value); + + TIMM_OSAL_ERRORTYPE osalError = OMX_ErrorNone; + OMX_ERRORTYPE eError = OMX_ErrorNone; + status_t ret = NO_ERROR; + + + mLocalVersionParam.s.nVersionMajor = 0x1; + mLocalVersionParam.s.nVersionMinor = 0x1; + mLocalVersionParam.s.nRevision = 0x0 ; + mLocalVersionParam.s.nStep = 0x0; + + mPending3Asettings = 0;//E3AsettingsAll; + mPendingCaptureSettings = 0; + + if ( 0 != mInitSem.Count() ) + { + CAMHAL_LOGEB("Error mInitSem semaphore count 
%d", mInitSem.Count()); + LOG_FUNCTION_NAME_EXIT; + return NO_INIT; + } + + ///Update the preview and image capture port indexes + mCameraAdapterParameters.mPrevPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW; + // temp changed in order to build OMX_CAMERA_PORT_VIDEO_OUT_IMAGE; + mCameraAdapterParameters.mImagePortIndex = OMX_CAMERA_PORT_IMAGE_OUT_IMAGE; + mCameraAdapterParameters.mMeasurementPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT; + //currently not supported use preview port instead + mCameraAdapterParameters.mVideoPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW; + + eError = OMX_Init(); + if (eError != OMX_ErrorNone) { + CAMHAL_LOGEB("OMX_Init() failed, error: 0x%x", eError); + return ErrorUtils::omxToAndroidError(eError); + } + mOmxInitialized = true; + + ///Get the handle to the OMX Component + eError = OMXCameraAdapter::OMXCameraGetHandle(&mCameraAdapterParameters.mHandleComp, (OMX_PTR)this); + if(eError != OMX_ErrorNone) { + CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError); + } + GOTO_EXIT_IF((eError != OMX_ErrorNone), eError); + + mComponentState = OMX_StateLoaded; + + CAMHAL_LOGVB("OMX_GetHandle -0x%x sensor_index = %lu", eError, mSensorIndex); + eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp, + OMX_CommandPortDisable, + OMX_ALL, + NULL); + + if(eError != OMX_ErrorNone) { + CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortDisable) -0x%x", eError); + } + GOTO_EXIT_IF((eError != OMX_ErrorNone), eError); + + // Register for port enable event + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortEnable, + mCameraAdapterParameters.mPrevPortIndex, + mInitSem); + if(ret != NO_ERROR) { + CAMHAL_LOGEB("Error in registering for event %d", ret); + goto EXIT; + } + + // Enable PREVIEW Port + eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp, + OMX_CommandPortEnable, + mCameraAdapterParameters.mPrevPortIndex, + NULL); + if(eError != OMX_ErrorNone) { + 
CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError); + } + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + + // Wait for the port enable event to occur + ret = mInitSem.WaitTimeout(OMX_CMD_TIMEOUT); + if ( NO_ERROR == ret ) { + CAMHAL_LOGDA("-Port enable event arrived"); + } else { + ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortEnable, + mCameraAdapterParameters.mPrevPortIndex, + NULL); + CAMHAL_LOGEA("Timeout for enabling preview port expired!"); + goto EXIT; + } + + // Select the sensor + OMX_CONFIG_SENSORSELECTTYPE sensorSelect; + OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE); + sensorSelect.eSensor = (OMX_SENSORSELECT) mSensorIndex; + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSensorSelect, &sensorSelect); + if ( OMX_ErrorNone != eError ) { + CAMHAL_LOGEB("Error while selecting the sensor index as %d - 0x%x", mSensorIndex, eError); + return BAD_VALUE; + } else { + CAMHAL_LOGDB("Sensor %d selected successfully", mSensorIndex); + } + + printComponentVersion(mCameraAdapterParameters.mHandleComp); + + mBracketingEnabled = false; + mBracketingBuffersQueuedCount = 0; + mBracketingRange = 1; + mLastBracetingBufferIdx = 0; + mOMXStateSwitch = false; + + mCaptureSignalled = false; + mCaptureConfigured = false; + mRecording = false; + mWaitingForSnapshot = false; + mSnapshotCount = 0; + + mCapMode = HIGH_QUALITY; + mIPP = IPP_NULL; + mVstabEnabled = false; + mVnfEnabled = false; + mBurstFrames = 1; + mCapturedFrames = 0; + mPictureQuality = 100; + mCurrentZoomIdx = 0; + mTargetZoomIdx = 0; + mPreviousZoomIndx = 0; + mReturnZoomStatus = false; + mZoomInc = 1; + mZoomParameterIdx = 0; + mExposureBracketingValidEntries = 0; + mSensorOverclock = false; + mIternalRecordingHint = false; + + mDeviceOrientation = 0; + mCapabilities = caps; + mZoomUpdating = false; + mZoomUpdate = false; + + mEXIFData.mGPSData.mAltitudeValid = false; + 
mEXIFData.mGPSData.mDatestampValid = false; + mEXIFData.mGPSData.mLatValid = false; + mEXIFData.mGPSData.mLongValid = false; + mEXIFData.mGPSData.mMapDatumValid = false; + mEXIFData.mGPSData.mProcMethodValid = false; + mEXIFData.mGPSData.mVersionIdValid = false; + mEXIFData.mGPSData.mTimeStampValid = false; + mEXIFData.mModelValid = false; + mEXIFData.mMakeValid = false; + + // initialize command handling thread + if(mCommandHandler.get() == NULL) + mCommandHandler = new CommandHandler(this); + + if ( NULL == mCommandHandler.get() ) + { + CAMHAL_LOGEA("Couldn't create command handler"); + return NO_MEMORY; + } + + ret = mCommandHandler->run("CallbackThread", PRIORITY_URGENT_DISPLAY); + if ( ret != NO_ERROR ) + { + if( ret == INVALID_OPERATION){ + CAMHAL_LOGDA("command handler thread already runnning!!"); + ret = NO_ERROR; + } else + { + CAMHAL_LOGEA("Couldn't run command handlerthread"); + return ret; + } + } + + // initialize omx callback handling thread + if(mOMXCallbackHandler.get() == NULL) + mOMXCallbackHandler = new OMXCallbackHandler(this); + + if ( NULL == mOMXCallbackHandler.get() ) + { + CAMHAL_LOGEA("Couldn't create omx callback handler"); + return NO_MEMORY; + } + + ret = mOMXCallbackHandler->run("OMXCallbackThread", PRIORITY_URGENT_DISPLAY); + if ( ret != NO_ERROR ) + { + if( ret == INVALID_OPERATION){ + CAMHAL_LOGDA("omx callback handler thread already runnning!!"); + ret = NO_ERROR; + }else + { + CAMHAL_LOGEA("Couldn't run omx callback handler thread"); + return ret; + } + } + + //Remove any unhandled events + if (!mEventSignalQ.isEmpty()) { + for (unsigned int i = 0 ;i < mEventSignalQ.size(); i++ ) { + TIUTILS::Message *msg = mEventSignalQ.itemAt(i); + //remove from queue and free msg + if ( NULL != msg ) { + free(msg); + } + } + mEventSignalQ.clear(); + } + + OMX_INIT_STRUCT_PTR (&mRegionPriority, OMX_TI_CONFIG_3A_REGION_PRIORITY); + OMX_INIT_STRUCT_PTR (&mFacePriority, OMX_TI_CONFIG_3A_FACE_PRIORITY); + mRegionPriority.nPortIndex = OMX_ALL; + 
mFacePriority.nPortIndex = OMX_ALL; + + //Setting this flag will that the first setParameter call will apply all 3A settings + //and will not conditionally apply based on current values. + mFirstTimeInit = true; + + memset(mExposureBracketingValues, 0, EXP_BRACKET_RANGE*sizeof(int)); + mMeasurementEnabled = false; + mFaceDetectionRunning = false; + mFaceDetectionPaused = false; + mFDSwitchAlgoPriority = false; + + memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex], 0, sizeof(OMXCameraPortParameters)); + memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex], 0, sizeof(OMXCameraPortParameters)); + + //Initialize 3A defaults + ret = init3AParams(mParameters3A); + if ( NO_ERROR != ret ) { + CAMHAL_LOGEA("Couldn't init 3A params!"); + goto EXIT; + } + + LOG_FUNCTION_NAME_EXIT; + return ErrorUtils::omxToAndroidError(eError); + + EXIT: + + CAMHAL_LOGDB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + performCleanupAfterError(); + LOG_FUNCTION_NAME_EXIT; + return ErrorUtils::omxToAndroidError(eError); +} + +void OMXCameraAdapter::performCleanupAfterError() +{ + if(mCameraAdapterParameters.mHandleComp) + { + ///Free the OMX component handle in case of error + OMX_FreeHandle(mCameraAdapterParameters.mHandleComp); + mCameraAdapterParameters.mHandleComp = NULL; + } + + ///De-init the OMX + OMX_Deinit(); + mComponentState = OMX_StateInvalid; +} + +OMXCameraAdapter::OMXCameraPortParameters *OMXCameraAdapter::getPortParams(CameraFrame::FrameType frameType) +{ + OMXCameraAdapter::OMXCameraPortParameters *ret = NULL; + + switch ( frameType ) + { + case CameraFrame::IMAGE_FRAME: + case CameraFrame::RAW_FRAME: + ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + break; + case CameraFrame::PREVIEW_FRAME_SYNC: + case CameraFrame::SNAPSHOT_FRAME: + case CameraFrame::VIDEO_FRAME_SYNC: + ret = 
&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]; + break; + case CameraFrame::FRAME_DATA_SYNC: + ret = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex]; + break; + default: + break; + }; + + return ret; +} + +status_t OMXCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType) +{ + status_t ret = NO_ERROR; + OMXCameraPortParameters *port = NULL; + OMX_ERRORTYPE eError = OMX_ErrorNone; + BaseCameraAdapter::AdapterState state; + BaseCameraAdapter::getState(state); + + if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE ) + { + return NO_INIT; + } + + if ( NULL == frameBuf ) + { + return -EINVAL; + } + + if ( (NO_ERROR == ret) && + ((CameraFrame::IMAGE_FRAME == frameType) || (CameraFrame::RAW_FRAME == frameType)) && + (1 > mCapturedFrames) && + (!mBracketingEnabled)) { + // Signal end of image capture + if ( NULL != mEndImageCaptureCallback) { + mEndImageCaptureCallback(mEndCaptureData); + } + return NO_ERROR; + } + + if ( NO_ERROR == ret ) + { + port = getPortParams(frameType); + if ( NULL == port ) + { + CAMHAL_LOGEB("Invalid frameType 0x%x", frameType); + ret = -EINVAL; + } + } + + if ( NO_ERROR == ret ) + { + + for ( int i = 0 ; i < port->mNumBufs ; i++) + { + if ( port->mBufferHeader[i]->pBuffer == frameBuf ) + { + eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp, port->mBufferHeader[i]); + if ( eError != OMX_ErrorNone ) + { + CAMHAL_LOGEB("OMX_FillThisBuffer 0x%x", eError); + goto EXIT; + } + mFramesWithDucati++; + break; + } + } + + } + + LOG_FUNCTION_NAME_EXIT; + return ret; + +EXIT: + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + performCleanupAfterError(); + //Since fillthisbuffer is called asynchronously, make sure to signal error to the app + mErrorNotifier->errorNotify(CAMERA_ERROR_HARD); + LOG_FUNCTION_NAME_EXIT; + return (ret | ErrorUtils::omxToAndroidError(eError)); +} + +status_t 
OMXCameraAdapter::setParameters(const CameraParameters ¶ms) +{ + LOG_FUNCTION_NAME; + + const char * str = NULL; + int mode = 0; + status_t ret = NO_ERROR; + bool updateImagePortParams = false; + int minFramerate, maxFramerate, frameRate; + const char *valstr = NULL; + const char *oldstr = NULL; + int w, h; + OMX_COLOR_FORMATTYPE pixFormat; + BaseCameraAdapter::AdapterState state; + BaseCameraAdapter::getState(state); + + ///@todo Include more camera parameters + if ( (valstr = params.getPreviewFormat()) != NULL ) + { + if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 || + strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0 || + strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) + { + CAMHAL_LOGDA("YUV420SP format selected"); + pixFormat = OMX_COLOR_FormatYUV420SemiPlanar; + } + else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) + { + CAMHAL_LOGDA("RGB565 format selected"); + pixFormat = OMX_COLOR_Format16bitRGB565; + } + else + { + CAMHAL_LOGDA("Invalid format, CbYCrY format selected as default"); + pixFormat = OMX_COLOR_FormatCbYCrY; + } + } + else + { + CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY"); + pixFormat = OMX_COLOR_FormatCbYCrY; + } + + OMXCameraPortParameters *cap; + cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]; + + params.getPreviewSize(&w, &h); + frameRate = params.getPreviewFrameRate(); + minFramerate = params.getInt(TICameraParameters::KEY_MINFRAMERATE); + maxFramerate = params.getInt(TICameraParameters::KEY_MAXFRAMERATE); + if ( ( 0 < minFramerate ) && + ( 0 < maxFramerate ) ) + { + if ( minFramerate > maxFramerate ) + { + CAMHAL_LOGEA(" Min FPS set higher than MAX. 
So setting MIN and MAX to the higher value"); + maxFramerate = minFramerate; + } + + if ( 0 >= frameRate ) + { + frameRate = maxFramerate; + } + + if( ( cap->mMinFrameRate != minFramerate ) || + ( cap->mMaxFrameRate != maxFramerate ) ) + { + cap->mMinFrameRate = minFramerate; + cap->mMaxFrameRate = maxFramerate; + setVFramerate(cap->mMinFrameRate, cap->mMaxFrameRate); + } + } + + // TODO(XXX): Limiting 1080p to (24,24) or (15,15) for now. Need to remove later. + if ((w >= 1920) && (h >= 1080)) { + cap->mMaxFrameRate = cap->mMinFrameRate; + setVFramerate(cap->mMinFrameRate, cap->mMaxFrameRate); + } + + if ( 0 < frameRate ) + { + cap->mColorFormat = pixFormat; + cap->mWidth = w; + cap->mHeight = h; + cap->mFrameRate = frameRate; + + CAMHAL_LOGVB("Prev: cap.mColorFormat = %d", (int)cap->mColorFormat); + CAMHAL_LOGVB("Prev: cap.mWidth = %d", (int)cap->mWidth); + CAMHAL_LOGVB("Prev: cap.mHeight = %d", (int)cap->mHeight); + CAMHAL_LOGVB("Prev: cap.mFrameRate = %d", (int)cap->mFrameRate); + + //TODO: Add an additional parameter for video resolution + //use preview resolution for now + cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]; + cap->mColorFormat = pixFormat; + cap->mWidth = w; + cap->mHeight = h; + cap->mFrameRate = frameRate; + + CAMHAL_LOGVB("Video: cap.mColorFormat = %d", (int)cap->mColorFormat); + CAMHAL_LOGVB("Video: cap.mWidth = %d", (int)cap->mWidth); + CAMHAL_LOGVB("Video: cap.mHeight = %d", (int)cap->mHeight); + CAMHAL_LOGVB("Video: cap.mFrameRate = %d", (int)cap->mFrameRate); + + ///mStride is set from setBufs() while passing the APIs + cap->mStride = 4096; + cap->mBufSize = cap->mStride * cap->mHeight; + } + + if ( ( cap->mWidth >= 1920 ) && + ( cap->mHeight >= 1080 ) && + ( cap->mFrameRate >= FRAME_RATE_FULL_HD ) && + ( !mSensorOverclock ) ) + { + mOMXStateSwitch = true; + } + else if ( ( ( cap->mWidth < 1920 ) || + ( cap->mHeight < 1080 ) || + ( cap->mFrameRate < FRAME_RATE_FULL_HD ) ) && + ( mSensorOverclock 
) ) + { + mOMXStateSwitch = true; + } + + valstr = params.get(TICameraParameters::KEY_RECORDING_HINT); + if (!valstr || (valstr && (strcmp(valstr, CameraParameters::FALSE)))) { + mIternalRecordingHint = false; + } else { + mIternalRecordingHint = true; + } + +#ifdef OMAP_ENHANCEMENT + + if ( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL ) + { + if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_ENABLE) == 0) + { + mMeasurementEnabled = true; + } + else if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_DISABLE) == 0) + { + mMeasurementEnabled = false; + } + else + { + mMeasurementEnabled = false; + } + } + else + { + //Disable measurement data by default + mMeasurementEnabled = false; + } + +#endif + + ret |= setParametersCapture(params, state); + + ret |= setParameters3A(params, state); + + ret |= setParametersAlgo(params, state); + + ret |= setParametersFocus(params, state); + + ret |= setParametersFD(params, state); + + ret |= setParametersZoom(params, state); + + ret |= setParametersEXIF(params, state); + + mParams = params; + mFirstTimeInit = false; + + LOG_FUNCTION_NAME_EXIT; + return ret; +} + +void saveFile(unsigned char *buff, int width, int height, int format) { + static int counter = 1; + int fd = -1; + char fn[256]; + + LOG_FUNCTION_NAME; + + fn[0] = 0; + sprintf(fn, "/preview%03d.yuv", counter); + fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777); + if(fd < 0) { + ALOGE("Unable to open file %s: %s", fn, strerror(fd)); + return; + } + + CAMHAL_LOGVB("Copying from 0x%x, size=%d x %d", buff, width, height); + + //method currently supports only nv12 dumping + int stride = width; + uint8_t *bf = (uint8_t*) buff; + for(int i=0;iget(CameraProperties::CAMERA_NAME), + (OMX_SCENEMODETYPE) mParameters3A.SceneMode); + if(entry) { + mParameters3A.Focus = entry->focus; + mParameters3A.FlashMode = entry->flash; + mParameters3A.WhiteBallance = entry->wb; + } + } + + valstr = 
getLUTvalue_OMXtoHAL(mParameters3A.WhiteBallance, WBalLUT); + valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE); + if (valstr && valstr_supported && strstr(valstr_supported, valstr)) + params.set(CameraParameters::KEY_WHITE_BALANCE , valstr); + + valstr = getLUTvalue_OMXtoHAL(mParameters3A.FlashMode, FlashLUT); + valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES); + if (valstr && valstr_supported && strstr(valstr_supported, valstr)) + params.set(CameraParameters::KEY_FLASH_MODE, valstr); + + if ((mParameters3A.Focus == OMX_IMAGE_FocusControlAuto) && + (mCapMode != OMXCameraAdapter::VIDEO_MODE)) { + valstr = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE; + } else { + valstr = getLUTvalue_OMXtoHAL(mParameters3A.Focus, FocusLUT); + } + valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES); + if (valstr && valstr_supported && strstr(valstr_supported, valstr)) + params.set(CameraParameters::KEY_FOCUS_MODE, valstr); + } + + //Query focus distances only when focus is running + if ( ( AF_ACTIVE & state ) || + ( NULL == mParameters.get(CameraParameters::KEY_FOCUS_DISTANCES) ) ) + { + updateFocusDistances(params); + } + else + { + params.set(CameraParameters::KEY_FOCUS_DISTANCES, + mParameters.get(CameraParameters::KEY_FOCUS_DISTANCES)); + } + +#ifdef OMAP_ENHANCEMENT + + OMX_INIT_STRUCT_PTR (&exp, OMX_CONFIG_EXPOSUREVALUETYPE); + exp.nPortIndex = OMX_ALL; + + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonExposureValue, + &exp); + if ( OMX_ErrorNone == eError ) + { + params.set(TICameraParameters::KEY_CURRENT_ISO, exp.nSensitivity); + } + else + { + CAMHAL_LOGEB("OMX error 0x%x, while retrieving current ISO value", eError); + } + +#endif + + { + Mutex::Autolock lock(mZoomLock); + //Immediate zoom should not be avaialable while smooth zoom is running + if ( ZOOM_ACTIVE & state ) + { + if ( mZoomParameterIdx != mCurrentZoomIdx ) + { + mZoomParameterIdx += 
mZoomInc; + } + params.set( CameraParameters::KEY_ZOOM, mZoomParameterIdx); + if ( ( mCurrentZoomIdx == mTargetZoomIdx ) && + ( mZoomParameterIdx == mCurrentZoomIdx ) ) + { + + if ( NO_ERROR == ret ) + { + + ret = BaseCameraAdapter::setState(CAMERA_STOP_SMOOTH_ZOOM); + + if ( NO_ERROR == ret ) + { + ret = BaseCameraAdapter::commitState(); + } + else + { + ret |= BaseCameraAdapter::rollbackState(); + } + + } + + } + + CAMHAL_LOGDB("CameraParameters Zoom = %d", mCurrentZoomIdx); + } + else + { + params.set( CameraParameters::KEY_ZOOM, mCurrentZoomIdx); + } + } + + //Populate current lock status + if ( mParameters3A.ExposureLock ) { + params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, + CameraParameters::TRUE); + } else { + params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, + CameraParameters::FALSE); + } + + if ( mParameters3A.WhiteBalanceLock ) { + params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, + CameraParameters::TRUE); + } else { + params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, + CameraParameters::FALSE); + } + + LOG_FUNCTION_NAME_EXIT; +} + +status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &portParams) +{ + size_t bufferCount; + + LOG_FUNCTION_NAME; + + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_PARAM_PORTDEFINITIONTYPE portCheck; + + OMX_INIT_STRUCT_PTR (&portCheck, OMX_PARAM_PORTDEFINITIONTYPE); + + portCheck.nPortIndex = port; + + eError = OMX_GetParameter (mCameraAdapterParameters.mHandleComp, + OMX_IndexParamPortDefinition, &portCheck); + if(eError!=OMX_ErrorNone) + { + CAMHAL_LOGEB("OMX_GetParameter - %x", eError); + } + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + + if ( OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW == port ) + { + portCheck.format.video.nFrameWidth = portParams.mWidth; + portCheck.format.video.nFrameHeight = portParams.mHeight; + portCheck.format.video.eColorFormat = portParams.mColorFormat; + portCheck.format.video.nStride = portParams.mStride; + if( ( portCheck.format.video.nFrameWidth >= 1920 
) && + ( portCheck.format.video.nFrameHeight >= 1080 ) && + ( portParams.mFrameRate >= FRAME_RATE_FULL_HD ) ) + { + setSensorOverclock(true); + } + else + { + setSensorOverclock(false); + } + + portCheck.format.video.xFramerate = portParams.mFrameRate<<16; + portCheck.nBufferSize = portParams.mStride * portParams.mHeight; + portCheck.nBufferCountActual = portParams.mNumBufs; + mFocusThreshold = FOCUS_THRESHOLD * portParams.mFrameRate; + } + else if ( OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port ) + { + portCheck.format.image.nFrameWidth = portParams.mWidth; + portCheck.format.image.nFrameHeight = portParams.mHeight; + if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingNone ) + { + portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY; + portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG; + } + else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingJPS ) + { + portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY; + portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingJPS; + } + else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingMPO ) + { + portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY; + portCheck.format.image.eCompressionFormat = (OMX_IMAGE_CODINGTYPE) OMX_TI_IMAGE_CodingMPO; + } + else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingRAWJPEG ) + { + //TODO: OMX_IMAGE_CodingJPEG should be changed to OMX_IMAGE_CodingRAWJPEG when + // RAW format is supported + portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY; + portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG; + } + else if ( OMX_COLOR_FormatUnused == portParams.mColorFormat && mCodingMode == CodingRAWMPO ) + { + //TODO: OMX_IMAGE_CodingJPEG should be changed to OMX_IMAGE_CodingRAWMPO when + // RAW format is supported + portCheck.format.image.eColorFormat = OMX_COLOR_FormatCbYCrY; + 
portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG; + } + else + { + portCheck.format.image.eColorFormat = portParams.mColorFormat; + portCheck.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused; + } + + //Stride for 1D tiler buffer is zero + portCheck.format.image.nStride = 0; + portCheck.nBufferSize = portParams.mStride * portParams.mWidth * portParams.mHeight; + portCheck.nBufferCountActual = portParams.mNumBufs; + } + else + { + CAMHAL_LOGEB("Unsupported port index 0x%x", (unsigned int)port); + } + + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, + OMX_IndexParamPortDefinition, &portCheck); + if(eError!=OMX_ErrorNone) + { + CAMHAL_LOGEB("OMX_SetParameter - %x", eError); + } + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + + /* check if parameters are set correctly by calling GetParameter() */ + eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, + OMX_IndexParamPortDefinition, &portCheck); + if(eError!=OMX_ErrorNone) + { + CAMHAL_LOGEB("OMX_GetParameter - %x", eError); + } + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + + portParams.mBufSize = portCheck.nBufferSize; + portParams.mStride = portCheck.format.image.nStride; + + if ( OMX_CAMERA_PORT_IMAGE_OUT_IMAGE == port ) + { + CAMHAL_LOGDB("\n *** IMG Width = %ld", portCheck.format.image.nFrameWidth); + CAMHAL_LOGDB("\n ***IMG Height = %ld", portCheck.format.image.nFrameHeight); + + CAMHAL_LOGDB("\n ***IMG IMG FMT = %x", portCheck.format.image.eColorFormat); + CAMHAL_LOGDB("\n ***IMG portCheck.nBufferSize = %ld\n",portCheck.nBufferSize); + CAMHAL_LOGDB("\n ***IMG portCheck.nBufferCountMin = %ld\n", + portCheck.nBufferCountMin); + CAMHAL_LOGDB("\n ***IMG portCheck.nBufferCountActual = %ld\n", + portCheck.nBufferCountActual); + CAMHAL_LOGDB("\n ***IMG portCheck.format.image.nStride = %ld\n", + portCheck.format.image.nStride); + } + else + { + CAMHAL_LOGDB("\n *** PRV Width = %ld", portCheck.format.video.nFrameWidth); + CAMHAL_LOGDB("\n ***PRV Height = %ld", 
portCheck.format.video.nFrameHeight); + + CAMHAL_LOGDB("\n ***PRV IMG FMT = %x", portCheck.format.video.eColorFormat); + CAMHAL_LOGDB("\n ***PRV portCheck.nBufferSize = %ld\n",portCheck.nBufferSize); + CAMHAL_LOGDB("\n ***PRV portCheck.nBufferCountMin = %ld\n", + portCheck.nBufferCountMin); + CAMHAL_LOGDB("\n ***PRV portCheck.nBufferCountActual = %ld\n", + portCheck.nBufferCountActual); + CAMHAL_LOGDB("\n ***PRV portCheck.format.video.nStride = %ld\n", + portCheck.format.video.nStride); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); + + EXIT: + + CAMHAL_LOGEB("Exiting function %s because of eError=%x", __FUNCTION__, eError); + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::flushBuffers() +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + TIMM_OSAL_ERRORTYPE err; + TIMM_OSAL_U32 uRequestedEvents = OMXCameraAdapter::CAMERA_PORT_FLUSH; + TIMM_OSAL_U32 pRetrievedEvents; + + if ( 0 != mFlushSem.Count() ) + { + CAMHAL_LOGEB("Error mFlushSem semaphore count %d", mFlushSem.Count()); + LOG_FUNCTION_NAME_EXIT; + return NO_INIT; + } + + LOG_FUNCTION_NAME; + + OMXCameraPortParameters * mPreviewData = NULL; + mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]; + + ///Register for the FLUSH event + ///This method just inserts a message in Event Q, which is checked in the callback + ///The sempahore passed is signalled by the callback + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandFlush, + OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, + mFlushSem); + if(ret!=NO_ERROR) + { + CAMHAL_LOGEB("Error in registering for event %d", ret); + goto EXIT; + } + + ///Send FLUSH command to preview port + eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp, + OMX_CommandFlush, + mCameraAdapterParameters.mPrevPortIndex, + NULL); + + if(eError!=OMX_ErrorNone) + { + 
CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandFlush)-0x%x", eError); + } + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + + CAMHAL_LOGDA("Waiting for flush event"); + + ///Wait for the FLUSH event to occur + ret = mFlushSem.WaitTimeout(OMX_CMD_TIMEOUT); + + //If somethiing bad happened while we wait + if (mComponentState == OMX_StateInvalid) + { + CAMHAL_LOGEA("Invalid State after Flush Exitting!!!"); + goto EXIT; + } + + if ( NO_ERROR == ret ) + { + CAMHAL_LOGDA("Flush event received"); + } + else + { + ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandFlush, + OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW, + NULL); + CAMHAL_LOGDA("Flush event timeout expired"); + goto EXIT; + } + + LOG_FUNCTION_NAME_EXIT; + + return (ret | ErrorUtils::omxToAndroidError(eError)); + + EXIT: + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + performCleanupAfterError(); + LOG_FUNCTION_NAME_EXIT; + return (ret | ErrorUtils::omxToAndroidError(eError)); +} + +///API to give the buffers to Adapter +status_t OMXCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable) +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + switch(mode) + { + case CAMERA_PREVIEW: + mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mNumBufs = num; + mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mMaxQueueable = queueable; + ret = UseBuffersPreview(bufArr, num); + break; + + case CAMERA_IMAGE_CAPTURE: + mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mNumBufs = num; + mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mMaxQueueable = queueable; + ret = UseBuffersCapture(bufArr, num); + break; + + case CAMERA_VIDEO: + mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mNumBufs = 
num; + mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mMaxQueueable = queueable; + ret = UseBuffersPreview(bufArr, num); + break; + + case CAMERA_MEASUREMENT: + mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex].mNumBufs = num; + mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex].mMaxQueueable = queueable; + ret = UseBuffersPreviewData(bufArr, num); + break; + + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::UseBuffersPreviewData(void* bufArr, int num) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMXCameraPortParameters * measurementData = NULL; + uint32_t *buffers; + Mutex::Autolock lock( mPreviewDataBufferLock); + + LOG_FUNCTION_NAME; + + if ( mComponentState != OMX_StateLoaded ) + { + CAMHAL_LOGEA("Calling UseBuffersPreviewData() when not in LOADED state"); + return BAD_VALUE; + } + + if ( NULL == bufArr ) + { + CAMHAL_LOGEA("NULL pointer passed for buffArr"); + return BAD_VALUE; + } + + if ( 0 != mUsePreviewDataSem.Count() ) + { + CAMHAL_LOGEB("Error mUsePreviewDataSem semaphore count %d", mUsePreviewDataSem.Count()); + LOG_FUNCTION_NAME_EXIT; + return NO_INIT; + } + + if ( NO_ERROR == ret ) + { + measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex]; + measurementData->mNumBufs = num ; + buffers= (uint32_t*) bufArr; + } + + if ( NO_ERROR == ret ) + { + ///Register for port enable event on measurement port + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortEnable, + mCameraAdapterParameters.mMeasurementPortIndex, + mUsePreviewDataSem); + + if ( ret == NO_ERROR ) + { + CAMHAL_LOGDB("Registering for event %d", ret); + } + else + { + CAMHAL_LOGEB("Error in registering for event %d", ret); + goto EXIT; + } + } + + if ( NO_ERROR == ret ) + { + ///Enable MEASUREMENT Port + eError = 
OMX_SendCommand(mCameraAdapterParameters.mHandleComp, + OMX_CommandPortEnable, + mCameraAdapterParameters.mMeasurementPortIndex, + NULL); + + if ( eError == OMX_ErrorNone ) + { + CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError); + } + else + { + CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError); + goto EXIT; + } + } + + if ( NO_ERROR == ret ) + { + ret = mUsePreviewDataSem.WaitTimeout(OMX_CMD_TIMEOUT); + + //If somethiing bad happened while we wait + if (mComponentState == OMX_StateInvalid) + { + CAMHAL_LOGEA("Invalid State after measurement port enable Exitting!!!"); + goto EXIT; + } + + if ( NO_ERROR == ret ) + { + CAMHAL_LOGDA("Port enable event arrived on measurement port"); + } + else + { + ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortEnable, + mCameraAdapterParameters.mMeasurementPortIndex, + NULL); + CAMHAL_LOGEA("Timeout expoired during port enable on measurement port"); + goto EXIT; + } + + CAMHAL_LOGDA("Port enable event arrived on measurement port"); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +EXIT: + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + performCleanupAfterError(); + LOG_FUNCTION_NAME_EXIT; + return (ret | ErrorUtils::omxToAndroidError(eError)); +} + +status_t OMXCameraAdapter::switchToExecuting() +{ + status_t ret = NO_ERROR; + TIUTILS::Message msg; + + LOG_FUNCTION_NAME; + + mStateSwitchLock.lock(); + msg.command = CommandHandler::CAMERA_SWITCH_TO_EXECUTING; + msg.arg1 = mErrorNotifier; + ret = mCommandHandler->put(&msg); + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::doSwitchToExecuting() +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + LOG_FUNCTION_NAME; + + if ( (mComponentState == OMX_StateExecuting) || (mComponentState == OMX_StateInvalid) ){ + CAMHAL_LOGDA("Already in OMX_Executing state or OMX_StateInvalid state"); + mStateSwitchLock.unlock(); + 
return NO_ERROR; + } + + if ( 0 != mSwitchToExecSem.Count() ){ + CAMHAL_LOGEB("Error mSwitchToExecSem semaphore count %d", mSwitchToExecSem.Count()); + goto EXIT; + } + + ///Register for Preview port DISABLE event + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortDisable, + mCameraAdapterParameters.mPrevPortIndex, + mSwitchToExecSem); + if ( NO_ERROR != ret ){ + CAMHAL_LOGEB("Error in registering Port Disable for event %d", ret); + goto EXIT; + } + ///Disable Preview Port + eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp, + OMX_CommandPortDisable, + mCameraAdapterParameters.mPrevPortIndex, + NULL); + ret = mSwitchToExecSem.WaitTimeout(OMX_CMD_TIMEOUT); + if (ret != NO_ERROR){ + CAMHAL_LOGEB("Timeout PREVIEW PORT DISABLE %d", ret); + } + + CAMHAL_LOGVB("PREV PORT DISABLED %d", ret); + + ///Register for IDLE state switch event + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandStateSet, + OMX_StateIdle, + mSwitchToExecSem); + if(ret!=NO_ERROR) + { + CAMHAL_LOGEB("Error in IDLE STATE SWITCH %d", ret); + goto EXIT; + } + eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp , + OMX_CommandStateSet, + OMX_StateIdle, + NULL); + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + ret = mSwitchToExecSem.WaitTimeout(OMX_CMD_TIMEOUT); + if (ret != NO_ERROR){ + CAMHAL_LOGEB("Timeout IDLE STATE SWITCH %d", ret); + goto EXIT; + } + mComponentState = OMX_StateIdle; + CAMHAL_LOGVB("OMX_SendCommand(OMX_StateIdle) 0x%x", eError); + + ///Register for EXECUTING state switch event + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandStateSet, + OMX_StateExecuting, + mSwitchToExecSem); + if(ret!=NO_ERROR) + { + CAMHAL_LOGEB("Error in EXECUTING STATE SWITCH %d", ret); + goto EXIT; + } + eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp , + OMX_CommandStateSet, + OMX_StateExecuting, + NULL); + 
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + ret = mSwitchToExecSem.WaitTimeout(OMX_CMD_TIMEOUT); + if (ret != NO_ERROR){ + CAMHAL_LOGEB("Timeout EXEC STATE SWITCH %d", ret); + goto EXIT; + } + mComponentState = OMX_StateExecuting; + CAMHAL_LOGVB("OMX_SendCommand(OMX_StateExecuting) 0x%x", eError); + + mStateSwitchLock.unlock(); + + LOG_FUNCTION_NAME_EXIT; + return ret; + + EXIT: + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + performCleanupAfterError(); + mStateSwitchLock.unlock(); + LOG_FUNCTION_NAME_EXIT; + return (ret | ErrorUtils::omxToAndroidError(eError)); +} + +status_t OMXCameraAdapter::switchToLoaded() +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mStateSwitchLock); + + if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid) + { + CAMHAL_LOGDA("Already in OMX_Loaded state or OMX_StateInvalid state"); + return NO_ERROR; + } + + if ( 0 != mSwitchToLoadedSem.Count() ) + { + CAMHAL_LOGEB("Error mSwitchToLoadedSem semaphore count %d", mSwitchToLoadedSem.Count()); + goto EXIT; + } + + ///Register for EXECUTING state transition. 
+ ///This method just inserts a message in Event Q, which is checked in the callback + ///The sempahore passed is signalled by the callback + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandStateSet, + OMX_StateIdle, + mSwitchToLoadedSem); + + if(ret!=NO_ERROR) + { + CAMHAL_LOGEB("Error in registering for event %d", ret); + goto EXIT; + } + + eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp, + OMX_CommandStateSet, + OMX_StateIdle, + NULL); + + if(eError!=OMX_ErrorNone) + { + CAMHAL_LOGEB("OMX_SendCommand(OMX_StateIdle) - %x", eError); + } + + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + + ///Wait for the EXECUTING ->IDLE transition to arrive + + CAMHAL_LOGDA("EXECUTING->IDLE state changed"); + ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT); + + //If somethiing bad happened while we wait + if (mComponentState == OMX_StateInvalid) + { + CAMHAL_LOGEA("Invalid State after EXECUTING->IDLE Exitting!!!"); + goto EXIT; + } + + if ( NO_ERROR == ret ) + { + CAMHAL_LOGDA("EXECUTING->IDLE state changed"); + } + else + { + ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandStateSet, + OMX_StateIdle, + NULL); + CAMHAL_LOGEA("Timeout expired on EXECUTING->IDLE state change"); + goto EXIT; + } + + ///Register for LOADED state transition. 
+ ///This method just inserts a message in Event Q, which is checked in the callback + ///The sempahore passed is signalled by the callback + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandStateSet, + OMX_StateLoaded, + mSwitchToLoadedSem); + + if(ret!=NO_ERROR) + { + CAMHAL_LOGEB("Error in registering for event %d", ret); + goto EXIT; + } + + eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp, + OMX_CommandStateSet, + OMX_StateLoaded, + NULL); + + if(eError!=OMX_ErrorNone) + { + CAMHAL_LOGEB("OMX_SendCommand(OMX_StateLoaded) - %x", eError); + } + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + + CAMHAL_LOGDA("Switching IDLE->LOADED state"); + ret = mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT); + + //If somethiing bad happened while we wait + if (mComponentState == OMX_StateInvalid) + { + CAMHAL_LOGEA("Invalid State after IDLE->LOADED Exitting!!!"); + goto EXIT; + } + + if ( NO_ERROR == ret ) + { + CAMHAL_LOGDA("IDLE->LOADED state changed"); + } + else + { + ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandStateSet, + OMX_StateLoaded, + NULL); + CAMHAL_LOGEA("Timeout expired on IDLE->LOADED state change"); + goto EXIT; + } + + mComponentState = OMX_StateLoaded; + + ///Register for Preview port ENABLE event + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortEnable, + mCameraAdapterParameters.mPrevPortIndex, + mSwitchToLoadedSem); + + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("Error in registering for event %d", ret); + goto EXIT; + } + + ///Enable Preview Port + eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp, + OMX_CommandPortEnable, + mCameraAdapterParameters.mPrevPortIndex, + NULL); + + + CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandStateSet) 0x%x", eError); + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + + CAMHAL_LOGDA("Enabling Preview port"); + ///Wait for state to switch to idle + ret 
= mSwitchToLoadedSem.WaitTimeout(OMX_CMD_TIMEOUT); + + //If somethiing bad happened while we wait + if (mComponentState == OMX_StateInvalid) + { + CAMHAL_LOGEA("Invalid State after Enabling Preview port Exitting!!!"); + goto EXIT; + } + + if ( NO_ERROR == ret ) + { + CAMHAL_LOGDA("Preview port enabled!"); + } + else + { + ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortEnable, + mCameraAdapterParameters.mPrevPortIndex, + NULL); + CAMHAL_LOGEA("Preview enable timedout"); + + goto EXIT; + } + + return (ret | ErrorUtils::omxToAndroidError(eError)); + +EXIT: + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + performCleanupAfterError(); + LOG_FUNCTION_NAME_EXIT; + return (ret | ErrorUtils::omxToAndroidError(eError)); +} + +status_t OMXCameraAdapter::UseBuffersPreview(void* bufArr, int num) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + int tmpHeight, tmpWidth; + + LOG_FUNCTION_NAME; + + if(!bufArr) + { + CAMHAL_LOGEA("NULL pointer passed for buffArr"); + LOG_FUNCTION_NAME_EXIT; + return BAD_VALUE; + } + + OMXCameraPortParameters * mPreviewData = NULL; + OMXCameraPortParameters *measurementData = NULL; + mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]; + measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex]; + mPreviewData->mNumBufs = num ; + uint32_t *buffers = (uint32_t*)bufArr; + + if ( 0 != mUsePreviewSem.Count() ) + { + CAMHAL_LOGEB("Error mUsePreviewSem semaphore count %d", mUsePreviewSem.Count()); + LOG_FUNCTION_NAME_EXIT; + return NO_INIT; + } + + if(mPreviewData->mNumBufs != num) + { + CAMHAL_LOGEA("Current number of buffers doesnt equal new num of buffers passed!"); + LOG_FUNCTION_NAME_EXIT; + return BAD_VALUE; + } + + mStateSwitchLock.lock(); + + if ( mComponentState == OMX_StateLoaded ) + { + + ret = setLDC(mIPP); + if ( NO_ERROR != 
ret ) + { + CAMHAL_LOGEB("setLDC() failed %d", ret); + LOG_FUNCTION_NAME_EXIT; + return ret; + } + + ret = setNSF(mIPP); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("setNSF() failed %d", ret); + LOG_FUNCTION_NAME_EXIT; + return ret; + } + + ret = setCaptureMode(mCapMode); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("setCaptureMode() failed %d", ret); + LOG_FUNCTION_NAME_EXIT; + return ret; + } + + CAMHAL_LOGDB("Camera Mode = %d", mCapMode); + + if( mCapMode == OMXCameraAdapter::VIDEO_MODE ) + { + ///Enable/Disable Video Noise Filter + ret = enableVideoNoiseFilter(mVnfEnabled); + if ( NO_ERROR != ret) + { + CAMHAL_LOGEB("Error configuring VNF %x", ret); + return ret; + } + + ///Enable/Disable Video Stabilization + ret = enableVideoStabilization(mVstabEnabled); + if ( NO_ERROR != ret) + { + CAMHAL_LOGEB("Error configuring VSTAB %x", ret); + return ret; + } + } + else + { + ret = enableVideoNoiseFilter(false); + if ( NO_ERROR != ret) + { + CAMHAL_LOGEB("Error configuring VNF %x", ret); + return ret; + } + ///Enable/Disable Video Stabilization + ret = enableVideoStabilization(false); + if ( NO_ERROR != ret) + { + CAMHAL_LOGEB("Error configuring VSTAB %x", ret); + return ret; + } + } + } + + ret = setSensorOrientation(mSensorOrientation); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("Error configuring Sensor Orientation %x", ret); + mSensorOrientation = 0; + } + + ret = setVFramerate(mPreviewData->mMinFrameRate, mPreviewData->mMaxFrameRate); + if ( ret != NO_ERROR ) + { + CAMHAL_LOGEB("VFR configuration failed 0x%x", ret); + LOG_FUNCTION_NAME_EXIT; + return ret; + } + + if ( mComponentState == OMX_StateLoaded ) + { + ///Register for IDLE state switch event + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandStateSet, + OMX_StateIdle, + mUsePreviewSem); + + if(ret!=NO_ERROR) + { + CAMHAL_LOGEB("Error in registering for event %d", ret); + goto EXIT; + } + + ///Once we get the buffers, move component state to idle state and 
pass the buffers to OMX comp using UseBuffer + eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp , + OMX_CommandStateSet, + OMX_StateIdle, + NULL); + + CAMHAL_LOGDB("OMX_SendCommand(OMX_CommandStateSet) 0x%x", eError); + + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + + mComponentState = OMX_StateIdle; + } + else + { + ///Register for Preview port ENABLE event + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortEnable, + mCameraAdapterParameters.mPrevPortIndex, + mUsePreviewSem); + + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("Error in registering for event %d", ret); + goto EXIT; + } + + ///Enable Preview Port + eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp, + OMX_CommandPortEnable, + mCameraAdapterParameters.mPrevPortIndex, + NULL); + } + + + ///Configure DOMX to use either gralloc handles or vptrs + OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles; + OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER); + + domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + domxUseGrallocHandles.bEnable = OMX_TRUE; + + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles); + if(eError!=OMX_ErrorNone) + { + CAMHAL_LOGEB("OMX_SetParameter - %x", eError); + } + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + + OMX_BUFFERHEADERTYPE *pBufferHdr; + for(int index=0;indexmBufSize, + (OMX_U8*)buffers[index]); + if(eError!=OMX_ErrorNone) + { + CAMHAL_LOGEB("OMX_UseBuffer-0x%x", eError); + } + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + + //pBufferHdr->pAppPrivate = (OMX_PTR)pBufferHdr; + pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE); + pBufferHdr->nVersion.s.nVersionMajor = 1 ; + pBufferHdr->nVersion.s.nVersionMinor = 1 ; + pBufferHdr->nVersion.s.nRevision = 0 ; + pBufferHdr->nVersion.s.nStep = 0; + mPreviewData->mBufferHeader[index] = pBufferHdr; + } + + if ( 
mMeasurementEnabled ) + { + + for( int i = 0; i < num; i++ ) + { + OMX_BUFFERHEADERTYPE *pBufHdr; + eError = OMX_UseBuffer( mCameraAdapterParameters.mHandleComp, + &pBufHdr, + mCameraAdapterParameters.mMeasurementPortIndex, + 0, + measurementData->mBufSize, + (OMX_U8*)(mPreviewDataBuffers[i])); + + if ( eError == OMX_ErrorNone ) + { + pBufHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE); + pBufHdr->nVersion.s.nVersionMajor = 1 ; + pBufHdr->nVersion.s.nVersionMinor = 1 ; + pBufHdr->nVersion.s.nRevision = 0 ; + pBufHdr->nVersion.s.nStep = 0; + measurementData->mBufferHeader[i] = pBufHdr; + } + else + { + CAMHAL_LOGEB("OMX_UseBuffer -0x%x", eError); + ret = BAD_VALUE; + break; + } + } + + } + + CAMHAL_LOGDA("Registering preview buffers"); + + ret = mUsePreviewSem.WaitTimeout(OMX_CMD_TIMEOUT); + + //If somethiing bad happened while we wait + if (mComponentState == OMX_StateInvalid) + { + CAMHAL_LOGEA("Invalid State after Registering preview buffers Exitting!!!"); + goto EXIT; + } + + if ( NO_ERROR == ret ) + { + CAMHAL_LOGDA("Preview buffer registration successfull"); + } + else + { + if ( mComponentState == OMX_StateLoaded ) + { + ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandStateSet, + OMX_StateIdle, + NULL); + } + else + { + ret |= SignalEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortEnable, + mCameraAdapterParameters.mPrevPortIndex, + NULL); + } + CAMHAL_LOGEA("Timeout expired on preview buffer registration"); + goto EXIT; + } + + LOG_FUNCTION_NAME_EXIT; + + return (ret | ErrorUtils::omxToAndroidError(eError)); + + ///If there is any failure, we reach here. 
+ ///Here, we do any resource freeing and convert from OMX error code to Camera Hal error code +EXIT: + mStateSwitchLock.unlock(); + + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + performCleanupAfterError(); + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + + LOG_FUNCTION_NAME_EXIT; + + return (ret | ErrorUtils::omxToAndroidError(eError)); +} + +status_t OMXCameraAdapter::startPreview() +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMXCameraPortParameters *mPreviewData = NULL; + OMXCameraPortParameters *measurementData = NULL; + + LOG_FUNCTION_NAME; + + if( 0 != mStartPreviewSem.Count() ) + { + CAMHAL_LOGEB("Error mStartPreviewSem semaphore count %d", mStartPreviewSem.Count()); + ret = NO_INIT; + goto EXIT; + } + + mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]; + measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex]; + + if( OMX_StateIdle == mComponentState ) + { + ///Register for EXECUTING state transition. 
+ ///This method just inserts a message in Event Q, which is checked in the callback + ///The sempahore passed is signalled by the callback + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandStateSet, + OMX_StateExecuting, + mStartPreviewSem); + + if(ret!=NO_ERROR) + { + CAMHAL_LOGEB("Error in registering for event %d", ret); + goto EXIT; + } + + ///Switch to EXECUTING state + eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp, + OMX_CommandStateSet, + OMX_StateExecuting, + NULL); + + if(eError!=OMX_ErrorNone) + { + CAMHAL_LOGEB("OMX_SendCommand(OMX_StateExecuting)-0x%x", eError); + } + + CAMHAL_LOGDA("+Waiting for component to go into EXECUTING state"); + ret = mStartPreviewSem.WaitTimeout(OMX_CMD_TIMEOUT); + + //If somethiing bad happened while we wait + if (mComponentState == OMX_StateInvalid) + { + CAMHAL_LOGEA("Invalid State after IDLE_EXECUTING Exitting!!!"); + goto EXIT; + } + + if ( NO_ERROR == ret ) + { + CAMHAL_LOGDA("+Great. 
Component went into executing state!!"); + } + else + { + ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandStateSet, + OMX_StateExecuting, + NULL); + CAMHAL_LOGDA("Timeout expired on executing state switch!"); + goto EXIT; + } + + mComponentState = OMX_StateExecuting; + + } + + mStateSwitchLock.unlock(); + + apply3Asettings(mParameters3A); + //Queue all the buffers on preview port + for(int index=0;index< mPreviewData->mMaxQueueable;index++) + { + CAMHAL_LOGDB("Queuing buffer on Preview port - 0x%x", (uint32_t)mPreviewData->mBufferHeader[index]->pBuffer); + eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp, + (OMX_BUFFERHEADERTYPE*)mPreviewData->mBufferHeader[index]); + if(eError!=OMX_ErrorNone) + { + CAMHAL_LOGEB("OMX_FillThisBuffer-0x%x", eError); + } + mFramesWithDucati++; +#ifdef DEGUG_LOG + mBuffersWithDucati.add((uint32_t)mPreviewData->mBufferHeader[index]->pBuffer,1); +#endif + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + } + + if ( mMeasurementEnabled ) + { + + for(int index=0;index< mPreviewData->mNumBufs;index++) + { + CAMHAL_LOGDB("Queuing buffer on Measurement port - 0x%x", (uint32_t) measurementData->mBufferHeader[index]->pBuffer); + eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp, + (OMX_BUFFERHEADERTYPE*) measurementData->mBufferHeader[index]); + if(eError!=OMX_ErrorNone) + { + CAMHAL_LOGEB("OMX_FillThisBuffer-0x%x", eError); + } + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + } + + } + + // Enable Ancillary data. 
The nDCCStatus field is used to signify + // whether the preview frame is a snapshot + if ( OMX_ErrorNone == eError) + { + ret = setExtraData(true, OMX_ALL, OMX_AncillaryData); + } + + + if ( mPending3Asettings ) + apply3Asettings(mParameters3A); + + // enable focus callbacks just once here + // fixes an issue with slow callback registration in Ducati + if ( NO_ERROR == ret ) { + ret = setFocusCallback(true); + } + + //reset frame rate estimates + mFPS = 0.0f; + mLastFPS = 0.0f; + // start frame count from 0. i.e first frame after + // startPreview will be the 0th reference frame + // this way we will wait for second frame until + // takePicture/autoFocus is allowed to run. we + // are seeing SetConfig/GetConfig fail after + // calling after the first frame and not failing + // after the second frame + mFrameCount = -1; + mLastFrameCount = 0; + mIter = 1; + mLastFPSTime = systemTime(); + + LOG_FUNCTION_NAME_EXIT; + + return (ret | ErrorUtils::omxToAndroidError(eError)); + + EXIT: + + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + performCleanupAfterError(); + mStateSwitchLock.unlock(); + LOG_FUNCTION_NAME_EXIT; + + return (ret | ErrorUtils::omxToAndroidError(eError)); + +} + +status_t OMXCameraAdapter::stopPreview() +{ + LOG_FUNCTION_NAME; + + OMX_ERRORTYPE eError = OMX_ErrorNone; + status_t ret = NO_ERROR; + + OMXCameraPortParameters *mCaptureData , *mPreviewData, *measurementData; + mCaptureData = mPreviewData = measurementData = NULL; + + mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]; + mCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + measurementData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex]; + + if (mAdapterState == LOADED_PREVIEW_STATE) { + // Something happened in CameraHal between UseBuffers and startPreview + // this means that state switch is still 
locked..so we need to unlock else + // deadlock will occur on the next start preview + mStateSwitchLock.unlock(); + return NO_ERROR; + } + + if ( mComponentState != OMX_StateExecuting ) + { + CAMHAL_LOGEA("Calling StopPreview() when not in EXECUTING state"); + LOG_FUNCTION_NAME_EXIT; + return NO_INIT; + } + + { + Mutex::Autolock lock(mFrameCountMutex); + // we should wait for the first frame to come before trying to stopPreview...if not + // we might put OMXCamera in a bad state (IDLE->LOADED timeout). Seeing this a lot + // after a capture + if (mFrameCount < 1) { + // I want to wait for at least two frames.... + mFrameCount = -1; + + // first frame may time some time to come...so wait for an adequate amount of time + // which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover. + ret = mFirstFrameCondition.waitRelative(mFrameCountMutex, + (nsecs_t) 2 * OMX_CAPTURE_TIMEOUT * 1000); + } + // even if we timeout waiting for the first frame...go ahead with trying to stop preview + // signal anybody that might be waiting + mFrameCount = 0; + mFirstFrameCondition.broadcast(); + } + + ret = cancelAutoFocus(); + if(ret!=NO_ERROR) + { + CAMHAL_LOGEB("Error canceling autofocus %d", ret); + // Error, but we probably still want to continue to stop preview + } + + OMX_CONFIG_FOCUSASSISTTYPE focusAssist; + OMX_INIT_STRUCT_PTR (&focusAssist, OMX_CONFIG_FOCUSASSISTTYPE); + focusAssist.nPortIndex = OMX_ALL; + focusAssist.bFocusAssist = OMX_FALSE; + CAMHAL_LOGDB("Configuring AF Assist mode 0x%x", focusAssist.bFocusAssist); + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE) OMX_IndexConfigFocusAssist, + &focusAssist); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring AF Assist mode 0x%x", eError); + } + else + { + CAMHAL_LOGDA("Camera AF Assist mode configured successfully"); + } + + if ( 0 != mStopPreviewSem.Count() ) + { + CAMHAL_LOGEB("Error mStopPreviewSem semaphore count %d", mStopPreviewSem.Count()); + LOG_FUNCTION_NAME_EXIT; + 
return NO_INIT; + } + + ret = disableImagePort(); + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("disable image port failed 0x%x", ret); + goto EXIT; + } + + CAMHAL_LOGDB("Average framerate: %f", mFPS); + + //Avoid state switching of the OMX Component + ret = flushBuffers(); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("Flush Buffers failed 0x%x", ret); + goto EXIT; + } + + ///Register for Preview port Disable event + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortDisable, + mCameraAdapterParameters.mPrevPortIndex, + mStopPreviewSem); + + ///Disable Preview Port + eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp, + OMX_CommandPortDisable, + mCameraAdapterParameters.mPrevPortIndex, + NULL); + + ///Free the OMX Buffers + for ( int i = 0 ; i < mPreviewData->mNumBufs ; i++ ) + { + eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp, + mCameraAdapterParameters.mPrevPortIndex, + mPreviewData->mBufferHeader[i]); + + if(eError!=OMX_ErrorNone) + { + CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError); + } + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + } + + if ( mMeasurementEnabled ) + { + + for ( int i = 0 ; i < measurementData->mNumBufs ; i++ ) + { + eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp, + mCameraAdapterParameters.mMeasurementPortIndex, + measurementData->mBufferHeader[i]); + if(eError!=OMX_ErrorNone) + { + CAMHAL_LOGEB("OMX_FreeBuffer - %x", eError); + } + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + } + + { + Mutex::Autolock lock(mPreviewDataBufferLock); + mPreviewDataBuffersAvailable.clear(); + } + + } + + CAMHAL_LOGDA("Disabling preview port"); + ret = mStopPreviewSem.WaitTimeout(OMX_CMD_TIMEOUT); + + //If somethiing bad happened while we wait + if (mComponentState == OMX_StateInvalid) + { + CAMHAL_LOGEA("Invalid State after Disabling preview port Exitting!!!"); + goto EXIT; + } + + if ( NO_ERROR == ret ) + { + CAMHAL_LOGDA("Preview port disabled"); + } + else + { + ret |= 
RemoveEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortDisable, + mCameraAdapterParameters.mPrevPortIndex, + NULL); + CAMHAL_LOGEA("Timeout expired on preview port disable"); + goto EXIT; + } + + { + Mutex::Autolock lock(mPreviewBufferLock); + ///Clear all the available preview buffers + mPreviewBuffersAvailable.clear(); + } + + switchToLoaded(); + + + mFirstTimeInit = true; + mPendingCaptureSettings = 0; + mFramesWithDucati = 0; + mFramesWithDisplay = 0; + mFramesWithEncoder = 0; + + LOG_FUNCTION_NAME_EXIT; + + return (ret | ErrorUtils::omxToAndroidError(eError)); + +EXIT: + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + { + Mutex::Autolock lock(mPreviewBufferLock); + ///Clear all the available preview buffers + mPreviewBuffersAvailable.clear(); + } + performCleanupAfterError(); + LOG_FUNCTION_NAME_EXIT; + return (ret | ErrorUtils::omxToAndroidError(eError)); + +} + +status_t OMXCameraAdapter::setSensorOverclock(bool enable) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_BOOLEANTYPE bOMX; + + LOG_FUNCTION_NAME; + + if ( OMX_StateLoaded != mComponentState ) + { + CAMHAL_LOGDA("OMX component is not in loaded state"); + return ret; + } + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE); + + if ( enable ) + { + bOMX.bEnabled = OMX_TRUE; + } + else + { + bOMX.bEnabled = OMX_FALSE; + } + + CAMHAL_LOGDB("Configuring Sensor overclock mode 0x%x", bOMX.bEnabled); + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, ( OMX_INDEXTYPE ) OMX_TI_IndexParamSensorOverClockMode, &bOMX); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while setting Sensor overclock 0x%x", eError); + ret = BAD_VALUE; + } + else + { + mSensorOverclock = enable; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::printComponentVersion(OMX_HANDLETYPE handle) +{ + status_t ret = NO_ERROR; + 
OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_VERSIONTYPE compVersion; + char compName[OMX_MAX_STRINGNAME_SIZE]; + char *currentUUID = NULL; + size_t offset = 0; + + LOG_FUNCTION_NAME; + + if ( NULL == handle ) + { + CAMHAL_LOGEB("Invalid OMX Handle =0x%x", ( unsigned int ) handle); + ret = -EINVAL; + } + + mCompUUID[0] = 0; + + if ( NO_ERROR == ret ) + { + eError = OMX_GetComponentVersion(handle, + compName, + &compVersion, + &mCompRevision, + &mCompUUID + ); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("OMX_GetComponentVersion returned 0x%x", eError); + ret = BAD_VALUE; + } + } + + if ( NO_ERROR == ret ) + { + CAMHAL_LOGVB("OMX Component name: [%s]", compName); + CAMHAL_LOGVB("OMX Component version: [%u]", ( unsigned int ) compVersion.nVersion); + CAMHAL_LOGVB("Spec version: [%u]", ( unsigned int ) mCompRevision.nVersion); + CAMHAL_LOGVB("Git Commit ID: [%s]", mCompUUID); + currentUUID = ( char * ) mCompUUID; + } + + if ( NULL != currentUUID ) + { + offset = strlen( ( const char * ) mCompUUID) + 1; + if ( (int)currentUUID + (int)offset - (int)mCompUUID < OMX_MAX_STRINGNAME_SIZE ) + { + currentUUID += offset; + CAMHAL_LOGVB("Git Branch: [%s]", currentUUID); + } + else + { + ret = BAD_VALUE; + } + } + + if ( NO_ERROR == ret ) + { + offset = strlen( ( const char * ) currentUUID) + 1; + + if ( (int)currentUUID + (int)offset - (int)mCompUUID < OMX_MAX_STRINGNAME_SIZE ) + { + currentUUID += offset; + CAMHAL_LOGVB("Build date and time: [%s]", currentUUID); + } + else + { + ret = BAD_VALUE; + } + } + + if ( NO_ERROR == ret ) + { + offset = strlen( ( const char * ) currentUUID) + 1; + + if ( (int)currentUUID + (int)offset - (int)mCompUUID < OMX_MAX_STRINGNAME_SIZE ) + { + currentUUID += offset; + CAMHAL_LOGVB("Build description: [%s]", currentUUID); + } + else + { + ret = BAD_VALUE; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::autoFocus() +{ + status_t ret = NO_ERROR; + TIUTILS::Message msg; + + LOG_FUNCTION_NAME; + + { + 
Mutex::Autolock lock(mFrameCountMutex); + if (mFrameCount < 1) { + // first frame may time some time to come...so wait for an adequate amount of time + // which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover. + ret = mFirstFrameCondition.waitRelative(mFrameCountMutex, + (nsecs_t) 2 * OMX_CAPTURE_TIMEOUT * 1000); + if ((NO_ERROR != ret) || (mFrameCount == 0)) { + goto EXIT; + } + } + } + + msg.command = CommandHandler::CAMERA_PERFORM_AUTOFOCUS; + msg.arg1 = mErrorNotifier; + ret = mCommandHandler->put(&msg); + + EXIT: + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::takePicture() +{ + status_t ret = NO_ERROR; + TIUTILS::Message msg; + + LOG_FUNCTION_NAME; + + { + Mutex::Autolock lock(mFrameCountMutex); + if (mFrameCount < 1) { + // first frame may time some time to come...so wait for an adequate amount of time + // which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover. + ret = mFirstFrameCondition.waitRelative(mFrameCountMutex, + (nsecs_t) 2 * OMX_CAPTURE_TIMEOUT * 1000); + if ((NO_ERROR != ret) || (mFrameCount == 0)) { + goto EXIT; + } + } + } + + msg.command = CommandHandler::CAMERA_START_IMAGE_CAPTURE; + msg.arg1 = mErrorNotifier; + ret = mCommandHandler->put(&msg); + + EXIT: + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::startVideoCapture() +{ + return BaseCameraAdapter::startVideoCapture(); +} + +status_t OMXCameraAdapter::stopVideoCapture() +{ + return BaseCameraAdapter::stopVideoCapture(); +} + +//API to get the frame size required to be allocated. 
This size is used to override the size passed +//by camera service when VSTAB/VNF is turned ON for example +status_t OMXCameraAdapter::getFrameSize(size_t &width, size_t &height) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_RECTTYPE tFrameDim; + + LOG_FUNCTION_NAME; + + OMX_INIT_STRUCT_PTR (&tFrameDim, OMX_CONFIG_RECTTYPE); + tFrameDim.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + + if ( mOMXStateSwitch ) + { + ret = switchToLoaded(); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("switchToLoaded() failed 0x%x", ret); + goto exit; + } + + mOMXStateSwitch = false; + } + + if ( OMX_StateLoaded == mComponentState ) + { + + ret = setLDC(mIPP); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("setLDC() failed %d", ret); + LOG_FUNCTION_NAME_EXIT; + goto exit; + } + + ret = setNSF(mIPP); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("setNSF() failed %d", ret); + LOG_FUNCTION_NAME_EXIT; + goto exit; + } + + ret = setCaptureMode(mCapMode); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("setCaptureMode() failed %d", ret); + } + + if(mCapMode == OMXCameraAdapter::VIDEO_MODE) + { + if ( NO_ERROR == ret ) + { + ///Enable/Disable Video Noise Filter + ret = enableVideoNoiseFilter(mVnfEnabled); + } + + if ( NO_ERROR != ret) + { + CAMHAL_LOGEB("Error configuring VNF %x", ret); + } + + if ( NO_ERROR == ret ) + { + ///Enable/Disable Video Stabilization + ret = enableVideoStabilization(mVstabEnabled); + } + + if ( NO_ERROR != ret) + { + CAMHAL_LOGEB("Error configuring VSTAB %x", ret); + } + } + else + { + if ( NO_ERROR == ret ) + { + ///Enable/Disable Video Noise Filter + ret = enableVideoNoiseFilter(false); + } + + if ( NO_ERROR != ret) + { + CAMHAL_LOGEB("Error configuring VNF %x", ret); + } + + if ( NO_ERROR == ret ) + { + ///Enable/Disable Video Stabilization + ret = enableVideoStabilization(false); + } + + if ( NO_ERROR != ret) + { + CAMHAL_LOGEB("Error configuring VSTAB %x", ret); + } + } + + } + + ret = 
setSensorOrientation(mSensorOrientation); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("Error configuring Sensor Orientation %x", ret); + mSensorOrientation = 0; + } + + if ( NO_ERROR == ret ) + { + eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, ( OMX_INDEXTYPE ) OMX_TI_IndexParam2DBufferAllocDimension, &tFrameDim); + if ( OMX_ErrorNone == eError) + { + width = tFrameDim.nWidth; + height = tFrameDim.nHeight; + } + } + +exit: + + CAMHAL_LOGDB("Required frame size %dx%d", width, height); + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount) +{ + status_t ret = NO_ERROR; + OMX_PARAM_PORTDEFINITIONTYPE portCheck; + OMX_ERRORTYPE eError = OMX_ErrorNone; + + LOG_FUNCTION_NAME; + + if ( OMX_StateLoaded != mComponentState ) + { + CAMHAL_LOGEA("Calling getFrameDataSize() when not in LOADED state"); + dataFrameSize = 0; + ret = BAD_VALUE; + } + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR(&portCheck, OMX_PARAM_PORTDEFINITIONTYPE); + portCheck.nPortIndex = mCameraAdapterParameters.mMeasurementPortIndex; + + eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, OMX_IndexParamPortDefinition, &portCheck); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("OMX_GetParameter on OMX_IndexParamPortDefinition returned: 0x%x", eError); + dataFrameSize = 0; + ret = BAD_VALUE; + } + } + + if ( NO_ERROR == ret ) + { + portCheck.nBufferCountActual = bufferCount; + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, OMX_IndexParamPortDefinition, &portCheck); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("OMX_SetParameter on OMX_IndexParamPortDefinition returned: 0x%x", eError); + dataFrameSize = 0; + ret = BAD_VALUE; + } + } + + if ( NO_ERROR == ret ) + { + eError = OMX_GetParameter(mCameraAdapterParameters.mHandleComp, OMX_IndexParamPortDefinition, &portCheck); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("OMX_GetParameter on 
OMX_IndexParamPortDefinition returned: 0x%x", eError); + ret = BAD_VALUE; + } + else + { + mCameraAdapterParameters.mCameraPortParams[portCheck.nPortIndex].mBufSize = portCheck.nBufferSize; + dataFrameSize = portCheck.nBufferSize; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +void OMXCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt) +{ + LOG_FUNCTION_NAME; + + static const unsigned int DEGREES_TILT_IGNORE = 45; + int device_orientation = 0; + int mount_orientation = 0; + const char *facing_direction = NULL; + + // if tilt angle is greater than DEGREES_TILT_IGNORE + // we are going to ignore the orientation returned from + // sensor. the orientation returned from sensor is not + // reliable. Value of DEGREES_TILT_IGNORE may need adjusting + if (tilt > DEGREES_TILT_IGNORE) { + return; + } + + if (mCapabilities) { + if (mCapabilities->get(CameraProperties::ORIENTATION_INDEX)) { + mount_orientation = atoi(mCapabilities->get(CameraProperties::ORIENTATION_INDEX)); + } + facing_direction = mCapabilities->get(CameraProperties::FACING_INDEX); + } + + // calculate device orientation relative to the sensor orientation + // front camera display is mirrored...needs to be accounted for when orientation + // is 90 or 270...since this will result in a flip on orientation otherwise + if (facing_direction && !strcmp(facing_direction, TICameraParameters::FACING_FRONT) && + (orientation == 90 || orientation == 270)) { + device_orientation = (orientation - mount_orientation + 360) % 360; + } else { // back-facing camera + device_orientation = (orientation + mount_orientation) % 360; + } + + if (device_orientation != mDeviceOrientation) { + mDeviceOrientation = device_orientation; + + mFaceDetectionLock.lock(); + if (mFaceDetectionRunning) { + // restart face detection with new rotation + setFaceDetection(true, mDeviceOrientation); + } + mFaceDetectionLock.unlock(); + } + CAMHAL_LOGVB("orientation = %d tilt = %d device_orientation = %d", orientation, 
tilt, mDeviceOrientation); + + LOG_FUNCTION_NAME_EXIT; +} + +/* Application callback Functions */ +/*========================================================*/ +/* @ fn SampleTest_EventHandler :: Application callback */ +/*========================================================*/ +OMX_ERRORTYPE OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_PTR pAppData, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN OMX_PTR pEventData) +{ + LOG_FUNCTION_NAME; + + CAMHAL_LOGDB("Event %d", eEvent); + + OMX_ERRORTYPE ret = OMX_ErrorNone; + OMXCameraAdapter *oca = (OMXCameraAdapter*)pAppData; + ret = oca->OMXCameraAdapterEventHandler(hComponent, eEvent, nData1, nData2, pEventData); + + LOG_FUNCTION_NAME_EXIT; + return ret; +} + +/* Application callback Functions */ +/*========================================================*/ +/* @ fn SampleTest_EventHandler :: Application callback */ +/*========================================================*/ +OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN OMX_PTR pEventData) +{ + + LOG_FUNCTION_NAME; + + OMX_ERRORTYPE eError = OMX_ErrorNone; + CAMHAL_LOGDB("+OMX_Event %x, %d %d", eEvent, (int)nData1, (int)nData2); + + switch (eEvent) { + case OMX_EventCmdComplete: + CAMHAL_LOGDB("+OMX_EventCmdComplete %d %d", (int)nData1, (int)nData2); + + if (OMX_CommandStateSet == nData1) { + mCameraAdapterParameters.mState = (OMX_STATETYPE) nData2; + + } else if (OMX_CommandFlush == nData1) { + CAMHAL_LOGDB("OMX_CommandFlush received for port %d", (int)nData2); + + } else if (OMX_CommandPortDisable == nData1) { + CAMHAL_LOGDB("OMX_CommandPortDisable received for port %d", (int)nData2); + + } else if (OMX_CommandPortEnable == nData1) { + CAMHAL_LOGDB("OMX_CommandPortEnable received for port %d", (int)nData2); + + } else if (OMX_CommandMarkBuffer == 
nData1) { + ///This is not used currently + } + + CAMHAL_LOGDA("-OMX_EventCmdComplete"); + break; + + case OMX_EventIndexSettingChanged: + CAMHAL_LOGDB("OMX_EventIndexSettingChanged event received data1 0x%x, data2 0x%x", + ( unsigned int ) nData1, ( unsigned int ) nData2); + break; + + case OMX_EventError: + CAMHAL_LOGDB("OMX interface failed to execute OMX command %d", (int)nData1); + CAMHAL_LOGDA("See OMX_INDEXTYPE for reference"); + if ( NULL != mErrorNotifier && ( ( OMX_U32 ) OMX_ErrorHardware == nData1 ) && mComponentState != OMX_StateInvalid) + { + CAMHAL_LOGEA("***Got Fatal Error Notification***\n"); + mComponentState = OMX_StateInvalid; + /* + Remove any unhandled events and + unblock any waiting semaphores + */ + if ( !mEventSignalQ.isEmpty() ) + { + for (unsigned int i = 0 ; i < mEventSignalQ.size(); i++ ) + { + CAMHAL_LOGEB("***Removing %d EVENTS***** \n", mEventSignalQ.size()); + //remove from queue and free msg + TIUTILS::Message *msg = mEventSignalQ.itemAt(i); + if ( NULL != msg ) + { + Semaphore *sem = (Semaphore*) msg->arg3; + if ( sem ) + { + sem->Signal(); + } + free(msg); + } + } + mEventSignalQ.clear(); + } + ///Report Error to App + mErrorNotifier->errorNotify(CAMERA_ERROR_FATAL); + } + break; + + case OMX_EventMark: + break; + + case OMX_EventPortSettingsChanged: + break; + + case OMX_EventBufferFlag: + break; + + case OMX_EventResourcesAcquired: + break; + + case OMX_EventComponentResumed: + break; + + case OMX_EventDynamicResourcesAvailable: + break; + + case OMX_EventPortFormatDetected: + break; + + default: + break; + } + + ///Signal to the thread(s) waiting that the event has occured + SignalEvent(hComponent, eEvent, nData1, nData2, pEventData); + + LOG_FUNCTION_NAME_EXIT; + return eError; + + EXIT: + + CAMHAL_LOGEB("Exiting function %s because of eError=%x", __FUNCTION__, eError); + LOG_FUNCTION_NAME_EXIT; + return eError; +} + +OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_EVENTTYPE eEvent, 
+ OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN OMX_PTR pEventData) +{ + Mutex::Autolock lock(mEventLock); + TIUTILS::Message *msg; + bool eventSignalled = false; + + LOG_FUNCTION_NAME; + + if ( !mEventSignalQ.isEmpty() ) + { + CAMHAL_LOGDA("Event queue not empty"); + + for ( unsigned int i = 0 ; i < mEventSignalQ.size() ; i++ ) + { + msg = mEventSignalQ.itemAt(i); + if ( NULL != msg ) + { + if( ( msg->command != 0 || msg->command == ( unsigned int ) ( eEvent ) ) + && ( !msg->arg1 || ( OMX_U32 ) msg->arg1 == nData1 ) + && ( !msg->arg2 || ( OMX_U32 ) msg->arg2 == nData2 ) + && msg->arg3) + { + Semaphore *sem = (Semaphore*) msg->arg3; + CAMHAL_LOGDA("Event matched, signalling sem"); + mEventSignalQ.removeAt(i); + //Signal the semaphore provided + sem->Signal(); + free(msg); + eventSignalled = true; + break; + } + } + } + } + else + { + CAMHAL_LOGDA("Event queue empty!!!"); + } + + // Special handling for any unregistered events + if (!eventSignalled) { + // Handling for focus callback + if ((nData2 == OMX_IndexConfigCommonFocusStatus) && + (eEvent == (OMX_EVENTTYPE) OMX_EventIndexSettingChanged)) { + TIUTILS::Message msg; + msg.command = OMXCallbackHandler::CAMERA_FOCUS_STATUS; + msg.arg1 = NULL; + msg.arg2 = NULL; + mOMXCallbackHandler->put(&msg); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return OMX_ErrorNone; +} + +OMX_ERRORTYPE OMXCameraAdapter::RemoveEvent(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN OMX_PTR pEventData) +{ + Mutex::Autolock lock(mEventLock); + TIUTILS::Message *msg; + LOG_FUNCTION_NAME; + + if ( !mEventSignalQ.isEmpty() ) + { + CAMHAL_LOGDA("Event queue not empty"); + + for ( unsigned int i = 0 ; i < mEventSignalQ.size() ; i++ ) + { + msg = mEventSignalQ.itemAt(i); + if ( NULL != msg ) + { + if( ( msg->command != 0 || msg->command == ( unsigned int ) ( eEvent ) ) + && ( !msg->arg1 || ( OMX_U32 ) msg->arg1 == nData1 ) + && ( !msg->arg2 || ( OMX_U32 ) msg->arg2 
== nData2 ) + && msg->arg3) + { + Semaphore *sem = (Semaphore*) msg->arg3; + CAMHAL_LOGDA("Event matched, signalling sem"); + mEventSignalQ.removeAt(i); + free(msg); + break; + } + } + } + } + else + { + CAMHAL_LOGEA("Event queue empty!!!"); + } + LOG_FUNCTION_NAME_EXIT; + + return OMX_ErrorNone; +} + + +status_t OMXCameraAdapter::RegisterForEvent(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN Semaphore &semaphore) +{ + status_t ret = NO_ERROR; + ssize_t res; + Mutex::Autolock lock(mEventLock); + + LOG_FUNCTION_NAME; + TIUTILS::Message * msg = ( struct TIUTILS::Message * ) malloc(sizeof(struct TIUTILS::Message)); + if ( NULL != msg ) + { + msg->command = ( unsigned int ) eEvent; + msg->arg1 = ( void * ) nData1; + msg->arg2 = ( void * ) nData2; + msg->arg3 = ( void * ) &semaphore; + msg->arg4 = ( void * ) hComponent; + res = mEventSignalQ.add(msg); + if ( NO_MEMORY == res ) + { + CAMHAL_LOGEA("No ressources for inserting OMX events"); + free(msg); + ret = -ENOMEM; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/*========================================================*/ +/* @ fn SampleTest_EmptyBufferDone :: Application callback*/ +/*========================================================*/ +OMX_ERRORTYPE OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_PTR pAppData, + OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader) +{ + LOG_FUNCTION_NAME; + + OMX_ERRORTYPE eError = OMX_ErrorNone; + + OMXCameraAdapter *oca = (OMXCameraAdapter*)pAppData; + eError = oca->OMXCameraAdapterEmptyBufferDone(hComponent, pBuffHeader); + + LOG_FUNCTION_NAME_EXIT; + return eError; +} + + +/*========================================================*/ +/* @ fn SampleTest_EmptyBufferDone :: Application callback*/ +/*========================================================*/ +OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN 
OMX_BUFFERHEADERTYPE* pBuffHeader) +{ + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return OMX_ErrorNone; +} + +static void debugShowFPS() +{ + static int mFrameCount = 0; + static int mLastFrameCount = 0; + static nsecs_t mLastFpsTime = 0; + static float mFps = 0; + mFrameCount++; + if (!(mFrameCount & 0x1F)) { + nsecs_t now = systemTime(); + nsecs_t diff = now - mLastFpsTime; + mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff; + mLastFpsTime = now; + mLastFrameCount = mFrameCount; + ALOGD("Camera %d Frames, %f FPS", mFrameCount, mFps); + } + // XXX: mFPS has the value we want +} + +/*========================================================*/ +/* @ fn SampleTest_FillBufferDone :: Application callback*/ +/*========================================================*/ +OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_PTR pAppData, + OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader) +{ + TIUTILS::Message msg; + OMX_ERRORTYPE eError = OMX_ErrorNone; + + if (UNLIKELY(mDebugFps)) { + debugShowFPS(); + } + + OMXCameraAdapter *adapter = ( OMXCameraAdapter * ) pAppData; + if ( NULL != adapter ) + { + msg.command = OMXCameraAdapter::OMXCallbackHandler::CAMERA_FILL_BUFFER_DONE; + msg.arg1 = ( void * ) hComponent; + msg.arg2 = ( void * ) pBuffHeader; + adapter->mOMXCallbackHandler->put(&msg); + } + + return eError; +} + +/*========================================================*/ +/* @ fn SampleTest_FillBufferDone :: Application callback*/ +/*========================================================*/ +OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader) +{ + + status_t stat = NO_ERROR; + status_t res1, res2; + OMXCameraPortParameters *pPortParam; + OMX_ERRORTYPE eError = OMX_ErrorNone; + CameraFrame::FrameType typeOfFrame = CameraFrame::ALL_FRAMES; + unsigned int refCount = 0; + BaseCameraAdapter::AdapterState state, nextState; + 
BaseCameraAdapter::getState(state); + BaseCameraAdapter::getNextState(nextState); + sp fdResult = NULL; + unsigned int mask = 0xFFFF; + CameraFrame cameraFrame; + OMX_TI_PLATFORMPRIVATE *platformPrivate; + OMX_OTHER_EXTRADATATYPE *extraData; + OMX_TI_ANCILLARYDATATYPE *ancillaryData = NULL; + bool snapshotFrame = false; + + res1 = res2 = NO_ERROR; + pPortParam = &(mCameraAdapterParameters.mCameraPortParams[pBuffHeader->nOutputPortIndex]); + + if ( !pBuffHeader || !pBuffHeader->pBuffer ) { + CAMHAL_LOGEA("NULL Buffer from OMX"); + return OMX_ErrorNone; + } + + if (pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW) + { + + if ( ( PREVIEW_ACTIVE & state ) != PREVIEW_ACTIVE ) + { + return OMX_ErrorNone; + } + + if ( mWaitingForSnapshot ) + { + platformPrivate = (OMX_TI_PLATFORMPRIVATE*) pBuffHeader->pPlatformPrivate; + extraData = getExtradata((OMX_OTHER_EXTRADATATYPE*) platformPrivate->pMetaDataBuffer, + platformPrivate->nMetaDataSize, (OMX_EXTRADATATYPE) OMX_AncillaryData); + + if ( NULL != extraData ) + { + ancillaryData = (OMX_TI_ANCILLARYDATATYPE*) extraData->data; + snapshotFrame = ancillaryData->nDCCStatus; + mPending3Asettings |= SetFocus; + } + } + + recalculateFPS(); + { + Mutex::Autolock lock(mFaceDetectionLock); + if ( mFaceDetectionRunning && !mFaceDetectionPaused ) { + detectFaces(pBuffHeader, fdResult, pPortParam->mWidth, pPortParam->mHeight); + if ( NULL != fdResult.get() ) { + notifyFaceSubscribers(fdResult); + fdResult.clear(); + } + if ( mFDSwitchAlgoPriority ) { + + //Disable region priority and enable face priority for AF + setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false); + setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , true); + + //Disable Region priority and enable Face priority + setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, false); + setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, true); + mFDSwitchAlgoPriority = false; + } + } + } + + ///Prepare the frames to be sent - initialize CameraFrame object and reference count + // 
TODO(XXX): ancillary data for snapshot frame is not being sent for video snapshot + // if we are waiting for a snapshot and in video mode...go ahead and send + // this frame as a snapshot + if( mWaitingForSnapshot && (mCapturedFrames > 0) && + (snapshotFrame || (mCapMode == VIDEO_MODE))) + { + typeOfFrame = CameraFrame::SNAPSHOT_FRAME; + mask = (unsigned int)CameraFrame::SNAPSHOT_FRAME; + + // video snapshot gets ancillary data and wb info from last snapshot frame + mCaptureAncillaryData = ancillaryData; + mWhiteBalanceData = NULL; + extraData = getExtradata((OMX_OTHER_EXTRADATATYPE*) platformPrivate->pMetaDataBuffer, + platformPrivate->nMetaDataSize, (OMX_EXTRADATATYPE) OMX_WhiteBalance); + if ( NULL != extraData ) + { + mWhiteBalanceData = (OMX_TI_WHITEBALANCERESULTTYPE*) extraData->data; + } + } + else + { + typeOfFrame = CameraFrame::PREVIEW_FRAME_SYNC; + mask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC; + } + + if (mRecording) + { + mask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC; + mFramesWithEncoder++; + } + + //ALOGV("FBD pBuffer = 0x%x", pBuffHeader->pBuffer); + + if( mWaitingForSnapshot ) + { + mSnapshotCount++; + + if ( (mSnapshotCount == 1) && + ((HIGH_SPEED == mCapMode) || (VIDEO_MODE == mCapMode)) ) + { + notifyShutterSubscribers(); + } + } + + stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam); + mFramesWithDisplay++; + + mFramesWithDucati--; + +#ifdef DEBUG_LOG + if(mBuffersWithDucati.indexOfKey((int)pBuffHeader->pBuffer)<0) + { + ALOGE("Buffer was never with Ducati!! 
0x%x", pBuffHeader->pBuffer); + for(int i=0;ipBuffer); +#endif + + if(mDebugFcs) + CAMHAL_LOGEB("C[%d] D[%d] E[%d]", mFramesWithDucati, mFramesWithDisplay, mFramesWithEncoder); + + stat |= advanceZoom(); + + // On the fly update to 3A settings not working + // Do not update 3A here if we are in the middle of a capture + // or in the middle of transitioning to it + if( mPending3Asettings && + ( (nextState & CAPTURE_ACTIVE) == 0 ) && + ( (state & CAPTURE_ACTIVE) == 0 ) ) + { + apply3Asettings(mParameters3A); + } + + } + else if( pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT ) + { + typeOfFrame = CameraFrame::FRAME_DATA_SYNC; + mask = (unsigned int)CameraFrame::FRAME_DATA_SYNC; + + stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam); + } + else if( pBuffHeader->nOutputPortIndex == OMX_CAMERA_PORT_IMAGE_OUT_IMAGE ) + { + OMX_COLOR_FORMATTYPE pixFormat; + const char *valstr = NULL; + + pixFormat = mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mColorFormat; + + if ( OMX_COLOR_FormatUnused == pixFormat ) + { + typeOfFrame = CameraFrame::IMAGE_FRAME; + mask = (unsigned int) CameraFrame::IMAGE_FRAME; + } else if ( pixFormat == OMX_COLOR_FormatCbYCrY && + ((mPictureFormatFromClient && + !strcmp(mPictureFormatFromClient, CameraParameters::PIXEL_FORMAT_JPEG)) || + !mPictureFormatFromClient) ) { + // signals to callbacks that this needs to be coverted to jpeg + // before returning to framework + typeOfFrame = CameraFrame::IMAGE_FRAME; + mask = (unsigned int) CameraFrame::IMAGE_FRAME; + cameraFrame.mQuirks |= CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG; + + // populate exif data and pass to subscribers via quirk + // subscriber is in charge of freeing exif data + ExifElementsTable* exif = new ExifElementsTable(); + setupEXIF_libjpeg(exif, mCaptureAncillaryData, mWhiteBalanceData); + cameraFrame.mQuirks |= CameraFrame::HAS_EXIF_DATA; + cameraFrame.mCookie2 = (void*) exif; + } + else + { + typeOfFrame = 
CameraFrame::RAW_FRAME; + mask = (unsigned int) CameraFrame::RAW_FRAME; + } + + pPortParam->mImageType = typeOfFrame; + + if((mCapturedFrames>0) && !mCaptureSignalled) + { + mCaptureSignalled = true; + mCaptureSem.Signal(); + } + + if( ( CAPTURE_ACTIVE & state ) != CAPTURE_ACTIVE ) + { + goto EXIT; + } + + { + Mutex::Autolock lock(mBracketingLock); + if ( mBracketingEnabled ) + { + doBracketing(pBuffHeader, typeOfFrame); + return eError; + } + } + + if ( 1 > mCapturedFrames ) + { + goto EXIT; + } + + CAMHAL_LOGDB("Captured Frames: %d", mCapturedFrames); + + mCapturedFrames--; + + stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam); + + } + else + { + CAMHAL_LOGEA("Frame received for non-(preview/capture/measure) port. This is yet to be supported"); + goto EXIT; + } + + if ( NO_ERROR != stat ) + { + CAMHAL_LOGDB("sendFrameToSubscribers error: %d", stat); + returnFrame(pBuffHeader->pBuffer, typeOfFrame); + } + + return eError; + + EXIT: + + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, stat, eError); + + if ( NO_ERROR != stat ) + { + if ( NULL != mErrorNotifier ) + { + mErrorNotifier->errorNotify(CAMERA_ERROR_UNKNOWN); + } + } + + return eError; +} + +status_t OMXCameraAdapter::recalculateFPS() +{ + float currentFPS; + + { + Mutex::Autolock lock(mFrameCountMutex); + mFrameCount++; + if (mFrameCount == 1) { + mFirstFrameCondition.broadcast(); + } + } + + if ( ( mFrameCount % FPS_PERIOD ) == 0 ) + { + nsecs_t now = systemTime(); + nsecs_t diff = now - mLastFPSTime; + currentFPS = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff; + mLastFPSTime = now; + mLastFrameCount = mFrameCount; + + if ( 1 == mIter ) + { + mFPS = currentFPS; + } + else + { + //cumulative moving average + mFPS = mLastFPS + (currentFPS - mLastFPS)/mIter; + } + + mLastFPS = mFPS; + mIter++; + } + + return NO_ERROR; +} + +status_t OMXCameraAdapter::sendFrame(CameraFrame &frame) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + + if ( NO_ERROR 
== ret ) + { + ret = sendFrameToSubscribers(&frame); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, unsigned int mask, OMXCameraPortParameters *port) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( NULL == port) + { + CAMHAL_LOGEA("Invalid portParam"); + return -EINVAL; + } + + if ( NULL == pBuffHeader ) + { + CAMHAL_LOGEA("Invalid Buffer header"); + return -EINVAL; + } + + Mutex::Autolock lock(mSubscriberLock); + + //frame.mFrameType = typeOfFrame; + frame.mFrameMask = mask; + frame.mBuffer = pBuffHeader->pBuffer; + frame.mLength = pBuffHeader->nFilledLen; + frame.mAlignment = port->mStride; + frame.mOffset = pBuffHeader->nOffset; + frame.mWidth = port->mWidth; + frame.mHeight = port->mHeight; + frame.mYuv[0] = NULL; + frame.mYuv[1] = NULL; + + if ( onlyOnce && mRecording ) + { + mTimeSourceDelta = (pBuffHeader->nTimeStamp * 1000) - systemTime(SYSTEM_TIME_MONOTONIC); + onlyOnce = false; + } + + frame.mTimestamp = (pBuffHeader->nTimeStamp * 1000) - mTimeSourceDelta; + + ret = setInitFrameRefCount(frame.mBuffer, mask); + + if (ret != NO_ERROR) { + CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret); + } else { + ret = sendFrameToSubscribers(&frame); + } + + CAMHAL_LOGVB("B 0x%x T %llu", frame.mBuffer, pBuffHeader->nTimeStamp); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::initCameraFrame( CameraFrame &frame, + OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, + int typeOfFrame, + OMXCameraPortParameters *port) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( NULL == port) + { + CAMHAL_LOGEA("Invalid portParam"); + return -EINVAL; + } + + if ( NULL == pBuffHeader ) + { + CAMHAL_LOGEA("Invalid Buffer header"); + return -EINVAL; + } + + frame.mFrameType = typeOfFrame; + frame.mBuffer = pBuffHeader->pBuffer; + frame.mLength = pBuffHeader->nFilledLen; + frame.mAlignment = port->mStride; + frame.mOffset = 
/**
 * Main loop of the command-handler thread.
 *
 * Blocks on mCommandMsgQ and dispatches each received message to the matching
 * OMXCameraAdapter operation (image capture, autofocus, state switch) until a
 * COMMAND_EXIT message is received, at which point the loop ends and the
 * thread function returns false (do not re-run).
 */
bool OMXCameraAdapter::CommandHandler::Handler()
{
    TIUTILS::Message msg;
    volatile int forever = 1;
    status_t stat;
    // NOTE(review): errorNotify is never assigned or used in this loop, and
    // 'stat' failures are not propagated anywhere — confirm intentional.
    ErrorNotifier *errorNotify = NULL;

    LOG_FUNCTION_NAME;

    while ( forever )
        {
        stat = NO_ERROR;
        CAMHAL_LOGDA("Handler: waiting for messsage...");
        // Block indefinitely (-1) until a command arrives.
        TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
        {
        // Only the queue pop is done under the lock; the command itself runs
        // unlocked so new commands can be queued while one executes.
        Mutex::Autolock lock(mLock);
        mCommandMsgQ.get(&msg);
        }
        CAMHAL_LOGDB("msg.command = %d", msg.command);
        // No default case: unrecognized commands are silently ignored.
        switch ( msg.command ) {
            case CommandHandler::CAMERA_START_IMAGE_CAPTURE:
            {
                stat = mCameraAdapter->startImageCapture();
                break;
            }
            case CommandHandler::CAMERA_PERFORM_AUTOFOCUS:
            {
                stat = mCameraAdapter->doAutoFocus();
                break;
            }
            case CommandHandler::COMMAND_EXIT:
            {
                CAMHAL_LOGDA("Exiting command handler");
                forever = 0;
                break;
            }
            case CommandHandler::CAMERA_SWITCH_TO_EXECUTING:
            {
                stat = mCameraAdapter->doSwitchToExecuting();
                break;
            }
        }

        }

    LOG_FUNCTION_NAME_EXIT;

    return false;
}
+ LOG_FUNCTION_NAME_EXIT; + + return false; +} + +bool OMXCameraAdapter::OMXCallbackHandler::Handler() +{ + TIUTILS::Message msg; + volatile int forever = 1; + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + while(forever){ + TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1); + { + Mutex::Autolock lock(mLock); + mCommandMsgQ.get(&msg); + } + + switch ( msg.command ) { + case OMXCallbackHandler::CAMERA_FILL_BUFFER_DONE: + { + ret = mCameraAdapter->OMXCameraAdapterFillBufferDone(( OMX_HANDLETYPE ) msg.arg1, + ( OMX_BUFFERHEADERTYPE *) msg.arg2); + break; + } + case OMXCallbackHandler::CAMERA_FOCUS_STATUS: + { + mCameraAdapter->handleFocusCallback(); + break; + } + case CommandHandler::COMMAND_EXIT: + { + CAMHAL_LOGDA("Exiting OMX callback handler"); + forever = 0; + break; + } + } + } + + LOG_FUNCTION_NAME_EXIT; + return false; +} + +status_t OMXCameraAdapter::setExtraData(bool enable, OMX_U32 nPortIndex, OMX_EXT_EXTRADATATYPE eType) { + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_EXTRADATATYPE extraDataControl; + + LOG_FUNCTION_NAME; + + if ( ( OMX_StateInvalid == mComponentState ) || + ( NULL == mCameraAdapterParameters.mHandleComp ) ) { + CAMHAL_LOGEA("OMX component is in invalid state"); + return -EINVAL; + } + + OMX_INIT_STRUCT_PTR (&extraDataControl, OMX_CONFIG_EXTRADATATYPE); + + extraDataControl.nPortIndex = nPortIndex; + extraDataControl.eExtraDataType = eType; + extraDataControl.eCameraView = OMX_2D; + + if (enable) { + extraDataControl.bEnable = OMX_TRUE; + } else { + extraDataControl.bEnable = OMX_FALSE; + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE) OMX_IndexConfigOtherExtraDataControl, + &extraDataControl); + + LOG_FUNCTION_NAME_EXIT; + + return (ret | ErrorUtils::omxToAndroidError(eError)); +} + + +OMX_OTHER_EXTRADATATYPE *OMXCameraAdapter::getExtradata(OMX_OTHER_EXTRADATATYPE *extraData, OMX_U32 extraDataSize, OMX_EXTRADATATYPE type) { + OMX_U32 remainingSize 
= extraDataSize; + + if ( NULL != extraData ) { + while ( extraData->eType && extraData->nDataSize && extraData->data && + (remainingSize >= extraData->nSize)) { + if ( type == extraData->eType ) { + return extraData; + } + extraData = (OMX_OTHER_EXTRADATATYPE*) ((char*)extraData + extraData->nSize); + remainingSize -= extraData->nSize; + } + } + + // Required extradata type wasn't found + return NULL; +} + +OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index) +{ + LOG_FUNCTION_NAME; + + mOmxInitialized = false; + mComponentState = OMX_StateInvalid; + mSensorIndex = sensor_index; + mPictureRotation = 0; + // Initial values + mTimeSourceDelta = 0; + onlyOnce = true; + + mInitSem.Create(0); + mFlushSem.Create(0); + mUsePreviewDataSem.Create(0); + mUsePreviewSem.Create(0); + mUseCaptureSem.Create(0); + mStartPreviewSem.Create(0); + mStopPreviewSem.Create(0); + mStartCaptureSem.Create(0); + mStopCaptureSem.Create(0); + mSwitchToLoadedSem.Create(0); + mCaptureSem.Create(0); + + mSwitchToExecSem.Create(0); + + mCameraAdapterParameters.mHandleComp = 0; + + mUserSetExpLock = OMX_FALSE; + mUserSetWbLock = OMX_FALSE; + + mFramesWithDucati = 0; + mFramesWithDisplay = 0; + mFramesWithEncoder = 0; + + LOG_FUNCTION_NAME_EXIT; +} + +OMXCameraAdapter::~OMXCameraAdapter() +{ + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(gAdapterLock); + + if ( mOmxInitialized ) { + // return to OMX Loaded state + switchToLoaded(); + + // deinit the OMX + if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid ) { + // free the handle for the Camera component + if ( mCameraAdapterParameters.mHandleComp ) { + OMX_FreeHandle(mCameraAdapterParameters.mHandleComp); + mCameraAdapterParameters.mHandleComp = NULL; + } + } + + OMX_Deinit(); + mOmxInitialized = false; + } + + //Remove any unhandled events + if ( !mEventSignalQ.isEmpty() ) + { + for (unsigned int i = 0 ; i < mEventSignalQ.size() ; i++ ) + { + TIUTILS::Message *msg = mEventSignalQ.itemAt(i); + //remove from queue and 
free msg + if ( NULL != msg ) + { + Semaphore *sem = (Semaphore*) msg->arg3; + sem->Signal(); + free(msg); + + } + } + mEventSignalQ.clear(); + } + + //Exit and free ref to command handling thread + if ( NULL != mCommandHandler.get() ) + { + TIUTILS::Message msg; + msg.command = CommandHandler::COMMAND_EXIT; + msg.arg1 = mErrorNotifier; + mCommandHandler->clearCommandQ(); + mCommandHandler->put(&msg); + mCommandHandler->requestExitAndWait(); + mCommandHandler.clear(); + } + + //Exit and free ref to callback handling thread + if ( NULL != mOMXCallbackHandler.get() ) + { + TIUTILS::Message msg; + msg.command = OMXCallbackHandler::COMMAND_EXIT; + //Clear all messages pending first + mOMXCallbackHandler->clearCommandQ(); + mOMXCallbackHandler->put(&msg); + mOMXCallbackHandler->requestExitAndWait(); + mOMXCallbackHandler.clear(); + } + + LOG_FUNCTION_NAME_EXIT; +} + +extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index) +{ + CameraAdapter *adapter = NULL; + Mutex::Autolock lock(gAdapterLock); + + LOG_FUNCTION_NAME; + + adapter = new OMXCameraAdapter(sensor_index); + if ( adapter ) { + CAMHAL_LOGDB("New OMX Camera adapter instance created for sensor %d",sensor_index); + } else { + CAMHAL_LOGEA("Camera adapter create failed!"); + } + + LOG_FUNCTION_NAME_EXIT; + + return adapter; +} + +OMX_ERRORTYPE OMXCameraAdapter::OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData ) +{ + OMX_ERRORTYPE eError = OMX_ErrorUndefined; + + for ( int i = 0; i < 5; ++i ) { + if ( i > 0 ) { + // sleep for 100 ms before next attempt + usleep(100000); + } + + // setup key parameters to send to Ducati during init + OMX_CALLBACKTYPE oCallbacks; + + // initialize the callback handles + oCallbacks.EventHandler = android::OMXCameraAdapterEventHandler; + oCallbacks.EmptyBufferDone = android::OMXCameraAdapterEmptyBufferDone; + oCallbacks.FillBufferDone = android::OMXCameraAdapterFillBufferDone; + + // get handle + eError = OMX_GetHandle(handle, 
/**
 * C-linkage capability probe: enumerates available sensors via the OMX camera
 * component and fills one CameraProperties::Properties entry per sensor.
 *
 * Initializes OMX, grabs a component handle, then repeatedly selects sensor
 * index 0,1,... with OMX_TI_IndexConfigSensorSelect and queries capabilities
 * until sensor selection fails or max_camera is reached.
 *
 * @param properties_array array to fill, starting at starting_camera.
 * @param starting_camera  index of the first slot this probe may write.
 * @param max_camera       total slot budget across all adapters.
 * @return number of cameras detected, or a negative/OMX error code on early
 *         failure.
 */
extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
                                          const unsigned int starting_camera,
                                          const unsigned int max_camera) {
    int num_cameras_supported = 0;
    CameraProperties::Properties* properties = NULL;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_HANDLETYPE handle = NULL;
    // NOTE(review): 'caps' is declared but not referenced here; getCaps()
    // presumably does its own query — confirm and consider removing.
    OMX_TI_CAPTYPE caps;

    LOG_FUNCTION_NAME;

    Mutex::Autolock lock(gAdapterLock);

    if (!properties_array) {
        CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
        LOG_FUNCTION_NAME_EXIT;
        return -EINVAL;
    }

    eError = OMX_Init();
    if (eError != OMX_ErrorNone) {
        CAMHAL_LOGEB("Error OMX_Init -0x%x", eError);
        return eError;
    }

    eError = OMXCameraAdapter::OMXCameraGetHandle(&handle);
    if (eError != OMX_ErrorNone) {
        CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
        goto EXIT;
    }

    // Continue selecting sensor and then querying OMX Camera for it's capabilities
    // When sensor select returns an error, we know to break and stop
    while (eError == OMX_ErrorNone &&
           (starting_camera + num_cameras_supported) < max_camera) {
        // sensor select
        OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
        OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
        sensorSelect.eSensor = (OMX_SENSORSELECT) num_cameras_supported;
        eError = OMX_SetConfig(handle, ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSensorSelect, &sensorSelect);

        if ( OMX_ErrorNone != eError ) {
            // No more sensors to select — normal loop termination.
            break;
        }

        // get and fill capabilities
        properties = properties_array + starting_camera + num_cameras_supported;
        OMXCameraAdapter::getCaps(properties, handle);

        // need to fill facing information
        // assume that only sensor 0 is back facing
        if (num_cameras_supported == 0) {
            properties->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_BACK);
        } else {
            properties->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_FRONT);
        }

        num_cameras_supported++;
    }

 EXIT:
    // clean up
    if(handle) {
        OMX_FreeHandle(handle);
        handle=NULL;
    }
    OMX_Deinit();

    LOG_FUNCTION_NAME_EXIT;

    return num_cameras_supported;
}
+* +*/ + +#include "CameraHal.h" +#include "OMXCameraAdapter.h" +#include "ErrorUtils.h" +#include "TICameraParameters.h" + +namespace android { + +#undef LOG_TAG + +// Maintain a separate tag for OMXCameraAdapter logs to isolate issues OMX specific +#define LOG_TAG "CameraHAL" + +/************************************ + * global constants and variables + *************************************/ + +#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0])) +#define FPS_MIN 5 +#define FPS_STEP 5 +#define FPS_RANGE_STEP 5 + +static const char PARAM_SEP[] = ","; +static const int PARAM_SEP_CHAR = ','; +static const uint32_t VFR_OFFSET = 8; +static const char VFR_BACKET_START[] = "("; +static const char VFR_BRACKET_END[] = ")"; +static const char FRAMERATE_COUNT = 10; + +/**** look up tables to translate OMX Caps to Parameter ****/ + +const CapResolution OMXCameraAdapter::mImageCapRes [] = { + { 4032, 3024, "4032x3024" }, + { 4000, 3000, "4000x3000" }, + { 3648, 2736, "3648x2736" }, + { 3264, 2448, "3264x2448" }, + { 2592, 1944, "2592x1944" }, + { 2592, 1728, "2592x1728" }, + { 2592, 1458, "2592x1458" }, + { 2048, 1536, "2048x1536" }, + { 1600, 1200, "1600x1200" }, + { 1280, 1024, "1280x1024" }, + { 1152, 864, "1152x864" }, + { 1280, 960, "1280x960" }, + { 640, 480, "640x480" }, + { 320, 240, "320x240" }, +}; + +const CapResolution OMXCameraAdapter::mPreviewRes [] = { + { 1920, 1080, "1920x1080" }, + { 1280, 720, "1280x720" }, + { 960, 720, "960x720" }, + { 800, 480, "800x480" }, + { 720, 576, "720x576" }, + { 720, 480, "720x480" }, + { 768, 576, "768x576" }, + { 640, 480, "640x480" }, + { 320, 240, "320x240" }, + { 352, 288, "352x288" }, + { 240, 160, "240x160" }, + { 176, 144, "176x144" }, + { 128, 96, "128x96" }, +}; + +const CapResolution OMXCameraAdapter::mThumbRes [] = { + { 640, 480, "640x480" }, + { 160, 120, "160x120" }, + { 200, 120, "200x120" }, + { 320, 240, "320x240" }, + { 512, 384, "512x384" }, + { 352, 144, "352x144" }, + { 176, 144, "176x144" }, + { 
96, 96, "96x96" }, +}; + +const CapPixelformat OMXCameraAdapter::mPixelformats [] = { + { OMX_COLOR_FormatCbYCrY, CameraParameters::PIXEL_FORMAT_YUV422I }, + { OMX_COLOR_FormatYUV420SemiPlanar, CameraParameters::PIXEL_FORMAT_YUV420SP }, + { OMX_COLOR_Format16bitRGB565, CameraParameters::PIXEL_FORMAT_RGB565 }, + { OMX_COLOR_FormatRawBayer10bit, TICameraParameters::PIXEL_FORMAT_RAW }, + { OMX_COLOR_FormatYUV420SemiPlanar, CameraParameters::PIXEL_FORMAT_YUV420P }, +}; + +const CapFramerate OMXCameraAdapter::mFramerates [] = { + { 30, "30" }, + { 15, "15" }, +}; + +const CapZoom OMXCameraAdapter::mZoomStages [] = { + { 65536, "100" }, + { 68157, "104" }, + { 70124, "107" }, + { 72745, "111" }, + { 75366, "115" }, + { 77988, "119" }, + { 80609, "123" }, + { 83231, "127" }, + { 86508, "132" }, + { 89784, "137" }, + { 92406, "141" }, + { 95683, "146" }, + { 99615, "152" }, + { 102892, "157" }, + { 106168, "162" }, + { 110100, "168" }, + { 114033, "174" }, + { 117965, "180" }, + { 122552, "187" }, + { 126484, "193" }, + { 131072, "200" }, + { 135660, "207" }, + { 140247, "214" }, + { 145490, "222" }, + { 150733, "230" }, + { 155976, "238" }, + { 161219, "246" }, + { 167117, "255" }, + { 173015, "264" }, + { 178913, "273" }, + { 185467, "283" }, + { 192020, "293" }, + { 198574, "303" }, + { 205783, "314" }, + { 212992, "325" }, + { 220201, "336" }, + { 228065, "348" }, + { 236585, "361" }, + { 244449, "373" }, + { 252969, "386" }, + { 262144, "400" }, + { 271319, "414" }, + { 281149, "429" }, + { 290980, "444" }, + { 300810, "459" }, + { 311951, "476" }, + { 322437, "492" }, + { 334234, "510" }, + { 346030, "528" }, + { 357827, "546" }, + { 370934, "566" }, + { 384041, "586" }, + { 397148, "606" }, + { 411566, "628" }, + { 425984, "650" }, + { 441057, "673" }, + { 456131, "696" }, + { 472515, "721" }, + { 488899, "746" }, + { 506593, "773" }, + { 524288, "800" }, +}; + +const CapISO OMXCameraAdapter::mISOStages [] = { + { 0, "auto" }, + { 100, "100" }, + { 200, "200"}, + { 
// utility function to remove last seperator
// Truncates 'buffer' at its final ',' — the list separator the encode helpers
// append after every entry — so the finished parameter list has no trailing
// comma.  No-op when the string contains no separator.
void remove_last_sep(char* buffer) {
    char* const last = strrchr(buffer, ',');
    if (last != NULL) {
        *last = '\0';
    }
}
capCount, + char * buffer, + size_t bufferSize) { + status_t ret = NO_ERROR; + bool minInserted = false; + bool maxInserted = false; + char tmpBuffer[FRAMERATE_COUNT]; + + LOG_FUNCTION_NAME; + + if ( ( NULL == buffer ) || ( NULL == cap ) ) { + CAMHAL_LOGEA("Invalid input arguments"); + return -EINVAL; + } + + for ( unsigned int i = 0; i < capCount; i++ ) { + if ( (framerateMax >= cap[i].num) && (framerateMin <= cap[i].num) ) { + strncat(buffer, cap[i].param, bufferSize - 1); + strncat(buffer, PARAM_SEP, bufferSize - 1); + + if ( cap[i].num == framerateMin ) { + minInserted = true; + } + } + if ( cap[i].num == framerateMax ) { + maxInserted = true; + } + } + + if ( !maxInserted ) { + memset(tmpBuffer, 0, FRAMERATE_COUNT); + snprintf(tmpBuffer, FRAMERATE_COUNT - 1, "%u,", ( unsigned int ) framerateMax); + strncat(buffer, tmpBuffer, bufferSize - 1); + strncat(buffer, PARAM_SEP, bufferSize - 1); + } + + if ( !minInserted ) { + memset(tmpBuffer, 0, FRAMERATE_COUNT); + snprintf(tmpBuffer, FRAMERATE_COUNT - 1, "%u,", ( unsigned int ) framerateMin); + strncat(buffer, tmpBuffer, bufferSize - 1); + strncat(buffer, PARAM_SEP, bufferSize - 1); + } + + remove_last_sep(buffer); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::encodeVFramerateCap(OMX_TI_CAPTYPE &caps, + const CapU32Pair *cap, + size_t capCount, + char *buffer, + char *defaultRange, + size_t bufferSize) { + status_t ret = NO_ERROR; + uint32_t minVFR, maxVFR; + int default_index = -1; + + LOG_FUNCTION_NAME; + + if ( (NULL == buffer) || (NULL == cap) ) { + CAMHAL_LOGEA("Invalid input arguments"); + return -EINVAL; + } + + if(caps.ulPrvVarFPSModesCount < 1) { + return NO_ERROR; + } + + // Assumption: last range in tPrvVarFPSModes will be for S30FPSHD mode + minVFR = caps.tPrvVarFPSModes[caps.ulPrvVarFPSModesCount-1].nVarFPSMin >> VFR_OFFSET; + maxVFR = caps.tPrvVarFPSModes[caps.ulPrvVarFPSModesCount-1].nVarFPSMax >> VFR_OFFSET; + + if (minVFR < FPS_MIN) { + minVFR = FPS_MIN; + } + + for 
(unsigned int i = 0; i < capCount; i++) { + // add cap[i] if it is in range and maxVFR != minVFR + if ((maxVFR >= cap[i].num1) && (minVFR <= cap[i].num2)) { + if (buffer[0] != '\0') { + strncat(buffer, PARAM_SEP, bufferSize - 1); + } + strncat(buffer, cap[i].param, bufferSize - 1); + + // choose the max variable framerate as default + if (cap[i].num1 != cap[i].num2) { + default_index = i; + } + } + } + + // if we haven't found any caps in the list to populate + // just use the min and max + if (buffer[0] == '\0') { + snprintf(buffer, bufferSize - 1, + "(%u,%u)", + minVFR * CameraHal::VFR_SCALE, + maxVFR * CameraHal::VFR_SCALE); + } + + if (default_index != -1) { + snprintf(defaultRange, (MAX_PROP_VALUE_LENGTH - 1), "%lu,%lu", + cap[default_index].num2 * CameraHal::VFR_SCALE, + cap[default_index].num1 * CameraHal::VFR_SCALE); + } else { + snprintf(defaultRange, (MAX_PROP_VALUE_LENGTH - 1), "%u,%u", + minVFR * CameraHal::VFR_SCALE, maxVFR * CameraHal::VFR_SCALE); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +size_t OMXCameraAdapter::encodeZoomCap(OMX_S32 maxZoom, + const CapZoom *cap, + size_t capCount, + char * buffer, + size_t bufferSize) { + status_t res = NO_ERROR; + size_t ret = 0; + + LOG_FUNCTION_NAME; + + if ( (NULL == buffer) || (NULL == cap) ) { + CAMHAL_LOGEA("Invalid input arguments"); + return -EINVAL; + } + + + for ( unsigned int i = 0; i < capCount; i++ ) { + if ( cap[i].num <= maxZoom ) { + strncat(buffer, cap[i].param, bufferSize - 1); + strncat(buffer, PARAM_SEP, bufferSize - 1); + ret++; + } + } + remove_last_sep(buffer); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::encodeISOCap(OMX_U32 maxISO, + const CapISO *cap, + size_t capCount, + char * buffer, + size_t bufferSize) { + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( (NULL == buffer) || (NULL == cap) ) { + CAMHAL_LOGEA("Invalid input arguments"); + return -EINVAL; + } + + for ( unsigned int i = 0; i < capCount; i++ ) { + if ( cap[i].num <= 
maxISO) { + strncat(buffer, cap[i].param, bufferSize - 1); + strncat(buffer, PARAM_SEP, bufferSize - 1); + } + } + remove_last_sep(buffer); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res, + const CapResolution *cap, + size_t capCount, + char * buffer, + size_t bufferSize) { + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( (NULL == buffer) || (NULL == cap) ) { + CAMHAL_LOGEA("Invalid input arguments"); + return -EINVAL; + } + + for ( unsigned int i = 0 ; i < capCount ; i++ ) { + if ( (cap[i].width <= res.nWidthMax) && + (cap[i].height <= res.nHeightMax) && + (cap[i].width >= res.nWidthMin) && + (cap[i].height >= res.nHeightMin) ) { + strncat(buffer, cap[i].param, bufferSize -1); + strncat(buffer, PARAM_SEP, bufferSize - 1); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::insertImageSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + ret = encodeSizeCap(caps.tImageResRange, + mImageCapRes, + ARRAY_SIZE(mImageCapRes), + supported, + MAX_PROP_VALUE_LENGTH); + + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("Error inserting supported picture sizes 0x%x", ret); + } else { + remove_last_sep(supported); + params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported); + } + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + ret = encodeSizeCap(caps.tPreviewResRange, + mPreviewRes, + ARRAY_SIZE(mPreviewRes), + supported, + MAX_PROP_VALUE_LENGTH); + + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("Error inserting supported preview sizes 0x%x", ret); + } else { + 
remove_last_sep(supported); + params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported); + } + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertVideoSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + ret = encodeSizeCap(caps.tPreviewResRange, + mPreviewRes, + ARRAY_SIZE(mPreviewRes), + supported, + MAX_PROP_VALUE_LENGTH); + + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("Error inserting supported video sizes 0x%x", ret); + } else { + remove_last_sep(supported); + params->set(CameraProperties::SUPPORTED_VIDEO_SIZES, supported); + } + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + ret = encodeSizeCap(caps.tThumbResRange, + mThumbRes, + ARRAY_SIZE(mThumbRes), + supported, + MAX_PROP_VALUE_LENGTH); + + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("Error inserting supported thumbnail sizes 0x%x", ret); + } else { + //CTS Requirement: 0x0 should always be supported + strncat(supported, "0x0", MAX_PROP_NAME_LENGTH); + params->set(CameraProperties::SUPPORTED_THUMBNAIL_SIZES, supported); + } + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertZoomStages(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) +{ + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + size_t zoomStageCount = 0; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + zoomStageCount = encodeZoomCap(caps.xMaxWidthZoom, + mZoomStages, + ARRAY_SIZE(mZoomStages), + supported, + MAX_PROP_VALUE_LENGTH); + + params->set(CameraProperties::SUPPORTED_ZOOM_RATIOS, supported); + 
params->set(CameraProperties::SUPPORTED_ZOOM_STAGES, zoomStageCount - 1); //As per CTS requirement + + if ( 0 == zoomStageCount ) { + params->set(CameraProperties::ZOOM_SUPPORTED, TICameraParameters::ZOOM_UNSUPPORTED); + params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, TICameraParameters::ZOOM_UNSUPPORTED); + } else { + params->set(CameraProperties::ZOOM_SUPPORTED, TICameraParameters::ZOOM_SUPPORTED); + params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, TICameraParameters::ZOOM_SUPPORTED); + } + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + for ( int i = 0 ; i < caps.ulImageFormatCount ; i++ ) { + ret = encodePixelformatCap(caps.eImageFormats[i], + mPixelformats, + ARRAY_SIZE(mPixelformats), + supported, + MAX_PROP_VALUE_LENGTH); + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("Error inserting supported picture formats 0x%x", ret); + break; + } + } + + if ( NO_ERROR == ret ) { + //jpeg is not supported in OMX capabilies yet + strncat(supported, CameraParameters::PIXEL_FORMAT_JPEG, MAX_PROP_VALUE_LENGTH - 1); + params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, supported); + } + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + for ( int i = 0 ; i < caps.ulPreviewFormatCount; i++ ) { + ret = encodePixelformatCap(caps.ePreviewFormats[i], + mPixelformats, + ARRAY_SIZE(mPixelformats), + supported, + MAX_PROP_VALUE_LENGTH); + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("Error inserting supported preview formats 0x%x", ret); + break; + } + } + + if ( NO_ERROR == ret ) { + // 
need to advertise we support YV12 format + // We will program preview port with NV21 when we see application set YV12 + strncat(supported, CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1); + params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported); + } + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + ret = encodeFramerateCap(caps.xFramerateMax >> VFR_OFFSET, + caps.xFramerateMin >> VFR_OFFSET, + mFramerates, + ARRAY_SIZE(mFramerates), + supported, + MAX_PROP_VALUE_LENGTH); + + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("Error inserting supported preview framerates 0x%x", ret); + } else { + params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, supported); + } + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertVFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + char defaultRange[MAX_PROP_VALUE_LENGTH]; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + ret = encodeVFramerateCap(caps, + mVarFramerates, + ARRAY_SIZE(mVarFramerates), + supported, + defaultRange, + MAX_PROP_VALUE_LENGTH); + + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("Error inserting supported preview framerate ranges 0x%x", ret); + } else { + params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported); + CAMHAL_LOGDB("framerate ranges %s", supported); + params->set(CameraProperties::FRAMERATE_RANGE, DEFAULT_FRAMERATE_RANGE_IMAGE); + params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, DEFAULT_FRAMERATE_RANGE_VIDEO); + params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, DEFAULT_FRAMERATE_RANGE_IMAGE); + CAMHAL_LOGDB("Default framerate range: [%s]", 
DEFAULT_FRAMERATE_RANGE_IMAGE); + } + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertEVs(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.xEVCompensationMin * 10 )); + params->set(CameraProperties::SUPPORTED_EV_MIN, supported); + + snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", ( int ) ( caps.xEVCompensationMax * 10 )); + params->set(CameraProperties::SUPPORTED_EV_MAX, supported); + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + ret = encodeISOCap(caps.nSensitivityMax, + mISOStages, + ARRAY_SIZE(mISOStages), + supported, + MAX_PROP_VALUE_LENGTH); + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("Error inserting supported ISO modes 0x%x", ret); + } else { + params->set(CameraProperties::SUPPORTED_ISO_VALUES, supported); + } + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertIPPModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + //Off is always supported + strncat(supported, TICameraParameters::IPP_NONE, MAX_PROP_NAME_LENGTH); + strncat(supported, PARAM_SEP, 1); + + if ( caps.bLensDistortionCorrectionSupported ) { + strncat(supported, TICameraParameters::IPP_LDC, MAX_PROP_NAME_LENGTH); + strncat(supported, PARAM_SEP, 1); + } + + if ( caps.bISONoiseFilterSupported ) { + strncat(supported, TICameraParameters::IPP_NSF, MAX_PROP_NAME_LENGTH); + strncat(supported, PARAM_SEP, 1); + } + + if ( 
caps.bISONoiseFilterSupported && caps.bLensDistortionCorrectionSupported ) { + strncat(supported, TICameraParameters::IPP_LDCNSF, MAX_PROP_NAME_LENGTH); + strncat(supported, PARAM_SEP, 1); + } + + remove_last_sep(supported); + params->set(CameraProperties::SUPPORTED_IPP_MODES, supported); + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertWBModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + const char *p; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + for ( unsigned int i = 0 ; i < caps.ulWhiteBalanceCount ; i++ ) { + p = getLUTvalue_OMXtoHAL(caps.eWhiteBalanceModes[i], WBalLUT); + if ( NULL != p ) { + strncat(supported, p, MAX_PROP_NAME_LENGTH); + strncat(supported, PARAM_SEP, 1); + } + } + + params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, supported); + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertEffects(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + const char *p; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + for ( unsigned int i = 0 ; i < caps.ulColorEffectCount; i++ ) { + p = getLUTvalue_OMXtoHAL(caps.eColorEffects[i], EffLUT); + if ( NULL != p ) { + strncat(supported, p, MAX_PROP_NAME_LENGTH); + strncat(supported, PARAM_SEP, 1); + } + } + remove_last_sep(supported); + params->set(CameraProperties::SUPPORTED_EFFECTS, supported); + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertExpModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + const char *p; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + for ( unsigned int i = 0 ; i < caps.ulExposureModeCount; i++ ) { + p = getLUTvalue_OMXtoHAL(caps.eExposureModes[i], ExpLUT); + if 
( NULL != p ) { + strncat(supported, p, MAX_PROP_NAME_LENGTH); + strncat(supported, PARAM_SEP, 1); + } + } + + params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, supported); /* NOTE(review): trailing PARAM_SEP not stripped here, unlike sibling helpers — confirm */ + + LOG_FUNCTION_NAME_EXIT; /* fix: exit path previously logged ENTER via LOG_FUNCTION_NAME */ + + return ret; +} + +status_t OMXCameraAdapter::insertFlashModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { /* Publishes flash modes via the OMX->HAL lookup table. */ + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + const char *p; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + for ( unsigned int i = 0 ; i < caps.ulFlashCount; i++ ) { + p = getLUTvalue_OMXtoHAL(caps.eFlashModes[i], FlashLUT); + if ( NULL != p ) { + strncat(supported, p, MAX_PROP_NAME_LENGTH); + strncat(supported, PARAM_SEP, 1); + } + } + + remove_last_sep(supported); + params->set(CameraProperties::SUPPORTED_FLASH_MODES, supported); + + LOG_FUNCTION_NAME_EXIT; /* fix: exit log */ + + return ret; +} + +status_t OMXCameraAdapter::insertSceneModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { /* Publishes scene modes via the OMX->HAL lookup table. */ + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + const char *p; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + for ( unsigned int i = 0 ; i < caps.ulSceneCount; i++ ) { + p = getLUTvalue_OMXtoHAL(caps.eSceneModes[i], SceneLUT); + if ( NULL != p ) { + strncat(supported, p, MAX_PROP_NAME_LENGTH); + strncat(supported, PARAM_SEP, 1); + } + } + + remove_last_sep(supported); + params->set(CameraProperties::SUPPORTED_SCENE_MODES, supported); + + LOG_FUNCTION_NAME_EXIT; /* fix: exit log */ + + return ret; +} + +status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { /* Publishes focus modes; adds infinity when the sensor has no focus control, continuous-picture otherwise. */ + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + const char *p; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + for ( unsigned int i = 0 ; i < caps.ulFocusModeCount; i++ ) { + p = getLUTvalue_OMXtoHAL(caps.eFocusModes[i], FocusLUT); + if ( NULL != p ) { + strncat(supported, p, MAX_PROP_NAME_LENGTH); + strncat(supported, 
PARAM_SEP, 1); + } + } + + // Check if focus is supported by camera + if (caps.ulFocusModeCount == 1 && + caps.eFocusModes[0] == OMX_IMAGE_FocusControlOff) { + // Focus is not supported by camera + // Advertise this to app as infinity focus mode + strncat(supported, CameraParameters::FOCUS_MODE_INFINITY, MAX_PROP_NAME_LENGTH); + } else { + // Focus is supported but these modes are not supported by the + // capability feature. Apply manually + strncat(supported, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, MAX_PROP_NAME_LENGTH); + } + + params->set(CameraProperties::SUPPORTED_FOCUS_MODES, supported); + + LOG_FUNCTION_NAME_EXIT; /* fix: exit log */ + + return ret; +} + +status_t OMXCameraAdapter::insertFlickerModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { /* Publishes antibanding (flicker) modes via the OMX->HAL lookup table. */ + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + const char *p; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + for ( unsigned int i = 0 ; i < caps.ulFlickerCount; i++ ) { + p = getLUTvalue_OMXtoHAL(caps.eFlicker[i], FlickerLUT); + if ( NULL != p ) { + strncat(supported, p, MAX_PROP_NAME_LENGTH); + strncat(supported, PARAM_SEP, 1); + } + } + remove_last_sep(supported); + params->set(CameraProperties::SUPPORTED_ANTIBANDING, supported); + + LOG_FUNCTION_NAME_EXIT; /* fix: exit log */ + + return ret; +} + +status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { /* Publishes the maximum number of focus and metering areas the algo supports. */ + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + const char *p; + + LOG_FUNCTION_NAME; + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", caps.ulAlgoAreasFocusCount); /* fix: bounded snprintf instead of sprintf */ + params->set(CameraProperties::MAX_FOCUS_AREAS, supported); + CAMHAL_LOGDB("Maximum supported focus areas %s", supported); + + memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + snprintf(supported, MAX_PROP_VALUE_LENGTH, "%d", caps.ulAlgoAreasExposureCount); /* fix: bounded snprintf instead of sprintf */ + params->set(CameraProperties::MAX_NUM_METERING_AREAS, supported); + CAMHAL_LOGDB("Maximum supported exposure areas %s", supported); + + 
LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertLocks(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME + + params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, DEFAULT_LOCK_SUPPORTED); + params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, DEFAULT_LOCK_SUPPORTED); + + LOG_FUNCTION_NAME + + return ret; +} + +status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + const char *p; + + LOG_FUNCTION_NAME; + + params->set(CameraProperties::ANTIBANDING, DEFAULT_ANTIBANDING); + params->set(CameraProperties::BRIGHTNESS, DEFAULT_BRIGHTNESS); + params->set(CameraProperties::CONTRAST, DEFAULT_CONTRAST); + params->set(CameraProperties::EFFECT, DEFAULT_EFFECT); + params->set(CameraProperties::EV_COMPENSATION, DEFAULT_EV_COMPENSATION); + params->set(CameraProperties::SUPPORTED_EV_STEP, DEFAULT_EV_STEP); + params->set(CameraProperties::EXPOSURE_MODE, DEFAULT_EXPOSURE_MODE); + params->set(CameraProperties::FLASH_MODE, DEFAULT_FLASH_MODE); + char *pos = strstr(params->get(CameraProperties::SUPPORTED_FOCUS_MODES), DEFAULT_FOCUS_MODE_PREFERRED); + if ( NULL != pos ) + { + params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE_PREFERRED); + } + else + { + params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE); + } + params->set(CameraProperties::IPP, DEFAULT_IPP); + params->set(CameraProperties::GBCE, DEFAULT_GBCE); + params->set(CameraProperties::ISO_MODE, DEFAULT_ISO_MODE); + params->set(CameraProperties::JPEG_QUALITY, DEFAULT_JPEG_QUALITY); + params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, DEFAULT_THUMBNAIL_QUALITY); + params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, DEFAULT_THUMBNAIL_SIZE); + params->set(CameraProperties::PICTURE_FORMAT, DEFAULT_PICTURE_FORMAT); + params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE); + 
params->set(CameraProperties::PREVIEW_FORMAT, DEFAULT_PREVIEW_FORMAT); + params->set(CameraProperties::PREVIEW_FRAME_RATE, DEFAULT_FRAMERATE); + params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE); + params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS); + params->set(CameraProperties::REQUIRED_IMAGE_BUFS, DEFAULT_NUM_PIC_BUFS); + params->set(CameraProperties::SATURATION, DEFAULT_SATURATION); + params->set(CameraProperties::SCENE_MODE, DEFAULT_SCENE_MODE); + params->set(CameraProperties::SHARPNESS, DEFAULT_SHARPNESS); + params->set(CameraProperties::VSTAB, DEFAULT_VSTAB); + params->set(CameraProperties::VSTAB_SUPPORTED, DEFAULT_VSTAB_SUPPORTED); + params->set(CameraProperties::WHITEBALANCE, DEFAULT_WB); + params->set(CameraProperties::ZOOM, DEFAULT_ZOOM); + params->set(CameraProperties::MAX_FD_HW_FACES, DEFAULT_MAX_FD_HW_FACES); + params->set(CameraProperties::MAX_FD_SW_FACES, DEFAULT_MAX_FD_SW_FACES); + params->set(CameraProperties::AUTO_EXPOSURE_LOCK, DEFAULT_AE_LOCK); + params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK, DEFAULT_AWB_LOCK); + if(caps.tSenMounting.nSenId == 305) { + params->set(CameraProperties::FOCAL_LENGTH, DEFAULT_FOCAL_LENGTH_PRIMARY); + } else { + params->set(CameraProperties::FOCAL_LENGTH, DEFAULT_FOCAL_LENGTH_SECONDARY); + } + params->set(CameraProperties::HOR_ANGLE, DEFAULT_HOR_ANGLE); + params->set(CameraProperties::VER_ANGLE, DEFAULT_VER_ANGLE); + params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, DEFAULT_VIDEO_SNAPSHOT_SUPPORTED); + params->set(CameraProperties::VIDEO_SIZE, DEFAULT_VIDEO_SIZE); + params->set(CameraProperties::PREFERRED_PREVIEW_SIZE_FOR_VIDEO, DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO); + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertSenMount(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + const char *p; + unsigned int i = 0; + + LOG_FUNCTION_NAME; + + 
memset(supported, '\0', MAX_PROP_VALUE_LENGTH); + + // 1) Look up and assign sensor name + for (i = 0; i < ARRAY_SIZE(mSensorNames); i++) { + if(mSensorNames[i].num == caps.tSenMounting.nSenId) { + // sensor found + break; + } + } + if ( i == ARRAY_SIZE(mSensorNames) ) { + p = "UNKNOWN_SENSOR"; + } else { + p = mSensorNames[i].param; + } + strncat(supported, p, MAX_PROP_NAME_LENGTH); + params->set(CameraProperties::CAMERA_NAME, supported); + + // 2) Assign mounting rotation + params->set(CameraProperties::ORIENTATION_INDEX, caps.tSenMounting.nRotation); + + LOG_FUNCTION_NAME; + + return ret; +} + +status_t OMXCameraAdapter::insertCapabilities(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) { + status_t ret = NO_ERROR; + char supported[MAX_PROP_VALUE_LENGTH]; + + LOG_FUNCTION_NAME; + + if ( NO_ERROR == ret ) { + ret = insertImageSizes(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertPreviewSizes(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertThumbSizes(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertZoomStages(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertImageFormats(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertPreviewFormats(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertFramerates(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertVFramerates(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertEVs(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertISOModes(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertIPPModes(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertWBModes(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertEffects(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertExpModes(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertFlashModes(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertSceneModes(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = 
insertFocusModes(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertFlickerModes(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertSenMount(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertLocks(params, caps); + } + if ( NO_ERROR == ret) { + ret = insertAreas(params, caps); + + } + + //NOTE: Ensure that we always call insertDefaults after inserting the supported capabilities + //as there are checks inside insertDefaults to make sure a certain default is supported + // or not + if ( NO_ERROR == ret ) { + ret = insertVideoSizes(params, caps); + } + + if ( NO_ERROR == ret ) { + ret = insertDefaults(params, caps); + } + + + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/***************************************** + * public exposed function declarations + *****************************************/ + +status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HANDLETYPE handle) { + status_t ret = NO_ERROR; + int caps_size = 0; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_TI_CAPTYPE** caps = NULL;; + OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer; + MemoryManager memMgr; + + LOG_FUNCTION_NAME; + + // allocate tiler (or ion) buffer for caps (size is always a multiple of 4K) + caps_size = ((sizeof(OMX_TI_CAPTYPE)+4095)/4096)*4096; + caps = (OMX_TI_CAPTYPE**) memMgr.allocateBuffer(0, 0, NULL, caps_size, 1); + + if (!caps) { + CAMHAL_LOGEB("Error allocating buffer for caps %d", eError); + ret = -ENOMEM; + goto EXIT; + } + + // initialize structures to be passed to OMX Camera + OMX_INIT_STRUCT_PTR (caps[0], OMX_TI_CAPTYPE); + caps[0]->nPortIndex = OMX_ALL; + + OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER); + sharedBuffer.nPortIndex = OMX_ALL; + sharedBuffer.nSharedBuffSize = caps_size; + sharedBuffer.pSharedBuff = (OMX_U8 *) caps[0]; + + // Get capabilities from OMX Camera + eError = OMX_GetConfig(handle, (OMX_INDEXTYPE) OMX_TI_IndexConfigCamCapabilities, &sharedBuffer); + if ( OMX_ErrorNone != eError ) { + 
CAMHAL_LOGEB("Error during capabilities query 0x%x", eError); + ret = UNKNOWN_ERROR; + goto EXIT; + } else { + CAMHAL_LOGDA("OMX capability query success"); + } + + // Translate and insert Ducati capabilities to CameraProperties + if ( NO_ERROR == ret ) { + ret = insertCapabilities(params, *caps[0]); + } + + CAMHAL_LOGDB("sen mount id=%u", (unsigned int)caps[0]->tSenMounting.nSenId); + + + EXIT: + if (caps) { + memMgr.freeBuffer((void*) caps); + caps = NULL; + } + + LOG_FUNCTION_NAME_EXIT; + return ret; +} + +}; + diff --git a/camera/OMXCameraAdapter/OMXCapture.cpp b/camera/OMXCameraAdapter/OMXCapture.cpp new file mode 100644 index 0000000..0a622cc --- /dev/null +++ b/camera/OMXCameraAdapter/OMXCapture.cpp @@ -0,0 +1,1229 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file OMXCapture.cpp +* +* This file contains functionality for handling image capture. 
+* +*/ + +#undef LOG_TAG + +#define LOG_TAG "CameraHAL" + +#include "CameraHal.h" +#include "OMXCameraAdapter.h" +#include "ErrorUtils.h" + + +namespace android { + +/* Parses all still-capture related parameters (size, format, rotation, burst, JPEG/thumbnail quality) and flags which OMX settings must be re-applied via mPendingCaptureSettings. */ +status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params, + BaseCameraAdapter::AdapterState state) +{ + status_t ret = NO_ERROR; + const char *str = NULL; + int w, h; + OMX_COLOR_FORMATTYPE pixFormat; + const char *valstr = NULL; + int varint = 0; + + LOG_FUNCTION_NAME; + + OMXCameraPortParameters *cap; + cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + + params.getPictureSize(&w, &h); + + if ( ( w != ( int ) cap->mWidth ) || + ( h != ( int ) cap->mHeight ) ) + { + mPendingCaptureSettings |= SetFormat; /* size change forces an image-port reconfigure */ + } + + cap->mWidth = w; + cap->mHeight = h; + //TODO: Support more pixelformats + //cap->mStride = 2; + + CAMHAL_LOGVB("Image: cap.mWidth = %d", (int)cap->mWidth); + CAMHAL_LOGVB("Image: cap.mHeight = %d", (int)cap->mHeight); + + if ((valstr = params.getPictureFormat()) != NULL) { + if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) { + CAMHAL_LOGDA("CbYCrY format selected"); + pixFormat = OMX_COLOR_FormatCbYCrY; + mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_YUV422I; + } else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) { + CAMHAL_LOGDA("YUV420SP format selected"); + pixFormat = OMX_COLOR_FormatYUV420SemiPlanar; + mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_YUV420SP; + } else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) { + CAMHAL_LOGDA("RGB565 format selected"); + pixFormat = OMX_COLOR_Format16bitRGB565; + mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_RGB565; + } else if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_JPEG) == 0) { + CAMHAL_LOGDA("JPEG format selected"); + pixFormat = OMX_COLOR_FormatUnused; + mCodingMode = CodingNone; + mPictureFormatFromClient = 
CameraParameters::PIXEL_FORMAT_JPEG; + } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_JPS) == 0) { + CAMHAL_LOGDA("JPS format selected"); + pixFormat = OMX_COLOR_FormatUnused; + mCodingMode = CodingJPS; + mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_JPS; + } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_MPO) == 0) { + CAMHAL_LOGDA("MPO format selected"); + pixFormat = OMX_COLOR_FormatUnused; + mCodingMode = CodingMPO; + mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_MPO; + } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_RAW) == 0) { + CAMHAL_LOGDA("RAW Picture format selected"); + pixFormat = OMX_COLOR_FormatRawBayer10bit; + mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_RAW; + } else { + CAMHAL_LOGEA("Invalid format, JPEG format selected as default"); + pixFormat = OMX_COLOR_FormatUnused; + mPictureFormatFromClient = NULL; + } + } else { + CAMHAL_LOGEA("Picture format is NULL, defaulting to JPEG"); + pixFormat = OMX_COLOR_FormatUnused; + mPictureFormatFromClient = NULL; + } + + // JPEG capture is not supported in video mode by OMX Camera + // Set capture format to yuv422i...jpeg encode will + // be done on A9 + valstr = params.get(TICameraParameters::KEY_CAP_MODE); + if ( (valstr && !strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE)) && + (pixFormat == OMX_COLOR_FormatUnused) ) { + CAMHAL_LOGDA("Capturing in video mode...selecting yuv422i"); + pixFormat = OMX_COLOR_FormatCbYCrY; + } + + if ( pixFormat != cap->mColorFormat ) + { + mPendingCaptureSettings |= SetFormat; + cap->mColorFormat = pixFormat; + } + +#ifdef OMAP_ENHANCEMENT + + str = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE); + if ( NULL != str ) { + parseExpRange(str, mExposureBracketingValues, EXP_BRACKET_RANGE, mExposureBracketingValidEntries); + } else { + // if bracketing was previously set...we set again before capturing to clear + if 
(mExposureBracketingValidEntries) mPendingCaptureSettings |= SetExpBracket; + mExposureBracketingValidEntries = 0; + } + +#endif + + varint = params.getInt(CameraParameters::KEY_ROTATION); + if ( varint != -1 ) + { + if ( ( unsigned int ) varint != mPictureRotation) { + mPendingCaptureSettings |= SetRotation; + } + mPictureRotation = varint; + } + else + { + if (mPictureRotation) mPendingCaptureSettings |= SetRotation; + mPictureRotation = 0; + } + + CAMHAL_LOGVB("Picture Rotation set %d", mPictureRotation); + +#ifdef OMAP_ENHANCEMENT + + // Read Sensor Orientation and set it based on operating mode + + varint = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION); + if (( varint != -1 ) && (mCapMode == OMXCameraAdapter::VIDEO_MODE)) + { + mSensorOrientation = varint; + if (mSensorOrientation == 270 ||mSensorOrientation==90) + { + CAMHAL_LOGEA(" Orientation is 270/90. So setting counter rotation to Ducati"); + mSensorOrientation +=180; + mSensorOrientation%=360; + } + } + else + { + mSensorOrientation = 0; + } + + CAMHAL_LOGVB("Sensor Orientation set : %d", mSensorOrientation); + + varint = params.getInt(TICameraParameters::KEY_BURST); + if ( varint >= 1 ) + { + if (varint != mBurstFrames) { + mPendingCaptureSettings |= SetExpBracket; + } + mBurstFrames = varint; + } + else + { + if (mBurstFrames != 1) mPendingCaptureSettings |= SetExpBracket; + mBurstFrames = 1; + } + + CAMHAL_LOGVB("Burst Frames set %d", mBurstFrames); + +#endif + + varint = params.getInt(CameraParameters::KEY_JPEG_QUALITY); + if ( ( varint >= MIN_JPEG_QUALITY ) && + ( varint <= MAX_JPEG_QUALITY ) ) + { + if ( ( unsigned int ) varint != mPictureQuality) { + mPendingCaptureSettings |= SetQuality; + } + mPictureQuality = varint; + } + else + { + if (mPictureQuality != MAX_JPEG_QUALITY) mPendingCaptureSettings |= SetQuality; + mPictureQuality = MAX_JPEG_QUALITY; + } + + CAMHAL_LOGVB("Picture Quality set %d", mPictureQuality); + + varint = 
params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH); + if ( varint >= 0 ) + { + if ( ( unsigned int ) varint != mThumbWidth) { + mPendingCaptureSettings |= SetThumb; + } + mThumbWidth = varint; + } + else + { + if (mThumbWidth != DEFAULT_THUMB_WIDTH) mPendingCaptureSettings |= SetThumb; + mThumbWidth = DEFAULT_THUMB_WIDTH; + } + + + CAMHAL_LOGVB("Picture Thumb width set %d", mThumbWidth); + + varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT); + if ( varint >= 0 ) + { + if ( ( unsigned int ) varint != mThumbHeight) { + mPendingCaptureSettings |= SetThumb; + } + mThumbHeight = varint; + } + else + { + if (mThumbHeight != DEFAULT_THUMB_HEIGHT) mPendingCaptureSettings |= SetThumb; + mThumbHeight = DEFAULT_THUMB_HEIGHT; + } + + + CAMHAL_LOGVB("Picture Thumb height set %d", mThumbHeight); + + varint = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY); + if ( ( varint >= MIN_JPEG_QUALITY ) && + ( varint <= MAX_JPEG_QUALITY ) ) + { + if ( ( unsigned int ) varint != mThumbQuality) { + mPendingCaptureSettings |= SetThumb; + } + mThumbQuality = varint; + } + else + { + if (mThumbQuality != MAX_JPEG_QUALITY) mPendingCaptureSettings |= SetThumb; + mThumbQuality = MAX_JPEG_QUALITY; + } + + CAMHAL_LOGDB("Thumbnail Quality set %d", mThumbQuality); + + if (mFirstTimeInit) { + mPendingCaptureSettings = ECapturesettingsAll; + } + + if (mPendingCaptureSettings) { /* any pending change tears down the image port and releases client image buffers */ + disableImagePort(); + if ( NULL != mReleaseImageBuffersCallback ) { + mReleaseImageBuffersCallback(mReleaseData); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/* Returns the buffer size the image port requires for the current capture format; reconfigures the port via setFormat() only when not already configured. */ +status_t OMXCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount) +{ + status_t ret = NO_ERROR; + OMXCameraPortParameters *imgCaptureData = NULL; + OMX_ERRORTYPE eError = OMX_ErrorNone; + + LOG_FUNCTION_NAME; + + if ( NO_ERROR == ret ) + { + imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + + imgCaptureData->mNumBufs = bufferCount; + + // check if 
image port is already configured... + // if it already configured then we don't have to query again + if (!mCaptureConfigured) { + ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *imgCaptureData); + } + + if ( ret == NO_ERROR ) + { + length = imgCaptureData->mBufSize; + } + else + { + CAMHAL_LOGEB("setFormat() failed 0x%x", ret); + length = 0; + } + } + + CAMHAL_LOGDB("getPictureBufferSize %d", length); /* NOTE(review): %d with a size_t argument — fine on 32-bit OMAP4, not portable */ + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/* Tokenizes a delimiter-separated EV-compensation list (e.g. "-10,0,10") into expRange; validEntries returns how many values were parsed (capped at count). */ +status_t OMXCameraAdapter::parseExpRange(const char *rangeStr, + int * expRange, + size_t count, + size_t &validEntries) +{ + status_t ret = NO_ERROR; + char *ctx, *expVal; + char *tmp = NULL; + size_t i = 0; + + LOG_FUNCTION_NAME; + + if ( NULL == rangeStr ) + { + return -EINVAL; + } + + if ( NULL == expRange ) + { + return -EINVAL; + } + + if ( NO_ERROR == ret ) + { + tmp = ( char * ) malloc( strlen(rangeStr) + 1 ); /* strtok_r mutates its input, so work on a copy */ + + if ( NULL == tmp ) + { + CAMHAL_LOGEA("No resources for temporary buffer"); + return -1; + } + memset(tmp, '\0', strlen(rangeStr) + 1); + + } + + if ( NO_ERROR == ret ) + { + strncpy(tmp, rangeStr, strlen(rangeStr) ); + expVal = strtok_r( (char *) tmp, CameraHal::PARAMS_DELIMITER, &ctx); + + i = 0; + while ( ( NULL != expVal ) && ( i < count ) ) + { + expRange[i] = atoi(expVal); + expVal = strtok_r(NULL, CameraHal::PARAMS_DELIMITER, &ctx); + i++; + } + validEntries = i; + } + + if ( NULL != tmp ) + { + free(tmp); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/* Programs frame-limited capture and relative-EV bracketing on the image port; evCount==0 disables bracketing, frameCount==0 removes the frame limit. */ +status_t OMXCameraAdapter::setExposureBracketing(int *evValues, + size_t evCount, + size_t frameCount) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_CAPTUREMODETYPE expCapMode; + OMX_CONFIG_EXTCAPTUREMODETYPE extExpCapMode; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = -EINVAL; + } + + if ( NULL == evValues ) + { + CAMHAL_LOGEA("Exposure compensation values pointer is invalid"); + ret = -EINVAL; + } + + if ( 
NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR (&expCapMode, OMX_CONFIG_CAPTUREMODETYPE); + expCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex; + + /// If frameCount>0 but evCount<=0, then this is the case of HQ burst. + //Otherwise, it is normal HQ capture + ///If frameCount>0 and evCount>0 then this is the cause of HQ Exposure bracketing. + if ( 0 == evCount && 0 == frameCount ) + { + expCapMode.bFrameLimited = OMX_FALSE; + } + else + { + expCapMode.bFrameLimited = OMX_TRUE; + expCapMode.nFrameLimit = frameCount; + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCaptureMode, + &expCapMode); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring capture mode 0x%x", eError); + } + else + { + CAMHAL_LOGDA("Camera capture mode configured successfully"); + } + } + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR (&extExpCapMode, OMX_CONFIG_EXTCAPTUREMODETYPE); + extExpCapMode.nPortIndex = mCameraAdapterParameters.mImagePortIndex; + + if ( 0 == evCount ) + { + extExpCapMode.bEnableBracketing = OMX_FALSE; + } + else + { + extExpCapMode.bEnableBracketing = OMX_TRUE; + extExpCapMode.tBracketConfigType.eBracketMode = OMX_BracketExposureRelativeInEV; + extExpCapMode.tBracketConfigType.nNbrBracketingValues = evCount - 1; /* NOTE(review): count excludes the base frame — presumably per Ducati convention; confirm */ + } + + for ( unsigned int i = 0 ; i < evCount ; i++ ) + { + extExpCapMode.tBracketConfigType.nBracketValues[i] = ( evValues[i] * ( 1 << Q16_OFFSET ) ) / 10; /* EV x10 integers converted to Q16 fixed point */ + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_IndexConfigExtCaptureMode, + &extExpCapMode); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring extended capture mode 0x%x", eError); + } + else + { + CAMHAL_LOGDA("Extended camera capture mode configured successfully"); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/* Enables or disables the OMX shutter-sound callback event on all ports. */ +status_t OMXCameraAdapter::setShutterCallback(bool enabled) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + 
OMX_CONFIG_CALLBACKREQUESTTYPE shutterRequstCallback; + + LOG_FUNCTION_NAME; + + if ( OMX_StateExecuting != mComponentState ) + { + CAMHAL_LOGEA("OMX component not in executing state"); + ret = -1; + } + + if ( NO_ERROR == ret ) + { + + OMX_INIT_STRUCT_PTR (&shutterRequstCallback, OMX_CONFIG_CALLBACKREQUESTTYPE); + shutterRequstCallback.nPortIndex = OMX_ALL; + + if ( enabled ) + { + shutterRequstCallback.bEnable = OMX_TRUE; + shutterRequstCallback.nIndex = ( OMX_INDEXTYPE ) OMX_TI_IndexConfigShutterCallback; + CAMHAL_LOGDA("Enabling shutter callback"); + } + else + { + shutterRequstCallback.bEnable = OMX_FALSE; + shutterRequstCallback.nIndex = ( OMX_INDEXTYPE ) OMX_TI_IndexConfigShutterCallback; + CAMHAL_LOGDA("Disabling shutter callback"); + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_IndexConfigCallbackRequest, + &shutterRequstCallback); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error registering shutter callback 0x%x", eError); + ret = -1; + } + else + { + CAMHAL_LOGDB("Shutter callback for index 0x%x registered successfully", + OMX_TI_IndexConfigShutterCallback); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader, + CameraFrame::FrameType typeOfFrame) +{ + status_t ret = NO_ERROR; + int currentBufferIdx, nextBufferIdx; + OMXCameraPortParameters * imgCaptureData = NULL; + + LOG_FUNCTION_NAME; + + imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + + if ( OMX_StateExecuting != mComponentState ) + { + CAMHAL_LOGEA("OMX component is not in executing state"); + ret = -EINVAL; + } + + if ( NO_ERROR == ret ) + { + currentBufferIdx = ( unsigned int ) pBuffHeader->pAppPrivate; + + if ( currentBufferIdx >= imgCaptureData->mNumBufs) + { + CAMHAL_LOGEB("Invalid bracketing buffer index 0x%x", currentBufferIdx); + ret = -EINVAL; + } + } + + if ( NO_ERROR == ret ) + { + 
mBracketingBuffersQueued[currentBufferIdx] = false; + mBracketingBuffersQueuedCount--; + + if ( 0 >= mBracketingBuffersQueuedCount ) + { + nextBufferIdx = ( currentBufferIdx + 1 ) % imgCaptureData->mNumBufs; + mBracketingBuffersQueued[nextBufferIdx] = true; + mBracketingBuffersQueuedCount++; + mLastBracetingBufferIdx = nextBufferIdx; + setFrameRefCount(imgCaptureData->mBufferHeader[nextBufferIdx]->pBuffer, typeOfFrame, 1); + returnFrame(imgCaptureData->mBufferHeader[nextBufferIdx]->pBuffer, typeOfFrame); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/* Flushes all not-yet-queued bracketing buffers to subscribers, walking circularly from the last bracketing index. */ +status_t OMXCameraAdapter::sendBracketFrames() +{ + status_t ret = NO_ERROR; + int currentBufferIdx; + OMXCameraPortParameters * imgCaptureData = NULL; + + LOG_FUNCTION_NAME; + + imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + + if ( OMX_StateExecuting != mComponentState ) + { + CAMHAL_LOGEA("OMX component is not in executing state"); + ret = -EINVAL; + } + + if ( NO_ERROR == ret ) + { + + currentBufferIdx = mLastBracetingBufferIdx; /* NOTE: member name misspelled ('Braceting') in the header; kept for consistency */ + do + { + currentBufferIdx++; + currentBufferIdx %= imgCaptureData->mNumBufs; + if (!mBracketingBuffersQueued[currentBufferIdx] ) + { + CameraFrame cameraFrame; + sendCallBacks(cameraFrame, + imgCaptureData->mBufferHeader[currentBufferIdx], + imgCaptureData->mImageType, + imgCaptureData); + } + } while ( currentBufferIdx != mLastBracetingBufferIdx ); + + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/* Enables zero-shutter-lag style bracketing: allocates the queued-buffer bookkeeping and kicks off image capture. */ +status_t OMXCameraAdapter::startBracketing(int range) +{ + status_t ret = NO_ERROR; + OMXCameraPortParameters * imgCaptureData = NULL; + + LOG_FUNCTION_NAME; + + imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + + if ( OMX_StateExecuting != mComponentState ) + { + CAMHAL_LOGEA("OMX component is not in executing state"); + ret = -EINVAL; + } + + { + Mutex::Autolock lock(mBracketingLock); + + if ( mBracketingEnabled ) + { + return ret; + } + } + + if ( 0 == 
imgCaptureData->mNumBufs ) + { + CAMHAL_LOGEB("Image capture buffers set to %d", imgCaptureData->mNumBufs); + ret = -EINVAL; + } + + if ( mPending3Asettings ) + apply3Asettings(mParameters3A); + + if ( NO_ERROR == ret ) + { + Mutex::Autolock lock(mBracketingLock); + + mBracketingRange = range; + mBracketingBuffersQueued = new bool[imgCaptureData->mNumBufs]; /* NOTE(review): operator new throws rather than returning NULL, so the check below is dead code */ + if ( NULL == mBracketingBuffersQueued ) + { + CAMHAL_LOGEA("Unable to allocate bracketing management structures"); + ret = -1; + } + + if ( NO_ERROR == ret ) + { + mBracketingBuffersQueuedCount = imgCaptureData->mNumBufs; + mLastBracetingBufferIdx = mBracketingBuffersQueuedCount - 1; + + for ( int i = 0 ; i < imgCaptureData->mNumBufs ; i++ ) + { + mBracketingBuffersQueued[i] = true; + } + + } + } + + if ( NO_ERROR == ret ) + { + + ret = startImageCapture(); + { + Mutex::Autolock lock(mBracketingLock); + + if ( NO_ERROR == ret ) + { + mBracketingEnabled = true; + } + else + { + mBracketingEnabled = false; + } + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/* Tears down bracketing state and stops the in-flight image capture. */ +status_t OMXCameraAdapter::stopBracketing() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mBracketingLock); + + if ( NULL != mBracketingBuffersQueued ) + { + delete [] mBracketingBuffersQueued; + } + + ret = stopImageCapture(); + + mBracketingBuffersQueued = NULL; + mBracketingEnabled = false; + mBracketingBuffersQueuedCount = 0; + mLastBracetingBufferIdx = 0; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +/* Starts a still capture: applies pending rotation, enables the shutter callback in HQ modes, queues all image-port buffers, and (outside video mode) raises OMX_IndexConfigCapturing. */ +status_t OMXCameraAdapter::startImageCapture() +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMXCameraPortParameters * capData = NULL; + OMX_CONFIG_BOOLEANTYPE bOMX; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mImageCaptureLock); + + if(!mCaptureConfigured) + { + ///Image capture was cancelled before we could start + return NO_ERROR; + } + + if ( 0 != mStartCaptureSem.Count() ) + { + CAMHAL_LOGEB("Error mStartCaptureSem semaphore count %d", mStartCaptureSem.Count()); + return 
NO_INIT; + } + + if ((getNextState() & (CAPTURE_ACTIVE|BRACKETING_ACTIVE)) == 0) { + CAMHAL_LOGDA("trying starting capture when already canceled"); + return NO_ERROR; + } + + // Camera framework doesn't expect face callbacks once capture is triggered + pauseFaceDetection(true); + + //During bracketing image capture is already active + { + Mutex::Autolock lock(mBracketingLock); + if ( mBracketingEnabled ) + { + //Stop bracketing, activate normal burst for the remaining images + mBracketingEnabled = false; + mCapturedFrames = mBracketingRange; + ret = sendBracketFrames(); + if(ret != NO_ERROR) + goto EXIT; + else + return ret; + } + } + + if ( NO_ERROR == ret ) { + if (mPendingCaptureSettings & SetRotation) { + mPendingCaptureSettings &= ~SetRotation; + ret = setPictureRotation(mPictureRotation); + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("Error configuring image rotation %x", ret); + } + } + } + + // need to enable wb data for video snapshot to fill in exif data + if ((ret == NO_ERROR) && (mCapMode == VIDEO_MODE)) { + // video snapshot uses wb data from snapshot frame + ret = setExtraData(true, mCameraAdapterParameters.mPrevPortIndex, OMX_WhiteBalance); + } + + //OMX shutter callback events are only available in hq mode + if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode)) + { + + if ( NO_ERROR == ret ) + { + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + (OMX_EVENTTYPE) OMX_EventIndexSettingChanged, + OMX_ALL, + OMX_TI_IndexConfigShutterCallback, + mStartCaptureSem); + } + + if ( NO_ERROR == ret ) + { + ret = setShutterCallback(true); + } + + } + + if ( NO_ERROR == ret ) { + capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + + ///Queue all the buffers on capture port + for ( int index = 0 ; index < capData->mNumBufs ; index++ ) { + CAMHAL_LOGDB("Queuing buffer on Capture port - 0x%x", + ( unsigned int ) capData->mBufferHeader[index]->pBuffer); + eError = 
OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp, + (OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]); + + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + } + + mWaitingForSnapshot = true; + mCaptureSignalled = false; + + // Capturing command is not needed when capturing in video mode + // Only need to queue buffers on image ports + if (mCapMode != VIDEO_MODE) { + OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE); + bOMX.bEnabled = OMX_TRUE; + + /// sending Capturing Command to the component + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCapturing, + &bOMX); + + CAMHAL_LOGDB("Capture set - 0x%x", eError); + + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + } + } + + //OMX shutter callback events are only available in hq mode + if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode)) + { + + if ( NO_ERROR == ret ) + { + ret = mStartCaptureSem.WaitTimeout(OMX_CAPTURE_TIMEOUT); + } + + //If something bad happened while we wait + if (mComponentState != OMX_StateExecuting) + { + CAMHAL_LOGEA("Invalid State after Image Capture Exitting!!!"); + goto EXIT; + } + + if ( NO_ERROR == ret ) + { + CAMHAL_LOGDA("Shutter callback received"); + notifyShutterSubscribers(); + } + else + { + ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp, + (OMX_EVENTTYPE) OMX_EventIndexSettingChanged, + OMX_ALL, + OMX_TI_IndexConfigShutterCallback, + NULL); + CAMHAL_LOGEA("Timeout expired on shutter callback"); + goto EXIT; + } + + } + + return (ret | ErrorUtils::omxToAndroidError(eError)); + +EXIT: + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + setExtraData(false, mCameraAdapterParameters.mPrevPortIndex, OMX_WhiteBalance); + mWaitingForSnapshot = false; + mCaptureSignalled = false; + performCleanupAfterError(); + LOG_FUNCTION_NAME_EXIT; + return (ret | ErrorUtils::omxToAndroidError(eError)); +} + +status_t OMXCameraAdapter::stopImageCapture() +{ + status_t ret = NO_ERROR; + 
OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_BOOLEANTYPE bOMX; + OMXCameraPortParameters *imgCaptureData = NULL; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mImageCaptureLock); + + if (!mCaptureConfigured) { + //Capture is not ongoing, return from here + return NO_ERROR; + } + + if ( 0 != mStopCaptureSem.Count() ) { + CAMHAL_LOGEB("Error mStopCaptureSem semaphore count %d", mStopCaptureSem.Count()); + goto EXIT; + } + + //Disable the callback first + mWaitingForSnapshot = false; + mSnapshotCount = 0; + + // OMX shutter callback events are only available in hq mode + if ((HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode)) { + //Disable the callback first + ret = setShutterCallback(false); + + // if anybody is waiting on the shutter callback + // signal them and then recreate the semaphore + if ( 0 != mStartCaptureSem.Count() ) { + + for (int i = mStartCaptureSem.Count(); i < 0; i++) { + ret |= SignalEvent(mCameraAdapterParameters.mHandleComp, + (OMX_EVENTTYPE) OMX_EventIndexSettingChanged, + OMX_ALL, + OMX_TI_IndexConfigShutterCallback, + NULL ); + } + mStartCaptureSem.Create(0); + } + } + + // After capture, face detection should be disabled + // and application needs to restart face detection + stopFaceDetection(); + + //Wait here for the capture to be done, in worst case timeout and proceed with cleanup + mCaptureSem.WaitTimeout(OMX_CAPTURE_TIMEOUT); + + //If somethiing bad happened while we wait + if (mComponentState == OMX_StateInvalid) + { + CAMHAL_LOGEA("Invalid State Image Capture Stop Exitting!!!"); + goto EXIT; + } + + // Disable image capture + // Capturing command is not needed when capturing in video mode + if (mCapMode != VIDEO_MODE) { + OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE); + bOMX.bEnabled = OMX_FALSE; + imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCapturing, + &bOMX); + if ( 
OMX_ErrorNone != eError ) { + CAMHAL_LOGDB("Error during SetConfig- 0x%x", eError); + ret = -1; + goto EXIT; + } + } + + // had to enable wb data for video snapshot to fill in exif data + // now that we are done...disable + if ((ret == NO_ERROR) && (mCapMode == VIDEO_MODE)) { + ret = setExtraData(false, mCameraAdapterParameters.mPrevPortIndex, OMX_WhiteBalance); + } + + CAMHAL_LOGDB("Capture set - 0x%x", eError); + + mCaptureSignalled = true; //set this to true if we exited because of timeout + + { + Mutex::Autolock lock(mFrameCountMutex); + mFrameCount = 0; + mFirstFrameCondition.broadcast(); + } + + return (ret | ErrorUtils::omxToAndroidError(eError)); + +EXIT: + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + //Release image buffers + if ( NULL != mReleaseImageBuffersCallback ) { + mReleaseImageBuffersCallback(mReleaseData); + } + + { + Mutex::Autolock lock(mFrameCountMutex); + mFrameCount = 0; + mFirstFrameCondition.broadcast(); + } + + performCleanupAfterError(); + LOG_FUNCTION_NAME_EXIT; + return (ret | ErrorUtils::omxToAndroidError(eError)); +} + +status_t OMXCameraAdapter::disableImagePort(){ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMXCameraPortParameters *imgCaptureData = NULL; + + if (!mCaptureConfigured) { + return NO_ERROR; + } + + mCaptureConfigured = false; + imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + + ///Register for Image port Disable event + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortDisable, + mCameraAdapterParameters.mImagePortIndex, + mStopCaptureSem); + ///Disable Capture Port + eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp, + OMX_CommandPortDisable, + mCameraAdapterParameters.mImagePortIndex, + NULL); + + ///Free all the buffers on capture port + if (imgCaptureData) { + CAMHAL_LOGDB("Freeing buffer on Capture port - %d", 
imgCaptureData->mNumBufs); + for ( int index = 0 ; index < imgCaptureData->mNumBufs ; index++) { + CAMHAL_LOGDB("Freeing buffer on Capture port - 0x%x", + ( unsigned int ) imgCaptureData->mBufferHeader[index]->pBuffer); + eError = OMX_FreeBuffer(mCameraAdapterParameters.mHandleComp, + mCameraAdapterParameters.mImagePortIndex, + (OMX_BUFFERHEADERTYPE*)imgCaptureData->mBufferHeader[index]); + + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + } + } + CAMHAL_LOGDA("Waiting for port disable"); + //Wait for the image port enable event + ret = mStopCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT); + + //If somethiing bad happened while we wait + if (mComponentState == OMX_StateInvalid) + { + CAMHAL_LOGEA("Invalid State after Disable Image Port Exitting!!!"); + goto EXIT; + } + + if ( NO_ERROR == ret ) { + CAMHAL_LOGDA("Port disabled"); + } else { + ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortDisable, + mCameraAdapterParameters.mImagePortIndex, + NULL); + CAMHAL_LOGDA("Timeout expired on port disable"); + goto EXIT; + } + + EXIT: + return (ret | ErrorUtils::omxToAndroidError(eError)); +} + + +status_t OMXCameraAdapter::UseBuffersCapture(void* bufArr, int num) +{ + LOG_FUNCTION_NAME; + + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMXCameraPortParameters * imgCaptureData = NULL; + uint32_t *buffers = (uint32_t*)bufArr; + OMXCameraPortParameters cap; + + imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + + if ( 0 != mUseCaptureSem.Count() ) + { + CAMHAL_LOGEB("Error mUseCaptureSem semaphore count %d", mUseCaptureSem.Count()); + return BAD_VALUE; + } + + // capture is already configured...we can skip this step + if (mCaptureConfigured) { + + if ( NO_ERROR == ret ) + { + ret = setupEXIF(); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("Error configuring EXIF Buffer %x", ret); + } + } + + mCapturedFrames = mBurstFrames; + return NO_ERROR; + } + + 
imgCaptureData->mNumBufs = num; + + //TODO: Support more pixelformats + + CAMHAL_LOGDB("Params Width = %d", (int)imgCaptureData->mWidth); + CAMHAL_LOGDB("Params Height = %d", (int)imgCaptureData->mWidth); + + if (mPendingCaptureSettings & SetFormat) { + mPendingCaptureSettings &= ~SetFormat; + ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *imgCaptureData); + if ( ret != NO_ERROR ) { + CAMHAL_LOGEB("setFormat() failed %d", ret); + LOG_FUNCTION_NAME_EXIT; + return ret; + } + } + + if (mPendingCaptureSettings & SetThumb) { + mPendingCaptureSettings &= ~SetThumb; + ret = setThumbnailParams(mThumbWidth, mThumbHeight, mThumbQuality); + if ( NO_ERROR != ret) { + CAMHAL_LOGEB("Error configuring thumbnail size %x", ret); + return ret; + } + } + + if (mPendingCaptureSettings & SetExpBracket) { + mPendingCaptureSettings &= ~SetExpBracket; + ret = setExposureBracketing( mExposureBracketingValues, + mExposureBracketingValidEntries, mBurstFrames); + if ( ret != NO_ERROR ) { + CAMHAL_LOGEB("setExposureBracketing() failed %d", ret); + goto EXIT; + } + } + + if (mPendingCaptureSettings & SetQuality) { + mPendingCaptureSettings &= ~SetQuality; + ret = setImageQuality(mPictureQuality); + if ( NO_ERROR != ret) { + CAMHAL_LOGEB("Error configuring image quality %x", ret); + goto EXIT; + } + } + + ///Register for Image port ENABLE event + ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortEnable, + mCameraAdapterParameters.mImagePortIndex, + mUseCaptureSem); + + ///Enable Capture Port + eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp, + OMX_CommandPortEnable, + mCameraAdapterParameters.mImagePortIndex, + NULL); + + CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError); + GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError); + + for ( int index = 0 ; index < imgCaptureData->mNumBufs ; index++ ) + { + OMX_BUFFERHEADERTYPE *pBufferHdr; + CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d", + (unsigned int)buffers[index], + 
(int)imgCaptureData->mBufSize); + + eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp, + &pBufferHdr, + mCameraAdapterParameters.mImagePortIndex, + 0, + mCaptureBuffersLength, + (OMX_U8*)buffers[index]); + + CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError); + GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError); + + pBufferHdr->pAppPrivate = (OMX_PTR) index; + pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE); + pBufferHdr->nVersion.s.nVersionMajor = 1 ; + pBufferHdr->nVersion.s.nVersionMinor = 1 ; + pBufferHdr->nVersion.s.nRevision = 0; + pBufferHdr->nVersion.s.nStep = 0; + imgCaptureData->mBufferHeader[index] = pBufferHdr; + } + + //Wait for the image port enable event + CAMHAL_LOGDA("Waiting for port enable"); + ret = mUseCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT); + + //If somethiing bad happened while we wait + if (mComponentState == OMX_StateInvalid) + { + CAMHAL_LOGEA("Invalid State after Enable Image Port Exitting!!!"); + goto EXIT; + } + + if ( ret == NO_ERROR ) + { + CAMHAL_LOGDA("Port enabled"); + } + else + { + ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp, + OMX_EventCmdComplete, + OMX_CommandPortEnable, + mCameraAdapterParameters.mImagePortIndex, + NULL); + CAMHAL_LOGDA("Timeout expired on port enable"); + goto EXIT; + } + + if ( NO_ERROR == ret ) + { + ret = setupEXIF(); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("Error configuring EXIF Buffer %x", ret); + } + } + + mCapturedFrames = mBurstFrames; + mCaptureConfigured = true; + + return (ret | ErrorUtils::omxToAndroidError(eError)); + +EXIT: + CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError); + //Release image buffers + if ( NULL != mReleaseImageBuffersCallback ) { + mReleaseImageBuffersCallback(mReleaseData); + } + performCleanupAfterError(); + LOG_FUNCTION_NAME_EXIT; + return (ret | ErrorUtils::omxToAndroidError(eError)); + +} + +}; diff --git a/camera/OMXCameraAdapter/OMXDefaults.cpp b/camera/OMXCameraAdapter/OMXDefaults.cpp new file mode 100644 
index 0000000..aff38d1 --- /dev/null +++ b/camera/OMXCameraAdapter/OMXDefaults.cpp @@ -0,0 +1,83 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file OMXDefaults.cpp +* +* This file contains definitions are OMX Camera defaults +* +*/ + +#include "CameraHal.h" +#include "OMXCameraAdapter.h" + +namespace android { + +#undef LOG_TAG +#define LOG_TAG "CameraHAL" + +#define __STRINGIFY(s) __STRING(s) + +// OMX Camera defaults +const char OMXCameraAdapter::DEFAULT_ANTIBANDING[] = "auto"; +const char OMXCameraAdapter::DEFAULT_BRIGHTNESS[] = "50"; +const char OMXCameraAdapter::DEFAULT_CONTRAST[] = "100"; +const char OMXCameraAdapter::DEFAULT_EFFECT[] = "none"; +const char OMXCameraAdapter::DEFAULT_EV_COMPENSATION[] = "0"; +const char OMXCameraAdapter::DEFAULT_EV_STEP[] = "0.1"; +const char OMXCameraAdapter::DEFAULT_EXPOSURE_MODE[] = "auto"; +const char OMXCameraAdapter::DEFAULT_FLASH_MODE[] = "off"; +const char OMXCameraAdapter::DEFAULT_FOCUS_MODE_PREFERRED[] = "auto"; +const char OMXCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity"; +const char OMXCameraAdapter::DEFAULT_FRAMERATE_RANGE_IMAGE[] = "15000,30000"; +const char OMXCameraAdapter::DEFAULT_FRAMERATE_RANGE_VIDEO[]="24000,30000"; +const char OMXCameraAdapter::DEFAULT_IPP[] = "ldc-nsf"; +const char OMXCameraAdapter::DEFAULT_GBCE[] = "disable"; +const char OMXCameraAdapter::DEFAULT_ISO_MODE[] = "auto"; +const char 
OMXCameraAdapter::DEFAULT_JPEG_QUALITY[] = "95"; +const char OMXCameraAdapter::DEFAULT_THUMBNAIL_QUALITY[] = "60"; +const char OMXCameraAdapter::DEFAULT_THUMBNAIL_SIZE[] = "160x120"; +const char OMXCameraAdapter::DEFAULT_PICTURE_FORMAT[] = "jpeg"; +const char OMXCameraAdapter::DEFAULT_PICTURE_SIZE[] = "320x240"; +const char OMXCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv420sp"; +const char OMXCameraAdapter::DEFAULT_FRAMERATE[] = "30"; +const char OMXCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "640x480"; +const char OMXCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6"; +const char OMXCameraAdapter::DEFAULT_NUM_PIC_BUFS[] = "1"; +const char OMXCameraAdapter::DEFAULT_MAX_FOCUS_AREAS[] = "1"; +const char OMXCameraAdapter::DEFAULT_SATURATION[] = "100"; +const char OMXCameraAdapter::DEFAULT_SCENE_MODE[] = "auto"; +const char OMXCameraAdapter::DEFAULT_SHARPNESS[] = "100"; +const char OMXCameraAdapter::DEFAULT_VSTAB[] = "false"; +const char OMXCameraAdapter::DEFAULT_VSTAB_SUPPORTED[] = "true"; +const char OMXCameraAdapter::DEFAULT_WB[] = "auto"; +const char OMXCameraAdapter::DEFAULT_ZOOM[] = "0"; +const char OMXCameraAdapter::DEFAULT_MAX_FD_HW_FACES[] = __STRINGIFY(MAX_NUM_FACES_SUPPORTED); +const char OMXCameraAdapter::DEFAULT_MAX_FD_SW_FACES[] = "0"; +const char OMXCameraAdapter::DEFAULT_FOCAL_LENGTH_PRIMARY[] = "3.43"; +const char OMXCameraAdapter::DEFAULT_FOCAL_LENGTH_SECONDARY[] = "1.95"; +const char OMXCameraAdapter::DEFAULT_HOR_ANGLE[] = "54.8"; +const char OMXCameraAdapter::DEFAULT_VER_ANGLE[] = "42.5"; +const char OMXCameraAdapter::DEFAULT_AE_LOCK[] = "false"; +const char OMXCameraAdapter::DEFAULT_AWB_LOCK[] = "false"; +const char OMXCameraAdapter::DEFAULT_MAX_NUM_METERING_AREAS[] = "0"; +const char OMXCameraAdapter::DEFAULT_LOCK_SUPPORTED[] = "true"; +const char OMXCameraAdapter::DEFAULT_LOCK_UNSUPPORTED[] = "false"; +const char OMXCameraAdapter::DEFAULT_VIDEO_SNAPSHOT_SUPPORTED[] = "true"; +const char OMXCameraAdapter::DEFAULT_VIDEO_SIZE[] = "1920x1080"; +const char 
OMXCameraAdapter::DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "1920x1080"; +}; + diff --git a/camera/OMXCameraAdapter/OMXExif.cpp b/camera/OMXCameraAdapter/OMXExif.cpp new file mode 100644 index 0000000..76d94bd --- /dev/null +++ b/camera/OMXCameraAdapter/OMXExif.cpp @@ -0,0 +1,839 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file OMXExif.cpp +* +* This file contains functionality for handling EXIF insertion. 
+* +*/ + +#undef LOG_TAG + +#define LOG_TAG "CameraHAL" + +#include "CameraHal.h" +#include "OMXCameraAdapter.h" +#include + +namespace android { + +status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters ¶ms, + BaseCameraAdapter::AdapterState state) +{ + status_t ret = NO_ERROR; + const char *valstr = NULL; + double gpsPos; + + LOG_FUNCTION_NAME; + + if( ( valstr = params.get(CameraParameters::KEY_GPS_LATITUDE) ) != NULL ) + { + gpsPos = strtod(valstr, NULL); + + if ( convertGPSCoord(gpsPos, + mEXIFData.mGPSData.mLatDeg, + mEXIFData.mGPSData.mLatMin, + mEXIFData.mGPSData.mLatSec, + mEXIFData.mGPSData.mLatSecDiv ) == NO_ERROR ) + { + + if ( 0 < gpsPos ) + { + strncpy(mEXIFData.mGPSData.mLatRef, GPS_NORTH_REF, GPS_REF_SIZE); + } + else + { + strncpy(mEXIFData.mGPSData.mLatRef, GPS_SOUTH_REF, GPS_REF_SIZE); + } + + mEXIFData.mGPSData.mLatValid = true; + } + else + { + mEXIFData.mGPSData.mLatValid = false; + } + } + else + { + mEXIFData.mGPSData.mLatValid = false; + } + + if( ( valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE) ) != NULL ) + { + gpsPos = strtod(valstr, NULL); + + if ( convertGPSCoord(gpsPos, + mEXIFData.mGPSData.mLongDeg, + mEXIFData.mGPSData.mLongMin, + mEXIFData.mGPSData.mLongSec, + mEXIFData.mGPSData.mLongSecDiv) == NO_ERROR ) + { + + if ( 0 < gpsPos ) + { + strncpy(mEXIFData.mGPSData.mLongRef, GPS_EAST_REF, GPS_REF_SIZE); + } + else + { + strncpy(mEXIFData.mGPSData.mLongRef, GPS_WEST_REF, GPS_REF_SIZE); + } + + mEXIFData.mGPSData.mLongValid= true; + } + else + { + mEXIFData.mGPSData.mLongValid = false; + } + } + else + { + mEXIFData.mGPSData.mLongValid = false; + } + + if( ( valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE) ) != NULL ) + { + gpsPos = strtod(valstr, NULL); + mEXIFData.mGPSData.mAltitude = floor(fabs(gpsPos)); + if (gpsPos < 0) { + mEXIFData.mGPSData.mAltitudeRef = 1; + } else { + mEXIFData.mGPSData.mAltitudeRef = 0; + } + mEXIFData.mGPSData.mAltitudeValid = true; + } + else + { + 
mEXIFData.mGPSData.mAltitudeValid= false; + } + + if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL ) + { + long gpsTimestamp = strtol(valstr, NULL, 10); + struct tm *timeinfo = gmtime( ( time_t * ) & (gpsTimestamp) ); + if ( NULL != timeinfo ) + { + mEXIFData.mGPSData.mTimeStampHour = timeinfo->tm_hour; + mEXIFData.mGPSData.mTimeStampMin = timeinfo->tm_min; + mEXIFData.mGPSData.mTimeStampSec = timeinfo->tm_sec; + mEXIFData.mGPSData.mTimeStampValid = true; + } + else + { + mEXIFData.mGPSData.mTimeStampValid = false; + } + } + else + { + mEXIFData.mGPSData.mTimeStampValid = false; + } + + if( ( valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP) ) != NULL ) + { + long gpsDatestamp = strtol(valstr, NULL, 10); + struct tm *timeinfo = gmtime( ( time_t * ) & (gpsDatestamp) ); + if ( NULL != timeinfo ) + { + strftime(mEXIFData.mGPSData.mDatestamp, GPS_DATESTAMP_SIZE, "%Y:%m:%d", timeinfo); + mEXIFData.mGPSData.mDatestampValid = true; + } + else + { + mEXIFData.mGPSData.mDatestampValid = false; + } + } + else + { + mEXIFData.mGPSData.mDatestampValid = false; + } + + if( ( valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD) ) != NULL ) + { + strncpy(mEXIFData.mGPSData.mProcMethod, valstr, GPS_PROCESSING_SIZE-1); + mEXIFData.mGPSData.mProcMethodValid = true; + } + else + { + mEXIFData.mGPSData.mProcMethodValid = false; + } + + if( ( valstr = params.get(TICameraParameters::KEY_GPS_MAPDATUM) ) != NULL ) + { + strncpy(mEXIFData.mGPSData.mMapDatum, valstr, GPS_MAPDATUM_SIZE-1); + mEXIFData.mGPSData.mMapDatumValid = true; + } + else + { + mEXIFData.mGPSData.mMapDatumValid = false; + } + + if( ( valstr = params.get(TICameraParameters::KEY_GPS_VERSION) ) != NULL ) + { + strncpy(mEXIFData.mGPSData.mVersionId, valstr, GPS_VERSION_SIZE-1); + mEXIFData.mGPSData.mVersionIdValid = true; + } + else + { + mEXIFData.mGPSData.mVersionIdValid = false; + } + + if( ( valstr = params.get(TICameraParameters::KEY_EXIF_MODEL ) ) != NULL ) + { + 
CAMHAL_LOGVB("EXIF Model: %s", valstr); + strncpy(mEXIFData.mModel, valstr, EXIF_MODEL_SIZE - 1); + mEXIFData.mModelValid= true; + } + else + { + mEXIFData.mModelValid= false; + } + + if( ( valstr = params.get(TICameraParameters::KEY_EXIF_MAKE ) ) != NULL ) + { + CAMHAL_LOGVB("EXIF Make: %s", valstr); + strncpy(mEXIFData.mMake, valstr, EXIF_MAKE_SIZE - 1); + mEXIFData.mMakeValid = true; + } + else + { + mEXIFData.mMakeValid= false; + } + + + if( ( valstr = params.get(CameraParameters::KEY_FOCAL_LENGTH) ) != NULL ) { + CAMHAL_LOGVB("EXIF Focal length: %s", valstr); + ExifElementsTable::stringToRational(valstr, + &mEXIFData.mFocalNum, + &mEXIFData.mFocalDen); + } else { + mEXIFData.mFocalNum = 0; + mEXIFData.mFocalDen = 0; + } + + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::setupEXIF() +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer; + OMX_TI_CONFIG_EXIF_TAGS *exifTags; + unsigned char *sharedPtr = NULL; + struct timeval sTv; + struct tm *pTime; + OMXCameraPortParameters * capData = NULL; + MemoryManager memMgr; + OMX_U8** memmgr_buf_array = NULL; + int buf_size = 0; + + LOG_FUNCTION_NAME; + + sharedBuffer.pSharedBuff = NULL; + capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = -EINVAL; + } + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER); + sharedBuffer.nPortIndex = mCameraAdapterParameters.mImagePortIndex; + + //We allocate the shared buffer dynamically based on the + //requirements of the EXIF tags. The additional buffers will + //get stored after the EXIF configuration structure and the pointers + //will contain offsets within the shared buffer itself. 
+ buf_size = sizeof(OMX_TI_CONFIG_EXIF_TAGS) + + ( EXIF_MODEL_SIZE ) + + ( EXIF_MAKE_SIZE ) + + ( EXIF_DATE_TIME_SIZE ) + + ( GPS_MAPDATUM_SIZE ) + + ( GPS_PROCESSING_SIZE ); + buf_size = ((buf_size+4095)/4096)*4096; + sharedBuffer.nSharedBuffSize = buf_size; + + memmgr_buf_array = (OMX_U8 **)memMgr.allocateBuffer(0, 0, NULL, buf_size, 1); + sharedBuffer.pSharedBuff = ( OMX_U8 * ) memmgr_buf_array[0]; + + if ( NULL == sharedBuffer.pSharedBuff ) + { + CAMHAL_LOGEA("No resources to allocate OMX shared buffer"); + ret = -1; + } + + //Extra data begins right after the EXIF configuration structure. + sharedPtr = sharedBuffer.pSharedBuff + sizeof(OMX_TI_CONFIG_EXIF_TAGS); + } + + if ( NO_ERROR == ret ) + { + exifTags = ( OMX_TI_CONFIG_EXIF_TAGS * ) sharedBuffer.pSharedBuff; + OMX_INIT_STRUCT_PTR (exifTags, OMX_TI_CONFIG_EXIF_TAGS); + exifTags->nPortIndex = mCameraAdapterParameters.mImagePortIndex; + + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_TI_IndexConfigExifTags, + &sharedBuffer ); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while retrieving EXIF configuration structure 0x%x", eError); + ret = -1; + } + } + + if ( NO_ERROR == ret ) + { + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusModel ) && + ( mEXIFData.mModelValid ) ) + { + strncpy(( char * ) sharedPtr, + mEXIFData.mModel, + EXIF_MODEL_SIZE - 1); + + exifTags->pModelBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff ); + exifTags->ulModelBuffSizeBytes = strlen((char*)sharedPtr) + 1; + sharedPtr += EXIF_MODEL_SIZE; + exifTags->eStatusModel = OMX_TI_TagUpdated; + } + + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusMake) && + ( mEXIFData.mMakeValid ) ) + { + strncpy( ( char * ) sharedPtr, + mEXIFData.mMake, + EXIF_MAKE_SIZE - 1); + + exifTags->pMakeBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff ); + exifTags->ulMakeBuffSizeBytes = strlen((char*)sharedPtr) + 1; + sharedPtr += EXIF_MAKE_SIZE; + exifTags->eStatusMake = OMX_TI_TagUpdated; + 
} + + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusFocalLength )) + { + if (mEXIFData.mFocalNum || mEXIFData.mFocalDen ) { + exifTags->ulFocalLength[0] = (OMX_U32) mEXIFData.mFocalNum; + exifTags->ulFocalLength[1] = (OMX_U32) mEXIFData.mFocalDen; + CAMHAL_LOGVB("exifTags->ulFocalLength = [%u] [%u]", + (unsigned int)(exifTags->ulFocalLength[0]), + (unsigned int)(exifTags->ulFocalLength[1])); + exifTags->eStatusFocalLength = OMX_TI_TagUpdated; + } + } + + if ( OMX_TI_TagReadWrite == exifTags->eStatusDateTime ) + { + int status = gettimeofday (&sTv, NULL); + pTime = localtime (&sTv.tv_sec); + if ( ( 0 == status ) && ( NULL != pTime ) ) + { + snprintf(( char * ) sharedPtr, EXIF_DATE_TIME_SIZE, + "%04d:%02d:%02d %02d:%02d:%02d", + pTime->tm_year + 1900, + pTime->tm_mon + 1, + pTime->tm_mday, + pTime->tm_hour, + pTime->tm_min, + pTime->tm_sec ); + } + + exifTags->pDateTimeBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff ); + sharedPtr += EXIF_DATE_TIME_SIZE; + exifTags->ulDateTimeBuffSizeBytes = EXIF_DATE_TIME_SIZE; + exifTags->eStatusDateTime = OMX_TI_TagUpdated; + } + + if ( OMX_TI_TagReadWrite == exifTags->eStatusImageWidth ) + { + exifTags->ulImageWidth = capData->mWidth; + exifTags->eStatusImageWidth = OMX_TI_TagUpdated; + } + + if ( OMX_TI_TagReadWrite == exifTags->eStatusImageHeight ) + { + exifTags->ulImageHeight = capData->mHeight; + exifTags->eStatusImageHeight = OMX_TI_TagUpdated; + } + + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsLatitude ) && + ( mEXIFData.mGPSData.mLatValid ) ) + { + exifTags->ulGpsLatitude[0] = abs(mEXIFData.mGPSData.mLatDeg); + exifTags->ulGpsLatitude[2] = abs(mEXIFData.mGPSData.mLatMin); + exifTags->ulGpsLatitude[4] = abs(mEXIFData.mGPSData.mLatSec); + exifTags->ulGpsLatitude[1] = 1; + exifTags->ulGpsLatitude[3] = 1; + exifTags->ulGpsLatitude[5] = abs(mEXIFData.mGPSData.mLatSecDiv); + exifTags->eStatusGpsLatitude = OMX_TI_TagUpdated; + } + + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpslatitudeRef ) && + ( 
mEXIFData.mGPSData.mLatValid ) ) + { + exifTags->cGpslatitudeRef[0] = ( OMX_S8 ) mEXIFData.mGPSData.mLatRef[0]; + exifTags->cGpslatitudeRef[1] = '\0'; + exifTags->eStatusGpslatitudeRef = OMX_TI_TagUpdated; + } + + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsLongitude ) && + ( mEXIFData.mGPSData.mLongValid ) ) + { + exifTags->ulGpsLongitude[0] = abs(mEXIFData.mGPSData.mLongDeg); + exifTags->ulGpsLongitude[2] = abs(mEXIFData.mGPSData.mLongMin); + exifTags->ulGpsLongitude[4] = abs(mEXIFData.mGPSData.mLongSec); + exifTags->ulGpsLongitude[1] = 1; + exifTags->ulGpsLongitude[3] = 1; + exifTags->ulGpsLongitude[5] = abs(mEXIFData.mGPSData.mLongSecDiv); + exifTags->eStatusGpsLongitude = OMX_TI_TagUpdated; + } + + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsLongitudeRef ) && + ( mEXIFData.mGPSData.mLongValid ) ) + { + exifTags->cGpsLongitudeRef[0] = ( OMX_S8 ) mEXIFData.mGPSData.mLongRef[0]; + exifTags->cGpsLongitudeRef[1] = '\0'; + exifTags->eStatusGpsLongitudeRef = OMX_TI_TagUpdated; + } + + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsAltitude ) && + ( mEXIFData.mGPSData.mAltitudeValid) ) + { + exifTags->ulGpsAltitude[0] = ( OMX_U32 ) mEXIFData.mGPSData.mAltitude; + exifTags->ulGpsAltitude[1] = 1; + exifTags->eStatusGpsAltitude = OMX_TI_TagUpdated; + } + + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsAltitudeRef ) && + ( mEXIFData.mGPSData.mAltitudeValid) ) + { + exifTags->ucGpsAltitudeRef = (OMX_U8) mEXIFData.mGPSData.mAltitudeRef; + exifTags->eStatusGpsAltitudeRef = OMX_TI_TagUpdated; + } + + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsMapDatum ) && + ( mEXIFData.mGPSData.mMapDatumValid ) ) + { + memcpy(sharedPtr, mEXIFData.mGPSData.mMapDatum, GPS_MAPDATUM_SIZE); + + exifTags->pGpsMapDatumBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff ); + exifTags->ulGpsMapDatumBuffSizeBytes = GPS_MAPDATUM_SIZE; + exifTags->eStatusGpsMapDatum = OMX_TI_TagUpdated; + sharedPtr += GPS_MAPDATUM_SIZE; + } + + if ( ( OMX_TI_TagReadWrite == 
exifTags->eStatusGpsProcessingMethod ) && + ( mEXIFData.mGPSData.mProcMethodValid ) ) + { + exifTags->pGpsProcessingMethodBuff = ( OMX_S8 * ) ( sharedPtr - sharedBuffer.pSharedBuff ); + memcpy(sharedPtr, ExifAsciiPrefix, sizeof(ExifAsciiPrefix)); + sharedPtr += sizeof(ExifAsciiPrefix); + + memcpy(sharedPtr, + mEXIFData.mGPSData.mProcMethod, + ( GPS_PROCESSING_SIZE - sizeof(ExifAsciiPrefix) ) ); + exifTags->ulGpsProcessingMethodBuffSizeBytes = GPS_PROCESSING_SIZE; + exifTags->eStatusGpsProcessingMethod = OMX_TI_TagUpdated; + sharedPtr += GPS_PROCESSING_SIZE; + } + + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsVersionId ) && + ( mEXIFData.mGPSData.mVersionIdValid ) ) + { + exifTags->ucGpsVersionId[0] = ( OMX_U8 ) mEXIFData.mGPSData.mVersionId[0]; + exifTags->ucGpsVersionId[1] = ( OMX_U8 ) mEXIFData.mGPSData.mVersionId[1]; + exifTags->ucGpsVersionId[2] = ( OMX_U8 ) mEXIFData.mGPSData.mVersionId[2]; + exifTags->ucGpsVersionId[3] = ( OMX_U8 ) mEXIFData.mGPSData.mVersionId[3]; + exifTags->eStatusGpsVersionId = OMX_TI_TagUpdated; + } + + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsTimeStamp ) && + ( mEXIFData.mGPSData.mTimeStampValid ) ) + { + exifTags->ulGpsTimeStamp[0] = mEXIFData.mGPSData.mTimeStampHour; + exifTags->ulGpsTimeStamp[2] = mEXIFData.mGPSData.mTimeStampMin; + exifTags->ulGpsTimeStamp[4] = mEXIFData.mGPSData.mTimeStampSec; + exifTags->ulGpsTimeStamp[1] = 1; + exifTags->ulGpsTimeStamp[3] = 1; + exifTags->ulGpsTimeStamp[5] = 1; + exifTags->eStatusGpsTimeStamp = OMX_TI_TagUpdated; + } + + if ( ( OMX_TI_TagReadWrite == exifTags->eStatusGpsDateStamp ) && + ( mEXIFData.mGPSData.mDatestampValid ) ) + { + strncpy( ( char * ) exifTags->cGpsDateStamp, + ( char * ) mEXIFData.mGPSData.mDatestamp, + GPS_DATESTAMP_SIZE ); + exifTags->eStatusGpsDateStamp = OMX_TI_TagUpdated; + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_TI_IndexConfigExifTags, + &sharedBuffer ); + + if ( OMX_ErrorNone != eError ) + { + 
CAMHAL_LOGEB("Error while setting EXIF configuration 0x%x", eError); + ret = -1; + } + } + + if ( NULL != memmgr_buf_array ) + { + memMgr.freeBuffer(memmgr_buf_array); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::setupEXIF_libjpeg(ExifElementsTable* exifTable, + OMX_TI_ANCILLARYDATATYPE* pAncillaryData, + OMX_TI_WHITEBALANCERESULTTYPE* pWhiteBalanceData) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + struct timeval sTv; + struct tm *pTime; + OMXCameraPortParameters * capData = NULL; + + LOG_FUNCTION_NAME; + + capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex]; + + if ((NO_ERROR == ret) && (mEXIFData.mModelValid)) { + ret = exifTable->insertElement(TAG_MODEL, mEXIFData.mModel); + } + + if ((NO_ERROR == ret) && (mEXIFData.mMakeValid)) { + ret = exifTable->insertElement(TAG_MAKE, mEXIFData.mMake); + } + + if ((NO_ERROR == ret)) { + if (mEXIFData.mFocalNum || mEXIFData.mFocalDen) { + char temp_value[256]; // arbitrarily long string + snprintf(temp_value, + sizeof(temp_value)/sizeof(char), + "%u/%u", + mEXIFData.mFocalNum, + mEXIFData.mFocalDen); + ret = exifTable->insertElement(TAG_FOCALLENGTH, temp_value); + + } + } + + if ((NO_ERROR == ret)) { + int status = gettimeofday (&sTv, NULL); + pTime = localtime (&sTv.tv_sec); + char temp_value[EXIF_DATE_TIME_SIZE + 1]; + if ((0 == status) && (NULL != pTime)) { + snprintf(temp_value, EXIF_DATE_TIME_SIZE, + "%04d:%02d:%02d %02d:%02d:%02d", + pTime->tm_year + 1900, + pTime->tm_mon + 1, + pTime->tm_mday, + pTime->tm_hour, + pTime->tm_min, + pTime->tm_sec ); + ret = exifTable->insertElement(TAG_DATETIME, temp_value); + } + } + + if ((NO_ERROR == ret)) { + char temp_value[5]; + snprintf(temp_value, sizeof(temp_value)/sizeof(char), "%lu", capData->mWidth); + ret = exifTable->insertElement(TAG_IMAGE_WIDTH, temp_value); + } + + if ((NO_ERROR == ret)) { + char temp_value[5]; + snprintf(temp_value, sizeof(temp_value)/sizeof(char), 
"%lu", capData->mHeight); + ret = exifTable->insertElement(TAG_IMAGE_LENGTH, temp_value); + } + + if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mLatValid)) { + char temp_value[256]; // arbitrarily long string + snprintf(temp_value, + sizeof(temp_value)/sizeof(char) - 1, + "%d/%d,%d/%d,%d/%d", + abs(mEXIFData.mGPSData.mLatDeg), 1, + abs(mEXIFData.mGPSData.mLatMin), 1, + abs(mEXIFData.mGPSData.mLatSec), abs(mEXIFData.mGPSData.mLatSecDiv)); + ret = exifTable->insertElement(TAG_GPS_LAT, temp_value); + } + + if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mLatValid)) { + ret = exifTable->insertElement(TAG_GPS_LAT_REF, mEXIFData.mGPSData.mLatRef); + } + + if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mLongValid)) { + char temp_value[256]; // arbitrarily long string + snprintf(temp_value, + sizeof(temp_value)/sizeof(char) - 1, + "%d/%d,%d/%d,%d/%d", + abs(mEXIFData.mGPSData.mLongDeg), 1, + abs(mEXIFData.mGPSData.mLongMin), 1, + abs(mEXIFData.mGPSData.mLongSec), abs(mEXIFData.mGPSData.mLongSecDiv)); + ret = exifTable->insertElement(TAG_GPS_LONG, temp_value); + } + + if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mLongValid)) { + ret = exifTable->insertElement(TAG_GPS_LONG_REF, mEXIFData.mGPSData.mLongRef); + } + + if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mAltitudeValid)) { + char temp_value[256]; // arbitrarily long string + snprintf(temp_value, + sizeof(temp_value)/sizeof(char) - 1, + "%d/%d", + abs( mEXIFData.mGPSData.mAltitude), 1); + ret = exifTable->insertElement(TAG_GPS_ALT, temp_value); + } + + if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mAltitudeValid)) { + char temp_value[5]; + snprintf(temp_value, + sizeof(temp_value)/sizeof(char) - 1, + "%d", mEXIFData.mGPSData.mAltitudeRef); + ret = exifTable->insertElement(TAG_GPS_ALT_REF, temp_value); + } + + if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mMapDatumValid)) { + ret = exifTable->insertElement(TAG_GPS_MAP_DATUM, mEXIFData.mGPSData.mMapDatum); + } + + if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mProcMethodValid)) { + 
char temp_value[GPS_PROCESSING_SIZE]; + + memcpy(temp_value, ExifAsciiPrefix, sizeof(ExifAsciiPrefix)); + memcpy(temp_value + sizeof(ExifAsciiPrefix), + mEXIFData.mGPSData.mProcMethod, + (GPS_PROCESSING_SIZE - sizeof(ExifAsciiPrefix))); + ret = exifTable->insertElement(TAG_GPS_PROCESSING_METHOD, temp_value); + } + + if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mVersionIdValid)) { + char temp_value[256]; // arbitrarily long string + snprintf(temp_value, + sizeof(temp_value)/sizeof(char) - 1, + "%d,%d,%d,%d", + mEXIFData.mGPSData.mVersionId[0], + mEXIFData.mGPSData.mVersionId[1], + mEXIFData.mGPSData.mVersionId[2], + mEXIFData.mGPSData.mVersionId[3]); + ret = exifTable->insertElement(TAG_GPS_VERSION_ID, temp_value); + } + + if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mTimeStampValid)) { + char temp_value[256]; // arbitrarily long string + snprintf(temp_value, + sizeof(temp_value)/sizeof(char) - 1, + "%d/%d,%d/%d,%d/%d", + mEXIFData.mGPSData.mTimeStampHour, 1, + mEXIFData.mGPSData.mTimeStampMin, 1, + mEXIFData.mGPSData.mTimeStampSec, 1); + ret = exifTable->insertElement(TAG_GPS_TIMESTAMP, temp_value); + } + + if ((NO_ERROR == ret) && (mEXIFData.mGPSData.mDatestampValid) ) { + ret = exifTable->insertElement(TAG_GPS_DATESTAMP, mEXIFData.mGPSData.mDatestamp); + } + + if (NO_ERROR == ret) { + const char* exif_orient = + ExifElementsTable::degreesToExifOrientation(mPictureRotation); + + if (exif_orient) { + ret = exifTable->insertElement(TAG_ORIENTATION, exif_orient); + } + } + + // fill in short and ushort tags + if (NO_ERROR == ret) { + char temp_value[2]; + temp_value[1] = '\0'; + + // AWB + if (mParameters3A.WhiteBallance == OMX_WhiteBalControlAuto) { + temp_value[0] = '0'; + } else { + temp_value[0] = '1'; + } + exifTable->insertElement(TAG_WHITEBALANCE, temp_value); + + // MeteringMode + // TODO(XXX): only supporting this metering mode at the moment, may change in future + temp_value[0] = '2'; + exifTable->insertElement(TAG_METERING_MODE, temp_value); + + // 
ExposureProgram + // TODO(XXX): only supporting this exposure program at the moment, may change in future + temp_value[0] = '3'; + exifTable->insertElement(TAG_EXPOSURE_PROGRAM, temp_value); + + // ColorSpace + temp_value[0] = '1'; + exifTable->insertElement(TAG_COLOR_SPACE, temp_value); + + temp_value[0] = '2'; + exifTable->insertElement(TAG_SENSING_METHOD, temp_value); + + temp_value[0] = '1'; + exifTable->insertElement(TAG_CUSTOM_RENDERED, temp_value); + } + + if (pAncillaryData && (NO_ERROR == ret)) { + unsigned int numerator = 0, denominator = 0; + char temp_value[256]; + unsigned int temp_num = 0; + + // DigitalZoomRatio + snprintf(temp_value, + sizeof(temp_value)/sizeof(char), + "%u/%u", + pAncillaryData->nDigitalZoomFactor, 1024); + exifTable->insertElement(TAG_DIGITALZOOMRATIO, temp_value); + + // ExposureTime + snprintf(temp_value, + sizeof(temp_value)/sizeof(char), + "%u/%u", + pAncillaryData->nExposureTime, 1000000); + exifTable->insertElement(TAG_EXPOSURETIME, temp_value); + + // ApertureValue and FNumber + snprintf(temp_value, + sizeof(temp_value)/sizeof(char), + "%u/%u", + pAncillaryData->nApertureValue, 100); + exifTable->insertElement(TAG_FNUMBER, temp_value); + exifTable->insertElement(TAG_APERTURE, temp_value); + + // ISO + snprintf(temp_value, + sizeof(temp_value)/sizeof(char), + "%u,0,0", + pAncillaryData->nCurrentISO); + exifTable->insertElement(TAG_ISO_EQUIVALENT, temp_value); + + // ShutterSpeed + snprintf(temp_value, + sizeof(temp_value)/sizeof(char), + "%f", + log(pAncillaryData->nExposureTime) / log(2)); + ExifElementsTable::stringToRational(temp_value, &numerator, &denominator); + snprintf(temp_value, sizeof(temp_value)/sizeof(char), "%u/%u", numerator, denominator); + exifTable->insertElement(TAG_SHUTTERSPEED, temp_value); + + // Flash + if (mParameters3A.FlashMode == OMX_IMAGE_FlashControlAuto) { + if(pAncillaryData->nFlashStatus) temp_num = 0x19; // Flash fired, auto mode + else temp_num = 0x18; // Flash did not fire, auto mode + } 
else if (mParameters3A.FlashMode == OMX_IMAGE_FlashControlOn) { + if(pAncillaryData->nFlashStatus) temp_num = 0x9; // Flash fired, compulsory flash mode + else temp_num = 0x10; // Flash did not fire, compulsory flash mode + } else if(pAncillaryData->nFlashStatus) { + temp_num = 0x1; // Flash fired + } else { + temp_num = 0x0; // Flash did not fire + } + snprintf(temp_value, + sizeof(temp_value)/sizeof(char), + "%u", temp_num); + exifTable->insertElement(TAG_FLASH, temp_value); + + if (pWhiteBalanceData) { + unsigned int lightsource = 0; + unsigned int colourtemp = pWhiteBalanceData->nColorTemperature; + bool flash_fired = (temp_num & 0x1); // value from flash above + + // stole this from framework/tools_library/src/tools_sys_exif_tags.c + if( colourtemp <= 3200 ) { + lightsource = 3; // Tungsten + } else if( colourtemp > 3200 && colourtemp <= 4800 ) { + lightsource = 2; // Fluorescent + } else if( colourtemp > 4800 && colourtemp <= 5500 ) { + lightsource = 1; // Daylight + } else if( colourtemp > 5500 && colourtemp <= 6500 ) { + lightsource = 9; // Fine weather + } else if( colourtemp > 6500 ) { + lightsource = 10; // Cloudy weather + } + + if(flash_fired) { + lightsource = 4; // Flash + } + + snprintf(temp_value, + sizeof(temp_value)/sizeof(char), + "%u", lightsource); + exifTable->insertElement(TAG_LIGHT_SOURCE, temp_value); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::convertGPSCoord(double coord, + int °, + int &min, + int &sec, + int &secDivisor) +{ + double tmp; + + LOG_FUNCTION_NAME; + + if ( coord == 0 ) { + + ALOGE("Invalid GPS coordinate"); + + return -EINVAL; + } + + deg = (int) floor(fabs(coord)); + tmp = ( fabs(coord) - floor(fabs(coord)) ) * GPS_MIN_DIV; + min = (int) floor(tmp); + tmp = ( tmp - floor(tmp) ) * ( GPS_SEC_DIV * GPS_SEC_ACCURACY ); + sec = (int) floor(tmp); + secDivisor = GPS_SEC_ACCURACY; + + if( sec >= ( GPS_SEC_DIV * GPS_SEC_ACCURACY ) ) { + sec = 0; + min += 1; + } + + if( min >= 60 ) { + min = 
0; + deg += 1; + } + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; +} + +}; diff --git a/camera/OMXCameraAdapter/OMXFD.cpp b/camera/OMXCameraAdapter/OMXFD.cpp new file mode 100644 index 0000000..15f8d05 --- /dev/null +++ b/camera/OMXCameraAdapter/OMXFD.cpp @@ -0,0 +1,490 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file OMXFD.cpp +* +* This file contains functionality for handling face detection. 
+* +*/ + +#undef LOG_TAG + +#define LOG_TAG "CameraHAL" + +#include "CameraHal.h" +#include "OMXCameraAdapter.h" + +#define FACE_DETECTION_THRESHOLD 80 + +// constants used for face smooth filtering +static const int HorizontalFilterThreshold = 40; +static const int VerticalFilterThreshold = 40; +static const int HorizontalFaceSizeThreshold = 30; +static const int VerticalFaceSizeThreshold = 30; + + +namespace android { + +status_t OMXCameraAdapter::setParametersFD(const CameraParameters ¶ms, + BaseCameraAdapter::AdapterState state) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::startFaceDetection() +{ + status_t ret = NO_ERROR; + + Mutex::Autolock lock(mFaceDetectionLock); + + ret = setFaceDetection(true, mDeviceOrientation); + if (ret != NO_ERROR) { + goto out; + } + + if ( mFaceDetectionRunning ) { + mFDSwitchAlgoPriority = true; + } + + // Note: White balance will not be face prioritized, since + // the algorithm needs full frame statistics, and not face + // regions alone. 
+ + faceDetectionNumFacesLastOutput = 0; + out: + return ret; +} + +status_t OMXCameraAdapter::stopFaceDetection() +{ + status_t ret = NO_ERROR; + const char *str = NULL; + BaseCameraAdapter::AdapterState state; + BaseCameraAdapter::getState(state); + + Mutex::Autolock lock(mFaceDetectionLock); + + ret = setFaceDetection(false, mDeviceOrientation); + if (ret != NO_ERROR) { + goto out; + } + + // Reset 3A settings + ret = setParameters3A(mParams, state); + if (ret != NO_ERROR) { + goto out; + } + + if (mPending3Asettings) { + apply3Asettings(mParameters3A); + } + + faceDetectionNumFacesLastOutput = 0; + out: + return ret; +} + +void OMXCameraAdapter::pauseFaceDetection(bool pause) +{ + Mutex::Autolock lock(mFaceDetectionLock); + // pausing will only take affect if fd is already running + if (mFaceDetectionRunning) { + mFaceDetectionPaused = pause; + faceDetectionNumFacesLastOutput = 0; + } +} + +status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_OBJDETECTIONTYPE objDetection; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = -EINVAL; + } + + if ( NO_ERROR == ret ) + { + if ( orientation > 270 ) { + orientation = 0; + } + + OMX_INIT_STRUCT_PTR (&objDetection, OMX_CONFIG_OBJDETECTIONTYPE); + objDetection.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + objDetection.nDeviceOrientation = orientation; + if ( enable ) + { + objDetection.bEnable = OMX_TRUE; + } + else + { + objDetection.bEnable = OMX_FALSE; + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_IndexConfigImageFaceDetection, + &objDetection); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while configuring face detection 0x%x", eError); + ret = -1; + } + else + { + CAMHAL_LOGDA("Face detection configured successfully"); + } + } + + if ( NO_ERROR == ret ) + { 
+ ret = setExtraData(enable, mCameraAdapterParameters.mPrevPortIndex, OMX_FaceDetection); + + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEA("Error while configuring face detection extra data"); + } + else + { + CAMHAL_LOGDA("Face detection extra data configured successfully"); + } + } + + if ( NO_ERROR == ret ) + { + mFaceDetectionRunning = enable; + mFaceDetectionPaused = !enable; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader, + sp &result, + size_t previewWidth, + size_t previewHeight) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_TI_FACERESULT *faceResult; + OMX_OTHER_EXTRADATATYPE *extraData; + OMX_FACEDETECTIONTYPE *faceData; + OMX_TI_PLATFORMPRIVATE *platformPrivate; + camera_frame_metadata_t *faces; + + LOG_FUNCTION_NAME; + + if ( OMX_StateExecuting != mComponentState ) { + CAMHAL_LOGEA("OMX component is not in executing state"); + return NO_INIT; + } + + if ( NULL == pBuffHeader ) { + CAMHAL_LOGEA("Invalid Buffer header"); + return-EINVAL; + } + + platformPrivate = (OMX_TI_PLATFORMPRIVATE *) (pBuffHeader->pPlatformPrivate); + if ( NULL != platformPrivate ) { + if ( sizeof(OMX_TI_PLATFORMPRIVATE) == platformPrivate->nSize ) { + CAMHAL_LOGVB("Size = %d, sizeof = %d, pAuxBuf = 0x%x, pAuxBufSize= %d, pMetaDataBufer = 0x%x, nMetaDataSize = %d", + platformPrivate->nSize, + sizeof(OMX_TI_PLATFORMPRIVATE), + platformPrivate->pAuxBuf1, + platformPrivate->pAuxBufSize1, + platformPrivate->pMetaDataBuffer, + platformPrivate->nMetaDataSize); + } else { + CAMHAL_LOGDB("OMX_TI_PLATFORMPRIVATE size mismatch: expected = %d, received = %d", + ( unsigned int ) sizeof(OMX_TI_PLATFORMPRIVATE), + ( unsigned int ) platformPrivate->nSize); + return -EINVAL; + } + } else { + CAMHAL_LOGDA("Invalid OMX_TI_PLATFORMPRIVATE"); + return-EINVAL; + } + + + if ( 0 >= platformPrivate->nMetaDataSize ) { + CAMHAL_LOGDB("OMX_TI_PLATFORMPRIVATE nMetaDataSize is size is %d", + ( unsigned int 
) platformPrivate->nMetaDataSize); + return -EINVAL; + } + + extraData = getExtradata((OMX_OTHER_EXTRADATATYPE *) (platformPrivate->pMetaDataBuffer), + platformPrivate->nMetaDataSize, (OMX_EXTRADATATYPE)OMX_FaceDetection); + + if ( NULL != extraData ) { + CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x", + extraData->nSize, + sizeof(OMX_OTHER_EXTRADATATYPE), + extraData->eType, + extraData->nDataSize, + extraData->nPortIndex, + extraData->nVersion); + } else { + CAMHAL_LOGDA("Invalid OMX_OTHER_EXTRADATATYPE"); + return -EINVAL; + } + + faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data; + if ( NULL != faceData ) { + if ( sizeof(OMX_FACEDETECTIONTYPE) == faceData->nSize ) { + CAMHAL_LOGVB("Faces detected %d", + faceData->ulFaceCount, + faceData->nSize, + sizeof(OMX_FACEDETECTIONTYPE), + faceData->eCameraView, + faceData->nPortIndex, + faceData->nVersion); + } else { + CAMHAL_LOGDB("OMX_FACEDETECTIONTYPE size mismatch: expected = %d, received = %d", + ( unsigned int ) sizeof(OMX_FACEDETECTIONTYPE), + ( unsigned int ) faceData->nSize); + return -EINVAL; + } + } else { + CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE"); + return -EINVAL; + } + + ret = encodeFaceCoordinates(faceData, &faces, previewWidth, previewHeight); + + if ( NO_ERROR == ret ) { + result = new CameraFDResult(faces); + } else { + result.clear(); + result = NULL; + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData, + camera_frame_metadata_t **pFaces, + size_t previewWidth, + size_t previewHeight) +{ + status_t ret = NO_ERROR; + camera_face_t *faces; + camera_frame_metadata_t *faceResult; + size_t hRange, vRange; + double tmp; + + LOG_FUNCTION_NAME; + + if ( NULL == faceData ) { + CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE parameter"); + return EINVAL; + } + + LOG_FUNCTION_NAME + + hRange = CameraFDResult::RIGHT - CameraFDResult::LEFT; + vRange = 
CameraFDResult::BOTTOM - CameraFDResult::TOP; + + faceResult = ( camera_frame_metadata_t * ) malloc(sizeof(camera_frame_metadata_t)); + if ( NULL == faceResult ) { + return -ENOMEM; + } + + if ( 0 < faceData->ulFaceCount ) { + int orient_mult; + int trans_left, trans_top, trans_right, trans_bot; + + faces = ( camera_face_t * ) malloc(sizeof(camera_face_t)*faceData->ulFaceCount); + if ( NULL == faces ) { + free(faceResult); return -ENOMEM; // don't leak the metadata struct allocated above + } + + /** + * When device is 180 degrees oriented to the sensor, need to translate + * the output from Ducati to what Android expects + * Ducati always gives face coordinates in this form, irrespective of + * rotation, i.e (l,t) always represents the point towards the left eye + * and top of hair. + * (l, t) + * --------------- + * - ,,,,,,, - + * - | | - + * - | a>| - + * - | | - + * - ,,,,,,, - + * --------------- + * (r, b) + */ + + if (mDeviceOrientation == 180) { + orient_mult = -1; + trans_left = 2; // right is now left + trans_top = 3; // bottom is now top + trans_right = 0; // left is now right + trans_bot = 1; // top is now bottom + } else { + orient_mult = 1; + trans_left = 0; // left + trans_top = 1; // top + trans_right = 2; // right + trans_bot = 3; // bottom + } + + int j = 0, i = 0; + for ( ; j < faceData->ulFaceCount ; j++) + { + OMX_S32 nLeft = 0; + OMX_S32 nTop = 0; + //Face filtering + //For real faces, it is seen that the h/w passes a score >=80 + //For false faces, we seem to get even a score of 70 sometimes. + //In order to avoid any issue at application level, we filter + //<=70 score here. 
+ if(faceData->tFacePosition[j].nScore <= FACE_DETECTION_THRESHOLD) + continue; + + if (mDeviceOrientation == 180) { + // from sensor pov, the left pos is the right corner of the face in pov of frame + nLeft = faceData->tFacePosition[j].nLeft + faceData->tFacePosition[j].nWidth; + nTop = faceData->tFacePosition[j].nTop + faceData->tFacePosition[j].nHeight; + } else { + nLeft = faceData->tFacePosition[j].nLeft; + nTop = faceData->tFacePosition[j].nTop; + } + + tmp = ( double ) nLeft / ( double ) previewWidth; + tmp *= hRange; + tmp -= hRange/2; + faces[i].rect[trans_left] = tmp; + + tmp = ( double ) nTop / ( double )previewHeight; + tmp *= vRange; + tmp -= vRange/2; + faces[i].rect[trans_top] = tmp; + + tmp = ( double ) faceData->tFacePosition[j].nWidth / ( double ) previewWidth; + tmp *= hRange; + tmp *= orient_mult; + faces[i].rect[trans_right] = faces[i].rect[trans_left] + tmp; + + tmp = ( double ) faceData->tFacePosition[j].nHeight / ( double ) previewHeight; + tmp *= vRange; + tmp *= orient_mult; + faces[i].rect[trans_bot] = faces[i].rect[trans_top] + tmp; + + faces[i].score = faceData->tFacePosition[j].nScore; + faces[i].id = 0; + faces[i].left_eye[0] = CameraFDResult::INVALID_DATA; + faces[i].left_eye[1] = CameraFDResult::INVALID_DATA; + faces[i].right_eye[0] = CameraFDResult::INVALID_DATA; + faces[i].right_eye[1] = CameraFDResult::INVALID_DATA; + faces[i].mouth[0] = CameraFDResult::INVALID_DATA; + faces[i].mouth[1] = CameraFDResult::INVALID_DATA; + i++; + } + + faceResult->number_of_faces = i; + faceResult->faces = faces; + + for (int i = 0; i < faceResult->number_of_faces; i++) + { + int centerX = (faces[i].rect[trans_left] + faces[i].rect[trans_right] ) / 2; + int centerY = (faces[i].rect[trans_top] + faces[i].rect[trans_bot] ) / 2; + + int sizeX = (faces[i].rect[trans_right] - faces[i].rect[trans_left] ) ; + int sizeY = (faces[i].rect[trans_bot] - faces[i].rect[trans_top] ) ; + + for (int j = 0; j < faceDetectionNumFacesLastOutput; j++) + { + int 
tempCenterX = (faceDetectionLastOutput[j].rect[trans_left] + + faceDetectionLastOutput[j].rect[trans_right] ) / 2; + int tempCenterY = (faceDetectionLastOutput[j].rect[trans_top] + + faceDetectionLastOutput[j].rect[trans_bot] ) / 2; + int tempSizeX = (faceDetectionLastOutput[j].rect[trans_right] - + faceDetectionLastOutput[j].rect[trans_left] ) ; + int tempSizeY = (faceDetectionLastOutput[j].rect[trans_bot] - + faceDetectionLastOutput[j].rect[trans_top] ) ; + + if ( (abs(tempCenterX - centerX) < HorizontalFilterThreshold) && + (abs(tempCenterY - centerY) < VerticalFilterThreshold) ) + { + // Found Face. It did not move too far. + // Now check size of rectangle compare to last output + if ( (abs (tempSizeX -sizeX) < HorizontalFaceSizeThreshold) && + (abs (tempSizeY -sizeY) < VerticalFaceSizeThreshold) ) + { + // Rectangle is almost same as last time + // Output exactly what was done for this face last time. + faces[i] = faceDetectionLastOutput[j]; + } + else + { + // TODO(XXX): Rectangle size changed but position is same. + // Possibly we can apply just positional correctness. + } + } + } + } + + // Save this output for next iteration + for (int i = 0; i < faceResult->number_of_faces; i++) + { + faceDetectionLastOutput[i] = faces[i]; + } + faceDetectionNumFacesLastOutput = faceResult->number_of_faces; + } else { + faceResult->number_of_faces = 0; + faceResult->faces = NULL; + } + + *pFaces = faceResult; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +}; diff --git a/camera/OMXCameraAdapter/OMXFocus.cpp b/camera/OMXCameraAdapter/OMXFocus.cpp new file mode 100644 index 0000000..32478af --- /dev/null +++ b/camera/OMXCameraAdapter/OMXFocus.cpp @@ -0,0 +1,840 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +/** +* @file OMXFocus.cpp +* +* This file contains functionality for handling focus configurations. +* +*/ + +#undef LOG_TAG + +#define LOG_TAG "CameraHAL" + +#include "CameraHal.h" +#include "OMXCameraAdapter.h" +#include "ErrorUtils.h" + +#define TOUCH_FOCUS_RANGE 0xFF +#define AF_IMAGE_CALLBACK_TIMEOUT 5000000 //5 seconds timeout +#define AF_VIDEO_CALLBACK_TIMEOUT 2800000 //2.8 seconds timeout + +namespace android { + +status_t OMXCameraAdapter::setParametersFocus(const CameraParameters &params, + BaseCameraAdapter::AdapterState state) +{ + status_t ret = NO_ERROR; + const char *str = NULL; + Vector< sp<CameraArea> > tempAreas; + size_t MAX_FOCUS_AREAS; + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mFocusAreasLock); + + str = params.get(CameraParameters::KEY_FOCUS_AREAS); + + MAX_FOCUS_AREAS = atoi(params.get(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS)); + + if ( NULL != str ) { + ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas); + } + + if ( (NO_ERROR == ret) && CameraArea::areAreasDifferent(mFocusAreas, tempAreas) ) { + mFocusAreas.clear(); + mFocusAreas = tempAreas; + if ( MAX_FOCUS_AREAS < mFocusAreas.size() ) { + CAMHAL_LOGEB("Focus areas supported %d, focus areas set %d", + MAX_FOCUS_AREAS, + mFocusAreas.size()); + ret = -EINVAL; + } + else { + if ( !mFocusAreas.isEmpty() ) { + setTouchFocus(); + } + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::doAutoFocus() +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl; + 
OMX_PARAM_FOCUSSTATUSTYPE focusStatus; + OMX_CONFIG_BOOLEANTYPE bOMX; + nsecs_t timeout = 0; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component in Invalid state"); + returnFocusStatus(false); + return -EINVAL; + } + + if ( OMX_StateExecuting != mComponentState ) + { + CAMHAL_LOGEA("OMX component not in executing state"); + returnFocusStatus(false); + return NO_ERROR; + } + + + if( ((AF_ACTIVE & getState()) != AF_ACTIVE) && ((AF_ACTIVE & getNextState()) != AF_ACTIVE) ) { + CAMHAL_LOGDA("Auto focus got canceled before doAutoFocus could be called"); + return NO_ERROR; + } + + OMX_INIT_STRUCT_PTR (&focusStatus, OMX_PARAM_FOCUSSTATUSTYPE); + + // If the app calls autoFocus, the camera will stop sending face callbacks. + pauseFaceDetection(true); + + // This is needed for applying FOCUS_REGION correctly + if ( (!mFocusAreas.isEmpty()) && (!mFocusAreas.itemAt(0)->isZeroArea())) + { + //Disable face priority + setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, false); + + //Enable region algorithm priority + setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, true); + } + + OMX_INIT_STRUCT_PTR (&focusControl, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE); + focusControl.eFocusControl = ( OMX_IMAGE_FOCUSCONTROLTYPE ) mParameters3A.Focus; + + if (mParameters3A.FocusLock) { + // this basically means user never called cancelAutoFocus after a scan... + // if this is the case we need to unlock AF to ensure we will do a scan + if (set3ALock(mUserSetExpLock, mUserSetWbLock, OMX_FALSE) != NO_ERROR) { + CAMHAL_LOGEA("Error Unlocking 3A locks"); + } else { + CAMHAL_LOGDA("AE/AWB unlocked successfully"); + } + + } else if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAuto ) { + // In case we have CAF running we should first check the AF status. + // If it has managed to lock, then do as usual and return status + // immediately. 
+ ret = checkFocus(&focusStatus); + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("Focus status check failed 0x%x!", ret); + return ret; + } else { + CAMHAL_LOGDB("Focus status check 0x%x!", focusStatus.eFocusStatus); + } + } + + if ( (focusControl.eFocusControl == OMX_IMAGE_FocusControlAuto && + ( focusStatus.eFocusStatus == OMX_FocusStatusRequest || + focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach || + focusStatus.eFocusStatus == OMX_FocusStatusLost ) ) || + (mParameters3A.Focus != (OMX_IMAGE_FOCUSCONTROLTYPE)OMX_IMAGE_FocusControlAuto) ) + { + OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE); + bOMX.bEnabled = OMX_TRUE; + + //Enable focus scanning + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_TI_IndexConfigAutofocusEnable, + &bOMX); + + // force AF, Ducati will take care of whether CAF + // or AF will be performed, depending on light conditions + if ( focusControl.eFocusControl == OMX_IMAGE_FocusControlAuto && + ( focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach || + focusStatus.eFocusStatus == OMX_FocusStatusLost ) ) { + focusControl.eFocusControl = OMX_IMAGE_FocusControlAutoLock; + } + + if ( focusControl.eFocusControl != OMX_IMAGE_FocusControlAuto ) + { + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigFocusControl, + &focusControl); + } + + if ( OMX_ErrorNone != eError ) { + CAMHAL_LOGEB("Error while starting focus 0x%x", eError); + return INVALID_OPERATION; + } else { + CAMHAL_LOGDA("Autofocus started successfully"); + } + + // configure focus timeout based on capture mode + timeout = (mCapMode == VIDEO_MODE) ? 
+ ( ( nsecs_t ) AF_VIDEO_CALLBACK_TIMEOUT * 1000 ) : + ( ( nsecs_t ) AF_IMAGE_CALLBACK_TIMEOUT * 1000 ); + + { + Mutex::Autolock lock(mDoAFMutex); + ret = mDoAFCond.waitRelative(mDoAFMutex, timeout); + } + + //If somethiing bad happened while we wait + if (mComponentState == OMX_StateInvalid) { + CAMHAL_LOGEA("Invalid State after Auto Focus Exitting!!!"); + return -EINVAL; + } + + if(ret != NO_ERROR) { + CAMHAL_LOGEA("Autofocus callback timeout expired"); + ret = returnFocusStatus(true); + } else { + ret = returnFocusStatus(false); + } + } else { // Focus mode in continuous + if ( NO_ERROR == ret ) { + ret = returnFocusStatus(true); + mPending3Asettings |= SetFocus; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::stopAutoFocus() +{ + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component in Invalid state"); + returnFocusStatus(false); + return -EINVAL; + } + + if ( OMX_StateExecuting != mComponentState ) + { + CAMHAL_LOGEA("OMX component not in executing state"); + return NO_ERROR; + } + + if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) { + // No need to stop focus if we are in infinity mode. Nothing to stop. + return NO_ERROR; + } + + OMX_INIT_STRUCT_PTR (&focusControl, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE); + focusControl.eFocusControl = OMX_IMAGE_FocusControlOff; + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigFocusControl, + &focusControl); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while stopping focus 0x%x", eError); + return ErrorUtils::omxToAndroidError(eError); + } else { + // This is a WA. Usually the OMX Camera component should + // generate AF status change OMX event fairly quickly + // ( after one preview frame ) and this notification should + // actually come from 'handleFocusCallback()'. 
+ Mutex::Autolock lock(mDoAFMutex); + mDoAFCond.broadcast(); + } + + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; +} + +status_t OMXCameraAdapter::getFocusMode(OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE &focusMode) +{; + OMX_ERRORTYPE eError = OMX_ErrorNone; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) { + CAMHAL_LOGEA("OMX component is in invalid state"); + return NO_INIT; + } + + OMX_INIT_STRUCT_PTR (&focusMode, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE); + focusMode.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigFocusControl, + &focusMode); + + if ( OMX_ErrorNone != eError ) { + CAMHAL_LOGEB("Error while retrieving focus mode 0x%x", eError); + } + + LOG_FUNCTION_NAME_EXIT; + + return ErrorUtils::omxToAndroidError(eError); +} + +status_t OMXCameraAdapter::cancelAutoFocus() +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusMode; + + LOG_FUNCTION_NAME; + + ret = getFocusMode(focusMode); + if ( NO_ERROR != ret ) { + return ret; + } + + //Stop the AF only for modes other than CAF or Inifinity + if ( ( focusMode.eFocusControl != OMX_IMAGE_FocusControlAuto ) && + ( focusMode.eFocusControl != ( OMX_IMAGE_FOCUSCONTROLTYPE ) + OMX_IMAGE_FocusControlAutoInfinity ) ) { + stopAutoFocus(); + } else if (focusMode.eFocusControl == OMX_IMAGE_FocusControlAuto) { + // This re-enabling of CAF doesn't seem to + // be needed any more. + // re-apply CAF after unlocking and canceling + // mPending3Asettings |= SetFocus; + } + // If the apps call #cancelAutoFocus()}, the face callbacks will also resume. 
+ pauseFaceDetection(false); + + LOG_FUNCTION_NAME_EXIT; + + return ret; + +} + +status_t OMXCameraAdapter::setFocusCallback(bool enabled) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_CALLBACKREQUESTTYPE focusRequstCallback; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component in Invalid state"); + ret = -EINVAL; + } + + if ( OMX_StateExecuting != mComponentState ) + { + CAMHAL_LOGEA("OMX component not in executing state"); + ret = NO_ERROR; + } + + if ( NO_ERROR == ret ) + { + + OMX_INIT_STRUCT_PTR (&focusRequstCallback, OMX_CONFIG_CALLBACKREQUESTTYPE); + focusRequstCallback.nPortIndex = OMX_ALL; + focusRequstCallback.nIndex = OMX_IndexConfigCommonFocusStatus; + + if ( enabled ) + { + focusRequstCallback.bEnable = OMX_TRUE; + } + else + { + focusRequstCallback.bEnable = OMX_FALSE; + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE) OMX_IndexConfigCallbackRequest, + &focusRequstCallback); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error registering focus callback 0x%x", eError); + ret = -1; + } + else + { + CAMHAL_LOGDB("Autofocus callback for index 0x%x registered successfully", + OMX_IndexConfigCommonFocusStatus); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::returnFocusStatus(bool timeoutReached) +{ + status_t ret = NO_ERROR; + OMX_PARAM_FOCUSSTATUSTYPE eFocusStatus; + CameraHalEvent::FocusStatus focusStatus = CameraHalEvent::FOCUS_STATUS_FAIL; + BaseCameraAdapter::AdapterState state, nextState; + BaseCameraAdapter::getState(state); + BaseCameraAdapter::getNextState(nextState); + + LOG_FUNCTION_NAME; + + OMX_INIT_STRUCT(eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE); + + if( ((AF_ACTIVE & state ) != AF_ACTIVE) && ((AF_ACTIVE & nextState ) != AF_ACTIVE) ) + { + /// We don't send focus callback if focus was not started + CAMHAL_LOGDA("Not sending focus callback because focus was not started"); + 
return NO_ERROR; + } + + if ( NO_ERROR == ret ) + { + + if ( !timeoutReached ) + { + ret = checkFocus(&eFocusStatus); + + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEA("Focus status check failed!"); + } + } + } + + if ( NO_ERROR == ret ) + { + + if ( timeoutReached ) + { + focusStatus = CameraHalEvent::FOCUS_STATUS_FAIL; + } + else + { + switch (eFocusStatus.eFocusStatus) + { + case OMX_FocusStatusReached: + { + focusStatus = CameraHalEvent::FOCUS_STATUS_SUCCESS; + break; + } + case OMX_FocusStatusOff: // AF got canceled + return NO_ERROR; + case OMX_FocusStatusUnableToReach: + case OMX_FocusStatusRequest: + default: + { + focusStatus = CameraHalEvent::FOCUS_STATUS_FAIL; + break; + } + } + // Lock CAF after AF call + if( set3ALock(mUserSetExpLock, mUserSetWbLock, OMX_TRUE) != NO_ERROR) { + CAMHAL_LOGEA("Error Applying 3A locks"); + } else { + CAMHAL_LOGDA("Focus locked. Applied focus locks successfully"); + } + stopAutoFocus(); + } + + //Query current focus distance after AF is complete + updateFocusDistances(mParameters); + } + + ret = BaseCameraAdapter::setState(CAMERA_CANCEL_AUTOFOCUS); + if ( NO_ERROR == ret ) + { + ret = BaseCameraAdapter::commitState(); + } + else + { + ret |= BaseCameraAdapter::rollbackState(); + } + + if ( NO_ERROR == ret ) + { + notifyFocusSubscribers(focusStatus); + } + + // After focus, face detection will resume sending face callbacks + pauseFaceDetection(false); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + + LOG_FUNCTION_NAME; + + if ( NULL == eFocusStatus ) + { + CAMHAL_LOGEA("Invalid focus status"); + ret = -EINVAL; + } + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component in Invalid state"); + ret = -EINVAL; + } + + if ( OMX_StateExecuting != mComponentState ) + { + CAMHAL_LOGEA("OMX component not in executing state"); + ret = NO_ERROR; + } + + if ( 
NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR (eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE); + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonFocusStatus, + eFocusStatus); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while retrieving focus status: 0x%x", eError); + ret = -1; + } + } + + if ( NO_ERROR == ret ) + { + CAMHAL_LOGDB("Focus Status: %d", eFocusStatus->eFocusStatus); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::updateFocusDistances(CameraParameters ¶ms) +{ + OMX_U32 focusNear, focusOptimal, focusFar; + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + ret = getFocusDistances(focusNear, focusOptimal, focusFar); + if ( NO_ERROR == ret) + { + ret = addFocusDistances(focusNear, focusOptimal, focusFar, params); + if ( NO_ERROR != ret ) + { + CAMHAL_LOGEB("Error in call to addFocusDistances() 0x%x", ret); + } + } + else + { + CAMHAL_LOGEB("Error in call to getFocusDistances() 0x%x", ret); + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::getFocusDistances(OMX_U32 &near,OMX_U32 &optimal, OMX_U32 &far) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError; + + OMX_TI_CONFIG_FOCUSDISTANCETYPE focusDist; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = UNKNOWN_ERROR; + } + + if ( NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR(&focusDist, OMX_TI_CONFIG_FOCUSDISTANCETYPE); + focusDist.nPortIndex = mCameraAdapterParameters.mPrevPortIndex; + + eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp, + ( OMX_INDEXTYPE ) OMX_TI_IndexConfigFocusDistance, + &focusDist); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while querying focus distances 0x%x", eError); + ret = UNKNOWN_ERROR; + } + + } + + if ( NO_ERROR == ret ) + { + near = focusDist.nFocusDistanceNear; + optimal = focusDist.nFocusDistanceOptimal; + far = focusDist.nFocusDistanceFar; + } + + 
LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+status_t OMXCameraAdapter::encodeFocusDistance(OMX_U32 dist, char *buffer, size_t length)
+{
+    status_t ret = NO_ERROR;
+    uint32_t focusScale = 1000;
+    float distFinal;
+
+    LOG_FUNCTION_NAME;
+
+    if(mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity)
+        {
+        dist=0;
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        if ( 0 == dist )
+            {
+            // snprintf (unlike strncpy) guarantees NUL termination of buffer
+            snprintf(buffer, ( length - 1 ), "%s", CameraParameters::FOCUS_DISTANCE_INFINITY);
+            }
+        else
+            {
+            distFinal = dist;
+            distFinal /= focusScale;
+            snprintf(buffer, ( length - 1 ) , "%5.3f", distFinal);
+            }
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
+                                             OMX_U32 &optimal,
+                                             OMX_U32 &far,
+                                             CameraParameters& params)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    if ( NO_ERROR == ret )
+        {
+        ret = encodeFocusDistance(near, mFocusDistNear, FOCUS_DIST_SIZE);
+        if ( NO_ERROR != ret )
+            {
+            CAMHAL_LOGEB("Error encoding near focus distance 0x%x", ret);
+            }
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        ret = encodeFocusDistance(optimal, mFocusDistOptimal, FOCUS_DIST_SIZE);
+        if ( NO_ERROR != ret )
+            {
+            CAMHAL_LOGEB("Error encoding optimal focus distance 0x%x", ret);
+            }
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        ret = encodeFocusDistance(far, mFocusDistFar, FOCUS_DIST_SIZE);
+        if ( NO_ERROR != ret )
+            {
+            CAMHAL_LOGEB("Error encoding far focus distance 0x%x", ret);
+            }
+        }
+
+    if ( NO_ERROR == ret )
+        {
+        snprintf(mFocusDistBuffer, ( FOCUS_DIST_BUFFER_SIZE - 1) ,"%s,%s,%s", mFocusDistNear,
+                                                                             mFocusDistOptimal,
+                                                                             mFocusDistFar);
+
+        params.set(CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+status_t OMXCameraAdapter::setTouchFocus()
+{
+    status_t ret = NO_ERROR;
+    OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+    OMX_ALGOAREASTYPE **focusAreas;
+    OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
+    MemoryManager memMgr;
+    int areasSize = 0;
+
+    LOG_FUNCTION_NAME;
+
+    if ( OMX_StateInvalid ==
mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = -1; + } + + if ( NO_ERROR == ret ) + { + + areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096; + focusAreas = (OMX_ALGOAREASTYPE**) memMgr.allocateBuffer(0, 0, NULL, areasSize, 1); + + OMXCameraPortParameters * mPreviewData = NULL; + mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex]; + + if (!focusAreas) + { + CAMHAL_LOGEB("Error allocating buffer for focus areas %d", eError); + return -ENOMEM; + } + + OMX_INIT_STRUCT_PTR (focusAreas[0], OMX_ALGOAREASTYPE); + + focusAreas[0]->nPortIndex = OMX_ALL; + focusAreas[0]->nNumAreas = mFocusAreas.size(); + focusAreas[0]->nAlgoAreaPurpose = OMX_AlgoAreaFocus; + + // If the area is the special case of (0, 0, 0, 0, 0), then + // the algorithm needs nNumAreas to be set to 0, + // in order to automatically choose the best fitting areas. + if ( mFocusAreas.itemAt(0)->isZeroArea() ) + { + focusAreas[0]->nNumAreas = 0; + } + + for ( unsigned int n = 0; n < mFocusAreas.size(); n++) + { + // transform the coordinates to 3A-type coordinates + mFocusAreas.itemAt(n)->transfrom(mPreviewData->mWidth, + mPreviewData->mHeight, + focusAreas[0]->tAlgoAreas[n].nTop, + focusAreas[0]->tAlgoAreas[n].nLeft, + focusAreas[0]->tAlgoAreas[n].nWidth, + focusAreas[0]->tAlgoAreas[n].nHeight); + + focusAreas[0]->tAlgoAreas[n].nLeft = + ( focusAreas[0]->tAlgoAreas[n].nLeft * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth; + focusAreas[0]->tAlgoAreas[n].nTop = + ( focusAreas[0]->tAlgoAreas[n].nTop* TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight; + focusAreas[0]->tAlgoAreas[n].nWidth = + ( focusAreas[0]->tAlgoAreas[n].nWidth * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth; + focusAreas[0]->tAlgoAreas[n].nHeight = + ( focusAreas[0]->tAlgoAreas[n].nHeight * TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight; + focusAreas[0]->tAlgoAreas[n].nPriority = mFocusAreas.itemAt(n)->getWeight(); + + CAMHAL_LOGDB("Focus area %d : top = %d left = %d 
width = %d height = %d prio = %d", + n, (int)focusAreas[0]->tAlgoAreas[n].nTop, (int)focusAreas[0]->tAlgoAreas[n].nLeft, + (int)focusAreas[0]->tAlgoAreas[n].nWidth, (int)focusAreas[0]->tAlgoAreas[n].nHeight, + (int)focusAreas[0]->tAlgoAreas[n].nPriority); + } + + OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER); + + sharedBuffer.nPortIndex = OMX_ALL; + sharedBuffer.nSharedBuffSize = areasSize; + sharedBuffer.pSharedBuff = (OMX_U8 *) focusAreas[0]; + + if ( NULL == sharedBuffer.pSharedBuff ) + { + CAMHAL_LOGEA("No resources to allocate OMX shared buffer"); + ret = -ENOMEM; + goto EXIT; + } + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE) OMX_TI_IndexConfigAlgoAreas, &sharedBuffer); + + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while setting Focus Areas configuration 0x%x", eError); + ret = -EINVAL; + } + + EXIT: + if (NULL != focusAreas) + { + memMgr.freeBuffer((void*) focusAreas); + focusAreas = NULL; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +void OMXCameraAdapter::handleFocusCallback() { + OMX_PARAM_FOCUSSTATUSTYPE eFocusStatus; + CameraHalEvent::FocusStatus focusStatus = CameraHalEvent::FOCUS_STATUS_FAIL; + status_t ret = NO_ERROR; + BaseCameraAdapter::AdapterState nextState; + BaseCameraAdapter::getNextState(nextState); + + OMX_INIT_STRUCT(eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE); + + ret = checkFocus(&eFocusStatus); + + if (NO_ERROR != ret) { + CAMHAL_LOGEA("Focus status check failed!"); + // signal and unblock doAutoFocus + if (AF_ACTIVE & nextState) { + Mutex::Autolock lock(mDoAFMutex); + mDoAFCond.broadcast(); + } + return; + } + + if ( ( eFocusStatus.eFocusStatus != OMX_FocusStatusRequest ) && + ( eFocusStatus.eFocusStatus != OMX_FocusStatusOff ) ) { + // signal doAutoFocus when a end of scan message comes + // ignore start of scan + Mutex::Autolock lock(mDoAFMutex); + mDoAFCond.broadcast(); + } + + if (mParameters3A.Focus != (OMX_IMAGE_FOCUSCONTROLTYPE) 
OMX_IMAGE_FocusControlAuto) { + CAMHAL_LOGDA("unregistered focus callback when not in CAF or doAutoFocus... not handling"); + return; + } + + // Handling for CAF Callbacks + switch (eFocusStatus.eFocusStatus) { + case OMX_FocusStatusRequest: + focusStatus = CameraHalEvent::FOCUS_STATUS_PENDING; + break; + case OMX_FocusStatusReached: + case OMX_FocusStatusOff: + case OMX_FocusStatusUnableToReach: + default: + focusStatus = CameraHalEvent::FOCUS_STATUS_DONE; + break; + } + + notifyFocusSubscribers(focusStatus); +} + +}; diff --git a/camera/OMXCameraAdapter/OMXZoom.cpp b/camera/OMXCameraAdapter/OMXZoom.cpp new file mode 100644 index 0000000..eec7691 --- /dev/null +++ b/camera/OMXCameraAdapter/OMXZoom.cpp @@ -0,0 +1,296 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file OMXZoom.cpp +* +* This file contains functionality for handling zoom configurations. 
+* +*/ + +#undef LOG_TAG + +#define LOG_TAG "CameraHAL" + +#include "CameraHal.h" +#include "OMXCameraAdapter.h" + +namespace android { + +const int32_t OMXCameraAdapter::ZOOM_STEPS [ZOOM_STAGES] = { + 65536, 68157, 70124, 72745, + 75366, 77988, 80609, 83231, + 86508, 89784, 92406, 95683, + 99615, 102892, 106168, 110100, + 114033, 117965, 122552, 126484, + 131072, 135660, 140247, 145490, + 150733, 155976, 161219, 167117, + 173015, 178913, 185467, 192020, + 198574, 205783, 212992, 220201, + 228065, 236585, 244449, 252969, + 262144, 271319, 281149, 290980, + 300810, 311951, 322437, 334234, + 346030, 357827, 370934, 384041, + 397148, 411566, 425984, 441057, + 456131, 472515, 488899, 506593, + 524288 }; + + +status_t OMXCameraAdapter::setParametersZoom(const CameraParameters ¶ms, + BaseCameraAdapter::AdapterState state) +{ + status_t ret = NO_ERROR; + Mutex::Autolock lock(mZoomLock); + + LOG_FUNCTION_NAME; + + //Immediate zoom should not be avaialable while smooth zoom is running + if ( ( ZOOM_ACTIVE & state ) != ZOOM_ACTIVE ) + { + int zoom = params.getInt(CameraParameters::KEY_ZOOM); + if( ( zoom >= 0 ) && ( zoom < ZOOM_STAGES ) ) + { + mTargetZoomIdx = zoom; + + //Immediate zoom should be applied instantly ( CTS requirement ) + mCurrentZoomIdx = mTargetZoomIdx; + if(!mZoomUpdating) { + doZoom(mCurrentZoomIdx); + mZoomUpdating = true; + } else { + mZoomUpdate = true; + } + + CAMHAL_LOGDB("Zoom by App %d", zoom); + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::doZoom(int index) +{ + status_t ret = NO_ERROR; + OMX_ERRORTYPE eError = OMX_ErrorNone; + OMX_CONFIG_SCALEFACTORTYPE zoomControl; + + LOG_FUNCTION_NAME; + + if ( OMX_StateInvalid == mComponentState ) + { + CAMHAL_LOGEA("OMX component is in invalid state"); + ret = -1; + } + + if ( ( 0 > index) || ( ( ZOOM_STAGES - 1 ) < index ) ) + { + CAMHAL_LOGEB("Zoom index %d out of range", index); + ret = -EINVAL; + } + + if (mPreviousZoomIndx == index ) + { + return NO_ERROR; + } + + if 
( NO_ERROR == ret ) + { + OMX_INIT_STRUCT_PTR (&zoomControl, OMX_CONFIG_SCALEFACTORTYPE); + zoomControl.nPortIndex = OMX_ALL; + zoomControl.xHeight = ZOOM_STEPS[index]; + zoomControl.xWidth = ZOOM_STEPS[index]; + + eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, + OMX_IndexConfigCommonDigitalZoom, + &zoomControl); + if ( OMX_ErrorNone != eError ) + { + CAMHAL_LOGEB("Error while applying digital zoom 0x%x", eError); + ret = -1; + } + else + { + CAMHAL_LOGDA("Digital zoom applied successfully"); + mPreviousZoomIndx = index; + } + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t OMXCameraAdapter::advanceZoom() +{ + status_t ret = NO_ERROR; + AdapterState state; + Mutex::Autolock lock(mZoomLock); + + BaseCameraAdapter::getState(state); + + if ( mReturnZoomStatus ) + { + mCurrentZoomIdx +=mZoomInc; + mTargetZoomIdx = mCurrentZoomIdx; + mReturnZoomStatus = false; + ret = doZoom(mCurrentZoomIdx); + notifyZoomSubscribers(mCurrentZoomIdx, true); + } + else if ( mCurrentZoomIdx != mTargetZoomIdx ) + { + if ( ZOOM_ACTIVE & state ) + { + if ( mCurrentZoomIdx < mTargetZoomIdx ) + { + mZoomInc = 1; + } + else + { + mZoomInc = -1; + } + + mCurrentZoomIdx += mZoomInc; + } + else + { + mCurrentZoomIdx = mTargetZoomIdx; + } + + ret = doZoom(mCurrentZoomIdx); + + if ( ZOOM_ACTIVE & state ) + { + if ( mCurrentZoomIdx == mTargetZoomIdx ) + { + CAMHAL_LOGDB("[Goal Reached] Smooth Zoom notify currentIdx = %d, targetIdx = %d", + mCurrentZoomIdx, + mTargetZoomIdx); + + if ( NO_ERROR == ret ) + { + + ret = BaseCameraAdapter::setState(CAMERA_STOP_SMOOTH_ZOOM); + + if ( NO_ERROR == ret ) + { + ret = BaseCameraAdapter::commitState(); + } + else + { + ret |= BaseCameraAdapter::rollbackState(); + } + + } + mReturnZoomStatus = false; + notifyZoomSubscribers(mCurrentZoomIdx, true); + } + else + { + CAMHAL_LOGDB("[Advancing] Smooth Zoom notify currentIdx = %d, targetIdx = %d", + mCurrentZoomIdx, + mTargetZoomIdx); + notifyZoomSubscribers(mCurrentZoomIdx, false); + } + } + } 
+    else if ( (mCurrentZoomIdx == mTargetZoomIdx ) &&
+              ( ZOOM_ACTIVE & state ) )
+        {
+        ret = BaseCameraAdapter::setState(CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM);
+
+        if ( NO_ERROR == ret )
+            {
+            ret = BaseCameraAdapter::commitState();
+            }
+        else
+            {
+            ret |= BaseCameraAdapter::rollbackState();
+            }
+
+        }
+
+    if(mZoomUpdate) {
+        doZoom(mTargetZoomIdx);
+        mZoomUpdate = false;
+        mZoomUpdating = true;
+    } else {
+        mZoomUpdating = false;
+    }
+
+    return ret;
+}
+
+status_t OMXCameraAdapter::startSmoothZoom(int targetIdx)
+{
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    Mutex::Autolock lock(mZoomLock);
+
+    CAMHAL_LOGDB("Start smooth zoom target = %d, mCurrentIdx = %d",
+                 targetIdx,
+                 mCurrentZoomIdx);
+
+    if ( ( targetIdx >= 0 ) && ( targetIdx < ZOOM_STAGES ) )
+        {
+        mTargetZoomIdx = targetIdx;
+        mZoomParameterIdx = mCurrentZoomIdx;
+        mReturnZoomStatus = false;
+        }
+    else
+        {
+        CAMHAL_LOGEB("Smooth value out of range %d!", targetIdx);
+        ret = -EINVAL;
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+status_t OMXCameraAdapter::stopSmoothZoom()
+{
+    status_t ret = NO_ERROR;
+    Mutex::Autolock lock(mZoomLock);
+
+    LOG_FUNCTION_NAME;
+
+    if ( mTargetZoomIdx != mCurrentZoomIdx )
+        {
+        if ( mCurrentZoomIdx < mTargetZoomIdx )
+            {
+            mZoomInc = 1;
+            }
+        else
+            {
+            mZoomInc = -1;
+            }
+        // Flag advanceZoom() to deliver the final zoom status to subscribers.
+        mReturnZoomStatus = true;
+        CAMHAL_LOGDB("Stop smooth zoom mCurrentZoomIdx = %d, mTargetZoomIdx = %d",
+                     mCurrentZoomIdx,
+                     mTargetZoomIdx);
+        }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+};
diff --git a/camera/SensorListener.cpp b/camera/SensorListener.cpp
new file mode 100644
index 0000000..bb6f577
--- /dev/null
+++ b/camera/SensorListener.cpp
@@ -0,0 +1,233 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file SensorListener.cpp +* +* This file listens and propogates sensor events to CameraHal. +* +*/ + +#define LOG_TAG "CameraHAL" + +#include "SensorListener.h" +#include "CameraHal.h" + +#include +#include +#include + +namespace android { + +/*** static declarations ***/ +static const float RADIANS_2_DEG = (float) (180 / M_PI); +// measured values on device...might need tuning +static const int DEGREES_90_THRESH = 50; +static const int DEGREES_180_THRESH = 170; +static const int DEGREES_270_THRESH = 250; + +static int sensor_events_listener(int fd, int events, void* data) +{ + SensorListener* listener = (SensorListener*) data; + ssize_t num_sensors; + ASensorEvent sen_events[8]; + while ((num_sensors = listener->mSensorEventQueue->read(sen_events, 8)) > 0) { + for (int i = 0; i < num_sensors; i++) { + if (sen_events[i].type == Sensor::TYPE_ACCELEROMETER) { + float x = sen_events[i].vector.azimuth; + float y = sen_events[i].vector.pitch; + float z = sen_events[i].vector.roll; + float radius = 0; + int tilt = 0, orient = 0; + + CAMHAL_LOGVA("ACCELEROMETER EVENT"); + CAMHAL_LOGVB(" azimuth = %f pitch = %f roll = %f", + sen_events[i].vector.azimuth, + sen_events[i].vector.pitch, + sen_events[i].vector.roll); + // see http://en.wikipedia.org/wiki/Spherical_coordinate_system#Cartesian_coordinates + // about conversion from cartesian to spherical for orientation calculations + radius = (float) sqrt(x * x + y * y + z * z); + tilt = (int) asinf(z / radius) * RADIANS_2_DEG; + orient = (int) atan2f(-x, y) * RADIANS_2_DEG; + + if (orient < 0) { 
+ orient += 360; + } + + if (orient >= DEGREES_270_THRESH) { + orient = 270; + } else if (orient >= DEGREES_180_THRESH) { + orient = 180; + } else if (orient >= DEGREES_90_THRESH) { + orient = 90; + } else { + orient = 0; + } + listener->handleOrientation(orient, tilt); + CAMHAL_LOGVB(" tilt = %d orientation = %d", tilt, orient); + } else if (sen_events[i].type == Sensor::TYPE_GYROSCOPE) { + CAMHAL_LOGVA("GYROSCOPE EVENT"); + } + } + } + + if (num_sensors < 0 && num_sensors != -EAGAIN) { + CAMHAL_LOGEB("reading events failed: %s", strerror(-num_sensors)); + } + + return 1; +} + +/****** public - member functions ******/ +SensorListener::SensorListener() { + LOG_FUNCTION_NAME; + + sensorsEnabled = 0; + mOrientationCb = NULL; + mSensorEventQueue = NULL; + mSensorLooperThread = NULL; + + LOG_FUNCTION_NAME_EXIT; +} + +SensorListener::~SensorListener() { + LOG_FUNCTION_NAME; + + CAMHAL_LOGDA("Kill looper thread"); + if (mSensorLooperThread.get()) { + // 1. Request exit + // 2. Wake up looper which should be polling for an event + // 3. 
Wait for exit + mSensorLooperThread->requestExit(); + mSensorLooperThread->wake(); + mSensorLooperThread->join(); + mSensorLooperThread.clear(); + mSensorLooperThread = NULL; + } + + CAMHAL_LOGDA("Kill looper"); + if (mLooper.get()) { + mLooper->removeFd(mSensorEventQueue->getFd()); + mLooper.clear(); + mLooper = NULL; + } + CAMHAL_LOGDA("SensorListener destroyed"); + + LOG_FUNCTION_NAME_EXIT; +} + +status_t SensorListener::initialize() { + status_t ret = NO_ERROR; + SensorManager& mgr(SensorManager::getInstance()); + + LOG_FUNCTION_NAME; + + sp mLooper; + + mSensorEventQueue = mgr.createEventQueue(); + if (mSensorEventQueue == NULL) { + CAMHAL_LOGEA("createEventQueue returned NULL"); + ret = NO_INIT; + goto out; + } + + mLooper = new Looper(false); + mLooper->addFd(mSensorEventQueue->getFd(), 0, ALOOPER_EVENT_INPUT, sensor_events_listener, this); + + if (mSensorLooperThread.get() == NULL) + mSensorLooperThread = new SensorLooperThread(mLooper.get()); + + if (mSensorLooperThread.get() == NULL) { + CAMHAL_LOGEA("Couldn't create sensor looper thread"); + ret = NO_MEMORY; + goto out; + } + + ret = mSensorLooperThread->run("sensor looper thread", PRIORITY_URGENT_DISPLAY); + if (ret == INVALID_OPERATION){ + CAMHAL_LOGDA("thread already running ?!?"); + } else if (ret != NO_ERROR) { + CAMHAL_LOGEA("couldn't run thread"); + goto out; + } + + out: + LOG_FUNCTION_NAME_EXIT; + return ret; +} + +void SensorListener::setCallbacks(orientation_callback_t orientation_cb, void *cookie) { + LOG_FUNCTION_NAME; + + if (orientation_cb) { + mOrientationCb = orientation_cb; + } + mCbCookie = cookie; + + LOG_FUNCTION_NAME_EXIT; +} + +void SensorListener::handleOrientation(uint32_t orientation, uint32_t tilt) { + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(&mLock); + + if (mOrientationCb && (sensorsEnabled & SENSOR_ORIENTATION)) { + mOrientationCb(orientation, tilt, mCbCookie); + } + + LOG_FUNCTION_NAME_EXIT; +} + +void SensorListener::enableSensor(sensor_type_t type) { + Sensor const* 
sensor; + SensorManager& mgr(SensorManager::getInstance()); + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(&mLock); + + if ((type & SENSOR_ORIENTATION) && !(sensorsEnabled & SENSOR_ORIENTATION)) { + sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER); + CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string()); + mSensorEventQueue->enableSensor(sensor); + mSensorEventQueue->setEventRate(sensor, ms2ns(100)); + sensorsEnabled |= SENSOR_ORIENTATION; + } + + LOG_FUNCTION_NAME_EXIT; +} + +void SensorListener::disableSensor(sensor_type_t type) { + Sensor const* sensor; + SensorManager& mgr(SensorManager::getInstance()); + + LOG_FUNCTION_NAME; + + Mutex::Autolock lock(&mLock); + + if ((type & SENSOR_ORIENTATION) && (sensorsEnabled & SENSOR_ORIENTATION)) { + sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER); + CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string()); + mSensorEventQueue->disableSensor(sensor); + sensorsEnabled &= ~SENSOR_ORIENTATION; + } + + LOG_FUNCTION_NAME_EXIT; +} + +} // namespace android diff --git a/camera/TICameraParameters.cpp b/camera/TICameraParameters.cpp new file mode 100644 index 0000000..221cff4 --- /dev/null +++ b/camera/TICameraParameters.cpp @@ -0,0 +1,202 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + + + +#define LOG_TAG "CameraHAL" +#include + +#include +#include +#include +#include "CameraHal.h" + +namespace android { + +//TI extensions to camera mode +const char TICameraParameters::HIGH_PERFORMANCE_MODE[] = "high-performance"; +const char TICameraParameters::HIGH_QUALITY_MODE[] = "high-quality"; +const char TICameraParameters::HIGH_QUALITY_ZSL_MODE[] = "high-quality-zsl"; +const char TICameraParameters::VIDEO_MODE[] = "video-mode"; + +// TI extensions to standard android Parameters +const char TICameraParameters::KEY_SUPPORTED_CAMERAS[] = "camera-indexes"; +const char TICameraParameters::KEY_CAMERA[] = "camera-index"; +const char TICameraParameters::KEY_SHUTTER_ENABLE[] = "shutter-enable"; +const char TICameraParameters::KEY_CAMERA_NAME[] = "camera-name"; +const char TICameraParameters::KEY_BURST[] = "burst-capture"; +const char TICameraParameters::KEY_CAP_MODE[] = "mode"; +const char TICameraParameters::KEY_VNF[] = "vnf"; +const char TICameraParameters::KEY_SATURATION[] = "saturation"; +const char TICameraParameters::KEY_BRIGHTNESS[] = "brightness"; +const char TICameraParameters::KEY_EXPOSURE_MODE[] = "exposure"; +const char TICameraParameters::KEY_SUPPORTED_EXPOSURE[] = "exposure-mode-values"; +const char TICameraParameters::KEY_CONTRAST[] = "contrast"; +const char TICameraParameters::KEY_SHARPNESS[] = "sharpness"; +const char TICameraParameters::KEY_ISO[] = "iso"; +const char TICameraParameters::KEY_SUPPORTED_ISO_VALUES[] = "iso-mode-values"; +const char TICameraParameters::KEY_SUPPORTED_IPP[] = "ipp-values"; +const char TICameraParameters::KEY_IPP[] = "ipp"; +const char TICameraParameters::KEY_MAN_EXPOSURE[] = "manual-exposure"; +const char TICameraParameters::KEY_METERING_MODE[] = "meter-mode"; +const char TICameraParameters::KEY_PADDED_WIDTH[] = "padded-width"; +const char TICameraParameters::KEY_PADDED_HEIGHT[] = "padded-height"; +const char TICameraParameters::KEY_EXP_BRACKETING_RANGE[] = "exp-bracketing-range"; +const char 
TICameraParameters::KEY_TEMP_BRACKETING[] = "temporal-bracketing"; +const char TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS[] = "temporal-bracketing-range-positive"; +const char TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG[] = "temporal-bracketing-range-negative"; +const char TICameraParameters::KEY_S3D_SUPPORTED[] = "s3d-supported"; +const char TICameraParameters::KEY_MEASUREMENT_ENABLE[] = "measurement"; +const char TICameraParameters::KEY_GBCE[] = "gbce"; +const char TICameraParameters::KEY_GLBCE[] = "glbce"; +const char TICameraParameters::KEY_CURRENT_ISO[] = "current-iso"; +const char TICameraParameters::KEY_SENSOR_ORIENTATION[] = "sensor-orientation"; +const char TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES[] = "sensor-orientation-values"; +const char TICameraParameters::KEY_MINFRAMERATE[] = "min-framerate"; +const char TICameraParameters::KEY_MAXFRAMERATE[] = "max-framerate"; +const char TICameraParameters::KEY_RECORDING_HINT[] = "internal-recording-hint"; +const char TICameraParameters::KEY_AUTO_FOCUS_LOCK[] = "auto-focus-lock"; + +//TI extensions for enabling/disabling GLBCE +const char TICameraParameters::GLBCE_ENABLE[] = "enable"; +const char TICameraParameters::GLBCE_DISABLE[] = "disable"; + +//TI extensions for enabling/disabling GBCE +const char TICameraParameters::GBCE_ENABLE[] = "enable"; +const char TICameraParameters::GBCE_DISABLE[] = "disable"; + +//TI extensions for enabling/disabling measurement +const char TICameraParameters::MEASUREMENT_ENABLE[] = "enable"; +const char TICameraParameters::MEASUREMENT_DISABLE[] = "disable"; + +//TI extensions for zoom +const char TICameraParameters::ZOOM_SUPPORTED[] = "true"; +const char TICameraParameters::ZOOM_UNSUPPORTED[] = "false"; + +// TI extensions for 2D Preview in Stereo Mode +const char TICameraParameters::KEY_S3D2D_PREVIEW[] = "s3d2d-preview"; +const char TICameraParameters::KEY_S3D2D_PREVIEW_MODE[] = "s3d2d-preview-values"; + +//TI extensions for SAC/SMC +const char 
TICameraParameters::KEY_AUTOCONVERGENCE[] = "auto-convergence"; +const char TICameraParameters::KEY_AUTOCONVERGENCE_MODE[] = "auto-convergence-mode"; +const char TICameraParameters::KEY_MANUALCONVERGENCE_VALUES[] = "manual-convergence-values"; + +//TI extensions for setting EXIF tags +const char TICameraParameters::KEY_EXIF_MODEL[] = "exif-model"; +const char TICameraParameters::KEY_EXIF_MAKE[] = "exif-make"; + +//TI extensions for additiona GPS data +const char TICameraParameters::KEY_GPS_MAPDATUM[] = "gps-mapdatum"; +const char TICameraParameters::KEY_GPS_VERSION[] = "gps-version"; +const char TICameraParameters::KEY_GPS_DATESTAMP[] = "gps-datestamp"; + +//TI extensions for enabling/disabling shutter sound +const char TICameraParameters::SHUTTER_ENABLE[] = "true"; +const char TICameraParameters::SHUTTER_DISABLE[] = "false"; + +//TI extensions for Temporal Bracketing +const char TICameraParameters::BRACKET_ENABLE[] = "enable"; +const char TICameraParameters::BRACKET_DISABLE[] = "disable"; + +//TI extensions to Image post-processing +const char TICameraParameters::IPP_LDCNSF[] = "ldc-nsf"; +const char TICameraParameters::IPP_LDC[] = "ldc"; +const char TICameraParameters::IPP_NSF[] = "nsf"; +const char TICameraParameters::IPP_NONE[] = "off"; + +// TI extensions to standard android pixel formats +const char TICameraParameters::PIXEL_FORMAT_RAW[] = "raw"; +const char TICameraParameters::PIXEL_FORMAT_JPS[] = "jps"; +const char TICameraParameters::PIXEL_FORMAT_MPO[] = "mpo"; +const char TICameraParameters::PIXEL_FORMAT_RAW_JPEG[] = "raw+jpeg"; +const char TICameraParameters::PIXEL_FORMAT_RAW_MPO[] = "raw+mpo"; + +// TI extensions to standard android scene mode settings +const char TICameraParameters::SCENE_MODE_SPORT[] = "sport"; +const char TICameraParameters::SCENE_MODE_CLOSEUP[] = "closeup"; +const char TICameraParameters::SCENE_MODE_AQUA[] = "aqua"; +const char TICameraParameters::SCENE_MODE_SNOWBEACH[] = "snow-beach"; +const char 
TICameraParameters::SCENE_MODE_MOOD[] = "mood"; +const char TICameraParameters::SCENE_MODE_NIGHT_INDOOR[] = "night-indoor"; +const char TICameraParameters::SCENE_MODE_DOCUMENT[] = "document"; +const char TICameraParameters::SCENE_MODE_BARCODE[] = "barcode"; +const char TICameraParameters::SCENE_MODE_VIDEO_SUPER_NIGHT[] = "super-night"; +const char TICameraParameters::SCENE_MODE_VIDEO_CINE[] = "cine"; +const char TICameraParameters::SCENE_MODE_VIDEO_OLD_FILM[] = "old-film"; + +// TI extensions to standard android white balance values. +const char TICameraParameters::WHITE_BALANCE_TUNGSTEN[] = "tungsten"; +const char TICameraParameters::WHITE_BALANCE_HORIZON[] = "horizon"; +const char TICameraParameters::WHITE_BALANCE_SUNSET[] = "sunset"; +const char TICameraParameters::WHITE_BALANCE_FACE[] = "face-priority"; + +// TI extensions to standard android focus modes. +const char TICameraParameters::FOCUS_MODE_PORTRAIT[] = "portrait"; +const char TICameraParameters::FOCUS_MODE_EXTENDED[] = "extended"; +const char TICameraParameters::FOCUS_MODE_FACE[] = "face-priority"; + +// TI extensions to add values for effect settings. 
+const char TICameraParameters::EFFECT_NATURAL[] = "natural"; +const char TICameraParameters::EFFECT_VIVID[] = "vivid"; +const char TICameraParameters::EFFECT_COLOR_SWAP[] = "color-swap"; +const char TICameraParameters::EFFECT_BLACKWHITE[] = "blackwhite"; + +// TI extensions to add exposure preset modes +const char TICameraParameters::EXPOSURE_MODE_OFF[] = "off"; +const char TICameraParameters::EXPOSURE_MODE_AUTO[] = "auto"; +const char TICameraParameters::EXPOSURE_MODE_NIGHT[] = "night"; +const char TICameraParameters::EXPOSURE_MODE_BACKLIGHT[] = "backlighting"; +const char TICameraParameters::EXPOSURE_MODE_SPOTLIGHT[] = "spotlight"; +const char TICameraParameters::EXPOSURE_MODE_SPORTS[] = "sports"; +const char TICameraParameters::EXPOSURE_MODE_SNOW[] = "snow"; +const char TICameraParameters::EXPOSURE_MODE_BEACH[] = "beach"; +const char TICameraParameters::EXPOSURE_MODE_APERTURE[] = "aperture"; +const char TICameraParameters::EXPOSURE_MODE_SMALL_APERTURE[] = "small-aperture"; +const char TICameraParameters::EXPOSURE_MODE_FACE[] = "face-priority"; + +// TI extensions to add iso values +const char TICameraParameters::ISO_MODE_AUTO[] = "auto"; +const char TICameraParameters::ISO_MODE_100[] = "100"; +const char TICameraParameters::ISO_MODE_200[] = "200"; +const char TICameraParameters::ISO_MODE_400[] = "400"; +const char TICameraParameters::ISO_MODE_800[] = "800"; +const char TICameraParameters::ISO_MODE_1000[] = "1000"; +const char TICameraParameters::ISO_MODE_1200[] = "1200"; +const char TICameraParameters::ISO_MODE_1600[] = "1600"; + +// TI extensions to add auto convergence values +const char TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE[] = "mode-disable"; +const char TICameraParameters::AUTOCONVERGENCE_MODE_FRAME[] = "mode-frame"; +const char TICameraParameters::AUTOCONVERGENCE_MODE_CENTER[] = "mode-center"; +const char TICameraParameters::AUTOCONVERGENCE_MODE_FFT[] = "mode-fft"; +const char TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL[] = "mode-manual"; + 
+//TI values for camera direction +const char TICameraParameters::FACING_FRONT[]="front"; +const char TICameraParameters::FACING_BACK[]="back"; + +//TI extensions to flash settings +const char TICameraParameters::FLASH_MODE_FILL_IN[] = "fill-in"; + +//TI extensions to add sensor orientation parameters +const char TICameraParameters::ORIENTATION_SENSOR_NONE[] = "0"; +const char TICameraParameters::ORIENTATION_SENSOR_90[] = "90"; +const char TICameraParameters::ORIENTATION_SENSOR_180[] = "180"; +const char TICameraParameters::ORIENTATION_SENSOR_270[] = "270"; +}; + diff --git a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp new file mode 100644 index 0000000..c365023 --- /dev/null +++ b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp @@ -0,0 +1,611 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file V4LCameraAdapter.cpp +* +* This file maps the Camera Hardware Interface to V4L2. 
+* +*/ + + +#include "V4LCameraAdapter.h" +#include "CameraHal.h" +#include "TICameraParameters.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +#include +#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false )) +static int mDebugFps = 0; + +#define Q16_OFFSET 16 + +#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);} + +namespace android { + +#undef LOG_TAG +///Maintain a separate tag for V4LCameraAdapter logs to isolate issues OMX specific +#define LOG_TAG "CameraHAL" + +//frames skipped before recalculating the framerate +#define FPS_PERIOD 30 + +Mutex gAdapterLock; +const char *device = DEVICE; + + +/*--------------------Camera Adapter Class STARTS here-----------------------------*/ + +status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps) +{ + LOG_FUNCTION_NAME; + + char value[PROPERTY_VALUE_MAX]; + property_get("debug.camera.showfps", value, "0"); + mDebugFps = atoi(value); + + int ret = NO_ERROR; + + // Allocate memory for video info structure + mVideoInfo = (struct VideoInfo *) calloc (1, sizeof (struct VideoInfo)); + if(!mVideoInfo) + { + return NO_MEMORY; + } + + if ((mCameraHandle = open(device, O_RDWR)) == -1) + { + CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno)); + return -EINVAL; + } + + ret = ioctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap); + if (ret < 0) + { + CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera"); + return -EINVAL; + } + + if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) + { + CAMHAL_LOGEA("Error while adapter initialization: video capture not supported."); + return -EINVAL; + } + + if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING)) + { + CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o"); + return -EINVAL; + } + + // Initialize flags + mPreviewing = false; + mVideoInfo->isStreaming = 
false; + mRecording = false; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t V4LCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType) +{ + + status_t ret = NO_ERROR; + + if ( !mVideoInfo->isStreaming ) + { + return NO_ERROR; + } + + int i = mPreviewBufs.valueFor(( unsigned int )frameBuf); + if(i<0) + { + return BAD_VALUE; + } + + mVideoInfo->buf.index = i; + mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + mVideoInfo->buf.memory = V4L2_MEMORY_MMAP; + + ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf); + if (ret < 0) { + CAMHAL_LOGEA("Init: VIDIOC_QBUF Failed"); + return -1; + } + + nQueued++; + + return ret; + +} + +status_t V4LCameraAdapter::setParameters(const CameraParameters ¶ms) +{ + LOG_FUNCTION_NAME; + + status_t ret = NO_ERROR; + + int width, height; + + params.getPreviewSize(&width, &height); + + CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT); + + mVideoInfo->width = width; + mVideoInfo->height = height; + mVideoInfo->framesizeIn = (width * height << 1); + mVideoInfo->formatIn = DEFAULT_PIXEL_FORMAT; + + mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + mVideoInfo->format.fmt.pix.width = width; + mVideoInfo->format.fmt.pix.height = height; + mVideoInfo->format.fmt.pix.pixelformat = DEFAULT_PIXEL_FORMAT; + + ret = ioctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format); + if (ret < 0) { + CAMHAL_LOGEB("Open: VIDIOC_S_FMT Failed: %s", strerror(errno)); + return ret; + } + + // Udpate the current parameter set + mParams = params; + + LOG_FUNCTION_NAME_EXIT; + return ret; +} + + +void V4LCameraAdapter::getParameters(CameraParameters& params) +{ + LOG_FUNCTION_NAME; + + // Return the current parameter set + params = mParams; + + LOG_FUNCTION_NAME_EXIT; +} + + +///API to give the buffers to Adapter +status_t V4LCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable) +{ + status_t ret = NO_ERROR; + + 
LOG_FUNCTION_NAME; + + Mutex::Autolock lock(mLock); + + switch(mode) + { + case CAMERA_PREVIEW: + ret = UseBuffersPreview(bufArr, num); + break; + + //@todo Insert Image capture case here + + case CAMERA_VIDEO: + //@warn Video capture is not fully supported yet + ret = UseBuffersPreview(bufArr, num); + break; + + } + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num) +{ + int ret = NO_ERROR; + + if(NULL == bufArr) + { + return BAD_VALUE; + } + + //First allocate adapter internal buffers at V4L level for USB Cam + //These are the buffers from which we will copy the data into overlay buffers + /* Check if camera can handle NB_BUFFER buffers */ + mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + mVideoInfo->rb.memory = V4L2_MEMORY_MMAP; + mVideoInfo->rb.count = num; + + ret = ioctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb); + if (ret < 0) { + CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno)); + return ret; + } + + for (int i = 0; i < num; i++) { + + memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer)); + + mVideoInfo->buf.index = i; + mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + mVideoInfo->buf.memory = V4L2_MEMORY_MMAP; + + ret = ioctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf); + if (ret < 0) { + CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno)); + return ret; + } + + mVideoInfo->mem[i] = mmap (0, + mVideoInfo->buf.length, + PROT_READ | PROT_WRITE, + MAP_SHARED, + mCameraHandle, + mVideoInfo->buf.m.offset); + + if (mVideoInfo->mem[i] == MAP_FAILED) { + CAMHAL_LOGEB("Unable to map buffer (%s)", strerror(errno)); + return -1; + } + + uint32_t *ptr = (uint32_t*) bufArr; + + //Associate each Camera internal buffer with the one from Overlay + mPreviewBufs.add((int)ptr[i], i); + + } + + // Update the preview buffer count + mPreviewBufferCount = num; + + return ret; +} + +status_t V4LCameraAdapter::startPreview() +{ + status_t ret = NO_ERROR; + + 
Mutex::Autolock lock(mPreviewBufsLock); + + if(mPreviewing) + { + return BAD_VALUE; + } + + for (int i = 0; i < mPreviewBufferCount; i++) { + + mVideoInfo->buf.index = i; + mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + mVideoInfo->buf.memory = V4L2_MEMORY_MMAP; + + ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf); + if (ret < 0) { + CAMHAL_LOGEA("VIDIOC_QBUF Failed"); + return -EINVAL; + } + + nQueued++; + } + + enum v4l2_buf_type bufType; + if (!mVideoInfo->isStreaming) { + bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + ret = ioctl (mCameraHandle, VIDIOC_STREAMON, &bufType); + if (ret < 0) { + CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno)); + return ret; + } + + mVideoInfo->isStreaming = true; + } + + // Create and start preview thread for receiving buffers from V4L Camera + mPreviewThread = new PreviewThread(this); + + CAMHAL_LOGDA("Created preview thread"); + + + //Update the flag to indicate we are previewing + mPreviewing = true; + + return ret; + +} + +status_t V4LCameraAdapter::stopPreview() +{ + enum v4l2_buf_type bufType; + int ret = NO_ERROR; + + Mutex::Autolock lock(mPreviewBufsLock); + + if(!mPreviewing) + { + return NO_INIT; + } + + if (mVideoInfo->isStreaming) { + bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + ret = ioctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType); + if (ret < 0) { + CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno)); + return ret; + } + + mVideoInfo->isStreaming = false; + } + + mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + mVideoInfo->buf.memory = V4L2_MEMORY_MMAP; + + nQueued = 0; + nDequeued = 0; + + /* Unmap buffers */ + for (int i = 0; i < mPreviewBufferCount; i++) + if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0) + CAMHAL_LOGEA("Unmap failed"); + + mPreviewBufs.clear(); + + mPreviewThread->requestExitAndWait(); + mPreviewThread.clear(); + + return ret; + +} + +char * V4LCameraAdapter::GetFrame(int &index) +{ + int ret; + + mVideoInfo->buf.type = 
V4L2_BUF_TYPE_VIDEO_CAPTURE; + mVideoInfo->buf.memory = V4L2_MEMORY_MMAP; + + /* DQ */ + ret = ioctl(mCameraHandle, VIDIOC_DQBUF, &mVideoInfo->buf); + if (ret < 0) { + CAMHAL_LOGEA("GetFrame: VIDIOC_DQBUF Failed"); + return NULL; + } + nDequeued++; + + index = mVideoInfo->buf.index; + + return (char *)mVideoInfo->mem[mVideoInfo->buf.index]; +} + +//API to get the frame size required to be allocated. This size is used to override the size passed +//by camera service when VSTAB/VNF is turned ON for example +status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height) +{ + status_t ret = NO_ERROR; + + // Just return the current preview size, nothing more to do here. + mParams.getPreviewSize(( int * ) &width, + ( int * ) &height); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +status_t V4LCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount) +{ + // We don't support meta data, so simply return + return NO_ERROR; +} + +status_t V4LCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount) +{ + // We don't support image capture yet, safely return from here without messing up + return NO_ERROR; +} + +static void debugShowFPS() +{ + static int mFrameCount = 0; + static int mLastFrameCount = 0; + static nsecs_t mLastFpsTime = 0; + static float mFps = 0; + mFrameCount++; + if (!(mFrameCount & 0x1F)) { + nsecs_t now = systemTime(); + nsecs_t diff = now - mLastFpsTime; + mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff; + mLastFpsTime = now; + mLastFrameCount = mFrameCount; + ALOGD("Camera %d Frames, %f FPS", mFrameCount, mFps); + } + // XXX: mFPS has the value we want +} + +status_t V4LCameraAdapter::recalculateFPS() +{ + float currentFPS; + + mFrameCount++; + + if ( ( mFrameCount % FPS_PERIOD ) == 0 ) + { + nsecs_t now = systemTime(); + nsecs_t diff = now - mLastFPSTime; + currentFPS = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff; + mLastFPSTime = now; + mLastFrameCount = mFrameCount; + + if ( 1 == 
mIter ) + { + mFPS = currentFPS; + } + else + { + //cumulative moving average + mFPS = mLastFPS + (currentFPS - mLastFPS)/mIter; + } + + mLastFPS = mFPS; + mIter++; + } + + return NO_ERROR; +} + +void V4LCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt) +{ + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; +} + + +V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index) +{ + LOG_FUNCTION_NAME; + + // Nothing useful to do in the constructor + + LOG_FUNCTION_NAME_EXIT; +} + +V4LCameraAdapter::~V4LCameraAdapter() +{ + LOG_FUNCTION_NAME; + + // Close the camera handle and free the video info structure + close(mCameraHandle); + + if (mVideoInfo) + { + free(mVideoInfo); + mVideoInfo = NULL; + } + + LOG_FUNCTION_NAME_EXIT; +} + +/* Preview Thread */ +// --------------------------------------------------------------------------- + +int V4LCameraAdapter::previewThread() +{ + status_t ret = NO_ERROR; + int width, height; + CameraFrame frame; + + if (mPreviewing) + { + int index = 0; + char *fp = this->GetFrame(index); + if(!fp) + { + return BAD_VALUE; + } + + uint8_t* ptr = (uint8_t*) mPreviewBufs.keyAt(index); + + int width, height; + uint16_t* dest = (uint16_t*)ptr; + uint16_t* src = (uint16_t*) fp; + mParams.getPreviewSize(&width, &height); + for(int i=0;i>24)<<16)|(((*src & 0x00FF0000)>>16)<<24) | + (((*src & 0xFF00)>>8)<<0)|(((*src & 0x00FF)>>0)<<8); + src++; + dest++; + } + dest += 4096/2-width; + } + + mParams.getPreviewSize(&width, &height); + frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC; + frame.mBuffer = ptr; + frame.mLength = width*height*2; + frame.mAlignment = width*2; + frame.mOffset = 0; + frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);; + + ret = sendFrameToSubscribers(&frame); + + } + + return ret; +} + +extern "C" CameraAdapter* CameraAdapter_Factory() +{ + CameraAdapter *adapter = NULL; + Mutex::Autolock lock(gAdapterLock); + + LOG_FUNCTION_NAME; + + adapter = new V4LCameraAdapter(sensor_index); + if ( adapter ) { + 
CAMHAL_LOGDB("New OMX Camera adapter instance created for sensor %d",sensor_index); + } else { + CAMHAL_LOGEA("Camera adapter create failed!"); + } + + LOG_FUNCTION_NAME_EXIT; + + return adapter; +} + +extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array, + const unsigned int starting_camera, + const unsigned int max_camera) { + int num_cameras_supported = 0; + CameraProperties::Properties* properties = NULL; + + LOG_FUNCTION_NAME; + + if(!properties_array) + { + return -EINVAL; + } + + // TODO: Need to tell camera properties what other cameras we can support + if (starting_camera + num_cameras_supported < max_camera) { + num_cameras_supported++; + properties = properties_array + starting_camera; + properties->set(CameraProperties::CAMERA_NAME, "USBCamera"); + } + + LOG_FUNCTION_NAME_EXIT; + + return num_cameras_supported; +} + +}; + + +/*--------------------Camera Adapter Class ENDS here-----------------------------*/ + diff --git a/camera/inc/ANativeWindowDisplayAdapter.h b/camera/inc/ANativeWindowDisplayAdapter.h new file mode 100644 index 0000000..9cdf45a --- /dev/null +++ b/camera/inc/ANativeWindowDisplayAdapter.h @@ -0,0 +1,188 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + + +#include "CameraHal.h" +#include +#include + +//temporarily define format here +#define HAL_PIXEL_FORMAT_TI_NV12 0x100 + +namespace android { + +/** + * Display handler class - This class basically handles the buffer posting to display + */ + +class ANativeWindowDisplayAdapter : public DisplayAdapter +{ +public: + + typedef struct + { + void *mBuffer; + void *mUser; + int mOffset; + int mWidth; + int mHeight; + int mWidthStride; + int mHeightStride; + int mLength; + CameraFrame::FrameType mType; + } DisplayFrame; + + enum DisplayStates + { + DISPLAY_INIT = 0, + DISPLAY_STARTED, + DISPLAY_STOPPED, + DISPLAY_EXITED + }; + +public: + + ANativeWindowDisplayAdapter(); + virtual ~ANativeWindowDisplayAdapter(); + + ///Initializes the display adapter creates any resources required + virtual status_t initialize(); + + virtual int setPreviewWindow(struct preview_stream_ops *window); + virtual int setFrameProvider(FrameNotifier *frameProvider); + virtual int setErrorHandler(ErrorNotifier *errorNotifier); + virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL, S3DParameters *s3dParams = NULL); + virtual int disableDisplay(bool cancel_buffer = true); + virtual status_t pauseDisplay(bool pause); + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + //Used for shot to snapshot measurement + virtual status_t setSnapshotTimeRef(struct timeval *refTime = NULL); + +#endif + + virtual int useBuffers(void* bufArr, int num); + virtual bool supportsExternalBuffering(); + + //Implementation of inherited interfaces + virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs); + virtual uint32_t * getOffsets() ; + virtual int getFd() ; + virtual int freeBuffer(void* buf); + + virtual int maxQueueableBuffers(unsigned int& queueable); + + ///Class specific functions + static void frameCallbackRelay(CameraFrame* caFrame); + void frameCallback(CameraFrame* caFrame); + + void displayThread(); + + private: + void 
destroy(); + bool processHalMsg(); + status_t PostFrame(ANativeWindowDisplayAdapter::DisplayFrame &dispFrame); + bool handleFrameReturn(); + status_t returnBuffersToWindow(); + +public: + + static const int DISPLAY_TIMEOUT; + static const int FAILED_DQS_TO_SUSPEND; + + class DisplayThread : public Thread + { + ANativeWindowDisplayAdapter* mDisplayAdapter; + TIUTILS::MessageQueue mDisplayThreadQ; + + public: + DisplayThread(ANativeWindowDisplayAdapter* da) + : Thread(false), mDisplayAdapter(da) { } + + ///Returns a reference to the display message Q for display adapter to post messages + TIUTILS::MessageQueue& msgQ() + { + return mDisplayThreadQ; + } + + virtual bool threadLoop() + { + mDisplayAdapter->displayThread(); + return false; + } + + enum DisplayThreadCommands + { + DISPLAY_START, + DISPLAY_STOP, + DISPLAY_FRAME, + DISPLAY_EXIT + }; + }; + + //friend declarations +friend class DisplayThread; + +private: + int postBuffer(void* displayBuf); + +private: + bool mFirstInit; + bool mSuspend; + int mFailedDQs; + bool mPaused; //Pause state + preview_stream_ops_t* mANativeWindow; + sp mDisplayThread; + FrameProvider *mFrameProvider; ///Pointer to the frame provider interface + TIUTILS::MessageQueue mDisplayQ; + unsigned int mDisplayState; + ///@todo Have a common class for these members + mutable Mutex mLock; + bool mDisplayEnabled; + int mBufferCount; + buffer_handle_t** mBufferHandleMap; + IMG_native_handle_t** mGrallocHandleMap; + uint32_t* mOffsetsMap; + int mFD; + KeyedVector mFramesWithCameraAdapterMap; + sp mErrorNotifier; + + uint32_t mFrameWidth; + uint32_t mFrameHeight; + uint32_t mPreviewWidth; + uint32_t mPreviewHeight; + + uint32_t mXOff; + uint32_t mYOff; + + const char *mPixelFormat; + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + //Used for calculating standby to first shot + struct timeval mStandbyToShot; + bool mMeasureStandby; + //Used for shot to snapshot/shot calculation + struct timeval mStartCapture; + bool mShotToShot; + +#endif + 
+}; + +}; + diff --git a/camera/inc/BaseCameraAdapter.h b/camera/inc/BaseCameraAdapter.h new file mode 100644 index 0000000..bc38e00 --- /dev/null +++ b/camera/inc/BaseCameraAdapter.h @@ -0,0 +1,272 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + +#ifndef BASE_CAMERA_ADAPTER_H +#define BASE_CAMERA_ADAPTER_H + +#include "CameraHal.h" + +namespace android { + +class BaseCameraAdapter : public CameraAdapter +{ + +public: + + BaseCameraAdapter(); + virtual ~BaseCameraAdapter(); + + ///Initialzes the camera adapter creates any resources required + virtual status_t initialize(CameraProperties::Properties*) = 0; + + virtual int setErrorHandler(ErrorNotifier *errorNotifier); + + //Message/Frame notification APIs + virtual void enableMsgType(int32_t msgs, frame_callback callback=NULL, event_callback eventCb=NULL, void* cookie=NULL); + virtual void disableMsgType(int32_t msgs, void* cookie); + virtual void returnFrame(void * frameBuf, CameraFrame::FrameType frameType); + virtual void addFramePointers(void *frameBuf, void *y_uv); + virtual void removeFramePointers(); + + //APIs to configure Camera adapter and get the current parameter set + virtual status_t setParameters(const CameraParameters& params) = 0; + virtual void getParameters(CameraParameters& params) = 0; + + //API to send a command to the camera + virtual status_t sendCommand(CameraCommands operation, int value1 = 0, int value2 = 0, 
int value3 = 0 ); + + virtual status_t registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data); + + virtual status_t registerEndCaptureCallback(end_image_capture_callback callback, void *user_data); + + //Retrieves the current Adapter state + virtual AdapterState getState(); + //Retrieves the next Adapter state + virtual AdapterState getNextState(); + + // Rolls the state machine back to INTIALIZED_STATE from the current state + virtual status_t rollbackToInitializedState(); + +protected: + //The first two methods will try to switch the adapter state. + //Every call to setState() should be followed by a corresponding + //call to commitState(). If the state switch fails, then it will + //get reset to the previous state via rollbackState(). + virtual status_t setState(CameraCommands operation); + virtual status_t commitState(); + virtual status_t rollbackState(); + + // Retrieves the current Adapter state - for internal use (not locked) + virtual status_t getState(AdapterState &state); + // Retrieves the next Adapter state - for internal use (not locked) + virtual status_t getNextState(AdapterState &state); + + //-----------Interface that needs to be implemented by deriving classes -------------------- + + //Should be implmented by deriving classes in order to start image capture + virtual status_t takePicture(); + + //Should be implmented by deriving classes in order to start image capture + virtual status_t stopImageCapture(); + + //Should be implmented by deriving classes in order to start temporal bracketing + virtual status_t startBracketing(int range); + + //Should be implemented by deriving classes in order to stop temporal bracketing + virtual status_t stopBracketing(); + + //Should be implemented by deriving classes in oder to initiate autoFocus + virtual status_t autoFocus(); + + //Should be implemented by deriving classes in oder to initiate autoFocus + virtual status_t cancelAutoFocus(); + + //Should be called by deriving 
classes in order to do some bookkeeping + virtual status_t startVideoCapture(); + + //Should be called by deriving classes in order to do some bookkeeping + virtual status_t stopVideoCapture(); + + //Should be implemented by deriving classes in order to start camera preview + virtual status_t startPreview(); + + //Should be implemented by deriving classes in order to stop camera preview + virtual status_t stopPreview(); + + //Should be implemented by deriving classes in order to start smooth zoom + virtual status_t startSmoothZoom(int targetIdx); + + //Should be implemented by deriving classes in order to stop smooth zoom + virtual status_t stopSmoothZoom(); + + //Should be implemented by deriving classes in order to stop smooth zoom + virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable); + + //Should be implemented by deriving classes in order queue a released buffer in CameraAdapter + virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType); + + //API to get the frame size required to be allocated. 
This size is used to override the size passed + //by camera service when VSTAB/VNF is turned ON for example + virtual status_t getFrameSize(size_t &width, size_t &height); + + //API to get required data frame size + virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount); + + //API to get required picture buffers size with the current configuration in CameraParameters + virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount); + + // Should be implemented by deriving classes in order to start face detection + // ( if supported ) + virtual status_t startFaceDetection(); + + // Should be implemented by deriving classes in order to stop face detection + // ( if supported ) + virtual status_t stopFaceDetection(); + + virtual status_t switchToExecuting(); + + // Receive orientation events from CameraHal + virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt); + + // ---------------------Interface ends----------------------------------- + + status_t notifyFocusSubscribers(CameraHalEvent::FocusStatus status); + status_t notifyShutterSubscribers(); + status_t notifyZoomSubscribers(int zoomIdx, bool targetReached); + status_t notifyFaceSubscribers(sp &faces); + + //Send the frame to subscribers + status_t sendFrameToSubscribers(CameraFrame *frame); + + //Resets the refCount for this particular frame + status_t resetFrameRefCount(CameraFrame &frame); + + //A couple of helper functions + void setFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType, int refCount); + int getFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType); + int setInitFrameRefCount(void* buf, unsigned int mask); + +// private member functions +private: + status_t __sendFrameToSubscribers(CameraFrame* frame, + KeyedVector *subscribers, + CameraFrame::FrameType frameType); + status_t rollbackToPreviousState(); + +// protected data types and variables +protected: + enum FrameState { + STOPPED = 0, + RUNNING + }; + + enum FrameCommands { 
+ START_PREVIEW = 0, + START_RECORDING, + RETURN_FRAME, + STOP_PREVIEW, + STOP_RECORDING, + DO_AUTOFOCUS, + TAKE_PICTURE, + FRAME_EXIT + }; + + enum AdapterCommands { + ACK = 0, + ERROR + }; + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + struct timeval mStartFocus; + struct timeval mStartCapture; + +#endif + + mutable Mutex mReturnFrameLock; + + //Lock protecting the Adapter state + mutable Mutex mLock; + AdapterState mAdapterState; + AdapterState mNextState; + + //Different frame subscribers get stored using these + KeyedVector mFrameSubscribers; + KeyedVector mFrameDataSubscribers; + KeyedVector mVideoSubscribers; + KeyedVector mImageSubscribers; + KeyedVector mRawSubscribers; + KeyedVector mFocusSubscribers; + KeyedVector mZoomSubscribers; + KeyedVector mShutterSubscribers; + KeyedVector mFaceSubscribers; + + //Preview buffer management data + int *mPreviewBuffers; + int mPreviewBufferCount; + size_t mPreviewBuffersLength; + KeyedVector mPreviewBuffersAvailable; + mutable Mutex mPreviewBufferLock; + + //Video buffer management data + int *mVideoBuffers; + KeyedVector mVideoBuffersAvailable; + int mVideoBuffersCount; + size_t mVideoBuffersLength; + mutable Mutex mVideoBufferLock; + + //Image buffer management data + int *mCaptureBuffers; + KeyedVector mCaptureBuffersAvailable; + int mCaptureBuffersCount; + size_t mCaptureBuffersLength; + mutable Mutex mCaptureBufferLock; + + //Metadata buffermanagement + int *mPreviewDataBuffers; + KeyedVector mPreviewDataBuffersAvailable; + int mPreviewDataBuffersCount; + size_t mPreviewDataBuffersLength; + mutable Mutex mPreviewDataBufferLock; + + TIUTILS::MessageQueue mFrameQ; + TIUTILS::MessageQueue mAdapterQ; + mutable Mutex mSubscriberLock; + ErrorNotifier *mErrorNotifier; + release_image_buffers_callback mReleaseImageBuffersCallback; + end_image_capture_callback mEndImageCaptureCallback; + void *mReleaseData; + void *mEndCaptureData; + bool mRecording; + + uint32_t mFramesWithDucati; + uint32_t 
mFramesWithDisplay; + uint32_t mFramesWithEncoder; + +#ifdef DEBUG_LOG + KeyedVector mBuffersWithDucati; +#endif + + KeyedVector mFrameQueue; +}; + +}; + +#endif //BASE_CAMERA_ADAPTER_H + + diff --git a/camera/inc/CameraHal.h b/camera/inc/CameraHal.h new file mode 100644 index 0000000..2ee35f2 --- /dev/null +++ b/camera/inc/CameraHal.h @@ -0,0 +1,1272 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + +#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_H +#define ANDROID_HARDWARE_CAMERA_HARDWARE_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "binder/MemoryBase.h" +#include "binder/MemoryHeapBase.h" +#include +#include +#include +#include "MessageQueue.h" +#include "Semaphore.h" +#include "CameraProperties.h" +#include "DebugUtils.h" +#include "SensorListener.h" + +#include +#include + +#define MIN_WIDTH 640 +#define MIN_HEIGHT 480 +#define PICTURE_WIDTH 3264 /* 5mp - 2560. 8mp - 3280 */ /* Make sure it is a multiple of 16. */ +#define PICTURE_HEIGHT 2448 /* 5mp - 2048. 8mp - 2464 */ /* Make sure it is a multiple of 16. 
*/ +#define PREVIEW_WIDTH 176 +#define PREVIEW_HEIGHT 144 +#define PIXEL_FORMAT V4L2_PIX_FMT_UYVY + +#define VIDEO_FRAME_COUNT_MAX 8 //NUM_OVERLAY_BUFFERS_REQUESTED +#define MAX_CAMERA_BUFFERS 8 //NUM_OVERLAY_BUFFERS_REQUESTED +#define MAX_ZOOM 3 +#define THUMB_WIDTH 80 +#define THUMB_HEIGHT 60 +#define PIX_YUV422I 0 +#define PIX_YUV420P 1 + +#define SATURATION_OFFSET 100 +#define SHARPNESS_OFFSET 100 +#define CONTRAST_OFFSET 100 + +#define CAMHAL_GRALLOC_USAGE GRALLOC_USAGE_HW_TEXTURE | \ + GRALLOC_USAGE_HW_RENDER | \ + GRALLOC_USAGE_SW_READ_RARELY | \ + GRALLOC_USAGE_SW_WRITE_NEVER + +//Enables Absolute PPM measurements in logcat +#define PPM_INSTRUMENTATION_ABS 1 + +#define LOCK_BUFFER_TRIES 5 +#define HAL_PIXEL_FORMAT_NV12 0x100 + +#define CAMHAL_LOGI ALOGI + +//Uncomment to enable more verbose/debug logs +#define DEBUG_LOG + +///Camera HAL Logging Functions +#ifndef DEBUG_LOG + +#define CAMHAL_LOGDA(str) +#define CAMHAL_LOGDB(str, ...) +#define CAMHAL_LOGVA(str) +#define CAMHAL_LOGVB(str, ...) 
+ +#define CAMHAL_LOGEA ALOGE +#define CAMHAL_LOGEB ALOGE + +#undef LOG_FUNCTION_NAME +#undef LOG_FUNCTION_NAME_EXIT +#define LOG_FUNCTION_NAME +#define LOG_FUNCTION_NAME_EXIT + +#else + +#define CAMHAL_LOGDA DBGUTILS_LOGDA +#define CAMHAL_LOGDB DBGUTILS_LOGDB +#define CAMHAL_LOGVA DBGUTILS_LOGVA +#define CAMHAL_LOGVB DBGUTILS_LOGVB + +#define CAMHAL_LOGEA DBGUTILS_LOGEA +#define CAMHAL_LOGEB DBGUTILS_LOGEB + +#endif + + + +#define NONNEG_ASSIGN(x,y) \ + if(x > -1) \ + y = x + +namespace android { + +#define PARAM_BUFFER 6000 + +///Forward declarations +class CameraHal; +class CameraFrame; +class CameraHalEvent; +class DisplayFrame; + +class CameraArea : public RefBase +{ +public: + + CameraArea(int32_t top, + int32_t left, + int32_t bottom, + int32_t right, + uint32_t weight) : mTop(top), + mLeft(left), + mBottom(bottom), + mRight(right), + mWeight(weight) {} + + status_t transfrom(uint32_t width, + uint32_t height, + int32_t &top, + int32_t &left, + uint32_t &areaWidth, + uint32_t &areaHeight); + + bool isValid() + { + return ( ( 0 != mTop ) || ( 0 != mLeft ) || ( 0 != mBottom ) || ( 0 != mRight) ); + } + + bool isZeroArea() + { + return ( (0 == mTop ) && ( 0 == mLeft ) && ( 0 == mBottom ) + && ( 0 == mRight ) && ( 0 == mWeight )); + } + + uint32_t getWeight() + { + return mWeight; + } + + bool compare(const sp &area); + + static status_t parseAreas(const char *area, + uint32_t areaLength, + Vector< sp > &areas); + + static status_t checkArea(int32_t top, + int32_t left, + int32_t bottom, + int32_t right, + int32_t weight); + + static bool areAreasDifferent(Vector< sp > &, Vector< sp > &); + +protected: + static const int32_t TOP = -1000; + static const int32_t LEFT = -1000; + static const int32_t BOTTOM = 1000; + static const int32_t RIGHT = 1000; + static const int32_t WEIGHT_MIN = 1; + static const int32_t WEIGHT_MAX = 1000; + + int32_t mTop; + int32_t mLeft; + int32_t mBottom; + int32_t mRight; + uint32_t mWeight; +}; + +class CameraFDResult : public RefBase 
+{ +public: + + CameraFDResult() : mFaceData(NULL) {}; + CameraFDResult(camera_frame_metadata_t *faces) : mFaceData(faces) {}; + + virtual ~CameraFDResult() { + if ( ( NULL != mFaceData ) && ( NULL != mFaceData->faces ) ) { + free(mFaceData->faces); + free(mFaceData); + mFaceData=NULL; + } + + if(( NULL != mFaceData )) + { + free(mFaceData); + mFaceData = NULL; + } + } + + camera_frame_metadata_t *getFaceResult() { return mFaceData; }; + + static const ssize_t TOP = -1000; + static const ssize_t LEFT = -1000; + static const ssize_t BOTTOM = 1000; + static const ssize_t RIGHT = 1000; + static const ssize_t INVALID_DATA = -2000; + +private: + + camera_frame_metadata_t *mFaceData; +}; + +class CameraFrame +{ + public: + + enum FrameType + { + PREVIEW_FRAME_SYNC = 0x1, ///SYNC implies that the frame needs to be explicitly returned after consuming in order to be filled by camera again + PREVIEW_FRAME = 0x2 , ///Preview frame includes viewfinder and snapshot frames + IMAGE_FRAME_SYNC = 0x4, ///Image Frame is the image capture output frame + IMAGE_FRAME = 0x8, + VIDEO_FRAME_SYNC = 0x10, ///Timestamp will be updated for these frames + VIDEO_FRAME = 0x20, + FRAME_DATA_SYNC = 0x40, ///Any extra data assosicated with the frame. 
Always synced with the frame + FRAME_DATA= 0x80, + RAW_FRAME = 0x100, + SNAPSHOT_FRAME = 0x200, + ALL_FRAMES = 0xFFFF ///Maximum of 16 frame types supported + }; + + enum FrameQuirks + { + ENCODE_RAW_YUV422I_TO_JPEG = 0x1 << 0, + HAS_EXIF_DATA = 0x1 << 1, + }; + + //default contrustor + CameraFrame(): + mCookie(NULL), + mCookie2(NULL), + mBuffer(NULL), + mFrameType(0), + mTimestamp(0), + mWidth(0), + mHeight(0), + mOffset(0), + mAlignment(0), + mFd(0), + mLength(0), + mFrameMask(0), + mQuirks(0) { + + mYuv[0] = NULL; + mYuv[1] = NULL; + } + + //copy constructor + CameraFrame(const CameraFrame &frame) : + mCookie(frame.mCookie), + mCookie2(frame.mCookie2), + mBuffer(frame.mBuffer), + mFrameType(frame.mFrameType), + mTimestamp(frame.mTimestamp), + mWidth(frame.mWidth), + mHeight(frame.mHeight), + mOffset(frame.mOffset), + mAlignment(frame.mAlignment), + mFd(frame.mFd), + mLength(frame.mLength), + mFrameMask(frame.mFrameMask), + mQuirks(frame.mQuirks) { + + mYuv[0] = frame.mYuv[0]; + mYuv[1] = frame.mYuv[1]; + } + + void *mCookie; + void *mCookie2; + void *mBuffer; + int mFrameType; + nsecs_t mTimestamp; + unsigned int mWidth, mHeight; + uint32_t mOffset; + unsigned int mAlignment; + int mFd; + uint32_t mLength; + unsigned mFrameMask; + unsigned int mQuirks; + unsigned int mYuv[2]; + ///@todo add other member vars like stride etc +}; + +enum CameraHalError +{ + CAMERA_ERROR_FATAL = 0x1, //Fatal errors can only be recovered by restarting media server + CAMERA_ERROR_HARD = 0x2, // Hard errors are hardware hangs that may be recoverable by resetting the hardware internally within the adapter + CAMERA_ERROR_SOFT = 0x4, // Soft errors are non fatal errors that can be recovered from without needing to stop use-case +}; + +///Common Camera Hal Event class which is visible to CameraAdapter,DisplayAdapter and AppCallbackNotifier +///@todo Rename this class to CameraEvent +class CameraHalEvent +{ +public: + //Enums + enum CameraHalEventType { + NO_EVENTS = 0x0, + 
EVENT_FOCUS_LOCKED = 0x1, + EVENT_FOCUS_ERROR = 0x2, + EVENT_ZOOM_INDEX_REACHED = 0x4, + EVENT_SHUTTER = 0x8, + EVENT_FACE = 0x10, + ///@remarks Future enum related to display, like frame displayed event, could be added here + ALL_EVENTS = 0xFFFF ///Maximum of 16 event types supported + }; + + enum FocusStatus { + FOCUS_STATUS_SUCCESS = 0x1, + FOCUS_STATUS_FAIL = 0x2, + FOCUS_STATUS_PENDING = 0x4, + FOCUS_STATUS_DONE = 0x8, + }; + + ///Class declarations + ///@remarks Add a new class for a new event type added above + + //Shutter event specific data + typedef struct ShutterEventData_t { + bool shutterClosed; + }ShutterEventData; + + ///Focus event specific data + typedef struct FocusEventData_t { + FocusStatus focusStatus; + int currentFocusValue; + } FocusEventData; + + ///Zoom specific event data + typedef struct ZoomEventData_t { + int currentZoomIndex; + bool targetZoomIndexReached; + } ZoomEventData; + + typedef struct FaceData_t { + int32_t top; + int32_t left; + int32_t bottom; + int32_t right; + uint32_t score; + } FaceData; + + typedef sp FaceEventData; + + class CameraHalEventData : public RefBase{ + + public: + + CameraHalEvent::FocusEventData focusEvent; + CameraHalEvent::ZoomEventData zoomEvent; + CameraHalEvent::ShutterEventData shutterEvent; + CameraHalEvent::FaceEventData faceEvent; + }; + + //default contrustor + CameraHalEvent(): + mCookie(NULL), + mEventType(NO_EVENTS) {} + + //copy constructor + CameraHalEvent(const CameraHalEvent &event) : + mCookie(event.mCookie), + mEventType(event.mEventType), + mEventData(event.mEventData) {}; + + void* mCookie; + CameraHalEventType mEventType; + sp mEventData; + +}; + +/// Have a generic callback class based on template - to adapt CameraFrame and Event +typedef void (*frame_callback) (CameraFrame *cameraFrame); +typedef void (*event_callback) (CameraHalEvent *event); + +//signals CameraHAL to relase image buffers +typedef void (*release_image_buffers_callback) (void *userData); +typedef void 
(*end_image_capture_callback) (void *userData); + +/** + * Interface class implemented by classes that have some events to communicate to dependendent classes + * Dependent classes use this interface for registering for events + */ +class MessageNotifier +{ +public: + static const uint32_t EVENT_BIT_FIELD_POSITION; + static const uint32_t FRAME_BIT_FIELD_POSITION; + + ///@remarks Msg type comes from CameraFrame and CameraHalEvent classes + /// MSB 16 bits is for events and LSB 16 bits is for frame notifications + /// FrameProvider and EventProvider classes act as helpers to event/frame + /// consumers to call this api + virtual void enableMsgType(int32_t msgs, frame_callback frameCb=NULL, event_callback eventCb=NULL, void* cookie=NULL) = 0; + virtual void disableMsgType(int32_t msgs, void* cookie) = 0; + + virtual ~MessageNotifier() {}; +}; + +class ErrorNotifier : public virtual RefBase +{ +public: + virtual void errorNotify(int error) = 0; + + virtual ~ErrorNotifier() {}; +}; + + +/** + * Interace class abstraction for Camera Adapter to act as a frame provider + * This interface is fully implemented by Camera Adapter + */ +class FrameNotifier : public MessageNotifier +{ +public: + virtual void returnFrame(void* frameBuf, CameraFrame::FrameType frameType) = 0; + virtual void addFramePointers(void *frameBuf, void *buf) = 0; + virtual void removeFramePointers() = 0; + + virtual ~FrameNotifier() {}; +}; + +/** * Wrapper class around Frame Notifier, which is used by display and notification classes for interacting with Camera Adapter + */ +class FrameProvider +{ + FrameNotifier* mFrameNotifier; + void* mCookie; + frame_callback mFrameCallback; + +public: + FrameProvider(FrameNotifier *fn, void* cookie, frame_callback frameCallback) + :mFrameNotifier(fn), mCookie(cookie),mFrameCallback(frameCallback) { } + + int enableFrameNotification(int32_t frameTypes); + int disableFrameNotification(int32_t frameTypes); + int returnFrame(void *frameBuf, CameraFrame::FrameType 
frameType); + void addFramePointers(void *frameBuf, void *buf); + void removeFramePointers(); +}; + +/** Wrapper class around MessageNotifier, which is used by display and notification classes for interacting with + * Camera Adapter + */ +class EventProvider +{ +public: + MessageNotifier* mEventNotifier; + void* mCookie; + event_callback mEventCallback; + +public: + EventProvider(MessageNotifier *mn, void* cookie, event_callback eventCallback) + :mEventNotifier(mn), mCookie(cookie), mEventCallback(eventCallback) {} + + int enableEventNotification(int32_t eventTypes); + int disableEventNotification(int32_t eventTypes); +}; + +/* + * Interface for providing buffers + */ +class BufferProvider +{ +public: + virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs) = 0; + + //additional methods used for memory mapping + virtual uint32_t * getOffsets() = 0; + virtual int getFd() = 0; + + virtual int freeBuffer(void* buf) = 0; + + virtual ~BufferProvider() {} +}; + +/** + * Class for handling data and notify callbacks to application + */ +class AppCallbackNotifier: public ErrorNotifier , public virtual RefBase +{ + +public: + + ///Constants + static const int NOTIFIER_TIMEOUT; + static const int32_t MAX_BUFFERS = 8; + + enum NotifierCommands + { + NOTIFIER_CMD_PROCESS_EVENT, + NOTIFIER_CMD_PROCESS_FRAME, + NOTIFIER_CMD_PROCESS_ERROR + }; + + enum NotifierState + { + NOTIFIER_STOPPED, + NOTIFIER_STARTED, + NOTIFIER_EXITED + }; + +public: + + ~AppCallbackNotifier(); + + ///Initialzes the callback notifier, creates any resources required + status_t initialize(); + + ///Starts the callbacks to application + status_t start(); + + ///Stops the callbacks from going to application + status_t stop(); + + void setEventProvider(int32_t eventMask, MessageNotifier * eventProvider); + void setFrameProvider(FrameNotifier *frameProvider); + + //All sub-components of Camera HAL call this whenever any error happens + virtual void errorNotify(int 
error); + + status_t startPreviewCallbacks(CameraParameters ¶ms, void *buffers, uint32_t *offsets, int fd, uint32_t length, uint32_t count); + status_t stopPreviewCallbacks(); + + status_t enableMsgType(int32_t msgType); + status_t disableMsgType(int32_t msgType); + + //API for enabling/disabling measurement data + void setMeasurements(bool enable); + + //thread loops + bool notificationThread(); + + ///Notification callback functions + static void frameCallbackRelay(CameraFrame* caFrame); + static void eventCallbackRelay(CameraHalEvent* chEvt); + void frameCallback(CameraFrame* caFrame); + void eventCallback(CameraHalEvent* chEvt); + void flushAndReturnFrames(); + + void setCallbacks(CameraHal *cameraHal, + camera_notify_callback notify_cb, + camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void *user); + + //Set Burst mode + void setBurst(bool burst); + + //Notifications from CameraHal for video recording case + status_t startRecording(); + status_t stopRecording(); + status_t initSharedVideoBuffers(void *buffers, uint32_t *offsets, int fd, uint32_t length, uint32_t count, void *vidBufs); + status_t releaseRecordingFrame(const void *opaque); + + status_t useMetaDataBufferMode(bool enable); + + void EncoderDoneCb(void*, void*, CameraFrame::FrameType type, void* cookie1, void* cookie2); + + void useVideoBuffers(bool useVideoBuffers); + + bool getUesVideoBuffers(); + void setVideoRes(int width, int height); + + void flushEventQueue(); + + //Internal class definitions + class NotificationThread : public Thread { + AppCallbackNotifier* mAppCallbackNotifier; + TIUTILS::MessageQueue mNotificationThreadQ; + public: + enum NotificationThreadCommands + { + NOTIFIER_START, + NOTIFIER_STOP, + NOTIFIER_EXIT, + }; + public: + NotificationThread(AppCallbackNotifier* nh) + : Thread(false), mAppCallbackNotifier(nh) { } + virtual bool threadLoop() { + return mAppCallbackNotifier->notificationThread(); + } + + 
TIUTILS::MessageQueue &msgQ() { return mNotificationThreadQ;} + }; + + //Friend declarations + friend class NotificationThread; + +private: + void notifyEvent(); + void notifyFrame(); + bool processMessage(); + void releaseSharedVideoBuffers(); + status_t dummyRaw(); + void copyAndSendPictureFrame(CameraFrame* frame, int32_t msgType); + void copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType); + +private: + mutable Mutex mLock; + mutable Mutex mBurstLock; + CameraHal* mCameraHal; + camera_notify_callback mNotifyCb; + camera_data_callback mDataCb; + camera_data_timestamp_callback mDataCbTimestamp; + camera_request_memory mRequestMemory; + void *mCallbackCookie; + + //Keeps Video MemoryHeaps and Buffers within + //these objects + KeyedVector mVideoHeaps; + KeyedVector mVideoBuffers; + KeyedVector mVideoMap; + + //Keeps list of Gralloc handles and associated Video Metadata Buffers + KeyedVector mVideoMetadataBufferMemoryMap; + KeyedVector mVideoMetadataBufferReverseMap; + + bool mBufferReleased; + + sp< NotificationThread> mNotificationThread; + EventProvider *mEventProvider; + FrameProvider *mFrameProvider; + TIUTILS::MessageQueue mEventQ; + TIUTILS::MessageQueue mFrameQ; + NotifierState mNotifierState; + + bool mPreviewing; + camera_memory_t* mPreviewMemory; + unsigned char* mPreviewBufs[MAX_BUFFERS]; + int mPreviewBufCount; + const char *mPreviewPixelFormat; + KeyedVector > mSharedPreviewHeaps; + KeyedVector > mSharedPreviewBuffers; + + //Burst mode active + bool mBurst; + mutable Mutex mRecordingLock; + bool mRecording; + bool mMeasurementEnabled; + + bool mUseMetaDataBufferMode; + bool mRawAvailable; + + bool mUseVideoBuffers; + + int mVideoWidth; + int mVideoHeight; + +}; + + +/** + * Class used for allocating memory for JPEG bit stream buffers, output buffers of camera in no overlay case + */ +class MemoryManager : public BufferProvider, public virtual RefBase +{ +public: + MemoryManager():mIonFd(-1){ } + + ///Initializes the memory manager creates any 
resources required + status_t initialize() { return NO_ERROR; } + + int setErrorHandler(ErrorNotifier *errorNotifier); + virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs); + virtual uint32_t * getOffsets(); + virtual int getFd() ; + virtual int freeBuffer(void* buf); + +private: + + sp mErrorNotifier; + int mIonFd; + KeyedVector mIonHandleMap; + KeyedVector mIonFdMap; + KeyedVector mIonBufLength; +}; + + + + +/** + * CameraAdapter interface class + * Concrete classes derive from this class and provide implementations based on the specific camera h/w interface + */ + +class CameraAdapter: public FrameNotifier, public virtual RefBase +{ +protected: + enum AdapterActiveStates { + INTIALIZED_ACTIVE = 1 << 0, + LOADED_PREVIEW_ACTIVE = 1 << 1, + PREVIEW_ACTIVE = 1 << 2, + LOADED_CAPTURE_ACTIVE = 1 << 3, + CAPTURE_ACTIVE = 1 << 4, + BRACKETING_ACTIVE = 1 << 5, + AF_ACTIVE = 1 << 6, + ZOOM_ACTIVE = 1 << 7, + VIDEO_ACTIVE = 1 << 8, + }; +public: + typedef struct + { + void *mBuffers; + uint32_t *mOffsets; + int mFd; + uint32_t mLength; + uint32_t mCount; + uint32_t mMaxQueueable; + } BuffersDescriptor; + + enum CameraCommands + { + CAMERA_START_PREVIEW = 0, + CAMERA_STOP_PREVIEW = 1, + CAMERA_START_VIDEO = 2, + CAMERA_STOP_VIDEO = 3, + CAMERA_START_IMAGE_CAPTURE = 4, + CAMERA_STOP_IMAGE_CAPTURE = 5, + CAMERA_PERFORM_AUTOFOCUS = 6, + CAMERA_CANCEL_AUTOFOCUS = 7, + CAMERA_PREVIEW_FLUSH_BUFFERS = 8, + CAMERA_START_SMOOTH_ZOOM = 9, + CAMERA_STOP_SMOOTH_ZOOM = 10, + CAMERA_USE_BUFFERS_PREVIEW = 11, + CAMERA_SET_TIMEOUT = 12, + CAMERA_CANCEL_TIMEOUT = 13, + CAMERA_START_BRACKET_CAPTURE = 14, + CAMERA_STOP_BRACKET_CAPTURE = 15, + CAMERA_QUERY_RESOLUTION_PREVIEW = 16, + CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE = 17, + CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA = 18, + CAMERA_USE_BUFFERS_IMAGE_CAPTURE = 19, + CAMERA_USE_BUFFERS_PREVIEW_DATA = 20, + CAMERA_TIMEOUT_EXPIRED = 21, + CAMERA_START_FD = 22, + CAMERA_STOP_FD = 23, + 
CAMERA_SWITCH_TO_EXECUTING = 24, + }; + + enum CameraMode + { + CAMERA_PREVIEW, + CAMERA_IMAGE_CAPTURE, + CAMERA_VIDEO, + CAMERA_MEASUREMENT + }; + + enum AdapterState { + INTIALIZED_STATE = INTIALIZED_ACTIVE, + LOADED_PREVIEW_STATE = LOADED_PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + PREVIEW_STATE = PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + LOADED_CAPTURE_STATE = LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + CAPTURE_STATE = CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + BRACKETING_STATE = BRACKETING_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE , + AF_STATE = AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + ZOOM_STATE = ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + VIDEO_STATE = VIDEO_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + VIDEO_AF_STATE = VIDEO_ACTIVE | AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + VIDEO_ZOOM_STATE = VIDEO_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + VIDEO_LOADED_CAPTURE_STATE = VIDEO_ACTIVE | LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + VIDEO_CAPTURE_STATE = VIDEO_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + AF_ZOOM_STATE = AF_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + BRACKETING_ZOOM_STATE = BRACKETING_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE, + }; + +public: + + ///Initialzes the camera adapter creates any resources required + virtual int initialize(CameraProperties::Properties*) = 0; + + virtual int setErrorHandler(ErrorNotifier *errorNotifier) = 0; + + //Message/Frame notification APIs + virtual void enableMsgType(int32_t msgs, + frame_callback callback = NULL, + event_callback eventCb = NULL, + void *cookie = NULL) = 0; + virtual void disableMsgType(int32_t msgs, void* cookie) = 0; + virtual void returnFrame(void* frameBuf, CameraFrame::FrameType frameType) = 0; + virtual void addFramePointers(void *frameBuf, void *buf) = 0; + virtual void removeFramePointers() = 0; + + //APIs to configure Camera adapter and 
get the current parameter set + virtual int setParameters(const CameraParameters& params) = 0; + virtual void getParameters(CameraParameters& params) = 0; + + //API to flush the buffers from Camera + status_t flushBuffers() + { + return sendCommand(CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS); + } + + //Registers callback for returning image buffers back to CameraHAL + virtual int registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data) = 0; + + //Registers callback, which signals a completed image capture + virtual int registerEndCaptureCallback(end_image_capture_callback callback, void *user_data) = 0; + + //API to send a command to the camera + virtual status_t sendCommand(CameraCommands operation, int value1=0, int value2=0, int value3=0) = 0; + + virtual ~CameraAdapter() {}; + + //Retrieves the current Adapter state + virtual AdapterState getState() = 0; + + //Retrieves the next Adapter state + virtual AdapterState getNextState() = 0; + + // Receive orientation events from CameraHal + virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt) = 0; + + // Rolls the state machine back to INTIALIZED_STATE from the current state + virtual status_t rollbackToInitializedState() = 0; + + // Retrieves the current Adapter state - for internal use (not locked) + virtual status_t getState(AdapterState &state) = 0; + // Retrieves the next Adapter state - for internal use (not locked) + virtual status_t getNextState(AdapterState &state) = 0; + +protected: + //The first two methods will try to switch the adapter state. + //Every call to setState() should be followed by a corresponding + //call to commitState(). If the state switch fails, then it will + //get reset to the previous state via rollbackState(). 
+ virtual status_t setState(CameraCommands operation) = 0; + virtual status_t commitState() = 0; + virtual status_t rollbackState() = 0; +}; + +class DisplayAdapter : public BufferProvider, public virtual RefBase +{ +public: + typedef struct S3DParameters_t + { + int mode; + int framePacking; + int order; + int subSampling; + } S3DParameters; + + ///Initializes the display adapter creates any resources required + virtual int initialize() = 0; + + virtual int setPreviewWindow(struct preview_stream_ops *window) = 0; + virtual int setFrameProvider(FrameNotifier *frameProvider) = 0; + virtual int setErrorHandler(ErrorNotifier *errorNotifier) = 0; + virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL, S3DParameters *s3dParams = NULL) = 0; + virtual int disableDisplay(bool cancel_buffer = true) = 0; + //Used for Snapshot review temp. pause + virtual int pauseDisplay(bool pause) = 0; + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + //Used for shot to snapshot measurement + virtual int setSnapshotTimeRef(struct timeval *refTime = NULL) = 0; +#endif + + virtual int useBuffers(void *bufArr, int num) = 0; + virtual bool supportsExternalBuffering() = 0; + + // Get max queueable buffers display supports + // This function should only be called after + // allocateBuffer + virtual int maxQueueableBuffers(unsigned int& queueable) = 0; +}; + +static void releaseImageBuffers(void *userData); + +static void endImageCapture(void *userData); + + /** + Implementation of the Android Camera hardware abstraction layer + + This class implements the interface methods defined in CameraHardwareInterface + for the OMAP4 platform + +*/ +class CameraHal + +{ + +public: + ///Constants + static const int NO_BUFFERS_PREVIEW; + static const int NO_BUFFERS_IMAGE_CAPTURE; + static const uint32_t VFR_SCALE = 1000; + + + /*--------------------Interface Methods---------------------------------*/ + + //@{ +public: + + /** Set the notification and data callbacks */ + void 
setCallbacks(camera_notify_callback notify_cb, + camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void *user); + + /** Receives orientation events from SensorListener **/ + void onOrientationEvent(uint32_t orientation, uint32_t tilt); + + /** + * The following three functions all take a msgtype, + * which is a bitmask of the messages defined in + * include/ui/Camera.h + */ + + /** + * Enable a message, or set of messages. + */ + void enableMsgType(int32_t msgType); + + /** + * Disable a message, or a set of messages. + */ + void disableMsgType(int32_t msgType); + + /** + * Query whether a message, or a set of messages, is enabled. + * Note that this is operates as an AND, if any of the messages + * queried are off, this will return false. + */ + int msgTypeEnabled(int32_t msgType); + + /** + * Start preview mode. + */ + int startPreview(); + + /** + * Only used if overlays are used for camera preview. + */ + int setPreviewWindow(struct preview_stream_ops *window); + + /** + * Stop a previously started preview. + */ + void stopPreview(); + + /** + * Returns true if preview is enabled. + */ + bool previewEnabled(); + + /** + * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME + * message is sent with the corresponding frame. Every record frame must be released + * by calling releaseRecordingFrame(). + */ + int startRecording(); + + /** + * Stop a previously started recording. + */ + void stopRecording(); + + /** + * Returns true if recording is enabled. + */ + int recordingEnabled(); + + /** + * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME. + */ + void releaseRecordingFrame(const void *opaque); + + /** + * Start auto focus, the notification callback routine is called + * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() + * will be called again if another auto focus is needed. 
+ */ + int autoFocus(); + + /** + * Cancels auto-focus function. If the auto-focus is still in progress, + * this function will cancel it. Whether the auto-focus is in progress + * or not, this function will return the focus position to the default. + * If the camera does not support auto-focus, this is a no-op. + */ + int cancelAutoFocus(); + + /** + * Take a picture. + */ + int takePicture(); + + /** + * Cancel a picture that was started with takePicture. Calling this + * method when no picture is being taken is a no-op. + */ + int cancelPicture(); + + /** Set the camera parameters. */ + int setParameters(const char* params); + int setParameters(const CameraParameters& params); + + /** Return the camera parameters. */ + char* getParameters(); + void putParameters(char *); + + /** + * Send command to camera driver. + */ + int sendCommand(int32_t cmd, int32_t arg1, int32_t arg2); + + /** + * Release the hardware resources owned by this object. Note that this is + * *not* done in the destructor. 
+ */ + void release(); + + /** + * Dump state of the camera hardware + */ + int dump(int fd) const; + + + status_t storeMetaDataInBuffers(bool enable); + + //@} + +/*--------------------Internal Member functions - Public---------------------------------*/ + +public: + /** @name internalFunctionsPublic */ + //@{ + + /** Constructor of CameraHal */ + CameraHal(int cameraId); + + // Destructor of CameraHal + ~CameraHal(); + + /** Initialize CameraHal */ + status_t initialize(CameraProperties::Properties*); + + /** Deinitialize CameraHal */ + void deinitialize(); + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + //Uses the constructor timestamp as a reference to calcluate the + // elapsed time + static void PPM(const char *); + //Uses a user provided timestamp as a reference to calcluate the + // elapsed time + static void PPM(const char *, struct timeval*, ...); + +#endif + + /** Free image bufs */ + status_t freeImageBufs(); + + //Signals the end of image capture + status_t signalEndImageCapture(); + + //Events + static void eventCallbackRelay(CameraHalEvent* event); + void eventCallback(CameraHalEvent* event); + void setEventProvider(int32_t eventMask, MessageNotifier * eventProvider); + +/*--------------------Internal Member functions - Private---------------------------------*/ +private: + + /** @name internalFunctionsPrivate */ + //@{ + + /** Set the camera parameters specific to Video Recording. */ + bool setVideoModeParameters(const CameraParameters&); + + /** Reset the camera parameters specific to Video Recording. */ + bool resetVideoModeParameters(); + + /** Restart the preview with setParameter. 
*/ + status_t restartPreview(); + + status_t parseResolution(const char *resStr, int &width, int &height); + + void insertSupportedParams(); + + /** Allocate preview data buffers */ + status_t allocPreviewDataBufs(uint32_t size, uint32_t bufferCount); + + /** Free preview data buffers */ + status_t freePreviewDataBufs(); + + /** Allocate preview buffers */ + status_t allocPreviewBufs(int width, int height, const char* previewFormat, unsigned int bufferCount, unsigned int &max_queueable); + + /** Allocate video buffers */ + status_t allocVideoBufs(uint32_t width, uint32_t height, uint32_t bufferCount); + + /** Allocate image capture buffers */ + status_t allocImageBufs(unsigned int width, unsigned int height, uint32_t length, const char* previewFormat, unsigned int bufferCount); + + /** Free preview buffers */ + status_t freePreviewBufs(); + + /** Free video bufs */ + status_t freeVideoBufs(void *bufs); + + //Check if a given resolution is supported by the current camera + //instance + bool isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions); + + //Check if a given parameter is supported by the current camera + // instance + bool isParameterValid(const char *param, const char *supportedParams); + bool isParameterValid(int param, const char *supportedParams); + status_t doesSetParameterNeedUpdate(const char *new_param, const char *old_params, bool &update); + + /** Initialize default parameters */ + void initDefaultParameters(); + + void dumpProperties(CameraProperties::Properties& cameraProps); + + status_t startImageBracketing(); + + status_t stopImageBracketing(); + + void setShutter(bool enable); + + void forceStopPreview(); + + void selectFPSRange(int framerate, int *min_fps, int *max_fps); + + void setPreferredPreviewRes(int width, int height); + void resetPreviewRes(CameraParameters *mParams, int width, int height); + + //@} + + +/*----------Member variables - Public ---------------------*/ +public: + int32_t 
mMsgEnabled; + bool mRecordEnabled; + nsecs_t mCurrentTime; + bool mFalsePreview; + bool mPreviewEnabled; + uint32_t mTakePictureQueue; + bool mBracketingEnabled; + bool mBracketingRunning; + //User shutter override + bool mShutterEnabled; + bool mMeasurementEnabled; + //Google's parameter delimiter + static const char PARAMS_DELIMITER[]; + + CameraAdapter *mCameraAdapter; + sp mAppCallbackNotifier; + sp mDisplayAdapter; + sp mMemoryManager; + + sp mPictureHeap; + + int* mGrallocHandles; + bool mFpsRangeChangedByApp; + + + + + +///static member vars + +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + + //Timestamp from the CameraHal constructor + static struct timeval ppm_start; + //Timestamp of the autoFocus command + static struct timeval mStartFocus; + //Timestamp of the startPreview command + static struct timeval mStartPreview; + //Timestamp of the takePicture command + static struct timeval mStartCapture; + +#endif + +/*----------Member variables - Private ---------------------*/ +private: + bool mDynamicPreviewSwitch; + //keeps paused state of display + bool mDisplayPaused; + //Index of current camera adapter + int mCameraIndex; + + mutable Mutex mLock; + + sp mSensorListener; + + void* mCameraAdapterHandle; + + CameraParameters mParameters; + bool mPreviewRunning; + bool mPreviewStateOld; + bool mRecordingEnabled; + EventProvider *mEventProvider; + + int32_t *mPreviewDataBufs; + uint32_t *mPreviewDataOffsets; + int mPreviewDataFd; + int mPreviewDataLength; + int32_t *mImageBufs; + uint32_t *mImageOffsets; + int mImageFd; + int mImageLength; + int32_t *mPreviewBufs; + uint32_t *mPreviewOffsets; + int mPreviewLength; + int mPreviewFd; + int32_t *mVideoBufs; + uint32_t *mVideoOffsets; + int mVideoFd; + int mVideoLength; + + int mBracketRangePositive; + int mBracketRangeNegative; + + ///@todo Rename this as preview buffer provider + BufferProvider *mBufProvider; + BufferProvider *mVideoBufProvider; + + + CameraProperties::Properties* mCameraProperties; + + 
bool mPreviewStartInProgress; + + bool mSetPreviewWindowCalled; + + uint32_t mPreviewWidth; + uint32_t mPreviewHeight; + int32_t mMaxZoomSupported; + + int mVideoWidth; + int mVideoHeight; + +}; + + +}; // namespace android + +#endif diff --git a/camera/inc/CameraProperties.h b/camera/inc/CameraProperties.h new file mode 100644 index 0000000..6f05877 --- /dev/null +++ b/camera/inc/CameraProperties.h @@ -0,0 +1,198 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + + + +#ifndef CAMERA_PROPERTIES_H +#define CAMERA_PROPERTIES_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include "cutils/properties.h" + +namespace android { + +#define MAX_CAMERAS_SUPPORTED 2 +#define MAX_SIMUL_CAMERAS_SUPPORTED 1 +#define MAX_PROP_NAME_LENGTH 50 +#define MAX_PROP_VALUE_LENGTH 2048 + +#define EXIF_MAKE_DEFAULT "default_make" +#define EXIF_MODEL_DEFAULT "default_model" + +// Class that handles the Camera Properties +class CameraProperties +{ +public: + static const char INVALID[]; + static const char CAMERA_NAME[]; + static const char CAMERA_SENSOR_INDEX[]; + static const char ORIENTATION_INDEX[]; + static const char FACING_INDEX[]; + static const char S3D_SUPPORTED[]; + static const char SUPPORTED_PREVIEW_SIZES[]; + static const char SUPPORTED_PREVIEW_FORMATS[]; + static const char SUPPORTED_PREVIEW_FRAME_RATES[]; + static const char SUPPORTED_PICTURE_SIZES[]; + static const char SUPPORTED_PICTURE_FORMATS[]; + static const char SUPPORTED_THUMBNAIL_SIZES[]; + static const char SUPPORTED_WHITE_BALANCE[]; + static const char SUPPORTED_EFFECTS[]; + static const char SUPPORTED_ANTIBANDING[]; + static const char SUPPORTED_EXPOSURE_MODES[]; + static const char SUPPORTED_EV_MIN[]; + static const char SUPPORTED_EV_MAX[]; + static const char SUPPORTED_EV_STEP[]; + static const char SUPPORTED_ISO_VALUES[]; + static const char SUPPORTED_SCENE_MODES[]; + static const char SUPPORTED_FLASH_MODES[]; + static const char SUPPORTED_FOCUS_MODES[]; + static const char REQUIRED_PREVIEW_BUFS[]; + static const char REQUIRED_IMAGE_BUFS[]; + static const char SUPPORTED_ZOOM_RATIOS[]; + static const char SUPPORTED_ZOOM_STAGES[]; + static const char SUPPORTED_IPP_MODES[]; + static const char SMOOTH_ZOOM_SUPPORTED[]; + static const char ZOOM_SUPPORTED[]; + static const char PREVIEW_SIZE[]; + static const char PREVIEW_FORMAT[]; + static const char PREVIEW_FRAME_RATE[]; + static const char ZOOM[]; + static const char 
PICTURE_SIZE[]; + static const char PICTURE_FORMAT[]; + static const char JPEG_THUMBNAIL_SIZE[]; + static const char WHITEBALANCE[]; + static const char EFFECT[]; + static const char ANTIBANDING[]; + static const char EXPOSURE_MODE[]; + static const char EV_COMPENSATION[]; + static const char ISO_MODE[]; + static const char FOCUS_MODE[]; + static const char SCENE_MODE[]; + static const char FLASH_MODE[]; + static const char JPEG_QUALITY[]; + static const char BRIGHTNESS[]; + static const char SATURATION[]; + static const char SHARPNESS[]; + static const char CONTRAST[]; + static const char IPP[]; + static const char GBCE[]; + static const char AUTOCONVERGENCE[]; + static const char AUTOCONVERGENCE_MODE[]; + static const char MANUALCONVERGENCE_VALUES[]; + static const char SENSOR_ORIENTATION[]; + static const char SENSOR_ORIENTATION_VALUES[]; + static const char REVISION[]; + static const char FOCAL_LENGTH[]; + static const char HOR_ANGLE[]; + static const char VER_ANGLE[]; + static const char EXIF_MAKE[]; + static const char EXIF_MODEL[]; + static const char JPEG_THUMBNAIL_QUALITY[]; + static const char MAX_FOCUS_AREAS[]; + static const char MAX_FD_HW_FACES[]; + static const char MAX_FD_SW_FACES[]; + + static const char PARAMS_DELIMITER []; + + static const char S3D2D_PREVIEW[]; + static const char S3D2D_PREVIEW_MODES[]; + static const char VSTAB[]; + static const char VSTAB_SUPPORTED[]; + static const char FRAMERATE_RANGE[]; + static const char FRAMERATE_RANGE_IMAGE[]; + static const char FRAMERATE_RANGE_VIDEO[]; + static const char FRAMERATE_RANGE_SUPPORTED[]; + + static const char DEFAULT_VALUE[]; + + static const char AUTO_EXPOSURE_LOCK[]; + static const char AUTO_EXPOSURE_LOCK_SUPPORTED[]; + static const char AUTO_WHITEBALANCE_LOCK[]; + static const char AUTO_WHITEBALANCE_LOCK_SUPPORTED[]; + static const char MAX_NUM_METERING_AREAS[]; + static const char METERING_AREAS[]; + static const char MAX_NUM_FOCUS_AREAS[]; + + static const char 
VIDEO_SNAPSHOT_SUPPORTED[]; + + static const char VIDEO_SIZE[]; + static const char SUPPORTED_VIDEO_SIZES[]; + static const char PREFERRED_PREVIEW_SIZE_FOR_VIDEO[]; + + CameraProperties(); + ~CameraProperties(); + + // container class passed around for accessing properties + class Properties + { + public: + Properties() + { + mProperties = new DefaultKeyedVector(String8(DEFAULT_VALUE)); + char property[PROPERTY_VALUE_MAX]; + property_get("ro.product.manufacturer", property, EXIF_MAKE_DEFAULT); + property[0] = toupper(property[0]); + set(EXIF_MAKE, property); + property_get("ro.product.model", property, EXIF_MODEL_DEFAULT); + property[0] = toupper(property[0]); + set(EXIF_MODEL, property); + } + ~Properties() + { + delete mProperties; + } + ssize_t set(const char *prop, const char *value); + ssize_t set(const char *prop, int value); + const char* get(const char * prop); + void dump(); + + protected: + const char* keyAt(unsigned int); + const char* valueAt(unsigned int); + + private: + DefaultKeyedVector* mProperties; + + }; + + ///Initializes the CameraProperties class + status_t initialize(); + status_t loadProperties(); + int camerasSupported(); + int getProperties(int cameraIndex, Properties** properties); + +private: + + uint32_t mCamerasSupported; + int mInitialized; + mutable Mutex mLock; + + Properties mCameraProps[MAX_CAMERAS_SUPPORTED]; + +}; + +}; + +#endif //CAMERA_PROPERTIES_H + diff --git a/camera/inc/Encoder_libjpeg.h b/camera/inc/Encoder_libjpeg.h new file mode 100644 index 0000000..fb9a894 --- /dev/null +++ b/camera/inc/Encoder_libjpeg.h @@ -0,0 +1,209 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file Encoder_libjpeg.h +* +* This defines API for camerahal to encode YUV using libjpeg +* +*/ + +#ifndef ANDROID_CAMERA_HARDWARE_ENCODER_LIBJPEG_H +#define ANDROID_CAMERA_HARDWARE_ENCODER_LIBJPEG_H + +#include +#include + +extern "C" { +#include "jhead.h" +} + +#define CANCEL_TIMEOUT 3000000 // 3 seconds + +namespace android { +/** + * libjpeg encoder class - uses libjpeg to encode yuv + */ + +#define MAX_EXIF_TAGS_SUPPORTED 30 +typedef void (*encoder_libjpeg_callback_t) (void* main_jpeg, + void* thumb_jpeg, + CameraFrame::FrameType type, + void* cookie1, + void* cookie2, + void* cookie3, + bool canceled); + +// these have to match strings defined in external/jhead/exif.c +static const char TAG_MODEL[] = "Model"; +static const char TAG_MAKE[] = "Make"; +static const char TAG_FOCALLENGTH[] = "FocalLength"; +static const char TAG_DATETIME[] = "DateTime"; +static const char TAG_IMAGE_WIDTH[] = "ImageWidth"; +static const char TAG_IMAGE_LENGTH[] = "ImageLength"; +static const char TAG_GPS_LAT[] = "GPSLatitude"; +static const char TAG_GPS_LAT_REF[] = "GPSLatitudeRef"; +static const char TAG_GPS_LONG[] = "GPSLongitude"; +static const char TAG_GPS_LONG_REF[] = "GPSLongitudeRef"; +static const char TAG_GPS_ALT[] = "GPSAltitude"; +static const char TAG_GPS_ALT_REF[] = "GPSAltitudeRef"; +static const char TAG_GPS_MAP_DATUM[] = "GPSMapDatum"; +static const char TAG_GPS_PROCESSING_METHOD[] = "GPSProcessingMethod"; +static const char TAG_GPS_VERSION_ID[] = "GPSVersionID"; +static const char TAG_GPS_TIMESTAMP[] = "GPSTimeStamp"; +static const 
char TAG_GPS_DATESTAMP[] = "GPSDateStamp"; +static const char TAG_ORIENTATION[] = "Orientation"; +static const char TAG_FLASH[] = "Flash"; +static const char TAG_DIGITALZOOMRATIO[] = "DigitalZoomRatio"; +static const char TAG_EXPOSURETIME[] = "ExposureTime"; +static const char TAG_APERTURE[] = "ApertureValue"; +static const char TAG_ISO_EQUIVALENT[] = "ISOSpeedRatings"; +static const char TAG_WHITEBALANCE[] = "WhiteBalance"; +static const char TAG_LIGHT_SOURCE[] = "LightSource"; +static const char TAG_METERING_MODE[] = "MeteringMode"; +static const char TAG_EXPOSURE_PROGRAM[] = "ExposureProgram"; +static const char TAG_COLOR_SPACE[] = "ColorSpace"; +static const char TAG_CPRS_BITS_PER_PIXEL[] = "CompressedBitsPerPixel"; +static const char TAG_FNUMBER[] = "FNumber"; +static const char TAG_SHUTTERSPEED[] = "ShutterSpeedValue"; +static const char TAG_SENSING_METHOD[] = "SensingMethod"; +static const char TAG_CUSTOM_RENDERED[] = "CustomRendered"; + +class ExifElementsTable { + public: + ExifElementsTable() : + gps_tag_count(0), exif_tag_count(0), position(0), + jpeg_opened(false), has_datetime_tag(false) { } + ~ExifElementsTable(); + + status_t insertElement(const char* tag, const char* value); + void insertExifToJpeg(unsigned char* jpeg, size_t jpeg_size); + status_t insertExifThumbnailImage(const char*, int); + void saveJpeg(unsigned char* picture, size_t jpeg_size); + static const char* degreesToExifOrientation(unsigned int); + static void stringToRational(const char*, unsigned int*, unsigned int*); + static bool isAsciiTag(const char* tag); + private: + ExifElement_t table[MAX_EXIF_TAGS_SUPPORTED]; + unsigned int gps_tag_count; + unsigned int exif_tag_count; + unsigned int position; + bool jpeg_opened; + bool has_datetime_tag; +}; + +class Encoder_libjpeg : public Thread { + /* public member types and variables */ + public: + struct params { + uint8_t* src; + int src_size; + uint8_t* dst; + int dst_size; + int quality; + int in_width; + int in_height; + int 
out_width; + int out_height; + int right_crop; + int start_offset; + const char* format; + size_t jpeg_size; + }; + /* public member functions */ + public: + Encoder_libjpeg(params* main_jpeg, + params* tn_jpeg, + encoder_libjpeg_callback_t cb, + CameraFrame::FrameType type, + void* cookie1, + void* cookie2, + void* cookie3) + : Thread(false), mMainInput(main_jpeg), mThumbnailInput(tn_jpeg), mCb(cb), + mCancelEncoding(false), mCookie1(cookie1), mCookie2(cookie2), mCookie3(cookie3), + mType(type), mThumb(NULL) { + this->incStrong(this); + mCancelSem.Create(0); + } + + ~Encoder_libjpeg() { + CAMHAL_LOGVB("~Encoder_libjpeg(%p)", this); + } + + virtual bool threadLoop() { + size_t size = 0; + sp tn = NULL; + if (mThumbnailInput) { + // start thread to encode thumbnail + mThumb = new Encoder_libjpeg(mThumbnailInput, NULL, NULL, mType, NULL, NULL, NULL); + mThumb->run(); + } + + // encode our main image + size = encode(mMainInput); + + // signal cancel semaphore incase somebody is waiting + mCancelSem.Signal(); + + // check if it is main jpeg thread + if(mThumb.get()) { + // wait until tn jpeg thread exits. 
+ mThumb->join(); + mThumb.clear(); + mThumb = NULL; + } + + if(mCb) { + mCb(mMainInput, mThumbnailInput, mType, mCookie1, mCookie2, mCookie3, mCancelEncoding); + } + + // encoder thread runs, self-destructs, and then exits + this->decStrong(this); + return false; + } + + void cancel() { + mCancelEncoding = true; + if (mThumb.get()) { + mThumb->cancel(); + mCancelSem.WaitTimeout(CANCEL_TIMEOUT); + } + } + + void getCookies(void **cookie1, void **cookie2, void **cookie3) { + if (cookie1) *cookie1 = mCookie1; + if (cookie2) *cookie2 = mCookie2; + if (cookie3) *cookie3 = mCookie3; + } + + private: + params* mMainInput; + params* mThumbnailInput; + encoder_libjpeg_callback_t mCb; + bool mCancelEncoding; + void* mCookie1; + void* mCookie2; + void* mCookie3; + CameraFrame::FrameType mType; + sp mThumb; + Semaphore mCancelSem; + + size_t encode(params*); +}; + +} + +#endif diff --git a/camera/inc/General3A_Settings.h b/camera/inc/General3A_Settings.h new file mode 100644 index 0000000..dab1f8e --- /dev/null +++ b/camera/inc/General3A_Settings.h @@ -0,0 +1,280 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file General3A_Settings.h +* +* This file maps the Camera Hardware Interface to OMX. 
+* +*/ + +#include "OMX_TI_IVCommon.h" +#include "OMX_TI_Common.h" +#include "OMX_TI_Index.h" +#include "TICameraParameters.h" + +#ifndef GENERAL_3A_SETTINGS_H +#define GENERAL_3A_SETTINGS_H + +#define FOCUS_FACE_PRIORITY OMX_IMAGE_FocusControlMax -1 +#define FOCUS_REGION_PRIORITY OMX_IMAGE_FocusControlMax -2 +#define WB_FACE_PRIORITY OMX_WhiteBalControlMax -1 +#define EXPOSURE_FACE_PRIORITY OMX_ExposureControlMax - 1 + +namespace android { + +struct userToOMX_LUT{ + const char * userDefinition; + int omxDefinition; +}; + +struct LUTtype{ + int size; + const userToOMX_LUT *Table; +}; + +const userToOMX_LUT isoUserToOMX[] = { + { TICameraParameters::ISO_MODE_AUTO, 0 }, + { TICameraParameters::ISO_MODE_100, 100 }, + { TICameraParameters::ISO_MODE_200, 200 }, + { TICameraParameters::ISO_MODE_400, 400 }, + { TICameraParameters::ISO_MODE_800, 800 }, + { TICameraParameters::ISO_MODE_1000, 1000 }, + { TICameraParameters::ISO_MODE_1200, 1200 }, + { TICameraParameters::ISO_MODE_1600, 1600 }, +}; + +const userToOMX_LUT effects_UserToOMX [] = { + { CameraParameters::EFFECT_NONE, OMX_ImageFilterNone }, + { CameraParameters::EFFECT_NEGATIVE, OMX_ImageFilterNegative }, + { CameraParameters::EFFECT_SOLARIZE, OMX_ImageFilterSolarize }, + { CameraParameters::EFFECT_SEPIA, OMX_ImageFilterSepia }, + { CameraParameters::EFFECT_MONO, OMX_ImageFilterGrayScale }, + { CameraParameters::EFFECT_BLACKBOARD, OMX_TI_ImageFilterBlackBoard }, + { CameraParameters::EFFECT_WHITEBOARD, OMX_TI_ImageFilterWhiteBoard }, + { CameraParameters::EFFECT_AQUA, OMX_TI_ImageFilterAqua }, + { CameraParameters::EFFECT_POSTERIZE, OMX_TI_ImageFilterPosterize }, +#ifdef OMAP_ENHANCEMENT + { TICameraParameters::EFFECT_NATURAL, OMX_ImageFilterNatural }, + { TICameraParameters::EFFECT_VIVID, OMX_ImageFilterVivid }, + { TICameraParameters::EFFECT_COLOR_SWAP, OMX_ImageFilterColourSwap }, + { TICameraParameters::EFFECT_BLACKWHITE, OMX_TI_ImageFilterBlackWhite } +#endif +}; + +const userToOMX_LUT scene_UserToOMX [] = { + 
{ CameraParameters::SCENE_MODE_AUTO, OMX_Manual }, + { CameraParameters::SCENE_MODE_ACTION, OMX_TI_Action }, + { CameraParameters::SCENE_MODE_NIGHT, OMX_TI_Night }, + { CameraParameters::SCENE_MODE_PARTY, OMX_TI_Party }, + { CameraParameters::SCENE_MODE_SUNSET, OMX_TI_Sunset }, +/*********** TODO: These scene modes are not verified. ************ + ***************** Have to verify and reeable later. ************** + { CameraParameters::SCENE_MODE_THEATRE, OMX_TI_Theatre }, + { CameraParameters::SCENE_MODE_LANDSCAPE, OMX_Landscape }, + { CameraParameters::SCENE_MODE_NIGHT_PORTRAIT, OMX_NightPortrait }, + { CameraParameters::SCENE_MODE_FIREWORKS, OMX_Fireworks }, + { CameraParameters::SCENE_MODE_BEACH, OMX_TI_Beach }, + { CameraParameters::SCENE_MODE_CANDLELIGHT, OMX_TI_Candlelight }, + { CameraParameters::SCENE_MODE_PORTRAIT, OMX_TI_Portrait }, + { CameraParameters::SCENE_MODE_SNOW, OMX_TI_Snow }, + { CameraParameters::SCENE_MODE_STEADYPHOTO, OMX_TI_Steadyphoto }, +*********************************************************************/ +#ifdef OMAP_ENHANCEMENT + { TICameraParameters::SCENE_MODE_CLOSEUP, OMX_Closeup }, + { TICameraParameters::SCENE_MODE_AQUA, OMX_Underwater }, + { TICameraParameters::SCENE_MODE_SPORT, OMX_Sport }, + { TICameraParameters::SCENE_MODE_MOOD, OMX_Mood }, + { TICameraParameters::SCENE_MODE_NIGHT_INDOOR, OMX_NightIndoor }, + { TICameraParameters::SCENE_MODE_DOCUMENT, OMX_Document }, + { TICameraParameters::SCENE_MODE_BARCODE, OMX_Barcode }, + { TICameraParameters::SCENE_MODE_VIDEO_SUPER_NIGHT, OMX_SuperNight }, + { TICameraParameters::SCENE_MODE_VIDEO_CINE, OMX_Cine }, + { TICameraParameters::SCENE_MODE_VIDEO_OLD_FILM, OMX_OldFilm }, +#endif +}; + +const userToOMX_LUT whiteBal_UserToOMX [] = { + { CameraParameters::WHITE_BALANCE_AUTO, OMX_WhiteBalControlAuto }, + { CameraParameters::WHITE_BALANCE_DAYLIGHT, OMX_WhiteBalControlSunLight }, + { CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, OMX_WhiteBalControlCloudy }, + { 
CameraParameters::WHITE_BALANCE_FLUORESCENT, OMX_WhiteBalControlFluorescent }, + { CameraParameters::WHITE_BALANCE_INCANDESCENT, OMX_WhiteBalControlIncandescent }, +/********************** THESE ARE CURRENT NOT TUNED PROPERLY ************************* + { CameraParameters::WHITE_BALANCE_SHADE, OMX_TI_WhiteBalControlShade }, + { CameraParameters::WHITE_BALANCE_TWILIGHT, OMX_TI_WhiteBalControlTwilight }, + { CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT, OMX_TI_WhiteBalControlWarmFluorescent }, +**************************************************************************************/ +#ifdef OMAP_ENHANCEMENT + { TICameraParameters::WHITE_BALANCE_TUNGSTEN, OMX_WhiteBalControlTungsten }, + { TICameraParameters::WHITE_BALANCE_HORIZON, OMX_WhiteBalControlHorizon }, + { TICameraParameters::WHITE_BALANCE_FACE, WB_FACE_PRIORITY }, + { TICameraParameters::WHITE_BALANCE_SUNSET, OMX_TI_WhiteBalControlSunset } +#endif +}; + +const userToOMX_LUT antibanding_UserToOMX [] = { + { CameraParameters::ANTIBANDING_OFF, OMX_FlickerCancelOff }, + { CameraParameters::ANTIBANDING_AUTO, OMX_FlickerCancelAuto }, + { CameraParameters::ANTIBANDING_50HZ, OMX_FlickerCancel50 }, + { CameraParameters::ANTIBANDING_60HZ, OMX_FlickerCancel60 } +}; + +const userToOMX_LUT focus_UserToOMX [] = { + { CameraParameters::FOCUS_MODE_AUTO, OMX_IMAGE_FocusControlAutoLock }, + { CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlAutoInfinity }, + { CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlHyperfocal }, + { CameraParameters::FOCUS_MODE_MACRO, OMX_IMAGE_FocusControlAutoMacro }, + { CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, OMX_IMAGE_FocusControlAuto }, + { CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, OMX_IMAGE_FocusControlAuto }, +#ifdef OMAP_ENHANCEMENT + { TICameraParameters::FOCUS_MODE_FACE , FOCUS_FACE_PRIORITY }, + { TICameraParameters::FOCUS_MODE_PORTRAIT, OMX_IMAGE_FocusControlPortrait }, + { TICameraParameters::FOCUS_MODE_EXTENDED, OMX_IMAGE_FocusControlExtended }, 
+#endif +}; + +const userToOMX_LUT exposure_UserToOMX [] = { + { TICameraParameters::EXPOSURE_MODE_OFF, OMX_ExposureControlOff }, + { TICameraParameters::EXPOSURE_MODE_AUTO, OMX_ExposureControlAuto }, + { TICameraParameters::EXPOSURE_MODE_NIGHT, OMX_ExposureControlNight }, + { TICameraParameters::EXPOSURE_MODE_BACKLIGHT, OMX_ExposureControlBackLight }, + { TICameraParameters::EXPOSURE_MODE_SPOTLIGHT, OMX_ExposureControlSpotLight}, + { TICameraParameters::EXPOSURE_MODE_SPORTS, OMX_ExposureControlSports }, + { TICameraParameters::EXPOSURE_MODE_SNOW, OMX_ExposureControlSnow }, + { TICameraParameters::EXPOSURE_MODE_BEACH, OMX_ExposureControlBeach }, + { TICameraParameters::EXPOSURE_MODE_APERTURE, OMX_ExposureControlLargeAperture }, + { TICameraParameters::EXPOSURE_MODE_SMALL_APERTURE, OMX_ExposureControlSmallApperture }, + { TICameraParameters::EXPOSURE_MODE_FACE, EXPOSURE_FACE_PRIORITY }, +}; + +const userToOMX_LUT flash_UserToOMX [] = { + { CameraParameters::FLASH_MODE_OFF ,OMX_IMAGE_FlashControlOff }, + { CameraParameters::FLASH_MODE_ON ,OMX_IMAGE_FlashControlOn }, + { CameraParameters::FLASH_MODE_AUTO ,OMX_IMAGE_FlashControlAuto }, + { CameraParameters::FLASH_MODE_TORCH ,OMX_IMAGE_FlashControlTorch }, + { CameraParameters::FLASH_MODE_RED_EYE ,OMX_IMAGE_FlashControlRedEyeReduction }, +#ifdef OMAP_ENHANCEMENT + { TICameraParameters::FLASH_MODE_FILL_IN ,OMX_IMAGE_FlashControlFillin } +#endif +}; + +const LUTtype ExpLUT = + { + sizeof(exposure_UserToOMX)/sizeof(exposure_UserToOMX[0]), + exposure_UserToOMX + }; + +const LUTtype WBalLUT = + { + sizeof(whiteBal_UserToOMX)/sizeof(whiteBal_UserToOMX[0]), + whiteBal_UserToOMX + }; + +const LUTtype FlickerLUT = + { + sizeof(antibanding_UserToOMX)/sizeof(antibanding_UserToOMX[0]), + antibanding_UserToOMX + }; + +const LUTtype SceneLUT = + { + sizeof(scene_UserToOMX)/sizeof(scene_UserToOMX[0]), + scene_UserToOMX + }; + +const LUTtype FlashLUT = + { + sizeof(flash_UserToOMX)/sizeof(flash_UserToOMX[0]), + flash_UserToOMX + }; + 
+const LUTtype EffLUT = + { + sizeof(effects_UserToOMX)/sizeof(effects_UserToOMX[0]), + effects_UserToOMX + }; + +const LUTtype FocusLUT = + { + sizeof(focus_UserToOMX)/sizeof(focus_UserToOMX[0]), + focus_UserToOMX + }; + +const LUTtype IsoLUT = + { + sizeof(isoUserToOMX)/sizeof(isoUserToOMX[0]), + isoUserToOMX + }; + +/* +* class Gen3A_settings +* stores the 3A settings +* also defines the look up tables +* for mapping settings from Hal to OMX +*/ +class Gen3A_settings{ + public: + + int Exposure; + int WhiteBallance; + int Flicker; + int SceneMode; + int Effect; + int Focus; + int EVCompensation; + int Contrast; + int Saturation; + int Sharpness; + int ISO; + int FlashMode; + + unsigned int Brightness; + OMX_BOOL ExposureLock; + OMX_BOOL FocusLock; + OMX_BOOL WhiteBalanceLock; +}; + +/* +* Flags raised when a setting is changed +*/ +enum E3ASettingsFlags +{ + SetSceneMode = 1 << 0, + SetEVCompensation = 1 << 1, + SetWhiteBallance = 1 << 2, + SetFlicker = 1 << 3, + SetExposure = 1 << 4, + SetSharpness = 1 << 5, + SetBrightness = 1 << 6, + SetContrast = 1 << 7, + SetISO = 1 << 8, + SetSaturation = 1 << 9, + SetEffect = 1 << 10, + SetFocus = 1 << 11, + SetExpMode = 1 << 14, + SetFlash = 1 << 15, + SetExpLock = 1 << 16, + SetWBLock = 1 << 17, + SetMeteringAreas = 1 << 18, + + E3aSettingMax, + E3AsettingsAll = ( ((E3aSettingMax -1 ) << 1) -1 ) /// all possible flags raised +}; + +}; + +#endif //GENERAL_3A_SETTINGS_H diff --git a/camera/inc/NV12_resize.h b/camera/inc/NV12_resize.h new file mode 100644 index 0000000..927faf8 --- /dev/null +++ b/camera/inc/NV12_resize.h @@ -0,0 +1,148 @@ +#ifndef NV12_RESIZE_H_ +#define NV12_RESIZE_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +typedef unsigned char mmBool; +typedef unsigned char mmUchar; +typedef unsigned char mmUint8; +typedef unsigned char mmByte; +typedef unsigned short mmUint16; +typedef unsigned int mmUint32; +typedef unsigned long mmUint64; +typedef signed char mmInt8; +typedef char mmChar; +typedef signed 
short mmInt16; +typedef signed int mmInt32; +typedef signed long mmLong; +typedef signed int mmHandle; +typedef float mmFloat; +typedef double mmDouble; +typedef int HObj; +typedef HObj HFile; +typedef int HDir; +typedef void* mmMutexHandle; +typedef struct _fstat +{ + mmInt32 fileSize; +}VE_FileAttribute; + +typedef struct +{ + mmInt32 second; + mmInt32 millisecond; +}tsVE_Time; + +typedef struct +{ + mmInt32 year; + mmInt32 month; + mmInt32 day; + mmInt32 hour; + mmInt32 minute; + mmInt32 second; +} TmDateTime; + +/*---------------------------------------------------------------------------- + Define : TRUE/FALSE for boolean operations +----------------------------------------------------------------------------*/ + +#ifndef TRUE + #define TRUE 1 +#endif + +#ifndef FALSE + #define FALSE 0 +#endif + +#ifndef NULL + #define NULL 0 +#endif + +const mmUint8 bWeights[8][8][4] = { + {{64, 0, 0, 0}, {56, 0, 0, 8}, {48, 0, 0,16}, {40, 0, 0,24}, + {32, 0, 0,32}, {24, 0, 0,40}, {16, 0, 0,48}, { 8, 0, 0,56}}, + + {{56, 8, 0, 0}, {49, 7, 1, 7}, {42, 6, 2,14}, {35, 5, 3,21}, + {28, 4, 4,28}, {21, 3, 5,35}, {14, 2, 6,42}, { 7, 1, 7,49}}, + + {{48,16, 0, 0}, {42,14, 2, 6}, {36,12,4 ,12}, {30,10,6 ,18}, + {24, 8, 8,24}, {18, 6,10,30}, {12,4 ,12,36}, { 6, 2,14,42}}, + + {{40,24,0 ,0 }, {35,21, 3, 5}, {30,18, 6,10}, {25,15, 9,15}, + {20,12,12,20}, {15, 9,15,25}, {10, 6,18,30}, { 5, 3,21,35}}, + + {{32,32, 0,0 }, {28,28, 4, 4}, {24,24, 8, 8}, {20,20,12,12}, + {16,16,16,16}, {12,12,20,20}, { 8, 8,24,24}, { 4, 4,28,28}}, + + {{24,40,0 ,0 }, {21,35, 5, 3}, {18,30,10, 6}, {15,25,15, 9}, + {12,20,20,12}, { 9,15,25,15}, { 6,10,30,18}, { 3, 5,35,21}}, + + {{16,48, 0,0 }, {14,42, 6, 2}, {12,36,12, 4}, {10,30,18, 6}, + {8 ,24,24,8 }, { 6,18,30,10}, { 4,12,36,12}, { 2, 6,42,14}}, + + {{ 8,56, 0,0 }, { 7,49, 7, 1}, { 6,42,14, 2}, { 5,35,21, 3}, + { 4,28,28,4 }, { 3,21,35, 5}, { 2,14,42, 6}, { 1,7 ,49, 7}} +}; + +typedef enum +{ + IC_FORMAT_NONE, + IC_FORMAT_RGB565, + IC_FORMAT_RGB888, + 
IC_FORMAT_YCbCr420_lp, + IC_FORMAT_YCbCr, + IC_FORMAT_YCbCr420_FRAME_PK, + IC_FORMAT_MAX +}enumImageFormat; + +/* This structure defines the format of an image */ +typedef struct +{ + mmInt32 uWidth; + mmInt32 uHeight; + mmInt32 uStride; + enumImageFormat eFormat; + mmByte *imgPtr; + mmByte *clrPtr; + mmInt32 uOffset; +} structConvImage; + +typedef struct IC_crop_struct +{ + mmUint32 x; /* x pos of rectangle */ + mmUint32 y; /* y pos of rectangle */ + mmUint32 uWidth; /* dx of rectangle */ + mmUint32 uHeight; /* dy of rectangle */ +} IC_rect_type; + +/*========================================================================== +* Function Name : VT_resizeFrame_Video_opt2_lp +* +* Description : Resize a yuv frame. +* +* Input(s) : input_img_ptr -> Input Image Structure +* : output_img_ptr -> Output Image Structure +* : cropout -> crop structure +* +* Value Returned : mmBool -> FALSE on error TRUE on success +* NOTE: +* Not tested for crop funtionallity. +* faster version. +============================================================================*/ +mmBool +VT_resizeFrame_Video_opt2_lp +( + structConvImage* i_img_ptr, /* Points to the input image */ + structConvImage* o_img_ptr, /* Points to the output image */ + IC_rect_type* cropout, /* how much to resize to in final image */ + mmUint16 dummy /* Transparent pixel value */ + ); + +#ifdef __cplusplus +} +#endif + +#endif //#define NV12_RESIZE_H_ diff --git a/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h new file mode 100644 index 0000000..5c88cf7 --- /dev/null +++ b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h @@ -0,0 +1,962 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + +#ifndef OMX_CAMERA_ADAPTER_H +#define OMX_CAMERA_ADAPTER_H + +#include "CameraHal.h" +#include "OMX_Types.h" +#include "OMX_Core.h" +#include "OMX_CoreExt.h" +#include "OMX_IVCommon.h" +#include "OMX_Component.h" +#include "OMX_Index.h" +#include "OMX_IndexExt.h" +#include "OMX_TI_Index.h" +#include "OMX_TI_IVCommon.h" +#include "OMX_TI_Common.h" +#include "OMX_TI_Image.h" +#include "General3A_Settings.h" +#include "OMXSceneModeTables.h" + +#include "BaseCameraAdapter.h" +#include "Encoder_libjpeg.h" +#include "DebugUtils.h" + + +extern "C" +{ +#include "timm_osal_error.h" +#include "timm_osal_events.h" +#include "timm_osal_trace.h" +#include "timm_osal_semaphores.h" +} + + +namespace android { + +#define Q16_OFFSET 16 + +#define OMX_CMD_TIMEOUT 3000000 //3 sec. +#define OMX_CAPTURE_TIMEOUT 5000000 //5 sec. + +#define FOCUS_THRESHOLD 5 //[s.] 
+ +#define MIN_JPEG_QUALITY 1 +#define MAX_JPEG_QUALITY 100 +#define EXP_BRACKET_RANGE 10 + +#define FOCUS_DIST_SIZE 100 +#define FOCUS_DIST_BUFFER_SIZE 500 + +#define TOUCH_DATA_SIZE 200 +#define DEFAULT_THUMB_WIDTH 160 +#define DEFAULT_THUMB_HEIGHT 120 +#define FRAME_RATE_FULL_HD 27 +#define ZOOM_STAGES 61 + +#define FACE_DETECTION_BUFFER_SIZE 0x1000 +#define MAX_NUM_FACES_SUPPORTED 35 + +#define EXIF_MODEL_SIZE 100 +#define EXIF_MAKE_SIZE 100 +#define EXIF_DATE_TIME_SIZE 20 + +#define GPS_MIN_DIV 60 +#define GPS_SEC_DIV 60 +#define GPS_SEC_ACCURACY 1000 +#define GPS_TIMESTAMP_SIZE 6 +#define GPS_DATESTAMP_SIZE 11 +#define GPS_REF_SIZE 2 +#define GPS_MAPDATUM_SIZE 100 +#define GPS_PROCESSING_SIZE 100 +#define GPS_VERSION_SIZE 4 +#define GPS_NORTH_REF "N" +#define GPS_SOUTH_REF "S" +#define GPS_EAST_REF "E" +#define GPS_WEST_REF "W" + +/* Default portstartnumber of Camera component */ +#define OMX_CAMERA_DEFAULT_START_PORT_NUM 0 + +/* Define number of ports for differt domains */ +#define OMX_CAMERA_PORT_OTHER_NUM 1 +#define OMX_CAMERA_PORT_VIDEO_NUM 4 +#define OMX_CAMERA_PORT_IMAGE_NUM 1 +#define OMX_CAMERA_PORT_AUDIO_NUM 0 +#define OMX_CAMERA_NUM_PORTS (OMX_CAMERA_PORT_OTHER_NUM + OMX_CAMERA_PORT_VIDEO_NUM + OMX_CAMERA_PORT_IMAGE_NUM + OMX_CAMERA_PORT_AUDIO_NUM) + +/* Define start port number for differt domains */ +#define OMX_CAMERA_PORT_OTHER_START OMX_CAMERA_DEFAULT_START_PORT_NUM +#define OMX_CAMERA_PORT_VIDEO_START (OMX_CAMERA_PORT_OTHER_START + OMX_CAMERA_PORT_OTHER_NUM) +#define OMX_CAMERA_PORT_IMAGE_START (OMX_CAMERA_PORT_VIDEO_START + OMX_CAMERA_PORT_VIDEO_NUM) +#define OMX_CAMERA_PORT_AUDIO_START (OMX_CAMERA_PORT_IMAGE_START + OMX_CAMERA_PORT_IMAGE_NUM) + +/* Port index for camera component */ +#define OMX_CAMERA_PORT_OTHER_IN (OMX_CAMERA_PORT_OTHER_START + 0) +#define OMX_CAMERA_PORT_VIDEO_IN_VIDEO (OMX_CAMERA_PORT_VIDEO_START + 0) +#define OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW (OMX_CAMERA_PORT_VIDEO_START + 1) +#define OMX_CAMERA_PORT_VIDEO_OUT_VIDEO 
(OMX_CAMERA_PORT_VIDEO_START + 2) +#define OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT (OMX_CAMERA_PORT_VIDEO_START + 3) +#define OMX_CAMERA_PORT_IMAGE_OUT_IMAGE (OMX_CAMERA_PORT_IMAGE_START + 0) + + +#define OMX_INIT_STRUCT(_s_, _name_) \ + memset(&(_s_), 0x0, sizeof(_name_)); \ + (_s_).nSize = sizeof(_name_); \ + (_s_).nVersion.s.nVersionMajor = 0x1; \ + (_s_).nVersion.s.nVersionMinor = 0x1; \ + (_s_).nVersion.s.nRevision = 0x0; \ + (_s_).nVersion.s.nStep = 0x0 + +#define OMX_INIT_STRUCT_PTR(_s_, _name_) \ + memset((_s_), 0x0, sizeof(_name_)); \ + (_s_)->nSize = sizeof(_name_); \ + (_s_)->nVersion.s.nVersionMajor = 0x1; \ + (_s_)->nVersion.s.nVersionMinor = 0x1; \ + (_s_)->nVersion.s.nRevision = 0x0; \ + (_s_)->nVersion.s.nStep = 0x0 + +#define GOTO_EXIT_IF(_CONDITION,_ERROR) { \ + if ((_CONDITION)) { \ + eError = (_ERROR); \ + goto EXIT; \ + } \ +} + +const int64_t kCameraBufferLatencyNs = 250000000LL; // 250 ms + +///OMX Specific Functions +static OMX_ERRORTYPE OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_PTR pAppData, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN OMX_PTR pEventData); + +static OMX_ERRORTYPE OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_PTR pAppData, + OMX_IN OMX_BUFFERHEADERTYPE* pBuffer); + +static OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_PTR pAppData, + OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader); + +struct CapResolution { + size_t width, height; + const char *param; +}; + +struct CapPixelformat { + OMX_COLOR_FORMATTYPE pixelformat; + const char *param; +}; + +struct CapU32 { + OMX_U32 num; + const char *param; +}; + +struct CapU32Pair { + OMX_U32 num1, num2; + const char *param; +}; +struct CapS32 { + OMX_S32 num; + const char *param; +}; + +typedef CapU32 CapFramerate; +typedef CapU32 CapISO; +typedef CapU32 CapSensorName; +typedef CapS32 CapZoom; +typedef CapS32 CapEVComp; + +/** + * Class 
which completely abstracts the camera hardware interaction from camera hal + * TODO: Need to list down here, all the message types that will be supported by this class + Need to implement BufferProvider interface to use AllocateBuffer of OMX if needed + */ +class OMXCameraAdapter : public BaseCameraAdapter +{ +public: + + /*--------------------Constant declarations----------------------------------------*/ + static const int32_t MAX_NO_BUFFERS = 20; + + ///@remarks OMX Camera has six ports - buffer input, time input, preview, image, video, and meta data + static const int MAX_NO_PORTS = 6; + + ///Five second timeout + static const int CAMERA_ADAPTER_TIMEOUT = 5000*1000; + + enum OMXCameraEvents + { + CAMERA_PORT_ENABLE = 0x1, + CAMERA_PORT_FLUSH = 0x2, + CAMERA_PORT_DISABLE = 0x4, + }; + + enum CaptureMode + { + HIGH_SPEED = 1, + HIGH_QUALITY = 2, + VIDEO_MODE = 3, + HIGH_QUALITY_ZSL = 4, + }; + + enum IPPMode + { + IPP_NULL = -1, + IPP_NONE = 0, + IPP_NSF, + IPP_LDC, + IPP_LDCNSF, + }; + + enum CodingMode + { + CodingNone = 0, + CodingJPS, + CodingMPO, + CodingRAWJPEG, + CodingRAWMPO, + }; + + enum Algorithm3A + { + WHITE_BALANCE_ALGO = 0x1, + EXPOSURE_ALGO = 0x2, + FOCUS_ALGO = 0x4, + }; + + enum AlgoPriority + { + FACE_PRIORITY = 0, + REGION_PRIORITY, + }; + + enum BrightnessMode + { + BRIGHTNESS_OFF = 0, + BRIGHTNESS_ON, + BRIGHTNESS_AUTO, + }; + + enum CaptureSettingsFlags { + SetFormat = 1 << 0, + SetThumb = 1 << 1, + SetExpBracket = 1 << 2, + SetQuality = 1 << 3, + SetRotation = 1 << 4, + ECaptureSettingMax, + ECapturesettingsAll = ( ((ECaptureSettingMax -1 ) << 1) -1 ) /// all possible flags raised + }; + + class GPSData + { + public: + int mLongDeg, mLongMin, mLongSec, mLongSecDiv; + char mLongRef[GPS_REF_SIZE]; + bool mLongValid; + int mLatDeg, mLatMin, mLatSec, mLatSecDiv; + char mLatRef[GPS_REF_SIZE]; + bool mLatValid; + int mAltitude; + unsigned char mAltitudeRef; + bool mAltitudeValid; + char mMapDatum[GPS_MAPDATUM_SIZE]; + bool mMapDatumValid; + char 
mVersionId[GPS_VERSION_SIZE]; + bool mVersionIdValid; + char mProcMethod[GPS_PROCESSING_SIZE]; + bool mProcMethodValid; + char mDatestamp[GPS_DATESTAMP_SIZE]; + bool mDatestampValid; + uint32_t mTimeStampHour; + uint32_t mTimeStampMin; + uint32_t mTimeStampSec; + bool mTimeStampValid; + }; + + class EXIFData + { + public: + GPSData mGPSData; + char mMake[EXIF_MODEL_SIZE]; + char mModel[EXIF_MAKE_SIZE]; + unsigned int mFocalNum, mFocalDen; + bool mMakeValid; + bool mModelValid; + }; + + ///Parameters specific to any port of the OMX Camera component + class OMXCameraPortParameters + { + public: + OMX_U32 mHostBufaddr[MAX_NO_BUFFERS]; + OMX_BUFFERHEADERTYPE *mBufferHeader[MAX_NO_BUFFERS]; + OMX_U32 mWidth; + OMX_U32 mHeight; + OMX_U32 mStride; + OMX_U8 mNumBufs; + + // defines maximum number of buffers our of mNumBufs + // queueable at given moment + OMX_U8 mMaxQueueable; + + OMX_U32 mBufSize; + OMX_COLOR_FORMATTYPE mColorFormat; + OMX_PARAM_VIDEONOISEFILTERTYPE mVNFMode; + OMX_PARAM_VIDEOYUVRANGETYPE mYUVRange; + OMX_CONFIG_BOOLEANTYPE mVidStabParam; + OMX_CONFIG_FRAMESTABTYPE mVidStabConfig; + OMX_U32 mCapFrame; + OMX_U32 mFrameRate; + OMX_S32 mMinFrameRate; + OMX_S32 mMaxFrameRate; + CameraFrame::FrameType mImageType; + }; + + ///Context of the OMX Camera component + class OMXCameraAdapterComponentContext + { + public: + OMX_HANDLETYPE mHandleComp; + OMX_U32 mNumPorts; + OMX_STATETYPE mState ; + OMX_U32 mVideoPortIndex; + OMX_U32 mPrevPortIndex; + OMX_U32 mImagePortIndex; + OMX_U32 mMeasurementPortIndex; + OMXCameraPortParameters mCameraPortParams[MAX_NO_PORTS]; + }; + +public: + + OMXCameraAdapter(size_t sensor_index); + ~OMXCameraAdapter(); + + ///Initialzes the camera adapter creates any resources required + virtual status_t initialize(CameraProperties::Properties*); + + //APIs to configure Camera adapter and get the current parameter set + virtual status_t setParameters(const CameraParameters& params); + virtual void getParameters(CameraParameters& params); + + 
// API + virtual status_t UseBuffersPreview(void* bufArr, int num); + + //API to flush the buffers for preview + status_t flushBuffers(); + + // API + virtual status_t setFormat(OMX_U32 port, OMXCameraPortParameters &cap); + + // Function to get and populate caps from handle + static status_t getCaps(CameraProperties::Properties* props, OMX_HANDLETYPE handle); + static const char* getLUTvalue_OMXtoHAL(int OMXValue, LUTtype LUT); + static int getLUTvalue_HALtoOMX(const char * HalValue, LUTtype LUT); + + OMX_ERRORTYPE OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN OMX_PTR pEventData); + + OMX_ERRORTYPE OMXCameraAdapterEmptyBufferDone(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_BUFFERHEADERTYPE* pBuffer); + + OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader); + + static OMX_ERRORTYPE OMXCameraGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData=NULL); + +protected: + + //Parent class method implementation + virtual status_t takePicture(); + virtual status_t stopImageCapture(); + virtual status_t startBracketing(int range); + virtual status_t stopBracketing(); + virtual status_t autoFocus(); + virtual status_t cancelAutoFocus(); + virtual status_t startSmoothZoom(int targetIdx); + virtual status_t stopSmoothZoom(); + virtual status_t startVideoCapture(); + virtual status_t stopVideoCapture(); + virtual status_t startPreview(); + virtual status_t stopPreview(); + virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable); + virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType); + virtual status_t getFrameSize(size_t &width, size_t &height); + virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount); + virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount); + virtual status_t 
startFaceDetection(); + virtual status_t stopFaceDetection(); + virtual status_t switchToExecuting(); + virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt); + +private: + + status_t doSwitchToExecuting(); + + void performCleanupAfterError(); + + status_t switchToLoaded(); + + OMXCameraPortParameters *getPortParams(CameraFrame::FrameType frameType); + + OMX_ERRORTYPE SignalEvent(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN OMX_PTR pEventData); + OMX_ERRORTYPE RemoveEvent(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN OMX_PTR pEventData); + + status_t RegisterForEvent(OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN Semaphore &semaphore); + + status_t setPictureRotation(unsigned int degree); + status_t setSensorOrientation(unsigned int degree); + status_t setImageQuality(unsigned int quality); + status_t setThumbnailParams(unsigned int width, unsigned int height, unsigned int quality); + + //EXIF + status_t setParametersEXIF(const CameraParameters ¶ms, + BaseCameraAdapter::AdapterState state); + status_t convertGPSCoord(double coord, int °, int &min, int &sec, int &secDivisor); + status_t setupEXIF(); + status_t setupEXIF_libjpeg(ExifElementsTable*, OMX_TI_ANCILLARYDATATYPE*, + OMX_TI_WHITEBALANCERESULTTYPE*); + + //Focus functionality + status_t doAutoFocus(); + status_t stopAutoFocus(); + status_t checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus); + status_t returnFocusStatus(bool timeoutReached); + status_t getFocusMode(OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE &focusMode); + void handleFocusCallback(); + + + //Focus distances + status_t setParametersFocus(const CameraParameters ¶ms, + BaseCameraAdapter::AdapterState state); + status_t addFocusDistances(OMX_U32 &near, + OMX_U32 &optimal, + OMX_U32 &far, + CameraParameters& 
params); + status_t encodeFocusDistance(OMX_U32 dist, char *buffer, size_t length); + status_t getFocusDistances(OMX_U32 &near,OMX_U32 &optimal, OMX_U32 &far); + + //VSTAB and VNF Functionality + status_t enableVideoNoiseFilter(bool enable); + status_t enableVideoStabilization(bool enable); + + //Digital zoom + status_t setParametersZoom(const CameraParameters ¶ms, + BaseCameraAdapter::AdapterState state); + status_t doZoom(int index); + status_t advanceZoom(); + + //3A related parameters + status_t setParameters3A(const CameraParameters ¶ms, + BaseCameraAdapter::AdapterState state); + + // scene modes + status_t setScene(Gen3A_settings& Gen3A); + // returns pointer to SceneModesEntry from the LUT for camera given 'name' and 'scene' + static const SceneModesEntry* getSceneModeEntry(const char* name, OMX_SCENEMODETYPE scene); + + + //Flash modes + status_t setFlashMode(Gen3A_settings& Gen3A); + status_t getFlashMode(Gen3A_settings& Gen3A); + + // Focus modes + status_t setFocusMode(Gen3A_settings& Gen3A); + status_t getFocusMode(Gen3A_settings& Gen3A); + + //Exposure Modes + status_t setExposureMode(Gen3A_settings& Gen3A); + status_t setEVCompensation(Gen3A_settings& Gen3A); + status_t setWBMode(Gen3A_settings& Gen3A); + status_t setFlicker(Gen3A_settings& Gen3A); + status_t setBrightness(Gen3A_settings& Gen3A); + status_t setContrast(Gen3A_settings& Gen3A); + status_t setSharpness(Gen3A_settings& Gen3A); + status_t setSaturation(Gen3A_settings& Gen3A); + status_t setISO(Gen3A_settings& Gen3A); + status_t setEffect(Gen3A_settings& Gen3A); + status_t setMeteringAreas(Gen3A_settings& Gen3A); + + status_t getEVCompensation(Gen3A_settings& Gen3A); + status_t getWBMode(Gen3A_settings& Gen3A); + status_t getSharpness(Gen3A_settings& Gen3A); + status_t getSaturation(Gen3A_settings& Gen3A); + status_t getISO(Gen3A_settings& Gen3A); + + // 3A locks + status_t setExposureLock(Gen3A_settings& Gen3A); + status_t setFocusLock(Gen3A_settings& Gen3A); + status_t 
setWhiteBalanceLock(Gen3A_settings& Gen3A); + status_t set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_BOOL toggleFocus); + + //API to set FrameRate using VFR interface + status_t setVFramerate(OMX_U32 minFrameRate,OMX_U32 maxFrameRate); + + status_t setParametersAlgo(const CameraParameters ¶ms, + BaseCameraAdapter::AdapterState state); + + //Noise filtering + status_t setNSF(OMXCameraAdapter::IPPMode mode); + + //LDC + status_t setLDC(OMXCameraAdapter::IPPMode mode); + + //GLBCE + status_t setGLBCE(OMXCameraAdapter::BrightnessMode mode); + + //GBCE + status_t setGBCE(OMXCameraAdapter::BrightnessMode mode); + + status_t printComponentVersion(OMX_HANDLETYPE handle); + + //Touch AF + status_t setTouchFocus(); + + //Face detection + status_t setParametersFD(const CameraParameters ¶ms, + BaseCameraAdapter::AdapterState state); + status_t updateFocusDistances(CameraParameters ¶ms); + status_t setFaceDetection(bool enable, OMX_U32 orientation); + status_t detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader, + sp &result, + size_t previewWidth, + size_t previewHeight); + status_t encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData, + camera_frame_metadata_t **pFaces, + size_t previewWidth, + size_t previewHeight); + void pauseFaceDetection(bool pause); + + //3A Algorithms priority configuration + status_t setAlgoPriority(AlgoPriority priority, Algorithm3A algo, bool enable); + + //Sensor overclocking + status_t setSensorOverclock(bool enable); + + // Utility methods for OMX Capabilities + static status_t insertCapabilities(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t encodeSizeCap(OMX_TI_CAPRESTYPE&, const CapResolution *, size_t, char *, size_t); + static status_t encodeISOCap(OMX_U32, const CapISO*, size_t, char*, size_t); + static size_t encodeZoomCap(OMX_S32, const CapZoom*, size_t, char*, size_t); + static status_t encodeFramerateCap(OMX_U32, OMX_U32, const CapFramerate*, size_t, char*, size_t); + static status_t 
encodeVFramerateCap(OMX_TI_CAPTYPE&, const CapU32Pair*, size_t, char*, char*, size_t); + static status_t encodePixelformatCap(OMX_COLOR_FORMATTYPE, + const CapPixelformat*, + size_t, + char*, + size_t); + static status_t insertImageSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertPreviewSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertThumbSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertZoomStages(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertImageFormats(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertPreviewFormats(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertFramerates(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertVFramerates(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertEVs(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertISOModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertIPPModes(CameraProperties::Properties*, OMX_TI_CAPTYPE &); + static status_t insertWBModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertEffects(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertExpModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertSceneModes(CameraProperties::Properties*, OMX_TI_CAPTYPE &); + static status_t insertFocusModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertFlickerModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertFlashModes(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertSenMount(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertDefaults(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertLocks(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t 
insertAreas(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + static status_t insertVideoSizes(CameraProperties::Properties*, OMX_TI_CAPTYPE&); + + status_t setParametersCapture(const CameraParameters ¶ms, + BaseCameraAdapter::AdapterState state); + + //Exposure Bracketing + status_t setExposureBracketing(int *evValues, size_t evCount, size_t frameCount); + status_t parseExpRange(const char *rangeStr, int * expRange, size_t count, size_t &validEntries); + + //Temporal Bracketing + status_t doBracketing(OMX_BUFFERHEADERTYPE *pBuffHeader, CameraFrame::FrameType typeOfFrame); + status_t sendBracketFrames(); + + // Image Capture Service + status_t startImageCapture(); + status_t disableImagePort(); + + //Shutter callback notifications + status_t setShutterCallback(bool enabled); + + //Sets eithter HQ or HS mode and the frame count + status_t setCaptureMode(OMXCameraAdapter::CaptureMode mode); + status_t UseBuffersCapture(void* bufArr, int num); + status_t UseBuffersPreviewData(void* bufArr, int num); + + //Used for calculation of the average frame rate during preview + status_t recalculateFPS(); + + //Helper method for initializing a CameFrame object + status_t initCameraFrame(CameraFrame &frame, OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, int typeOfFrame, OMXCameraPortParameters *port); + + //Sends the incoming OMX buffer header to subscribers + status_t sendFrame(CameraFrame &frame); + + status_t sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEADERTYPE *pBuffHeader, unsigned int mask, OMXCameraPortParameters *port); + + status_t apply3Asettings( Gen3A_settings& Gen3A ); + status_t init3AParams(Gen3A_settings &Gen3A); + + // AutoConvergence + status_t setAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE pACMode, OMX_S32 pManualConverence); + status_t getAutoConvergence(OMX_TI_AUTOCONVERGENCEMODETYPE *pACMode, OMX_S32 *pManualConverence); + + status_t setExtraData(bool enable, OMX_U32, OMX_EXT_EXTRADATATYPE); + OMX_OTHER_EXTRADATATYPE 
*getExtradata(OMX_OTHER_EXTRADATATYPE *extraData, OMX_U32 extraDataSize, OMX_EXTRADATATYPE type); + + class CommandHandler : public Thread { + public: + CommandHandler(OMXCameraAdapter* ca) + : Thread(false), mCameraAdapter(ca) { } + + virtual bool threadLoop() { + bool ret; + ret = Handler(); + return ret; + } + + status_t put(TIUTILS::Message* msg){ + Mutex::Autolock lock(mLock); + return mCommandMsgQ.put(msg); + } + + void clearCommandQ() + { + Mutex::Autolock lock(mLock); + mCommandMsgQ.clear(); + } + + enum { + COMMAND_EXIT = -1, + CAMERA_START_IMAGE_CAPTURE = 0, + CAMERA_PERFORM_AUTOFOCUS = 1, + CAMERA_SWITCH_TO_EXECUTING, + }; + + private: + bool Handler(); + TIUTILS::MessageQueue mCommandMsgQ; + OMXCameraAdapter* mCameraAdapter; + Mutex mLock; + }; + sp mCommandHandler; + +public: + + class OMXCallbackHandler : public Thread { + public: + OMXCallbackHandler(OMXCameraAdapter* ca) + : Thread(false), mCameraAdapter(ca) { } + + virtual bool threadLoop() { + bool ret; + ret = Handler(); + return ret; + } + + status_t put(TIUTILS::Message* msg){ + Mutex::Autolock lock(mLock); + return mCommandMsgQ.put(msg); + } + + void clearCommandQ() + { + Mutex::Autolock lock(mLock); + mCommandMsgQ.clear(); + } + + enum { + COMMAND_EXIT = -1, + CAMERA_FILL_BUFFER_DONE, + CAMERA_FOCUS_STATUS, + }; + + private: + bool Handler(); + TIUTILS::MessageQueue mCommandMsgQ; + OMXCameraAdapter* mCameraAdapter; + Mutex mLock; + }; + + sp mOMXCallbackHandler; + +private: + + //AF callback + status_t setFocusCallback(bool enabled); + + //OMX Capabilities data + static const CapResolution mImageCapRes []; + static const CapResolution mPreviewRes []; + static const CapResolution mThumbRes []; + static const CapPixelformat mPixelformats []; + static const CapFramerate mFramerates []; + static const CapU32 mSensorNames[] ; + static const CapZoom mZoomStages []; + static const CapEVComp mEVCompRanges []; + static const CapISO mISOStages []; + static const CapU32Pair mVarFramerates []; + + // OMX 
Camera defaults + static const char DEFAULT_ANTIBANDING[]; + static const char DEFAULT_BRIGHTNESS[]; + static const char DEFAULT_CONTRAST[]; + static const char DEFAULT_EFFECT[]; + static const char DEFAULT_EV_COMPENSATION[]; + static const char DEFAULT_EV_STEP[]; + static const char DEFAULT_EXPOSURE_MODE[]; + static const char DEFAULT_FLASH_MODE[]; + static const char DEFAULT_FOCUS_MODE_PREFERRED[]; + static const char DEFAULT_FOCUS_MODE[]; + static const char DEFAULT_FRAMERATE_RANGE_IMAGE[]; + static const char DEFAULT_FRAMERATE_RANGE_VIDEO[]; + static const char DEFAULT_IPP[]; + static const char DEFAULT_GBCE[]; + static const char DEFAULT_ISO_MODE[]; + static const char DEFAULT_JPEG_QUALITY[]; + static const char DEFAULT_THUMBNAIL_QUALITY[]; + static const char DEFAULT_THUMBNAIL_SIZE[]; + static const char DEFAULT_PICTURE_FORMAT[]; + static const char DEFAULT_PICTURE_SIZE[]; + static const char DEFAULT_PREVIEW_FORMAT[]; + static const char DEFAULT_FRAMERATE[]; + static const char DEFAULT_PREVIEW_SIZE[]; + static const char DEFAULT_NUM_PREV_BUFS[]; + static const char DEFAULT_NUM_PIC_BUFS[]; + static const char DEFAULT_MAX_FOCUS_AREAS[]; + static const char DEFAULT_SATURATION[]; + static const char DEFAULT_SCENE_MODE[]; + static const char DEFAULT_SHARPNESS[]; + static const char DEFAULT_VSTAB[]; + static const char DEFAULT_VSTAB_SUPPORTED[]; + static const char DEFAULT_WB[]; + static const char DEFAULT_ZOOM[]; + static const char DEFAULT_MAX_FD_HW_FACES[]; + static const char DEFAULT_MAX_FD_SW_FACES[]; + static const char DEFAULT_AE_LOCK[]; + static const char DEFAULT_AWB_LOCK[]; + static const char DEFAULT_MAX_NUM_METERING_AREAS[]; + static const char DEFAULT_LOCK_SUPPORTED[]; + static const char DEFAULT_LOCK_UNSUPPORTED[]; + static const char DEFAULT_FOCAL_LENGTH_PRIMARY[]; + static const char DEFAULT_FOCAL_LENGTH_SECONDARY[]; + static const char DEFAULT_HOR_ANGLE[]; + static const char DEFAULT_VER_ANGLE[]; + static const char 
DEFAULT_VIDEO_SNAPSHOT_SUPPORTED[]; + static const char DEFAULT_VIDEO_SIZE[]; + static const char DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[]; + static const size_t MAX_FOCUS_AREAS; + + OMX_VERSIONTYPE mCompRevision; + + //OMX Component UUID + OMX_UUIDTYPE mCompUUID; + + //Current Focus distances + char mFocusDistNear[FOCUS_DIST_SIZE]; + char mFocusDistOptimal[FOCUS_DIST_SIZE]; + char mFocusDistFar[FOCUS_DIST_SIZE]; + char mFocusDistBuffer[FOCUS_DIST_BUFFER_SIZE]; + + // Current Focus areas + Vector< sp > mFocusAreas; + mutable Mutex mFocusAreasLock; + + // Current Metering areas + Vector< sp > mMeteringAreas; + mutable Mutex mMeteringAreasLock; + + CaptureMode mCapMode; + size_t mBurstFrames; + size_t mCapturedFrames; + + bool mMeasurementEnabled; + + //Exposure Bracketing + int mExposureBracketingValues[EXP_BRACKET_RANGE]; + size_t mExposureBracketingValidEntries; + + mutable Mutex mFaceDetectionLock; + //Face detection status + bool mFaceDetectionRunning; + bool mFaceDetectionPaused; + bool mFDSwitchAlgoPriority; + + camera_face_t faceDetectionLastOutput [MAX_NUM_FACES_SUPPORTED]; + int faceDetectionNumFacesLastOutput; + + //Geo-tagging + EXIFData mEXIFData; + + //Image post-processing + IPPMode mIPP; + + //jpeg Picture Quality + unsigned int mPictureQuality; + + //thumbnail resolution + unsigned int mThumbWidth, mThumbHeight; + + //thumbnail quality + unsigned int mThumbQuality; + + //variables holding the estimated framerate + float mFPS, mLastFPS; + + //automatically disable AF after a given amount of frames + unsigned int mFocusThreshold; + + //This is needed for the CTS tests. They falsely assume, that during + //smooth zoom the current zoom stage will not change within the + //zoom callback scope, which in a real world situation is not always the + //case. 
This variable will "simulate" the expected behavior + unsigned int mZoomParameterIdx; + + //current zoom + Mutex mZoomLock; + unsigned int mCurrentZoomIdx, mTargetZoomIdx, mPreviousZoomIndx; + bool mZoomUpdating, mZoomUpdate; + int mZoomInc; + bool mReturnZoomStatus; + static const int32_t ZOOM_STEPS []; + + //local copy + OMX_VERSIONTYPE mLocalVersionParam; + + unsigned int mPending3Asettings; + Mutex m3ASettingsUpdateLock; + Gen3A_settings mParameters3A; + const char *mPictureFormatFromClient; + + OMX_TI_CONFIG_3A_FACE_PRIORITY mFacePriority; + OMX_TI_CONFIG_3A_REGION_PRIORITY mRegionPriority; + + CameraParameters mParams; + CameraProperties::Properties* mCapabilities; + unsigned int mPictureRotation; + bool mWaitingForSnapshot; + int mSnapshotCount; + bool mCaptureConfigured; + unsigned int mPendingCaptureSettings; + OMX_TI_ANCILLARYDATATYPE* mCaptureAncillaryData; + OMX_TI_WHITEBALANCERESULTTYPE* mWhiteBalanceData; + + //Temporal bracketing management data + mutable Mutex mBracketingLock; + bool *mBracketingBuffersQueued; + int mBracketingBuffersQueuedCount; + int mLastBracetingBufferIdx; + bool mBracketingEnabled; + int mBracketingRange; + + bool mIternalRecordingHint; + + CameraParameters mParameters; + bool mOmxInitialized; + OMXCameraAdapterComponentContext mCameraAdapterParameters; + bool mFirstTimeInit; + + ///Semaphores used internally + Semaphore mInitSem; + Semaphore mFlushSem; + Semaphore mUsePreviewDataSem; + Semaphore mUsePreviewSem; + Semaphore mUseCaptureSem; + Semaphore mStartPreviewSem; + Semaphore mStopPreviewSem; + Semaphore mStartCaptureSem; + Semaphore mStopCaptureSem; + Semaphore mSwitchToLoadedSem; + Semaphore mSwitchToExecSem; + + mutable Mutex mStateSwitchLock; + + Vector mEventSignalQ; + Mutex mEventLock; + + OMX_STATETYPE mComponentState; + + bool mVnfEnabled; + bool mVstabEnabled; + + int mSensorOrientation; + int mDeviceOrientation; + bool mSensorOverclock; + + //Indicates if we should leave + //OMX_Executing state during + 
//stop-/startPreview + bool mOMXStateSwitch; + + int mFrameCount; + int mLastFrameCount; + unsigned int mIter; + nsecs_t mLastFPSTime; + Mutex mFrameCountMutex; + Condition mFirstFrameCondition; + + Mutex mDoAFMutex; + Condition mDoAFCond; + + size_t mSensorIndex; + CodingMode mCodingMode; + + // Time source delta of ducati & system time + OMX_TICKS mTimeSourceDelta; + bool onlyOnce; + + Semaphore mCaptureSem; + bool mCaptureSignalled; + + OMX_BOOL mUserSetExpLock; + OMX_BOOL mUserSetWbLock; + + Mutex mImageCaptureLock; +}; +}; //// namespace +#endif //OMX_CAMERA_ADAPTER_H + diff --git a/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h b/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h new file mode 100644 index 0000000..205a87b --- /dev/null +++ b/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h @@ -0,0 +1,247 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* @file OMXSceneModeTables.h +* +* This holds scene mode settings for different omx cameras. 
+* +*/ + +#include "OMX_TI_IVCommon.h" +#include "OMX_TI_Common.h" +#include "OMX_TI_Index.h" + +#ifndef OMXCAMERAADAPTER_SCENEMODES_H +#define OMXCAMERAADAPTER_SCENEMODES_H + +#ifndef ARRAY_SIZE +#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0])) +#endif + +struct SceneModesEntry { + OMX_SCENEMODETYPE scene; + OMX_IMAGE_FLASHCONTROLTYPE flash; + int focus; + OMX_WHITEBALCONTROLTYPE wb; +}; + +struct CameraToSensorModesLUTEntry { + const char* name; + const SceneModesEntry* Table; + const unsigned int size; +}; + +static const SceneModesEntry S5K4E1GA_SceneModesLUT [] = { + { OMX_Closeup, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlAutoMacro, + OMX_WhiteBalControlAuto }, + { OMX_Landscape, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_Underwater, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlAutoLock, + OMX_WhiteBalControlSunLight }, + { OMX_Sport, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_Mood, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlAutoLock, + OMX_WhiteBalControlAuto }, + { OMX_NightPortrait, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlPortrait, + OMX_WhiteBalControlAuto }, + { OMX_NightIndoor, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_Fireworks, + OMX_IMAGE_FlashControlOn, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_Document, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlAutoMacro, + OMX_WhiteBalControlAuto }, + { OMX_Barcode, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlAutoMacro, + OMX_WhiteBalControlAuto }, + { OMX_SuperNight, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_Cine, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_OldFilm, + OMX_IMAGE_FlashControlAuto, + 
OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_TI_Action, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlAuto, + OMX_WhiteBalControlAuto }, + { OMX_TI_Beach, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlAutoLock, + OMX_WhiteBalControlAuto }, + { OMX_TI_Candlelight, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlIncandescent }, + { OMX_TI_Night, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlAuto, + OMX_WhiteBalControlAuto }, + { OMX_TI_Party, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlAuto, + OMX_WhiteBalControlAuto }, + { OMX_TI_Portrait, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlPortrait, + OMX_WhiteBalControlAuto }, + { OMX_TI_Snow, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlAutoLock, + OMX_WhiteBalControlAuto }, + { OMX_TI_Steadyphoto, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_TI_Sunset, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlAuto, + OMX_WhiteBalControlSunLight }, + { OMX_TI_Theatre, + OMX_IMAGE_FlashControlAuto, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, +}; + +static const SceneModesEntry S5K6A1GX03_SceneModesLUT [] = { + { OMX_Closeup, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_Landscape, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_Underwater, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlSunLight }, + { OMX_Sport, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_SnowBeach, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_Mood, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_NightPortrait, + OMX_IMAGE_FlashControlOff, + 
OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_NightIndoor, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_Fireworks, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_Document, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_Barcode, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_SuperNight, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_Cine, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_OldFilm, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_TI_Action, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_TI_Beach, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_TI_Candlelight, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlIncandescent }, + { OMX_TI_Night, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_TI_Party, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_TI_Portrait, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_TI_Snow, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_TI_Steadyphoto, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, + { OMX_TI_Sunset, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlSunLight }, + { OMX_TI_Theatre, + OMX_IMAGE_FlashControlOff, + OMX_IMAGE_FocusControlHyperfocal, + OMX_WhiteBalControlAuto }, 
+}; + +static const CameraToSensorModesLUTEntry CameraToSensorModesLUT [] = { + { "S5K4E1GA", S5K4E1GA_SceneModesLUT, ARRAY_SIZE(S5K4E1GA_SceneModesLUT)}, + { "S5K6A1GX03", S5K6A1GX03_SceneModesLUT, ARRAY_SIZE(S5K6A1GX03_SceneModesLUT)}, +}; + +#endif diff --git a/camera/inc/SensorListener.h b/camera/inc/SensorListener.h new file mode 100644 index 0000000..913eb95 --- /dev/null +++ b/camera/inc/SensorListener.h @@ -0,0 +1,101 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/** +* @file SensorListener.h +* +* This defines API for camerahal to get sensor events +* +*/ + +#ifndef ANDROID_CAMERA_HARDWARE_SENSOR_LISTENER_H +#define ANDROID_CAMERA_HARDWARE_SENSOR_LISTENER_H + +#include +#include +#include +#include +#include + +namespace android { + +/** + * SensorListner class - Registers with sensor manager to get sensor events + */ + +typedef void (*orientation_callback_t) (uint32_t orientation, uint32_t tilt, void* cookie); + +class SensorLooperThread : public Thread { + public: + SensorLooperThread(Looper* looper) + : Thread(false) { + mLooper = sp(looper); + } + ~SensorLooperThread() { + mLooper.clear(); + } + + virtual bool threadLoop() { + int32_t ret = mLooper->pollOnce(-1); + return true; + } + + // force looper wake up + void wake() { + mLooper->wake(); + } + private: + sp mLooper; +}; + + +class SensorListener : public RefBase +{ +/* public - types */ +public: + typedef enum { + SENSOR_ACCELEROMETER = 1 << 0, + SENSOR_MAGNETIC_FIELD = 1 << 1, + SENSOR_GYROSCOPE = 1 << 2, + SENSOR_LIGHT = 1 << 3, + SENSOR_PROXIMITY = 1 << 4, + SENSOR_ORIENTATION = 1 << 5, + } sensor_type_t; +/* public - functions */ +public: + SensorListener(); + ~SensorListener(); + status_t initialize(); + void setCallbacks(orientation_callback_t orientation_cb, void *cookie); + void enableSensor(sensor_type_t type); + void disableSensor(sensor_type_t type); + void handleOrientation(uint32_t orientation, uint32_t tilt); +/* public - member variables */ +public: + sp mSensorEventQueue; +/* private - member variables */ +private: + int sensorsEnabled; + orientation_callback_t mOrientationCb; + void *mCbCookie; + sp mLooper; + sp mSensorLooperThread; + Mutex mLock; +}; + +} + +#endif diff --git a/camera/inc/TICameraParameters.h b/camera/inc/TICameraParameters.h new file mode 100644 index 0000000..4701cae --- /dev/null +++ b/camera/inc/TICameraParameters.h @@ -0,0 +1,242 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under 
the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + + +#ifndef TI_CAMERA_PARAMETERS_H +#define TI_CAMERA_PARAMETERS_H + +#include +#include + +namespace android { + +///TI Specific Camera Parameters +class TICameraParameters +{ +public: + +// Supported Camera indexes +// Example value: "0,1,2,3", where 0-primary, 1-secondary1, 2-secondary2, 3-sterocamera +static const char KEY_SUPPORTED_CAMERAS[]; +// Select logical Camera index +static const char KEY_CAMERA[]; +static const char KEY_CAMERA_NAME[]; +static const char KEY_S3D_SUPPORTED[]; +static const char KEY_BURST[]; +static const char KEY_CAP_MODE[]; +static const char KEY_VNF[]; +static const char KEY_SATURATION[]; +static const char KEY_BRIGHTNESS[]; +static const char KEY_EXPOSURE_MODE[]; +static const char KEY_SUPPORTED_EXPOSURE[]; +static const char KEY_CONTRAST[]; +static const char KEY_SHARPNESS[]; +static const char KEY_ISO[]; +static const char KEY_SUPPORTED_ISO_VALUES[]; +static const char KEY_SUPPORTED_IPP[]; +static const char KEY_IPP[]; +static const char KEY_MAN_EXPOSURE[]; +static const char KEY_METERING_MODE[]; +static const char KEY_PADDED_WIDTH[]; +static const char KEY_PADDED_HEIGHT[]; +static const char KEY_EXP_BRACKETING_RANGE[]; +static const char KEY_TEMP_BRACKETING[]; +static const char KEY_TEMP_BRACKETING_RANGE_POS[]; +static const char KEY_TEMP_BRACKETING_RANGE_NEG[]; +static const char KEY_SHUTTER_ENABLE[]; +static const char KEY_MEASUREMENT_ENABLE[]; +static const char KEY_INITIAL_VALUES[]; 
+static const char KEY_GBCE[]; +static const char KEY_GLBCE[]; +static const char KEY_MINFRAMERATE[]; +static const char KEY_MAXFRAMERATE[]; + +// TI recording hint to notify camera adapters of possible recording +static const char KEY_RECORDING_HINT[]; +static const char KEY_AUTO_FOCUS_LOCK[]; +static const char KEY_CURRENT_ISO[]; + +static const char KEY_SENSOR_ORIENTATION[]; +static const char KEY_SENSOR_ORIENTATION_VALUES[]; + +//TI extensions for zoom +static const char ZOOM_SUPPORTED[]; +static const char ZOOM_UNSUPPORTED[]; + +//TI extensions for camera capabilies +static const char INITIAL_VALUES_TRUE[]; +static const char INITIAL_VALUES_FALSE[]; + +//TI extensions for enabling/disabling measurements +static const char MEASUREMENT_ENABLE[]; +static const char MEASUREMENT_DISABLE[]; + +// TI extensions to add values for ManualConvergence and AutoConvergence mode +static const char KEY_AUTOCONVERGENCE[]; +static const char KEY_AUTOCONVERGENCE_MODE[]; +static const char KEY_MANUALCONVERGENCE_VALUES[]; + +//TI extensions for enabling/disabling GLBCE +static const char GLBCE_ENABLE[]; +static const char GLBCE_DISABLE[]; + +//TI extensions for enabling/disabling GBCE +static const char GBCE_ENABLE[]; +static const char GBCE_DISABLE[]; + +// TI extensions to add Min frame rate Values +static const char VIDEO_MINFRAMERATE_5[]; +static const char VIDEO_MINFRAMERATE_10[]; +static const char VIDEO_MINFRAMERATE_15[]; +static const char VIDEO_MINFRAMERATE_20[]; +static const char VIDEO_MINFRAMERATE_24[]; +static const char VIDEO_MINFRAMERATE_25[]; +static const char VIDEO_MINFRAMERATE_30[]; +static const char VIDEO_MINFRAMERATE_33[]; + +// TI extensions for Manual Gain and Manual Exposure +static const char KEY_MANUAL_EXPOSURE_LEFT[]; +static const char KEY_MANUAL_EXPOSURE_RIGHT[]; +static const char KEY_MANUAL_EXPOSURE_MODES[]; +static const char KEY_MANUAL_GAIN_EV_RIGHT[]; +static const char KEY_MANUAL_GAIN_EV_LEFT[]; +static const char KEY_MANUAL_GAIN_ISO_RIGHT[]; 
+static const char KEY_MANUAL_GAIN_ISO_LEFT[]; +static const char KEY_MANUAL_GAIN_MODES[]; + +//TI extensions for setting EXIF tags +static const char KEY_EXIF_MODEL[]; +static const char KEY_EXIF_MAKE[]; + +//TI extensions for additional GPS data +static const char KEY_GPS_MAPDATUM[]; +static const char KEY_GPS_VERSION[]; +static const char KEY_GPS_DATESTAMP[]; + +//TI extensions for enabling/disabling shutter sound +static const char SHUTTER_ENABLE[]; +static const char SHUTTER_DISABLE[]; + +//TI extensions for Temporal bracketing +static const char BRACKET_ENABLE[]; +static const char BRACKET_DISABLE[]; + +//TI extensions to Image post-processing +static const char IPP_LDCNSF[]; +static const char IPP_LDC[]; +static const char IPP_NSF[]; +static const char IPP_NONE[]; + +//TI extensions to camera mode +static const char HIGH_PERFORMANCE_MODE[]; +static const char HIGH_QUALITY_MODE[]; +static const char HIGH_QUALITY_ZSL_MODE[]; +static const char VIDEO_MODE[]; + + +// TI extensions to standard android pixel formats +static const char PIXEL_FORMAT_RAW[]; +static const char PIXEL_FORMAT_JPS[]; +static const char PIXEL_FORMAT_MPO[]; +static const char PIXEL_FORMAT_RAW_JPEG[]; +static const char PIXEL_FORMAT_RAW_MPO[]; + +// TI extensions to standard android scene mode settings +static const char SCENE_MODE_SPORT[]; +static const char SCENE_MODE_CLOSEUP[]; +static const char SCENE_MODE_AQUA[]; +static const char SCENE_MODE_SNOWBEACH[]; +static const char SCENE_MODE_MOOD[]; +static const char SCENE_MODE_NIGHT_INDOOR[]; +static const char SCENE_MODE_DOCUMENT[]; +static const char SCENE_MODE_BARCODE[]; +static const char SCENE_MODE_VIDEO_SUPER_NIGHT[]; +static const char SCENE_MODE_VIDEO_CINE[]; +static const char SCENE_MODE_VIDEO_OLD_FILM[]; + +// TI extensions to standard android white balance settings. 
+static const char WHITE_BALANCE_TUNGSTEN[]; +static const char WHITE_BALANCE_HORIZON[]; +static const char WHITE_BALANCE_SUNSET[]; +static const char WHITE_BALANCE_FACE[]; + +// TI extensions to add exposure preset modes to android api +static const char EXPOSURE_MODE_OFF[]; +static const char EXPOSURE_MODE_AUTO[]; +static const char EXPOSURE_MODE_NIGHT[]; +static const char EXPOSURE_MODE_BACKLIGHT[]; +static const char EXPOSURE_MODE_SPOTLIGHT[]; +static const char EXPOSURE_MODE_SPORTS[]; +static const char EXPOSURE_MODE_SNOW[]; +static const char EXPOSURE_MODE_BEACH[]; +static const char EXPOSURE_MODE_APERTURE[]; +static const char EXPOSURE_MODE_SMALL_APERTURE[]; +static const char EXPOSURE_MODE_FACE[]; + +// TI extensions to standard android focus presets. +static const char FOCUS_MODE_PORTRAIT[]; +static const char FOCUS_MODE_EXTENDED[]; +static const char FOCUS_MODE_FACE[]; + +// TI extensions to add iso values +static const char ISO_MODE_AUTO[]; +static const char ISO_MODE_100[]; +static const char ISO_MODE_200[]; +static const char ISO_MODE_400[]; +static const char ISO_MODE_800[]; +static const char ISO_MODE_1000[]; +static const char ISO_MODE_1200[]; +static const char ISO_MODE_1600[]; + +// TI extensions to add values for effect settings. +static const char EFFECT_NATURAL[]; +static const char EFFECT_VIVID[]; +static const char EFFECT_COLOR_SWAP[]; +static const char EFFECT_BLACKWHITE[]; + +static const char KEY_S3D2D_PREVIEW[]; +static const char KEY_S3D2D_PREVIEW_MODE[]; + +// TI extensions to add values for AutoConvergence settings. 
+static const char AUTOCONVERGENCE_MODE_DISABLE[]; +static const char AUTOCONVERGENCE_MODE_FRAME[]; +static const char AUTOCONVERGENCE_MODE_CENTER[]; +static const char AUTOCONVERGENCE_MODE_FFT[]; +static const char AUTOCONVERGENCE_MODE_MANUAL[]; + + +//TI extensions for flash mode settings +static const char FLASH_MODE_FILL_IN[]; + +//TI extensions to add sensor orientation parameters +static const char ORIENTATION_SENSOR_NONE[]; +static const char ORIENTATION_SENSOR_90[]; +static const char ORIENTATION_SENSOR_180[]; +static const char ORIENTATION_SENSOR_270[]; + + +//TI values for camera direction +static const char FACING_FRONT[]; +static const char FACING_BACK[]; + +}; + +}; + +#endif + diff --git a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h new file mode 100644 index 0000000..b9d3952 --- /dev/null +++ b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h @@ -0,0 +1,158 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + + +#ifndef V4L_CAMERA_ADAPTER_H +#define V4L_CAMERA_ADAPTER_H + +#include "CameraHal.h" +#include "BaseCameraAdapter.h" +#include "DebugUtils.h" + +namespace android { + +#define DEFAULT_PIXEL_FORMAT V4L2_PIX_FMT_YUYV +#define NB_BUFFER 10 +#define DEVICE "/dev/video4" + + +struct VideoInfo { + struct v4l2_capability cap; + struct v4l2_format format; + struct v4l2_buffer buf; + struct v4l2_requestbuffers rb; + void *mem[NB_BUFFER]; + bool isStreaming; + int width; + int height; + int formatIn; + int framesizeIn; +}; + + +/** + * Class which completely abstracts the camera hardware interaction from camera hal + * TODO: Need to list down here, all the message types that will be supported by this class + Need to implement BufferProvider interface to use AllocateBuffer of OMX if needed + */ +class V4LCameraAdapter : public BaseCameraAdapter +{ +public: + + /*--------------------Constant declarations----------------------------------------*/ + static const int32_t MAX_NO_BUFFERS = 20; + + ///@remarks OMX Camera has six ports - buffer input, time input, preview, image, video, and meta data + static const int MAX_NO_PORTS = 6; + + ///Five second timeout + static const int CAMERA_ADAPTER_TIMEOUT = 5000*1000; + +public: + + V4LCameraAdapter(); + ~V4LCameraAdapter(); + + + ///Initialzes the camera adapter creates any resources required + virtual status_t initialize(CameraProperties::Properties*, int sensor_index=0); + + //APIs to configure Camera adapter and get the current parameter set + virtual status_t setParameters(const CameraParameters& params); + virtual void getParameters(CameraParameters& params); + + // API + virtual status_t UseBuffersPreview(void* bufArr, int num); + + //API to flush the buffers for preview + status_t flushBuffers(); + +protected: + +//----------Parent class method implementation------------------------------------ + virtual status_t startPreview(); + virtual status_t stopPreview(); + virtual status_t useBuffers(CameraMode mode, void* 
bufArr, int num, size_t length, unsigned int queueable); + virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType); + virtual status_t getFrameSize(size_t &width, size_t &height); + virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount); + virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount); + virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt); +//----------------------------------------------------------------------------- + + +private: + + class PreviewThread : public Thread { + V4LCameraAdapter* mAdapter; + public: + PreviewThread(V4LCameraAdapter* hw) : + Thread(false), mAdapter(hw) { } + virtual void onFirstRef() { + run("CameraPreviewThread", PRIORITY_URGENT_DISPLAY); + } + virtual bool threadLoop() { + mAdapter->previewThread(); + // loop until we need to quit + return true; + } + }; + + //Used for calculation of the average frame rate during preview + status_t recalculateFPS(); + + char * GetFrame(int &index); + + int previewThread(); + +public: + +private: + int mPreviewBufferCount; + KeyedVector mPreviewBufs; + mutable Mutex mPreviewBufsLock; + + CameraParameters mParams; + + bool mPreviewing; + bool mCapturing; + Mutex mLock; + + int mFrameCount; + int mLastFrameCount; + unsigned int mIter; + nsecs_t mLastFPSTime; + + //variables holding the estimated framerate + float mFPS, mLastFPS; + + int mSensorIndex; + + // protected by mLock + sp mPreviewThread; + + struct VideoInfo *mVideoInfo; + int mCameraHandle; + + + int nQueued; + int nDequeued; + +}; +}; //// namespace +#endif //V4L_CAMERA_ADAPTER_H + diff --git a/camera/inc/VideoMetadata.h b/camera/inc/VideoMetadata.h new file mode 100644 index 0000000..f05ee50 --- /dev/null +++ b/camera/inc/VideoMetadata.h @@ -0,0 +1,32 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef VIDEO_METADATA_H
#define VIDEO_METADATA_H

/* This structure is used to pass buffer offset from Camera-Hal to Encoder component
 * for specific algorithms like VSTAB & VNF
 */

/* NOTE: this is a C-compatible ABI struct shared between the HAL and the
 * encoder — do not reorder or retype its fields. */
typedef struct
{
    int metadataBufferType;  /* discriminator for how the metadata is laid out — value set by the HAL; TODO confirm the enum it comes from */
    void* handle;            /* native buffer handle — presumably a gralloc handle; confirm against the encoder side */
    int offset;              /* buffer offset passed to the encoder (see comment above) */
}
video_metadata_t;

#endif
-- cgit v1.1