author     Tyler Luu <tluu@ti.com>        2012-03-26 17:00:52 -0500
committer  Daniel Levin <dendy@ti.com>    2012-07-25 08:55:45 -0500
commit     d67256d3baed24faed7121ebceaebb0c6be2de6d
tree       bd5ea59682378228e49a879fc42e6e8bc540b458
parent     78f1a95ab2745b74c10d861dd9b91012ea7bfc93
camera: initial implementation for BufferSource
Initial implementation to accept and use a BufferSource
(SurfaceTexture) object for buffer management and
data callbacks.
Change-Id: I0c1b8854f162eb3ea00c568a56c485d67f41ebca
Signed-off-by: Tyler Luu <tluu@ti.com>
Signed-off-by: Vladimir Petrov <vppetrov@mm-sol.com>
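
For orientation, the new entry point this patch wires up (camera_set_buffer_source in CameraHal_Module.cpp, forwarding to CameraHal::setBufferSource) would be exercised roughly as below. This is a minimal sketch only: it assumes a camera_device_ops_t extended with the set_buffer_source hook as this change requires, and attachTapOut() plus its error handling are illustrative, not part of the patch.

// Hand a SurfaceTexture-backed tap-out point to the HAL through the
// set_buffer_source() op added by this patch. attachTapOut() is an
// illustrative helper; only set_buffer_source()/preview_stream_ops_t
// come from the change itself.
#include <hardware/camera.h>

static int attachTapOut(camera_device_t *dev, preview_stream_ops_t *tapout)
{
    if (!dev || !dev->ops || !dev->ops->set_buffer_source) {
        return -EINVAL; // HAL built without BufferSource support
    }
    // No tap-in point in this example; the HAL tears down any existing
    // adapter when NULL is passed for a previously set source.
    return dev->ops->set_buffer_source(dev, /* tapin */ NULL, tapout);
}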
-rw-r--r--  camera/ANativeWindowDisplayAdapter.cpp          |  48
-rw-r--r--  camera/Android.mk                                |   1
-rw-r--r--  camera/BufferSourceAdapter.cpp                   | 758
-rw-r--r--  camera/CameraHal.cpp                             | 260
-rw-r--r--  camera/CameraHal_Module.cpp                      |  20
-rw-r--r--  camera/MemoryManager.cpp                         |   7
-rw-r--r--  camera/OMXCameraAdapter/OMXCapabilities.cpp      |   2
-rw-r--r--  camera/OMXCameraAdapter/OMXCapture.cpp           |  29
-rw-r--r--  camera/inc/ANativeWindowDisplayAdapter.h         |   6
-rw-r--r--  camera/inc/BaseCameraAdapter.h                   |   4
-rw-r--r--  camera/inc/BufferSourceAdapter.h                 | 185
-rw-r--r--  camera/inc/CameraHal.h                           |  55
-rw-r--r--  camera/inc/OMXCameraAdapter/OMXCameraAdapter.h   |   1
13 files changed, 1301 insertions(+), 75 deletions(-)
diff --git a/camera/ANativeWindowDisplayAdapter.cpp b/camera/ANativeWindowDisplayAdapter.cpp index 529291c..7c3cc73 100644 --- a/camera/ANativeWindowDisplayAdapter.cpp +++ b/camera/ANativeWindowDisplayAdapter.cpp @@ -65,7 +65,7 @@ OMX_COLOR_FORMATTYPE toOMXPixFormat(const char* parameters_format) return pixFormat; } -const char* getPixFormatConstant(const char* parameters_format) +const char* DisplayAdapter::getPixFormatConstant(const char* parameters_format) const { const char* pixFormat; @@ -103,7 +103,7 @@ const char* getPixFormatConstant(const char* parameters_format) return pixFormat; } -const size_t getBufSize(const char* parameters_format, int width, int height) +size_t DisplayAdapter::getBufSize(const char* parameters_format, int width, int height) const { int buf_size; @@ -688,6 +688,13 @@ CameraBuffer* ANativeWindowDisplayAdapter::allocateBufferList(int width, int hei } +CameraBuffer* ANativeWindowDisplayAdapter::getBufferList(int *numBufs) { + LOG_FUNCTION_NAME; + if (numBufs) *numBufs = -1; + + return NULL; +} + uint32_t * ANativeWindowDisplayAdapter::getOffsets() { const int lnumBufs = mBufferCount; @@ -739,17 +746,9 @@ uint32_t * ANativeWindowDisplayAdapter::getOffsets() return NULL; } -int ANativeWindowDisplayAdapter::maxQueueableBuffers(unsigned int& queueable) -{ +int ANativeWindowDisplayAdapter::minUndequeueableBuffers(int& undequeueable) { LOG_FUNCTION_NAME; int ret = NO_ERROR; - int undequeued = 0; - - if(mBufferCount == 0) - { - ret = -ENOSYS; - goto end; - } if(!mANativeWindow) { @@ -757,18 +756,39 @@ int ANativeWindowDisplayAdapter::maxQueueableBuffers(unsigned int& queueable) goto end; } - ret = mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeued); + ret = mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeueable); if ( NO_ERROR != ret ) { CAMHAL_LOGEB("get_min_undequeued_buffer_count failed: %s (%d)", strerror(-ret), -ret); - if ( ENODEV == ret ) { CAMHAL_LOGEA("Preview surface abandoned!"); mANativeWindow = NULL; } - return -ret; } + end: + return ret; + LOG_FUNCTION_NAME_EXIT; + +} + +int ANativeWindowDisplayAdapter::maxQueueableBuffers(unsigned int& queueable) +{ + LOG_FUNCTION_NAME; + int ret = NO_ERROR; + int undequeued = 0; + + if(mBufferCount == 0) + { + ret = -ENOSYS; + goto end; + } + + ret = minUndequeueableBuffers(undequeued); + if (ret != NO_ERROR) { + goto end; + } + queueable = mBufferCount - undequeued; end: diff --git a/camera/Android.mk b/camera/Android.mk index bfaff82..68e30f0 100644 --- a/camera/Android.mk +++ b/camera/Android.mk @@ -40,6 +40,7 @@ OMAP4_CAMERA_HAL_SRC := \ CameraHalUtilClasses.cpp \ AppCallbackNotifier.cpp \ ANativeWindowDisplayAdapter.cpp \ + BufferSourceAdapter.cpp \ CameraProperties.cpp \ BaseCameraAdapter.cpp \ MemoryManager.cpp \ diff --git a/camera/BufferSourceAdapter.cpp b/camera/BufferSourceAdapter.cpp new file mode 100644 index 0000000..edf78fb --- /dev/null +++ b/camera/BufferSourceAdapter.cpp @@ -0,0 +1,758 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "BufferSourceAdapter.h" +#include <ui/GraphicBuffer.h> +#include <ui/GraphicBufferMapper.h> +#include <hal_public.h> + +namespace android { + +static int getANWFormat(const char* parameters_format) +{ + int format = HAL_PIXEL_FORMAT_TI_NV12_1D; + + if (parameters_format != NULL) { + if (strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV422I) == 0) { + CAMHAL_LOGDA("CbYCrY format selected"); + // TODO(XXX): not defined yet + format = -1; + } else if (strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) { + CAMHAL_LOGDA("YUV420SP format selected"); + format = HAL_PIXEL_FORMAT_TI_NV12_1D; + } else if (strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_RGB565) == 0) { + CAMHAL_LOGDA("RGB565 format selected"); + // TODO(XXX): not defined yet + format = -1; + } else { + CAMHAL_LOGDA("Invalid format, NV12 format selected as default"); + format = HAL_PIXEL_FORMAT_TI_NV12_1D; + } + } + + return format; +} + +static const char* getFormatFromANW(int format) +{ + switch (format) { + case HAL_PIXEL_FORMAT_TI_NV12_1D: + // Assuming NV12 1D is RAW or Image frame + return CameraParameters::PIXEL_FORMAT_YUV420SP; + default: + break; + } + return CameraParameters::PIXEL_FORMAT_YUV420SP; +} + +static CameraFrame::FrameType formatToOutputFrameType(const char* format) { + switch (getANWFormat(format)) { + case HAL_PIXEL_FORMAT_TI_NV12_1D: + // Assuming NV12 1D is RAW or Image frame + return CameraFrame::RAW_FRAME; + case HAL_PIXEL_FORMAT_TI_NV12: + // Assuming NV12 2D is preview or postview frame + return CameraFrame::PREVIEW_FRAME_SYNC; + default: + break; + } + return CameraFrame::RAW_FRAME; +} + +/*--------------------BufferSourceAdapter Class STARTS here-----------------------------*/ + + +/** + * Display Adapter class STARTS here.. 
+ */ +BufferSourceAdapter::BufferSourceAdapter() : mBufferCount(0) +{ + LOG_FUNCTION_NAME; + + mPixelFormat = NULL; + mBuffers = NULL; + mFrameProvider = NULL; + mBufferSource = NULL; + + mFrameWidth = 0; + mFrameHeight = 0; + mPreviewWidth = 0; + mPreviewHeight = 0; + + LOG_FUNCTION_NAME_EXIT; +} + +BufferSourceAdapter::~BufferSourceAdapter() +{ + LOG_FUNCTION_NAME; + + destroy(); + + if (mFrameProvider) { + // Unregister with the frame provider + mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES); + delete mFrameProvider; + mFrameProvider = NULL; + } + + if (mQueueFrame.get()) { + mQueueFrame->requestExit(); + mQueueFrame.clear(); + } + + if (mReturnFrame.get()) { + mReturnFrame->requestExit(); + mReturnFrame.clear(); + } + + if( mBuffers != NULL) + { + delete [] mBuffers; + mBuffers = NULL; + } + + LOG_FUNCTION_NAME_EXIT; +} + +status_t BufferSourceAdapter::initialize() +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + mReturnFrame.clear(); + mReturnFrame = new ReturnFrame(this); + mReturnFrame->run(); + + mQueueFrame.clear(); + mQueueFrame = new QueueFrame(this); + mQueueFrame->run(); + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +int BufferSourceAdapter::setPreviewWindow(preview_stream_ops_t *source) +{ + LOG_FUNCTION_NAME; + + if (!source) { + CAMHAL_LOGEA("NULL window object passed to DisplayAdapter"); + LOG_FUNCTION_NAME_EXIT; + return BAD_VALUE; + } + + if ( source == mBufferSource ) { + return ALREADY_EXISTS; + } + + // Destroy the existing source, if it exists + destroy(); + + // Move to new source obj + mBufferSource = source; + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; +} + +int BufferSourceAdapter::setFrameProvider(FrameNotifier *frameProvider) +{ + LOG_FUNCTION_NAME; + + if ( !frameProvider ) { + CAMHAL_LOGEA("NULL passed for frame provider"); + LOG_FUNCTION_NAME_EXIT; + return BAD_VALUE; + } + + if ( NULL != mFrameProvider ) { + delete mFrameProvider; + } + + mFrameProvider = new FrameProvider(frameProvider, this, frameCallback); + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; +} + +int BufferSourceAdapter::setErrorHandler(ErrorNotifier *errorNotifier) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + if ( NULL == errorNotifier ) { + CAMHAL_LOGEA("Invalid Error Notifier reference"); + return -EINVAL; + } + + mErrorNotifier = errorNotifier; + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + +int BufferSourceAdapter::enableDisplay(int width, int height, + struct timeval *refTime) +{ + LOG_FUNCTION_NAME; + CameraFrame::FrameType frameType; + + if (mFrameProvider == NULL) { + // no-op frame provider not set yet + return NO_ERROR; + } + + if (mBufferSourceDirection == BUFFER_SOURCE_TAP_IN) { + // only supporting one type of input frame + frameType = CameraFrame::REPROCESS_INPUT_FRAME; + } else { + frameType = formatToOutputFrameType(mPixelFormat); + } + + mFrameProvider->enableFrameNotification(frameType); + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; +} + +int BufferSourceAdapter::disableDisplay(bool cancel_buffer) +{ + LOG_FUNCTION_NAME; + + if (mFrameProvider) mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES); + + LOG_FUNCTION_NAME_EXIT; + + return NO_ERROR; +} + +status_t BufferSourceAdapter::pauseDisplay(bool pause) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + // no-op for BufferSourceAdapter + + LOG_FUNCTION_NAME_EXIT; + + return ret; +} + + +void BufferSourceAdapter::destroy() +{ + LOG_FUNCTION_NAME; + + mBufferCount = 0; + + LOG_FUNCTION_NAME_EXIT; +} + +CameraBuffer* 
BufferSourceAdapter::allocateBufferList(int width, int height, const char* format, + int &bytes, int numBufs) +{ + LOG_FUNCTION_NAME; + status_t err; + int i = -1; + const int lnumBufs = numBufs; + int undequeued = 0; + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + + mBuffers = new CameraBuffer [lnumBufs]; + memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs); + + if ( NULL == mBufferSource ) { + return NULL; + } + + // Set gralloc usage bits for window. + err = mBufferSource->set_usage(mBufferSource, CAMHAL_GRALLOC_USAGE); + if (err != 0) { + CAMHAL_LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err); + + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mBufferSource = NULL; + } + + return NULL; + } + + CAMHAL_LOGDB("Number of buffers set to ANativeWindow %d", numBufs); + // Set the number of buffers needed for this buffer source + err = mBufferSource->set_buffer_count(mBufferSource, numBufs); + if (err != 0) { + CAMHAL_LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), -err); + + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mBufferSource = NULL; + } + + return NULL; + } + + CAMHAL_LOGDB("Configuring %d buffers for ANativeWindow", numBufs); + mBufferCount = numBufs; + + // Set window geometry + err = mBufferSource->set_buffers_geometry(mBufferSource, + width, height, + getANWFormat(format)); + + if (err != 0) { + CAMHAL_LOGE("native_window_set_buffers_geometry failed: %s (%d)", strerror(-err), -err); + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mBufferSource = NULL; + } + return NULL; + } + + if ( mBuffers == NULL ) { + CAMHAL_LOGEA("Couldn't create array for ANativeWindow buffers"); + LOG_FUNCTION_NAME_EXIT; + return NULL; + } + + mBufferSource->get_min_undequeued_buffer_count(mBufferSource, &undequeued); + + for (i = 0; i < mBufferCount; i++ ) { + buffer_handle_t *handle; + int stride; // dummy variable to get stride + // TODO(XXX): Do we need to keep stride information in camera hal? 
+ + err = mBufferSource->dequeue_buffer(mBufferSource, &handle, &stride); + + if (err != 0) { + CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err); + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mBufferSource = NULL; + } + goto fail; + } + + CAMHAL_LOGDB("got handle %p", handle); + mBuffers[i].opaque = (void *)handle; + mBuffers[i].type = CAMERA_BUFFER_ANW; + mFramesWithCameraAdapterMap.add(handle, i); + + bytes = getBufSize(format, width, height); + } + + for( i = 0; i < mBufferCount-undequeued; i++ ) { + void *y_uv[2]; + Rect bounds(width, height); + + buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque; + mBufferSource->lock_buffer(mBufferSource, handle); + mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv); + mBuffers[i].mapped = y_uv[0]; + } + + // return the rest of the buffers back to ANativeWindow + for(i = (mBufferCount-undequeued); i >= 0 && i < mBufferCount; i++) { + buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque; + void *y_uv[2]; + Rect bounds(width, height); + + mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv); + mBuffers[i].mapped = y_uv[0]; + mapper.unlock(*handle); + + err = mBufferSource->cancel_buffer(mBufferSource, handle); + if (err != 0) { + CAMHAL_LOGEB("cancel_buffer failed: %s (%d)", strerror(-err), -err); + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mBufferSource = NULL; + } + goto fail; + } + mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[i].opaque); + } + + mPixelFormat = getPixFormatConstant(format); + mFrameWidth = width; + mFrameHeight = height; + mBufferSourceDirection = BUFFER_SOURCE_TAP_OUT; + + return mBuffers; + + fail: + // need to cancel buffers if any were dequeued + for (int start = 0; start < i && i > 0; start++) { + int err = mBufferSource->cancel_buffer(mBufferSource, + (buffer_handle_t *) mBuffers[start].opaque); + if (err != 0) { + CAMHAL_LOGEB("cancelBuffer failed w/ error 0x%08x", err); + break; + } + mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[start].opaque); + } + + freeBufferList(mBuffers); + + CAMHAL_LOGEA("Error occurred, performing cleanup"); + + if (NULL != mErrorNotifier.get()) { + mErrorNotifier->errorNotify(-ENOMEM); + } + + LOG_FUNCTION_NAME_EXIT; + return NULL; + +} + +CameraBuffer* BufferSourceAdapter::getBufferList(int *num) { + LOG_FUNCTION_NAME; + status_t err; + const int lnumBufs = 1; + int format, stride; + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + buffer_handle_t *handle; + + // TODO(XXX): Only supporting one input buffer at a time right now + *num = 1; + mBuffers = new CameraBuffer [lnumBufs]; + memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs); + + if ( NULL == mBufferSource ) { + return NULL; + } + + err = mBufferSource->update_and_get_buffer(mBufferSource, &handle, &stride); + if (err != 0) { + CAMHAL_LOGEB("update and get buffer failed: %s (%d)", strerror(-err), -err); + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mBufferSource = NULL; + } + goto fail; + } + + CAMHAL_LOGD("got handle %p", handle); + mBuffers[0].opaque = (void *)handle; + mBuffers[0].type = CAMERA_BUFFER_ANW; + mFramesWithCameraAdapterMap.add(handle, 0); + + err = mBufferSource->get_buffer_dimension(mBufferSource, &mBuffers[0].width, &mBuffers[0].height); + err = mBufferSource->get_buffer_format(mBufferSource, &format); + + // lock buffer + { + void *y_uv[2]; + Rect bounds(mBuffers[0].width, mBuffers[0].height); + mapper.lock(*handle, 
CAMHAL_GRALLOC_USAGE, bounds, y_uv); + mBuffers[0].mapped = y_uv[0]; + } + + mFrameWidth = mBuffers[0].width; + mFrameHeight = mBuffers[0].height; + mPixelFormat = getFormatFromANW(format); + mBuffers[0].format = mPixelFormat; + mBufferSourceDirection = BUFFER_SOURCE_TAP_IN; + + return mBuffers; + + fail: + // need to cancel buffers if any were dequeued + freeBufferList(mBuffers); + + if (NULL != mErrorNotifier.get()) { + mErrorNotifier->errorNotify(-ENOMEM); + } + + LOG_FUNCTION_NAME_EXIT; + return NULL; +} + +uint32_t * BufferSourceAdapter::getOffsets() +{ + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return NULL; +} + +int BufferSourceAdapter::minUndequeueableBuffers(int& undequeueable) { + LOG_FUNCTION_NAME; + int ret = NO_ERROR; + + if(!mBufferSource) + { + ret = INVALID_OPERATION; + goto end; + } + + ret = mBufferSource->get_min_undequeued_buffer_count(mBufferSource, &undequeueable); + if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("get_min_undequeued_buffer_count failed: %s (%d)", strerror(-ret), -ret); + if ( ENODEV == ret ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mBufferSource = NULL; + } + return -ret; + } + + end: + return ret; + LOG_FUNCTION_NAME_EXIT; + +} + +int BufferSourceAdapter::maxQueueableBuffers(unsigned int& queueable) +{ + LOG_FUNCTION_NAME; + int ret = NO_ERROR; + int undequeued = 0; + + if(mBufferCount == 0) { + ret = INVALID_OPERATION; + goto end; + } + + ret = minUndequeueableBuffers(undequeued); + if (ret != NO_ERROR) { + goto end; + } + + queueable = mBufferCount - undequeued; + + end: + return ret; + LOG_FUNCTION_NAME_EXIT; +} + +int BufferSourceAdapter::getFd() +{ + LOG_FUNCTION_NAME; + + LOG_FUNCTION_NAME_EXIT; + + return -1; + +} + +status_t BufferSourceAdapter::returnBuffersToWindow() +{ + status_t ret = NO_ERROR; + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + + //Give the buffers back to display here - sort of free it + if (mBufferSource) { + for(unsigned int i = 0; i < mFramesWithCameraAdapterMap.size(); i++) { + int value = mFramesWithCameraAdapterMap.valueAt(i); + buffer_handle_t *handle = (buffer_handle_t *) mBuffers[value].opaque; + + // if buffer index is out of bounds skip + if ((value < 0) || (value >= mBufferCount)) { + CAMHAL_LOGEA("Potential out bounds access to handle...skipping"); + continue; + } + + // unlock buffer before giving it up + mapper.unlock(*handle); + + ret = mBufferSource->cancel_buffer(mBufferSource, handle); + if ( ENODEV == ret ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mBufferSource = NULL; + return -ret; + } else if ( NO_ERROR != ret ) { + CAMHAL_LOGEB("cancel_buffer() failed: %s (%d)", + strerror(-ret), + -ret); + return -ret; + } + } + } else { + CAMHAL_LOGE("mBufferSource is NULL"); + } + + ///Clear the frames with camera adapter map + mFramesWithCameraAdapterMap.clear(); + + return ret; + +} + +int BufferSourceAdapter::freeBufferList(CameraBuffer * buflist) +{ + LOG_FUNCTION_NAME; + + status_t ret = NO_ERROR; + + Mutex::Autolock lock(mLock); + + if (mBufferSourceDirection == BUFFER_SOURCE_TAP_OUT) returnBuffersToWindow(); + + if ( NULL != buflist ) + { + delete [] buflist; + mBuffers = NULL; + } + + if( mBuffers != NULL) + { + delete [] mBuffers; + mBuffers = NULL; + } + + return NO_ERROR; +} + + +bool BufferSourceAdapter::supportsExternalBuffering() +{ + return false; +} + +void BufferSourceAdapter::addFrame(CameraFrame* frame) +{ + if (mQueueFrame.get()) { + mQueueFrame->addFrame(frame); + } +} + +void BufferSourceAdapter::handleFrameCallback(CameraFrame* frame) +{ + status_t ret = 
NO_ERROR; + buffer_handle_t *handle = NULL; + int i; + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + + if (!mBuffers || !frame->mBuffer) { + CAMHAL_LOGEA("Adapter sent BufferSourceAdapter a NULL frame?"); + return; + } + + Mutex::Autolock lock(mLock); + + for ( i = 0; i < mBufferCount; i++ ) { + if (frame->mBuffer == &mBuffers[i]) { + break; + } + } + + handle = (buffer_handle_t *) mBuffers[i].opaque; + + // Handle input buffers + // TODO(XXX): Move handling of input buffers out of here if + // it becomes more complex + if (frame->mFrameType == CameraFrame::REPROCESS_INPUT_FRAME) { + CAMHAL_LOGD("Unlock %p (buffer #%d)", handle, i); + mapper.unlock(*handle); + return; + } + + // unlock buffer before enqueueing + mapper.unlock(*handle); + + ret = mBufferSource->enqueue_buffer(mBufferSource, handle); + if (ret != 0) { + CAMHAL_LOGE("Surface::queueBuffer returned error %d", ret); + } + + mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) frame->mBuffer->opaque); + + // signal return frame thread that it can dequeue a buffer now + mReturnFrame->signal(); +} + + +bool BufferSourceAdapter::handleFrameReturn() +{ + status_t err; + buffer_handle_t *buf; + int i = 0; + int stride; // dummy variable to get stride + CameraFrame::FrameType type; + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + void *y_uv[2]; + Rect bounds(mFrameWidth, mFrameHeight); + + if ( NULL == mBufferSource ) { + return false; + } + + err = mBufferSource->dequeue_buffer(mBufferSource, &buf, &stride); + if (err != 0) { + CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err); + + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mBufferSource = NULL; + } + + return false; + } + + err = mBufferSource->lock_buffer(mBufferSource, buf); + if (err != 0) { + CAMHAL_LOGEB("lockbuffer failed: %s (%d)", strerror(-err), -err); + + if ( ENODEV == err ) { + CAMHAL_LOGEA("Preview surface abandoned!"); + mBufferSource = NULL; + } + + return false; + } + + mapper.lock(*buf, CAMHAL_GRALLOC_USAGE, bounds, y_uv); + + for(i = 0; i < mBufferCount; i++) { + if (mBuffers[i].opaque == buf) + break; + } + + if (i >= mBufferCount) { + CAMHAL_LOGEB("Failed to find handle %p", buf); + } + + mFramesWithCameraAdapterMap.add((buffer_handle_t *) mBuffers[i].opaque, i); + + CAMHAL_LOGVB("handleFrameReturn: found graphic buffer %d of %d", i, mBufferCount - 1); + + mFrameProvider->returnFrame(&mBuffers[i], formatToOutputFrameType(mPixelFormat)); + return true; +} + +void BufferSourceAdapter::frameCallback(CameraFrame* caFrame) +{ + if ((NULL != caFrame) && (NULL != caFrame->mCookie)) { + BufferSourceAdapter *da = (BufferSourceAdapter*) caFrame->mCookie; + da->addFrame(caFrame); + } else { + CAMHAL_LOGEB("Invalid Cookie in Camera Frame = %p, Cookie = %p", + caFrame, caFrame ? 
caFrame->mCookie : NULL); + } +} + +/*--------------------BufferSourceAdapter Class ENDS here-----------------------------*/ + +}; + diff --git a/camera/CameraHal.cpp b/camera/CameraHal.cpp index 722f553..840dc04 100644 --- a/camera/CameraHal.cpp +++ b/camera/CameraHal.cpp @@ -23,6 +23,7 @@ #include "CameraHal.h" #include "ANativeWindowDisplayAdapter.h" +#include "BufferSourceAdapter.h" #include "TICameraParameters.h" #include "CameraProperties.h" #include <cutils/properties.h> @@ -46,8 +47,13 @@ const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE = 9; const int CameraHal::SW_SCALING_FPS_LIMIT = 15; const uint32_t MessageNotifier::EVENT_BIT_FIELD_POSITION = 16; + const uint32_t MessageNotifier::FRAME_BIT_FIELD_POSITION = 0; +// TODO(XXX): Temporarily increase number of buffers we can allocate from ANW +// until faux-NPA mode is implemented +const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP = 15; + #ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING // HACK: Default path to directory where RAW images coming from video port will be saved to. // If directory not exists the saving is skipped and video port frame is ignored. @@ -1277,7 +1283,9 @@ status_t CameraHal::freePreviewDataBufs() return ret; } -status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size_t size, const char* previewFormat, unsigned int bufferCount) +status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size_t size, + const char* previewFormat, unsigned int bufferCount, + unsigned int *max_queueable) { status_t ret = NO_ERROR; int bytes; @@ -1288,38 +1296,41 @@ status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size // allocate image buffers only if not already allocated if(NULL != mImageBuffers) { + if (mBufferSourceAdapter_Out.get()) { + mBufferSourceAdapter_Out->maxQueueableBuffers(*max_queueable); + } else { + *max_queueable = bufferCount; + } return NO_ERROR; } - if ( NO_ERROR == ret ) - { - bytes = ((bytes+4095)/4096)*4096; + if (mBufferSourceAdapter_Out.get()) { + mImageBuffers = mBufferSourceAdapter_Out->allocateBufferList(width, height, previewFormat, + bytes, bufferCount); + mBufferSourceAdapter_Out->maxQueueableBuffers(*max_queueable); + } else { + bytes = ((bytes + 4095) / 4096) * 4096; mImageBuffers = mMemoryManager->allocateBufferList(0, 0, previewFormat, bytes, bufferCount); + *max_queueable = bufferCount; + } - CAMHAL_LOGDB("Size of Image cap buffer = %d", bytes); - if( NULL == mImageBuffers ) - { - CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager"); - ret = -NO_MEMORY; - } - else - { - bytes = size; - } - } + CAMHAL_LOGDB("Size of Image cap buffer = %d", bytes); + if ( NULL == mImageBuffers ) { + CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager"); + ret = -NO_MEMORY; + } else { + bytes = size; + } - if ( NO_ERROR == ret ) - { + if ( NO_ERROR == ret ) { mImageFd = mMemoryManager->getFd(); mImageLength = bytes; mImageOffsets = mMemoryManager->getOffsets(); - } - else - { + } else { mImageFd = -1; mImageLength = 0; mImageOffsets = NULL; - } + } LOG_FUNCTION_NAME_EXIT; @@ -1451,6 +1462,14 @@ status_t CameraHal::signalEndImageCapture() LOG_FUNCTION_NAME; + if (mBufferSourceAdapter_Out.get()) { + mBufferSourceAdapter_Out->disableDisplay(); + } + + if (mBufferSourceAdapter_In.get()) { + mBufferSourceAdapter_In->disableDisplay(); + } + if ( mBracketingRunning ) { stopImageBracketing(); } else { @@ -1468,22 +1487,19 @@ status_t CameraHal::freeImageBufs() LOG_FUNCTION_NAME; - if ( NO_ERROR == ret ) - { - - if( NULL != 
mImageBuffers ) - { - - ret = mMemoryManager->freeBufferList(mImageBuffers); - mImageBuffers = NULL; + if (NULL == mImageBuffers) { + return -EINVAL; + } - } - else - { - ret = -EINVAL; - } + if (mBufferSourceAdapter_Out.get()) { + ret = mBufferSourceAdapter_Out->freeBufferList(mImageBuffers); + } else { + ret = mMemoryManager->freeBufferList(mImageBuffers); + } - } + if (ret == NO_ERROR) { + mImageBuffers = NULL; + } LOG_FUNCTION_NAME_EXIT; @@ -1620,9 +1636,7 @@ status_t CameraHal::startPreview() mAppCallbackNotifier->enableMsgType (CAMERA_MSG_PREVIEW_FRAME); } - if (mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE) { - mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE); - } + signalEndImageCapture(); return ret; } @@ -1872,6 +1886,137 @@ status_t CameraHal::setPreviewWindow(struct preview_stream_ops *window) return ret; } +/** + @brief Sets ANativeWindow object. + + Buffers provided to CameraHal via this object for tap-in/tap-out + functionality. + + TODO(XXX): this is just going to use preview_stream_ops for now, but we + most likely need to extend it when we want more functionality + + @param[in] window The ANativeWindow object created by Surface flinger + @return NO_ERROR If the ANativeWindow object passes validation criteria + @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios + + */ +status_t CameraHal::setBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout) +{ + status_t ret = NO_ERROR; + + LOG_FUNCTION_NAME; + + // If either a tapin or tapout was previously set + // we need to clean up and clear capturing + if ((!tapout && mBufferSourceAdapter_Out.get()) || + (!tapin && mBufferSourceAdapter_In.get())) { + signalEndImageCapture(); + } + + // Set tapout point + // destroy current buffer tapout if NULL tapout is passed + if (!tapout) { + if (mBufferSourceAdapter_Out.get() != NULL) { + CAMHAL_LOGD("NULL tapout passed, destroying buffer tapout adapter"); + mBufferSourceAdapter_Out.clear(); + mBufferSourceAdapter_Out = 0; + } + ret = NO_ERROR; + } else if (mBufferSourceAdapter_Out.get() == NULL) { + mBufferSourceAdapter_Out = new BufferSourceAdapter(); + if(!mBufferSourceAdapter_Out.get()) { + CAMHAL_LOGEA("Couldn't create DisplayAdapter"); + ret = NO_MEMORY; + goto exit; + } + + ret = mBufferSourceAdapter_Out->initialize(); + if (ret != NO_ERROR) + { + mBufferSourceAdapter_Out.clear(); + mBufferSourceAdapter_Out = 0; + CAMHAL_LOGEA("DisplayAdapter initialize failed"); + goto exit; + } + + // CameraAdapter will be the frame provider for BufferSourceAdapter + mBufferSourceAdapter_Out->setFrameProvider(mCameraAdapter); + + // BufferSourceAdapter will use ErrorHandler to send errors back to + // the application + mBufferSourceAdapter_Out->setErrorHandler(mAppCallbackNotifier.get()); + + // Update the display adapter with the new window that is passed from CameraService + ret = mBufferSourceAdapter_Out->setPreviewWindow(tapout); + if(ret != NO_ERROR) { + CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret); + goto exit; + } + } else { + // Update the display adapter with the new window that is passed from CameraService + freeImageBufs(); + ret = mBufferSourceAdapter_Out->setPreviewWindow(tapout); + if (ret == ALREADY_EXISTS) { + // ALREADY_EXISTS should be treated as a noop in this case + ret = NO_ERROR; + } + } + + if (ret != NO_ERROR) { + CAMHAL_LOGE("Error while trying to set tapout point"); + goto exit; + } + + // 1. 
Set tapin point + if (!tapin) { + if (mBufferSourceAdapter_In.get() != NULL) { + CAMHAL_LOGD("NULL tapin passed, destroying buffer tapin adapter"); + mBufferSourceAdapter_In.clear(); + mBufferSourceAdapter_In = 0; + } + ret = NO_ERROR; + } else if (mBufferSourceAdapter_In.get() == NULL) { + mBufferSourceAdapter_In = new BufferSourceAdapter(); + if(!mBufferSourceAdapter_In.get()) { + CAMHAL_LOGEA("Couldn't create DisplayAdapter"); + ret = NO_MEMORY; + goto exit; + } + + ret = mBufferSourceAdapter_In->initialize(); + if (ret != NO_ERROR) + { + mBufferSourceAdapter_In.clear(); + mBufferSourceAdapter_In = 0; + CAMHAL_LOGEA("DisplayAdapter initialize failed"); + goto exit; + } + + // We need to set a frame provider so camera adapter can return the frame back to us + mBufferSourceAdapter_In->setFrameProvider(mCameraAdapter); + + // BufferSourceAdapter will use ErrorHandler to send errors back to + // the application + mBufferSourceAdapter_In->setErrorHandler(mAppCallbackNotifier.get()); + + // Update the display adapter with the new window that is passed from CameraService + ret = mBufferSourceAdapter_In->setPreviewWindow(tapin); + if(ret != NO_ERROR) { + CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret); + goto exit; + } + } else { + // Update the display adapter with the new window that is passed from CameraService + ret = mBufferSourceAdapter_In->setPreviewWindow(tapin); + if (ret == ALREADY_EXISTS) { + // ALREADY_EXISTS should be treated as a noop in this case + ret = NO_ERROR; + } + } + + exit: + return ret; +} /** @@ -2414,9 +2559,12 @@ void CameraHal::eventCallback(CameraHalEvent* event) status_t CameraHal::startImageBracketing() { - status_t ret = NO_ERROR; - CameraFrame frame; - CameraAdapter::BuffersDescriptor desc; + status_t ret = NO_ERROR; + CameraFrame frame; + CameraAdapter::BuffersDescriptor desc; + unsigned int max_queueable = 0; + + #if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS @@ -2464,6 +2612,7 @@ status_t CameraHal::startImageBracketing() if ( NO_ERROR == ret ) { + unsigned int bufferCount = mBracketRangeNegative + 1; mParameters.getPictureSize(( int * ) &frame.mWidth, ( int * ) &frame.mHeight); @@ -2471,7 +2620,9 @@ status_t CameraHal::startImageBracketing() frame.mHeight, frame.mLength, mParameters.getPictureFormat(), - ( mBracketRangeNegative + 1 )); + bufferCount, + &max_queueable); + mBracketRangeNegative = bufferCount - 1; if ( NO_ERROR != ret ) { CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret); @@ -2486,7 +2637,7 @@ status_t CameraHal::startImageBracketing() desc.mFd = mImageFd; desc.mLength = mImageLength; desc.mCount = ( size_t ) ( mBracketRangeNegative + 1 ); - desc.mMaxQueueable = ( size_t ) ( mBracketRangeNegative + 1 ); + desc.mMaxQueueable = ( size_t) max_queueable; ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE, ( int ) &desc); @@ -2547,6 +2698,7 @@ status_t CameraHal::takePicture(const char *params) int burst = -1; const char *valstr = NULL; unsigned int bufferCount = 1; + unsigned int max_queueable = 0; unsigned int rawBufferCount = 1; bool isCPCamMode = false; @@ -2648,6 +2800,13 @@ status_t CameraHal::takePicture(const char *params) // For CPCam mode...allocate for worst case burst bufferCount = isCPCamMode || (burst > CameraHal::NO_BUFFERS_IMAGE_CAPTURE) ? 
CameraHal::NO_BUFFERS_IMAGE_CAPTURE : burst; + + if (mBufferSourceAdapter_Out.get()) { + // TODO(XXX): Temporarily increase number of buffers we can allocate from ANW + // until faux-NPA mode is implemented + bufferCount = NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP; + } + if ( NULL != mAppCallbackNotifier.get() ) { mAppCallbackNotifier->setBurst(true); } @@ -2713,7 +2872,8 @@ status_t CameraHal::takePicture(const char *params) frame.mHeight, frame.mLength, mParameters.getPictureFormat(), - bufferCount); + bufferCount, + &max_queueable); if ( NO_ERROR != ret ) { CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret); @@ -2727,7 +2887,7 @@ status_t CameraHal::takePicture(const char *params) desc.mFd = mImageFd; desc.mLength = mImageLength; desc.mCount = ( size_t ) bufferCount; - desc.mMaxQueueable = ( size_t ) bufferCount; + desc.mMaxQueueable = ( size_t ) max_queueable; ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE, ( int ) &desc); @@ -2759,6 +2919,10 @@ status_t CameraHal::takePicture(const char *params) } } + if ((ret == NO_ERROR) && mBufferSourceAdapter_Out.get()) { + mBufferSourceAdapter_Out->enableDisplay(0, 0, NULL); + } + if ((NO_ERROR == ret) && (NULL != mCameraAdapter)) { #if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS @@ -2790,11 +2954,9 @@ status_t CameraHal::takePicture(const char *params) status_t CameraHal::cancelPicture( ) { LOG_FUNCTION_NAME; + status_t ret = NO_ERROR; - Mutex::Autolock lock(mLock); - - mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE); - + ret = signalEndImageCapture(); return NO_ERROR; } diff --git a/camera/CameraHal_Module.cpp b/camera/CameraHal_Module.cpp index 8984eae..2181cc2 100644 --- a/camera/CameraHal_Module.cpp +++ b/camera/CameraHal_Module.cpp @@ -88,6 +88,25 @@ int camera_set_preview_window(struct camera_device * device, return rv; } +int camera_set_buffer_source(struct camera_device * device, + struct preview_stream_ops *tapin, + struct preview_stream_ops *tapout) +{ + int rv = -EINVAL; + ti_camera_device_t* ti_dev = NULL; + + LOGV("%s", __FUNCTION__); + + if(!device) + return rv; + + ti_dev = (ti_camera_device_t*) device; + + rv = gCameraHals[ti_dev->cameraid]->setBufferSource(tapin, tapout); + + return rv; +} + void camera_set_callbacks(struct camera_device * device, camera_notify_callback notify_cb, camera_data_callback data_cb, @@ -546,6 +565,7 @@ int camera_device_open(const hw_module_t* module, const char* name, camera_device->base.ops = camera_ops; camera_ops->set_preview_window = camera_set_preview_window; + camera_ops->set_buffer_source = camera_set_buffer_source; camera_ops->set_callbacks = camera_set_callbacks; camera_ops->enable_msg_type = camera_enable_msg_type; camera_ops->disable_msg_type = camera_disable_msg_type; diff --git a/camera/MemoryManager.cpp b/camera/MemoryManager.cpp index ea054c8..f93a427 100644 --- a/camera/MemoryManager.cpp +++ b/camera/MemoryManager.cpp @@ -137,6 +137,13 @@ error: return NULL; } +CameraBuffer* MemoryManager::getBufferList(int *numBufs) { + LOG_FUNCTION_NAME; + if (numBufs) *numBufs = -1; + + return NULL; +} + //TODO: Get needed data to map tiler buffers //Return dummy data for now uint32_t * MemoryManager::getOffsets() diff --git a/camera/OMXCameraAdapter/OMXCapabilities.cpp b/camera/OMXCameraAdapter/OMXCapabilities.cpp index 5f4ebcd..83192d3 100644 --- a/camera/OMXCameraAdapter/OMXCapabilities.cpp +++ b/camera/OMXCameraAdapter/OMXCapabilities.cpp @@ -2424,7 +2424,7 @@ status_t OMXCameraAdapter::getCaps(const int sensorId, 
CameraProperties::Propert } #ifdef CAMERAHAL_DEBUG - _dumpOmxTiCap(sensorId, *caps[0]); + _dumpOmxTiCap(sensorId, *caps); #endif // Translate and insert Ducati capabilities to CameraProperties diff --git a/camera/OMXCameraAdapter/OMXCapture.cpp b/camera/OMXCameraAdapter/OMXCapture.cpp index 03d3ee5..12c52bc 100644 --- a/camera/OMXCameraAdapter/OMXCapture.cpp +++ b/camera/OMXCameraAdapter/OMXCapture.cpp @@ -220,13 +220,13 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters ¶ms, if ( params.getInt(TICameraParameters::KEY_BURST) >= 1 ) { if (params.getInt(TICameraParameters::KEY_BURST) != (int) mBurstFrames) { - mPendingCaptureSettings |= SetExpBracket; + mPendingCaptureSettings |= SetBurst; } mBurstFrames = params.getInt(TICameraParameters::KEY_BURST); } else { - if (mBurstFrames != 1) mPendingCaptureSettings |= SetExpBracket; + if (mBurstFrames != 1) mPendingCaptureSettings |= SetBurst; mBurstFrames = 1; } @@ -1060,7 +1060,7 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing) } if (mPendingCaptureSettings & SetExpBracket) { - mPendingCaptureSettings &= ~SetExpBracket; + mPendingCaptureSettings &= ~(SetExpBracket|SetBurst); if ( mBracketingSet ) { ret = doExposureBracketing(mExposureBracketingValues, mExposureGainBracketingValues, @@ -1132,7 +1132,7 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing) } } else if ( NO_ERROR == ret ) { ///Queue all the buffers on capture port - for ( int index = 0 ; index < capData->mNumBufs ; index++ ) { + for ( int index = 0 ; index < capData->mMaxQueueable ; index++ ) { CAMHAL_LOGDB("Queuing buffer on Capture port - 0x%x", ( unsigned int ) capData->mBufferHeader[index]->pBuffer); if (mBurstFramesQueued < mBurstFramesAccum) { @@ -1161,6 +1161,7 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing) } mWaitingForSnapshot = true; mCaptureSignalled = false; + mPendingCaptureSettings &= ~SetBurst; // Capturing command is not needed when capturing in video mode // Only need to queue buffers on image ports @@ -1522,6 +1523,26 @@ status_t OMXCameraAdapter::UseBuffersCapture(CameraBuffer * bufArr, int num) CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError); GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError); + // Configure DOMX to use either gralloc handles or vptrs + if ((imgCaptureData->mNumBufs > 0)) { + OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles; + OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER); + + domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mImagePortIndex; + if (bufArr[0].type == CAMERA_BUFFER_ANW) { + domxUseGrallocHandles.bEnable = OMX_TRUE; + } else { + domxUseGrallocHandles.bEnable = OMX_FALSE; + } + + eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp, + (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles); + if (eError!=OMX_ErrorNone) { + CAMHAL_LOGEB("OMX_SetParameter - %x", eError); + } + GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError); + } + for ( int index = 0 ; index < imgCaptureData->mNumBufs ; index++ ) { OMX_BUFFERHEADERTYPE *pBufferHdr; diff --git a/camera/inc/ANativeWindowDisplayAdapter.h b/camera/inc/ANativeWindowDisplayAdapter.h index d20781c..24bcfb5 100644 --- a/camera/inc/ANativeWindowDisplayAdapter.h +++ b/camera/inc/ANativeWindowDisplayAdapter.h @@ -21,10 +21,6 @@ #include <ui/GraphicBufferMapper.h> #include <hal_public.h> -//temporarily define format here -#define HAL_PIXEL_FORMAT_TI_NV12 0x100 -#define HAL_PIXEL_FORMAT_TI_NV12_1D 0x102 - namespace android { /** @@ -82,11 +78,13 @@ public: //Implementation of 
inherited interfaces virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs); + virtual CameraBuffer *getBufferList(int *numBufs); virtual uint32_t * getOffsets() ; virtual int getFd() ; virtual int freeBufferList(CameraBuffer * buflist); virtual int maxQueueableBuffers(unsigned int& queueable); + virtual int minUndequeueableBuffers(int& unqueueable); ///Class specific functions static void frameCallbackRelay(CameraFrame* caFrame); diff --git a/camera/inc/BaseCameraAdapter.h b/camera/inc/BaseCameraAdapter.h index dbdc88d..0467b7e 100644 --- a/camera/inc/BaseCameraAdapter.h +++ b/camera/inc/BaseCameraAdapter.h @@ -243,14 +243,14 @@ protected: //Image buffer management data CameraBuffer *mCaptureBuffers; - KeyedVector<CameraBuffer *, bool> mCaptureBuffersAvailable; + KeyedVector<CameraBuffer *, int> mCaptureBuffersAvailable; int mCaptureBuffersCount; size_t mCaptureBuffersLength; mutable Mutex mCaptureBufferLock; //Metadata buffermanagement CameraBuffer *mPreviewDataBuffers; - KeyedVector<CameraBuffer *, bool> mPreviewDataBuffersAvailable; + KeyedVector<CameraBuffer *, int> mPreviewDataBuffersAvailable; int mPreviewDataBuffersCount; size_t mPreviewDataBuffersLength; mutable Mutex mPreviewDataBufferLock; diff --git a/camera/inc/BufferSourceAdapter.h b/camera/inc/BufferSourceAdapter.h new file mode 100644 index 0000000..99e14cd --- /dev/null +++ b/camera/inc/BufferSourceAdapter.h @@ -0,0 +1,185 @@ +/* + * Copyright (C) Texas Instruments - http://www.ti.com/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + + +#include "CameraHal.h" +#include <ui/egl/android_natives.h> +#include <ui/GraphicBufferMapper.h> +#include <hal_public.h> + +#ifndef BUFFER_SOURCE_ADAPTER_H +#define BUFFER_SOURCE_ADAPTER_H + +namespace android { + +/** + * Handles enqueueing/dequeing buffers to tap-in/tap-out points + * TODO(XXX): this class implements DisplayAdapter for now + * but this will most likely change once tap-in/tap-out points + * are better defined + */ + +class BufferSourceAdapter : public DisplayAdapter +{ +// private types +private: + // helper class to return frame in different thread context + class ReturnFrame : public Thread { + public: + ReturnFrame(BufferSourceAdapter* __this) : mBufferSourceAdapter(__this) { + mWaitForSignal.Create(0); + mDestroying = false; + } + + ~ReturnFrame() { + mDestroying = true; + mWaitForSignal.Release(); + } + + void signal() { + mWaitForSignal.Signal(); + } + + virtual bool threadLoop() { + mWaitForSignal.Wait(); + if (!mDestroying) mBufferSourceAdapter->handleFrameReturn(); + return true; + } + + private: + BufferSourceAdapter* mBufferSourceAdapter; + Semaphore mWaitForSignal; + bool mDestroying; + }; + + // helper class to queue frame in different thread context + class QueueFrame : public Thread { + public: + QueueFrame(BufferSourceAdapter* __this) : mBufferSourceAdapter(__this) { + mDestroying = false; + } + + ~QueueFrame() { + mDestroying = true; + + Mutex::Autolock lock(mFramesMutex); + while (!mFrames.empty()) { + CameraFrame *frame = mFrames.itemAt(0); + mFrames.removeAt(0); + delete frame; + } + mFramesCondition.signal(); + } + + void addFrame(CameraFrame *frame) { + Mutex::Autolock lock(mFramesMutex); + mFrames.add(new CameraFrame(frame)); + mFramesCondition.signal(); + } + + virtual bool threadLoop() { + CameraFrame *frame = NULL; + { + Mutex::Autolock lock(mFramesMutex); + while (mFrames.empty() && !mDestroying) mFramesCondition.wait(mFramesMutex); + if (!mDestroying) { + frame = mFrames.itemAt(0); + mFrames.removeAt(0); + } + } + + if (frame) { + mBufferSourceAdapter->handleFrameCallback(frame); + delete frame; + } + + return true; + } + + private: + BufferSourceAdapter* mBufferSourceAdapter; + Vector<CameraFrame *> mFrames; + Condition mFramesCondition; + Mutex mFramesMutex; + bool mDestroying; + }; + + enum { + BUFFER_SOURCE_TAP_IN, + BUFFER_SOURCE_TAP_OUT + }; + +// public member functions +public: + BufferSourceAdapter(); + virtual ~BufferSourceAdapter(); + + virtual status_t initialize(); + virtual int setPreviewWindow(struct preview_stream_ops *source); + virtual int setFrameProvider(FrameNotifier *frameProvider); + virtual int setErrorHandler(ErrorNotifier *errorNotifier); + virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL); + virtual int disableDisplay(bool cancel_buffer = true); + virtual status_t pauseDisplay(bool pause); +#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS + // Not implemented in this class + virtual status_t setSnapshotTimeRef(struct timeval *refTime = NULL) { return NO_ERROR; } +#endif + virtual bool supportsExternalBuffering(); + virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs); + virtual CameraBuffer *getBufferList(int *numBufs); + virtual uint32_t * getOffsets() ; + virtual int getFd() ; + virtual int freeBufferList(CameraBuffer * buflist); + virtual int maxQueueableBuffers(unsigned int& queueable); + virtual int minUndequeueableBuffers(int& unqueueable); + + static void frameCallback(CameraFrame* caFrame); + void 
addFrame(CameraFrame* caFrame); + void handleFrameCallback(CameraFrame* caFrame); + bool handleFrameReturn(); + +private: + void destroy(); + status_t returnBuffersToWindow(); + +private: + preview_stream_ops_t* mBufferSource; + FrameProvider *mFrameProvider; // Pointer to the frame provider interface + + mutable Mutex mLock; + int mBufferCount; + CameraBuffer *mBuffers; + + KeyedVector<buffer_handle_t *, int> mFramesWithCameraAdapterMap; + sp<ErrorNotifier> mErrorNotifier; + sp<ReturnFrame> mReturnFrame; + sp<QueueFrame> mQueueFrame; + + uint32_t mFrameWidth; + uint32_t mFrameHeight; + uint32_t mPreviewWidth; + uint32_t mPreviewHeight; + + int mBufferSourceDirection; + + const char *mPixelFormat; +}; + +}; + +#endif diff --git a/camera/inc/CameraHal.h b/camera/inc/CameraHal.h index ee62dcb..5d04881 100644 --- a/camera/inc/CameraHal.h +++ b/camera/inc/CameraHal.h @@ -56,6 +56,10 @@ extern "C" { #include <ion.h> } +//temporarily define format here +#define HAL_PIXEL_FORMAT_TI_NV12 0x100 +#define HAL_PIXEL_FORMAT_TI_NV12_1D 0x102 + #define MIN_WIDTH 640 #define MIN_HEIGHT 480 #define PICTURE_WIDTH 3264 /* 5mp - 2560. 8mp - 3280 */ /* Make sure it is a multiple of 16. */ @@ -347,6 +351,11 @@ typedef struct _CameraBuffer { int fd; size_t size; int index; + + /* These describe the camera buffer */ + int width; + int height; + const char *format; } CameraBuffer; void * camera_buffer_get_omx_ptr (CameraBuffer *buffer); @@ -367,6 +376,7 @@ class CameraFrame FRAME_DATA= 0x80, RAW_FRAME = 0x100, SNAPSHOT_FRAME = 0x200, + REPROCESS_INPUT_FRAME = 0x400, ALL_FRAMES = 0xFFFF ///Maximum of 16 frame types supported }; @@ -416,6 +426,25 @@ class CameraFrame mYuv[1] = frame.mYuv[1]; } + //copy constructor 2 + CameraFrame(const CameraFrame *frame) : + mCookie(frame->mCookie), + mCookie2(frame->mCookie2), + mBuffer(frame->mBuffer), + mFrameType(frame->mFrameType), + mTimestamp(frame->mTimestamp), + mWidth(frame->mWidth), + mHeight(frame->mHeight), + mOffset(frame->mOffset), + mAlignment(frame->mAlignment), + mFd(frame->mFd), + mLength(frame->mLength), + mFrameMask(frame->mFrameMask), + mQuirks(frame->mQuirks) { + mYuv[0] = frame->mYuv[0]; + mYuv[1] = frame->mYuv[1]; + } + void *mCookie; void *mCookie2; CameraBuffer *mBuffer; @@ -616,6 +645,11 @@ class BufferProvider public: virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs) = 0; + // gets a buffer list from BufferProvider when buffers are sent from external source and already pre-allocated + // only call this function for an input source into CameraHal. 
If buffers are not from a pre-allocated source + // this function will return NULL and numBufs of -1 + virtual CameraBuffer *getBufferList(int *numBufs) = 0; + //additional methods used for memory mapping virtual uint32_t * getOffsets() = 0; virtual int getFd() = 0; @@ -821,6 +855,7 @@ public: int setErrorHandler(ErrorNotifier *errorNotifier); virtual CameraBuffer * allocateBufferList(int width, int height, const char* format, int &bytes, int numBufs); + virtual CameraBuffer *getBufferList(int *numBufs); virtual uint32_t * getOffsets(); virtual int getFd() ; virtual int freeBufferList(CameraBuffer * buflist); @@ -1005,6 +1040,12 @@ public: // This function should only be called after // allocateBufferList virtual int maxQueueableBuffers(unsigned int& queueable) = 0; + + // Get min buffers display needs at any given time + virtual int minUndequeueableBuffers(int& unqueueable) = 0; +protected: + virtual const char* getPixFormatConstant(const char* parameters_format) const; + virtual size_t getBufSize(const char* parameters_format, int width, int height) const; }; static void releaseImageBuffers(void *userData); @@ -1026,6 +1067,7 @@ public: ///Constants static const int NO_BUFFERS_PREVIEW; static const int NO_BUFFERS_IMAGE_CAPTURE; + static const int NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP; static const uint32_t VFR_SCALE = 1000; @@ -1078,6 +1120,11 @@ public: int setPreviewWindow(struct preview_stream_ops *window); /** + * Set a tap-in or tap-out point. + */ + int setBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout); + + /** * Stop a previously started preview. */ void stopPreview(); @@ -1236,7 +1283,9 @@ private: status_t allocVideoBufs(uint32_t width, uint32_t height, uint32_t bufferCount); /** Allocate image capture buffers */ - status_t allocImageBufs(unsigned int width, unsigned int height, size_t length, const char* previewFormat, unsigned int bufferCount); + status_t allocImageBufs(unsigned int width, unsigned int height, size_t length, + const char* previewFormat, unsigned int bufferCount, + unsigned int *max_queueable); /** Allocate Raw buffers */ status_t allocRawBufs(int width, int height, const char* previewFormat, int bufferCount); @@ -1304,6 +1353,10 @@ public: sp<AppCallbackNotifier> mAppCallbackNotifier; sp<DisplayAdapter> mDisplayAdapter; sp<MemoryManager> mMemoryManager; + // TODO(XXX): May need to keep this as a vector in the future + // when we can have multiple tap-in/tap-out points + sp<DisplayAdapter> mBufferSourceAdapter_In; + sp<DisplayAdapter> mBufferSourceAdapter_Out; sp<IMemoryHeap> mPictureHeap; diff --git a/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h index 6b27af9..9ea3f0c 100644 --- a/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h +++ b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h @@ -260,6 +260,7 @@ public: SetExpBracket = 1 << 2, SetQuality = 1 << 3, SetRotation = 1 << 4, + SetBurst = 1 << 5, ECaptureSettingMax, ECapturesettingsAll = ( ((ECaptureSettingMax -1 ) << 1) -1 ) /// all possible flags raised }; |