author     Daniel Levin <dendy@ti.com>    2012-11-27 21:34:12 +0200
committer  Daniel Levin <dendy@ti.com>    2012-12-04 15:38:30 +0200
commit     cf614ea4fd9ebc303d6314016d7e226fa7cd1966
tree       7f09a3067b666a09dd8faadcac8d33b89166e219
parent     92c40268fb2cdf196b7bd97fa5e569d8267a9ac7
parent     005d358cbcf413658d3e5204b699d9bf7367c256
Merge branch 'd-jb-mr0-release-camera' into p-jb-mr1-release
Conflicts:
	camera/OMXCameraAdapter/OMXCameraAdapter.cpp
	test/CameraHal/camera_test_bufferqueue.h
	test/CameraHal/camera_test_surfacetexture.cpp

Change-Id: I1f13c6a5b6369e943773d04a650406a79eb95750
Signed-off-by: Daniel Levin <dendy@ti.com>
-rw-r--r--              camera/ANativeWindowDisplayAdapter.cpp                    218
-rwxr-xr-x[-rw-r--r--]  camera/Android.mk                                          11
-rw-r--r--              camera/AppCallbackNotifier.cpp                            171
-rw-r--r--              camera/BaseCameraAdapter.cpp                               25
-rw-r--r--              camera/BufferSourceAdapter.cpp                            249
-rw-r--r--              camera/CameraHal.cpp                                      747
-rw-r--r--              camera/CameraHalCommon.cpp                                 92
-rw-r--r--              camera/CameraHal_Module.cpp                                20
-rw-r--r--              camera/DecoderFactory.cpp                                  59
-rwxr-xr-x              camera/Decoder_libjpeg.cpp                                301
-rw-r--r--              camera/FrameDecoder.cpp                                   204
-rw-r--r--              camera/MemoryManager.cpp                                    1
-rw-r--r--              camera/OMXCameraAdapter/OMX3A.cpp                         192
-rw-r--r--              camera/OMXCameraAdapter/OMXAlgo.cpp                        37
-rw-r--r--              camera/OMXCameraAdapter/OMXCameraAdapter.cpp               86
-rw-r--r--              camera/OMXCameraAdapter/OMXCapabilities.cpp                20
-rw-r--r--              camera/OMXCameraAdapter/OMXCapture.cpp                    437
-rw-r--r--              camera/OMXCameraAdapter/OMXExif.cpp                        11
-rw-r--r--              camera/OMXCameraAdapter/OMXFocus.cpp                       13
-rw-r--r--              camera/OMXCameraAdapter/OMXReprocess.cpp                   68
-rw-r--r--              camera/OmxFrameDecoder.cpp                               1077
-rw-r--r--              camera/SensorListener.cpp                                  12
-rw-r--r--              camera/SwFrameDecoder.cpp                                  85
-rw-r--r--              camera/TICameraParameters.cpp                               5
-rwxr-xr-x[-rw-r--r--]  camera/V4LCameraAdapter/V4LCameraAdapter.cpp              625
-rwxr-xr-x[-rw-r--r--]  camera/V4LCameraAdapter/V4LCapabilities.cpp                33
-rw-r--r--              camera/inc/ANativeWindowDisplayAdapter.h                    7
-rw-r--r--              camera/inc/BufferSourceAdapter.h                           64
-rw-r--r--              camera/inc/CameraHal.h                                     64
-rw-r--r--              camera/inc/CameraProperties.h                               1
-rw-r--r--              camera/inc/DecoderFactory.h                                35
-rwxr-xr-x              camera/inc/Decoder_libjpeg.h                               58
-rw-r--r--              camera/inc/FrameDecoder.h                                 173
-rw-r--r--              camera/inc/General3A_Settings.h                             7
-rw-r--r--              camera/inc/OMXCameraAdapter/OMXCameraAdapter.h             28
-rw-r--r--              camera/inc/OMXCameraAdapter/OMXSceneModeTables.h          200
-rw-r--r--              camera/inc/OmxFrameDecoder.h                              204
-rw-r--r--              camera/inc/SwFrameDecoder.h                                47
-rw-r--r--              camera/inc/TICameraParameters.h                             6
-rwxr-xr-x[-rw-r--r--]  camera/inc/V4LCameraAdapter/V4LCameraAdapter.h             45
-rw-r--r--              cpcam/java/com/ti/omap/android/cpcam/CPCam.java            11
-rw-r--r--              cpcam/java/com/ti/omap/android/cpcam/CPCamBufferQueue.java 11
-rw-r--r--              cpcam/jni/com_ti_omap_android_cpcam_CPCam.cpp              38
-rw-r--r--              cpcam/jni/com_ti_omap_android_cpcam_CPCamBufferQueue.cpp    8
-rw-r--r--              test/CameraHal/camera_test.h                               47
-rw-r--r--              test/CameraHal/camera_test_bufferqueue.h                   43
-rw-r--r--              test/CameraHal/camera_test_menu.cpp                       384
-rw-r--r--              test/CameraHal/camera_test_script.cpp                     198
-rw-r--r--              test/CameraHal/camera_test_surfacetexture.cpp             450
-rw-r--r--              test/CameraHal/camera_test_surfacetexture.h                54
50 files changed, 5808 insertions, 1174 deletions
diff --git a/camera/ANativeWindowDisplayAdapter.cpp b/camera/ANativeWindowDisplayAdapter.cpp
index 45f7ba0..396e6d4 100644
--- a/camera/ANativeWindowDisplayAdapter.cpp
+++ b/camera/ANativeWindowDisplayAdapter.cpp
@@ -66,80 +66,6 @@ OMX_COLOR_FORMATTYPE toOMXPixFormat(const char* parameters_format)
return pixFormat;
}
-const char* DisplayAdapter::getPixFormatConstant(const char* parameters_format) const
-{
- const char* pixFormat;
-
- if ( parameters_format != NULL )
- {
- if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
- {
- CAMHAL_LOGVA("CbYCrY format selected");
- pixFormat = android::CameraParameters::PIXEL_FORMAT_YUV422I;
- }
- else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
- strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
- {
- // TODO(XXX): We are treating YV12 the same as YUV420SP
- CAMHAL_LOGVA("YUV420SP format selected");
- pixFormat = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
- }
- else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0)
- {
- CAMHAL_LOGVA("RGB565 format selected");
- pixFormat = android::CameraParameters::PIXEL_FORMAT_RGB565;
- }
- else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0)
- {
- CAMHAL_LOGVA("BAYER format selected");
- pixFormat = android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
- }
- else
- {
- CAMHAL_LOGEA("Invalid format, NV12 format selected as default");
- pixFormat = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
- }
- }
- else
- {
- CAMHAL_LOGEA("Preview format is NULL, defaulting to NV12");
- pixFormat = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
- }
-
- return pixFormat;
-}
-
-size_t DisplayAdapter::getBufSize(const char* parameters_format, int width, int height) const
-{
- int buf_size;
-
- if ( parameters_format != NULL ) {
- if (strcmp(parameters_format,
- android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
- buf_size = width * height * 2;
- }
- else if((strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) ||
- (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0)) {
- buf_size = width * height * 3 / 2;
- }
- else if(strcmp(parameters_format,
- android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
- buf_size = width * height * 2;
- }
- else if (strcmp(parameters_format,
- android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
- buf_size = width * height * 2;
- } else {
- CAMHAL_LOGEA("Invalid format");
- buf_size = 0;
- }
- } else {
- CAMHAL_LOGEA("Preview format is NULL");
- buf_size = 0;
- }
-
- return buf_size;
-}
/*--------------------ANativeWindowDisplayAdapter Class STARTS here-----------------------------*/
@@ -149,7 +75,8 @@ size_t DisplayAdapter::getBufSize(const char* parameters_format, int width, int
ANativeWindowDisplayAdapter::ANativeWindowDisplayAdapter():mDisplayThread(NULL),
mDisplayState(ANativeWindowDisplayAdapter::DISPLAY_INIT),
mDisplayEnabled(false),
- mBufferCount(0)
+ mBufferCount(0),
+ mUseExternalBufferLocking(false)
@@ -600,6 +527,7 @@ CameraBuffer* ANativeWindowDisplayAdapter::allocateBufferList(int width, int hei
}
mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeued);
+ mPixelFormat = CameraHal::getPixelFormatConstant(format);
for ( i=0; i < mBufferCount; i++ )
{
@@ -623,6 +551,7 @@ CameraBuffer* ANativeWindowDisplayAdapter::allocateBufferList(int width, int hei
CAMHAL_LOGDB("got handle %p", handle);
mBuffers[i].opaque = (void *)handle;
mBuffers[i].type = CAMERA_BUFFER_ANW;
+ mBuffers[i].format = mPixelFormat;
mFramesWithCameraAdapterMap.add(handle, i);
// Tag remaining preview buffers as preview frames
@@ -631,7 +560,7 @@ CameraBuffer* ANativeWindowDisplayAdapter::allocateBufferList(int width, int hei
CameraFrame::PREVIEW_FRAME_SYNC);
}
- bytes = getBufSize(format, width, height);
+ bytes = CameraHal::calculateBufferSize(format, width, height);
}
@@ -651,6 +580,9 @@ CameraBuffer* ANativeWindowDisplayAdapter::allocateBufferList(int width, int hei
mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
mBuffers[i].mapped = y_uv[0];
mFrameProvider->addFramePointers(&mBuffers[i], y_uv);
+ if (mUseExternalBufferLocking) {
+ mapper.unlock(*handle);
+ }
}
// return the rest of the buffers back to ANativeWindow
@@ -678,7 +610,6 @@ CameraBuffer* ANativeWindowDisplayAdapter::allocateBufferList(int width, int hei
}
mFirstInit = true;
- mPixelFormat = getPixFormatConstant(format);
mFrameWidth = width;
mFrameHeight = height;
@@ -854,8 +785,10 @@ status_t ANativeWindowDisplayAdapter::returnBuffersToWindow()
continue;
}
- // unlock buffer before giving it up
- mapper.unlock(*handle);
+ if (!mUseExternalBufferLocking) {
+ // unlock buffer before giving it up
+ mapper.unlock(*handle);
+ }
ret = mANativeWindow->cancel_buffer(mANativeWindow, handle);
if ( NO_INIT == ret ) {
@@ -1091,13 +1024,27 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
}
for ( i = 0; i < mBufferCount; i++ )
- {
+ {
if ( dispFrame.mBuffer == &mBuffers[i] )
- {
+ {
break;
}
}
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ if ( mMeasureStandby ) {
+ CameraHal::PPM("Standby to first shot: Sensor Change completed - ", &mStandbyToShot);
+ mMeasureStandby = false;
+ } else if (CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType) {
+ CameraHal::PPM("Shot to snapshot: ", &mStartCapture);
+ mShotToShot = true;
+ } else if ( mShotToShot ) {
+ CameraHal::PPM("Shot to shot: ", &mStartCapture);
+ mShotToShot = false;
+ }
+
+#endif
android::AutoMutex lock(mLock);
@@ -1107,48 +1054,32 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
(!mPaused || CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType) &&
!mSuspend)
{
- uint32_t xOff = (dispFrame.mOffset% PAGE_SIZE);
- uint32_t yOff = (dispFrame.mOffset / PAGE_SIZE);
+ uint32_t xOff, yOff;
+
+ CameraHal::getXYFromOffset(&xOff, &yOff, dispFrame.mOffset, PAGE_SIZE, mPixelFormat);
// Set crop only if current x and y offsets do not match with frame offsets
- if((mXOff!=xOff) || (mYOff!=yOff))
- {
- CAMHAL_LOGDB("Offset %d xOff = %d, yOff = %d", dispFrame.mOffset, xOff, yOff);
- uint8_t bytesPerPixel;
- ///Calculate bytes per pixel based on the pixel format
- if(strcmp(mPixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
- {
- bytesPerPixel = 2;
- }
- else if(strcmp(mPixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0)
- {
- bytesPerPixel = 2;
- }
- else if(strcmp(mPixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
- {
- bytesPerPixel = 1;
- }
- else
- {
- bytesPerPixel = 1;
- }
+ if ((mXOff != xOff) || (mYOff != yOff)) {
+ CAMHAL_LOGDB("offset = %u left = %d top = %d right = %d bottom = %d",
+ dispFrame.mOffset, xOff, yOff ,
+ xOff + mPreviewWidth, yOff + mPreviewHeight);
- CAMHAL_LOGVB(" crop.left = %d crop.top = %d crop.right = %d crop.bottom = %d",
- xOff/bytesPerPixel, yOff , (xOff/bytesPerPixel)+mPreviewWidth, yOff+mPreviewHeight);
// We'll ignore any errors here, if the surface is
// already invalid, we'll know soon enough.
- mANativeWindow->set_crop(mANativeWindow, xOff/bytesPerPixel, yOff,
- (xOff/bytesPerPixel)+mPreviewWidth, yOff+mPreviewHeight);
+ mANativeWindow->set_crop(mANativeWindow, xOff, yOff,
+ xOff + mPreviewWidth, yOff + mPreviewHeight);
- ///Update the current x and y offsets
+ // Update the current x and y offsets
mXOff = xOff;
mYOff = yOff;
}
{
buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
- // unlock buffer before sending to display
- mapper.unlock(*handle);
+ if (!mUseExternalBufferLocking) {
+ // unlock buffer before sending to display
+ mapper.unlock(*handle);
+ }
ret = mANativeWindow->enqueue_buffer(mANativeWindow, handle);
}
if ( NO_ERROR != ret ) {
@@ -1163,33 +1094,14 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
Utils::Message msg;
mDisplayQ.put(&msg);
-
-#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
-
- if ( mMeasureStandby )
- {
- CameraHal::PPM("Standby to first shot: Sensor Change completed - ", &mStandbyToShot);
- mMeasureStandby = false;
- }
- else if (CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType)
- {
- CameraHal::PPM("Shot to snapshot: ", &mStartCapture);
- mShotToShot = true;
- }
- else if ( mShotToShot )
- {
- CameraHal::PPM("Shot to shot: ", &mStartCapture);
- mShotToShot = false;
- }
-#endif
-
}
else
{
buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
-
- // unlock buffer before giving it up
- mapper.unlock(*handle);
+ if (!mUseExternalBufferLocking) {
+ // unlock buffer before giving it up
+ mapper.unlock(*handle);
+ }
// cancel buffer and dequeue another one
ret = mANativeWindow->cancel_buffer(mANativeWindow, handle);
@@ -1259,23 +1171,24 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
if (i == mBufferCount) {
CAMHAL_LOGEB("Failed to find handle %p", buf);
}
-
- // lock buffer before sending to FrameProvider for filling
- bounds.left = 0;
- bounds.top = 0;
- bounds.right = mFrameWidth;
- bounds.bottom = mFrameHeight;
-
- int lock_try_count = 0;
- while (mapper.lock(*(buffer_handle_t *) mBuffers[i].opaque, CAMHAL_GRALLOC_USAGE, bounds, y_uv) < 0){
- if (++lock_try_count > LOCK_BUFFER_TRIES){
- if ( NULL != mErrorNotifier.get() ){
- mErrorNotifier->errorNotify(CAMERA_ERROR_UNKNOWN);
+ if (!mUseExternalBufferLocking) {
+ // lock buffer before sending to FrameProvider for filling
+ bounds.left = 0;
+ bounds.top = 0;
+ bounds.right = mFrameWidth;
+ bounds.bottom = mFrameHeight;
+
+ int lock_try_count = 0;
+ while (mapper.lock(*(buffer_handle_t *) mBuffers[i].opaque, CAMHAL_GRALLOC_USAGE, bounds, y_uv) < 0){
+ if (++lock_try_count > LOCK_BUFFER_TRIES){
+ if ( NULL != mErrorNotifier.get() ){
+ mErrorNotifier->errorNotify(CAMERA_ERROR_UNKNOWN);
+ }
+ return false;
+ }
+ CAMHAL_LOGEA("Gralloc Lock FrameReturn Error: Sleeping 15ms");
+ usleep(15000);
}
- return false;
- }
- CAMHAL_LOGEA("Gralloc Lock FrameReturn Error: Sleeping 15ms");
- usleep(15000);
}
{
@@ -1327,6 +1240,7 @@ void ANativeWindowDisplayAdapter::frameCallbackRelay(CameraFrame* caFrame)
void ANativeWindowDisplayAdapter::frameCallback(CameraFrame* caFrame)
{
///Call queueBuffer of overlay in the context of the callback thread
+
DisplayFrame df;
df.mBuffer = caFrame->mBuffer;
df.mType = (CameraFrame::FrameType) caFrame->mFrameType;
@@ -1338,6 +1252,10 @@ void ANativeWindowDisplayAdapter::frameCallback(CameraFrame* caFrame)
PostFrame(df);
}
+void ANativeWindowDisplayAdapter::setExternalLocking(bool extBuffLocking)
+{
+ mUseExternalBufferLocking = extBuffLocking;
+}
/*--------------------ANativeWindowDisplayAdapter Class ENDS here-----------------------------*/
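Note on the change above: the new mUseExternalBufferLocking flag moves gralloc lock/unlock ownership out of the display adapter. When external locking is enabled, the adapter skips its own lock/unlock calls and AppCallbackNotifier (next file) brackets CPU access with its own lock/unlock pair. A minimal sketch of that bracketing, assuming only the android::GraphicBufferMapper API of this Android generation; the helper name and usage flag are illustrative, not from the commit:

// Illustrative sketch only, not part of the commit: pair lock()/unlock()
// around CPU access, as AppCallbackNotifier does under external locking.
#include <hardware/gralloc.h>
#include <ui/GraphicBufferMapper.h>
#include <ui/Rect.h>

static void withCpuAccess(buffer_handle_t handle, int width, int height,
                          void (*consume)(void *vaddr)) {
    android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
    android::Rect bounds(width, height);
    void *vaddr[2] = { 0, 0 }; // y_uv-style array, as in the diff

    // lock() maps the buffer for the CPU; usage must match the allocation.
    if (mapper.lock(handle, GRALLOC_USAGE_SW_READ_OFTEN, bounds, vaddr) == 0) {
        consume(vaddr[0]);      // touch pixels only while locked
        mapper.unlock(handle);  // unlock before the buffer goes back to ANW
    }
}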
diff --git a/camera/Android.mk b/camera/Android.mk
index 6676f9e..73f5110 100644..100755
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -38,6 +38,10 @@ ifdef TI_CAMERAHAL_PROFILING
CAMERAHAL_CFLAGS += -DCAMERAHAL_OMX_PROFILING
endif
+ifeq ($(findstring omap5, $(TARGET_BOARD_PLATFORM)),omap5)
+ CAMERAHAL_CFLAGS += -DCAMERAHAL_OMAP5_CAPTURE_MODES
+endif
+
ifeq ($(ENHANCED_DOMX),true)
CAMERAHAL_CFLAGS += -DENHANCED_DOMX
endif
@@ -71,11 +75,16 @@ TI_CAMERAHAL_COMMON_SRC := \
BaseCameraAdapter.cpp \
MemoryManager.cpp \
Encoder_libjpeg.cpp \
+ Decoder_libjpeg.cpp \
SensorListener.cpp \
NV12_resize.cpp \
CameraParameters.cpp \
TICameraParameters.cpp \
- CameraHalCommon.cpp
+ CameraHalCommon.cpp \
+ FrameDecoder.cpp \
+ SwFrameDecoder.cpp \
+ OmxFrameDecoder.cpp \
+ DecoderFactory.cpp
TI_CAMERAHAL_OMX_SRC := \
OMXCameraAdapter/OMX3A.cpp \
diff --git a/camera/AppCallbackNotifier.cpp b/camera/AppCallbackNotifier.cpp
index 44d6a39..dcb7918 100644
--- a/camera/AppCallbackNotifier.cpp
+++ b/camera/AppCallbackNotifier.cpp
@@ -201,6 +201,7 @@ status_t AppCallbackNotifier::initialize()
mRecording = false;
mPreviewing = false;
+ mExternalLocking = false;
LOG_FUNCTION_NAME_EXIT;
@@ -699,6 +700,44 @@ static void copy2Dto1D(void *dst,
}
}
+static void copyCroppedNV12(CameraFrame* frame, unsigned char *dst)
+{
+ unsigned int stride, width, height;
+ uint32_t offset, uvoffset;
+ size_t size;
+
+ CAMHAL_ASSERT(frame && dst);
+
+ offset = frame->mOffset;
+ stride = frame->mAlignment;
+ width = frame->mWidth;
+ height = frame->mHeight;
+ size = frame->mLength;
+ unsigned const char *src = (unsigned char *) frame->mBuffer->mapped;
+
+ // offset to beginning of uv plane
+ uvoffset = (offset + size) * 2 / 3;
+ // offset to beginning of valid region of uv plane
+ uvoffset += (offset - (offset % stride)) / 2 + (offset % stride);
+
+ // start of valid luma region
+ unsigned const char *luma = src + offset;
+ // start of valid chroma region
+ unsigned const char *chroma = src + uvoffset;
+
+ // copy luma and chroma line x line
+ for (unsigned int i = 0; i < height; i++) {
+ memcpy(dst, luma, width);
+ luma += stride;
+ dst += width;
+ }
+ for (unsigned int i = 0; i < height / 2; i++) {
+ memcpy(dst, chroma, width);
+ chroma += stride;
+ dst += width;
+ }
+}
+
void AppCallbackNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t msgType)
{
camera_memory_t* picture = NULL;
@@ -712,13 +751,26 @@ void AppCallbackNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t ms
goto exit;
}
- picture = mRequestMemory(-1, frame->mLength, 1, NULL);
+ if (frame->mBuffer->format &&
+ (strcmp(frame->mBuffer->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) &&
+ (frame->mAlignment != frame->mWidth) &&
+ ( msgType == CAMERA_MSG_RAW_IMAGE )) {
+ size_t size;
- if (NULL != picture) {
- dest = picture->data;
- if (NULL != dest) {
- src = (void *) ((unsigned int) frame->mBuffer->mapped + frame->mOffset);
- memcpy(dest, src, frame->mLength);
+ size = CameraHal::calculateBufferSize(frame->mBuffer->format, frame->mWidth, frame->mHeight);
+ picture = mRequestMemory(-1, size, 1, NULL);
+ if (picture && picture->data) {
+ copyCroppedNV12(frame, (unsigned char*) picture->data);
+ }
+ } else {
+ picture = mRequestMemory(-1, frame->mLength, 1, NULL);
+
+ if (NULL != picture) {
+ dest = picture->data;
+ if (NULL != dest) {
+ src = (void *) ((unsigned int) frame->mBuffer->mapped + frame->mOffset);
+ memcpy(dest, src, frame->mLength);
+ }
}
}
}
@@ -735,6 +787,38 @@ void AppCallbackNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t ms
}
}
+void AppCallbackNotifier::lockBufferAndUpdatePtrs(CameraFrame* frame)
+{
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds;
+
+ bounds.left = 0;
+ bounds.top = 0;
+ bounds.right = frame->mWidth;
+ bounds.bottom = frame->mHeight;
+ void *y_uv[2];
+ buffer_handle_t *handle = reinterpret_cast<buffer_handle_t *>(frame->mBuffer->opaque);
+ mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ frame->mBuffer->mapped = y_uv[0];
+ frame->mYuv[0] = reinterpret_cast<int>(frame->mBuffer->mapped);
+ frame->mYuv[1] = frame->mYuv[0] + (frame->mLength + frame->mOffset)*2/3;
+}
+
+void AppCallbackNotifier::unlockBufferAndUpdatePtrs(CameraFrame* frame)
+{
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ buffer_handle_t *handle = reinterpret_cast<buffer_handle_t *>(frame->mBuffer->opaque);
+ mapper.unlock(*handle);
+ frame->mBuffer->mapped = NULL;
+ frame->mYuv[0] = NULL;
+ frame->mYuv[1] = NULL;
+}
+
+void AppCallbackNotifier::setExternalLocking(bool extBuffLocking)
+{
+ mExternalLocking = extBuffLocking;
+}
+
void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType)
{
camera_memory_t* picture = NULL;
@@ -754,7 +838,9 @@ void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t ms
}
dest = &mPreviewBuffers[mPreviewBufCount];
-
+ if (mExternalLocking) {
+ lockBufferAndUpdatePtrs(frame);
+ }
CAMHAL_LOGVB("%d:copy2Dto1D(%p, %p, %d, %d, %d, %d, %d,%s)",
__LINE__,
dest,
@@ -806,6 +892,10 @@ void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t ms
mDataCb(msgType, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie);
}
+ if (mExternalLocking) {
+ unlockBufferAndUpdatePtrs(frame);
+ }
+
// increment for next buffer
mPreviewBufCount = (mPreviewBufCount + 1) % AppCallbackNotifier::MAX_BUFFERS;
}
@@ -989,9 +1079,9 @@ void AppCallbackNotifier::notifyFrame()
current_snapshot = (mPreviewBufCount + MAX_BUFFERS - 1) % MAX_BUFFERS;
tn_jpeg->src = (uint8_t *)mPreviewBuffers[current_snapshot].mapped;
tn_jpeg->src_size = mPreviewMemory->size / MAX_BUFFERS;
- tn_jpeg->dst_size = calculateBufferSize(tn_width,
- tn_height,
- previewFormat);
+ tn_jpeg->dst_size = CameraHal::calculateBufferSize(previewFormat,
+ tn_width,
+ tn_height);
tn_jpeg->dst = (uint8_t*) malloc(tn_jpeg->dst_size);
tn_jpeg->quality = tn_quality;
tn_jpeg->in_width = width;
@@ -1080,7 +1170,9 @@ void AppCallbackNotifier::notifyFrame()
bounds.top = 0;
bounds.right = mVideoWidth;
bounds.bottom = mVideoHeight;
-
+ if (mExternalLocking) {
+ lockBufferAndUpdatePtrs(frame);
+ }
void *y_uv[2];
mapper.lock((buffer_handle_t)vBuf, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
y_uv[1] = y_uv[0] + mVideoHeight*4096;
@@ -1103,6 +1195,9 @@ void AppCallbackNotifier::notifyFrame()
VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0);
mapper.unlock((buffer_handle_t)vBuf->opaque);
+ if (mExternalLocking) {
+ unlockBufferAndUpdatePtrs(frame);
+ }
videoMetadataBuffer->metadataBufferType = (int) android::kMetadataBufferTypeCameraSource;
/* FIXME remove cast */
videoMetadataBuffer->handle = (void *)vBuf->opaque;
@@ -1130,10 +1225,15 @@ void AppCallbackNotifier::notifyFrame()
CAMHAL_LOGEA("Error! One of the video buffers is NULL");
break;
}
-
+ if (mExternalLocking) {
+ lockBufferAndUpdatePtrs(frame);
+ }
*reinterpret_cast<buffer_handle_t*>(fakebuf->data) = reinterpret_cast<buffer_handle_t>(frame->mBuffer->mapped);
mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME, fakebuf, 0, mCallbackCookie);
fakebuf->release(fakebuf);
+ if (mExternalLocking) {
+ unlockBufferAndUpdatePtrs(frame);
+ }
}
}
}
@@ -1459,49 +1559,6 @@ void AppCallbackNotifier::setFrameProvider(FrameNotifier *frameNotifier)
LOG_FUNCTION_NAME_EXIT;
}
-size_t AppCallbackNotifier::calculateBufferSize(size_t width, size_t height, const char *pixelFormat)
-{
- size_t res = 0;
-
- LOG_FUNCTION_NAME
-
- if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
- res = width*height*2;
- } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
- res = (width*height*3)/2;
- } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
- res = width*height*2;
- } else if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
- size_t yStride, uvStride, ySize, uvSize;
- alignYV12(width, height, yStride, uvStride, ySize, uvSize, res);
- mPreviewPixelFormat = android::CameraParameters::PIXEL_FORMAT_YUV420P;
- }
-
- LOG_FUNCTION_NAME_EXIT;
-
- return res;
-}
-
-const char* AppCallbackNotifier::getContstantForPixelFormat(const char *pixelFormat) {
- if (!pixelFormat) {
- // returning NV12 as default
- return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
- }
-
- if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
- return android::CameraParameters::PIXEL_FORMAT_YUV422I;
- } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ) {
- return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
- } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
- return android::CameraParameters::PIXEL_FORMAT_RGB565;
- } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
- return android::CameraParameters::PIXEL_FORMAT_YUV420P;
- } else {
- // returning NV12 as default
- return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
- }
-}
-
status_t AppCallbackNotifier::startPreviewCallbacks(android::CameraParameters &params, CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
{
unsigned int *bufArr;
@@ -1531,8 +1588,8 @@ status_t AppCallbackNotifier::startPreviewCallbacks(android::CameraParameters &p
mPreviewWidth = w;
mPreviewHeight = h;
mPreviewStride = 4096;
- mPreviewPixelFormat = getContstantForPixelFormat(params.getPreviewFormat());
- size = calculateBufferSize(w, h, mPreviewPixelFormat);
+ mPreviewPixelFormat = CameraHal::getPixelFormatConstant(params.getPreviewFormat());
+ size = CameraHal::calculateBufferSize(mPreviewPixelFormat, w, h);
mPreviewMemory = mRequestMemory(-1, size, AppCallbackNotifier::MAX_BUFFERS, NULL);
if (!mPreviewMemory) {
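Note on the new copyCroppedNV12() above: its offset arithmetic follows from the NV12 layout. With row stride S and full buffer height H, the luma plane occupies S*H bytes (two thirds of the total S*H*3/2), so the chroma plane starts at total*2/3. A crop at (left, top) begins top*S + left bytes into luma but only (top/2)*S + left bytes into chroma: chroma has half the rows at the same stride, and an interleaved CbCr pair costs the same horizontal bytes per pixel as luma. A small worked example with assumed numbers (stride 4096, height 1080, crop at (64, 32)); the helper is hypothetical:

// Hypothetical helper mirroring the arithmetic in copyCroppedNV12() above.
#include <cstdio>

static void nv12CropOffsets(unsigned stride, unsigned fullHeight,
                            unsigned left, unsigned top) {
    unsigned lumaSize  = stride * fullHeight;           // uv plane starts here
    unsigned lumaOff   = top * stride + left;           // crop start in luma
    // Chroma: half the rows at full stride; horizontal byte offset equals
    // 'left' because each UV pair is 2 bytes at half horizontal resolution.
    unsigned chromaOff = lumaSize + (top / 2) * stride + left;
    printf("luma @ %u, chroma @ %u\n", lumaOff, chromaOff);
}

int main() { nv12CropOffsets(4096, 1080, 64, 32); } // luma @ 131136, chroma @ 4489280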
diff --git a/camera/BaseCameraAdapter.cpp b/camera/BaseCameraAdapter.cpp
index 36b3782..84cde37 100644
--- a/camera/BaseCameraAdapter.cpp
+++ b/camera/BaseCameraAdapter.cpp
@@ -544,17 +544,6 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
android::AutoMutex lock(mCaptureBufferLock);
mCaptureBuffers = desc->mBuffers;
mCaptureBuffersLength = desc->mLength;
- mCaptureBuffersAvailable.clear();
- for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
- {
- mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 0);
- }
- // initial ref count for undeqeueued buffers is 1 since buffer provider
- // is still holding on to it
- for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
- {
- mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 1);
- }
}
if ( NULL != desc )
@@ -1910,7 +1899,6 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
//These events don't change the current state
case CAMERA_QUERY_RESOLUTION_PREVIEW:
- case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->INTIALIZED_STATE event = %s",
printState);
@@ -2122,6 +2110,7 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
mNextState = PREVIEW_STATE;
break;
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
case CAMERA_START_IMAGE_CAPTURE:
CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->CAPTURE_STATE event = %s",
printState);
@@ -2430,13 +2419,23 @@ status_t BaseCameraAdapter::setState(CameraCommands operation)
printState);
mNextState = PREVIEW_STATE;
break;
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
case CAMERA_START_IMAGE_CAPTURE:
- case CAMERA_USE_BUFFERS_REPROCESS:
CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->REPROCESS_STATE event = %s",
printState);
mNextState = REPROCESS_STATE;
break;
+ case CAMERA_USE_BUFFERS_REPROCESS:
+ CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->REPROCESS_STATE event = %s",
+ printState);
+ mNextState = LOADED_REPROCESS_STATE;
+ break;
+ case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch REPROCESS_STATE->LOADED_CAPTURE_STATE event = %s",
+ printState);
+ mNextState = LOADED_CAPTURE_STATE;
+ break;
default:
CAMHAL_LOGEB("Adapter state switch REPROCESS_STATE Invalid Op! event = %s",
printState);
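Net effect of the BaseCameraAdapter hunks above: CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE is accepted in the capture and reprocess states rather than being an initialized-state no-op, and registering buffers from REPROCESS_STATE now advances the state machine instead of looping. A condensed sketch of just the reprocess row, with enum values named as in the diff and everything else simplified:

// Condensed sketch of the REPROCESS_STATE transitions above; not the real
// setState() machinery, just the row this commit changes.
enum State { REPROCESS_STATE, LOADED_REPROCESS_STATE, LOADED_CAPTURE_STATE };
enum Event { CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE, CAMERA_START_IMAGE_CAPTURE,
             CAMERA_USE_BUFFERS_REPROCESS, CAMERA_USE_BUFFERS_IMAGE_CAPTURE };

static State nextFromReprocess(Event e) {
    switch (e) {
    case CAMERA_USE_BUFFERS_REPROCESS:     return LOADED_REPROCESS_STATE;
    case CAMERA_USE_BUFFERS_IMAGE_CAPTURE: return LOADED_CAPTURE_STATE;
    default:                               return REPROCESS_STATE; // query/capture stay put
    }
}

int main() { return nextFromReprocess(CAMERA_USE_BUFFERS_REPROCESS) == LOADED_REPROCESS_STATE ? 0 : 1; }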
diff --git a/camera/BufferSourceAdapter.cpp b/camera/BufferSourceAdapter.cpp
index d63b117..3c4e698 100644
--- a/camera/BufferSourceAdapter.cpp
+++ b/camera/BufferSourceAdapter.cpp
@@ -31,8 +31,7 @@ static int getANWFormat(const char* parameters_format)
if (parameters_format != NULL) {
if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
CAMHAL_LOGDA("CbYCrY format selected");
- // TODO(XXX): not defined yet
- format = -1;
+ format = HAL_PIXEL_FORMAT_TI_UYVY;
} else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
CAMHAL_LOGDA("YUV420SP format selected");
format = HAL_PIXEL_FORMAT_TI_NV12;
@@ -58,11 +57,12 @@ static int getUsageFromANW(int format)
switch (format) {
case HAL_PIXEL_FORMAT_TI_NV12:
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ case HAL_PIXEL_FORMAT_TI_UYVY:
// This usage flag indicates to gralloc we want the
// buffers to come from system heap
usage |= GRALLOC_USAGE_PRIVATE_0;
break;
- case HAL_PIXEL_FORMAT_TI_Y16:
default:
// No special flags needed
break;
@@ -78,6 +78,8 @@ static const char* getFormatFromANW(int format)
return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
case HAL_PIXEL_FORMAT_TI_Y16:
return android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ return android::CameraParameters::PIXEL_FORMAT_YUV422I;
default:
break;
}
@@ -88,6 +90,7 @@ static CameraFrame::FrameType formatToOutputFrameType(const char* format) {
switch (getANWFormat(format)) {
case HAL_PIXEL_FORMAT_TI_NV12:
case HAL_PIXEL_FORMAT_TI_Y16:
+ case HAL_PIXEL_FORMAT_TI_UYVY:
// Assuming NV12 1D is RAW or Image frame
return CameraFrame::RAW_FRAME;
default:
@@ -102,6 +105,7 @@ static int getHeightFromFormat(const char* format, int stride, int size) {
case HAL_PIXEL_FORMAT_TI_NV12:
return (size / (3 * stride)) * 2;
case HAL_PIXEL_FORMAT_TI_Y16:
+ case HAL_PIXEL_FORMAT_TI_UYVY:
return (size / stride) / 2;
default:
break;
@@ -112,6 +116,11 @@ static int getHeightFromFormat(const char* format, int stride, int size) {
/*--------------------BufferSourceAdapter Class STARTS here-----------------------------*/
+///Constant definitions
+// TODO(XXX): Temporarily increase number of buffers we can allocate from ANW
+// until faux-NPA mode is implemented
+const int BufferSourceAdapter::NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP = 15;
+
/**
* Display Adapter class STARTS here..
*/
@@ -136,6 +145,10 @@ BufferSourceAdapter::~BufferSourceAdapter()
{
LOG_FUNCTION_NAME;
+ freeBufferList(mBuffers);
+
+ android::AutoMutex lock(mLock);
+
destroy();
if (mFrameProvider) {
@@ -155,12 +168,6 @@ BufferSourceAdapter::~BufferSourceAdapter()
mReturnFrame.clear();
}
- if( mBuffers != NULL)
- {
- delete [] mBuffers;
- mBuffers = NULL;
- }
-
LOG_FUNCTION_NAME_EXIT;
}
@@ -193,12 +200,33 @@ int BufferSourceAdapter::setPreviewWindow(preview_stream_ops_t *source)
return BAD_VALUE;
}
- if ( source == mBufferSource ) {
- return ALREADY_EXISTS;
- }
+ if (mBufferSource) {
+ char id1[OP_STR_SIZE], id2[OP_STR_SIZE];
+ status_t ret;
- // Destroy the existing source, if it exists
- destroy();
+ ret = extendedOps()->get_id(mBufferSource, id1, sizeof(id1));
+ if (ret != 0) {
+ CAMHAL_LOGE("Surface::getId returned error %d", ret);
+ return ret;
+ }
+
+ ret = extendedOps()->get_id(source, id2, sizeof(id2));
+ if (ret != 0) {
+ CAMHAL_LOGE("Surface::getId returned error %d", ret);
+ return ret;
+ }
+ if ((0 >= strlen(id1)) || (0 >= strlen(id2))) {
+ CAMHAL_LOGE("Cannot set ST without name: id1:\"%s\" id2:\"%s\"",
+ id1, id2);
+ return NOT_ENOUGH_DATA;
+ }
+ if (0 == strcmp(id1, id2)) {
+ return ALREADY_EXISTS;
+ }
+
+ // client has to unset mBufferSource before being able to set a new one
+ return BAD_VALUE;
+ }
// Move to new source obj
mBufferSource = source;
@@ -208,6 +236,19 @@ int BufferSourceAdapter::setPreviewWindow(preview_stream_ops_t *source)
return NO_ERROR;
}
+bool BufferSourceAdapter::match(const char * str) {
+ char id1[OP_STR_SIZE];
+ status_t ret;
+
+ ret = extendedOps()->get_id(mBufferSource, id1, sizeof(id1));
+
+ if (ret != 0) {
+ CAMHAL_LOGE("Surface::getId returned error %d", ret);
+ }
+
+ return strcmp(id1, str) == 0;
+}
+
int BufferSourceAdapter::setFrameProvider(FrameNotifier *frameProvider)
{
LOG_FUNCTION_NAME;
@@ -324,6 +365,7 @@ CameraBuffer* BufferSourceAdapter::allocateBufferList(int width, int dummyHeight
int pixFormat = getANWFormat(format);
int usage = getUsageFromANW(pixFormat);
+ mPixelFormat = CameraHal::getPixelFormatConstant(format);
// Set gralloc usage bits for window.
err = mBufferSource->set_usage(mBufferSource, usage);
@@ -338,7 +380,7 @@ CameraBuffer* BufferSourceAdapter::allocateBufferList(int width, int dummyHeight
return NULL;
}
- CAMHAL_LOGDB("Number of buffers set to ANativeWindow %d", numBufs);
+ CAMHAL_LOGDB("Number of buffers set to BufferSourceAdapter %d", numBufs);
// Set the number of buffers needed for this buffer source
err = mBufferSource->set_buffer_count(mBufferSource, numBufs);
if (err != 0) {
@@ -399,9 +441,10 @@ CameraBuffer* BufferSourceAdapter::allocateBufferList(int width, int dummyHeight
CAMHAL_LOGDB("got handle %p", handle);
mBuffers[i].opaque = (void *)handle;
mBuffers[i].type = CAMERA_BUFFER_ANW;
+ mBuffers[i].format = mPixelFormat;
mFramesWithCameraAdapterMap.add(handle, i);
- bytes = getBufSize(format, width, height);
+ bytes = CameraHal::calculateBufferSize(format, width, height);
}
for( i = 0; i < mBufferCount-undequeued; i++ ) {
@@ -436,7 +479,6 @@ CameraBuffer* BufferSourceAdapter::allocateBufferList(int width, int dummyHeight
mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) mBuffers[i].opaque);
}
- mPixelFormat = getPixFormatConstant(format);
mFrameWidth = width;
mFrameHeight = height;
mBufferSourceDirection = BUFFER_SOURCE_TAP_OUT;
@@ -468,6 +510,118 @@ CameraBuffer* BufferSourceAdapter::allocateBufferList(int width, int dummyHeight
}
+CameraBuffer *BufferSourceAdapter::getBuffers(bool reset) {
+ int undequeued = 0;
+ status_t err;
+ android::Mutex::Autolock lock(mLock);
+
+ if (!mBufferSource || !mBuffers) {
+ CAMHAL_LOGE("Adapter is not set up properly: "
+ "mBufferSource:%p mBuffers:%p",
+ mBufferSource, mBuffers);
+ goto fail;
+ }
+
+ // CameraHal is indicating to us that the state of the mBuffer
+ // might have changed. We might need to check the state of the
+ // buffer list and pass a new one depending on the state of our
+ // surface
+ if (reset) {
+ const int lnumBufs = mBufferCount;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds(mFrameWidth, mFrameHeight);
+ void *y_uv[2];
+ CameraBuffer * newBuffers = NULL;
+ unsigned int index = 0;
+ android::KeyedVector<void*, int> missingIndices;
+
+ newBuffers = new CameraBuffer [lnumBufs];
+ memset (newBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
+
+ // Use this vector to figure out missing indices
+ for (int i = 0; i < mBufferCount; i++) {
+ missingIndices.add(mBuffers[i].opaque, i);
+ }
+
+ // assign buffers that we have already dequeued
+ for (index = 0; index < mFramesWithCameraAdapterMap.size(); index++) {
+ int value = mFramesWithCameraAdapterMap.valueAt(index);
+ newBuffers[index].opaque = mBuffers[value].opaque;
+ newBuffers[index].type = mBuffers[value].type;
+ newBuffers[index].format = mBuffers[value].format;
+ newBuffers[index].mapped = mBuffers[value].mapped;
+ mFramesWithCameraAdapterMap.replaceValueAt(index, index);
+ missingIndices.removeItem(newBuffers[index].opaque);
+ }
+
+ mBufferSource->get_min_undequeued_buffer_count(mBufferSource, &undequeued);
+
+ // dequeue the rest of the buffers
+ for (index; index < (unsigned int)(mBufferCount-undequeued); index++) {
+ buffer_handle_t *handle;
+ int stride; // dummy variable to get stride
+
+ err = mBufferSource->dequeue_buffer(mBufferSource, &handle, &stride);
+ if (err != 0) {
+ CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mBufferSource = NULL;
+ }
+ goto fail;
+ }
+ newBuffers[index].opaque = (void *)handle;
+ newBuffers[index].type = CAMERA_BUFFER_ANW;
+ newBuffers[index].format = mPixelFormat;
+ mFramesWithCameraAdapterMap.add(handle, index);
+
+ mBufferSource->lock_buffer(mBufferSource, handle);
+ mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ newBuffers[index].mapped = y_uv[0];
+ CAMHAL_LOGDB("got handle %p", handle);
+
+ missingIndices.removeItem(newBuffers[index].opaque);
+ }
+
+ // now we need to figure out which buffers aren't dequeued
+ // which are in mBuffers but not newBuffers yet
+ if ((mBufferCount - index) != missingIndices.size()) {
+ CAMHAL_LOGD("Hrmm somethings gone awry. We are missing a different number"
+ " of buffers than we can fill");
+ }
+ for (unsigned int i = 0; i < missingIndices.size(); i++) {
+ int j = missingIndices.valueAt(i);
+
+ CAMHAL_LOGD("Filling at %d", j);
+ newBuffers[index].opaque = mBuffers[j].opaque;
+ newBuffers[index].type = mBuffers[j].type;
+ newBuffers[index].format = mBuffers[j].format;
+ newBuffers[index].mapped = mBuffers[j].mapped;
+ }
+
+ delete [] mBuffers;
+ mBuffers = newBuffers;
+ }
+
+ return mBuffers;
+
+ fail:
+ return NULL;
+}
+
+unsigned int BufferSourceAdapter::getSize() {
+ android::Mutex::Autolock lock(mLock);
+ return CameraHal::calculateBufferSize(mPixelFormat, mFrameWidth, mFrameHeight);
+}
+
+int BufferSourceAdapter::getBufferCount() {
+ int count = -1;
+
+ android::Mutex::Autolock lock(mLock);
+ if (mBufferSource) extendedOps()->get_buffer_count(mBufferSource, &count);
+ return count;
+}
+
CameraBuffer* BufferSourceAdapter::getBufferList(int *num) {
LOG_FUNCTION_NAME;
status_t err;
@@ -478,6 +632,7 @@ CameraBuffer* BufferSourceAdapter::getBufferList(int *num) {
// TODO(XXX): Only supporting one input buffer at a time right now
*num = 1;
+ mBufferCount = *num;
mBuffers = new CameraBuffer [lnumBufs];
memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
@@ -485,7 +640,10 @@ CameraBuffer* BufferSourceAdapter::getBufferList(int *num) {
return NULL;
}
- err = extendedOps()->update_and_get_buffer(mBufferSource, &handle, &mBuffers[0].stride);
+ err = extendedOps()->update_and_get_buffer(mBufferSource,
+ &handle,
+ &mBuffers[0].stride,
+ &mBuffers[0].privateData);
if (err != 0) {
CAMHAL_LOGEB("update and get buffer failed: %s (%d)", strerror(-err), -err);
if ( ENODEV == err ) {
@@ -503,6 +661,10 @@ CameraBuffer* BufferSourceAdapter::getBufferList(int *num) {
err = extendedOps()->get_buffer_dimension(mBufferSource, &mBuffers[0].width, &mBuffers[0].height);
err = extendedOps()->get_buffer_format(mBufferSource, &formatSource);
+ int t, l, r, b, w, h;
+ err = extendedOps()->get_crop(mBufferSource, &l, &t, &r, &b);
+ err = extendedOps()->get_current_size(mBufferSource, &w, &h);
+
// lock buffer
{
void *y_uv[2];
@@ -516,6 +678,8 @@ CameraBuffer* BufferSourceAdapter::getBufferList(int *num) {
mPixelFormat = getFormatFromANW(formatSource);
mBuffers[0].format = mPixelFormat;
+ mBuffers[0].actual_size = CameraHal::calculateBufferSize(mPixelFormat, w, h);
+ mBuffers[0].offset = t * w + l * CameraHal::getBPP(mPixelFormat);
mBufferSourceDirection = BUFFER_SOURCE_TAP_IN;
return mBuffers;
@@ -649,16 +813,14 @@ int BufferSourceAdapter::freeBufferList(CameraBuffer * buflist)
status_t ret = NO_ERROR;
+ if ( mBuffers != buflist ) {
+ return BAD_VALUE;
+ }
+
android::AutoMutex lock(mLock);
if (mBufferSourceDirection == BUFFER_SOURCE_TAP_OUT) returnBuffersToWindow();
- if ( NULL != buflist )
- {
- delete [] buflist;
- mBuffers = NULL;
- }
-
if( mBuffers != NULL)
{
delete [] mBuffers;
@@ -686,21 +848,31 @@ void BufferSourceAdapter::handleFrameCallback(CameraFrame* frame)
status_t ret = NO_ERROR;
buffer_handle_t *handle = NULL;
int i;
+ uint32_t x, y;
android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::AutoMutex lock(mLock);
+
if (!mBuffers || !frame->mBuffer) {
CAMHAL_LOGEA("Adapter sent BufferSourceAdapter a NULL frame?");
return;
}
- android::AutoMutex lock(mLock);
-
for ( i = 0; i < mBufferCount; i++ ) {
if (frame->mBuffer == &mBuffers[i]) {
break;
}
}
+ if (i >= mBufferCount) {
+ CAMHAL_LOGD("Can't find frame in buffer list");
+ if (frame->mFrameType != CameraFrame::REPROCESS_INPUT_FRAME) {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ static_cast<CameraFrame::FrameType>(frame->mFrameType));
+ }
+ return;
+ }
+
handle = (buffer_handle_t *) mBuffers[i].opaque;
// Handle input buffers
@@ -709,9 +881,19 @@ void BufferSourceAdapter::handleFrameCallback(CameraFrame* frame)
if (frame->mFrameType == CameraFrame::REPROCESS_INPUT_FRAME) {
CAMHAL_LOGD("Unlock %p (buffer #%d)", handle, i);
mapper.unlock(*handle);
+ extendedOps()->release_buffer(mBufferSource, mBuffers[i].privateData);
return;
}
+ CameraHal::getXYFromOffset(&x, &y, frame->mOffset, frame->mAlignment, mPixelFormat);
+ CAMHAL_LOGVB("offset = %u left = %d top = %d right = %d bottom = %d",
+ frame->mOffset, x, y, x + frame->mWidth, y + frame->mHeight);
+ ret = mBufferSource->set_crop(mBufferSource, x, y, x + frame->mWidth, y + frame->mHeight);
+ if (NO_ERROR != ret) {
+ CAMHAL_LOGE("mBufferSource->set_crop returned error %d", ret);
+ goto fail;
+ }
+
if ( NULL != frame->mMetaData.get() ) {
camera_memory_t *extMeta = frame->mMetaData->getExtendedMetadata();
if ( NULL != extMeta ) {
@@ -720,6 +902,7 @@ void BufferSourceAdapter::handleFrameCallback(CameraFrame* frame)
ret = extendedOps()->set_metadata(mBufferSource, extMeta);
if (ret != 0) {
CAMHAL_LOGE("Surface::set_metadata returned error %d", ret);
+ goto fail;
}
}
}
@@ -730,12 +913,18 @@ void BufferSourceAdapter::handleFrameCallback(CameraFrame* frame)
ret = mBufferSource->enqueue_buffer(mBufferSource, handle);
if (ret != 0) {
CAMHAL_LOGE("Surface::queueBuffer returned error %d", ret);
+ goto fail;
}
mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) frame->mBuffer->opaque);
- // signal return frame thread that it can dequeue a buffer now
- mReturnFrame->signal();
+ return;
+
+fail:
+ mFramesWithCameraAdapterMap.clear();
+ mBufferSource = NULL;
+ mReturnFrame->requestExit();
+ mQueueFrame->requestExit();
}
@@ -750,7 +939,9 @@ bool BufferSourceAdapter::handleFrameReturn()
void *y_uv[2];
android::Rect bounds(mFrameWidth, mFrameHeight);
- if ( NULL == mBufferSource ) {
+ android::AutoMutex lock(mLock);
+
+ if ( (NULL == mBufferSource) || (NULL == mBuffers) ) {
return false;
}
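Several BufferSourceAdapter changes above hinge on identifying a tap surface by the string id returned through the extended get_id op, rather than by comparing preview_stream_ops pointers: setPreviewWindow() now rejects a second, differently named surface, and the new match() lets CameraHal find an existing adapter when the same surface is passed again. A simplified sketch of that lookup, with hypothetical container and type names standing in for CameraHal's adapter vectors:

// Simplified sketch: find the adapter wrapping a surface with the same id.
// In the diff this is BufferSourceAdapter::match() iterated by CameraHal.
#include <string>
#include <vector>

struct Adapter { std::string id; };

static Adapter *findByStreamId(std::vector<Adapter> &adapters, const char *id) {
    for (size_t i = 0; i < adapters.size(); i++) {
        if (adapters[i].id == id) return &adapters[i]; // same surface, even if ops ptr differs
    }
    return 0; // caller creates and registers a new adapter
}

int main() {
    std::vector<Adapter> adapters(2);
    adapters[0].id = "tapout-1";
    adapters[1].id = "tapout-2";
    return findByStreamId(adapters, "tapout-2") ? 0 : 1;
}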
diff --git a/camera/CameraHal.cpp b/camera/CameraHal.cpp
index 71ba1bb..37baec8 100644
--- a/camera/CameraHal.cpp
+++ b/camera/CameraHal.cpp
@@ -35,7 +35,7 @@ namespace Ti {
namespace Camera {
extern "C" CameraAdapter* OMXCameraAdapter_Factory(size_t);
-extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t);
+extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t, CameraHal*);
/*****************************************************************************/
@@ -68,7 +68,11 @@ extern const char * const kYuvImagesOutputDirPath = "/data/misc/camera/YuV_PiCtU
#ifdef OMAP_ENHANCEMENT_CPCAM
-static int dummy_update_and_get_buffer(preview_stream_ops_t*, buffer_handle_t**, int*) {
+static int dummy_update_and_get_buffer(preview_stream_ops_t*, buffer_handle_t**, int*,int*) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_release_buffer(preview_stream_ops_t*, int slot) {
return INVALID_OPERATION;
}
@@ -83,15 +87,38 @@ static int dummy_get_buffer_format(preview_stream_ops_t*, int*) {
static int dummy_set_metadata(preview_stream_ops_t*, const camera_memory_t*) {
return INVALID_OPERATION;
}
+
+static int dummy_get_id(preview_stream_ops_t*, char *data, unsigned int dataSize) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_get_buffer_count(preview_stream_ops_t*, int *count) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_get_crop(preview_stream_ops_t*,
+ int *, int *, int *, int *) {
+ return INVALID_OPERATION;
+}
+
+static int dummy_get_current_size(preview_stream_ops_t*,
+ int *, int *) {
+ return INVALID_OPERATION;
+}
#endif
#ifdef OMAP_ENHANCEMENT
static preview_stream_extended_ops_t dummyPreviewStreamExtendedOps = {
#ifdef OMAP_ENHANCEMENT_CPCAM
dummy_update_and_get_buffer,
+ dummy_release_buffer,
dummy_get_buffer_dimension,
dummy_get_buffer_format,
dummy_set_metadata,
+ dummy_get_id,
+ dummy_get_buffer_count,
+ dummy_get_crop,
+ dummy_get_current_size,
#endif
};
#endif
@@ -129,6 +156,7 @@ static void orientation_cb(uint32_t orientation, uint32_t tilt, void* cookie) {
}
}
+
/*-------------Camera Hal Interface Method definitions STARTS here--------------------*/
/**
@@ -409,16 +437,6 @@ int CameraHal::setParameters(const android::CameraParameters& params)
updateRequired = true;
}
- if ((valstr = params.get(TICameraParameters::KEY_IPP)) != NULL) {
- if (isParameterValid(valstr,mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES))) {
- CAMHAL_LOGDB("IPP mode set %s", valstr);
- mParameters.set(TICameraParameters::KEY_IPP, valstr);
- } else {
- CAMHAL_LOGEB("ERROR: Invalid IPP mode: %s", valstr);
- return BAD_VALUE;
- }
- }
-
#ifdef OMAP_ENHANCEMENT_VTC
if ((valstr = params.get(TICameraParameters::KEY_VTC_HINT)) != NULL ) {
mParameters.set(TICameraParameters::KEY_VTC_HINT, valstr);
@@ -440,7 +458,21 @@ int CameraHal::setParameters(const android::CameraParameters& params)
}
}
#endif
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_IPP)) != NULL) {
+ if (isParameterValid(valstr,mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES))) {
+ if ((mParameters.get(TICameraParameters::KEY_IPP) == NULL) ||
+ (strcmp(valstr, mParameters.get(TICameraParameters::KEY_IPP)))) {
+ CAMHAL_LOGDB("IPP mode set %s", params.get(TICameraParameters::KEY_IPP));
+ mParameters.set(TICameraParameters::KEY_IPP, valstr);
+ restartPreviewRequired = true;
+ }
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid IPP mode: %s", valstr);
+ return BAD_VALUE;
}
+ }
if ( (valstr = params.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT)) != NULL )
{
@@ -1109,10 +1141,10 @@ int CameraHal::setParameters(const android::CameraParameters& params)
}
//TI extensions for enable/disable algos
- if( (valstr = params.get(TICameraParameters::KEY_ALGO_FIXED_GAMMA)) != NULL )
+ if( (valstr = params.get(TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA)) != NULL )
{
- CAMHAL_LOGDB("Fixed Gamma set %s", valstr);
- mParameters.set(TICameraParameters::KEY_ALGO_FIXED_GAMMA, valstr);
+ CAMHAL_LOGDB("External Gamma set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA, valstr);
}
if( (valstr = params.get(TICameraParameters::KEY_ALGO_NSF1)) != NULL )
@@ -1145,6 +1177,12 @@ int CameraHal::setParameters(const android::CameraParameters& params)
mParameters.set(TICameraParameters::KEY_ALGO_GIC, valstr);
}
+ if( (valstr = params.get(TICameraParameters::KEY_GAMMA_TABLE)) != NULL )
+ {
+ CAMHAL_LOGDB("Manual gamma table set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GAMMA_TABLE, valstr);
+ }
+
android::CameraParameters adapterParams = mParameters;
#ifdef OMAP_ENHANCEMENT
@@ -1215,7 +1253,9 @@ int CameraHal::setParameters(const android::CameraParameters& params)
// enabled or doesSetParameterNeedUpdate says so. Initial setParameters to camera adapter,
// will be called in startPreview()
// TODO(XXX): Need to identify other parameters that need update from camera adapter
- if ( (NULL != mCameraAdapter) && (mPreviewEnabled || updateRequired) && !restartPreviewRequired ) {
+ if ( (NULL != mCameraAdapter) &&
+ (mPreviewEnabled || updateRequired) &&
+ (!(mPreviewEnabled && restartPreviewRequired)) ) {
ret |= mCameraAdapter->setParameters(adapterParams);
}
@@ -1253,8 +1293,17 @@ int CameraHal::setParameters(const android::CameraParameters& params)
ret = restartPreview();
} else if (restartPreviewRequired && !previewEnabled() &&
mDisplayPaused && !mRecordingEnabled) {
- CAMHAL_LOGDA("Stopping Preview");
- forceStopPreview();
+ CAMHAL_LOGDA("Restarting preview in paused mode");
+ ret = restartPreview();
+
+ // TODO(XXX): If there is some delay between the restartPreview call and the code
+ // below, then the user could see some preview frames and callbacks. Let's find
+ // a better place to put this later...
+ if (ret == NO_ERROR) {
+ mDisplayPaused = true;
+ mPreviewEnabled = false;
+ ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);
+ }
}
if ( !mBracketingRunning && mBracketingEnabled ) {
@@ -1411,52 +1460,40 @@ status_t CameraHal::freePreviewDataBufs()
}
status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size_t size,
- const char* previewFormat, unsigned int bufferCount,
- unsigned int *max_queueable)
+ const char* previewFormat, unsigned int bufferCount)
{
status_t ret = NO_ERROR;
- int bytes;
+ int bytes = size;
LOG_FUNCTION_NAME;
- bytes = size;
-
// allocate image buffers only if not already allocated
if(NULL != mImageBuffers) {
- if (mBufferSourceAdapter_Out.get()) {
- mBufferSourceAdapter_Out->maxQueueableBuffers(*max_queueable);
- } else {
- *max_queueable = bufferCount;
- }
return NO_ERROR;
}
- if (mBufferSourceAdapter_Out.get()) {
- mImageBuffers = mBufferSourceAdapter_Out->allocateBufferList(width, height, previewFormat,
- bytes, bufferCount);
- mBufferSourceAdapter_Out->maxQueueableBuffers(*max_queueable);
- } else {
- bytes = ((bytes + 4095) / 4096) * 4096;
+ if ( NO_ERROR == ret ) {
+ bytes = ((bytes+4095)/4096)*4096;
mImageBuffers = mMemoryManager->allocateBufferList(0, 0, previewFormat, bytes, bufferCount);
- *max_queueable = bufferCount;
- }
-
- CAMHAL_LOGDB("Size of Image cap buffer = %d", bytes);
- if ( NULL == mImageBuffers ) {
- CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
- ret = -NO_MEMORY;
- } else {
- bytes = size;
+ CAMHAL_LOGDB("Size of Image cap buffer = %d", bytes);
+ if( NULL == mImageBuffers ) {
+ CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
+ ret = -NO_MEMORY;
+ } else {
+ bytes = size;
+ }
}
if ( NO_ERROR == ret ) {
mImageFd = mMemoryManager->getFd();
mImageLength = bytes;
mImageOffsets = mMemoryManager->getOffsets();
+ mImageCount = bufferCount;
} else {
mImageFd = -1;
mImageLength = 0;
mImageOffsets = NULL;
+ mImageCount = 0;
}
LOG_FUNCTION_NAME_EXIT;
@@ -1619,14 +1656,12 @@ status_t CameraHal::freeImageBufs()
}
if (mBufferSourceAdapter_Out.get()) {
- ret = mBufferSourceAdapter_Out->freeBufferList(mImageBuffers);
+ mBufferSourceAdapter_Out = 0;
} else {
ret = mMemoryManager->freeBufferList(mImageBuffers);
}
- if (ret == NO_ERROR) {
- mImageBuffers = NULL;
- }
+ mImageBuffers = NULL;
LOG_FUNCTION_NAME_EXIT;
@@ -2014,7 +2049,15 @@ status_t CameraHal::setPreviewWindow(struct preview_stream_ops *window)
{
// Need to create the display adapter since it has not been created
// Create display adapter
- mDisplayAdapter = new ANativeWindowDisplayAdapter();
+ ANativeWindowDisplayAdapter* displayAdapter = new ANativeWindowDisplayAdapter();
+ displayAdapter->setExternalLocking(mExternalLocking);
+ if (NULL != mAppCallbackNotifier.get()) {
+ mAppCallbackNotifier->setExternalLocking(mExternalLocking);
+ } else {
+ CAMHAL_LOGE("Can't apply locking policy on AppCallbackNotifier");
+ CAMHAL_ASSERT(0);
+ }
+ mDisplayAdapter = displayAdapter;
#ifdef OMAP_ENHANCEMENT
mDisplayAdapter->setExtendedOps(mExtendedPreviewStreamOps);
#endif
@@ -2082,136 +2125,375 @@ void CameraHal::setExtendedPreviewStreamOps(preview_stream_extended_ops_t *ops)
}
/**
- @brief Sets ANativeWindow object.
+ @brief Sets Tapout Surfaces.
- Buffers provided to CameraHal via this object for tap-in/tap-out
+ Buffers provided to CameraHal via this object for tap-out
functionality.
- TODO(XXX): this is just going to use preview_stream_ops for now, but we
- most likely need to extend it when we want more functionality
-
@param[in] window The ANativeWindow object created by Surface flinger
@return NO_ERROR If the ANativeWindow object passes validation criteria
@todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
*/
-status_t CameraHal::setBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout)
+status_t CameraHal::setTapoutLocked(struct preview_stream_ops *tapout)
{
status_t ret = NO_ERROR;
+ int index = -1;
LOG_FUNCTION_NAME;
- // If either a tapin or tapout was previously set
- // we need to clean up and clear capturing
- if ((!tapout && mBufferSourceAdapter_Out.get()) ||
- (!tapin && mBufferSourceAdapter_In.get())) {
- signalEndImageCapture();
- }
-
- // Set tapout point
- // destroy current buffer tapout if NULL tapout is passed
if (!tapout) {
- if (mBufferSourceAdapter_Out.get() != NULL) {
- CAMHAL_LOGD("NULL tapout passed, destroying buffer tapout adapter");
- mBufferSourceAdapter_Out.clear();
- mBufferSourceAdapter_Out = 0;
- }
- ret = NO_ERROR;
- } else if (mBufferSourceAdapter_Out.get() == NULL) {
- mBufferSourceAdapter_Out = new BufferSourceAdapter();
- mBufferSourceAdapter_Out->setExtendedOps(mExtendedPreviewStreamOps);
- if(!mBufferSourceAdapter_Out.get()) {
- CAMHAL_LOGEA("Couldn't create DisplayAdapter");
- ret = NO_MEMORY;
- goto exit;
+ CAMHAL_LOGD("Missing argument");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ // Set tapout point
+ // 1. Check name of tap-out
+ // 2. If not already set, then create a new one
+ // 3. Allocate buffers. If user is re-setting the surface, free buffers first and re-allocate
+ // in case dimensions have changed
+
+ for (unsigned int i = 0; i < mOutAdapters.size(); i++) {
+ android::sp<DisplayAdapter> out;
+ out = mOutAdapters.itemAt(i);
+ ret = out->setPreviewWindow(tapout);
+ if (ret == ALREADY_EXISTS) {
+ CAMHAL_LOGD("Tap Out already set at index = %d", i);
+ index = i;
+ ret = NO_ERROR;
}
+ }
- ret = mBufferSourceAdapter_Out->initialize();
- if (ret != NO_ERROR)
- {
- mBufferSourceAdapter_Out.clear();
- mBufferSourceAdapter_Out = 0;
+ if (index < 0) {
+ android::sp<DisplayAdapter> out = new BufferSourceAdapter();
+
+ ret = out->initialize();
+ if (ret != NO_ERROR) {
+ out.clear();
CAMHAL_LOGEA("DisplayAdapter initialize failed");
goto exit;
}
+ // BufferSourceAdapter will be handler of the extended OPS
+ out->setExtendedOps(mExtendedPreviewStreamOps);
+
// CameraAdapter will be the frame provider for BufferSourceAdapter
- mBufferSourceAdapter_Out->setFrameProvider(mCameraAdapter);
+ out->setFrameProvider(mCameraAdapter);
// BufferSourceAdapter will use ErrorHandler to send errors back to
// the application
- mBufferSourceAdapter_Out->setErrorHandler(mAppCallbackNotifier.get());
+ out->setErrorHandler(mAppCallbackNotifier.get());
// Update the display adapter with the new window that is passed from CameraService
- ret = mBufferSourceAdapter_Out->setPreviewWindow(tapout);
+ ret = out->setPreviewWindow(tapout);
if(ret != NO_ERROR) {
CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret);
goto exit;
}
- } else {
- // Update the display adapter with the new window that is passed from CameraService
- freeImageBufs();
- ret = mBufferSourceAdapter_Out->setPreviewWindow(tapout);
- if (ret == ALREADY_EXISTS) {
- // ALREADY_EXISTS should be treated as a noop in this case
- ret = NO_ERROR;
+
+ if (NULL != mCameraAdapter) {
+ unsigned int bufferCount, max_queueable;
+ CameraFrame frame;
+
+ bufferCount = out->getBufferCount();
+ if (bufferCount < 1) bufferCount = NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE,
+ ( int ) &frame,
+ bufferCount);
+ if (NO_ERROR != ret) {
+ CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x", ret);
+ }
+ if (NO_ERROR == ret) {
+ CameraBuffer *bufs = NULL;
+ unsigned int stride;
+ unsigned int height = frame.mHeight;
+ int size = frame.mLength;
+
+ stride = frame.mAlignment / getBPP(mParameters.getPictureFormat());
+ bufs = out->allocateBufferList(stride,
+ height,
+ mParameters.getPictureFormat(),
+ size,
+ bufferCount);
+ if (bufs == NULL){
+ CAMHAL_LOGEB("error allocating buffer list");
+ goto exit;
+ }
+ }
}
+ mOutAdapters.add(out);
}
- if (ret != NO_ERROR) {
- CAMHAL_LOGE("Error while trying to set tapout point");
- goto exit;
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Releases Tapout Surfaces.
+
+ @param[in] window The ANativeWindow object created by Surface flinger
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::releaseTapoutLocked(struct preview_stream_ops *tapout)
+{
+ status_t ret = NO_ERROR;
+ char id[OP_STR_SIZE];
+
+ LOG_FUNCTION_NAME;
+
+ if (!tapout) {
+ CAMHAL_LOGD("Missing argument");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
}
- // 1. Set tapin point
- if (!tapin) {
- if (mBufferSourceAdapter_In.get() != NULL) {
- CAMHAL_LOGD("NULL tapin passed, destroying buffer tapin adapter");
- mBufferSourceAdapter_In.clear();
- mBufferSourceAdapter_In = 0;
+ // Get the name of tapout
+ ret = mExtendedPreviewStreamOps->get_id(tapout, id, sizeof(id));
+ if (NO_ERROR != ret) {
+ CAMHAL_LOGEB("get_id OPS returned error %d", ret);
+ return ret;
+ }
+
+ // 1. Check name of tap-out
+ // 2. If exist, then free buffers and then remove it
+ if (mBufferSourceAdapter_Out.get() && mBufferSourceAdapter_Out->match(id)) {
+ CAMHAL_LOGD("REMOVE tap out %p previously set as current", tapout);
+ mBufferSourceAdapter_Out.clear();
+ }
+ for (unsigned int i = 0; i < mOutAdapters.size(); i++) {
+ android::sp<DisplayAdapter> out;
+ out = mOutAdapters.itemAt(i);
+ if (out->match(id)) {
+ CAMHAL_LOGD("REMOVE tap out %p \"%s\" at position %d", tapout, id, i);
+ mOutAdapters.removeAt(i);
+ break;
}
- ret = NO_ERROR;
- } else if (mBufferSourceAdapter_In.get() == NULL) {
- mBufferSourceAdapter_In = new BufferSourceAdapter();
- mBufferSourceAdapter_In->setExtendedOps(mExtendedPreviewStreamOps);
- if(!mBufferSourceAdapter_In.get()) {
- CAMHAL_LOGEA("Couldn't create DisplayAdapter");
- ret = NO_MEMORY;
- goto exit;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Sets Tapin Surfaces.
+
+ Buffers provided to CameraHal via this object for tap-in
+ functionality.
+
+ @param[in] window The ANativeWindow object created by Surface flinger
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::setTapinLocked(struct preview_stream_ops *tapin)
+{
+ status_t ret = NO_ERROR;
+ int index = -1;
+
+ LOG_FUNCTION_NAME;
+
+ if (!tapin) {
+ CAMHAL_LOGD("Missing argument");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ // 1. Set tapin point
+ // 1. Check name of tap-in
+ // 2. If not already set, then create a new one
+ // 3. Allocate buffers. If user is re-setting the surface, free buffers first and re-allocate
+ // in case dimensions have changed
+ for (unsigned int i = 0; i < mInAdapters.size(); i++) {
+ android::sp<DisplayAdapter> in;
+ in = mInAdapters.itemAt(i);
+ ret = in->setPreviewWindow(tapin);
+ if (ret == ALREADY_EXISTS) {
+ CAMHAL_LOGD("Tap In already set at index = %d", i);
+ index = i;
+ ret = NO_ERROR;
}
+ }
- ret = mBufferSourceAdapter_In->initialize();
- if (ret != NO_ERROR)
- {
- mBufferSourceAdapter_In.clear();
- mBufferSourceAdapter_In = 0;
+ if (index < 0) {
+ android::sp<DisplayAdapter> in = new BufferSourceAdapter();
+
+ ret = in->initialize();
+ if (ret != NO_ERROR) {
+ in.clear();
CAMHAL_LOGEA("DisplayAdapter initialize failed");
goto exit;
}
- // We need to set a frame provider so camera adapter can return the frame back to us
- mBufferSourceAdapter_In->setFrameProvider(mCameraAdapter);
+ // BufferSourceAdapter will be handler of the extended OPS
+ in->setExtendedOps(mExtendedPreviewStreamOps);
+
+ // CameraAdapter will be the frame provider for BufferSourceAdapter
+ in->setFrameProvider(mCameraAdapter);
// BufferSourceAdapter will use ErrorHandler to send errors back to
// the application
- mBufferSourceAdapter_In->setErrorHandler(mAppCallbackNotifier.get());
+ in->setErrorHandler(mAppCallbackNotifier.get());
// Update the display adapter with the new window that is passed from CameraService
- ret = mBufferSourceAdapter_In->setPreviewWindow(tapin);
+ ret = in->setPreviewWindow(tapin);
if(ret != NO_ERROR) {
CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret);
goto exit;
}
- } else {
- // Update the display adapter with the new window that is passed from CameraService
- ret = mBufferSourceAdapter_In->setPreviewWindow(tapin);
- if (ret == ALREADY_EXISTS) {
- // ALREADY_EXISTS should be treated as a noop in this case
- ret = NO_ERROR;
+
+ mInAdapters.add(in);
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+/**
+ @brief Releases Tapin Surfaces.
+
+ @param[in] tapin The preview_stream_ops of the tap-in surface to release
+ @return NO_ERROR If the tap-in was released, or was never registered
+ @todo Define error codes for the different failure scenarios
+
+ */
+status_t CameraHal::releaseTapinLocked(struct preview_stream_ops *tapin)
+{
+ status_t ret = NO_ERROR;
+ char id[OP_STR_SIZE];
+
+ LOG_FUNCTION_NAME;
+
+ if (!tapin) {
+ CAMHAL_LOGD("Missing argument");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+ }
+
+ // Get the name of tapin
+ ret = mExtendedPreviewStreamOps->get_id(tapin, id, sizeof(id));
+ if (NO_ERROR != ret) {
+ CAMHAL_LOGEB("get_id OPS returned error %d", ret);
+ return ret;
+ }
+
+ // 1. Check the name of the tap-in
+ // 2. If it exists, free its buffers and then remove it
+ if (mBufferSourceAdapter_In.get() && mBufferSourceAdapter_In->match(id)) {
+ CAMHAL_LOGD("REMOVE tap in %p previously set as current", tapin);
+ mBufferSourceAdapter_In.clear();
+ }
+ for (unsigned int i = 0; i < mInAdapters.size(); i++) {
+ android::sp<DisplayAdapter> in;
+ in = mInAdapters.itemAt(i);
+ if (in->match(id)) {
+ CAMHAL_LOGD("REMOVE tap in %p \"%s\" at position %d", tapin, id, i);
+ mInAdapters.removeAt(i);
+ break;
}
}
- exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+/**
+ @brief Sets ANativeWindow object.
+
+ Buffers provided to CameraHal via this object for tap-in/tap-out
+ functionality.
+
+ TODO(XXX): this is just going to use preview_stream_ops for now, but we
+ most likely need to extend it when we want more functionality
+
+ @param[in] tapin The preview_stream_ops for the tap-in surface (may be NULL)
+ @param[in] tapout The preview_stream_ops for the tap-out surface (may be NULL)
+ @return NO_ERROR If the surfaces pass validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::setBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout)
+{
+ status_t ret = NO_ERROR;
+ int index = -1;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ CAMHAL_LOGD ("setBufferSource(%p, %p)", tapin, tapout);
+
+ ret = setTapoutLocked(tapout);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("setTapoutLocked returned error 0x%x", ret);
+ goto exit;
+ }
+
+ ret = setTapinLocked(tapin);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("setTapinLocked returned error 0x%x", ret);
+ goto exit;
+ }
+
+exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
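+
+/*
+   Usage sketch (illustrative only; the real wiring lives in the CPCam service
+   glue, and the variable names below are hypothetical):
+
+       // register a tap-out for capture and a tap-in for reprocessing
+       hal->setBufferSource(tapinOps, tapoutOps);
+       // ... capture / reprocess ...
+       hal->releaseBufferSource(tapinOps, tapoutOps);
+
+   Either argument may be NULL to set up or tear down only one direction.
+ */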
+
+
+/**
+ @brief Releases ANativeWindow object.
+
+ Releases buffer sources previously set with setBufferSource()
+
+ TODO(XXX): this is just going to use preview_stream_ops for now, but we
+ most likely need to extend it when we want more functionality
+
+ @param[in] tapin The tap-in preview_stream_ops to release (may be NULL)
+ @param[in] tapout The tap-out preview_stream_ops to release (may be NULL)
+ @return NO_ERROR If the surfaces were released successfully
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::releaseBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout)
+{
+ status_t ret = NO_ERROR;
+ int index = -1;
+
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ CAMHAL_LOGD ("releaseBufferSource(%p, %p)", tapin, tapout);
+ if (tapout) {
+ ret |= releaseTapoutLocked(tapout);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Error %d to release tap out", ret);
+ }
+ }
+
+ if (tapin) {
+ ret |= releaseTapinLocked(tapin);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Error %d to release tap in", ret);
+ }
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
return ret;
}
#endif
@@ -2402,12 +2684,12 @@ bool CameraHal::setVideoModeParameters(const android::CameraParameters& params)
// Set CAPTURE_MODE to VIDEO_MODE, if not set already and Restart Preview
valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
if ( (valstr == NULL) ||
- ( (valstr != NULL) && (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) != 0) ) )
- {
+ ( (valstr != NULL) && ( (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) != 0) &&
+ (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE_HQ ) != 0) ) ) ) {
CAMHAL_LOGDA("Set CAPTURE_MODE to VIDEO_MODE");
mParameters.set(TICameraParameters::KEY_CAP_MODE, (const char *) TICameraParameters::VIDEO_MODE);
restartPreviewRequired = true;
- }
+ }
// set VSTAB. restart is required if vstab value has changed
if ( (valstrRemote = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL ) {
@@ -2522,7 +2804,6 @@ status_t CameraHal::restartPreview()
android::AutoMutex lock(mLock);
if (!mCapModeBackup.isEmpty()) {
mParameters.set(TICameraParameters::KEY_CAP_MODE, mCapModeBackup.string());
- mCapModeBackup = "";
} else {
mParameters.set(TICameraParameters::KEY_CAP_MODE, "");
}
@@ -2819,7 +3100,6 @@ status_t CameraHal::startImageBracketing()
if ( NO_ERROR == ret )
{
- unsigned int bufferCount = mBracketRangeNegative + 1;
mParameters.getPictureSize(( int * ) &frame.mWidth,
( int * ) &frame.mHeight);
@@ -2827,9 +3107,7 @@ status_t CameraHal::startImageBracketing()
frame.mHeight,
frame.mLength,
mParameters.getPictureFormat(),
- bufferCount,
- &max_queueable);
- mBracketRangeNegative = bufferCount - 1;
+ ( mBracketRangeNegative + 1 ));
if ( NO_ERROR != ret )
{
CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
@@ -2844,7 +3122,7 @@ status_t CameraHal::startImageBracketing()
desc.mFd = mImageFd;
desc.mLength = mImageLength;
desc.mCount = ( size_t ) ( mBracketRangeNegative + 1 );
- desc.mMaxQueueable = ( size_t) max_queueable;
+ desc.mMaxQueueable = ( size_t ) ( mBracketRangeNegative + 1 );
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE,
( int ) &desc);
@@ -2911,7 +3189,7 @@ status_t CameraHal::takePicture(const char *params)
@todo Define error codes if unable to switch to image capture
*/
-status_t CameraHal::__takePicture(const char *params)
+status_t CameraHal::__takePicture(const char *params, struct timeval *captureStart)
{
// cancel AF state if needed (before any operation and mutex lock)
if (mCameraAdapter->getState() == CameraAdapter::AF_STATE) {
@@ -2927,10 +3205,16 @@ status_t CameraHal::__takePicture(const char *params)
unsigned int max_queueable = 0;
unsigned int rawBufferCount = 1;
bool isCPCamMode = false;
+ android::sp<DisplayAdapter> outAdapter = 0;
+ bool reuseTapout = false;
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
- gettimeofday(&mStartCapture, NULL);
+ if ( NULL == captureStart ) {
+ gettimeofday(&mStartCapture, NULL);
+ } else {
+ memcpy(&mStartCapture, captureStart, sizeof(struct timeval));
+ }
#endif
@@ -2960,7 +3244,8 @@ status_t CameraHal::__takePicture(const char *params)
// we only support video snapshot if we are in video mode (recording hint is set)
if ( (mCameraAdapter->getState() == CameraAdapter::VIDEO_STATE) &&
- (valstr && strcmp(valstr, TICameraParameters::VIDEO_MODE)) ) {
+ (valstr && ( strcmp(valstr, TICameraParameters::VIDEO_MODE) &&
+ strcmp(valstr, TICameraParameters::VIDEO_MODE_HQ ) ) ) ) {
CAMHAL_LOGEA("Trying to capture while recording without recording hint set...");
return INVALID_OPERATION;
}
@@ -2969,7 +3254,7 @@ status_t CameraHal::__takePicture(const char *params)
// check if camera application is using shots parameters
// api. parameters set here override anything set using setParameters
// TODO(XXX): Just going to use legacy TI parameters for now. Need
- // add new APIs in CameraHal to utilize ShotParameters later, so
+ // add new APIs in CameraHal to utilize android::ShotParameters later, so
// we don't have to parse through the whole set of parameters
// in camera adapter
if (strlen(params) > 0) {
@@ -3009,6 +3294,26 @@ status_t CameraHal::__takePicture(const char *params)
}
}
+ valStr = shotParams.get(android::ShotParameters::KEY_CURRENT_TAP_OUT);
+ if (valStr != NULL) {
+ int index = -1;
+ for (unsigned int i = 0; i < mOutAdapters.size(); i++) {
+ if(mOutAdapters.itemAt(i)->match(valStr)) {
+ index = i;
+ break;
+ }
+ }
+ if (index < 0) {
+ CAMHAL_LOGE("Invalid tap out surface passed to camerahal");
+ return BAD_VALUE;
+ }
+ CAMHAL_LOGD("Found matching out adapter at %d", index);
+ outAdapter = mOutAdapters.itemAt(index);
+ if ( outAdapter == mBufferSourceAdapter_Out ) {
+ reuseTapout = true;
+ }
+ }
+
mCameraAdapter->setParameters(mParameters);
} else
#endif
@@ -3017,11 +3322,19 @@ status_t CameraHal::__takePicture(const char *params)
// when we remove legacy TI parameters implementation
}
- // if we are already in the middle of a capture...then we just need
- // setParameters and start image capture to queue more shots
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Takepicture parameters set: ", &mStartCapture);
+
+#endif
+
+ // if we are already in the middle of a capture and using the same
+ // tap-out surface, then we just need setParameters and start image
+ // capture to queue more shots
if (((mCameraAdapter->getState() & CameraAdapter::CAPTURE_STATE) ==
CameraAdapter::CAPTURE_STATE) &&
- (mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE)) {
+ (mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE) &&
+ (reuseTapout) ) {
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
//pass capture timestamp along with the camera adapter command
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE,
@@ -3034,7 +3347,7 @@ status_t CameraHal::__takePicture(const char *params)
if ( !mBracketingRunning )
{
- // if application didn't set burst through ShotParameters
+ // if application didn't set burst through android::ShotParameters
// then query from TICameraParameters
if ((burst == -1) && (NO_ERROR == ret)) {
burst = mParameters.getInt(TICameraParameters::KEY_BURST);
@@ -3046,10 +3359,15 @@ status_t CameraHal::__takePicture(const char *params)
bufferCount = isCPCamMode || (burst > CameraHal::NO_BUFFERS_IMAGE_CAPTURE) ?
CameraHal::NO_BUFFERS_IMAGE_CAPTURE : burst;
- if (mBufferSourceAdapter_Out.get()) {
- // TODO(XXX): Temporarily increase number of buffers we can allocate from ANW
- // until faux-NPA mode is implemented
- bufferCount = NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
+ if (outAdapter.get()) {
+ if ( reuseTapout ) {
+ bufferCount = mImageCount;
+ } else {
+ bufferCount = outAdapter->getBufferCount();
+ if (bufferCount < 1) {
+ bufferCount = NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
+ }
+ }
}
if ( NULL != mAppCallbackNotifier.get() ) {
@@ -3106,21 +3424,53 @@ status_t CameraHal::__takePicture(const char *params)
}
}
- if ( NO_ERROR == ret )
- {
- ret = allocImageBufs(frame.mAlignment,
- frame.mHeight,
- frame.mLength,
- mParameters.getPictureFormat(),
- bufferCount,
- &max_queueable);
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Takepicture buffer size queried: ", &mStartCapture);
+
+#endif
+
+ if (outAdapter.get()) {
+ // Avoid locking the tapout again when reusing it
+ if (!reuseTapout) {
+ // Need to reset buffers if we are switching adapters since we don't know
+ // the state of the new buffer list
+ ret = outAdapter->maxQueueableBuffers(max_queueable);
+ if (NO_ERROR != ret) {
+ CAMHAL_LOGE("Couldn't get max queuable");
+ return ret;
}
+ mImageBuffers = outAdapter->getBuffers(true);
+ mImageOffsets = outAdapter->getOffsets();
+ mImageFd = outAdapter->getFd();
+ mImageLength = outAdapter->getSize();
+ mImageCount = bufferCount;
+ mBufferSourceAdapter_Out = outAdapter;
}
+ } else {
+ mBufferSourceAdapter_Out.clear();
+ // allocImageBufs will only allocate new buffers if mImageBuffers is NULL
+ if ( NO_ERROR == ret ) {
+ max_queueable = bufferCount;
+ ret = allocImageBufs(frame.mAlignment / getBPP(mParameters.getPictureFormat()),
+ frame.mHeight,
+ frame.mLength,
+ mParameters.getPictureFormat(),
+ bufferCount);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
+ }
+ }
+ }
- if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Takepicture buffers allocated: ", &mStartCapture);
+ memcpy(&mImageBuffers->ppmStamp, &mStartCapture, sizeof(struct timeval));
+
+#endif
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
{
desc.mBuffers = mImageBuffers;
desc.mOffsets = mImageOffsets;
@@ -3159,6 +3509,12 @@ status_t CameraHal::__takePicture(const char *params)
}
}
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Takepicture buffers registered: ", &mStartCapture);
+
+#endif
+
if ((ret == NO_ERROR) && mBufferSourceAdapter_Out.get()) {
mBufferSourceAdapter_Out->enableDisplay(0, 0, NULL);
}
@@ -3170,6 +3526,8 @@ status_t CameraHal::__takePicture(const char *params)
//pass capture timestamp along with the camera adapter command
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE, (int) &mStartCapture);
+ CameraHal::PPM("Takepicture capture started: ", &mStartCapture);
+
#else
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE);
@@ -3284,11 +3642,45 @@ status_t CameraHal::reprocess(const char *params)
CameraAdapter::BuffersDescriptor desc;
CameraBuffer *reprocBuffers = NULL;
android::ShotParameters shotParams;
+ const char *valStr = NULL;
+ struct timeval startReprocess;
android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME;
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&startReprocess, NULL);
+
+#endif
+
+ // 0. Get tap in surface
+ if (strlen(params) > 0) {
+ android::String8 shotParams8(params);
+ shotParams.unflatten(shotParams8);
+ }
+
+ valStr = shotParams.get(android::ShotParameters::KEY_CURRENT_TAP_IN);
+ if (valStr != NULL) {
+ int index = -1;
+ for (unsigned int i = 0; i < mInAdapters.size(); i++) {
+ if(mInAdapters.itemAt(i)->match(valStr)) {
+ index = i;
+ break;
+ }
+ }
+ if (index < 0) {
+ CAMHAL_LOGE("Invalid tap in surface passed to camerahal");
+ return BAD_VALUE;
+ }
+ CAMHAL_LOGD("Found matching in adapter at %d", index);
+ mBufferSourceAdapter_In = mInAdapters.itemAt(index);
+ } else {
+ CAMHAL_LOGE("No tap in surface sent with shot config!");
+ return BAD_VALUE;
+ }
+
// 1. Get buffers
if (mBufferSourceAdapter_In.get()) {
reprocBuffers = mBufferSourceAdapter_In->getBufferList(&bufferCount);
@@ -3299,11 +3691,24 @@ status_t CameraHal::reprocess(const char *params)
goto exit;
}
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Got reprocess buffers: ", &startReprocess);
+
+#endif
+
// 2. Get buffer information and parse parameters
{
shotParams.setBurst(bufferCount);
}
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ memcpy(&reprocBuffers->ppmStamp, &startReprocess, sizeof(struct timeval));
+
+#endif
+
// 3. Give buffer to camera adapter
desc.mBuffers = reprocBuffers;
desc.mOffsets = 0;
@@ -3311,12 +3716,19 @@ status_t CameraHal::reprocess(const char *params)
desc.mLength = 0;
desc.mCount = (size_t) bufferCount;
desc.mMaxQueueable = (size_t) bufferCount;
+
ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_REPROCESS, (int) &desc);
if (ret != NO_ERROR) {
CAMHAL_LOGE("Error calling camera use buffers");
goto exit;
}
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Reprocess buffers registered: ", &startReprocess);
+
+#endif
+
// 4. Start reprocessing
ret = mBufferSourceAdapter_In->enableDisplay(0, 0, NULL);
if (ret != NO_ERROR) {
@@ -3325,9 +3737,9 @@ status_t CameraHal::reprocess(const char *params)
}
// 5. Start capturing
- ret = __takePicture(shotParams.flatten().string());
+ ret = __takePicture(shotParams.flatten().string(), &startReprocess);
- exit:
+exit:
return ret;
}
@@ -3548,6 +3960,7 @@ CameraHal::CameraHal(int cameraId)
mImageOffsets = NULL;
mImageLength = 0;
mImageFd = 0;
+ mImageCount = 0;
mVideoOffsets = NULL;
mVideoFd = 0;
mVideoLength = 0;
@@ -3590,6 +4003,8 @@ CameraHal::CameraHal(int cameraId)
mCameraIndex = cameraId;
+ mExternalLocking = false;
+
LOG_FUNCTION_NAME_EXIT;
}
@@ -3683,7 +4098,7 @@ status_t CameraHal::initialize(CameraProperties::Properties* properties)
if (strcmp(sensor_name, V4L_CAMERA_NAME_USB) == 0) {
#ifdef V4L_CAMERA_ADAPTER
- mCameraAdapter = V4LCameraAdapter_Factory(sensor_index);
+ mCameraAdapter = V4LCameraAdapter_Factory(sensor_index, this);
#endif
}
else {
@@ -4155,7 +4570,7 @@ void CameraHal::initDefaultParameters()
// TI extensions for enable/disable algos
// Hadcoded for now
- p.set(TICameraParameters::KEY_ALGO_FIXED_GAMMA, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA, android::CameraParameters::FALSE);
p.set(TICameraParameters::KEY_ALGO_NSF1, android::CameraParameters::TRUE);
p.set(TICameraParameters::KEY_ALGO_NSF2, android::CameraParameters::TRUE);
p.set(TICameraParameters::KEY_ALGO_SHARPENING, android::CameraParameters::TRUE);
@@ -4242,6 +4657,11 @@ void CameraHal::deinitialize()
mSensorListener = NULL;
}
+ mBufferSourceAdapter_Out.clear();
+ mBufferSourceAdapter_In.clear();
+ mOutAdapters.clear();
+ mInAdapters.clear();
+
LOG_FUNCTION_NAME_EXIT;
}
@@ -4273,6 +4693,11 @@ void CameraHal::getPreferredPreviewRes(int *width, int *height)
LOG_FUNCTION_NAME_EXIT;
}
+void CameraHal::setExternalLocking(bool extBuffLocking)
+{
+ mExternalLocking = extBuffLocking;
+}
+
void CameraHal::resetPreviewRes(android::CameraParameters *params)
{
LOG_FUNCTION_NAME;
diff --git a/camera/CameraHalCommon.cpp b/camera/CameraHalCommon.cpp
index 1cad4cb..ff460f9 100644
--- a/camera/CameraHalCommon.cpp
+++ b/camera/CameraHalCommon.cpp
@@ -117,5 +117,97 @@ void CameraHal::PPM(const char* str, struct timeval* ppm_first, ...){
#endif
+
+/** Common utility function definitions used all over the HAL */
+
+unsigned int CameraHal::getBPP(const char* format) {
+ unsigned int bytesPerPixel;
+
+ // Calculate bytes per pixel based on the pixel format
+ if (strcmp(format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ bytesPerPixel = 2;
+ } else if (strcmp(format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0 ||
+ strcmp(format, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ bytesPerPixel = 2;
+ } else if (strcmp(format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ bytesPerPixel = 1;
+ } else {
+ bytesPerPixel = 1;
+ }
+
+ return bytesPerPixel;
+}
+
+void CameraHal::getXYFromOffset(unsigned int *x, unsigned int *y,
+ unsigned int offset, unsigned int stride,
+ const char* format)
+{
+ CAMHAL_ASSERT( x && y && format && (0U < stride) );
+
+ *x = (offset % stride) / getBPP(format);
+ *y = (offset / stride);
+}
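+
+/*
+   Worked example (illustrative): for a YUV422I buffer (2 bytes per pixel per
+   getBPP()) with a stride of 2048 bytes, an offset of 6148 bytes maps to
+   x = (6148 % 2048) / 2 = 2 pixels and y = 6148 / 2048 = 3 rows.
+ */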
+
+const char* CameraHal::getPixelFormatConstant(const char* parametersFormat)
+{
+ const char *pixelFormat = NULL;
+
+ if ( NULL != parametersFormat ) {
+ if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I) ) {
+ CAMHAL_LOGVA("CbYCrY format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ } else if ( (0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP)) ||
+ (0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420P)) ) {
+ // TODO(XXX): We are treating YV12 the same as YUV420SP
+ CAMHAL_LOGVA("YUV420SP format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_RGB565) ) {
+ CAMHAL_LOGVA("RGB565 format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_RGB565;
+ } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) ) {
+ CAMHAL_LOGVA("BAYER format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
+ } else if ( 0 == strcmp(parametersFormat, android::CameraParameters::PIXEL_FORMAT_JPEG) ) {
+ CAMHAL_LOGVA("JPEG format selected");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_JPEG;
+ } else {
+ CAMHAL_LOGEA("Invalid format, NV12 format selected as default");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+ } else {
+ CAMHAL_LOGEA("Preview format is NULL, defaulting to NV12");
+ pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+
+ return pixelFormat;
+}
+
+size_t CameraHal::calculateBufferSize(const char* parametersFormat, int width, int height)
+{
+ int bufferSize = -1;
+
+ if ( NULL != parametersFormat ) {
+ if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I) ) {
+ bufferSize = width * height * 2;
+ } else if ( (0 == strcmp(parametersFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP)) ||
+ (0 == strcmp(parametersFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P)) ) {
+ bufferSize = width * height * 3 / 2;
+ } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_RGB565) ) {
+ bufferSize = width * height * 2;
+ } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) ) {
+ bufferSize = width * height * 2;
+ } else {
+ CAMHAL_LOGEA("Invalid format");
+ bufferSize = 0;
+ }
+ } else {
+ CAMHAL_LOGEA("Preview format is NULL");
+ bufferSize = 0;
+ }
+
+ return bufferSize;
+}
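+
+/*
+   Worked example (illustrative): a 1920x1080 YUV420SP (NV12) buffer needs
+   1920 * 1080 * 3 / 2 = 3110400 bytes (full-size Y plane plus half-size
+   interleaved chroma), while YUV422I or RGB565 at the same resolution needs
+   1920 * 1080 * 2 = 4147200 bytes.
+ */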
+
+
} // namespace Camera
} // namespace Ti
diff --git a/camera/CameraHal_Module.cpp b/camera/CameraHal_Module.cpp
index 313c7fc..a9277a2 100644
--- a/camera/CameraHal_Module.cpp
+++ b/camera/CameraHal_Module.cpp
@@ -140,6 +140,25 @@ int camera_set_buffer_source(struct camera_device * device,
return rv;
}
+
+int camera_release_buffer_source(struct camera_device * device,
+ struct preview_stream_ops *tapin,
+ struct preview_stream_ops *tapout)
+{
+ CAMHAL_LOG_MODULE_FUNCTION_NAME;
+
+ int rv = -EINVAL;
+ ti_camera_device_t* ti_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ ti_dev = (ti_camera_device_t*) device;
+
+ rv = gCameraHals[ti_dev->cameraid]->releaseBufferSource(tapin, tapout);
+
+ return rv;
+}
#endif
void camera_set_callbacks(struct camera_device * device,
@@ -513,6 +532,7 @@ int camera_send_command(struct camera_device * device,
#ifdef OMAP_ENHANCEMENT_CPCAM
ops->set_extended_preview_ops = camera_set_extended_preview_ops;
ops->set_buffer_source = camera_set_buffer_source;
+ ops->release_buffer_source = camera_release_buffer_source;
ops->take_picture_with_parameters = camera_take_picture_with_parameters;
ops->reprocess = camera_reprocess;
ops->cancel_reprocess = camera_cancel_reprocess;
diff --git a/camera/DecoderFactory.cpp b/camera/DecoderFactory.cpp
new file mode 100644
index 0000000..846fda4
--- /dev/null
+++ b/camera/DecoderFactory.cpp
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "FrameDecoder.h"
+#include "SwFrameDecoder.h"
+#include "OmxFrameDecoder.h"
+#include "CameraHal.h"
+#include "DecoderFactory.h"
+
+namespace Ti {
+namespace Camera {
+
+
+FrameDecoder* DecoderFactory::createDecoderByType(DecoderType type, bool forceSwDecoder) {
+ FrameDecoder* decoder = NULL;
+ switch (type) {
+ case DecoderType_MJPEG: {
+
+ if (!forceSwDecoder) {
+ decoder = new OmxFrameDecoder(DecoderType_MJPEG);
+ CAMHAL_LOGD("Using HW Decoder for MJPEG");
+ } else {
+ decoder = new SwFrameDecoder();
+ CAMHAL_LOGD("Using SW Decoder for MJPEG");
+ }
+
+ // TODO: add logic to verify whether the HW decoder is actually available,
+ // and fall back to the SW decoder if it is not.
+ break;
+ }
+ case DecoderType_H264: {
+ decoder = new OmxFrameDecoder(DecoderType_H264);
+ CAMHAL_LOGD("Using HW Decoder for H264");
+ break;
+ }
+ default: {
+ CAMHAL_LOGE("Unrecognized decoder type %d", type);
+ }
+ }
+
+ return decoder;
+}
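+
+/*
+   Usage sketch (illustrative; error handling trimmed). A V4L adapter that
+   receives an MJPEG stream could obtain a decoder like this:
+
+       FrameDecoder* decoder =
+               DecoderFactory::createDecoderByType(DecoderType_MJPEG,
+                                                   false); // prefer HW
+       // NULL is returned for unrecognized types; the caller owns the
+       // decoder and is expected to configure() it before start().
+ */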
+
+} // namespace Camera
+} // namespace Ti
+
diff --git a/camera/Decoder_libjpeg.cpp b/camera/Decoder_libjpeg.cpp
new file mode 100755
index 0000000..35c343f
--- /dev/null
+++ b/camera/Decoder_libjpeg.cpp
@@ -0,0 +1,301 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Decoder_libjpeg.h"
+
+extern "C" {
+ #include "jpeglib.h"
+ #include "jerror.h"
+}
+
+#define NUM_COMPONENTS_IN_YUV 3
+
+namespace Ti {
+namespace Camera {
+
+/* JPEG DHT Segment omitted from MJPEG data */
+static unsigned char jpeg_odml_dht[0x1a6] = {
+ 0xff, 0xd8, /* Start of Image */
+ 0xff, 0xc4, 0x01, 0xa2, /* Define Huffman Table */
+
+ 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
+
+ 0x01, 0x00, 0x03, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
+
+ 0x10, 0x00, 0x02, 0x01, 0x03, 0x03, 0x02, 0x04, 0x03, 0x05, 0x05, 0x04, 0x04, 0x00, 0x00, 0x01, 0x7d,
+ 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12, 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
+ 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08, 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
+ 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
+ 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
+ 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
+ 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
+ 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
+ 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
+ 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
+ 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
+ 0xf9, 0xfa,
+
+ 0x11, 0x00, 0x02, 0x01, 0x02, 0x04, 0x04, 0x03, 0x04, 0x07, 0x05, 0x04, 0x04, 0x00, 0x01, 0x02, 0x77,
+ 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21, 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
+ 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91, 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
+ 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34, 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
+ 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
+ 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
+ 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+ 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
+ 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
+ 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
+ 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
+ 0xf9, 0xfa
+};
+
+struct libjpeg_source_mgr : jpeg_source_mgr {
+ libjpeg_source_mgr(unsigned char *buffer_ptr, int len);
+ ~libjpeg_source_mgr();
+
+ unsigned char *mBufferPtr;
+ int mFilledLen;
+};
+
+static void libjpeg_init_source(j_decompress_ptr cinfo) {
+ libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+ src->next_input_byte = (const JOCTET*)src->mBufferPtr;
+ src->bytes_in_buffer = 0;
+ src->current_offset = 0;
+}
+
+static boolean libjpeg_seek_input_data(j_decompress_ptr cinfo, long byte_offset) {
+ libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+ src->current_offset = byte_offset;
+ src->next_input_byte = (const JOCTET*)src->mBufferPtr + byte_offset;
+ src->bytes_in_buffer = 0;
+ return TRUE;
+}
+
+static boolean libjpeg_fill_input_buffer(j_decompress_ptr cinfo) {
+ libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+ src->current_offset += src->mFilledLen;
+ src->next_input_byte = src->mBufferPtr;
+ src->bytes_in_buffer = src->mFilledLen;
+ return TRUE;
+}
+
+static void libjpeg_skip_input_data(j_decompress_ptr cinfo, long num_bytes) {
+ libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+
+ if (num_bytes > (long)src->bytes_in_buffer) {
+ CAMHAL_LOGEA("\n\n\n libjpeg_skip_input_data - num_bytes > (long)src->bytes_in_buffer \n\n\n");
+ } else {
+ src->next_input_byte += num_bytes;
+ src->bytes_in_buffer -= num_bytes;
+ }
+}
+
+static boolean libjpeg_resync_to_restart(j_decompress_ptr cinfo, int desired) {
+ libjpeg_source_mgr* src = (libjpeg_source_mgr*)cinfo->src;
+ src->next_input_byte = (const JOCTET*)src->mBufferPtr;
+ src->bytes_in_buffer = 0;
+ return TRUE;
+}
+
+static void libjpeg_term_source(j_decompress_ptr /*cinfo*/) {}
+
+libjpeg_source_mgr::libjpeg_source_mgr(unsigned char *buffer_ptr, int len) : mBufferPtr(buffer_ptr), mFilledLen(len) {
+ init_source = libjpeg_init_source;
+ fill_input_buffer = libjpeg_fill_input_buffer;
+ skip_input_data = libjpeg_skip_input_data;
+ resync_to_restart = libjpeg_resync_to_restart;
+ term_source = libjpeg_term_source;
+ seek_input_data = libjpeg_seek_input_data;
+}
+
+libjpeg_source_mgr::~libjpeg_source_mgr() {}
+
+Decoder_libjpeg::Decoder_libjpeg()
+{
+ mWidth = 0;
+ mHeight = 0;
+ Y_Plane = NULL;
+ U_Plane = NULL;
+ V_Plane = NULL;
+ UV_Plane = NULL;
+}
+
+Decoder_libjpeg::~Decoder_libjpeg()
+{
+ release();
+}
+
+void Decoder_libjpeg::release()
+{
+ if (Y_Plane) {
+ free(Y_Plane);
+ Y_Plane = NULL;
+ }
+ if (U_Plane) {
+ free(U_Plane);
+ U_Plane = NULL;
+ }
+ if (V_Plane) {
+ free(V_Plane);
+ V_Plane = NULL;
+ }
+ if (UV_Plane) {
+ free(UV_Plane);
+ UV_Plane = NULL;
+ }
+}
+
+int Decoder_libjpeg::readDHTSize()
+{
+ return sizeof(jpeg_odml_dht);
+}
+
+// 0xFF 0xC4 - DHT (Define Huffman Table) marker
+// 0xFF 0xD8 - SOI (Start Of Image) marker
+// 0xFF 0xD9 - EOI (End Of Image) marker
+// This function returns true if a DHT marker is found
+bool Decoder_libjpeg::isDhtExist(unsigned char *jpeg_src, int filled_len) {
+ if (filled_len <= 0) {
+ return false;
+ }
+
+ for (int i = 1; i < filled_len; i++) {
+ if((jpeg_src[i - 1] == 0xFF) && (jpeg_src[i] == 0xC4)) {
+ CAMHAL_LOGD("Found DHT (Define Huffman Table) marker");
+ return true;
+ }
+ }
+ return false;
+}
+
+int Decoder_libjpeg::appendDHT(unsigned char *jpeg_src, int filled_len, unsigned char *jpeg_with_dht_buffer, int buff_size)
+{
+ /* Appending DHT to JPEG */
+
+ int len = filled_len + sizeof(jpeg_odml_dht) - 2; // final length of jpeg data
+ if (len > buff_size) {
+ CAMHAL_LOGEA("\n\n\n Buffer size too small. filled_len=%d, buff_size=%d, sizeof(jpeg_odml_dht)=%d\n\n\n", filled_len, buff_size, sizeof(jpeg_odml_dht));
+ return 0;
+ }
+
+ memcpy(jpeg_with_dht_buffer, jpeg_odml_dht, sizeof(jpeg_odml_dht));
+ memcpy((jpeg_with_dht_buffer + sizeof(jpeg_odml_dht)), jpeg_src + 2, (filled_len - 2));
+ return len;
+}
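+
+/*
+   Usage sketch (illustrative; 'scratch' is a hypothetical caller-provided
+   buffer of at least filled_len + readDHTSize() - 2 bytes). Many USB cameras
+   emit MJPEG frames without Huffman tables, so a caller would typically do:
+
+       if (!decoder.isDhtExist(src, filled_len)) {
+           int newLen = decoder.appendDHT(src, filled_len, scratch, scratchSize);
+           // decode 'scratch' (newLen bytes) instead of 'src'
+       }
+ */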
+
+
+bool Decoder_libjpeg::decode(unsigned char *jpeg_src, int filled_len, unsigned char *nv12_buffer, int stride)
+{
+ struct jpeg_decompress_struct cinfo;
+ struct jpeg_error_mgr jerr;
+ struct libjpeg_source_mgr s_mgr(jpeg_src, filled_len);
+
+ if (filled_len == 0)
+ return false;
+
+ cinfo.err = jpeg_std_error(&jerr);
+ jpeg_create_decompress(&cinfo);
+
+ cinfo.src = &s_mgr;
+ int status = jpeg_read_header(&cinfo, true);
+ if (status != JPEG_HEADER_OK) {
+ CAMHAL_LOGEA("jpeg header corrupted");
+ jpeg_destroy_decompress(&cinfo);
+ return false;
+ }
+
+ cinfo.out_color_space = JCS_YCbCr;
+ cinfo.raw_data_out = true;
+ status = jpeg_start_decompress(&cinfo);
+ if (!status) {
+ CAMHAL_LOGEA("jpeg_start_decompress failed");
+ jpeg_destroy_decompress(&cinfo);
+ return false;
+ }
+
+ if (mWidth == 0){
+ mWidth = cinfo.output_width;
+ mHeight = cinfo.output_height;
+ CAMHAL_LOGEA("w x h = %d x %d. stride=%d", cinfo.output_width, cinfo.output_height, stride);
+ }
+ else if ((cinfo.output_width > mWidth) || (cinfo.output_height > mHeight)) {
+ CAMHAL_LOGEA(" Free the existing buffers so that they are reallocated for new w x h. Old WxH = %dx%d. New WxH = %dx%d",
+ mWidth, mHeight, cinfo.output_width, cinfo.output_height);
+ release();
+ mWidth = cinfo.output_width;
+ mHeight = cinfo.output_height;
+ }
+
+ unsigned int decoded_uv_buffer_size = cinfo.output_width * cinfo.output_height / 2;
+ if (Y_Plane == NULL) Y_Plane = (unsigned char **)malloc(cinfo.output_height * sizeof(unsigned char *));
+ if (U_Plane == NULL) U_Plane = (unsigned char **)malloc(cinfo.output_height * sizeof(unsigned char *));
+ if (V_Plane == NULL) V_Plane = (unsigned char **)malloc(cinfo.output_height * sizeof(unsigned char *));
+ if (UV_Plane == NULL) UV_Plane = (unsigned char *)malloc(decoded_uv_buffer_size);
+
+ unsigned char **YUV_Planes[NUM_COMPONENTS_IN_YUV];
+ YUV_Planes[0] = Y_Plane;
+ YUV_Planes[1] = U_Plane;
+ YUV_Planes[2] = V_Plane;
+
+ unsigned char *row = &nv12_buffer[0];
+
+ // Y Component
+ for (unsigned int j = 0; j < cinfo.output_height; j++, row += stride)
+ YUV_Planes[0][j] = row;
+
+ row = &UV_Plane[0];
+
+ // U Component
+ for (unsigned int j = 0; j < cinfo.output_height; j+=2, row += cinfo.output_width / 2){
+ YUV_Planes[1][j+0] = row;
+ YUV_Planes[1][j+1] = row;
+ }
+
+ // V Component
+ for (unsigned int j = 0; j < cinfo.output_height; j+=2, row += cinfo.output_width / 2){
+ YUV_Planes[2][j+0] = row;
+ YUV_Planes[2][j+1] = row;
+ }
+
+ // Decode the frame, reading 8 raw scanlines at a time
+ for (unsigned int i = 0; i < cinfo.output_height; i += 8) {
+ jpeg_read_raw_data(&cinfo, YUV_Planes, 8);
+ YUV_Planes[0] += 8;
+ YUV_Planes[1] += 8;
+ YUV_Planes[2] += 8;
+ }
+
+ // Interleave the decoded U and V rows into the NV12 chroma plane
+ unsigned char *uv_ptr = nv12_buffer + (stride * cinfo.output_height);
+ unsigned char *u_ptr = UV_Plane;
+ unsigned char *v_ptr = UV_Plane + (decoded_uv_buffer_size / 2);
+ for(unsigned int i = 0; i < cinfo.output_height / 2; i++){
+ for(unsigned int j = 0; j < cinfo.output_width; j+=2){
+ *(uv_ptr + j) = *u_ptr; u_ptr++;
+ *(uv_ptr + j + 1) = *v_ptr; v_ptr++;
+ }
+ uv_ptr = uv_ptr + stride;
+ }
+
+ jpeg_finish_decompress(&cinfo);
+ jpeg_destroy_decompress(&cinfo);
+
+ return true;
+}
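+
+/*
+   Output layout for reference: decode() writes NV12 into 'nv12_buffer' as
+   'output_height' rows of Y, each 'stride' bytes apart, immediately followed
+   by 'output_height / 2' rows of interleaved chroma (Cb, Cr) at the same
+   stride.
+ */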
+
+} // namespace Camera
+} // namespace Ti
+
diff --git a/camera/FrameDecoder.cpp b/camera/FrameDecoder.cpp
new file mode 100644
index 0000000..80b4946
--- /dev/null
+++ b/camera/FrameDecoder.cpp
@@ -0,0 +1,204 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Common.h"
+#include "FrameDecoder.h"
+
+
+namespace Ti {
+namespace Camera {
+
+FrameDecoder::FrameDecoder()
+: mCameraHal(NULL), mState(DecoderState_Uninitialized) {
+}
+
+FrameDecoder::~FrameDecoder() {
+}
+
+status_t FrameDecoder::start() {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ status_t ret;
+ if (mState == DecoderState_Running) {
+ return NO_INIT;
+ }
+ ret = doStart();
+ if (ret == NO_ERROR) {
+ mState = DecoderState_Running;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+void FrameDecoder::stop() {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ if (mState >= DecoderState_Requested_Stop) {
+ return;
+ }
+ mState = DecoderState_Requested_Stop;
+ doStop();
+ mState = DecoderState_Stoppped;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void FrameDecoder::release() {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ if (mState <= DecoderState_Requested_Stop) {
+ return;
+ }
+ doRelease();
+ mState = DecoderState_Uninitialized;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void FrameDecoder::flush() {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ if (mState <= DecoderState_Requested_Stop) {
+ return;
+ }
+ doFlush();
+ mInQueue.clear();
+ mOutQueue.clear();
+
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void FrameDecoder::configure(const DecoderParameters& params) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ if (mState == DecoderState_Running) {
+ return;
+ }
+ mParams = params;
+ mInQueue.reserve(mParams.inputBufferCount);
+ mOutQueue.reserve(mParams.outputBufferCount);
+ doConfigure(params);
+ mState = DecoderState_Initialized;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t FrameDecoder::dequeueInputBuffer(int &id) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ if (mState != DecoderState_Running) {
+ CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
+ return INVALID_OPERATION;
+ }
+
+ for (size_t i = 0; i < mInQueue.size(); i++) {
+ int index = mInQueue[i];
+ android::sp<MediaBuffer>& in = mInBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(in->getLock());
+ if (in->getStatus() == BufferStatus_InDecoded) {
+ id = index;
+ in->setStatus(BufferStatus_Unknown);
+ mInQueue.removeAt(i);
+ return NO_ERROR;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return INVALID_OPERATION;
+}
+
+status_t FrameDecoder::dequeueOutputBuffer(int &id) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ if (mState != DecoderState_Running) {
+ CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
+ return INVALID_OPERATION;
+ }
+
+ for (size_t i = 0; i < mOutQueue.size(); i++) {
+ int index = mOutQueue[i];
+ android::sp<MediaBuffer>& out = mOutBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(out->getLock());
+ if (out->getStatus() == BufferStatus_OutFilled) {
+ id = index;
+ out->setStatus(BufferStatus_Unknown);
+ mOutQueue.removeAt(i);
+ return NO_ERROR;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return INVALID_OPERATION;
+}
+
+status_t FrameDecoder::queueOutputBuffer(int index) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ // Output buffers may be queued before the decoder is started, so only
+ // reject this call once a stop has been requested
+ if (mState > DecoderState_Running) {
+ CAMHAL_LOGE("Attempt to use decoder after stop was requested");
+ return INVALID_OPERATION;
+ }
+
+ android::sp<MediaBuffer>& out = mOutBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(out->getLock());
+ out->setStatus(BufferStatus_OutQueued);
+ mOutQueue.push_back(index);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+status_t FrameDecoder::queueInputBuffer(int id) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ if (mState != DecoderState_Running) {
+ CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
+ return INVALID_OPERATION;
+ }
+
+ {
+ android::sp<MediaBuffer>& in = mInBuffers->editItemAt(id);
+ android::AutoMutex bufferLock(in->getLock());
+ in->setStatus(BufferStatus_InQueued);
+ mInQueue.push_back(id);
+ }
+
+ // Since a buffer has just been queued, we can process it now
+ doProcessInputBuffer();
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
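+
+/*
+   Typical call sequence (illustrative; buffer ids are hypothetical):
+
+       decoder->configure(params);        // sizes the input/output queues
+       decoder->queueOutputBuffer(i);     // pre-queue empty output buffers
+       decoder->start();
+       decoder->queueInputBuffer(id);     // kicks doProcessInputBuffer()
+       decoder->dequeueOutputBuffer(id);  // returns ids once OutFilled
+       decoder->stop();
+       decoder->release();
+ */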
+
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/MemoryManager.cpp b/camera/MemoryManager.cpp
index d466f52..8d40b76 100644
--- a/camera/MemoryManager.cpp
+++ b/camera/MemoryManager.cpp
@@ -118,6 +118,7 @@ CameraBuffer* MemoryManager::allocateBufferList(int width, int height, const cha
buffers[i].ion_fd = mIonFd;
buffers[i].fd = mmap_fd;
buffers[i].size = size;
+ buffers[i].format = CameraHal::getPixelFormatConstant(format);
}
}
diff --git a/camera/OMXCameraAdapter/OMX3A.cpp b/camera/OMXCameraAdapter/OMX3A.cpp
index 9f87e87..9baad08 100644
--- a/camera/OMXCameraAdapter/OMX3A.cpp
+++ b/camera/OMXCameraAdapter/OMX3A.cpp
@@ -388,8 +388,8 @@ status_t OMXCameraAdapter::setParameters3A(const android::CameraParameters &para
}
// TI extensions for enable/disable algos
- declareParameter3ABool(params, TICameraParameters::KEY_ALGO_FIXED_GAMMA,
- mParameters3A.AlgoFixedGamma, SetAlgoFixedGamma, "Fixed Gamma");
+ declareParameter3ABool(params, TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA,
+ mParameters3A.AlgoExternalGamma, SetAlgoExternalGamma, "External Gamma");
declareParameter3ABool(params, TICameraParameters::KEY_ALGO_NSF1,
mParameters3A.AlgoNSF1, SetAlgoNSF1, "NSF1");
declareParameter3ABool(params, TICameraParameters::KEY_ALGO_NSF2,
@@ -400,11 +400,92 @@ status_t OMXCameraAdapter::setParameters3A(const android::CameraParameters &para
mParameters3A.AlgoThreeLinColorMap, SetAlgoThreeLinColorMap, "ThreeLinColorMap");
declareParameter3ABool(params, TICameraParameters::KEY_ALGO_GIC, mParameters3A.AlgoGIC, SetAlgoGIC, "GIC");
+ // Gamma table
+ str = params.get(TICameraParameters::KEY_GAMMA_TABLE);
+ updateGammaTable(str);
+
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+void OMXCameraAdapter::updateGammaTable(const char* gamma)
+{
+ unsigned int plane = 0;
+ unsigned int i = 0;
+ bool gamma_changed = false;
+ const char *a = gamma;
+ OMX_TI_GAMMATABLE_ELEM_TYPE *elem[3] = { mParameters3A.mGammaTable.pR,
+ mParameters3A.mGammaTable.pG,
+ mParameters3A.mGammaTable.pB};
+
+ if (!gamma) return;
+
+ mPending3Asettings &= ~SetGammaTable;
+ memset(&mParameters3A.mGammaTable, 0, sizeof(mParameters3A.mGammaTable));
+ for (plane = 0; plane < 3; plane++) {
+ a = strchr(a, '(');
+ if (NULL != a) {
+ a++;
+ for (i = 0; i < OMX_TI_GAMMATABLE_SIZE; i++) {
+ char *b;
+ int newVal;
+ newVal = strtod(a, &b);
+ if (newVal != elem[plane][i].nOffset) {
+ elem[plane][i].nOffset = newVal;
+ gamma_changed = true;
+ }
+ a = strpbrk(b, ",:)");
+ if ((NULL != a) && (':' == *a)) {
+ a++;
+ } else if ((NULL != a) && (',' == *a)){
+ a++;
+ break;
+ } else if ((NULL != a) && (')' == *a)){
+ a++;
+ break;
+ } else {
+ CAMHAL_LOGE("Error while parsing values");
+ gamma_changed = false;
+ break;
+ }
+ newVal = strtod(a, &b);
+ if (newVal != elem[plane][i].nSlope) {
+ elem[plane][i].nSlope = newVal;
+ gamma_changed = true;
+ }
+ a = strpbrk(b, ",:)");
+ if ((NULL != a) && (',' == *a)) {
+ a++;
+ } else if ((NULL != a) && (':' == *a)){
+ a++;
+ break;
+ } else if ((NULL != a) && (')' == *a)){
+ a++;
+ break;
+ } else {
+ CAMHAL_LOGE("Error while parsing values");
+ gamma_changed = false;
+ break;
+ }
+ }
+ if ((OMX_TI_GAMMATABLE_SIZE - 1) != i) {
+ CAMHAL_LOGE("Error while parsing values (incorrect count %u)", i);
+ gamma_changed = false;
+ break;
+ }
+ } else {
+ CAMHAL_LOGE("Error while parsing planes (%u)", plane);
+ gamma_changed = false;
+ break;
+ }
+ }
+
+ if (gamma_changed) {
+ mPending3Asettings |= SetGammaTable;
+ }
+}
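+
+/*
+   Expected KEY_GAMMA_TABLE string format, as parsed above (values are
+   illustrative):
+
+       "(o0:s0,o1:s1,...)(o0:s0,...)(o0:s0,...)"
+
+   i.e. three parenthesized planes (R, G, B), each carrying exactly
+   OMX_TI_GAMMATABLE_SIZE "offset:slope" pairs separated by commas.
+ */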
+
void OMXCameraAdapter::declareParameter3ABool(const android::CameraParameters &params, const char *key,
OMX_BOOL &current_setting, E3ASettingsFlags pending,
const char *msg)
@@ -1609,18 +1690,11 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
CameraBuffer *bufferlist;
OMX_ALGOAREASTYPE *meteringAreas;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
- MemoryManager memMgr;
int areasSize = 0;
LOG_FUNCTION_NAME
- ret = memMgr.initialize();
- if ( ret != OK ) {
- CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
- return ret;
- }
-
- android::AutoMutex lock(mMeteringAreasLock);
+ android::AutoMutex lock(mMeteringAreasLock);
if ( OMX_StateInvalid == mComponentState )
{
@@ -1629,7 +1703,7 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
}
areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
- bufferlist = memMgr.allocateBufferList(0, 0, NULL, areasSize, 1);
+ bufferlist = mMemMgr.allocateBufferList(0, 0, NULL, areasSize, 1);
meteringAreas = (OMX_ALGOAREASTYPE *)bufferlist[0].opaque;
OMXCameraPortParameters * mPreviewData = NULL;
@@ -1714,7 +1788,7 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
EXIT:
if (NULL != bufferlist)
{
- memMgr.freeBufferList(bufferlist);
+ mMemMgr.freeBufferList(bufferlist);
}
return ret;
@@ -1774,9 +1848,9 @@ status_t OMXCameraAdapter::setParameter3ABool(const OMX_INDEXTYPE omx_idx,
return Utils::ErrorUtils::omxToAndroidError(eError);
}
-status_t OMXCameraAdapter::setAlgoFixedGamma(Gen3A_settings& Gen3A)
+status_t OMXCameraAdapter::setAlgoExternalGamma(Gen3A_settings& Gen3A)
{
- return setParameter3ABool((OMX_INDEXTYPE) OMX_TI_IndexConfigFixedGamma, Gen3A.AlgoFixedGamma, "Fixed Gamma");
+ return setParameter3ABool((OMX_INDEXTYPE) OMX_TI_IndexConfigExternalGamma, Gen3A.AlgoExternalGamma, "External Gamma");
}
status_t OMXCameraAdapter::setAlgoNSF1(Gen3A_settings& Gen3A)
@@ -1804,6 +1878,86 @@ status_t OMXCameraAdapter::setAlgoGIC(Gen3A_settings& Gen3A)
 return setParameter3ABoolInvert((OMX_INDEXTYPE) OMX_TI_IndexConfigDisableGIC, Gen3A.AlgoGIC, "Green Imbalance Correction");
}
+status_t OMXCameraAdapter::setGammaTable(Gen3A_settings& Gen3A)
+{
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ CameraBuffer *bufferlist = NULL;
+ OMX_TI_CONFIG_GAMMATABLE_TYPE *gammaTable = NULL;
+ OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
+ int tblSize = 0;
+
+ LOG_FUNCTION_NAME;
+
+ if ( OMX_StateInvalid == mComponentState ) {
+ CAMHAL_LOGEA("OMX component is in invalid state");
+ ret = NO_INIT;
+ goto EXIT;
+ }
+
+ tblSize = ((sizeof(OMX_TI_CONFIG_GAMMATABLE_TYPE)+4095)/4096)*4096;
+ bufferlist = mMemMgr.allocateBufferList(0, 0, NULL, tblSize, 1);
+ if (NULL == bufferlist) {
+ CAMHAL_LOGEB("Error allocating buffer for gamma table");
+ ret = NO_MEMORY;
+ goto EXIT;
+ }
+ gammaTable = (OMX_TI_CONFIG_GAMMATABLE_TYPE *)bufferlist[0].mapped;
+ if (NULL == gammaTable) {
+ CAMHAL_LOGEB("Error allocating buffer for gamma table (wrong data pointer)");
+ ret = NO_MEMORY;
+ goto EXIT;
+ }
+
+ memcpy(gammaTable, &mParameters3A.mGammaTable, sizeof(OMX_TI_CONFIG_GAMMATABLE_TYPE));
+
+#ifdef CAMERAHAL_DEBUG
+ {
+ android::String8 DmpR;
+ android::String8 DmpG;
+ android::String8 DmpB;
+ for (unsigned int i=0; i<OMX_TI_GAMMATABLE_SIZE;i++) {
+ DmpR.appendFormat(" %d:%d;", (int)gammaTable->pR[i].nOffset, (int)gammaTable->pR[i].nSlope);
+ DmpG.appendFormat(" %d:%d;", (int)gammaTable->pG[i].nOffset, (int)gammaTable->pG[i].nSlope);
+ DmpB.appendFormat(" %d:%d;", (int)gammaTable->pB[i].nOffset, (int)gammaTable->pB[i].nSlope);
+ }
+ CAMHAL_LOGE("Gamma table R:%s", DmpR.string());
+ CAMHAL_LOGE("Gamma table G:%s", DmpG.string());
+ CAMHAL_LOGE("Gamma table B:%s", DmpB.string());
+ }
+#endif
+
+ OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
+ sharedBuffer.nPortIndex = OMX_ALL;
+ sharedBuffer.nSharedBuffSize = sizeof(OMX_TI_CONFIG_GAMMATABLE_TYPE);
+ sharedBuffer.pSharedBuff = (OMX_U8 *)camera_buffer_get_omx_ptr (&bufferlist[0]);
+ if ( NULL == sharedBuffer.pSharedBuff ) {
+ CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
+ ret = NO_MEMORY;
+ goto EXIT;
+ }
+
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE) OMX_TI_IndexConfigGammaTable, &sharedBuffer);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while setting Gamma Table configuration 0x%x", eError);
+ ret = BAD_VALUE;
+ goto EXIT;
+ } else {
+ CAMHAL_LOGDA("Gamma Table SetConfig successfull.");
+ }
+
+EXIT:
+
+ if (NULL != bufferlist) {
+ mMemMgr.freeBufferList(bufferlist);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
{
status_t ret = NO_ERROR;
@@ -1939,9 +2093,9 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
break;
//TI extensions for enable/disable algos
- case SetAlgoFixedGamma:
+ case SetAlgoExternalGamma:
{
- ret |= setAlgoFixedGamma(Gen3A);
+ ret |= setAlgoExternalGamma(Gen3A);
}
break;
@@ -1975,6 +2129,12 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
}
break;
+ case SetGammaTable:
+ {
+ ret |= setGammaTable(Gen3A);
+ }
+ break;
+
default:
CAMHAL_LOGEB("this setting (0x%x) is still not supported in CameraAdapter ",
currSett);
diff --git a/camera/OMXCameraAdapter/OMXAlgo.cpp b/camera/OMXCameraAdapter/OMXAlgo.cpp
index d163ef2..6855d56 100644
--- a/camera/OMXCameraAdapter/OMXAlgo.cpp
+++ b/camera/OMXCameraAdapter/OMXAlgo.cpp
@@ -64,6 +64,9 @@ status_t OMXCameraAdapter::setParametersAlgo(const android::CameraParameters &pa
} else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0) {
capMode = OMXCameraAdapter::VIDEO_MODE;
mCapabilitiesOpMode = MODE_VIDEO;
+ } else if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE_HQ) == 0) {
+ capMode = OMXCameraAdapter::VIDEO_MODE_HQ;
+ mCapabilitiesOpMode = MODE_VIDEO_HIGH_QUALITY;
} else if (strcmp(valstr, (const char *) TICameraParameters::CP_CAM_MODE) == 0) {
capMode = OMXCameraAdapter::CP_CAM;
mCapabilitiesOpMode = MODE_CPCAM;
@@ -325,7 +328,7 @@ status_t OMXCameraAdapter::setAutoConvergence(const char *pValstr, const char *p
ACParams.nSize = (OMX_U32)sizeof(OMX_TI_CONFIG_CONVERGENCETYPE);
ACParams.nVersion = mLocalVersionParam;
- ACParams.nPortIndex = OMX_ALL;
+ ACParams.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
(OMX_INDEXTYPE)OMX_TI_IndexConfigAutoConvergence,
@@ -644,8 +647,7 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
{
OMX_INIT_STRUCT_PTR (&camMode, OMX_CONFIG_CAMOPERATINGMODETYPE);
- if ( mSensorIndex == OMX_TI_StereoSensor )
- {
+ if ( mSensorIndex == OMX_TI_StereoSensor ) {
if ( OMXCameraAdapter::VIDEO_MODE == mode ) {
CAMHAL_LOGDA("Camera mode: STEREO VIDEO");
camMode.eCamOperatingMode = OMX_TI_StereoVideo;
@@ -653,26 +655,18 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
CAMHAL_LOGDA("Camera mode: STEREO");
camMode.eCamOperatingMode = OMX_CaptureStereoImageCapture;
}
- }
- else if ( OMXCameraAdapter::HIGH_SPEED == mode )
- {
+ } else if ( OMXCameraAdapter::HIGH_SPEED == mode ) {
CAMHAL_LOGDA("Camera mode: HIGH SPEED");
camMode.eCamOperatingMode = OMX_CaptureImageHighSpeedTemporalBracketing;
- }
- else if ( OMXCameraAdapter::CP_CAM == mode )
- {
+ } else if ( OMXCameraAdapter::CP_CAM == mode ) {
CAMHAL_LOGDA("Camera mode: CP CAM");
camMode.eCamOperatingMode = OMX_TI_CPCam;
// TODO(XXX): Hardcode for now until we implement re-proc pipe
singlePrevMode.eMode = OMX_TI_SinglePreviewMode_ImageCaptureHighSpeed;
- }
- else if( OMXCameraAdapter::HIGH_QUALITY == mode )
- {
+ } else if( OMXCameraAdapter::HIGH_QUALITY == mode ) {
CAMHAL_LOGDA("Camera mode: HIGH QUALITY");
camMode.eCamOperatingMode = OMX_CaptureImageProfileBase;
- }
- else if( OMXCameraAdapter::HIGH_QUALITY_ZSL== mode )
- {
+ } else if( OMXCameraAdapter::HIGH_QUALITY_ZSL== mode ) {
const char* valstr = NULL;
CAMHAL_LOGDA("Camera mode: HIGH QUALITY_ZSL");
camMode.eCamOperatingMode = OMX_TI_CaptureImageProfileZeroShutterLag;
@@ -683,17 +677,16 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
}
#endif
- }
- else if( OMXCameraAdapter::VIDEO_MODE == mode )
- {
+ } else if( OMXCameraAdapter::VIDEO_MODE == mode ) {
CAMHAL_LOGDA("Camera mode: VIDEO MODE");
camMode.eCamOperatingMode = OMX_CaptureVideo;
- }
- else
- {
+ } else if( OMXCameraAdapter::VIDEO_MODE_HQ == mode ) {
+ CAMHAL_LOGDA("Camera mode: VIDEO MODE HQ");
+ camMode.eCamOperatingMode = OMX_CaptureHighQualityVideo;
+ } else {
CAMHAL_LOGEA("Camera mode: INVALID mode passed!");
return BAD_VALUE;
- }
+ }
if( NO_ERROR == ret )
{
diff --git a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
index eecce61..a559703 100644
--- a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
+++ b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
@@ -82,6 +82,13 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mPending3Asettings = 0;//E3AsettingsAll;
mPendingCaptureSettings = 0;
mPendingPreviewSettings = 0;
+ mPendingReprocessSettings = 0;
+
+ ret = mMemMgr.initialize();
+ if ( ret != OK ) {
+ CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
+ return ret;
+ }
if ( 0 != mInitSem.Count() )
{
@@ -213,8 +220,6 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mVstabEnabled = false;
mVnfEnabled = false;
mBurstFrames = 1;
- mBurstFramesAccum = 0;
- mCapturedFrames = 0;
mFlushShotConfigQueue = false;
mPictureQuality = 100;
mCurrentZoomIdx = 0;
@@ -254,6 +259,10 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mEXIFData.mModelValid = false;
mEXIFData.mMakeValid = false;
+ mCapturedFrames = 0;
+ mBurstFramesAccum = 0;
+ mBurstFramesQueued = 0;
+
//update the mDeviceOrientation with the sensor mount orientation.
//So that the face detect will work before onOrientationEvent()
//get triggered.
@@ -368,12 +377,13 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mParameters3A.ManualGain = 0;
mParameters3A.ManualGainRight = 0;
- mParameters3A.AlgoFixedGamma = OMX_TRUE;
+ mParameters3A.AlgoExternalGamma = OMX_FALSE;
mParameters3A.AlgoNSF1 = OMX_TRUE;
mParameters3A.AlgoNSF2 = OMX_TRUE;
mParameters3A.AlgoSharpening = OMX_TRUE;
mParameters3A.AlgoThreeLinColorMap = OMX_TRUE;
mParameters3A.AlgoGIC = OMX_TRUE;
+ memset(&mParameters3A.mGammaTable, 0, sizeof(mParameters3A.mGammaTable));
LOG_FUNCTION_NAME_EXIT;
return Utils::ErrorUtils::omxToAndroidError(eError);
@@ -455,17 +465,6 @@ status_t OMXCameraAdapter::fillThisBuffer(CameraBuffer * frameBuf, CameraFrame::
isCaptureFrame = (CameraFrame::IMAGE_FRAME == frameType) ||
(CameraFrame::RAW_FRAME == frameType);
- if ( isCaptureFrame && (NO_ERROR == ret) ) {
- // In CP_CAM mode, end image capture will be signalled when application starts preview
- if ((1 > mCapturedFrames) && !mBracketingEnabled && (mCapMode != CP_CAM)) {
- // Signal end of image capture
- if ( NULL != mEndImageCaptureCallback) {
- mEndImageCaptureCallback(mEndCaptureData);
- }
- return NO_ERROR;
- }
- }
-
if ( NO_ERROR == ret )
{
port = getPortParams(frameType);
@@ -481,7 +480,14 @@ status_t OMXCameraAdapter::fillThisBuffer(CameraBuffer * frameBuf, CameraFrame::
if ((CameraBuffer *) port->mBufferHeader[i]->pAppPrivate == frameBuf) {
if ( isCaptureFrame && !mBracketingEnabled ) {
android::AutoMutex lock(mBurstLock);
- if (mBurstFramesQueued >= mBurstFramesAccum) {
+ if ((1 > mCapturedFrames) && !mBracketingEnabled && (mCapMode != CP_CAM)) {
+ // Signal end of image capture
+ if ( NULL != mEndImageCaptureCallback) {
+ mEndImageCaptureCallback(mEndCaptureData);
+ }
+ port->mStatus[i] = OMXCameraPortParameters::IDLE;
+ return NO_ERROR;
+ } else if (mBurstFramesQueued >= mBurstFramesAccum) {
port->mStatus[i] = OMXCameraPortParameters::IDLE;
return NO_ERROR;
}
@@ -820,7 +826,8 @@ void OMXCameraAdapter::getParameters(android::CameraParameters& params)
params.set(android::CameraParameters::KEY_FLASH_MODE, valstr);
if ((mParameters3A.Focus == OMX_IMAGE_FocusControlAuto) &&
- (mCapMode != OMXCameraAdapter::VIDEO_MODE)) {
+ ( (mCapMode != OMXCameraAdapter::VIDEO_MODE) &&
+ (mCapMode != OMXCameraAdapter::VIDEO_MODE_HQ) ) ) {
valstr = android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
} else {
valstr = getLUTvalue_OMXtoHAL(mParameters3A.Focus, FocusLUT);
@@ -1377,9 +1384,9 @@ status_t OMXCameraAdapter::useBuffers(CameraMode mode, CameraBuffer * bufArr, in
break;
case CAMERA_IMAGE_CAPTURE:
- mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mNumBufs = num;
mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mMaxQueueable = queueable;
ret = UseBuffersCapture(bufArr, num);
+ mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mNumBufs = num;
break;
case CAMERA_VIDEO:
@@ -1982,7 +1989,8 @@ status_t OMXCameraAdapter::UseBuffersPreview(CameraBuffer * bufArr, int num)
}
}
- if(mCapMode == OMXCameraAdapter::VIDEO_MODE) {
+ if( (mCapMode == OMXCameraAdapter::VIDEO_MODE) ||
+ (mCapMode == OMXCameraAdapter::VIDEO_MODE_HQ) ) {
if (mPendingPreviewSettings & SetVNF) {
mPendingPreviewSettings &= ~SetVNF;
@@ -2291,7 +2299,7 @@ status_t OMXCameraAdapter::startPreview()
#ifdef CAMERAHAL_DEBUG
{
android::AutoMutex locker(mBuffersWithDucatiLock);
- mBuffersWithDucati.add((int)mPreviewData->mBufferHeader[index]->pAppPrivate,1);
+ mBuffersWithDucati.add((int)mPreviewData->mBufferHeader[index]->pBuffer,1);
}
#endif
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
@@ -2491,6 +2499,7 @@ status_t OMXCameraAdapter::stopPreview() {
mFirstTimeInit = true;
mPendingCaptureSettings = 0;
+ mPendingReprocessSettings = 0;
mFramesWithDucati = 0;
mFramesWithDisplay = 0;
mFramesWithEncoder = 0;
@@ -2729,7 +2738,7 @@ status_t OMXCameraAdapter::takePicture()
LOG_FUNCTION_NAME;
- {
+ if (mNextState != REPROCESS_STATE) {
android::AutoMutex lock(mFrameCountMutex);
if (mFrameCount < 1) {
// first frame may time some time to come...so wait for an adequate amount of time
@@ -2826,7 +2835,8 @@ status_t OMXCameraAdapter::getFrameSize(size_t &width, size_t &height)
}
}
- if(mCapMode == OMXCameraAdapter::VIDEO_MODE) {
+ if((mCapMode == OMXCameraAdapter::VIDEO_MODE) ||
+ (mCapMode == OMXCameraAdapter::VIDEO_MODE_HQ) ) {
if (mPendingPreviewSettings & SetVNF) {
mPendingPreviewSettings &= ~SetVNF;
@@ -3374,8 +3384,7 @@ status_t OMXCameraAdapter::storeProfilingData(OMX_BUFFERHEADERTYPE* pBuffHeader)
if ( UNLIKELY( mDebugProfile ) ) {
platformPrivate = static_cast<OMX_TI_PLATFORMPRIVATE *> (pBuffHeader->pPlatformPrivate);
- extraData = getExtradata(static_cast<OMX_OTHER_EXTRADATATYPE *> (platformPrivate->pMetaDataBuffer),
- platformPrivate->nMetaDataSize,
+ extraData = getExtradata(platformPrivate->pMetaDataBuffer,
static_cast<OMX_EXTRADATATYPE> (OMX_TI_ProfilerData));
if ( NULL != extraData ) {
@@ -3483,7 +3492,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
// if we are waiting for a snapshot and in video mode...go ahead and send
// this frame as a snapshot
if( mWaitingForSnapshot && (mCapturedFrames > 0) &&
- (snapshotFrame || (mCapMode == VIDEO_MODE)))
+ (snapshotFrame || (mCapMode == VIDEO_MODE) || (mCapMode == VIDEO_MODE_HQ ) ))
{
typeOfFrame = CameraFrame::SNAPSHOT_FRAME;
mask = (unsigned int)CameraFrame::SNAPSHOT_FRAME;
@@ -3513,13 +3522,15 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
//CAMHAL_LOGV("FBD pBuffer = 0x%x", pBuffHeader->pBuffer);
if( mWaitingForSnapshot )
- {
- if (!mBracketingEnabled &&
- ((HIGH_SPEED == mCapMode) || (VIDEO_MODE == mCapMode)) )
- {
- notifyShutterSubscribers();
- }
- }
+ {
+ if ( !mBracketingEnabled &&
+ ((HIGH_SPEED == mCapMode) ||
+ (VIDEO_MODE == mCapMode) ||
+ (VIDEO_MODE_HQ == mCapMode)) )
+ {
+ notifyShutterSubscribers();
+ }
+ }
stat = sendCallBacks(cameraFrame, pBuffHeader, mask, pPortParam);
mFramesWithDisplay++;
@@ -4395,6 +4406,19 @@ public:
return err;
}
+#ifdef CAMERAHAL_OMAP5_CAPTURE_MODES
+
+ CAMHAL_LOGD("Camera mode: VIDEO HQ ");
+ properties->setMode(MODE_VIDEO_HIGH_QUALITY);
+ err = fetchCapabiltiesForMode(OMX_CaptureHighQualityVideo,
+ sensorId,
+ properties);
+ if ( NO_ERROR != err ) {
+ return err;
+ }
+
+#endif
+
}
return err;
diff --git a/camera/OMXCameraAdapter/OMXCapabilities.cpp b/camera/OMXCameraAdapter/OMXCapabilities.cpp
index bd734a1..e5afc64 100644
--- a/camera/OMXCameraAdapter/OMXCapabilities.cpp
+++ b/camera/OMXCameraAdapter/OMXCapabilities.cpp
@@ -49,6 +49,9 @@ const int OMXCameraAdapter::SENSORID_OV5640 = 302;
const int OMXCameraAdapter::SENSORID_OV14825 = 304;
const int OMXCameraAdapter::SENSORID_S5K4E1GA = 305;
const int OMXCameraAdapter::SENSORID_S5K6A1GX03 = 306;
+const int OMXCameraAdapter::SENSORID_OV8830 = 310;
+const int OMXCameraAdapter::SENSORID_OV2722 = 311;
+
const int OMXCameraAdapter::FPS_MIN = 5;
const int OMXCameraAdapter::FPS_MAX = 30;
@@ -70,15 +73,24 @@ const CapResolution OMXCameraAdapter::mImageCapRes [] = {
{ 2592, 1944, "2592x1944" },
{ 2592, 1728, "2592x1728" },
{ 2592, 1458, "2592x1458" },
+ { 2400, 1350, "2400x1350" },
{ 2304, 1296, "2304x1296" },
{ 2240, 1344, "2240x1344" },
{ 2160, 1440, "2160x1440" },
{ 2112, 1728, "2112x1728" },
+ { 2112, 1188, "2112x1188" },
{ 2048, 1536, "2048x1536" },
{ 2016, 1512, "2016x1512" },
+ { 2016, 1134, "2016x1134" },
{ 2000, 1600, "2000x1600" },
+ { 1920, 1080, "1920x1080" },
{ 1600, 1200, "1600x1200" },
+ { 1600, 900, "1600x900" },
+ { 1536, 864, "1536x864" },
+ { 1408, 792, "1408x792" },
+ { 1344, 756, "1344x756" },
{ 1280, 1024, "1280x1024" },
+ { 1280, 720, "1280x720" },
{ 1152, 864, "1152x864" },
{ 1280, 960, "1280x960" },
{ 1024, 768, "1024x768" },
@@ -306,7 +318,9 @@ const CapU32 OMXCameraAdapter::mSensorNames [] = {
{ SENSORID_OV5640, "OV5640" },
{ SENSORID_OV14825, "OV14825"},
{ SENSORID_S5K4E1GA, "S5K4E1GA"},
- { SENSORID_S5K6A1GX03, "S5K6A1GX03" }
+ { SENSORID_S5K6A1GX03, "S5K6A1GX03" },
+ { SENSORID_OV8830, "OV8830" },
+ { SENSORID_OV2722, "OV2722" }
// TODO(XXX): need to account for S3D camera later
};
@@ -1711,6 +1725,10 @@ status_t OMXCameraAdapter::insertCaptureModes(CameraProperties::Properties* para
strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
strncat(supported, TICameraParameters::CP_CAM_MODE, REMAINING_BYTES(supported));
#endif
+#ifdef CAMERAHAL_OMAP5_CAPTURE_MODES
+ strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
+ strncat(supported, TICameraParameters::VIDEO_MODE_HQ, REMAINING_BYTES(supported));
+#endif
strncat(supported, PARAM_SEP, REMAINING_BYTES(supported));
strncat(supported, TICameraParameters::ZOOM_BRACKETING, REMAINING_BYTES(supported));
}
diff --git a/camera/OMXCameraAdapter/OMXCapture.cpp b/camera/OMXCameraAdapter/OMXCapture.cpp
index 780ec99..ecc84f2 100644
--- a/camera/OMXCameraAdapter/OMXCapture.cpp
+++ b/camera/OMXCameraAdapter/OMXCapture.cpp
@@ -134,7 +134,8 @@ status_t OMXCameraAdapter::setParametersCapture(const android::CameraParameters
// Set capture format to yuv422i...jpeg encode will
// be done on A9
valstr = params.get(TICameraParameters::KEY_CAP_MODE);
- if ( (valstr && !strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE)) &&
+ if ( (valstr && ( strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0 ||
+ strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE_HQ) == 0 ) ) &&
(pixFormat == OMX_COLOR_FormatUnused) ) {
CAMHAL_LOGDA("Capturing in video mode...selecting yuv422i");
pixFormat = OMX_COLOR_FormatCbYCrY;
@@ -152,14 +153,14 @@ status_t OMXCameraAdapter::setParametersCapture(const android::CameraParameters
( strcmp(str, android::CameraParameters::TRUE) == 0 ) ) {
if ( !mBracketingSet ) {
- mPendingCaptureSettings |= SetExpBracket;
+ mPendingCaptureSettings |= SetBurstExpBracket;
}
mBracketingSet = true;
} else {
if ( mBracketingSet ) {
- mPendingCaptureSettings |= SetExpBracket;
+ mPendingCaptureSettings |= SetBurstExpBracket;
}
mBracketingSet = false;
@@ -174,7 +175,7 @@ status_t OMXCameraAdapter::setParametersCapture(const android::CameraParameters
} else {
mExposureBracketMode = OMX_BracketExposureRelativeInEV;
}
- mPendingCaptureSettings |= SetExpBracket;
+ mPendingCaptureSettings |= SetBurstExpBracket;
} else if ( (str = params.get(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE)) != NULL) {
parseExpRange(str, mExposureBracketingValues, mExposureGainBracketingValues,
mExposureGainBracketingModes,
@@ -184,11 +185,16 @@ status_t OMXCameraAdapter::setParametersCapture(const android::CameraParameters
} else {
mExposureBracketMode = OMX_BracketExposureGainAbsolute;
}
- mPendingCaptureSettings |= SetExpBracket;
+ mPendingCaptureSettings |= SetBurstExpBracket;
} else {
+ // always set queued shot config in CPCAM mode
+ if (mCapMode == OMXCameraAdapter::CP_CAM) {
+ mExposureBracketMode = OMX_BracketVectorShot;
+ mPendingCaptureSettings |= SetBurstExpBracket;
+ }
// if bracketing was previously set...we set again before capturing to clear
if (mExposureBracketingValidEntries) {
- mPendingCaptureSettings |= SetExpBracket;
+ mPendingCaptureSettings |= SetBurstExpBracket;
mExposureBracketingValidEntries = 0;
}
}
@@ -262,13 +268,13 @@ status_t OMXCameraAdapter::setParametersCapture(const android::CameraParameters
if ( varint >= 1 )
{
if (varint != (int) mBurstFrames) {
- mPendingCaptureSettings |= SetBurst;
+ mPendingCaptureSettings |= SetBurstExpBracket;
}
mBurstFrames = varint;
}
else
{
- if (mBurstFrames != 1) mPendingCaptureSettings |= SetBurst;
+ if (mBurstFrames != 1) mPendingCaptureSettings |= SetBurstExpBracket;
mBurstFrames = 1;
}
@@ -339,26 +345,6 @@ status_t OMXCameraAdapter::setParametersCapture(const android::CameraParameters
mPendingCaptureSettings = ECapturesettingsAll;
}
- // we are already capturing and in cpcam mode...just need to enqueue
- // shots
- inCaptureState = (CAPTURE_ACTIVE & mAdapterState) && (CAPTURE_ACTIVE & mNextState);
- if ((mPendingCaptureSettings & ~SetExpBracket) && !inCaptureState) {
- disableReprocess();
- disableImagePort();
- if ( NULL != mReleaseImageBuffersCallback ) {
- mReleaseImageBuffersCallback(mReleaseData);
- }
- if (mPendingCaptureSettings & SetFormat) {
- mPendingCaptureSettings &= ~SetFormat;
- ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *cap);
- if ( ret != NO_ERROR ) {
- CAMHAL_LOGEB("setFormat() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
- }
- }
- }
-
cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
cap->mWidth = params.getInt(TICameraParameters::RAW_WIDTH);
cap->mHeight = params.getInt(TICameraParameters::RAW_HEIGHT);
@@ -380,11 +366,17 @@ status_t OMXCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t buffe
{
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
- imgCaptureData->mNumBufs = bufferCount;
- // check if image port is already configured...
- // if it already configured then we don't have to query again
- if (!mCaptureConfigured) {
+ // If any settings have changed that need to be set with SetParam,
+ // we will need to disable the port to set them
+ if ((mPendingCaptureSettings & ECaptureParamSettings)) {
+ disableImagePort();
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ }
+
+ if (mPendingCaptureSettings & SetFormat) {
ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *imgCaptureData);
}
@@ -719,15 +711,39 @@ status_t OMXCameraAdapter::setVectorShot(int *evValues,
( OMX_INDEXTYPE ) OMX_TI_IndexConfigEnqueueShotConfigs,
&enqueueShotConfigs);
if ( OMX_ErrorNone != eError ) {
- CAMHAL_LOGEB("Error while configuring bracket shot 0x%x", eError);
+ CAMHAL_LOGEB("Error while configuring enqueue shot 0x%x", eError);
goto exit;
} else {
- CAMHAL_LOGDA("Bracket shot configured successfully");
+ CAMHAL_LOGDA("Enqueue shot configured successfully");
}
// Flush only first time
doFlush = false;
}
+ // Handle the plain burst capture (no bracketing) case
+ if (0 == evCount) {
+ CAMHAL_LOGE("Handle burst capture (no bracketing) case");
+ enqueueShotConfigs.nShotConfig[0].nConfigId = 0;
+ enqueueShotConfigs.nShotConfig[0].nFrames = frameCount;
+ enqueueShotConfigs.nShotConfig[0].nEC = 0;
+ enqueueShotConfigs.nShotConfig[0].nExp = 0;
+ enqueueShotConfigs.nShotConfig[0].nGain = 0;
+ enqueueShotConfigs.nShotConfig[0].eExpGainApplyMethod = OMX_TI_EXPGAINAPPLYMETHOD_RELATIVE;
+ enqueueShotConfigs.nShotConfig[0].bNoSnapshot = OMX_FALSE; // TODO: Make this configurable
+ enqueueShotConfigs.nNumConfigs = 1;
+ enqueueShotConfigs.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ enqueueShotConfigs.bFlushQueue = doFlush ? OMX_TRUE : OMX_FALSE;
+ eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
+ ( OMX_INDEXTYPE ) OMX_TI_IndexConfigEnqueueShotConfigs,
+ &enqueueShotConfigs);
+ if ( OMX_ErrorNone != eError ) {
+ CAMHAL_LOGEB("Error while configuring enqueue shot 0x%x", eError);
+ goto exit;
+ } else {
+ CAMHAL_LOGDA("Enqueue shot configured successfully");
+ }
+ }
+
exit:
LOG_FUNCTION_NAME_EXIT;
@@ -1153,8 +1169,8 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParam
}
}
- if (capParams->mPendingCaptureSettings & (SetBurst|SetExpBracket)) {
- mPendingCaptureSettings &= ~(SetExpBracket|SetBurst);
+ if (capParams->mPendingCaptureSettings & SetBurstExpBracket) {
+ mPendingCaptureSettings &= ~SetBurstExpBracket;
if ( mBracketingSet ) {
ret = doExposureBracketing(capParams->mExposureBracketingValues,
capParams->mExposureGainBracketingValues,
@@ -1180,6 +1196,10 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParam
}
}
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ CameraHal::PPM("startImageCapture bracketing configs done: ", &mStartCapture);
+#endif
+
capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
//OMX shutter callback events are only available in hq mode
@@ -1204,9 +1224,7 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParam
apply3Asettings(mParameters3A);
}
- // check is we are already in capture state...which means we are
- // accumulating shots
- if ((ret == NO_ERROR) && (mBurstFramesQueued > 0)) {
+ if (ret == NO_ERROR) {
int index = 0;
int queued = 0;
android::AutoMutex lock(mBurstLock);
@@ -1225,7 +1243,13 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParam
mCapturedFrames += mBurstFrames;
mBurstFramesAccum += mBurstFrames;
}
-
+ CAMHAL_LOGD("mBurstFramesQueued = %d mBurstFramesAccum = %d index = %d "
+ "capData->mNumBufs = %d queued = %d capData->mMaxQueueable = %d",
+ mBurstFramesQueued,mBurstFramesAccum,index,
+ capData->mNumBufs,queued,capData->mMaxQueueable);
+ CAMHAL_LOGD("%d", (mBurstFramesQueued < mBurstFramesAccum)
+ && (index < capData->mNumBufs)
+ && (queued < capData->mMaxQueueable));
while ((mBurstFramesQueued < mBurstFramesAccum) &&
(index < capData->mNumBufs) &&
(queued < capData->mMaxQueueable)) {
@@ -1239,25 +1263,11 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParam
mBurstFramesQueued++;
queued++;
} else if (OMXCameraPortParameters::FILL == capData->mStatus[index]) {
+ CAMHAL_LOGE("Not queueing index = %d", index);
queued++;
}
index++;
}
- } else if ( NO_ERROR == ret ) {
- ///Queue all the buffers on capture port
- for ( int index = 0 ; index < capData->mMaxQueueable ; index++ ) {
- if (mBurstFramesQueued < mBurstFramesAccum) {
- CAMHAL_LOGDB("Queuing buffer on Capture port - %p",
- capData->mBufferHeader[index]->pBuffer);
- capData->mStatus[index] = OMXCameraPortParameters::FILL;
- eError = OMX_FillThisBuffer(mCameraAdapterParameters.mHandleComp,
- (OMX_BUFFERHEADERTYPE*)capData->mBufferHeader[index]);
- mBurstFramesQueued++;
- } else {
- capData->mStatus[index] = OMXCameraPortParameters::IDLE;
- }
- GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- }
#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
if (mRawCapture) {
@@ -1277,11 +1287,10 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParam
mWaitingForSnapshot = true;
mCaptureSignalled = false;
- mPendingCaptureSettings &= ~SetBurst;
// Capturing command is not needed when capturing in video mode
// Only need to queue buffers on image ports
- if (mCapMode != VIDEO_MODE) {
+ if ( ( mCapMode != VIDEO_MODE ) && ( mCapMode != VIDEO_MODE_HQ ) ) {
OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
bOMX.bEnabled = OMX_TRUE;
@@ -1296,6 +1305,10 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParam
}
}
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ CameraHal::PPM("startImageCapture image buffers queued and capture enabled: ", &mStartCapture);
+#endif
+
//OMX shutter callback events are only available in hq mode
if ( (HIGH_QUALITY == mCapMode) || (HIGH_QUALITY_ZSL== mCapMode))
@@ -1330,6 +1343,10 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParam
}
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ CameraHal::PPM("startImageCapture shutter event received: ", &mStartCapture);
+#endif
+
return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
@@ -1419,7 +1436,7 @@ status_t OMXCameraAdapter::stopImageCapture()
// Disable image capture
// Capturing command is not needed when capturing in video mode
- if (mCapMode != VIDEO_MODE) {
+ if ( ( mCapMode != VIDEO_MODE ) && ( mCapMode != VIDEO_MODE_HQ ) ) {
OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
bOMX.bEnabled = OMX_FALSE;
imgCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
@@ -1433,13 +1450,6 @@ status_t OMXCameraAdapter::stopImageCapture()
}
}
- // Disable WB and vector shot extra data for metadata
- setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
- // TODO: WA: if domx client disables VectShotInfo metadata on the image port, this causes
- // VectShotInfo to be disabled internally on preview port also. Remove setting in OMXCapture
- // setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_VectShotInfo);
- setExtraData(false, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
-
CAMHAL_LOGDB("Capture set - 0x%x", eError);
mCaptureSignalled = true; //set this to true if we exited because of timeout
@@ -1466,6 +1476,10 @@ status_t OMXCameraAdapter::stopImageCapture()
// Workaround when doing many consecutive shots, CAF wasn't getting restarted.
mPending3Asettings |= SetFocus;
+ mCapturedFrames = 0;
+ mBurstFramesAccum = 0;
+ mBurstFramesQueued = 0;
+
return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
@@ -1550,7 +1564,11 @@ status_t OMXCameraAdapter::disableImagePort(){
deinitInternalBuffers(mCameraAdapterParameters.mImagePortIndex);
+ // since port settings are not persistent after port is disabled...
+ mPendingCaptureSettings |= SetFormat;
+
#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
+
if (mRawCapture) {
///Register for Video port Disable event
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
@@ -1712,153 +1730,177 @@ status_t OMXCameraAdapter::UseBuffersCapture(CameraBuffer * bufArr, int num)
return BAD_VALUE;
}
- // capture is already configured...we can skip this step
- if (mCaptureConfigured) {
- if ( NO_ERROR == ret )
- {
- ret = setupEXIF();
- if ( NO_ERROR != ret )
- {
- CAMHAL_LOGEB("Error configuring EXIF Buffer %x", ret);
- }
- }
-
- mCapturedFrames = mBurstFrames;
- mBurstFramesQueued = 0;
- return NO_ERROR;
- }
+ CAMHAL_ASSERT(num > 0);
- imgCaptureData->mNumBufs = num;
+ // If any setting requires an OMX SetParameter call (including a change
+ // of buffer type or buffer count), the port must be disabled before
+ // the settings can be applied
+ if ((mPendingCaptureSettings & ECaptureParamSettings) ||
+ bufArr[0].type != imgCaptureData->mBufferType ||
+ imgCaptureData->mNumBufs != num) {
+ if (mCaptureConfigured) {
+ disableImagePort();
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ }
- //TODO: Support more pixelformats
+ imgCaptureData->mBufferType = bufArr[0].type;
+ imgCaptureData->mNumBufs = num;
- CAMHAL_LOGDB("Params Width = %d", (int)imgCaptureData->mWidth);
- CAMHAL_LOGDB("Params Height = %d", (int)imgCaptureData->mHeight);
+ CAMHAL_LOGDB("Params Width = %d", (int)imgCaptureData->mWidth);
+ CAMHAL_LOGDB("Params Height = %d", (int)imgCaptureData->mHeight);
- if (mPendingCaptureSettings & SetFormat) {
- mPendingCaptureSettings &= ~SetFormat;
- ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *imgCaptureData);
- if ( ret != NO_ERROR ) {
- CAMHAL_LOGEB("setFormat() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
+ if (mPendingCaptureSettings & SetFormat) {
+ mPendingCaptureSettings &= ~SetFormat;
+ ret = setFormat(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE, *imgCaptureData);
+ if ( ret != NO_ERROR ) {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
}
- }
- if (mPendingCaptureSettings & SetThumb) {
- mPendingCaptureSettings &= ~SetThumb;
- ret = setThumbnailParams(mThumbWidth, mThumbHeight, mThumbQuality);
- if ( NO_ERROR != ret) {
- CAMHAL_LOGEB("Error configuring thumbnail size %x", ret);
- return ret;
+ if (mPendingCaptureSettings & SetThumb) {
+ mPendingCaptureSettings &= ~SetThumb;
+ ret = setThumbnailParams(mThumbWidth, mThumbHeight, mThumbQuality);
+ if ( NO_ERROR != ret) {
+ CAMHAL_LOGEB("Error configuring thumbnail size %x", ret);
+ return ret;
+ }
}
- }
- if (mPendingCaptureSettings & SetQuality) {
- mPendingCaptureSettings &= ~SetQuality;
- ret = setImageQuality(mPictureQuality);
- if ( NO_ERROR != ret) {
- CAMHAL_LOGEB("Error configuring image quality %x", ret);
- goto EXIT;
+ if (mPendingCaptureSettings & SetQuality) {
+ mPendingCaptureSettings &= ~SetQuality;
+ ret = setImageQuality(mPictureQuality);
+ if ( NO_ERROR != ret) {
+ CAMHAL_LOGEB("Error configuring image quality %x", ret);
+ goto EXIT;
+ }
}
- }
- // assuming if buffers are from ANW that they will be pagelist buffers
- // and need a tiler reservation
- if (bufArr[0].type == CAMERA_BUFFER_ANW) {
- initInternalBuffers(mCameraAdapterParameters.mImagePortIndex);
- }
-
- ///Register for Image port ENABLE event
- ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mImagePortIndex,
- mUseCaptureSem);
+ // Configure DOMX to use either gralloc handles or vptrs
+ {
+ OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+ OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
+
+ domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
+ if (bufArr[0].type == CAMERA_BUFFER_ANW) {
+ CAMHAL_LOGD ("Using ANW Buffers");
+ initInternalBuffers(mCameraAdapterParameters.mImagePortIndex);
+ domxUseGrallocHandles.bEnable = OMX_TRUE;
+ } else {
+ CAMHAL_LOGD ("Using ION Buffers");
+ domxUseGrallocHandles.bEnable = OMX_FALSE;
+ }
- ///Enable Capture Port
- eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mImagePortIndex,
- NULL);
-
- CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
- GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
-
- // Configure DOMX to use either gralloc handles or vptrs
- if ((imgCaptureData->mNumBufs > 0)) {
- OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
- OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
-
- domxUseGrallocHandles.nPortIndex = mCameraAdapterParameters.mImagePortIndex;
- if (bufArr[0].type == CAMERA_BUFFER_ANW) {
- CAMHAL_LOGD ("Using ANW Buffers");
- domxUseGrallocHandles.bEnable = OMX_TRUE;
- } else {
- CAMHAL_LOGD ("Using ION Buffers");
- domxUseGrallocHandles.bEnable = OMX_FALSE;
+ eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
+ (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+ if (eError!=OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
+ }
+ GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
}
- eError = OMX_SetParameter(mCameraAdapterParameters.mHandleComp,
- (OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
- if (eError!=OMX_ErrorNone) {
- CAMHAL_LOGEB("OMX_SetParameter - %x", eError);
- }
- GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
- }
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
- for ( int index = 0 ; index < imgCaptureData->mNumBufs ; index++ )
- {
- OMX_BUFFERHEADERTYPE *pBufferHdr;
- CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d",
- (unsigned int)bufArr[index].opaque,
- (int)imgCaptureData->mBufSize);
+ CameraHal::PPM("Takepicture image port configuration: ", &bufArr->ppmStamp);
- eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp,
- &pBufferHdr,
+#endif
+
+ // Register for Image port ENABLE event
+ ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
mCameraAdapterParameters.mImagePortIndex,
- 0,
- imgCaptureData->mBufSize,
- (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
+ mUseCaptureSem);
+
+ // Enable Capture Port
+ eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mImagePortIndex,
+ NULL);
CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
- pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
- bufArr[index].index = index;
- pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
- pBufferHdr->nVersion.s.nVersionMajor = 1 ;
- pBufferHdr->nVersion.s.nVersionMinor = 1 ;
- pBufferHdr->nVersion.s.nRevision = 0;
- pBufferHdr->nVersion.s.nStep = 0;
- imgCaptureData->mBufferHeader[index] = pBufferHdr;
- }
+ for (int index = 0 ; index < imgCaptureData->mNumBufs ; index++) {
+ OMX_BUFFERHEADERTYPE *pBufferHdr;
+ CAMHAL_LOGDB("OMX_UseBuffer Capture address: 0x%x, size = %d",
+ (unsigned int)bufArr[index].opaque,
+ (int)imgCaptureData->mBufSize);
+
+ eError = OMX_UseBuffer(mCameraAdapterParameters.mHandleComp,
+ &pBufferHdr,
+ mCameraAdapterParameters.mImagePortIndex,
+ 0,
+ imgCaptureData->mBufSize,
+ (OMX_U8*)camera_buffer_get_omx_ptr(&bufArr[index]));
+
+ CAMHAL_LOGDB("OMX_UseBuffer = 0x%x", eError);
+ GOTO_EXIT_IF(( eError != OMX_ErrorNone ), eError);
+
+ pBufferHdr->pAppPrivate = (OMX_PTR) &bufArr[index];
+ bufArr[index].index = index;
+ pBufferHdr->nSize = sizeof(OMX_BUFFERHEADERTYPE);
+ pBufferHdr->nVersion.s.nVersionMajor = 1 ;
+ pBufferHdr->nVersion.s.nVersionMinor = 1 ;
+ pBufferHdr->nVersion.s.nRevision = 0;
+ pBufferHdr->nVersion.s.nStep = 0;
+ imgCaptureData->mBufferHeader[index] = pBufferHdr;
+ imgCaptureData->mStatus[index] = OMXCameraPortParameters::IDLE;
+ }
+
+ // Wait for the image port enable event
+ CAMHAL_LOGDA("Waiting for port enable");
+ ret = mUseCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT);
+
+ // If something bad happened while we waited
+ if (mComponentState == OMX_StateInvalid) {
+ CAMHAL_LOGEA("Invalid State after Enable Image Port, exiting!!!");
+ goto EXIT;
+ }
- //Wait for the image port enable event
- CAMHAL_LOGDA("Waiting for port enable");
- ret = mUseCaptureSem.WaitTimeout(OMX_CMD_TIMEOUT);
+ if (ret != NO_ERROR) {
+ ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
+ OMX_EventCmdComplete,
+ OMX_CommandPortEnable,
+ mCameraAdapterParameters.mImagePortIndex,
+ NULL);
+ CAMHAL_LOGDA("Timeout expired on port enable");
+ goto EXIT;
+ }
+ CAMHAL_LOGDA("Port enabled");
- //If somethiing bad happened while we wait
- if (mComponentState == OMX_StateInvalid)
- {
- CAMHAL_LOGEA("Invalid State after Enable Image Port Exitting!!!");
- goto EXIT;
- }
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
- if ( ret == NO_ERROR )
- {
- CAMHAL_LOGDA("Port enabled");
+ CameraHal::PPM("Takepicture image port enabled and buffers registered: ", &bufArr->ppmStamp);
+
+#endif
+
+ if (mNextState != LOADED_REPROCESS_CAPTURE_STATE) {
+ // Enable WB and vector shot extra data for metadata
+ setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
+ setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
}
- else
- {
- ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
- OMX_EventCmdComplete,
- OMX_CommandPortEnable,
- mCameraAdapterParameters.mImagePortIndex,
- NULL);
- CAMHAL_LOGDA("Timeout expired on port enable");
- goto EXIT;
+
+ // CPCam mode only supports vector shot
+ // Regular capture is not supported
+ if ( (mCapMode == CP_CAM) && (mNextState != LOADED_REPROCESS_CAPTURE_STATE) ) {
+ initVectorShot();
+ }
+
+ mCaptureBuffersAvailable.clear();
+ for (unsigned int i = 0; i < imgCaptureData->mMaxQueueable; i++ ) {
+ mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 0);
+ }
+
+ // initial ref count for undequeued buffers is 1 since the buffer provider
+ // is still holding on to it
+ for (unsigned int i = imgCaptureData->mMaxQueueable; i < imgCaptureData->mNumBufs; i++ ) {
+ mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 1);
}
+ }
if ( NO_ERROR == ret )
{
@@ -1869,18 +1911,7 @@ status_t OMXCameraAdapter::UseBuffersCapture(CameraBuffer * bufArr, int num)
}
}
- // Enable WB and vector shot extra data for metadata
- setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_WhiteBalance);
- // TODO: WA: if domx client disables VectShotInfo metadata on the image port, this causes
- // VectShotInfo to be disabled internally on preview port also. Remove setting in OMXCapture
- // setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_TI_VectShotInfo);
- setExtraData(true, mCameraAdapterParameters.mImagePortIndex, OMX_TI_LSCTable);
-
- // CPCam mode only supports vector shot
- // Regular capture is not supported
- if (mCapMode == CP_CAM) initVectorShot();
-
- // Choose proper single preview mode for cpcapture capture (reproc or hs)
+ // Choose proper single preview mode for cp capture (reproc or hs)
if (( NO_ERROR == ret) && (OMXCameraAdapter::CP_CAM == mCapMode)) {
OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE singlePrevMode;
OMX_INIT_STRUCT_PTR (&singlePrevMode, OMX_TI_CONFIG_SINGLEPREVIEWMODETYPE);
@@ -1903,9 +1934,11 @@ status_t OMXCameraAdapter::UseBuffersCapture(CameraBuffer * bufArr, int num)
}
}
- mCapturedFrames = mBurstFrames;
- mBurstFramesAccum = mBurstFrames;
- mBurstFramesQueued = 0;
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Takepicture extra configs on image port done: ", &bufArr->ppmStamp);
+
+#endif
mCaptureConfigured = true;
diff --git a/camera/OMXCameraAdapter/OMXExif.cpp b/camera/OMXCameraAdapter/OMXExif.cpp
index cbf7c1c..b4fde5a 100644
--- a/camera/OMXCameraAdapter/OMXExif.cpp
+++ b/camera/OMXCameraAdapter/OMXExif.cpp
@@ -237,18 +237,11 @@ status_t OMXCameraAdapter::setupEXIF()
struct timeval sTv;
struct tm *pTime;
OMXCameraPortParameters * capData = NULL;
- MemoryManager memMgr;
CameraBuffer *memmgr_buf_array;
int buf_size = 0;
LOG_FUNCTION_NAME;
- ret = memMgr.initialize();
- if ( ret != OK ) {
- CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
- return ret;
- }
-
sharedBuffer.pSharedBuff = NULL;
capData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex];
@@ -276,7 +269,7 @@ status_t OMXCameraAdapter::setupEXIF()
buf_size = ((buf_size+4095)/4096)*4096;
sharedBuffer.nSharedBuffSize = buf_size;
- memmgr_buf_array = memMgr.allocateBufferList(0, 0, NULL, buf_size, 1);
+ memmgr_buf_array = mMemMgr.allocateBufferList(0, 0, NULL, buf_size, 1);
sharedBuffer.pSharedBuff = (OMX_U8*)camera_buffer_get_omx_ptr(&memmgr_buf_array[0]);
startPtr = ( OMX_U8 * ) memmgr_buf_array[0].opaque;
@@ -505,7 +498,7 @@ status_t OMXCameraAdapter::setupEXIF()
if ( NULL != memmgr_buf_array )
{
- memMgr.freeBufferList(memmgr_buf_array);
+ mMemMgr.freeBufferList(memmgr_buf_array);
}
LOG_FUNCTION_NAME_EXIT;
diff --git a/camera/OMXCameraAdapter/OMXFocus.cpp b/camera/OMXCameraAdapter/OMXFocus.cpp
index 65577a5..386fff3 100644
--- a/camera/OMXCameraAdapter/OMXFocus.cpp
+++ b/camera/OMXCameraAdapter/OMXFocus.cpp
@@ -200,7 +200,7 @@ status_t OMXCameraAdapter::doAutoFocus()
}
// configure focus timeout based on capture mode
- timeout = (mCapMode == VIDEO_MODE) ?
+ timeout = (mCapMode == VIDEO_MODE) || (mCapMode == VIDEO_MODE_HQ) ?
( ( nsecs_t ) AF_VIDEO_CALLBACK_TIMEOUT * 1000 ) :
( ( nsecs_t ) AF_IMAGE_CALLBACK_TIMEOUT * 1000 );
@@ -714,18 +714,11 @@ status_t OMXCameraAdapter::setTouchFocus()
OMX_ALGOAREASTYPE *focusAreas;
OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
- MemoryManager memMgr;
CameraBuffer *bufferlist;
int areasSize = 0;
LOG_FUNCTION_NAME;
- ret = memMgr.initialize();
- if ( ret != OK ) {
- CAMHAL_LOGE("MemoryManager initialization failed, error: %d", ret);
- return ret;
- }
-
if ( OMX_StateInvalid == mComponentState )
{
CAMHAL_LOGEA("OMX component is in invalid state");
@@ -736,7 +729,7 @@ status_t OMXCameraAdapter::setTouchFocus()
{
areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
- bufferlist = memMgr.allocateBufferList(0, 0, NULL, areasSize, 1);
+ bufferlist = mMemMgr.allocateBufferList(0, 0, NULL, areasSize, 1);
focusAreas = (OMX_ALGOAREASTYPE*) bufferlist[0].opaque;
OMXCameraPortParameters * mPreviewData = NULL;
@@ -822,7 +815,7 @@ status_t OMXCameraAdapter::setTouchFocus()
EXIT:
if (NULL != bufferlist)
{
- memMgr.freeBufferList (bufferlist);
+ mMemMgr.freeBufferList (bufferlist);
}
}
diff --git a/camera/OMXCameraAdapter/OMXReprocess.cpp b/camera/OMXCameraAdapter/OMXReprocess.cpp
index 28f1744..6fdbe7b 100644
--- a/camera/OMXCameraAdapter/OMXReprocess.cpp
+++ b/camera/OMXCameraAdapter/OMXReprocess.cpp
@@ -60,6 +60,9 @@ status_t OMXCameraAdapter::setParametersReprocess(const android::CameraParameter
} else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
CAMHAL_LOGDA("RAW Picture format selected");
pixFormat = OMX_COLOR_FormatRawBayer10bit;
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ CAMHAL_LOGDA("YUV422i Picture format selected");
+ pixFormat = OMX_COLOR_FormatCbYCrY;
} else {
CAMHAL_LOGDA("Format not supported, selecting YUV420SP by default");
pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
@@ -74,7 +77,8 @@ status_t OMXCameraAdapter::setParametersReprocess(const android::CameraParameter
portData->mWidth = w;
portData->mHeight = h;
- if ( OMX_COLOR_FormatRawBayer10bit == pixFormat ) {
+ if ( ( OMX_COLOR_FormatRawBayer10bit == pixFormat ) ||
+ ( OMX_COLOR_FormatCbYCrY == pixFormat ) ) {
portData->mStride = w * 2;
} else {
portData->mStride = s;
@@ -82,12 +86,7 @@ status_t OMXCameraAdapter::setParametersReprocess(const android::CameraParameter
portData->mColorFormat = pixFormat;
- ret = setFormat(OMX_CAMERA_PORT_VIDEO_IN_VIDEO, *portData);
- if ( ret != NO_ERROR ) {
- CAMHAL_LOGEB("setFormat() failed %d", ret);
- LOG_FUNCTION_NAME_EXIT;
- return ret;
- }
+ mPendingReprocessSettings |= SetFormat;
}
LOG_FUNCTION_NAME_EXIT;
@@ -114,8 +113,10 @@ status_t OMXCameraAdapter::startReprocess()
android::AutoMutex lock(mBurstLock);
for ( int index = 0 ; index < portData->mMaxQueueable ; index++ ) {
- CAMHAL_LOGDB("Queuing buffer on video input port - %p",
- portData->mBufferHeader[index]->pBuffer);
+ CAMHAL_LOGDB("Queuing buffer on video input port - %p, offset: %d, length: %d",
+ portData->mBufferHeader[index]->pBuffer,
+ portData->mBufferHeader[index]->nOffset,
+ portData->mBufferHeader[index]->nFilledLen);
portData->mStatus[index] = OMXCameraPortParameters::FILL;
eError = OMX_EmptyThisBuffer(mCameraAdapterParameters.mHandleComp,
(OMX_BUFFERHEADERTYPE*)portData->mBufferHeader[index]);
@@ -123,6 +124,10 @@ status_t OMXCameraAdapter::startReprocess()
}
}
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ CameraHal::PPM("startReprocess buffers queued on video port: ", &mStartCapture);
+#endif
+
return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
@@ -220,22 +225,45 @@ status_t OMXCameraAdapter::UseBuffersReprocess(CameraBuffer *bufArr, int num)
return BAD_VALUE;
}
+ CAMHAL_ASSERT(num > 0);
+
if (mAdapterState == REPROCESS_STATE) {
stopReprocess();
} else if (mAdapterState == CAPTURE_STATE) {
stopImageCapture();
- disableImagePort();
+ stopReprocess();
}
- if (mReprocConfigured) {
- return NO_ERROR;
- }
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Reprocess stopping image capture and disabling image port: ", &bufArr->ppmStamp);
+
+#endif
portData->mNumBufs = num;
// Configure
ret = setParametersReprocess(mParams, bufArr, mAdapterState);
+ if (mReprocConfigured) {
+ if (mPendingReprocessSettings & ECaptureParamSettings) {
+ stopReprocess();
+ } else {
+ // Tap in port has been already configured.
+ return NO_ERROR;
+ }
+ }
+
+ if (mPendingReprocessSettings & SetFormat) {
+ mPendingReprocessSettings &= ~SetFormat;
+ ret = setFormat(OMX_CAMERA_PORT_VIDEO_IN_VIDEO, *portData);
+ if ( ret != NO_ERROR ) {
+ CAMHAL_LOGEB("setFormat() failed %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ }
+
// Configure DOMX to use either gralloc handles or vptrs
OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
OMX_INIT_STRUCT_PTR (&domxUseGrallocHandles, OMX_TI_PARAMUSENATIVEBUFFER);
@@ -259,6 +287,12 @@ status_t OMXCameraAdapter::UseBuffersReprocess(CameraBuffer *bufArr, int num)
}
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Reprocess configuration done: ", &bufArr->ppmStamp);
+
+#endif
+
// Enable Port
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
@@ -295,6 +329,8 @@ status_t OMXCameraAdapter::UseBuffersReprocess(CameraBuffer *bufArr, int num)
pBufferHdr->nVersion.s.nVersionMinor = 1 ;
pBufferHdr->nVersion.s.nRevision = 0;
pBufferHdr->nVersion.s.nStep = 0;
+ pBufferHdr->nOffset = bufArr[index].offset;
+ pBufferHdr->nFilledLen = bufArr[index].actual_size;
portData->mBufferHeader[index] = pBufferHdr;
}
@@ -322,6 +358,12 @@ status_t OMXCameraAdapter::UseBuffersReprocess(CameraBuffer *bufArr, int num)
mReprocConfigured = true;
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ CameraHal::PPM("Reprocess video port enabled and buffers registered: ", &bufArr->ppmStamp);
+
+#endif
+
return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
diff --git a/camera/OmxFrameDecoder.cpp b/camera/OmxFrameDecoder.cpp
new file mode 100644
index 0000000..be794e5
--- /dev/null
+++ b/camera/OmxFrameDecoder.cpp
@@ -0,0 +1,1077 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ErrorUtils.h"
+#include "OmxFrameDecoder.h"
+#include "OMX_TI_IVCommon.h"
+#include "OMX_TI_Index.h"
+#include "Decoder_libjpeg.h"
+
+
+namespace Ti {
+namespace Camera {
+
+const static uint32_t kMaxColorFormatSupported = 1000;
+const static int kMaxStateSwitchTimeOut = 1 * 1000 * 1000 * 1000; // 1 sec
+
+static const char* gDecoderRole[2] = {"video_decoder.mjpeg", "video_decoder.avc"};
+static const OMX_VIDEO_CODINGTYPE gCompressionFormat[2] = {OMX_VIDEO_CodingMJPEG, OMX_VIDEO_CodingAVC};
+
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+
+
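+// CallbackDispatcher serializes OMX callbacks: they are queued via post() and
+// dispatched from a dedicated thread, keeping the handlers out of the OMX
+// component's calling context.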
+CallbackDispatcher::CallbackDispatcher()
+: mDone(false) {
+ mThread = new CallbackDispatcherThread(this);
+ mThread->run("OMXCallbackDisp", ANDROID_PRIORITY_FOREGROUND);
+}
+
+CallbackDispatcher::~CallbackDispatcher() {
+ {
+ android::Mutex::Autolock autoLock(mLock);
+
+ mDone = true;
+ mQueueChanged.signal();
+ }
+
+ status_t status = mThread->join();
+ if (status != WOULD_BLOCK) {
+ //CAMHAL_ASSERT(status, (status_t)NO_ERROR);
+ }
+}
+
+void CallbackDispatcher::post(const OmxMessage &msg) {
+ android::Mutex::Autolock autoLock(mLock);
+
+ mQueue.push_back(msg);
+ mQueueChanged.signal();
+}
+
+void CallbackDispatcher::dispatch(const OmxMessage &msg) {
+
+ switch(msg.type)
+ {
+ case OmxMessage::EVENT :
+ {
+ static_cast<OmxFrameDecoder*>(msg.u.eventData.appData)->eventHandler(msg.u.eventData.event, msg.u.eventData.data1, msg.u.eventData.data2, msg.u.eventData.pEventData);
+ break;
+ }
+
+ case OmxMessage::EMPTY_BUFFER_DONE:
+ {
+ static_cast<OmxFrameDecoder*>(msg.u.bufferData.appData)->emptyBufferDoneHandler(msg.u.bufferData.pBuffHead);
+ break;
+ }
+
+ case OmxMessage::FILL_BUFFER_DONE:
+ {
+ static_cast<OmxFrameDecoder*>(msg.u.bufferData.appData)->fillBufferDoneHandler(msg.u.bufferData.pBuffHead);
+ break;
+ }
+ };
+}
+
+bool CallbackDispatcher::loop() {
+ for (;;) {
+ OmxMessage msg;
+
+ {
+ android::Mutex::Autolock autoLock(mLock);
+ while (!mDone && mQueue.empty()) {
+ mQueueChanged.wait(mLock);
+ }
+
+ if (mDone) {
+ break;
+ }
+
+ msg = *mQueue.begin();
+ mQueue.erase(mQueue.begin());
+ }
+
+ dispatch(msg);
+ }
+
+ return false;
+}
+
+bool CallbackDispatcherThread::threadLoop() {
+ return mDispatcher->loop();
+}
+
+//Static
+OMX_ERRORTYPE OmxFrameDecoder::eventCallback(const OMX_HANDLETYPE component,
+ const OMX_PTR appData, const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData) {
+ OmxMessage msg;
+ msg.type = OmxMessage::EVENT;
+ msg.u.eventData.appData = appData;
+ msg.u.eventData.event = event;
+ msg.u.eventData.data1 = data1;
+ msg.u.eventData.data2 = data2;
+ msg.u.eventData.pEventData = pEventData;
+ ((OmxFrameDecoder *)appData)->mDispatcher.post(msg);
+ return OMX_ErrorNone;
+}
+
+//Static
+OMX_ERRORTYPE OmxFrameDecoder::emptyBufferDoneCallback(OMX_HANDLETYPE hComponent,
+ OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead) {
+ OmxMessage msg;
+ msg.type = OmxMessage::EMPTY_BUFFER_DONE;
+ msg.u.bufferData.appData = appData;
+ msg.u.bufferData.pBuffHead = pBuffHead;
+ ((OmxFrameDecoder *)appData)->mDispatcher.post(msg);
+ return OMX_ErrorNone;
+}
+
+//Static
+OMX_ERRORTYPE OmxFrameDecoder::fillBufferDoneCallback(OMX_HANDLETYPE hComponent,
+ OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead) {
+ OmxMessage msg;
+ msg.type = OmxMessage::FILL_BUFFER_DONE;
+ msg.u.bufferData.appData = appData;
+ msg.u.bufferData.pBuffHead = pBuffHead;
+ ((OmxFrameDecoder *)appData)->mDispatcher.post(msg);
+ return OMX_ErrorNone;
+}
+
+OmxFrameDecoder::OmxFrameDecoder(DecoderType type)
+ : mOmxInialized(false), mCurrentState(OmxDecoderState_Unloaded), mPreviousState(OmxDecoderState_Unloaded),
+ mStopping(false), mDecoderType(type), mIsNeedCheckDHT(true), mAlwaysAppendDHT(false) {
+}
+
+OmxFrameDecoder::~OmxFrameDecoder() {
+}
+
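+// Called via the dispatcher when the component returns an input buffer:
+// mark it decoded while executing, otherwise put it back in the queued state.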
+OMX_ERRORTYPE OmxFrameDecoder::emptyBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead) {
+ LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mHwLock);
+
+ int bufferIndex = reinterpret_cast<int>(pBuffHead->pAppPrivate);
+ CAMHAL_LOGD("Got header %p id = %d", pBuffHead, bufferIndex);
+ android::sp<MediaBuffer>& in = mInBuffers->editItemAt(bufferIndex);
+
+ android::AutoMutex itemLock(in->getLock());
+ in->setStatus((getOmxState() == OmxDecoderState_Executing) ? BufferStatus_InDecoded : BufferStatus_InQueued);
+
+ return OMX_ErrorNone;
+}
+
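+// Called via the dispatcher when the component fills an output buffer:
+// record its offset and timestamp, then mark it filled (or re-queued when
+// not executing).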
+OMX_ERRORTYPE OmxFrameDecoder::fillBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead) {
+ LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mHwLock);
+
+ int index = (int)pBuffHead->pAppPrivate;
+ android::sp<MediaBuffer>& out = mOutBuffers->editItemAt(index);
+
+ android::AutoMutex itemLock(out->getLock());
+ CameraBuffer* frame = static_cast<CameraBuffer*>(out->buffer);
+ out->setOffset(pBuffHead->nOffset);
+ out->setTimestamp(pBuffHead->nTimeStamp);
+ out->setStatus((getOmxState() == OmxDecoderState_Executing) ? BufferStatus_OutFilled : BufferStatus_OutQueued);
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE OmxFrameDecoder::eventHandler(const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData) {
+
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ android::AutoMutex lock(mHwLock);
+
+ switch(event) {
+
+ case OMX_EventCmdComplete:
+ {
+ if ((data1 == OMX_CommandStateSet) && (data2 == OMX_StateIdle)) {
+ CAMHAL_LOGD("Component State Changed To OMX_StateIdle\n");
+ commitState(OmxDecoderState_Idle);
+ mStateCondition.signal();
+ }
+ else if ((data1 == OMX_CommandStateSet) && (data2 == OMX_StateExecuting)) {
+ CAMHAL_LOGD("Component State Changed To OMX_StateExecuting\n");
+ commitState(OmxDecoderState_Executing);
+ mStateCondition.signal();
+ }
+ else if ((data1 == OMX_CommandStateSet) && (data2 == OMX_StateLoaded)) {
+ CAMHAL_LOGD("Component State Changed To OMX_StateLoaded\n");
+ if(getOmxState() == OmxDecoderState_Executing)
+ commitState(OmxDecoderState_Loaded);
+ mStateCondition.signal();
+ }
+ else if (data1 == OMX_CommandFlush) {
+ CAMHAL_LOGD("OMX_CommandFlush done on %d port\n", data2);
+ mStateCondition.signal();
+ }
+ else if (data1 == OMX_CommandPortDisable) {
+ CAMHAL_LOGD("OMX_CommandPortDisable done on %d port\n", data2);
+ mStateCondition.signal();
+ }
+ else if (data1 == OMX_CommandPortEnable) {
+ CAMHAL_LOGD("OMX_CommandPortEnable done on %d port\n", data2);
+ mStateCondition.signal();
+ } else {
+ CAMHAL_LOGD("Event %d done on %d port\n", data1, data2);
+ }
+ break;
+ }
+ case OMX_EventError:
+ {
+ CAMHAL_LOGD("\n\n\nOMX Component reported an Error!!!! 0x%x 0x%x\n\n\n", data1, data2);
+ commitState(OmxDecoderState_Error);
+ omxSendCommand(OMX_CommandStateSet, OMX_StateInvalid);
+ mStateCondition.signal();
+ break;
+ }
+ case OMX_EventPortSettingsChanged:
+ {
+ CAMHAL_LOGD("\n\n\nOMX_EventPortSettingsChanged(port=%ld, data2=0x%08lx)\n\n\n",
+ data1, data2);
+ if (data2 == 0) {
+ // data2 == 0 indicates a full change to the port settings
+ commitState(OmxDecoderState_Reconfigure);
+ } else if (data2 == OMX_IndexConfigCommonOutputCrop) {
+#if 0
+ OMX_CONFIG_RECTTYPE rect;
+ InitOMXParams(&rect);
+ rect.nPortIndex = PortIndexOutput;
+ status_t ret = omxGetConfig(OMX_IndexConfigCommonOutputCrop, &rect);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't get new crop parameters 0x%x", ret);
+ break;
+ }
+
+ CAMHAL_LOGV("Crop should change to %d %d %d %d", rect.nLeft, rect.nTop, rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight);
+#endif
+ }
+ break;
+ }
+ default:
+ {
+ CAMHAL_LOGD("\n\n\nOMX Unhandelled event ID=0x%x!!!!\n\n\n", event);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+ }
+
+void OmxFrameDecoder::doConfigure(const DecoderParameters& config) {
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t OmxFrameDecoder::enableGrallockHandles() {
+ OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+ InitOMXParams(&domxUseGrallocHandles);
+
+ domxUseGrallocHandles.nPortIndex = PortIndexOutput;
+ domxUseGrallocHandles.bEnable = OMX_TRUE;
+
+ return omxSetParameter((OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+}
+
+status_t OmxFrameDecoder::omxSwitchToExecutingSync() {
+ CAMHAL_LOGV("Try set OMX_StateExecuting");
+ android::AutoMutex lock(mHwLock);
+ omxSendCommand(OMX_CommandStateSet, OMX_StateExecuting);
+ status_t ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to EXECUTING ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+}
+
+void OmxFrameDecoder::dumpPortSettings(PortType port) {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = port;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ omxDumpPortSettings(def);
+}
+
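+// Sends OMX_CommandPortDisable and blocks until the port-disable event
+// arrives or the timeout expires.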
+status_t OmxFrameDecoder::disablePortSync(int port) {
+ OMX_ERRORTYPE eError;
+ android::AutoMutex lock(mHwLock);
+ eError = OMX_SendCommand(mHandleComp, OMX_CommandPortDisable, port, NULL);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_CommandPortDisable OMX_ALL returned error 0x%x", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+ status_t ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_StateLoaded ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+}
+
+status_t OmxFrameDecoder::enablePortSync(int port) {
+ android::AutoMutex lock(mHwLock);
+ OMX_ERRORTYPE eError = OMX_SendCommand(mHandleComp, OMX_CommandPortEnable, port, NULL);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_SendCommand OMX_CommandPortEnable returned error 0x%x", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+ status_t ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Timeout waiting for port %d enable, 0x%x", port, ret);
+ return UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+}
+
+
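+// Flushes both ports, disables the output port, frees and reallocates its
+// buffers against the new port definition, then re-enables it.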
+status_t OmxFrameDecoder::doPortReconfigure() {
+ OMX_ERRORTYPE eError;
+ status_t ret = NO_ERROR;
+
+ CAMHAL_LOGD("Starting port reconfiguration !");
+ dumpPortSettings(PortIndexInput);
+ dumpPortSettings(PortIndexOutput);
+
+ android::AutoMutex lock(mHwLock);
+
+ omxSendCommand(OMX_CommandFlush, PortIndexOutput);
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_CommandFlush ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ omxSendCommand(OMX_CommandFlush, PortIndexInput);
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_CommandFlush ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ ret = omxSendCommand(OMX_CommandPortDisable, PortIndexOutput);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("OMX_CommandPortDisable PortIndexOutput returned error 0x%x", ret);
+ return ret;
+ }
+
+ freeBuffersOnOutput();
+
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_StateLoaded ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexOutput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ def.nBufferCountActual = mParams.outputBufferCount;
+ CAMHAL_LOGD("Will set def.nBufferSize=%d stride=%d height=%d", def.nBufferSize , def.format.video.nStride, def.format.video.nFrameHeight);
+ omxSetParameter(OMX_IndexParamPortDefinition, &def);
+
+
+
+ ret = omxSendCommand(OMX_CommandPortEnable, PortIndexOutput);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("omxSendCommand OMX_CommandPortEnable returned error 0x%x", ret);
+ return ret;
+ }
+
+ allocateBuffersOutput();
+
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("omxSendCommand OMX_CommandPortEnable timeout 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ CAMHAL_LOGD("Port reconfiguration DONE!");
+ //dumpPortSettings(PortIndexOutput);
+
+ return NO_ERROR;
+}
+
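+// Returns every output buffer that is back in the queued state to the
+// component via OMX_FillThisBuffer so it always has somewhere to decode into.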
+void OmxFrameDecoder::queueOutputBuffers() {
+
+ LOG_FUNCTION_NAME;
+
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+
+ for (size_t i = 0; i < mOutQueue.size(); i++) {
+ int index = mOutQueue[i];
+ android::sp<MediaBuffer> &outBuffer = mOutBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(outBuffer->getLock());
+ if (outBuffer->getStatus() == BufferStatus_OutQueued) {
+ outBuffer->setStatus(BufferStatus_OutWaitForFill);
+ CameraBuffer* frame = static_cast<CameraBuffer*>(outBuffer->buffer);
+ OMX_BUFFERHEADERTYPE *pOutBufHdr = mOutBufferHeaders[outBuffer->bufferId];
+ CAMHAL_LOGV("Fill this buffer cf=%p bh=%p id=%d", frame, pOutBufHdr, outBuffer->bufferId);
+ status_t status = omxFillThisBuffer(pOutBufHdr);
+ CAMHAL_ASSERT(status == NO_ERROR);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
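+// Main decode step: finish a pending port reconfiguration, move an idle
+// component to executing, then submit all queued input buffers and top up
+// the output queue.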
+void OmxFrameDecoder::doProcessInputBuffer() {
+
+ LOG_FUNCTION_NAME;
+
+ if (getOmxState() == OmxDecoderState_Reconfigure) {
+ if (doPortReconfigure() == NO_ERROR) {
+ commitState(OmxDecoderState_Executing);
+ queueOutputBuffers();
+ } else {
+ commitState(OmxDecoderState_Error);
+ return;
+ }
+
+ }
+
+ if (getOmxState() == OmxDecoderState_Idle) {
+ CAMHAL_ASSERT(omxSwitchToExecutingSync() == NO_ERROR);
+ queueOutputBuffers();
+ }
+
+ if (getOmxState() == OmxDecoderState_Executing) {
+ for (size_t i = 0; i < mInQueue.size(); i++) {
+ int index = mInQueue[i];
+ CAMHAL_LOGD("Got in inqueue[%d] buffer id=%d", i, index);
+ android::sp<MediaBuffer> &inBuffer = mInBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(inBuffer->getLock());
+ if (inBuffer->getStatus() == BufferStatus_InQueued) {
+ OMX_BUFFERHEADERTYPE *pInBufHdr = mInBufferHeaders[index];
+ inBuffer->setStatus(BufferStatus_InWaitForEmpty);
+ omxEmptyThisBuffer(inBuffer, pInBufHdr);
+ }
+ }
+ queueOutputBuffers();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t OmxFrameDecoder::omxInit() {
+
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_Init();
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_Init() failed, error: 0x%x", eError);
+ }
+ else mOmxInialized = true;
+
+ LOG_FUNCTION_NAME_EXIT;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxFillThisBuffer(OMX_BUFFERHEADERTYPE *pOutBufHdr) {
+ OMX_ERRORTYPE eError = OMX_ErrorUndefined;
+
+ pOutBufHdr->nFilledLen = 0;
+ pOutBufHdr->nOffset = 0;
+ pOutBufHdr->nFlags = 0;
+
+ eError = OMX_FillThisBuffer(mHandleComp, pOutBufHdr);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_FillThisBuffer ERROR 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+
+status_t OmxFrameDecoder::omxGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData,
+ OMX_CALLBACKTYPE & callbacks) {
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorUndefined;
+
+ eError = OMX_GetHandle(handle, (OMX_STRING)"OMX.TI.DUCATI1.VIDEO.DECODER", pAppData, &callbacks);
+ if ((eError != OMX_ErrorNone) || (*handle == NULL)) {
+ *handle = NULL;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+ commitState(OmxDecoderState_Loaded);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxEmptyThisBuffer(android::sp<MediaBuffer>& inBuffer, OMX_BUFFERHEADERTYPE *pInBufHdr) {
+
+ LOG_FUNCTION_NAME;
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexInput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ CAMHAL_LOGD("Founded id for empty is %d ", inBuffer->bufferId);
+ if (inBuffer->filledLen > def.nBufferSize) {
+ CAMHAL_LOGE("Can't copy IN buffer due to it too small %d than needed %d", def.nBufferSize, inBuffer->filledLen);
+ return UNKNOWN_ERROR;
+ }
+
+ int filledLen = inBuffer->filledLen;
+ unsigned char* dataBuffer = reinterpret_cast<unsigned char*>(inBuffer->buffer);
+
+ // For MJPEG: append the DHT if forced via mAlwaysAppendDHT; otherwise, if
+ // mIsNeedCheckDHT is set, search the buffer for a DHT and append one when
+ // it is missing. Once a buffer arrives without a DHT, append it to every
+ // subsequent buffer.
+ if ((mDecoderType == DecoderType_MJPEG) && ((mAlwaysAppendDHT) || ((mIsNeedCheckDHT) &&
+ (mIsNeedCheckDHT = !Decoder_libjpeg::isDhtExist(dataBuffer, filledLen))))) {
+ CAMHAL_LOGV("Will append DHT to buffer");
+ Decoder_libjpeg::appendDHT(dataBuffer, filledLen, pInBufHdr->pBuffer, filledLen + Decoder_libjpeg::readDHTSize());
+ filledLen += Decoder_libjpeg::readDHTSize();
+ mIsNeedCheckDHT = false;
+ mAlwaysAppendDHT = true;
+ } else {
+ memcpy(pInBufHdr->pBuffer, dataBuffer, filledLen);
+ }
+
+ CAMHAL_LOGV("Copied %d bytes into In buffer with bh=%p", filledLen, pInBufHdr);
+ CAMHAL_LOGV("Empty this buffer id=%d timestamp %lld offset=%d", inBuffer->bufferId, pInBufHdr->nTimeStamp, pInBufHdr->nOffset);
+ pInBufHdr->nFilledLen = filledLen;
+ pInBufHdr->nTimeStamp = inBuffer->getTimestamp();
+ pInBufHdr->nFlags = 16;
+ pInBufHdr->nOffset = 0;
+ eError = OMX_EmptyThisBuffer(mHandleComp, pInBufHdr);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_EmptyThisBuffer ERROR 0x%x", eError);
+ Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+
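+// Hands the client-allocated output CameraBuffers to the component with
+// OMX_UseBuffer and remembers the returned buffer headers.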
+status_t OmxFrameDecoder::allocateBuffersOutput() {
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexOutput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ def.nBufferCountActual = mParams.outputBufferCount;
+
+ CAMHAL_LOGD("Will set def.nBufferSize=%d stride=%d height=%d", def.nBufferSize , def.format.video.nStride, def.format.video.nFrameHeight);
+
+ OMX_BUFFERHEADERTYPE *pOutBufHdr;
+ mOutBufferHeaders.clear();
+ for (size_t i = 0; i < mOutBuffers->size(); i++) {
+ android::sp<MediaBuffer>& outBuffer = mOutBuffers->editItemAt(i);
+ android::AutoMutex lock(outBuffer->getLock());
+ CameraBuffer* cb = static_cast<CameraBuffer*>(outBuffer->buffer);
+ OMX_U8 * outPtr = static_cast<OMX_U8*>(camera_buffer_get_omx_ptr(cb));
+ CAMHAL_LOGV("Try to set OMX_UseBuffer [0x%x] for output port with length %d ", outPtr, def.nBufferSize);
+ eError = OMX_UseBuffer(mHandleComp, &pOutBufHdr, PortIndexOutput, (void*)i, def.nBufferSize, outPtr);
+
+ if (eError != OMX_ErrorNone) {
+ ALOGE("OMX_UseBuffer failed with error %d (0x%08x)", eError, eError);
+ commitState(OmxDecoderState_Error);
+ return UNKNOWN_ERROR;
+ }
+
+ CAMHAL_LOGD("Got buffer header %p", pOutBufHdr);
+ mOutBufferHeaders.add(pOutBufHdr);
+ }
+
+ omxDumpPortSettings(def);
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+status_t OmxFrameDecoder::allocateBuffersInput() {
+ LOG_FUNCTION_NAME;
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ OMX_BUFFERHEADERTYPE *pInBufHdr;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexInput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+
+ // TODO: Will be changed since port reconfiguration will be handled
+ def.nBufferCountActual = mInBuffers->size();
+ def.bEnabled = OMX_TRUE;
+ omxSetParameter(OMX_IndexParamPortDefinition, &def);
+
+ mInBufferHeaders.clear();
+
+ for (size_t i = 0; i < mInBuffers->size(); i++) {
+ CAMHAL_LOGD("Will do OMX_AllocateBuffer for input port with size %d id=%d", def.nBufferSize, i);
+ eError = OMX_AllocateBuffer(mHandleComp, &pInBufHdr, PortIndexInput, (void*)i, def.nBufferSize);
+ if (eError != OMX_ErrorNone) {
+ ALOGE("OMX_AllocateBuffer failed with error %d (0x%08x)", eError, eError);
+ commitState(OmxDecoderState_Error);
+ return UNKNOWN_ERROR;
+ }
+ CAMHAL_LOGD("Got new buffer header [%p] for IN port", pInBufHdr);
+ mInBufferHeaders.push_back(pInBufHdr);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+status_t OmxFrameDecoder::getAndConfigureDecoder() {
+ status_t ret = NO_ERROR;
+
+ ret = omxInit();
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("OMX_Init returned error 0x%x", ret);
+ return ret;
+ }
+ OMX_CALLBACKTYPE callbacks;
+ callbacks.EventHandler = OmxFrameDecoder::eventCallback;
+ callbacks.EmptyBufferDone = OmxFrameDecoder::emptyBufferDoneCallback;
+ callbacks.FillBufferDone = OmxFrameDecoder::fillBufferDoneCallback;
+ ret = omxGetHandle(&mHandleComp, this, callbacks);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("OMX_GetHandle returned error 0x%x", ret);
+ OMX_Deinit();
+ mOmxInialized = false;
+ return ret;
+ }
+ ret = setComponentRole();
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("setComponentRole returned error 0x%x", ret);
+ OMX_Deinit();
+ mOmxInialized = false;
+ return ret;
+ }
+ disablePortSync(PortIndexOutput);
+ ret = setVideoOutputFormat(mParams.width, mParams.height);
+ enablePortSync(PortIndexOutput);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't set output format error 0x%x", ret);
+ OMX_Deinit();
+ mOmxInialized = false;
+ return ret;
+ }
+ enableGrallockHandles();
+ return NO_ERROR;
+}
+
+status_t OmxFrameDecoder::switchToIdle() {
+ CAMHAL_ASSERT(getOmxState() == OmxDecoderState_Loaded);
+ CAMHAL_LOGD("Try set OMX_StateIdle");
+ android::AutoMutex lock(mHwLock);
+ status_t ret = omxSendCommand(OMX_CommandStateSet, OMX_StateIdle);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't omxSendCommandt error 0x%x", ret);
+ OMX_Deinit();
+ mOmxInialized = false;
+ return ret;
+ }
+
+    ret = allocateBuffersInput();
+    if (ret != NO_ERROR) {
+        return ret;
+    }
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexOutput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ def.nBufferCountActual = mParams.outputBufferCount;
+ omxSetParameter(OMX_IndexParamPortDefinition, &def);
+
+    ret = allocateBuffersOutput();
+    if (ret != NO_ERROR) {
+        return ret;
+    }
+
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to IDLE ERROR 0x%x", ret);
+ return ret;
+ }
+ commitState(OmxDecoderState_Idle);
+ return NO_ERROR;
+}
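+// Note on the ordering above: per the OMX IL state machine, the
+// Loaded->Idle transition only completes once buffers have been supplied
+// on every enabled port, so OMX_SendCommand is issued first, buffers are
+// then allocated, and only afterwards do we block on mStateCondition.
+// A sketch of the matching event-handler side (the real signalling lives
+// in OmxFrameDecoder's event callback):
+//
+//   if (eEvent == OMX_EventCmdComplete && nData1 == OMX_CommandStateSet) {
+//       mStateCondition.signal(); // wakes waitRelative() in switchToIdle()
+//   }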
+
+status_t OmxFrameDecoder::doStart() {
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ mStopping = false;
+
+    ret = getAndConfigureDecoder();
+    if (ret != NO_ERROR) {
+        return ret;
+    }
+
+#if 0
+ OMX_TI_PARAM_ENHANCEDPORTRECONFIG tParamStruct;
+ tParamStruct.nSize = sizeof(OMX_TI_PARAM_ENHANCEDPORTRECONFIG);
+ tParamStruct.nVersion.s.nVersionMajor = 0x1;
+ tParamStruct.nVersion.s.nVersionMinor = 0x1;
+ tParamStruct.nVersion.s.nRevision = 0x0;
+ tParamStruct.nVersion.s.nStep = 0x0;
+ tParamStruct.nPortIndex = PortIndexOutput;
+ tParamStruct.bUsePortReconfigForCrop = OMX_TRUE;
+ tParamStruct.bUsePortReconfigForPadding = OMX_FALSE;
+ omxSetParameter((OMX_INDEXTYPE)OMX_TI_IndexParamUseEnhancedPortReconfig, &tParamStruct);
+#endif
+
+ // Transition to IDLE
+ ret = switchToIdle();
+ dumpPortSettings(PortIndexInput);
+ dumpPortSettings(PortIndexOutput);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t OmxFrameDecoder::omxGetParameter(OMX_INDEXTYPE index, OMX_PTR ptr) {
+ OMX_ERRORTYPE eError = OMX_GetParameter(mHandleComp, index, ptr);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_GetParameter - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxGetConfig(OMX_INDEXTYPE index, OMX_PTR ptr) {
+ OMX_ERRORTYPE eError = OMX_GetConfig(mHandleComp, index, ptr);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_GetConfig - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxSetParameter(OMX_INDEXTYPE index, OMX_PTR ptr) {
+ OMX_ERRORTYPE eError = OMX_SetParameter(mHandleComp, index, ptr);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_SetParameter - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxSetConfig(OMX_INDEXTYPE index, OMX_PTR ptr) {
+ OMX_ERRORTYPE eError = OMX_SetConfig(mHandleComp, index, ptr);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_SetConfig - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxSendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param) {
+ OMX_ERRORTYPE eError = OMX_SendCommand(mHandleComp, cmd, param, NULL);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_SendCommand - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::setVideoOutputFormat(OMX_U32 width, OMX_U32 height) {
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGV("setVideoOutputFormat width=%ld, height=%ld", width, height);
+
+ OMX_VIDEO_CODINGTYPE compressionFormat = gCompressionFormat[mDecoderType];
+
+ status_t err = setVideoPortFormatType(
+ PortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
+
+ if (err != NO_ERROR) {
+ CAMHAL_LOGE("Error during setVideoPortFormatType 0x%x", err);
+ return err;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexInput;
+
+ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+ err = omxGetParameter(OMX_IndexParamPortDefinition, &def);
+
+ if (err != NO_ERROR) {
+ return err;
+ }
+
+ video_def->nFrameWidth = width;
+ video_def->nFrameHeight = height;
+
+ video_def->eCompressionFormat = compressionFormat;
+ video_def->eColorFormat = OMX_COLOR_FormatUnused;
+
+    err = omxSetParameter(OMX_IndexParamPortDefinition, &def);
+
+    if (err != NO_ERROR) {
+ return err;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE odef;
+ OMX_VIDEO_PORTDEFINITIONTYPE *out_video_def = &odef.format.video;
+
+ InitOMXParams(&odef);
+ odef.nPortIndex = PortIndexOutput;
+
+ err = omxGetParameter(OMX_IndexParamPortDefinition, &odef);
+ if (err != NO_ERROR) {
+ return err;
+ }
+
+ out_video_def->nFrameWidth = width;
+ out_video_def->nFrameHeight = height;
+    out_video_def->xFramerate = 30 << 16; // Q16; alternatively ((width >= 720) ? 60 : 30) << 16
+ out_video_def->nStride = 4096;
+
+ err = omxSetParameter(OMX_IndexParamPortDefinition, &odef);
+ CAMHAL_LOGD("OUT port is configured");
+ dumpPortSettings(PortIndexOutput);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return err;
+}
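+// xFramerate above is a Q16 fixed-point value, so "30 << 16" encodes
+// 30 fps. A hedged helper for the conversion (hypothetical name, not part
+// of the OMX API):
+//
+//   static OMX_U32 fpsToQ16(unsigned int fps) { return (OMX_U32)fps << 16; }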
+
+status_t OmxFrameDecoder::setVideoPortFormatType(
+ OMX_U32 portIndex,
+ OMX_VIDEO_CODINGTYPE compressionFormat,
+ OMX_COLOR_FORMATTYPE colorFormat) {
+
+ LOG_FUNCTION_NAME;
+
+ OMX_VIDEO_PARAM_PORTFORMATTYPE format;
+ InitOMXParams(&format);
+ format.nPortIndex = portIndex;
+ format.nIndex = 0;
+ bool found = false;
+
+ OMX_U32 index = 0;
+ for (;;) {
+ CAMHAL_LOGV("Will check index = %d", index);
+ format.nIndex = index;
+        OMX_ERRORTYPE eError = OMX_GetParameter(
+                mHandleComp, OMX_IndexParamVideoPortFormat,
+                &format);
+        if (eError != OMX_ErrorNone) {
+            CAMHAL_LOGE("OMX_GetParameter(OMX_IndexParamVideoPortFormat) ERROR 0x%x", eError);
+            return Utils::ErrorUtils::omxToAndroidError(eError);
+        }
+
+ CAMHAL_LOGV("format.eCompressionFormat=0x%x format.eColorFormat=0x%x", format.eCompressionFormat, format.eColorFormat);
+
+ if (format.eCompressionFormat == compressionFormat
+ && format.eColorFormat == colorFormat) {
+ found = true;
+ break;
+ }
+
+ ++index;
+ if (index >= kMaxColorFormatSupported) {
+ CAMHAL_LOGE("color format %d or compression format %d is not supported",
+ colorFormat, compressionFormat);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ if (!found) {
+ return UNKNOWN_ERROR;
+ }
+
+ CAMHAL_LOGV("found a match.");
+ OMX_ERRORTYPE eError = OMX_SetParameter(
+ mHandleComp, OMX_IndexParamVideoPortFormat,
+ &format);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
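+// The loop above follows the standard OMX port-format enumeration pattern:
+// query OMX_IndexParamVideoPortFormat with increasing nIndex until the
+// desired (eCompressionFormat, eColorFormat) pair appears, with
+// kMaxColorFormatSupported as an upper bound on the scan.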
+
+status_t OmxFrameDecoder::setComponentRole() {
+ OMX_PARAM_COMPONENTROLETYPE roleParams;
+ const char *role = gDecoderRole[mDecoderType];
+ InitOMXParams(&roleParams);
+
+ strncpy((char *)roleParams.cRole,
+ role, OMX_MAX_STRINGNAME_SIZE - 1);
+ roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
+
+ return omxSetParameter(OMX_IndexParamStandardComponentRole, &roleParams);
+}
+
+void OmxFrameDecoder::freeBuffersOnOutput() {
+ LOG_FUNCTION_NAME;
+ for (size_t i = 0; i < mOutBufferHeaders.size(); i++) {
+ OMX_BUFFERHEADERTYPE* header = mOutBufferHeaders[i];
+ CAMHAL_LOGD("Freeing OUT buffer header %p", header);
+ OMX_FreeBuffer(mHandleComp, PortIndexOutput, header);
+ }
+ mOutBufferHeaders.clear();
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::freeBuffersOnInput() {
+ LOG_FUNCTION_NAME;
+ for (size_t i = 0; i < mInBufferHeaders.size(); i++) {
+ OMX_BUFFERHEADERTYPE* header = mInBufferHeaders[i];
+ CAMHAL_LOGD("Freeing IN buffer header %p", header);
+ OMX_FreeBuffer(mHandleComp, PortIndexInput, header);
+ }
+ mInBufferHeaders.clear();
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::doStop() {
+ LOG_FUNCTION_NAME;
+
+ mStopping = true;
+ android::AutoMutex lock(mHwLock);
+
+ CAMHAL_LOGD("HwFrameDecoder::doStop state id=%d", getOmxState());
+
+ if ((getOmxState() == OmxDecoderState_Executing) || (getOmxState() == OmxDecoderState_Reconfigure)) {
+
+ CAMHAL_LOGD("Try set OMX_StateIdle");
+ status_t ret = omxSendCommand(OMX_CommandStateSet, OMX_StateIdle);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't omxSendCommandt error 0x%x", ret);
+ }
+
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to IDLE ERROR 0x%x", ret);
+ }
+ commitState(OmxDecoderState_Idle);
+ }
+
+ if (getOmxState() == OmxDecoderState_Idle) {
+
+ CAMHAL_LOGD("Try set OMX_StateLoaded");
+ status_t ret = omxSendCommand(OMX_CommandStateSet, OMX_StateLoaded);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't omxSendCommandt error 0x%x", ret);
+ return;
+ }
+ freeBuffersOnOutput();
+ freeBuffersOnInput();
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_StateLoaded ERROR 0x%x", ret);
+ }
+ commitState(OmxDecoderState_Loaded);
+
+ }
+
+ if (getOmxState() == OmxDecoderState_Error) {
+ CAMHAL_LOGD("In state ERROR will try to free buffers!");
+ freeBuffersOnOutput();
+ freeBuffersOnInput();
+ }
+
+ CAMHAL_LOGD("Before OMX_FreeHandle ....");
+ OMX_FreeHandle(mHandleComp);
+ CAMHAL_LOGD("After OMX_FreeHandle ....");
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::doFlush() {
+ LOG_FUNCTION_NAME;
+ mIsNeedCheckDHT = true;
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::doRelease() {
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::omxDumpPortSettings(OMX_PARAM_PORTDEFINITIONTYPE& def) {
+ CAMHAL_LOGD("----------Port settings start--------------------");
+ CAMHAL_LOGD("nSize=%d nPortIndex=%d eDir=%d nBufferCountActual=%d", def.nSize, def.nPortIndex, def.eDir, def.nBufferCountActual);
+ CAMHAL_LOGD("nBufferCountMin=%d nBufferSize=%d bEnabled=%d bPopulated=%d bBuffersContiguous=%d nBufferAlignment=%d", def.nBufferCountMin, def.nBufferSize, def.bEnabled, def.bPopulated, def.bBuffersContiguous, def.nBufferAlignment);
+
+ CAMHAL_LOGD("eDomain = %d",def.eDomain);
+
+ if (def.eDomain == OMX_PortDomainVideo) {
+ CAMHAL_LOGD("===============Video Port===================");
+ CAMHAL_LOGD("cMIMEType=%s",def.format.video.cMIMEType);
+ CAMHAL_LOGD("nFrameWidth=%d nFrameHeight=%d", def.format.video.nFrameWidth, def.format.video.nFrameHeight);
+ CAMHAL_LOGD("nStride=%d nSliceHeight=%d", def.format.video.nStride, def.format.video.nSliceHeight);
+ CAMHAL_LOGD("nBitrate=%d xFramerate=%d", def.format.video.nBitrate, def.format.video.xFramerate>>16);
+ CAMHAL_LOGD("bFlagErrorConcealment=%d eCompressionFormat=%d", def.format.video.bFlagErrorConcealment, def.format.video.eCompressionFormat);
+ CAMHAL_LOGD("eColorFormat=0x%x pNativeWindow=%p", def.format.video.eColorFormat, def.format.video.pNativeWindow);
+ CAMHAL_LOGD("===============END Video Part===================");
+ }
+ else if (def.eDomain == OMX_PortDomainImage) {
+ CAMHAL_LOGD("===============Image Port===================");
+ CAMHAL_LOGD("cMIMEType=%s",def.format.image.cMIMEType);
+ CAMHAL_LOGD("nFrameWidth=%d nFrameHeight=%d", def.format.image.nFrameWidth, def.format.image.nFrameHeight);
+ CAMHAL_LOGD("nStride=%d nSliceHeight=%d", def.format.image.nStride, def.format.image.nSliceHeight);
+ CAMHAL_LOGD("bFlagErrorConcealment=%d eCompressionFormat=%d", def.format.image.bFlagErrorConcealment, def.format.image.eCompressionFormat);
+ CAMHAL_LOGD("eColorFormat=0x%x pNativeWindow=%p", def.format.image.eColorFormat, def.format.image.pNativeWindow);
+ CAMHAL_LOGD("===============END Image Part===================");
+ }
+ CAMHAL_LOGD("----------Port settings end--------------------");
+}
+
+void OmxFrameDecoder::omxDumpBufferHeader(OMX_BUFFERHEADERTYPE* bh) {
+ CAMHAL_LOGD("==============OMX_BUFFERHEADERTYPE start==============");
+ CAMHAL_LOGD("nAllocLen=%d nFilledLen=%d nOffset=%d nFlags=0x%x", bh->nAllocLen, bh->nFilledLen, bh->nOffset, bh->nFlags);
+ CAMHAL_LOGD("pBuffer=%p nOutputPortIndex=%d nInputPortIndex=%d nSize=0x%x", bh->pBuffer, bh->nOutputPortIndex, bh->nInputPortIndex, bh->nSize);
+ CAMHAL_LOGD("nVersion=0x%x", bh->nVersion);
+ CAMHAL_LOGD("==============OMX_BUFFERHEADERTYPE end==============");
+}
+
+bool OmxFrameDecoder::getPaddedDimensions(size_t &width, size_t &height) {
+
+ switch (height) {
+
+ case 480: {
+ height = 576;
+ if (width == 640) {
+ width = 768;
+ }
+ break;
+ }
+ case 720: {
+ height = 832;
+ if (width == 1280) {
+ width = 1408;
+ }
+ break;
+ }
+ case 1080: {
+ height = 1184;
+ if (width == 1920) {
+ width = 2048;
+ }
+ break;
+ }
+
+ }
+
+ CAMHAL_LOGD("WxH updated to padded values : %d x %d", width, height);
+ return true;
+}
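+// The padded sizes above reflect the Ducati decoder's output layout
+// (fixed 4096-byte stride plus extra rows of codec reference padding),
+// and returnOutputBuffer() sizes NV12 frames from the padded height
+// accordingly:
+//
+//   size_t nv12Length = 4096 /* stride */ * paddedHeight * 3 / 2;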
+
+} // namespace Camera
+} // namespace Ti
+
diff --git a/camera/SensorListener.cpp b/camera/SensorListener.cpp
index 45a278b..e53fa83 100644
--- a/camera/SensorListener.cpp
+++ b/camera/SensorListener.cpp
@@ -201,10 +201,14 @@ void SensorListener::enableSensor(sensor_type_t type) {
if ((type & SENSOR_ORIENTATION) && !(sensorsEnabled & SENSOR_ORIENTATION)) {
sensor = mgr.getDefaultSensor(android::Sensor::TYPE_ACCELEROMETER);
- CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
- mSensorEventQueue->enableSensor(sensor);
- mSensorEventQueue->setEventRate(sensor, ms2ns(100));
- sensorsEnabled |= SENSOR_ORIENTATION;
+ if(sensor) {
+ CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
+ mSensorEventQueue->enableSensor(sensor);
+ mSensorEventQueue->setEventRate(sensor, ms2ns(100));
+ sensorsEnabled |= SENSOR_ORIENTATION;
+ } else {
+ CAMHAL_LOGDB("not enabling absent orientation sensor");
+ }
}
LOG_FUNCTION_NAME_EXIT;
diff --git a/camera/SwFrameDecoder.cpp b/camera/SwFrameDecoder.cpp
new file mode 100644
index 0000000..2ce2c0f
--- /dev/null
+++ b/camera/SwFrameDecoder.cpp
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Common.h"
+#include "SwFrameDecoder.h"
+
+namespace Ti {
+namespace Camera {
+
+SwFrameDecoder::SwFrameDecoder()
+: mjpegWithHdrSize(0), mJpegWithHeaderBuffer(NULL) {
+}
+
+SwFrameDecoder::~SwFrameDecoder() {
+ delete [] mJpegWithHeaderBuffer;
+ mJpegWithHeaderBuffer = NULL;
+}
+
+
+void SwFrameDecoder::doConfigure(const DecoderParameters& params) {
+ LOG_FUNCTION_NAME;
+
+ mjpegWithHdrSize = (mParams.width * mParams.height / 2) +
+ mJpgdecoder.readDHTSize();
+ if (mJpegWithHeaderBuffer != NULL) {
+ delete [] mJpegWithHeaderBuffer;
+ mJpegWithHeaderBuffer = NULL;
+ }
+ mJpegWithHeaderBuffer = new unsigned char[mjpegWithHdrSize];
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+void SwFrameDecoder::doProcessInputBuffer() {
+ LOG_FUNCTION_NAME;
+ nsecs_t timestamp = 0;
+
+ CAMHAL_LOGV("Will add header to MJPEG");
+ int final_jpg_sz = 0;
+ {
+ int inIndex = mInQueue.itemAt(0);
+ android::sp<MediaBuffer>& inBuffer = mInBuffers->editItemAt(inIndex);
+ android::AutoMutex lock(inBuffer->getLock());
+ timestamp = inBuffer->getTimestamp();
+ final_jpg_sz = mJpgdecoder.appendDHT(
+ reinterpret_cast<unsigned char*>(inBuffer->buffer),
+ inBuffer->filledLen, mJpegWithHeaderBuffer, mjpegWithHdrSize);
+ inBuffer->setStatus(BufferStatus_InDecoded);
+ }
+ CAMHAL_LOGV("Added header to MJPEG");
+ {
+ int outIndex = mOutQueue.itemAt(0);
+ android::sp<MediaBuffer>& outBuffer = mOutBuffers->editItemAt(outIndex);
+ android::AutoMutex lock(outBuffer->getLock());
+ CameraBuffer* buffer = reinterpret_cast<CameraBuffer*>(outBuffer->buffer);
+ if (!mJpgdecoder.decode(mJpegWithHeaderBuffer, final_jpg_sz,
+ reinterpret_cast<unsigned char*>(buffer->mapped), 4096)) {
+ CAMHAL_LOGEA("Error while decoding JPEG");
+ return;
+ }
+ outBuffer->setTimestamp(timestamp);
+ outBuffer->setStatus(BufferStatus_OutFilled);
+ }
+ CAMHAL_LOGV("JPEG decoded!");
+
+ LOG_FUNCTION_NAME_EXIT;
+}
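+// Summary of the flow above: the raw MJPEG input is first rewritten into
+// mJpegWithHeaderBuffer with the standard DHT prepended, then libjpeg
+// decodes that copy straight into the output CameraBuffer using the
+// fixed 4096-byte stride expected by the display path.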
+
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/TICameraParameters.cpp b/camera/TICameraParameters.cpp
index ed8105e..ae8cd81 100644
--- a/camera/TICameraParameters.cpp
+++ b/camera/TICameraParameters.cpp
@@ -31,6 +31,7 @@ const char TICameraParameters::HIGH_QUALITY_MODE[] = "high-quality";
const char TICameraParameters::HIGH_QUALITY_ZSL_MODE[] = "high-quality-zsl";
const char TICameraParameters::CP_CAM_MODE[] = "cp-cam";
const char TICameraParameters::VIDEO_MODE[] = "video-mode";
+const char TICameraParameters::VIDEO_MODE_HQ[] = "video-mode-hq";
const char TICameraParameters::EXPOSURE_BRACKETING[] = "exposure-bracketing";
const char TICameraParameters::ZOOM_BRACKETING[] = "zoom-bracketing";
const char TICameraParameters::TEMP_BRACKETING[] = "temporal-bracketing";
@@ -220,12 +221,14 @@ const char TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[
const char TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION[] = "mechanical-misalignment-correction";
//TI extensions for enable/disable algos
-const char TICameraParameters::KEY_ALGO_FIXED_GAMMA[] = TI_KEY_ALGO_PREFIX "fixed-gamma";
+const char TICameraParameters::KEY_ALGO_EXTERNAL_GAMMA[] = TI_KEY_ALGO_PREFIX "external-gamma";
const char TICameraParameters::KEY_ALGO_NSF1[] = TI_KEY_ALGO_PREFIX "nsf1";
const char TICameraParameters::KEY_ALGO_NSF2[] = TI_KEY_ALGO_PREFIX "nsf2";
const char TICameraParameters::KEY_ALGO_SHARPENING[] = TI_KEY_ALGO_PREFIX "sharpening";
const char TICameraParameters::KEY_ALGO_THREELINCOLORMAP[] = TI_KEY_ALGO_PREFIX "threelinecolormap";
const char TICameraParameters::KEY_ALGO_GIC[] = TI_KEY_ALGO_PREFIX "gic";
+const char TICameraParameters::KEY_GAMMA_TABLE[] = "gamma-table";
+
} // namespace Camera
} // namespace Ti
diff --git a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
index 29c71c7..2c641ce 100644..100755
--- a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
+++ b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -37,11 +37,10 @@
#include <sys/mman.h>
#include <sys/select.h>
#include <linux/videodev.h>
+#include <cutils/properties.h>
-#include <ui/GraphicBuffer.h>
-#include <ui/GraphicBufferMapper.h>
+#include "DecoderFactory.h"
-#include <cutils/properties.h>
#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
static int mDebugFps = 0;
@@ -55,11 +54,6 @@ namespace Camera {
//frames skipped before recalculating the framerate
#define FPS_PERIOD 30
-//define this macro to save first few raw frames when starting the preview.
-//#define SAVE_RAW_FRAMES 1
-//#define DUMP_CAPTURE_FRAME 1
-//#define PPM_PER_FRAME_CONVERSION 1
-
//Proto Types
static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size );
static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height );
@@ -68,14 +62,40 @@ static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int wid
android::Mutex gV4LAdapterLock;
char device[15];
+static void debugShowFPS()
+{
+ static int mFrameCount = 0;
+ static int mLastFrameCount = 0;
+ static nsecs_t mLastFpsTime = 0;
+ static float mFps = 0;
+ if(mDebugFps) {
+ mFrameCount++;
+ if ((mFrameCount % 30 == 0)) {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ CAMHAL_LOGE("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ }
+ }
+}
+
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
/*--------------------V4L wrapper functions -------------------------------*/
+
+bool V4LCameraAdapter::isNeedToUseDecoder() const {
+ return mPixelFormat != V4L2_PIX_FMT_YUYV;
+}
+
status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
status_t ret = NO_ERROR;
errno = 0;
+ android::AutoMutex lock(mV4LLock);
+
do {
ret = ioctl (fd, req, argp);
}while (-1 == ret && EINTR == errno);
@@ -86,6 +106,11 @@ status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
status_t V4LCameraAdapter::v4lInitMmap(int& count) {
status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ int width, height;
+ mParams.getPreviewSize(&width, &height);
+
//First allocate adapter internal buffers at V4L level for USB Cam
//These are the buffers from which we will copy the data into overlay buffers
/* Check if camera can handle NB_BUFFER buffers */
@@ -100,6 +125,10 @@ status_t V4LCameraAdapter::v4lInitMmap(int& count) {
}
count = mVideoInfo->rb.count;
+
+ //Since we will do mapping of new In buffers - clear input MediaBuffer storage
+ mInBuffers.clear();
+
for (int i = 0; i < count; i++) {
memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
@@ -126,7 +155,26 @@ status_t V4LCameraAdapter::v4lInitMmap(int& count) {
CAMHAL_LOGEB("Unable to map buffer [%d]. (%s)", i, strerror(errno));
return -1;
}
+
+ MediaBuffer* buffer = new MediaBuffer(i, mVideoInfo->mem[i], mVideoInfo->buf.length);
+ mInBuffers.push_back(buffer);
}
+
+ if (isNeedToUseDecoder()) {
+ mDecoder->registerInputBuffers(&mInBuffers);
+ DecoderParameters params;
+ params.width = width;
+ params.height = height;
+ params.inputBufferCount = count;
+ params.outputBufferCount = count;
+ mDecoder->configure(params);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
return ret;
}
@@ -151,9 +199,14 @@ status_t V4LCameraAdapter::v4lStartStreaming () {
status_t ret = NO_ERROR;
enum v4l2_buf_type bufType;
+ LOG_FUNCTION_NAME;
+
if (!mVideoInfo->isStreaming) {
bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-
+ ret = applyFpsValue();
+ if (ret != NO_ERROR) {
+ return ret;
+ }
ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
if (ret < 0) {
CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
@@ -161,6 +214,8 @@ status_t V4LCameraAdapter::v4lStartStreaming () {
}
mVideoInfo->isStreaming = true;
}
+
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -168,6 +223,8 @@ status_t V4LCameraAdapter::v4lStopStreaming (int nBufferCount) {
status_t ret = NO_ERROR;
enum v4l2_buf_type bufType;
+ LOG_FUNCTION_NAME;
+
if (mVideoInfo->isStreaming) {
bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
@@ -199,12 +256,15 @@ status_t V4LCameraAdapter::v4lStopStreaming (int nBufferCount) {
}
}
EXIT:
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_format) {
status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
if (ret < 0) {
@@ -214,7 +274,7 @@ status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_for
mVideoInfo->width = width;
mVideoInfo->height = height;
mVideoInfo->framesizeIn = (width * height << 1);
- mVideoInfo->formatIn = DEFAULT_PIXEL_FORMAT;
+ mVideoInfo->formatIn = pix_format;
mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->format.fmt.pix.width = width;
@@ -228,6 +288,9 @@ status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_for
}
v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
CAMHAL_LOGDB("VIDIOC_G_FMT : WxH = %dx%d", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height);
+ CAMHAL_LOGD("### Using: WxH = %dx%d pixelformat=0x%x ", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height, mVideoInfo->format.fmt.pix.pixelformat);
+ CAMHAL_LOGD("### Using: bytesperline=%d sizeimage=%d colorspace=0x%x", mVideoInfo->format.fmt.pix.bytesperline, mVideoInfo->format.fmt.pix.sizeimage, mVideoInfo->format.fmt.pix.colorspace);
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -238,10 +301,12 @@ status_t V4LCameraAdapter::restartPreview ()
int height = 0;
struct v4l2_streamparm streamParams;
+ LOG_FUNCTION_NAME;
+
//configure for preview size and pixel format.
mParams.getPreviewSize(&width, &height);
- ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
+ ret = v4lSetFormat (width, height, mPixelFormat);
if (ret < 0) {
CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
goto EXIT;
@@ -253,25 +318,14 @@ status_t V4LCameraAdapter::restartPreview ()
goto EXIT;
}
- //set frame rate
- streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
- streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
- streamParams.parm.capture.timeperframe.denominator = FPS_PERIOD;
- streamParams.parm.capture.timeperframe.numerator= 1;
- ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
- if (ret < 0) {
- CAMHAL_LOGEB("VIDIOC_S_PARM Failed: %s", strerror(errno));
- goto EXIT;
- }
-
for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
- mVideoInfo->buf.index = i;
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+        v4l2_buffer buf;
+        memset(&buf, 0, sizeof(buf));
+ buf.index = i;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
- ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
if (ret < 0) {
CAMHAL_LOGEA("VIDIOC_QBUF Failed");
goto EXIT;
@@ -282,6 +336,7 @@ status_t V4LCameraAdapter::restartPreview ()
ret = v4lStartStreaming();
CAMHAL_LOGDA("Ready for preview....");
EXIT:
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -291,6 +346,9 @@ status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
char value[PROPERTY_VALUE_MAX];
LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
property_get("debug.camera.showfps", value, "0");
mDebugFps = atoi(value);
@@ -303,7 +361,7 @@ status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
goto EXIT;
}
- if ((mCameraHandle = open(device, O_RDWR) ) == -1) {
+ if ((mCameraHandle = open(device, O_RDWR | O_NONBLOCK) ) == -1) {
CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
ret = BAD_VALUE;
goto EXIT;
@@ -342,73 +400,121 @@ status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::F
{
status_t ret = NO_ERROR;
int idx = 0;
+
LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mLock);
+
if ( frameType == CameraFrame::IMAGE_FRAME) { //(1 > mCapturedFrames)
// Signal end of image capture
if ( NULL != mEndImageCaptureCallback) {
CAMHAL_LOGDB("===========Signal End Image Capture==========");
+ mLock.unlock();
mEndImageCaptureCallback(mEndCaptureData);
+ mLock.lock();
}
- goto EXIT;
+ return ret;
}
+
if ( !mVideoInfo->isStreaming ) {
- goto EXIT;
+ return ret;
}
- idx = mPreviewBufs.valueFor(frameBuf);
+    int xx = 0;
+    for (; xx < NB_BUFFER; xx++){
+        if (mPreviewBufs[xx] == frameBuf){
+            idx = xx;
+            break;
+        }
+    }
+    if (xx == NB_BUFFER){
+        CAMHAL_LOGEB("Buffer %p not found in preview buffer list", frameBuf);
+        return ret;
+    }
if(idx < 0) {
CAMHAL_LOGEB("Wrong index = %d",idx);
- goto EXIT;
+ return ret;
}
+ if (isNeedToUseDecoder()) {
+ for (int i = 0; i < mOutBuffers.size(); i++) {
+ android::sp<MediaBuffer>& outBuffer = mOutBuffers.editItemAt(i);
+ CameraBuffer* buffer = static_cast<CameraBuffer*>(outBuffer->buffer);
+ if (buffer == frameBuf) {
+ mDecoder->queueOutputBuffer(outBuffer->bufferId);
+ break;
+ }
+ }
- mVideoInfo->buf.index = idx;
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+ int inIndex = -1;
+ ret = mDecoder->dequeueInputBuffer(inIndex);
- ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
- if (ret < 0) {
- CAMHAL_LOGEA("VIDIOC_QBUF Failed");
- goto EXIT;
+ if (ret == NO_ERROR) {
+ ret = returnBufferToV4L(inIndex);
+ }
+
+ } else {
+        v4l2_buffer buf;
+        memset(&buf, 0, sizeof(buf));
+ buf.index = idx;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ CAMHAL_LOGD("Will return buffer to V4L with id=%d", idx);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
+ }
+
+ nQueued++;
}
- nQueued++;
+
EXIT:
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+status_t V4LCameraAdapter::applyFpsValue() {
+    struct v4l2_streamparm streamParams;
+    memset(&streamParams, 0, sizeof(streamParams));
+ status_t ret = NO_ERROR;
+ streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
+ streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
+ streamParams.parm.capture.timeperframe.denominator = mFrameRate / CameraHal::VFR_SCALE;
+ streamParams.parm.capture.timeperframe.numerator= 1;
+ ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
+ if (ret < 0) {
+ CAMHAL_LOGEB(" VIDIOC_S_PARM Failed: %s", strerror(errno));
+ return ret;
+ }
+ int actualFps = streamParams.parm.capture.timeperframe.denominator / streamParams.parm.capture.timeperframe.numerator;
+ CAMHAL_LOGDB("Actual FPS set is : %d.", actualFps);
+ return NO_ERROR;
+}
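+// V4L2 expresses frame rate as a time-per-frame fraction, while mFrameRate
+// carries the Android fps-range value, which is scaled by
+// CameraHal::VFR_SCALE (fps * 1000). So a requested "30000" maps to:
+//
+//   timeperframe.numerator   = 1;
+//   timeperframe.denominator = 30000 / 1000;  // 30 frames per second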
+
status_t V4LCameraAdapter::setParameters(const android::CameraParameters &params)
{
status_t ret = NO_ERROR;
int width, height;
- struct v4l2_streamparm streamParams;
+ int minFps = 0, maxFps = 0;
LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mLock);
+
if(!mPreviewing && !mCapturing) {
params.getPreviewSize(&width, &height);
- CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);
-
- ret = v4lSetFormat( width, height, DEFAULT_PIXEL_FORMAT);
+ CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, mPixelFormat);
+ ret = v4lSetFormat( width, height, mPixelFormat);
if (ret < 0) {
CAMHAL_LOGEB(" VIDIOC_S_FMT Failed: %s", strerror(errno));
goto EXIT;
}
- //set frame rate
- // Now its fixed to 30 FPS
- streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
- streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
- streamParams.parm.capture.timeperframe.denominator = FPS_PERIOD;
- streamParams.parm.capture.timeperframe.numerator= 1;
- ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
- if (ret < 0) {
- CAMHAL_LOGEB(" VIDIOC_S_PARM Failed: %s", strerror(errno));
- goto EXIT;
+
+ params.getPreviewFpsRange(&minFps, &maxFps);
+ CAMHAL_LOGD("Current fps is %d new fps is (%d,%d)", mFrameRate, minFps, maxFps);
+ if (maxFps != mFrameRate) {
+ mFrameRate = maxFps;
}
- int actualFps = streamParams.parm.capture.timeperframe.denominator / streamParams.parm.capture.timeperframe.numerator;
- CAMHAL_LOGDB("Actual FPS set is : %d.", actualFps);
+
}
// Udpate the current parameter set
@@ -424,6 +530,7 @@ void V4LCameraAdapter::getParameters(android::CameraParameters& params)
{
LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mLock);
// Return the current parameter set
params = mParams;
@@ -485,6 +592,17 @@ status_t V4LCameraAdapter::UseBuffersCapture(CameraBuffer *bufArr, int num) {
CAMHAL_LOGDB("capture- buff [%d] = 0x%x ",i, mCaptureBufs.keyAt(i));
}
+ mCaptureBuffersAvailable.clear();
+ for (int i = 0; i < mCaptureBufferCountQueueable; i++ ) {
+ mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 0);
+ }
+
+ // initial ref count for undeqeueued buffers is 1 since buffer provider
+ // is still holding on to it
+ for (int i = mCaptureBufferCountQueueable; i < num; i++ ) {
+ mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 1);
+ }
+
// Update the preview buffer count
mCaptureBufferCount = num;
EXIT:
@@ -504,13 +622,20 @@ status_t V4LCameraAdapter::UseBuffersPreview(CameraBuffer *bufArr, int num)
}
ret = v4lInitMmap(num);
+
+ mOutBuffers.clear();
+
if (ret == NO_ERROR) {
for (int i = 0; i < num; i++) {
//Associate each Camera internal buffer with the one from Overlay
- mPreviewBufs.add(&bufArr[i], i);
- CAMHAL_LOGDB("Preview- buff [%d] = 0x%x ",i, mPreviewBufs.keyAt(i));
+ mPreviewBufs[i] = &bufArr[i];
+ MediaBuffer* buffer = new MediaBuffer(i, mPreviewBufs[i]);
+ mOutBuffers.push_back(buffer);
+ CAMHAL_LOGDB("Preview- buff [%d] = 0x%x length=%d",i, mPreviewBufs[i], mFrameQueue.valueFor(mPreviewBufs[i])->mLength);
+ }
+ if (isNeedToUseDecoder()) {
+ mDecoder->registerOutputBuffers(&mOutBuffers);
}
-
// Update the preview buffer count
mPreviewBufferCount = num;
}
@@ -531,7 +656,7 @@ status_t V4LCameraAdapter::takePicture() {
LOG_FUNCTION_NAME;
- android::AutoMutex lock(mCaptureBufsLock);
+ android::AutoMutex lock(mLock);
if(mCapturing) {
CAMHAL_LOGEA("Already Capture in Progress...");
@@ -554,7 +679,7 @@ status_t V4LCameraAdapter::takePicture() {
CAMHAL_LOGDB("Image Capture Size WxH = %dx%d",width,height);
yuv422i_buff_size = width * height * 2;
- ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
+ ret = v4lSetFormat (width, height, DEFAULT_CAPTURE_FORMAT);
if (ret < 0) {
CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
goto EXIT;
@@ -568,11 +693,12 @@ status_t V4LCameraAdapter::takePicture() {
for (int i = 0; i < mCaptureBufferCountQueueable; i++) {
- mVideoInfo->buf.index = i;
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+        v4l2_buffer buf;
+        memset(&buf, 0, sizeof(buf));
+ buf.index = i;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
- ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
if (ret < 0) {
CAMHAL_LOGEA("VIDIOC_QBUF Failed");
ret = BAD_VALUE;
@@ -590,7 +716,9 @@ status_t V4LCameraAdapter::takePicture() {
CAMHAL_LOGDA("Streaming started for Image Capture");
//get the frame and send to encode as JPG
- fp = this->GetFrame(index);
+ int filledLen;
+ CAMHAL_LOGD("*********Will dequeue frame for Image Capture***********");
+ fp = this->GetFrame(index, filledLen);
if(!fp) {
CAMHAL_LOGEA("!!! Captured frame is NULL !!!!");
ret = BAD_VALUE;
@@ -599,7 +727,7 @@ status_t V4LCameraAdapter::takePicture() {
CAMHAL_LOGDA("::Capture Frame received from V4L::");
buffer = mCaptureBufs.keyAt(index);
- CAMHAL_LOGVB("## captureBuf[%d] = 0x%x, yuv422i_buff_size=%d", index, buffer->opaque, yuv422i_buff_size);
+ CAMHAL_LOGVB("## captureBuf[%d] = 0x%x, yuv422i_buff_size=%d fill_length=%d", index, buffer->opaque, yuv422i_buff_size, filledLen);
//copy the yuv422i data to the image buffer.
memcpy(buffer->opaque, fp, yuv422i_buff_size);
@@ -659,6 +787,8 @@ status_t V4LCameraAdapter::stopImageCapture()
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mLock);
+
//Release image buffers
if ( NULL != mReleaseImageBuffersCallback ) {
mReleaseImageBuffersCallback(mReleaseData);
@@ -686,7 +816,8 @@ status_t V4LCameraAdapter::startPreview()
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- android::AutoMutex lock(mPreviewBufsLock);
+
+ android::AutoMutex lock(mLock);
if(mPreviewing) {
ret = BAD_VALUE;
@@ -695,18 +826,25 @@ status_t V4LCameraAdapter::startPreview()
for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
- mVideoInfo->buf.index = i;
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+        v4l2_buffer buf;
+        memset(&buf, 0, sizeof(buf));
+ buf.index = i;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
- ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
if (ret < 0) {
CAMHAL_LOGEA("VIDIOC_QBUF Failed");
goto EXIT;
}
nQueued++;
}
-
+ if (isNeedToUseDecoder()) {
+ for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+ mDecoder->queueOutputBuffer(i);
+ CAMHAL_LOGV("Queued output buffer with id=%d ", i);
+ }
+ mDecoder->start();
+ }
ret = v4lStartStreaming();
// Create and start preview thread for receiving buffers from V4L Camera
@@ -730,13 +868,19 @@ status_t V4LCameraAdapter::stopPreview()
int ret = NO_ERROR;
LOG_FUNCTION_NAME;
- android::AutoMutex lock(mStopPreviewLock);
+
+ android::AutoMutex lock(mLock);
if(!mPreviewing) {
return NO_INIT;
}
mPreviewing = false;
-
+ if (isNeedToUseDecoder()) {
+ android::AutoMutex lock(mStopLock);
+ mStopCondition.waitRelative(mStopLock, 100000000);
+ mDecoder->stop();
+ mDecoder->flush();
+ }
ret = v4lStopStreaming(mPreviewBufferCount);
if (ret < 0) {
CAMHAL_LOGEB("StopStreaming: FAILED: %s", strerror(errno));
@@ -746,37 +890,90 @@ status_t V4LCameraAdapter::stopPreview()
nDequeued = 0;
mFramesWithEncoder = 0;
- mPreviewBufs.clear();
+ mLock.unlock();
mPreviewThread->requestExitAndWait();
mPreviewThread.clear();
+
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-char * V4LCameraAdapter::GetFrame(int &index)
+
+void saveFile(unsigned char* buff, int buff_size) {
+ static int counter = 1;
+ int fd = -1;
+ char fn[256];
+
+ LOG_FUNCTION_NAME;
+ if (counter > 30) {
+ return;
+ }
+    //dump the raw h264 frame
+    counter++;
+    snprintf(fn, sizeof(fn), "/data/tmp/dump_%03d.h264", counter);
+    CAMHAL_LOGEB("Dumping h264 frame to a file : %s.", fn);
+
+ fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+ if(fd < 0) {
+ CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
+ return;
+ }
+
+ write(fd, buff, buff_size );
+ close(fd);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+char * V4LCameraAdapter::GetFrame(int &index, int &filledLen)
{
int ret = NO_ERROR;
LOG_FUNCTION_NAME;
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+    v4l2_buffer buf;
+    memset(&buf, 0, sizeof(buf));
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
/* DQ */
- ret = v4lIoctl(mCameraHandle, VIDIOC_DQBUF, &mVideoInfo->buf);
+ // Some V4L drivers, notably uvc, protect each incoming call with
+ // a driver-wide mutex. If we use poll() or blocking VIDIOC_DQBUF ioctl
+ // here then we sometimes would run into a deadlock on VIDIO_QBUF ioctl.
+ while(true) {
+ if(!mVideoInfo->isStreaming) {
+ return NULL;
+ }
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_DQBUF, &buf);
+ if((ret == 0) || (errno != EAGAIN)) {
+ break;
+ }
+ }
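+    // The device is opened with O_NONBLOCK (see initialize()), so
+    // VIDIOC_DQBUF returns EAGAIN instead of blocking and the loop above
+    // spins until a buffer arrives or streaming is stopped.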
+
if (ret < 0) {
CAMHAL_LOGEA("GetFrame: VIDIOC_DQBUF Failed");
return NULL;
}
- nDequeued++;
- index = mVideoInfo->buf.index;
+ index = buf.index;
+ filledLen = buf.bytesused;
+ android::sp<MediaBuffer>& inBuffer = mInBuffers.editItemAt(index);
+ {
+ android::AutoMutex bufferLock(inBuffer->getLock());
+ inBuffer->setTimestamp(systemTime(SYSTEM_TIME_MONOTONIC));
+ inBuffer->filledLen = buf.bytesused;
+ }
+ debugShowFPS();
LOG_FUNCTION_NAME_EXIT;
- return (char *)mVideoInfo->mem[mVideoInfo->buf.index];
+ return (char *)mVideoInfo->mem[index];
}
+
//API to get the frame size required to be allocated. This size is used to override the size passed
//by camera service when VSTAB/VNF is turned ON for example
status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
@@ -784,9 +981,15 @@ status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mLock);
+
// Just return the current preview size, nothing more to do here.
- mParams.getPreviewSize(( int * ) &width,
- ( int * ) &height);
+ mParams.getPreviewSize(( int * ) &width,( int * ) &height);
+
+ // TODO: This will reside until correct port reconfiguration handling will done.
+ if (isNeedToUseDecoder()) {
+ mDecoder->getPaddedDimensions(width, height);
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -795,11 +998,12 @@ status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
status_t V4LCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount)
{
+ android::AutoMutex lock(mLock);
// We don't support meta data, so simply return
return NO_ERROR;
}
-status_t V4LCameraAdapter::getPictureBufferSize(CameraFrame *frame, size_t bufferCount)
+status_t V4LCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t bufferCount)
{
int width = 0;
int height = 0;
@@ -807,41 +1011,20 @@ status_t V4LCameraAdapter::getPictureBufferSize(CameraFrame *frame, size_t buffe
LOG_FUNCTION_NAME;
- if (frame == NULL) {
- return BAD_VALUE;
- }
+ android::AutoMutex lock(mLock);
mParams.getPictureSize( &width, &height );
- frame->mLength = width * height * bytesPerPixel;
- frame->mWidth = width;
- frame->mHeight = height;
- frame->mAlignment = width * bytesPerPixel;
+ frame.mLength = width * height * bytesPerPixel;
+ frame.mWidth = width;
+ frame.mHeight = height;
+ frame.mAlignment = width * bytesPerPixel;
CAMHAL_LOGDB("Picture size: W x H = %u x %u (size=%u bytes, alignment=%u bytes)",
- frame->mWidth, frame->mHeight, frame->mLength, frame->mAlignment);
+ frame.mWidth, frame.mHeight, frame.mLength, frame.mAlignment);
LOG_FUNCTION_NAME_EXIT;
return NO_ERROR;
}
-static void debugShowFPS()
-{
- static int mFrameCount = 0;
- static int mLastFrameCount = 0;
- static nsecs_t mLastFpsTime = 0;
- static float mFps = 0;
- if(mDebugFps) {
- mFrameCount++;
- if (!(mFrameCount & 0x1F)) {
- nsecs_t now = systemTime();
- nsecs_t diff = now - mLastFpsTime;
- mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
- mLastFpsTime = now;
- mLastFrameCount = mFrameCount;
- CAMHAL_LOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
- }
- }
-}
-
status_t V4LCameraAdapter::recalculateFPS()
{
float currentFPS;
@@ -877,16 +1060,70 @@ void V4LCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
{
LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mLock);
+
LOG_FUNCTION_NAME_EXIT;
}
+void V4LCameraAdapter::setupWorkingMode() {
+ char value[PROPERTY_VALUE_MAX];
+ int v4lMode = 0;
-V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index)
+ property_get("camera.v4l.mode", value, "3");
+ v4lMode = atoi(value);
+
+ if (mDecoder) {
+ delete mDecoder;
+ mDecoder = NULL;
+ }
+
+ switch (v4lMode) {
+ case 0 : {
+ mPixelFormat = V4L2_PIX_FMT_MJPEG;
+ mCameraHal->setExternalLocking(true);
+ mDecoder = DecoderFactory::createDecoderByType(DecoderType_MJPEG, false);
+ CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_MJPEG with HW decoding");
+ break;
+ }
+
+ case 1 : {
+ mPixelFormat = V4L2_PIX_FMT_MJPEG;
+ mCameraHal->setExternalLocking(false);
+ mDecoder = DecoderFactory::createDecoderByType(DecoderType_MJPEG, true);
+ CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_MJPEG with SW decoding");
+ break;
+ }
+
+ case 2 : {
+        // Workaround for kernel 3.0, until proper H264 parsing is available.
+        mPixelFormat = 0; //V4L2_PIX_FMT_H264
+ mCameraHal->setExternalLocking(true);
+ mDecoder = DecoderFactory::createDecoderByType(DecoderType_H264, false);
+ CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_H264");
+ break;
+ }
+ default:
+ case 3 : {
+ mCameraHal->setExternalLocking(false);
+ mPixelFormat = V4L2_PIX_FMT_YUYV;
+ CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_YUYV");
+ }
+
+ }
+}
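+// The decoding path is therefore chosen at adapter construction time via
+// a system property, e.g.:
+//
+//   adb shell setprop camera.v4l.mode 0   # MJPEG with HW (Ducati) decoding
+//   adb shell setprop camera.v4l.mode 3   # raw YUYV, no decoder (default)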
+
+V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index, CameraHal* hal)
+ :mPixelFormat(DEFAULT_PIXEL_FORMAT), mFrameRate(0), mCameraHal(hal)
{
LOG_FUNCTION_NAME;
// Nothing useful to do in the constructor
mFramesWithEncoder = 0;
+ mDecoder = 0;
+ nQueued = 0;
+ nDequeued = 0;
+
+ setupWorkingMode();
LOG_FUNCTION_NAME_EXIT;
}
@@ -904,6 +1141,11 @@ V4LCameraAdapter::~V4LCameraAdapter()
mVideoInfo = NULL;
}
+ delete mDecoder;
+
+ mInBuffers.clear();
+ mOutBuffers.clear();
+
LOG_FUNCTION_NAME_EXIT;
}
@@ -1075,36 +1317,73 @@ static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int wid
LOG_FUNCTION_NAME_EXIT;
}
-#ifdef SAVE_RAW_FRAMES
-void saveFile(unsigned char* buff, int buff_size) {
- static int counter = 1;
- int fd = -1;
- char fn[256];
+
+
+
+/* Preview Thread */
+// ---------------------------------------------------------------------------
+
+void V4LCameraAdapter::returnOutputBuffer(int index)
+{
LOG_FUNCTION_NAME;
- if (counter > 3) {
- return;
- }
- //dump nv12 buffer
- counter++;
- sprintf(fn, "/data/misc/camera/raw/nv12_dump_%03d.yuv", counter);
- CAMHAL_LOGEB("Dumping nv12 frame to a file : %s.", fn);
- fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
- if(fd < 0) {
- CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
- return;
+ size_t width, height;
+ int stride = 4096;
+ CameraFrame frame;
+
+ getFrameSize(width, height);
+
+ android::Mutex::Autolock slock(mSubscriberLock);
+
+ android::sp<MediaBuffer>& buffer = mOutBuffers.editItemAt(index);
+
+ CameraBuffer* cbuffer = static_cast<CameraBuffer*>(buffer->buffer);
+
+ frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
+ frame.mBuffer = cbuffer;
+ if (isNeedToUseDecoder()) {
+        // When the decoder is used, its output is always NV12.
+ frame.mLength = height * stride * 3 / 2;
+ } else {
+ frame.mLength = CameraHal::calculateBufferSize(mParams.getPreviewFormat(), width, height);
}
+ frame.mAlignment = stride;
+ frame.mOffset = buffer->getOffset();
+ frame.mTimestamp = buffer->getTimestamp();
+ frame.mFrameMask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;
- write(fd, buff, buff_size );
- close(fd);
+ if (mRecording)
+ {
+ frame.mFrameMask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
+ mFramesWithEncoder++;
+ }
+ int ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
+ } else {
+ ret = sendFrameToSubscribers(&frame);
+ }
+ //debugShowFPS();
LOG_FUNCTION_NAME_EXIT;
}
-#endif
-/* Preview Thread */
-// ---------------------------------------------------------------------------
+status_t V4LCameraAdapter::returnBufferToV4L(int id) {
+ status_t ret = NO_ERROR;
+    v4l2_buffer buf;
+    memset(&buf, 0, sizeof(buf));
+ buf.index = id;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed 0x%x", ret);
+ return FAILED_TRANSACTION;
+ }
+
+ return NO_ERROR;
+}
int V4LCameraAdapter::previewThread()
{
@@ -1113,36 +1392,65 @@ int V4LCameraAdapter::previewThread()
CameraFrame frame;
void *y_uv[2];
int index = 0;
+ int filledLen = 0;
int stride = 4096;
char *fp = NULL;
mParams.getPreviewSize(&width, &height);
- if (mPreviewing) {
+ {
+ android::AutoMutex lock(mLock);
+ if (!mPreviewing) {
+            // If stopPreview() is waiting on mStopCondition, let it proceed now.
+ android::AutoMutex stopLock(mStopLock);
+ mStopCondition.signal();
+ return ret;
+ }
+ }
- fp = this->GetFrame(index);
- if(!fp) {
- ret = BAD_VALUE;
- goto EXIT;
+ {
+ android::Mutex::Autolock lock(mSubscriberLock);
+ if ( mFrameSubscribers.size() == 0 ) {
+ return BAD_VALUE;
}
- CameraBuffer *buffer = mPreviewBufs.keyAt(index);
- CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(buffer);
- if (!lframe) {
- ret = BAD_VALUE;
- goto EXIT;
+ }
+
+ if (isNeedToUseDecoder()){
+
+ CAMHAL_LOGV("########### Decoder ###########");
+ int inIndex = -1, outIndex = -1;
+
+ if (GetFrame(index, filledLen) != NULL) {
+ CAMHAL_LOGD("Dequeued buffer from V4L with ID=%d", index);
+ mDecoder->queueInputBuffer(index);
}
- debugShowFPS();
+ while (NO_ERROR == mDecoder->dequeueInputBuffer(inIndex)) {
+ returnBufferToV4L(inIndex);
+ }
- if ( mFrameSubscribers.size() == 0 ) {
- ret = BAD_VALUE;
- goto EXIT;
+ while (NO_ERROR == mDecoder->dequeueOutputBuffer(outIndex)) {
+ returnOutputBuffer(outIndex);
}
- y_uv[0] = (void*) lframe->mYuv[0];
- //y_uv[1] = (void*) lframe->mYuv[1];
- //y_uv[1] = (void*) (lframe->mYuv[0] + height*stride);
- convertYUV422ToNV12Tiler ( (unsigned char*)fp, (unsigned char*)y_uv[0], width, height);
- CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; y= 0x%x; UV= 0x%x.",index, buffer, y_uv[0], y_uv[1] );
+
+ CAMHAL_LOGV("########### End Decode ###########");
+ goto EXIT;
+ }
+ else
+ {
+ fp = GetFrame(index, filledLen);
+
+ if(!fp) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
+ CAMHAL_LOGD("GOT IN frame with ID=%d",index);
+
+ CameraBuffer *buffer = mPreviewBufs[index];
+ if (mPixelFormat == V4L2_PIX_FMT_YUYV) {
+ convertYUV422ToNV12Tiler(reinterpret_cast<unsigned char*>(fp), reinterpret_cast<unsigned char*>(buffer->mapped), width, height);
+ }
+ CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; mapped= 0x%x.",index, buffer, buffer->mapped);
#ifdef SAVE_RAW_FRAMES
unsigned char* nv12_buff = (unsigned char*) malloc(width*height*3/2);
@@ -1152,6 +1460,8 @@ int V4LCameraAdapter::previewThread()
free (nv12_buff);
#endif
+ android::Mutex::Autolock lock(mSubscriberLock);
+
frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
frame.mBuffer = buffer;
frame.mLength = width*height*3/2;
@@ -1173,6 +1483,7 @@ int V4LCameraAdapter::previewThread()
ret = sendFrameToSubscribers(&frame);
}
}
+
EXIT:
return ret;
@@ -1208,14 +1519,14 @@ void detectVideoDevice(char** video_device_list, int& num_device) {
}
}
-extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index)
+extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index, CameraHal* hal)
{
CameraAdapter *adapter = NULL;
android::AutoMutex lock(gV4LAdapterLock);
LOG_FUNCTION_NAME;
- adapter = new V4LCameraAdapter(sensor_index);
+ adapter = new V4LCameraAdapter(sensor_index, hal);
if ( adapter ) {
CAMHAL_LOGDB("New V4L Camera adapter instance created for sensor %d",sensor_index);
} else {
diff --git a/camera/V4LCameraAdapter/V4LCapabilities.cpp b/camera/V4LCameraAdapter/V4LCapabilities.cpp
index 3a84268..f03ba9b 100644..100755
--- a/camera/V4LCameraAdapter/V4LCapabilities.cpp
+++ b/camera/V4LCameraAdapter/V4LCapabilities.cpp
@@ -43,11 +43,12 @@ static const char PARAM_SEP[] = ",";
//Camera defaults
const char V4LCameraAdapter::DEFAULT_PICTURE_FORMAT[] = "jpeg";
const char V4LCameraAdapter::DEFAULT_PICTURE_SIZE[] = "640x480";
-const char V4LCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv422i-yuyv";
+const char V4LCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv420sp";
const char V4LCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "640x480";
const char V4LCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6";
const char V4LCameraAdapter::DEFAULT_FRAMERATE[] = "30";
const char V4LCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity";
+const char V4LCameraAdapter::DEFAULT_FRAMERATE_RANGE[] = "30000,30000";
const char * V4LCameraAdapter::DEFAULT_VSTAB = android::CameraParameters::FALSE;
const char * V4LCameraAdapter::DEFAULT_VNF = android::CameraParameters::FALSE;
@@ -81,8 +82,7 @@ status_t V4LCameraAdapter::insertDefaults(CameraProperties::Properties* params,
params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, "320x240");
params->set(CameraProperties::JPEG_QUALITY, "90");
params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, "50");
- params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, "(30000,30000)");
- params->set(CameraProperties::FRAMERATE_RANGE, "30000,30000");
+ params->set(CameraProperties::FRAMERATE_RANGE, DEFAULT_FRAMERATE_RANGE);
params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT, "none");
params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, "auto");
params->set(CameraProperties::SUPPORTED_ISO_VALUES, "auto");
@@ -95,6 +95,12 @@ status_t V4LCameraAdapter::insertDefaults(CameraProperties::Properties* params,
params->set(CameraProperties::VSTAB, DEFAULT_VSTAB);
params->set(CameraProperties::VNF, DEFAULT_VNF);
+ //For compatibility
+ params->set(CameraProperties::SUPPORTED_ZOOM_RATIOS,"0");
+ params->set(CameraProperties::SUPPORTED_ZOOM_STAGES, "0");
+ params->set(CameraProperties::ZOOM, "0");
+ params->set(CameraProperties::ZOOM_SUPPORTED, "true");
+
LOG_FUNCTION_NAME_EXIT;
@@ -115,6 +121,8 @@ status_t V4LCameraAdapter::insertPreviewFormats(CameraProperties::Properties* pa
}
}
strncat(supported, android::CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
+ strncat (supported, PARAM_SEP, 1 );
+ strncat(supported, android::CameraParameters::PIXEL_FORMAT_YUV420SP, MAX_PROP_VALUE_LENGTH - 1);
params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported);
return NO_ERROR;
}
@@ -154,11 +162,11 @@ status_t V4LCameraAdapter::insertImageSizes(CameraProperties::Properties* params
status_t V4LCameraAdapter::insertFrameRates(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
char supported[MAX_PROP_VALUE_LENGTH];
- char temp[10];
+ char temp[MAX_PROP_VALUE_LENGTH];
memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
for (int i = 0; i < caps.ulFrameRateCount; i++) {
- snprintf (temp, 10, "%d", caps.ulFrameRates[i] );
+ snprintf (temp, sizeof(temp) - 1, "%d", caps.ulFrameRates[i] );
if (supported[0] != '\0') {
strncat(supported, PARAM_SEP, 1);
}
@@ -166,6 +174,17 @@ status_t V4LCameraAdapter::insertFrameRates(CameraProperties::Properties* params
}
params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, supported);
+
+ memset(supported, 0, sizeof(supported));
+
+ for (int i = caps.ulFrameRateCount - 1; i >= 0 ; i--) {
+ if ( supported[0] ) strncat(supported, PARAM_SEP, 1);
+ snprintf(temp, sizeof(temp) - 1, "(%d,%d)", caps.ulFrameRates[i] * CameraHal::VFR_SCALE, caps.ulFrameRates[i] * CameraHal::VFR_SCALE);
+ strcat(supported, temp);
+ }
+
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported);
+
return NO_ERROR;
}
@@ -262,7 +281,7 @@ status_t V4LCameraAdapter::getCaps(const int sensorId, CameraProperties::Propert
frmSizeEnum.index = i;
//Check for frame sizes for default pixel format
//TODO: Check for frame sizes for all supported pixel formats
- frmSizeEnum.pixel_format = V4L2_PIX_FMT_YUYV;
+ frmSizeEnum.pixel_format = DEFAULT_PIXEL_FORMAT;
status = ioctl (handle, VIDIOC_ENUM_FRAMESIZES, &frmSizeEnum);
if(frmSizeEnum.type != V4L2_FRMSIZE_TYPE_DISCRETE) {
break;
@@ -300,7 +319,7 @@ status_t V4LCameraAdapter::getCaps(const int sensorId, CameraProperties::Propert
for ( i = 0; status == NO_ERROR; i++) {
frmIvalEnum.index = i;
//Check for supported frame rates for the default pixel format.
- frmIvalEnum.pixel_format = V4L2_PIX_FMT_YUYV;
+ frmIvalEnum.pixel_format = DEFAULT_PIXEL_FORMAT;
frmIvalEnum.width = caps.tPreviewRes[j].width;
frmIvalEnum.height = caps.tPreviewRes[j].height;
diff --git a/camera/inc/ANativeWindowDisplayAdapter.h b/camera/inc/ANativeWindowDisplayAdapter.h
index 560e98d..eba91bb 100644
--- a/camera/inc/ANativeWindowDisplayAdapter.h
+++ b/camera/inc/ANativeWindowDisplayAdapter.h
@@ -86,6 +86,9 @@ public:
virtual status_t maxQueueableBuffers(unsigned int& queueable);
virtual status_t minUndequeueableBuffers(int& unqueueable);
+ // If set to true ANativeWindowDisplayAdapter will not lock/unlock graphic buffers
+ void setExternalLocking(bool extBuffLocking);
+
///Class specific functions
static void frameCallbackRelay(CameraFrame* caFrame);
void frameCallback(CameraFrame* caFrame);
@@ -173,6 +176,10 @@ private:
const char *mPixelFormat;
+    // When output buffers are consumed externally (for example by the
+    // Ducati decoder), DOMX handles locking/unlocking of graphic buffers.
+ bool mUseExternalBufferLocking;
+
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
//Used for calculating standby to first shot
struct timeval mStandbyToShot;
diff --git a/camera/inc/BufferSourceAdapter.h b/camera/inc/BufferSourceAdapter.h
index 436d2e5..c006b9d 100644
--- a/camera/inc/BufferSourceAdapter.h
+++ b/camera/inc/BufferSourceAdapter.h
@@ -37,32 +37,54 @@ class BufferSourceAdapter : public DisplayAdapter
{
// private types
private:
+ ///Constant declarations
+ static const int NO_BUFFERS_IMAGE_CAPTURE_SYSTEM_HEAP;
+
+
// helper class to return frame in different thread context
class ReturnFrame : public android::Thread {
public:
ReturnFrame(BufferSourceAdapter* __this) : mBufferSourceAdapter(__this) {
- mWaitForSignal.Create(0);
+ android::AutoMutex lock(mReturnFrameMutex);
mDestroying = false;
+ mFrameCount = 0;
}
~ReturnFrame() {
- mDestroying = true;
- mWaitForSignal.Release();
+ android::AutoMutex lock(mReturnFrameMutex);
}
void signal() {
- mWaitForSignal.Signal();
+ android::AutoMutex lock(mReturnFrameMutex);
+ mFrameCount++;
+ mReturnFrameCondition.signal();
+ }
+
+ virtual void requestExit() {
+ Thread::requestExit();
+
+ android::AutoMutex lock(mReturnFrameMutex);
+ mDestroying = true;
+ mReturnFrameCondition.signal();
}
virtual bool threadLoop() {
- mWaitForSignal.Wait();
- if (!mDestroying) mBufferSourceAdapter->handleFrameReturn();
+ android::AutoMutex lock(mReturnFrameMutex);
+ if ( 0 >= mFrameCount ) {
+ mReturnFrameCondition.wait(mReturnFrameMutex);
+ }
+ if (!mDestroying) {
+ mBufferSourceAdapter->handleFrameReturn();
+ mFrameCount--;
+ }
return true;
}
private:
BufferSourceAdapter* mBufferSourceAdapter;
- Utils::Semaphore mWaitForSignal;
+ android::Condition mReturnFrameCondition;
+ android::Mutex mReturnFrameMutex;
+ int mFrameCount;
bool mDestroying;
};
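The ReturnFrame rewrite above replaces a semaphore with a mutex, a condition variable, and an explicit mFrameCount, so a signal() delivered while the thread is still busy is not lost, and requestExit() can wake the loop for shutdown. The same pattern in portable C++, as a sketch (class and member names are illustrative, not from the HAL):

    #include <condition_variable>
    #include <mutex>

    // Counting wakeup: signal() may fire before the worker waits;
    // the pending count guarantees no notification is ever missed.
    class FrameSignal {
    public:
        void signal() {
            std::lock_guard<std::mutex> lock(mMutex);
            ++mPending;
            mCond.notify_one();
        }
        // Returns false when asked to exit.
        bool waitForFrame() {
            std::unique_lock<std::mutex> lock(mMutex);
            mCond.wait(lock, [this] { return mPending > 0 || mExiting; });
            if (mExiting) return false;
            --mPending;
            return true;
        }
        void requestExit() {
            std::lock_guard<std::mutex> lock(mMutex);
            mExiting = true;
            mCond.notify_one();
        }
    private:
        std::mutex mMutex;
        std::condition_variable mCond;
        int mPending = 0;
        bool mExiting = false;
    };

The predicate-based wait re-checks the pending count after every wakeup, which also makes spurious wakeups harmless.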
@@ -74,6 +96,17 @@ private:
}
~QueueFrame() {
+ }
+
+ void addFrame(CameraFrame *frame) {
+ android::AutoMutex lock(mFramesMutex);
+ mFrames.add(new CameraFrame(*frame));
+ mFramesCondition.signal();
+ }
+
+ virtual void requestExit() {
+ Thread::requestExit();
+
mDestroying = true;
android::AutoMutex lock(mFramesMutex);
@@ -84,12 +117,6 @@ private:
delete frame;
}
mFramesCondition.signal();
- }
-
- void addFrame(CameraFrame *frame) {
- android::AutoMutex lock(mFramesMutex);
- mFrames.add(new CameraFrame(*frame));
- mFramesCondition.signal();
}
virtual bool threadLoop() {
@@ -106,6 +133,12 @@ private:
if (frame) {
mBufferSourceAdapter->handleFrameCallback(frame);
frame->mMetaData.clear();
+
+ if (frame->mFrameType != CameraFrame::REPROCESS_INPUT_FRAME) {
+ // signal return frame thread that it can dequeue a buffer now
+ mBufferSourceAdapter->mReturnFrame->signal();
+ }
+
delete frame;
}
@@ -149,6 +182,11 @@ public:
virtual int freeBufferList(CameraBuffer * buflist);
virtual int maxQueueableBuffers(unsigned int& queueable);
virtual int minUndequeueableBuffers(int& unqueueable);
+ virtual bool match(const char * str);
+
+ virtual CameraBuffer * getBuffers(bool reset = false);
+ virtual unsigned int getSize();
+ virtual int getBufferCount();
static void frameCallback(CameraFrame* caFrame);
void addFrame(CameraFrame* caFrame);
diff --git a/camera/inc/CameraHal.h b/camera/inc/CameraHal.h
index e29518b..02004c9 100644
--- a/camera/inc/CameraHal.h
+++ b/camera/inc/CameraHal.h
@@ -64,6 +64,7 @@
#define HAL_PIXEL_FORMAT_TI_NV12 0x100
#define HAL_PIXEL_FORMAT_TI_Y8 0x103
#define HAL_PIXEL_FORMAT_TI_Y16 0x104
+#define HAL_PIXEL_FORMAT_TI_UYVY 0x105
#define MIN_WIDTH 640
#define MIN_HEIGHT 480
@@ -98,6 +99,8 @@
#define LOCK_BUFFER_TRIES 5
#define HAL_PIXEL_FORMAT_NV12 0x100
+#define OP_STR_SIZE 100
+
#define NONNEG_ASSIGN(x,y) \
if(x > -1) \
y = x
@@ -350,6 +353,17 @@ typedef struct _CameraBuffer {
int stride;
int height;
const char *format;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ struct timeval ppmStamp;
+
+#endif
+
+ /* These are for buffers which include borders */
+ int offset; // where valid data starts
+ int actual_size; // size of the entire buffer with borders
+ int privateData;
} CameraBuffer;
void * camera_buffer_get_omx_ptr (CameraBuffer *buffer);
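The new offset and actual_size fields describe buffers that carry padding borders: actual_size covers the whole allocation while offset points at the first valid byte. A hedged sketch of how a consumer locates the valid region; the struct below merely mirrors those fields, and mapped stands in for whatever CPU address the buffer maps to:

    #include <cstdint>

    struct BorderedBuffer {
        uint8_t *mapped;   // CPU address the buffer maps to (illustrative)
        int offset;        // where valid data starts, as in CameraBuffer
        int size;          // valid payload size
        int actual_size;   // whole allocation, borders included
    };

    inline uint8_t *validData(const BorderedBuffer &b) {
        return b.mapped + b.offset;            // first valid byte
    }
    inline bool fitsAllocation(const BorderedBuffer &b) {
        return b.offset + b.size <= b.actual_size;
    }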
@@ -617,6 +631,9 @@ public:
//additional methods used for memory mapping
virtual uint32_t * getOffsets() = 0;
virtual int getFd() = 0;
+ virtual CameraBuffer * getBuffers(bool reset = false) { return NULL; }
+ virtual unsigned int getSize() { return 0; }
+ virtual int getBufferCount() { return -1; }
virtual int freeBufferList(CameraBuffer * buf) = 0;
@@ -713,6 +730,7 @@ public:
void setVideoRes(int width, int height);
void flushEventQueue();
+ void setExternalLocking(bool extBuffLocking);
//Internal class definitions
class NotificationThread : public android::Thread {
@@ -748,6 +766,8 @@ private:
void copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType);
size_t calculateBufferSize(size_t width, size_t height, const char *pixelFormat);
const char* getContstantForPixelFormat(const char *pixelFormat);
+ void lockBufferAndUpdatePtrs(CameraFrame* frame);
+ void unlockBufferAndUpdatePtrs(CameraFrame* frame);
private:
mutable android::Mutex mLock;
@@ -803,6 +823,8 @@ private:
int mVideoWidth;
int mVideoHeight;
+ bool mExternalLocking;
+
};
@@ -1033,9 +1055,9 @@ public:
// Get min buffers display needs at any given time
virtual status_t minUndequeueableBuffers(int& unqueueable) = 0;
-protected:
- virtual const char* getPixFormatConstant(const char* parameters_format) const;
- virtual size_t getBufSize(const char* parameters_format, int width, int height) const;
+
+ // Given a vector of DisplayAdapters find the one corresponding to str
+ virtual bool match(const char * str) { return false; }
private:
#ifdef OMAP_ENHANCEMENT
@@ -1130,6 +1152,11 @@ public:
#endif
/**
+ * Release a tap-in or tap-out point.
+ */
+ int releaseBufferSource(struct preview_stream_ops *tapin, struct preview_stream_ops *tapout);
+
+ /**
* Stop a previously started preview.
*/
void stopPreview();
@@ -1225,6 +1252,9 @@ public:
status_t storeMetaDataInBuffers(bool enable);
+ // Use external locking for graphic buffers
+ void setExternalLocking(bool extBuffLocking);
+
//@}
/*--------------------Internal Member functions - Public---------------------------------*/
@@ -1267,6 +1297,13 @@ public:
void eventCallback(CameraHalEvent* event);
void setEventProvider(int32_t eventMask, MessageNotifier * eventProvider);
+ static const char* getPixelFormatConstant(const char* parameters_format);
+ static size_t calculateBufferSize(const char* parameters_format, int width, int height);
+ static void getXYFromOffset(unsigned int *x, unsigned int *y,
+ unsigned int offset, unsigned int stride,
+ const char* format);
+ static unsigned int getBPP(const char* format);
+
/*--------------------Internal Member functions - Private---------------------------------*/
private:
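getXYFromOffset() is declared above but its body is not part of this diff. For a row-major plane, the natural implementation inverts offset = y * stride + x * bpp, which is presumably what the helper does (the HAL version takes a format string and would derive bpp via getBPP()). A sketch under that assumption:

    // Sketch: invert a byte offset into (x, y) for a row-major plane.
    // stride is in bytes, bpp is bytes per pixel.
    static void xyFromOffset(unsigned int *x, unsigned int *y,
                             unsigned int offset, unsigned int stride,
                             unsigned int bpp) {
        *y = offset / stride;              // full rows before the offset
        *x = (offset % stride) / bpp;      // remaining bytes into the row
    }
    // Example: stride = 4096, bpp = 2 (YUYV), offset = 4096*10 + 64
    // gives y = 10, x = 32.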
@@ -1300,8 +1337,7 @@ private:
/** Allocate image capture buffers */
status_t allocImageBufs(unsigned int width, unsigned int height, size_t length,
- const char* previewFormat, unsigned int bufferCount,
- unsigned int *max_queueable);
+ const char* previewFormat, unsigned int bufferCount);
/** Allocate Raw buffers */
status_t allocRawBufs(int width, int height, const char* previewFormat, int bufferCount);
@@ -1346,10 +1382,13 @@ private:
void resetPreviewRes(android::CameraParameters *params);
// Internal __takePicture function - used in public takePicture() and reprocess()
- int __takePicture(const char* params);
+ int __takePicture(const char* params, struct timeval *captureStart = NULL);
//@}
-
+ status_t setTapoutLocked(struct preview_stream_ops *out);
+ status_t releaseTapoutLocked(struct preview_stream_ops *out);
+ status_t setTapinLocked(struct preview_stream_ops *in);
+ status_t releaseTapinLocked(struct preview_stream_ops *in);
/*----------Member variables - Public ---------------------*/
public:
int32_t mMsgEnabled;
@@ -1370,8 +1409,12 @@ public:
android::sp<AppCallbackNotifier> mAppCallbackNotifier;
android::sp<DisplayAdapter> mDisplayAdapter;
android::sp<MemoryManager> mMemoryManager;
- // TODO(XXX): May need to keep this as a vector in the future
- // when we can have multiple tap-in/tap-out points
+
+ android::Vector< android::sp<DisplayAdapter> > mOutAdapters;
+ android::Vector< android::sp<DisplayAdapter> > mInAdapters;
+
+ // TODO(XXX): Even though we support user setting multiple BufferSourceAdapters now
+ // only one tap in surface and one tap out surface is supported at a time.
android::sp<DisplayAdapter> mBufferSourceAdapter_In;
android::sp<DisplayAdapter> mBufferSourceAdapter_Out;
@@ -1441,6 +1484,7 @@ private:
uint32_t *mImageOffsets;
int mImageFd;
int mImageLength;
+ unsigned int mImageCount;
CameraBuffer *mPreviewBuffers;
uint32_t *mPreviewOffsets;
int mPreviewLength;
@@ -1473,6 +1517,8 @@ private:
int mVideoHeight;
android::String8 mCapModeBackup;
+
+ bool mExternalLocking;
};
} // namespace Camera
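With mOutAdapters and mInAdapters now vectors, the match(str) hook added to DisplayAdapter lets CameraHal locate the adapter bound to a given tap surface id. A sketch of that lookup, compiling against the HAL headers above (findAdapter itself is illustrative, not a HAL function):

    // Scan a vector of adapters for the one whose buffer source matches 'id'.
    template <typename VectorT>
    android::sp<DisplayAdapter> findAdapter(const VectorT &adapters, const char *id) {
        for (size_t i = 0; i < adapters.size(); i++) {
            if (adapters[i]->match(id)) {
                return adapters[i];
            }
        }
        return NULL;
    }

Run once over mOutAdapters and once over mInAdapters, this is presumably how the tap-in/tap-out ids carried in shot parameters get resolved.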
diff --git a/camera/inc/CameraProperties.h b/camera/inc/CameraProperties.h
index bfc6012..6d92341 100644
--- a/camera/inc/CameraProperties.h
+++ b/camera/inc/CameraProperties.h
@@ -49,6 +49,7 @@ enum OperatingMode {
MODE_VIDEO,
MODE_STEREO,
MODE_CPCAM,
+ MODE_VIDEO_HIGH_QUALITY,
MODE_MAX
};
diff --git a/camera/inc/DecoderFactory.h b/camera/inc/DecoderFactory.h
new file mode 100644
index 0000000..d5e566f
--- /dev/null
+++ b/camera/inc/DecoderFactory.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DECODERFACTORY_H_
+#define DECODERFACTORY_H_
+
+#include "FrameDecoder.h"
+
+namespace Ti {
+namespace Camera {
+
+class DecoderFactory {
+ DecoderFactory();
+ ~DecoderFactory();
+public:
+ static FrameDecoder* createDecoderByType(DecoderType type, bool forceSwDecoder = false);
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif /* DECODERFACTORY_H_ */
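A plausible call sequence for the new factory, combined with the FrameDecoder interface added later in this patch; the dimensions and buffer counts are illustrative:

    // Sketch: create and drive an MJPEG decoder through the factory.
    void runMjpegDecoder() {
        DecoderParameters params;
        params.width = 640;                // illustrative values
        params.height = 480;
        params.inputBufferCount = 4;
        params.outputBufferCount = 4;

        FrameDecoder *decoder =
                DecoderFactory::createDecoderByType(DecoderType_MJPEG,
                                                    /* forceSwDecoder = */ false);
        decoder->configure(params);
        if (decoder->start() == NO_ERROR) {
            // queue input / dequeue output buffers here ...
            decoder->stop();
        }
        decoder->release();
        delete decoder;
    }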
diff --git a/camera/inc/Decoder_libjpeg.h b/camera/inc/Decoder_libjpeg.h
new file mode 100755
index 0000000..425ebf1
--- /dev/null
+++ b/camera/inc/Decoder_libjpeg.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_CAMERA_HARDWARE_DECODER_LIBJPEG_H
+#define ANDROID_CAMERA_HARDWARE_DECODER_LIBJPEG_H
+
+#include "CameraHal.h"
+
+extern "C" {
+#include "jhead.h"
+
+#undef TRUE
+#undef FALSE
+
+}
+
+
+namespace Ti {
+namespace Camera {
+
+class Decoder_libjpeg
+{
+
+public:
+ Decoder_libjpeg();
+ ~Decoder_libjpeg();
+ static int readDHTSize();
+ static bool isDhtExist(unsigned char *jpeg_src, int filled_len);
+ static int appendDHT(unsigned char *jpeg_src, int filled_len, unsigned char *jpeg_with_dht_buffer, int buff_size);
+ bool decode(unsigned char *jpeg_src, int filled_len, unsigned char *nv12_buffer, int stride);
+
+private:
+ void release();
+ unsigned char **Y_Plane;
+ unsigned char **U_Plane;
+ unsigned char **V_Plane;
+ unsigned char *UV_Plane;
+ unsigned int mWidth, mHeight;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif
+
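Many MJPEG cameras emit frames without the DHT (Huffman table) segment that a baseline JPEG decoder requires; the static isDhtExist()/appendDHT() helpers exist to patch that up. A sketch of the intended flow, assuming appendDHT() returns the patched length and the caller provides a scratch buffer large enough for frame plus tables:

    // Sketch: ensure a JPEG frame carries a DHT before decoding to NV12.
    // jpegSrc/filledLen come from the camera; dhtBuf is caller-allocated.
    bool decodeMjpegFrame(Decoder_libjpeg &dec,
                          unsigned char *jpegSrc, int filledLen,
                          unsigned char *dhtBuf, int dhtBufSize,
                          unsigned char *nv12Out, int stride) {
        if (Decoder_libjpeg::isDhtExist(jpegSrc, filledLen)) {
            return dec.decode(jpegSrc, filledLen, nv12Out, stride);
        }
        int newLen = Decoder_libjpeg::appendDHT(jpegSrc, filledLen,
                                                dhtBuf, dhtBufSize);
        return dec.decode(dhtBuf, newLen, nv12Out, stride);
    }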
diff --git a/camera/inc/FrameDecoder.h b/camera/inc/FrameDecoder.h
new file mode 100644
index 0000000..fab0544
--- /dev/null
+++ b/camera/inc/FrameDecoder.h
@@ -0,0 +1,173 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEDECODER_H_
+#define FRAMEDECODER_H_
+
+#include <utils/Vector.h>
+#include <utils/StrongPointer.h>
+#include "CameraHal.h"
+
+
+namespace Ti {
+namespace Camera {
+
+enum DecoderType {
+ DecoderType_MJPEG,
+ DecoderType_H264
+};
+
+enum BufferStatus {
+ BufferStatus_Unknown,
+ BufferStatus_InQueued,
+ BufferStatus_InWaitForEmpty,
+ BufferStatus_InDecoded,
+ BufferStatus_OutQueued,
+ BufferStatus_OutWaitForFill,
+ BufferStatus_OutFilled
+};
+
+enum DecoderState {
+ DecoderState_Uninitialized,
+ DecoderState_Initialized,
+ DecoderState_Running,
+ DecoderState_Requested_Stop,
+ DecoderState_Stoppped
+};
+
+class MediaBuffer: public virtual android::RefBase {
+
+public:
+ MediaBuffer()
+ : bufferId(-1), buffer(0), filledLen(0), size(0),
+ mOffset(0), mTimestamp(0), mStatus(BufferStatus_Unknown) {
+ }
+
+ MediaBuffer(int id, void* buffer, size_t buffSize = 0)
+ : bufferId(id), buffer(buffer), filledLen(0), size(buffSize),
+ mOffset(0), mTimestamp(0), mStatus(BufferStatus_Unknown) {
+ }
+
+ virtual ~MediaBuffer() {
+ }
+
+ int bufferId;
+ void* buffer;
+ int filledLen;
+ size_t size;
+
+ nsecs_t getTimestamp() const {
+ return mTimestamp;
+ }
+ void setTimestamp(nsecs_t ts) {
+ mTimestamp = ts;
+ }
+
+ BufferStatus getStatus() const {
+ return mStatus;
+ }
+
+ void setStatus(BufferStatus status) {
+ mStatus = status;
+ }
+
+ android::Mutex& getLock() const {
+ return mLock;
+ }
+
+ uint32_t getOffset() const {
+ return mOffset;
+ }
+
+ void setOffset(uint32_t offset) {
+ mOffset = offset;
+ }
+
+private:
+ uint32_t mOffset;
+ nsecs_t mTimestamp;
+ BufferStatus mStatus;
+ mutable android::Mutex mLock;
+};
+
+struct DecoderParameters {
+ int width;
+ int height;
+ int inputBufferCount;
+ int outputBufferCount;
+};
+
+class FrameDecoder {
+public:
+ FrameDecoder();
+ virtual ~FrameDecoder();
+ void configure(const DecoderParameters& config);
+ status_t start();
+ void stop();
+ void release();
+ void flush();
+ status_t queueInputBuffer(int id);
+ status_t dequeueInputBuffer(int &id);
+ status_t queueOutputBuffer(int id);
+ status_t dequeueOutputBuffer(int &id);
+
+ void registerOutputBuffers(android::Vector< android::sp<MediaBuffer> > *outBuffers) {
+ android::AutoMutex lock(mLock);
+ mOutQueue.clear();
+ mOutBuffers = outBuffers;
+ }
+
+ void registerInputBuffers(android::Vector< android::sp<MediaBuffer> > *inBuffers) {
+ android::AutoMutex lock(mLock);
+ mInQueue.clear();
+ mInBuffers = inBuffers;
+ }
+
+ virtual bool getPaddedDimensions(size_t &width, size_t &height) {
+ return false;
+ }
+
+ void setHal(CameraHal* hal) {
+ mCameraHal = hal;
+ }
+
+protected:
+ virtual void doConfigure(const DecoderParameters& config) = 0;
+ virtual void doProcessInputBuffer() = 0;
+ virtual status_t doStart() = 0;
+ virtual void doStop() = 0;
+ virtual void doFlush() = 0;
+ virtual void doRelease() = 0;
+
+ DecoderParameters mParams;
+
+ android::Vector<int> mInQueue;
+ android::Vector<int> mOutQueue;
+
+ android::Vector< android::sp<MediaBuffer> >* mInBuffers;
+ android::Vector< android::sp<MediaBuffer> >* mOutBuffers;
+
+ CameraHal* mCameraHal;
+
+private:
+ DecoderState mState;
+ android::Mutex mLock;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif /* FRAMEDECODER_H_ */
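The interface above follows the familiar two-queue codec pattern: filled input buffers travel to the decoder via queueInputBuffer() and come back empty through dequeueInputBuffer(), while output buffers flow the opposite way. A sketch of one iteration, assuming registerInputBuffers()/registerOutputBuffers() have already run:

    // Sketch: one decode iteration against the FrameDecoder interface.
    void pumpOnce(FrameDecoder &dec, int inId, int outId) {
        dec.queueInputBuffer(inId);      // filled compressed frame -> decoder
        dec.queueOutputBuffer(outId);    // empty output buffer -> decoder

        int doneIn = -1, doneOut = -1;
        if (dec.dequeueInputBuffer(doneIn) == NO_ERROR) {
            // doneIn is empty again and can take the next compressed frame
        }
        if (dec.dequeueOutputBuffer(doneOut) == NO_ERROR) {
            // doneOut holds decoded pixels, ready for the display path
        }
    }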
diff --git a/camera/inc/General3A_Settings.h b/camera/inc/General3A_Settings.h
index 777b255..c1e017c 100644
--- a/camera/inc/General3A_Settings.h
+++ b/camera/inc/General3A_Settings.h
@@ -241,13 +241,15 @@ class Gen3A_settings{
OMX_BOOL FocusLock;
OMX_BOOL WhiteBalanceLock;
- OMX_BOOL AlgoFixedGamma;
+ OMX_BOOL AlgoExternalGamma;
OMX_BOOL AlgoNSF1;
OMX_BOOL AlgoNSF2;
OMX_BOOL AlgoSharpening;
OMX_BOOL AlgoThreeLinColorMap;
OMX_BOOL AlgoGIC;
+ OMX_TI_CONFIG_GAMMATABLE_TYPE mGammaTable;
+
};
/*
@@ -274,12 +276,13 @@ enum E3ASettingsFlags
SetMeteringAreas = 1 << 18,
SetManualExposure = 1 << 19,
- SetAlgoFixedGamma = 1 << 20,
+ SetAlgoExternalGamma = 1 << 20,
SetAlgoNSF1 = 1 << 21,
SetAlgoNSF2 = 1 << 22,
SetAlgoSharpening = 1 << 23,
SetAlgoThreeLinColorMap = 1 << 24,
SetAlgoGIC = 1 << 25,
+ SetGammaTable = 1 << 26,
E3aSettingMax,
diff --git a/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
index 0d67036..b2da574 100644
--- a/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
+++ b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
@@ -213,10 +213,11 @@ public:
{
INITIAL_MODE = -1,
HIGH_SPEED = 1,
- HIGH_QUALITY = 2,
- VIDEO_MODE = 3,
- HIGH_QUALITY_ZSL = 4,
- CP_CAM = 5,
+ HIGH_QUALITY,
+ VIDEO_MODE,
+ HIGH_QUALITY_ZSL,
+ CP_CAM,
+ VIDEO_MODE_HQ,
};
enum IPPMode
@@ -258,12 +259,13 @@ public:
enum CaptureSettingsFlags {
SetFormat = 1 << 0,
SetThumb = 1 << 1,
- SetExpBracket = 1 << 2,
+ SetBurstExpBracket = 1 << 2,
SetQuality = 1 << 3,
SetRotation = 1 << 4,
- SetBurst = 1 << 5,
ECaptureSettingMax,
- ECapturesettingsAll = ( ((ECaptureSettingMax -1 ) << 1) -1 ) /// all possible flags raised
+ ECapturesettingsAll = ( ((ECaptureSettingMax -1 ) << 1) -1 ), /// all possible flags raised
+ ECaptureParamSettings = SetFormat | SetThumb | SetQuality, // Settings set with SetParam
+ ECaptureConfigSettings = (ECapturesettingsAll & ~ECaptureParamSettings)
};
enum PreviewSettingsFlags {
@@ -350,6 +352,7 @@ public:
OMX_U32 mMaxFrameRate;
CameraFrame::FrameType mImageType;
OMX_TI_STEREOFRAMELAYOUTTYPE mFrameLayoutType;
+ CameraBufferType mBufferType;
CameraBuffer * lookup_omx_buffer (OMX_BUFFERHEADERTYPE *pBufHeader);
enum {
@@ -576,13 +579,17 @@ private:
const OMX_BOOL data, const char *msg);
status_t setParameter3ABoolInvert(const OMX_INDEXTYPE omx_idx,
const OMX_BOOL data, const char *msg);
- status_t setAlgoFixedGamma(Gen3A_settings& Gen3A);
+ status_t setAlgoExternalGamma(Gen3A_settings& Gen3A);
status_t setAlgoNSF1(Gen3A_settings& Gen3A);
status_t setAlgoNSF2(Gen3A_settings& Gen3A);
status_t setAlgoSharpening(Gen3A_settings& Gen3A);
status_t setAlgoThreeLinColorMap(Gen3A_settings& Gen3A);
status_t setAlgoGIC(Gen3A_settings& Gen3A);
+ //Gamma table
+ void updateGammaTable(const char* gamma);
+ status_t setGammaTable(Gen3A_settings& Gen3A);
+
status_t getEVCompensation(Gen3A_settings& Gen3A);
status_t getWBMode(Gen3A_settings& Gen3A);
status_t getSharpness(Gen3A_settings& Gen3A);
@@ -909,6 +916,8 @@ private:
static const int SENSORID_OV14825;
static const int SENSORID_S5K4E1GA;
static const int SENSORID_S5K6A1GX03;
+ static const int SENSORID_OV8830;
+ static const int SENSORID_OV2722;
static const CapU32 mFacing [];
static const userToOMX_LUT mAutoConvergence [];
static const LUTtype mAutoConvergenceLUT;
@@ -1094,6 +1103,7 @@ private:
bool mCaptureConfigured;
unsigned int mPendingCaptureSettings;
unsigned int mPendingPreviewSettings;
+ unsigned int mPendingReprocessSettings;
OMX_TI_ANCILLARYDATATYPE* mCaptureAncillaryData;
OMX_TI_WHITEBALANCERESULTTYPE* mWhiteBalanceData;
bool mReprocConfigured;
@@ -1199,6 +1209,8 @@ private:
bool mTunnelDestroyed;
bool mPreviewPortInitialized;
+ // Used for allocations that need to be sent to Ducati
+ MemoryManager mMemMgr;
};
} // namespace Camera
diff --git a/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h b/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h
index d57843e..e791727 100644
--- a/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h
+++ b/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h
@@ -539,12 +539,212 @@ static const SceneModesEntry OV5650_SceneModesLUT [] = {
OMX_WhiteBalControlAuto },
};
+static const SceneModesEntry OV8830_SceneModesLUT [] = {
+ { OMX_Closeup,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Landscape,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlSunLight },
+ { OMX_Underwater,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Sport,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SnowBeach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Mood,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightPortrait,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlPortrait,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightIndoor,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Fireworks,
+ OMX_IMAGE_FlashControlOn,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Document,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Barcode,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SuperNight,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoInfinity,
+ OMX_WhiteBalControlAuto },
+ { OMX_Cine,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_OldFilm,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Action,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Beach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Candlelight,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlIncandescent },
+ { OMX_TI_Night,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Party,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoLock,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Portrait,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Snow,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Steadyphoto,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Sunset,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ ( OMX_WHITEBALCONTROLTYPE ) OMX_TI_WhiteBalControlSunset },
+ { OMX_TI_Theatre,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+};
+
+static const SceneModesEntry OV2722_SceneModesLUT [] = {
+ { OMX_Closeup,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Landscape,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlSunLight },
+ { OMX_Underwater,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Sport,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SnowBeach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Mood,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightPortrait,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_NightIndoor,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Fireworks,
+ OMX_IMAGE_FlashControlOn,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Document,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_Barcode,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_SuperNight,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlAutoInfinity,
+ OMX_WhiteBalControlAuto },
+ { OMX_Cine,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_OldFilm,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Action,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Beach,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Candlelight,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlIncandescent },
+ { OMX_TI_Night,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Party,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Portrait,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Snow,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Steadyphoto,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+ { OMX_TI_Sunset,
+ OMX_IMAGE_FlashControlAuto,
+ OMX_IMAGE_FocusControlHyperfocal,
+ ( OMX_WHITEBALCONTROLTYPE ) OMX_TI_WhiteBalControlSunset },
+ { OMX_TI_Theatre,
+ OMX_IMAGE_FlashControlOff,
+ OMX_IMAGE_FocusControlHyperfocal,
+ OMX_WhiteBalControlAuto },
+};
+
static const CameraToSensorModesLUTEntry CameraToSensorModesLUT [] = {
{ "S5K4E1GA", S5K4E1GA_SceneModesLUT, ARRAY_SIZE(S5K4E1GA_SceneModesLUT)},
{ "S5K6A1GX03", S5K6A1GX03_SceneModesLUT, ARRAY_SIZE(S5K6A1GX03_SceneModesLUT)},
{ "IMX060", IMX060_SceneModesLUT, ARRAY_SIZE(IMX060_SceneModesLUT)},
{ "OV5640", OV5640_SceneModesLUT, ARRAY_SIZE(OV5640_SceneModesLUT)},
{ "OV5650", OV5650_SceneModesLUT, ARRAY_SIZE(OV5650_SceneModesLUT)},
+ { "OV8830", OV8830_SceneModesLUT, ARRAY_SIZE(OV8830_SceneModesLUT)},
+ { "OV2722", OV2722_SceneModesLUT, ARRAY_SIZE(OV2722_SceneModesLUT)}
};
} // namespace Camera
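The two new LUTs plug into CameraToSensorModesLUT, which maps a sensor name to its table, so resolving a scene mode is two nested linear scans. A self-contained sketch; the field names are assumptions mirroring the initializer layout above:

    #include <cstddef>
    #include <cstring>

    // Mirrors SceneModesEntry / CameraToSensorModesLUTEntry (names assumed).
    struct Entry { int scene; /* flash, focus, white balance follow */ };
    struct SensorLut { const char *sensor; const Entry *table; size_t count; };

    const Entry *findSceneEntry(const SensorLut *luts, size_t n,
                                const char *sensor, int scene) {
        for (size_t i = 0; i < n; i++) {
            if (strcmp(luts[i].sensor, sensor) != 0)
                continue;
            for (size_t j = 0; j < luts[i].count; j++)
                if (luts[i].table[j].scene == scene)
                    return &luts[i].table[j];
        }
        return NULL;
    }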
diff --git a/camera/inc/OmxFrameDecoder.h b/camera/inc/OmxFrameDecoder.h
new file mode 100644
index 0000000..7cbbf2c
--- /dev/null
+++ b/camera/inc/OmxFrameDecoder.h
@@ -0,0 +1,204 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OMXFRAMEDECODER_H_
+#define OMXFRAMEDECODER_H_
+
+
+#include <utils/threads.h>
+#include <utils/List.h>
+#include "FrameDecoder.h"
+#include "OMX_Types.h"
+#include "OMX_Core.h"
+#include "OMX_Component.h"
+#include "Decoder_libjpeg.h"
+
+namespace Ti {
+namespace Camera {
+
+enum OmxDecoderState {
+ OmxDecoderState_Unloaded = 0,
+ OmxDecoderState_Loaded,
+ OmxDecoderState_Idle,
+ OmxDecoderState_Executing,
+ OmxDecoderState_Error,
+ OmxDecoderState_Invalid,
+ OmxDecoderState_Reconfigure,
+ OmxDecoderState_Exit
+};
+
+enum PortType {
+ PortIndexInput = 0,
+ PortIndexOutput = 1
+};
+
+
+struct OmxMessage {
+ enum {
+ EVENT,
+ EMPTY_BUFFER_DONE,
+ FILL_BUFFER_DONE,
+ } type;
+
+ union {
+ // if type == EVENT
+ struct {
+ OMX_PTR appData;
+ OMX_EVENTTYPE event;
+ OMX_U32 data1;
+ OMX_U32 data2;
+ OMX_PTR pEventData;
+ } eventData;
+
+ // if type == (EMPTY_BUFFER_DONE || FILL_BUFFER_DONE)
+ struct {
+ OMX_PTR appData;
+ OMX_BUFFERHEADERTYPE* pBuffHead;
+ } bufferData;
+ } u;
+};
+
+class CallbackDispatcher;
+
+struct CallbackDispatcherThread : public android::Thread {
+ CallbackDispatcherThread(CallbackDispatcher *dispatcher)
+ : mDispatcher(dispatcher) {
+ }
+
+private:
+ CallbackDispatcher *mDispatcher;
+
+ bool threadLoop();
+
+ CallbackDispatcherThread(const CallbackDispatcherThread &);
+ CallbackDispatcherThread &operator=(const CallbackDispatcherThread &);
+};
+
+class CallbackDispatcher
+{
+
+public:
+ CallbackDispatcher();
+ ~CallbackDispatcher();
+
+ void post(const OmxMessage &msg);
+ bool loop();
+
+private:
+ void dispatch(const OmxMessage &msg);
+
+ CallbackDispatcher(const CallbackDispatcher &);
+ CallbackDispatcher &operator=(const CallbackDispatcher &);
+
+ android::Mutex mLock;
+ android::Condition mQueueChanged;
+ android::List<OmxMessage> mQueue;
+ android::sp<CallbackDispatcherThread> mThread;
+ bool mDone;
+};
+
+class OmxFrameDecoder : public FrameDecoder
+{
+
+public:
+ OmxFrameDecoder(DecoderType type = DecoderType_MJPEG);
+ virtual ~OmxFrameDecoder();
+
+ OMX_ERRORTYPE eventHandler(const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData);
+ OMX_ERRORTYPE fillBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead);
+ OMX_ERRORTYPE emptyBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead);
+
+ static OMX_ERRORTYPE eventCallback(const OMX_HANDLETYPE component,
+ const OMX_PTR appData, const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData);
+ static OMX_ERRORTYPE emptyBufferDoneCallback(OMX_HANDLETYPE hComponent, OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead);
+ static OMX_ERRORTYPE fillBufferDoneCallback(OMX_HANDLETYPE hComponent, OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead);
+
+ virtual bool getPaddedDimensions(size_t &width, size_t &height);
+
+protected:
+ virtual void doConfigure (const DecoderParameters& config);
+ virtual void doProcessInputBuffer();
+ virtual status_t doStart();
+ virtual void doStop();
+ virtual void doFlush();
+ virtual void doRelease();
+
+private:
+ status_t setComponentRole();
+ status_t enableGrallockHandles();
+ status_t allocateBuffersOutput();
+ void freeBuffersOnOutput();
+ void freeBuffersOnInput();
+ status_t doPortReconfigure();
+ void dumpPortSettings(PortType port);
+ status_t getAndConfigureDecoder();
+ status_t configureJpegPorts(int width, int height);
+ status_t switchToIdle();
+ status_t allocateBuffersInput();
+ status_t disablePortSync(int port);
+ status_t enablePortSync(int port);
+ void queueOutputBuffers();
+ status_t setVideoOutputFormat(OMX_U32 width, OMX_U32 height);
+
+
+ status_t omxInit();
+ status_t omxGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData, OMX_CALLBACKTYPE & callbacks);
+ OmxDecoderState getOmxState() { return mCurrentState; }
+ status_t commitState(OmxDecoderState state) { mPreviousState = mCurrentState; mCurrentState = state; return NO_ERROR; }
+ status_t setVideoPortFormatType(
+ OMX_U32 portIndex,
+ OMX_VIDEO_CODINGTYPE compressionFormat,
+ OMX_COLOR_FORMATTYPE colorFormat);
+ status_t omxGetParameter(OMX_INDEXTYPE index, OMX_PTR ptr);
+ status_t omxSetParameter(OMX_INDEXTYPE index, OMX_PTR ptr);
+ status_t omxSendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param);
+ status_t omxGetConfig(OMX_INDEXTYPE index, OMX_PTR ptr);
+ status_t omxSetConfig(OMX_INDEXTYPE index, OMX_PTR ptr);
+ status_t omxFillThisBuffer(OMX_BUFFERHEADERTYPE *pOutBufHdr);
+ status_t omxEmptyThisBuffer(android::sp<MediaBuffer>& inBuffer, OMX_BUFFERHEADERTYPE *pInBufHdr);
+ void omxDumpPortSettings(OMX_PARAM_PORTDEFINITIONTYPE& def);
+ void omxDumpBufferHeader (OMX_BUFFERHEADERTYPE* bh);
+ status_t omxSwitchToExecutingSync();
+
+ bool mOmxInialized;
+
+ OMX_HANDLETYPE mHandleComp;
+ OmxDecoderState mCurrentState;
+ OmxDecoderState mPreviousState;
+
+ // Condition and Mutex used during OpenMAX state transitions & command completion
+ android::Condition mStateCondition;
+ android::Mutex mHwLock;
+
+ android::Vector<OMX_BUFFERHEADERTYPE*> mOutBufferHeaders;
+ android::Vector<OMX_BUFFERHEADERTYPE*> mInBufferHeaders;
+
+ CallbackDispatcher mDispatcher;
+
+ bool mStopping;
+ DecoderType mDecoderType;
+
+ // If true, search for a DHT segment in the JPEG buffer
+ bool mIsNeedCheckDHT;
+ // If true, always append a DHT segment to the JPEG buffer
+ bool mAlwaysAppendDHT;
+};
+
+} //namespace Camera
+} //namespace Ti
+#endif /* OMXFRAMEDECODER_H_ */
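OpenMAX callbacks must return quickly, so OmxFrameDecoder funnels every event through CallbackDispatcher: the static callbacks only post() a message, and CallbackDispatcherThread drains the queue. The sketch below shows the heart of that post()/loop() handoff using standard C++ primitives (the HAL itself uses android::Mutex and android::Condition):

    #include <condition_variable>
    #include <deque>
    #include <mutex>

    // Minimal post()/loop() message pump; Msg stands in for OmxMessage.
    struct Msg { int type; };

    class Dispatcher {
    public:
        void post(const Msg &m) {
            std::lock_guard<std::mutex> lock(mLock);
            mQueue.push_back(m);
            mChanged.notify_one();
        }
        // Runs on the dispatcher thread; returns after stop().
        void loop() {
            std::unique_lock<std::mutex> lock(mLock);
            while (!mDone) {
                mChanged.wait(lock, [this] { return mDone || !mQueue.empty(); });
                while (!mQueue.empty()) {
                    Msg m = mQueue.front();
                    mQueue.pop_front();
                    lock.unlock();     // never dispatch while holding the lock
                    dispatch(m);
                    lock.lock();
                }
            }
        }
        void stop() {
            std::lock_guard<std::mutex> lock(mLock);
            mDone = true;
            mChanged.notify_one();
        }
    private:
        void dispatch(const Msg &) { /* handle EVENT / EMPTY / FILL here */ }
        std::mutex mLock;
        std::condition_variable mChanged;
        std::deque<Msg> mQueue;
        bool mDone = false;
    };

Dropping the lock around dispatch() keeps a slow handler from blocking the OMX thread that is posting.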
diff --git a/camera/inc/SwFrameDecoder.h b/camera/inc/SwFrameDecoder.h
new file mode 100644
index 0000000..f123940
--- /dev/null
+++ b/camera/inc/SwFrameDecoder.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SWFRAMEDECODER_H_
+#define SWFRAMEDECODER_H_
+
+#include "FrameDecoder.h"
+#include "Decoder_libjpeg.h"
+
+namespace Ti {
+namespace Camera {
+
+class SwFrameDecoder: public FrameDecoder {
+public:
+ SwFrameDecoder();
+ virtual ~SwFrameDecoder();
+
+protected:
+ virtual void doConfigure(const DecoderParameters& config);
+ virtual void doProcessInputBuffer();
+ virtual status_t doStart() { return NO_ERROR; }
+ virtual void doStop() { }
+ virtual void doFlush() { }
+ virtual void doRelease() { }
+
+private:
+ int mjpegWithHdrSize;
+ Decoder_libjpeg mJpgdecoder;
+ unsigned char* mJpegWithHeaderBuffer;
+};
+
+} // namespace Camera
+} // namespace Ti
+#endif /* SWFRAMEDECODER_H_ */
diff --git a/camera/inc/TICameraParameters.h b/camera/inc/TICameraParameters.h
index 84fa9fa..c5a24e9 100644
--- a/camera/inc/TICameraParameters.h
+++ b/camera/inc/TICameraParameters.h
@@ -135,6 +135,7 @@ static const char HIGH_QUALITY_MODE[];
static const char HIGH_QUALITY_ZSL_MODE[];
static const char CP_CAM_MODE[];
static const char VIDEO_MODE[];
+static const char VIDEO_MODE_HQ[];
static const char EXPOSURE_BRACKETING[];
static const char ZOOM_BRACKETING[];
static const char TEMP_BRACKETING[];
@@ -244,13 +245,16 @@ static const char KEY_MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED[];
static const char KEY_MECHANICAL_MISALIGNMENT_CORRECTION[];
//TI extensions for enable/disable algos
-static const char KEY_ALGO_FIXED_GAMMA[];
+static const char KEY_ALGO_EXTERNAL_GAMMA[];
static const char KEY_ALGO_NSF1[];
static const char KEY_ALGO_NSF2[];
static const char KEY_ALGO_SHARPENING[];
static const char KEY_ALGO_THREELINCOLORMAP[];
static const char KEY_ALGO_GIC[];
+//Gamma table
+static const char KEY_GAMMA_TABLE[];
+
};
} // namespace Camera
diff --git a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
index cf277ac..2189727 100644..100755
--- a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
+++ b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -24,11 +24,19 @@
#include "CameraHal.h"
#include "BaseCameraAdapter.h"
#include "DebugUtils.h"
+#include "Decoder_libjpeg.h"
+#include "FrameDecoder.h"
+
namespace Ti {
namespace Camera {
+#ifndef V4L2_PIX_FMT_H264
+#define V4L2_PIX_FMT_H264 0
+#endif
+
#define DEFAULT_PIXEL_FORMAT V4L2_PIX_FMT_YUYV
+#define DEFAULT_CAPTURE_FORMAT V4L2_PIX_FMT_YUYV
#define NB_BUFFER 10
#define DEVICE "/dev/videoxx"
@@ -99,7 +107,7 @@ public:
public:
- V4LCameraAdapter(size_t sensor_index);
+ V4LCameraAdapter(size_t sensor_index, CameraHal* hal);
~V4LCameraAdapter();
@@ -116,6 +124,8 @@ public:
static status_t getCaps(const int sensorId, CameraProperties::Properties* params, V4L_HANDLETYPE handle);
+ void setupWorkingMode();
+
protected:
//----------Parent class method implementation------------------------------------
@@ -127,7 +137,7 @@ protected:
virtual status_t useBuffers(CameraMode mode, CameraBuffer *bufArr, int num, size_t length, unsigned int queueable);
virtual status_t fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::FrameType frameType);
virtual status_t getFrameSize(size_t &width, size_t &height);
- virtual status_t getPictureBufferSize(CameraFrame *frame, size_t bufferCount);
+ virtual status_t getPictureBufferSize(CameraFrame &frame, size_t bufferCount);
virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);
//-----------------------------------------------------------------------------
@@ -153,12 +163,10 @@ private:
//Used for calculation of the average frame rate during preview
status_t recalculateFPS();
- char * GetFrame(int &index);
+ char * GetFrame(int &index, int &filledLen);
int previewThread();
-public:
-
private:
//capabilities data
static const CapPixelformat mPixelformats [];
@@ -175,6 +183,7 @@ private:
static const char DEFAULT_PICTURE_FORMAT[];
static const char DEFAULT_PICTURE_SIZE[];
static const char DEFAULT_FOCUS_MODE[];
+ static const char DEFAULT_FRAMERATE_RANGE[];
static const char * DEFAULT_VSTAB;
static const char * DEFAULT_VNF;
@@ -193,23 +202,23 @@ private:
status_t v4lStopStreaming(int nBufferCount);
status_t v4lSetFormat(int, int, uint32_t);
status_t restartPreview();
-
+ status_t applyFpsValue();
+ status_t returnBufferToV4L(int id);
+ void returnOutputBuffer(int index);
+ bool isNeedToUseDecoder() const;
int mPreviewBufferCount;
int mPreviewBufferCountQueueable;
int mCaptureBufferCount;
int mCaptureBufferCountQueueable;
- android::KeyedVector<CameraBuffer *, int> mPreviewBufs;
+ CameraBuffer *mPreviewBufs[NB_BUFFER];
android::KeyedVector<CameraBuffer *, int> mCaptureBufs;
- mutable android::Mutex mPreviewBufsLock;
- mutable android::Mutex mCaptureBufsLock;
- mutable android::Mutex mStopPreviewLock;
android::CameraParameters mParams;
bool mPreviewing;
bool mCapturing;
- android::Mutex mLock;
+ mutable android::Mutex mLock;
int mFrameCount;
int mLastFrameCount;
@@ -229,7 +238,21 @@ private:
int nQueued;
int nDequeued;
+ int mQueuedOutputBuffers;
+
+ FrameDecoder* mDecoder;
+ android::Vector< android::sp<MediaBuffer> > mInBuffers;
+ android::Vector< android::sp<MediaBuffer> > mOutBuffers;
+
+ android::Mutex mV4LLock;
+
+ int mPixelFormat;
+ int mFrameRate;
+
+ android::Mutex mStopLock;
+ android::Condition mStopCondition;
+ CameraHal* mCameraHal;
};
} // namespace Camera
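The V4L adapter now owns a FrameDecoder plus in/out MediaBuffer vectors because preview frames may arrive compressed. isNeedToUseDecoder() is declared above without a body; a plausible reading, given DEFAULT_PIXEL_FORMAT and the V4L2_PIX_FMT_H264 fallback define, is that raw YUYV bypasses the decoder while MJPEG and H264 go through it:

    #include <linux/videodev2.h>

    #ifndef V4L2_PIX_FMT_H264
    #define V4L2_PIX_FMT_H264 0      // same fallback as the header above
    #endif

    // Sketch only: the actual isNeedToUseDecoder() body is not in this diff.
    bool needsDecoder(int pixelFormat) {
        switch (pixelFormat) {
        case V4L2_PIX_FMT_MJPEG:
        case V4L2_PIX_FMT_H264:
            return true;
        default:                     // e.g. V4L2_PIX_FMT_YUYV streams straight through
            return false;
        }
    }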
diff --git a/cpcam/java/com/ti/omap/android/cpcam/CPCam.java b/cpcam/java/com/ti/omap/android/cpcam/CPCam.java
index fde817b..9865e70 100644
--- a/cpcam/java/com/ti/omap/android/cpcam/CPCam.java
+++ b/cpcam/java/com/ti/omap/android/cpcam/CPCam.java
@@ -416,6 +416,17 @@ public class CPCam {
public native final void setBufferSource(CPCamBufferQueue tapIn, CPCamBufferQueue tapOut) throws IOException;
/**
+ * Releases the {@link SurfaceTexture}s used for tap-out and tap-in.
+ * This is used in conjunction with cp-cam mode.
+ *
+ * @param tapIn the {@link SurfaceTexture} to be cleared
+ * @param tapOut the {@link SurfaceTexture} to be cleared
+ * @throws IOException if the method fails (for example, if the surface
+ * texture is unavailable or unsuitable).
+ */
+ public native final void releaseBufferSource(CPCamBufferQueue tapIn, CPCamBufferQueue tapOut) throws IOException;
+
+ /**
* Sets the {@link SurfaceTexture} to be used for tap-out.
 * This is used in conjunction with cp-cam mode.
*
diff --git a/cpcam/java/com/ti/omap/android/cpcam/CPCamBufferQueue.java b/cpcam/java/com/ti/omap/android/cpcam/CPCamBufferQueue.java
index 3258769..b567131 100644
--- a/cpcam/java/com/ti/omap/android/cpcam/CPCamBufferQueue.java
+++ b/cpcam/java/com/ti/omap/android/cpcam/CPCamBufferQueue.java
@@ -143,6 +143,16 @@ public class CPCamBufferQueue {
}
/**
+ * Gets the unique ID of the buffer queue.
+ *
+ * @return the ID string of this buffer queue
+ *
+ */
+ public String getId() {
+ return nativeGetId();
+ }
+
+ /**
* Retrieve the timestamp associated with the texture image set by the most recent call to
* updateTexImage.
*
@@ -224,6 +234,7 @@ public class CPCamBufferQueue {
private native void nativeReleaseBuffer(int slot);
private native int nativeGetQueuedCount();
private native void nativeRelease();
+ private native final String nativeGetId();
/*
* We use a class initializer to allow the native code to cache some
diff --git a/cpcam/jni/com_ti_omap_android_cpcam_CPCam.cpp b/cpcam/jni/com_ti_omap_android_cpcam_CPCam.cpp
index d45526c..1b09ac4 100644
--- a/cpcam/jni/com_ti_omap_android_cpcam_CPCam.cpp
+++ b/cpcam/jni/com_ti_omap_android_cpcam_CPCam.cpp
@@ -686,6 +686,41 @@ static void com_ti_omap_android_cpcam_CPCam_setBufferSource(JNIEnv *env,
}
}
+static void com_ti_omap_android_cpcam_CPCam_releaseBufferSource(JNIEnv *env,
+ jobject thiz, jobject jTapIn, jobject jTapOut)
+{
+ CAMHAL_LOGV("releaseBufferSource");
+ sp<Camera> camera = get_native_camera(env, thiz, NULL);
+ if (camera == 0) return;
+
+ sp<PREVIEW_TEXTURE_TYPE> tapOut = NULL;
+ if (jTapOut!= NULL) {
+ tapOut = reinterpret_cast<PREVIEW_TEXTURE_TYPE *>(env->GetIntField(
+ jTapOut, fields.bufferQueue));
+ if (tapOut == NULL) {
+ jniThrowException(env, "java/lang/IllegalArgumentException",
+ "Tap out already released in releaseBufferSource");
+ return;
+ }
+ }
+
+ sp<PREVIEW_TEXTURE_TYPE> tapIn = NULL;
+ if (jTapIn != NULL) {
+ tapIn = reinterpret_cast<PREVIEW_TEXTURE_TYPE *>(env->GetIntField(
+ jTapIn, fields.bufferQueue));
+ if (tapIn == NULL) {
+ jniThrowException(env, "java/lang/IllegalArgumentException",
+ "Tap in already released in releaseBufferSource");
+ return;
+ }
+ }
+
+ if (camera->releaseBufferSource(tapIn, tapOut) != NO_ERROR) {
+ jniThrowException(env, "java/io/IOException",
+ "releaseBufferSource failed");
+ }
+}
+
static void com_ti_omap_android_cpcam_CPCam_reprocess(JNIEnv *env,
jobject thiz, jint msgType, jstring jShotParams)
{
@@ -988,6 +1023,9 @@ static JNINativeMethod cpcamMethods[] = {
{ "setBufferSource",
"(Lcom/ti/omap/android/cpcam/CPCamBufferQueue;Lcom/ti/omap/android/cpcam/CPCamBufferQueue;)V",
(void *)com_ti_omap_android_cpcam_CPCam_setBufferSource },
+ { "releaseBufferSource",
+ "(Lcom/ti/omap/android/cpcam/CPCamBufferQueue;Lcom/ti/omap/android/cpcam/CPCamBufferQueue;)V",
+ (void *)com_ti_omap_android_cpcam_CPCam_releaseBufferSource },
{ "native_reprocess",
"(ILjava/lang/String;)V",
(void *)com_ti_omap_android_cpcam_CPCam_reprocess },
diff --git a/cpcam/jni/com_ti_omap_android_cpcam_CPCamBufferQueue.cpp b/cpcam/jni/com_ti_omap_android_cpcam_CPCamBufferQueue.cpp
index fb9d6aa..0c3a8bd 100644
--- a/cpcam/jni/com_ti_omap_android_cpcam_CPCamBufferQueue.cpp
+++ b/cpcam/jni/com_ti_omap_android_cpcam_CPCamBufferQueue.cpp
@@ -370,6 +370,13 @@ static void CPCamBufferQueue_release(JNIEnv* env, jobject thiz)
env->SetIntField(thiz, fields.context, 0);
}
+static jstring CPCamBufferQueue_getId(JNIEnv* env, jobject thiz)
+{
+ sp<BufferQueue> bufferQueue(CPCamBufferQueue_getCPCamBufferQueue(env, thiz));
+ String8 id = bufferQueue->getId();
+ return env->NewStringUTF(id.string());
+}
+
// ----------------------------------------------------------------------------
static JNINativeMethod gCPCamBufferQueueMethods[] = {
@@ -381,6 +388,7 @@ static JNINativeMethod gCPCamBufferQueueMethods[] = {
{"nativeReleaseBuffer", "(I)V", (void*)CPCamBufferQueue_releaseBuffer },
{"nativeGetTimestamp", "(I)J", (void*)CPCamBufferQueue_getTimestamp },
{"nativeRelease", "()V", (void*)CPCamBufferQueue_release },
+ {"nativeGetId", "()Ljava/lang/String;", (void*)CPCamBufferQueue_getId },
};
struct field {
diff --git a/test/CameraHal/camera_test.h b/test/CameraHal/camera_test.h
index 1fc4baf..166c620 100644
--- a/test/CameraHal/camera_test.h
+++ b/test/CameraHal/camera_test.h
@@ -68,18 +68,25 @@
#define KEY_MECHANICAL_MISALIGNMENT_CORRECTION "mechanical-misalignment-correction"
//TI extensions for enable/disable algos
-#define KEY_ALGO_FIXED_GAMMA "ti-algo-fixed-gamma"
+#define KEY_ALGO_EXTERNAL_GAMMA "ti-algo-external-gamma"
#define KEY_ALGO_NSF1 "ti-algo-nsf1"
#define KEY_ALGO_NSF2 "ti-algo-nsf2"
#define KEY_ALGO_SHARPENING "ti-algo-sharpening"
#define KEY_ALGO_THREELINCOLORMAP "ti-algo-threelinecolormap"
#define KEY_ALGO_GIC "ti-algo-gic"
+#define KEY_TAP_OUT_SURFACES "tap-out"
+#define KEY_TAP_IN_SURFACE "tap-in"
+
+#define KEY_GAMMA_TABLE "gamma-table"
+
#define BRACKETING_IDX_DEFAULT 0
#define BRACKETING_IDX_STREAM 1
#define BRACKETING_STREAM_BUFFERS 9
#define SDCARD_PATH "/sdcard/"
+#define SECONDARY_SENSOR "_SEC"
+#define S3D_SENSOR "_S3D"
#define MAX_BURST 15
#define BURST_INC 5
@@ -213,6 +220,8 @@ typedef struct buffer_info {
int width;
int height;
int format;
+ size_t offset;
+ Rect crop;
sp<GraphicBuffer> buf;
} buffer_info_t;
@@ -224,6 +233,12 @@ typedef struct param_NamedExpBracketList_t {
const char *value;
} param_NamedExpBracketList;
+typedef struct param_GammaTblList_t {
+ const char *desc;
+ const char *r;
+ const char *g;
+ const char *b;
+} param_GammaTblList;
char * get_cycle_cmd(const char *aSrc);
void trim_script_cmd(char *cmd);
@@ -232,6 +247,8 @@ status_t dump_mem_status();
int openCamera();
int closeCamera();
void createBufferOutputSource();
+void createBufferInputSource();
+void requestBufferSourceReset();
void initDefaults();
void setDefaultExpGainPreset(ShotParameters &params, int idx);
void setSingleExpGainPreset(ShotParameters &params, int idx, int exp, int gain);
@@ -263,6 +280,7 @@ int getSupportedParametersNames(int width, int height, param_Array array[], int
int checkSupportedParamScript(char **array, int size, char *param);
int checkSupportedParamScriptLayout(char **array, int size, char *param,int *index);
int checkSupportedParamScriptResol(param_Array **array, int size, char *param, int *num);
+int checkSupportedParamScriptResol(param_Array **array, int size, int w, int h, int *num);
int getSupportedParametersfps(char* parameters, int *optionsCount);
int checkSupportedParamScriptfpsConst(int *array, int size, char *param, int *num);
int checkSupportedParamScriptfpsRange(char **array, int size, char *param, int *num);
@@ -313,6 +331,7 @@ public:
uint8_t *mappedBuffer;
unsigned int count;
unsigned int slot;
+ Rect crop;
};
public:
Defer(BufferSourceThread* bst) :
@@ -344,7 +363,8 @@ public:
if (!mExiting) {
DeferContainer defer = mDeferQueue.itemAt(0);
printf ("=== handling buffer %d\n", defer.count);
- mBST->handleBuffer(defer.graphicBuffer, defer.mappedBuffer, defer.count);
+ mBST->handleBuffer(defer.graphicBuffer, defer.mappedBuffer,
+ defer.count, defer.crop);
defer.graphicBuffer->unlock();
mDeferQueue.removeAt(0);
mBST->onHandled(defer.graphicBuffer, defer.slot);
@@ -352,12 +372,14 @@ public:
}
return false;
}
- void add(sp<GraphicBuffer> &gbuf, unsigned int count, unsigned int slot = 0) {
+ void add(sp<GraphicBuffer> &gbuf, const Rect &crop,
+ unsigned int count, unsigned int slot = 0) {
Mutex::Autolock lock(mFrameQueueMutex);
DeferContainer defer;
defer.graphicBuffer = gbuf;
defer.count = count;
defer.slot = slot;
+ defer.crop = crop;
gbuf->lock(GRALLOC_USAGE_SW_READ_RARELY, (void**) &defer.mappedBuffer);
mDeferQueue.add(defer);
mFrameQueueCondition.signal();
@@ -393,13 +415,13 @@ public:
virtual bool threadLoop() { return false;}
virtual void requestExit() {};
- virtual void setBuffer() {};
+ virtual void setBuffer(android::ShotParameters &params) {};
virtual void onHandled(sp<GraphicBuffer> &g, unsigned int slot) {};
- bool toggleStreamCapture(int expBracketIdx) {
+ bool setStreamCapture(bool restart, int expBracketIdx) {
Mutex::Autolock lock(mToggleStateMutex);
mExpBracketIdx = expBracketIdx;
- mRestartCapture = !mRestartCapture;
+ mRestartCapture = restart;
return mRestartCapture;
}
@@ -418,7 +440,8 @@ public:
return !mReturnedBuffers.isEmpty();
}
- void handleBuffer(sp<GraphicBuffer> &, uint8_t *, unsigned int);
+ void handleBuffer(sp<GraphicBuffer> &, uint8_t *, unsigned int, const Rect &);
+ Rect getCrop(sp<GraphicBuffer> &buffer, const float *mtx);
void showMetadata(sp<IMemory> data);
protected:
void restartCapture() {
@@ -427,6 +450,7 @@ protected:
ShotParameters shotParams;
calcNextSingleExpGainPreset(mExpBracketIdx, mExp, mGain),
setSingleExpGainPreset(shotParams, mExpBracketIdx, mExp, mGain);
+ shotParams.set(ShotParameters::KEY_BURST, 1);
mCamera->takePictureWithParameters(0, shotParams.flatten());
}
}
@@ -450,16 +474,19 @@ private:
class BufferSourceInput : public RefBase {
public:
BufferSourceInput(sp<Camera> camera) : mCamera(camera) {
+ mTapOut = new BufferSourceThread(camera);
+ mTapOut->run();
}
virtual ~BufferSourceInput() {
+ mTapOut->requestExit();
+ mTapOut.clear();
}
- virtual void init() = 0;
-
- virtual void setInput(buffer_info_t, const char *format);
+ virtual void setInput(buffer_info_t, const char *format, ShotParameters &params);
protected:
+ sp<BufferSourceThread> mTapOut;
sp<ANativeWindow> mWindowTapIn;
sp<Camera> mCamera;
};
diff --git a/test/CameraHal/camera_test_bufferqueue.h b/test/CameraHal/camera_test_bufferqueue.h
index c81b1ee..e0542fa 100644
--- a/test/CameraHal/camera_test_bufferqueue.h
+++ b/test/CameraHal/camera_test_bufferqueue.h
@@ -60,8 +60,10 @@ public:
mFW = new FrameConsumer();
mBufferQueue->setSynchronousMode(true);
mBufferQueue->consumerConnect(mFW);
+ mCamera->setBufferSource(NULL, mBufferQueue);
}
virtual ~BQ_BufferSourceThread() {
+ mCamera->releaseBufferSource(NULL, mBufferQueue);
}
virtual bool threadLoop() {
@@ -83,11 +85,12 @@ public:
// the first time we acquire it. We are expected to hold a reference to
// it there after...
mBufferSlots[slot].mGraphicBuffer = item.mGraphicBuffer;
+ mBufferSlots[slot].mCrop = item.mCrop;
}
showMetadata(item.mMetadata);
printf("\n");
graphic_buffer = mBufferSlots[item.mBuf].mGraphicBuffer;
- mDeferThread->add(graphic_buffer, mCounter++, item.mBuf);
+ mDeferThread->add(graphic_buffer, item.mCrop, mCounter++, item.mBuf);
restartCapture();
return true;
}
@@ -101,8 +104,16 @@ public:
mFW->onFrameAvailable();
}
- virtual void setBuffer() {
- mCamera->setBufferSource(NULL, mBufferQueue);
+ virtual void setBuffer(android::ShotParameters &params) {
+ {
+ String8 id = mBufferQueue->getId();
+
+ if (!id.isEmpty()) {
+ params.set(KEY_TAP_OUT_SURFACES, id);
+ } else {
+ params.remove(KEY_TAP_OUT_SURFACES);
+ }
+ }
}
virtual void onHandled(sp<GraphicBuffer> &gbuf, unsigned int slot) {
@@ -129,19 +140,29 @@ public:
#else
mBufferQueue = new BufferQueue(true, 1);
#endif
- }
- virtual ~BQ_BufferSourceInput() {
- }
-
- virtual void init() {
sp<ISurfaceTexture> surfaceTexture = mBufferQueue;
mWindowTapIn = new SurfaceTextureClient(surfaceTexture);
+ mCamera->setBufferSource(mBufferQueue, NULL);
+ }
+ virtual ~BQ_BufferSourceInput() {
+ mCamera->releaseBufferSource(mBufferQueue, NULL);
}
- virtual void setInput(buffer_info_t bufinfo, const char *format) {
+ virtual void setInput(buffer_info_t bufinfo, const char *format, android::ShotParameters &params) {
mBufferQueue->setDefaultBufferSize(bufinfo.width, bufinfo.height);
- BufferSourceInput::setInput(bufinfo, format);
- mCamera->setBufferSource(mBufferQueue, NULL);
+ // Reset buffer slots; any remaining buffer slots that were
+ // previously added should get flushed.
+ mBufferQueue->setBufferCount(android::BufferQueue::NUM_BUFFER_SLOTS);
+ BufferSourceInput::setInput(bufinfo, format, params);
+ {
+ String8 id = mBufferQueue->getId();
+
+ if (!id.isEmpty()) {
+ params.set(KEY_TAP_IN_SURFACE, id);
+ } else {
+ params.remove(KEY_TAP_IN_SURFACE);
+ }
+ }
}
private:
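The reworked test no longer calls setBufferSource() per shot; instead setBuffer() stamps each queue's getId() into the shot parameters, and the HAL resolves the id via the match() path added earlier. A sketch of the resulting capture call, assuming mCamera and mBufferQueue are initialized as in the class above:

    // Sketch: route a capture to a specific tap-out via shot parameters.
    void captureToTapOut() {
        ShotParameters shot;
        String8 id = mBufferQueue->getId();
        if (!id.isEmpty()) {
            shot.set(KEY_TAP_OUT_SURFACES, id);  // "tap-out" key from camera_test.h
        }
        mCamera->takePictureWithParameters(0, shot.flatten());
    }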
diff --git a/test/CameraHal/camera_test_menu.cpp b/test/CameraHal/camera_test_menu.cpp
index 06030a1..ab0d830 100644
--- a/test/CameraHal/camera_test_menu.cpp
+++ b/test/CameraHal/camera_test_menu.cpp
@@ -88,6 +88,7 @@ bool vnftoggle = false;
bool faceDetectToggle = false;
bool metaDataToggle = false;
bool shotConfigFlush = false;
+bool streamCapture = false;
int saturation = 0;
int zoomIDX = 0;
int videoCodecIDX = 0;
@@ -95,7 +96,8 @@ int audioCodecIDX = 0;
int outputFormatIDX = 0;
int contrast = 0;
int brightness = 0;
-unsigned int burst = 9;
+unsigned int burst = 0;
+unsigned int burstCount = 0;
int sharpness = 0;
int iso_mode = 0;
int capture_mode = 0;
@@ -265,7 +267,7 @@ param_NamedExpBracketList_t expBracketing[] = {
},
};
-const char *tempBracketing[] = {"disable", "enable"};
+const char *tempBracketing[] = {"false", "true"};
const char *faceDetection[] = {"disable", "enable"};
const char *afTimeout[] = {"enable", "disable" };
@@ -520,19 +522,105 @@ bool firstTime = true;
bool firstTimeStereo = true;
//TI extensions for enable/disable algos
-const char *algoFixedGamma[] = {CameraParameters::FALSE, CameraParameters::TRUE};
+const char *algoExternalGamma[] = {CameraParameters::FALSE, CameraParameters::TRUE};
const char *algoNSF1[] = {CameraParameters::FALSE, CameraParameters::TRUE};
const char *algoNSF2[] = {CameraParameters::FALSE, CameraParameters::TRUE};
const char *algoSharpening[] = {CameraParameters::FALSE, CameraParameters::TRUE};
const char *algoThreeLinColorMap[] = {CameraParameters::FALSE, CameraParameters::TRUE};
const char *algoGIC[] = {CameraParameters::FALSE, CameraParameters::TRUE};
-int algoFixedGammaIDX = 1;
+int algoExternalGammaIDX = 0;
int algoNSF1IDX = 1;
int algoNSF2IDX = 1;
int algoSharpeningIDX = 1;
int algoThreeLinColorMapIDX = 1;
int algoGICIDX = 1;
+/** Buffer source reset */
+bool bufferSourceInputReset = false;
+bool bufferSourceOutputReset = false;
+
+/** Gamma table */
+const char *gammaTbl22 = "("
+ "0:8,8:12,20:8,28:8,36:8,44:12,56:8,64:8,72:8,80:12,92:4,96:8,104:4,108:8,116:8,124:8,"
+ "132:4,136:8,144:4,148:8,156:4,160:8,168:4,172:4,176:4,180:8,188:4,192:8,200:4,204:4,208:4,212:4,"
+ "216:4,220:4,224:4,228:8,236:4,240:4,244:4,248:4,252:4,256:4,260:4,264:4,268:0,268:4,272:4,276:4,"
+ "280:4,284:4,288:4,292:4,296:4,300:4,304:0,304:4,308:4,312:4,316:4,320:4,324:0,324:4,328:4,332:4,"
+ "336:0,336:4,340:4,344:4,348:0,348:4,352:4,356:4,360:0,360:4,364:4,368:4,372:0,372:4,376:0,376:4,"
+ "380:4,384:4,388:0,388:4,392:0,392:4,396:4,400:4,404:0,404:4,408:0,408:4,412:0,412:4,416:4,420:4,"
+ "424:0,424:4,428:0,428:4,432:0,432:4,436:4,440:4,444:0,444:4,448:0,448:4,452:0,452:4,456:0,456:4,"
+ "460:0,460:4,464:0,464:4,468:4,472:4,476:0,476:4,480:0,480:4,484:0,484:4,488:0,488:4,492:0,492:4,"
+ "496:0,496:4,500:0,500:4,504:0,504:4,508:0,508:4,512:0,512:4,516:0,516:4,520:0,520:4,524:0,524:4,"
+ "528:0,528:4,532:0,532:4,536:0,536:4,540:0,540:4,544:0,544:4,548:0,548:4,552:0,552:4,556:0,556:4,"
+ "560:0,560:4,564:0,564:4,568:0,568:4,572:0,572:4,576:0,576:4,580:0,580:0,580:0,580:4,584:0,584:4,"
+ "588:0,588:4,592:0,592:4,596:0,596:4,600:0,600:4,604:0,604:4,608:0,608:0,608:0,608:4,612:0,612:4,"
+ "616:0,616:4,620:0,620:4,624:0,624:4,628:0,628:4,632:0,632:0,632:0,632:4,636:0,636:4,640:0,640:4,"
+ "644:0,644:4,648:0,648:0,648:0,648:4,652:0,652:4,656:0,656:4,660:0,660:4,664:0,664:0,664:0,664:4,"
+ "668:0,668:4,672:0,672:4,676:0,676:4,680:0,680:0,680:0,680:4,684:0,684:4,688:0,688:4,692:0,692:0,"
+ "692:0,692:4,696:0,696:4,700:0,700:4,704:0,704:0,704:0,704:4,708:0,708:4,712:0,712:4,716:0,716:0,"
+ "716:0,716:4,720:0,720:4,724:0,724:0,724:0,724:4,728:0,728:4,732:0,732:4,736:0,736:0,736:0,736:4,"
+ "740:0,740:4,744:0,744:0,744:0,744:4,748:0,748:4,752:0,752:0,752:0,752:4,756:0,756:4,760:0,760:4,"
+ "764:0,764:0,764:0,764:4,768:0,768:4,772:0,772:0,772:0,772:4,776:0,776:4,780:0,780:0,780:0,780:4,"
+ "784:0,784:4,788:0,788:0,788:0,788:4,792:0,792:4,796:0,796:0,796:0,796:4,800:0,800:0,800:0,800:4,"
+ "804:0,804:4,808:0,808:0,808:0,808:4,812:0,812:4,816:0,816:0,816:0,816:4,820:0,820:4,824:0,824:0,"
+ "824:0,824:4,828:0,828:0,828:0,828:4,832:0,832:4,836:0,836:0,836:0,836:4,840:0,840:4,844:0,844:0,"
+ "844:0,844:4,848:0,848:0,848:0,848:4,852:0,852:4,856:0,856:0,856:0,856:4,860:0,860:0,860:0,860:4,"
+ "864:0,864:4,868:0,868:0,868:0,868:4,872:0,872:0,872:0,872:4,876:0,876:0,876:0,876:4,880:0,880:4,"
+ "884:0,884:0,884:0,884:4,888:0,888:0,888:0,888:4,892:0,892:4,896:0,896:0,896:0,896:4,900:0,900:0,"
+ "900:0,900:4,904:0,904:0,904:0,904:4,908:0,908:4,912:0,912:0,912:0,912:4,916:0,916:0,916:0,916:4,"
+ "920:0,920:0,920:0,920:4,924:0,924:0,924:0,924:4,928:0,928:4,932:0,932:0,932:0,932:4,936:0,936:0,"
+ "936:0,936:4,940:0,940:0,940:0,940:4,944:0,944:0,944:0,944:4,948:0,948:0,948:0,948:4,952:0,952:0,"
+ "952:0,952:4,956:0,956:4,960:0,960:0,960:0,960:4,964:0,964:0,964:0,964:4,968:0,968:0,968:0,968:4,"
+ "972:0,972:0,972:0,972:4,976:0,976:0,976:0,976:4,980:0,980:0,980:0,980:4,984:0,984:0,984:0,984:4,"
+ "988:0,988:0,988:0,988:4,992:0,992:0,992:0,992:4,996:0,996:0,996:0,996:4,1000:0,1000:0,1000:0,1000:4,"
+ "1004:0,1004:0,1004:0,1004:4,1008:0,1008:0,1008:0,1008:4,1012:0,1012:4,1016:0,1016:0,1016:0,1016:4,1020:0,1020:0"
+ ")";
+const char *gammaTbl0 = "("
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,"
+ "4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0,4:0"
+ ")";
+const param_GammaTblList_t manualGammaModes[] = {
+ {"Off", NULL, NULL, NULL},
+ {"White", gammaTbl22, gammaTbl22, gammaTbl22},
+ {"Yellow", gammaTbl22, gammaTbl22, gammaTbl0},
+ {"Cyan", gammaTbl0, gammaTbl22, gammaTbl22},
+ {"Green", gammaTbl0, gammaTbl22, gammaTbl0},
+ {"Magenta", gammaTbl22, gammaTbl0, gammaTbl22},
+ {"Red", gammaTbl22, gammaTbl0, gammaTbl0},
+ {"Blue", gammaTbl0, gammaTbl0, gammaTbl22},
+ {"Black", gammaTbl0, gammaTbl0, gammaTbl0}
+};
+int manualGammaModeIDX = 0;
+
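Each manual gamma mode above simply selects one of the two tables per color channel; the menu handler further down joins the three selections into one comma-separated value for KEY_GAMMA_TABLE, or removes the key when a table is NULL (the "Off" mode). A minimal sketch of that flattening step, using the param_GammaTblList_t fields from this patch:

    // Sketch: flatten a manual gamma mode into the "r,g,b" parameter value.
    // An empty result means the mode clears the gamma table instead.
    static android::String8 buildGammaParam(const param_GammaTblList_t &mode) {
        android::String8 val;
        if ((NULL != mode.r) && (NULL != mode.g) && (NULL != mode.b)) {
            val.append(mode.r);
            val.append(",");
            val.append(mode.g);
            val.append(",");
            val.append(mode.b);
        }
        return val;
    }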
/** Calculate delay from a reference time */
unsigned long long timeval_delay(const timeval *ref) {
unsigned long long st, end, delay;
@@ -561,8 +649,10 @@ void my_raw_callback(const sp<IMemory>& mem) {
if (mem == NULL)
goto out;
- //Start preview after capture.
- camera->startPreview();
+ if ( strcmp(modevalues[capture_mode], "cp-cam") ) { // not cp-cam mode
+ //Start preview after capture.
+ camera->startPreview();
+ }
fn[0] = 0;
sprintf(fn, "%s/img%03d.raw", images_dir_path, counter);
@@ -691,8 +781,16 @@ void my_jpeg_callback(const sp<IMemory>& mem) {
LOG_FUNCTION_NAME;
- //Start preview after capture.
- camera->startPreview();
+ if ( strcmp(modevalues[capture_mode], "cp-cam") ) { // not cp-cam mode
+ if(burstCount > 1) {
+ burstCount--;
+ // Restart preview if taking a single capture
+ // or after the last iteration of burstCount
+ } else if(burstCount == 0 || burstCount == 1) {
+ camera->startPreview();
+ burstCount = burst;
+ }
+ }
if (mem == NULL)
goto out;
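The burst handling above reduces to a countdown: each JPEG callback consumes one frame, and preview is restarted (with the counter re-armed) only when the last frame of the burst, or a single capture, arrives. A worked trace, assuming the burst/burstCount globals of this test app:

    // Sketch: for burst == 3 the JPEG callback sequence is
    //   call 1: burstCount 3 -> 2    (no preview restart)
    //   call 2: burstCount 2 -> 1    (no preview restart)
    //   call 3: burstCount == 1      -> startPreview(), burstCount = 3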
@@ -1002,6 +1100,8 @@ int configureRecorder() {
char videoFile[384],vbit_string[50];
videoFd = -1;
+ struct CameraInfo cameraInfo;
+ camera->getCameraInfo(camera_index, &cameraInfo);
if ( ( NULL == recorder.get() ) || ( NULL == camera.get() ) ) {
printf("invalid recorder and/or camera references\n");
@@ -1064,10 +1164,16 @@ int configureRecorder() {
recording_counter++;
- if ( recorder->setVideoSize(Vcapture_Array[VcaptureSizeIDX]->width, Vcapture_Array[VcaptureSizeIDX]->height) < 0 ) {
- printf("error while configuring video size\n");
-
- return -1;
+ if (cameraInfo.orientation == 90 || cameraInfo.orientation == 270 ) {
+ if ( recorder->setVideoSize(Vcapture_Array[VcaptureSizeIDX]->height, Vcapture_Array[VcaptureSizeIDX]->width) < 0 ) {
+ printf("error while configuring video size\n");
+ return -1;
+ }
+ } else {
+ if ( recorder->setVideoSize(Vcapture_Array[VcaptureSizeIDX]->width, Vcapture_Array[VcaptureSizeIDX]->height) < 0 ) {
+ printf("error while configuring video size\n");
+ return -1;
+ }
}
if ( recorder->setVideoEncoder(videoCodecs[videoCodecIDX].type) < 0 ) {
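Both branches above differ only in argument order, so the orientation check can also be expressed as a single swap. A minimal sketch, assuming android::MediaRecorder and the capture-size table used in this test (std::swap comes from <utility>; printf from <cstdio>):

    // Sketch: configure recorder dimensions for the sensor mount angle.
    static int configureVideoSize(android::MediaRecorder *recorder,
                                  int w, int h, int orientation) {
        if (orientation == 90 || orientation == 270) {
            std::swap(w, h);        // sensor delivers portrait frames
        }
        if (recorder->setVideoSize(w, h) < 0) {
            printf("error while configuring video size\n");
            return -1;
        }
        return 0;
    }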
@@ -1167,6 +1273,8 @@ int openCamera() {
layoutstr = new char[256];
capturelayoutstr = new char[256];
+ requestBufferSourceReset();
+
printf("openCamera(camera_index=%d)\n", camera_index);
camera = Camera::connect(camera_index);
@@ -1215,6 +1323,10 @@ int closeCamera() {
}
void createBufferOutputSource() {
+ if(bufferSourceOutputThread.get() && bufferSourceOutputReset) {
+ bufferSourceOutputThread->requestExit();
+ bufferSourceOutputThread.clear();
+ }
if(!bufferSourceOutputThread.get()) {
#ifdef ANDROID_API_JB_OR_LATER
bufferSourceOutputThread = new BQ_BufferSourceThread(123, camera);
@@ -1223,6 +1335,26 @@ void createBufferOutputSource() {
#endif
bufferSourceOutputThread->run();
}
+ bufferSourceOutputReset = false;
+}
+
+void createBufferInputSource() {
+ if (bufferSourceInput.get() && bufferSourceInputReset) {
+ bufferSourceInput.clear();
+ }
+ if (!bufferSourceInput.get()) {
+#ifdef ANDROID_API_JB_OR_LATER
+ bufferSourceInput = new BQ_BufferSourceInput(1234, camera);
+#else
+ bufferSourceInput = new ST_BufferSourceInput(1234, camera);
+#endif
+ }
+ bufferSourceInputReset = false;
+}
+
+void requestBufferSourceReset() {
+ bufferSourceInputReset = true;
+ bufferSourceOutputReset = true;
}
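requestBufferSourceReset() only marks both sources stale; the actual teardown happens lazily in the create* helpers above, which drop a stale instance before re-creating it on demand. The pattern in generic form (illustrative only; Source and its constructor are placeholders for the BQ_/ST_ types used here):

    // Sketch: lazy re-creation guarded by a reset flag.
    template <typename Source>
    static void recreateIfStale(android::sp<Source> &src, bool &stale) {
        if (src.get() && stale) {
            src.clear();            // tear down the stale instance
        }
        if (!src.get()) {
            src = new Source();     // placeholder for the real factory
        }
        stale = false;              // source is now known to be fresh
    }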
int startPreview() {
@@ -1287,7 +1419,11 @@ int startPreview() {
params.set(KEY_S3D_CAP_FRAME_LAYOUT, stereoCapLayout[stereoCapLayoutIDX]);
}
- params.setPreviewSize(preview_Array[previewSizeIDX]->width, preview_Array[previewSizeIDX]->height);
+ if ((cameraInfo.orientation == 90 || cameraInfo.orientation == 270) && recordingMode) {
+ params.setPreviewSize(previewHeight, previewWidth);
+ } else {
+ params.setPreviewSize(previewWidth, previewHeight);
+ }
params.setPictureSize(capture_Array[captureSizeIDX]->width, capture_Array[captureSizeIDX]->height);
// calculate display orientation from sensor orientation
@@ -1298,6 +1434,11 @@ int startPreview() {
} else { // back-facing
orientation = (cameraInfo.orientation - dinfo.orientation + 360) % 360;
}
+
+ if(!strcmp(params.get(KEY_MODE), "video-mode") ) {
+ orientation = 0;
+ }
+
camera->sendCommand(CAMERA_CMD_SET_DISPLAY_ORIENTATION, orientation, 0);
camera->setParameters(params.flatten());
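The display orientation math is the standard Android convention: the sensor mount angle is combined with the current display rotation, with an extra mirror compensation for front-facing sensors; video-mode forces 0 above because the recording path applies its own rotation. A worked sketch (the front-facing branch follows the Android Camera documentation, since this hunk only shows the back-facing line):

    // Sketch: display orientation from sensor mount and display rotation.
    static int displayOrientation(const CameraInfo &info, int displayRotation) {
        if (info.facing == CAMERA_FACING_FRONT) {
            int o = (info.orientation + displayRotation) % 360;
            return (360 - o) % 360;     // compensate for the mirror
        }
        return (info.orientation - displayRotation + 360) % 360;
    }
    // e.g. a back sensor mounted at 90 with the display at 0 gives 90;
    // rotating the display to 270 gives (90 - 270 + 360) % 360 = 180.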
@@ -1309,6 +1450,12 @@ int startPreview() {
previewRunning = true;
reSizePreview = false;
+ const char *format = params.getPictureFormat();
+ if((NULL != format) && isRawPixelFormat(format)) {
+ createBufferOutputSource();
+ createBufferInputSource();
+ }
+
return 0;
}
@@ -1983,10 +2130,14 @@ int deleteAllocatedMemory() {
delete [] layoutstr;
delete [] capturelayoutstr;
+ // Release buffer sources if any
if (bufferSourceOutputThread.get()) {
bufferSourceOutputThread->requestExit();
bufferSourceOutputThread.clear();
}
+ if ( bufferSourceInput.get() ) {
+ bufferSourceInput.clear();
+ }
return 0;
}
@@ -2047,6 +2198,15 @@ void stopPreview() {
void initDefaults() {
+ struct CameraInfo cameraInfo;
+
+ camera->getCameraInfo(camera_index, &cameraInfo);
+ // Both facings default the capture rotation to the sensor mount angle.
+ rotation = cameraInfo.orientation;
+
antibanding_mode = getDefaultParameter("off", numAntibanding, antiband);
focus_mode = getDefaultParameter("auto", numfocus, focus);
fpsRangeIdx = getDefaultParameter("5000,30000", rangeCnt, fps_range_str);
@@ -2064,6 +2224,7 @@ void initDefaults() {
caf_mode = 0;
shotConfigFlush = false;
+ streamCapture = false;
vstabtoggle = false;
vnftoggle = false;
AutoExposureLocktoggle = false;
@@ -2072,7 +2233,6 @@ void initDefaults() {
metaDataToggle = false;
expBracketIdx = BRACKETING_IDX_DEFAULT;
flashIdx = getDefaultParameter("off", numflash, flash);
- rotation = 0;
previewRotation = 0;
zoomIDX = 0;
videoCodecIDX = 0;
@@ -2104,13 +2264,15 @@ void initDefaults() {
manualExp = manualExpMin;
manualGain = manualGainMin;
- algoFixedGammaIDX = 1;
+ algoExternalGammaIDX = 0;
algoNSF1IDX = 1;
algoNSF2IDX = 1;
algoSharpeningIDX = 1;
algoThreeLinColorMapIDX = 1;
algoGICIDX = 1;
+ manualGammaModeIDX = 0;
+
params.set(params.KEY_VIDEO_STABILIZATION, params.FALSE);
params.set("vnf", params.FALSE);
params.setPreviewSize(preview_Array[previewSizeIDX]->width, preview_Array[previewSizeIDX]->height);
@@ -2156,7 +2318,11 @@ void initDefaults() {
}
void setDefaultExpGainPreset(ShotParameters &params, int idx) {
- setExpGainPreset(params, expBracketing[idx].value, false, expBracketing[idx].param_type, shotConfigFlush);
+ if ( ((int)ARRAY_SIZE(expBracketing) > idx) && (0 <= idx) ) {
+ setExpGainPreset(params, expBracketing[idx].value, false, expBracketing[idx].param_type, shotConfigFlush);
+ } else {
+ printf("setDefaultExpGainPreset: Index (%d) is out of range 0 ~ %u\n", idx, ARRAY_SIZE(expBracketing) - 1);
+ }
}
void setSingleExpGainPreset(ShotParameters &params, int idx, int exp, int gain) {
@@ -2169,7 +2335,7 @@ void setSingleExpGainPreset(ShotParameters &params, int idx, int exp, int gain)
if (PARAM_EXP_BRACKET_VALUE_REL == expBracketing[idx].value_type) {
val.appendFormat("%+d", exp);
} else {
- val.appendFormat("%u", (unsigned int) abs);
+ val.appendFormat("%u", (unsigned int) exp);
}
if (PARAM_EXP_BRACKET_PARAM_PAIR == expBracketing[idx].param_type) {
@@ -2205,6 +2371,7 @@ void setExpGainPreset(ShotParameters &params, const char *input, bool force, par
printf("relative EV input: \"%s\"\nnumber of relative EV values: %d (%s)\n",
input, i, flush ? "reset" : "append");
burst = i;
+ burstCount = i;
params.set(ShotParameters::KEY_BURST, burst);
params.set(ShotParameters::KEY_EXP_COMPENSATION, input);
params.remove(ShotParameters::KEY_EXP_GAIN_PAIRS);
@@ -2220,6 +2387,7 @@ void setExpGainPreset(ShotParameters &params, const char *input, bool force, par
printf("absolute exposure,gain input: \"%s\"\nNumber of brackets: %d (%s)\n",
input, i, flush ? "reset" : "append");
burst = i;
+ burstCount = i;
params.set(ShotParameters::KEY_BURST, burst);
params.set(ShotParameters::KEY_EXP_GAIN_PAIRS, input);
params.remove(ShotParameters::KEY_EXP_COMPENSATION);
@@ -2356,12 +2524,13 @@ int menu_algo() {
if (print_menu) {
printf("\n\n== ALGO ENABLE/DISABLE MENU ============\n\n");
- printf(" a. Fixed Gamma: %s\n", algoFixedGamma[algoFixedGammaIDX]);
+ printf(" a. External Gamma: %s\n", algoExternalGamma[algoExternalGammaIDX]);
printf(" s. NSF1: %s\n", algoNSF1[algoNSF1IDX]);
printf(" d. NSF2: %s\n", algoNSF2[algoNSF2IDX]);
printf(" f. Sharpening: %s\n", algoSharpening[algoSharpeningIDX]);
printf(" g. Color Conversion: %s\n", algoThreeLinColorMap[algoThreeLinColorMapIDX]);
printf(" h. Green Inballance Correction: %s\n", algoGIC[algoGICIDX]);
+ printf(" j. Manual gamma table: %s\n", manualGammaModes[manualGammaModeIDX].desc);
printf("\n");
printf(" q. Return to main menu\n");
printf("\n");
@@ -2377,9 +2546,9 @@ int menu_algo() {
case 'a':
case 'A':
- algoFixedGammaIDX++;
- algoFixedGammaIDX %= ARRAY_SIZE(algoFixedGamma);
- params.set(KEY_ALGO_FIXED_GAMMA, (algoFixedGamma[algoFixedGammaIDX]));
+ algoExternalGammaIDX++;
+ algoExternalGammaIDX %= ARRAY_SIZE(algoExternalGamma);
+ params.set(KEY_ALGO_EXTERNAL_GAMMA, (algoExternalGamma[algoExternalGammaIDX]));
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -2441,6 +2610,29 @@ int menu_algo() {
break;
+ case 'j':
+ case 'J':
+ manualGammaModeIDX++;
+ manualGammaModeIDX %= ARRAY_SIZE(manualGammaModes);
+ if ( (NULL != manualGammaModes[manualGammaModeIDX].r) &&
+ (NULL != manualGammaModes[manualGammaModeIDX].g) &&
+ (NULL != manualGammaModes[manualGammaModeIDX].b) ) {
+ String8 Val;
+ Val.append(manualGammaModes[manualGammaModeIDX].r);
+ Val.append(",");
+ Val.append(manualGammaModes[manualGammaModeIDX].g);
+ Val.append(",");
+ Val.append(manualGammaModes[manualGammaModeIDX].b);
+ params.set(KEY_GAMMA_TABLE, Val);
+ } else {
+ params.remove(KEY_GAMMA_TABLE);
+ }
+
+ if ( hardwareActive )
+ camera->setParameters(params.flatten());
+
+ break;
+
case 'Q':
case 'q':
return -1;
@@ -2460,6 +2652,8 @@ int functional_menu() {
int j = 0;
int k = 0;
const char *valstr = NULL;
+ struct CameraInfo cameraInfo;
+ bool queueEmpty = true;
memset(area1, '\0', MAX_LINES*(MAX_SYMBOLS+1));
memset(area2, '\0', MAX_LINES*(MAX_SYMBOLS+1));
@@ -2660,13 +2854,7 @@ int functional_menu() {
} else {
stopPreview();
}
- if (bufferSourceOutputThread.get()) {
- bufferSourceOutputThread->requestExit();
- bufferSourceOutputThread.clear();
- }
- if ( bufferSourceInput.get() ) {
- bufferSourceInput.clear();
- }
+
break;
case '3':
@@ -2713,6 +2901,9 @@ int functional_menu() {
if ( hardwareActive )
camera->setParameters(params.flatten());
+
+ requestBufferSourceReset();
+
break;
case 'l':
@@ -2847,7 +3038,13 @@ int functional_menu() {
printf("numpreviewFormat %d\n", numpictureFormat);
params.setPictureFormat(pictureFormatArray[pictureFormat]);
- if ( hardwareActive )
+ // Do not push new parameters while tap-out buffers are still queued
+ queueEmpty = true;
+ if ( bufferSourceOutputThread.get() ) {
+ if ( 0 < bufferSourceOutputThread->hasBuffer() ) {
+ queueEmpty = false;
+ }
+ }
+ if ( hardwareActive && queueEmpty )
camera->setParameters(params.flatten());
break;
@@ -2964,6 +3161,8 @@ int functional_menu() {
if ( hardwareActive )
camera->setParameters(params.flatten());
+ requestBufferSourceReset();
+
break;
case 'K':
@@ -3026,6 +3225,7 @@ int functional_menu() {
} else {
burst += BURST_INC;
}
+ burstCount = burst;
params.set(KEY_TI_BURST, burst);
if ( hardwareActive )
@@ -3054,17 +3254,43 @@ int functional_menu() {
ippIDX = 3;
params.set(KEY_IPP, ipp_mode[ippIDX]);
params.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::FALSE);
+ previewRotation = 0;
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
} else if ( !strcmp(modevalues[capture_mode], "video-mode") ) {
params.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::TRUE);
+ camera->getCameraInfo(camera_index, &cameraInfo);
+ previewRotation = (360 - cameraInfo.orientation) % 360;
+ if (previewRotation >= 0 && previewRotation <= 360) {
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
+ }
} else {
ippIDX = ippIDX_old;
params.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::FALSE);
+ previewRotation = 0;
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
}
params.set(KEY_MODE, (modevalues[capture_mode]));
- if ( hardwareActive )
+ if ( hardwareActive ) {
+ if (previewRunning) {
+ stopPreview();
+ }
camera->setParameters(params.flatten());
+ // Get parameters from capabilities for the new capture mode
+ params = camera->getParameters();
+ getSizeParametersFromCapabilities();
+ getParametersFromCapabilities();
+ // Set a 30 fps frame rate and 12MP capture resolution, if available for the new capture mode.
+ // Otherwise fall back to index 0 of fps_const_str and capture_Array, respectively.
+ frameRateIDX = getDefaultParameter("30000,30000", constCnt, fps_const_str);
+ captureSizeIDX = getDefaultParameterResol("12MP", numcaptureSize, capture_Array);
+ params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fps_const_str[frameRateIDX]);
+ params.setPictureSize(capture_Array[captureSizeIDX]->width, capture_Array[captureSizeIDX]->height);
+ camera->setParameters(params.flatten());
+ }
+
+ requestBufferSourceReset();
break;
@@ -3389,77 +3615,66 @@ int functional_menu() {
{
int msgType = 0;
- if(isRawPixelFormat(pictureFormatArray[pictureFormat])) {
- createBufferOutputSource();
- if (bufferSourceOutputThread.get()) {
- bufferSourceOutputThread->setBuffer();
- }
- } else {
- msgType = CAMERA_MSG_COMPRESSED_IMAGE |
- CAMERA_MSG_RAW_IMAGE;
-#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
- msgType |= CAMERA_MSG_RAW_BURST;
-#endif
- }
-
if((0 == strcmp(modevalues[capture_mode], "video-mode")) &&
(0 != strcmp(videosnapshotstr, "true"))) {
printf("Video Snapshot is not supported\n");
- } else {
- gettimeofday(&picture_start, 0);
- if ( hardwareActive ) {
- camera->setParameters(params.flatten());
- camera->takePictureWithParameters(msgType, shotParams.flatten());
+ } else if ( hardwareActive ) {
+ if(isRawPixelFormat(pictureFormatArray[pictureFormat])) {
+ createBufferOutputSource();
+ if (bufferSourceOutputThread.get()) {
+ bufferSourceOutputThread->setBuffer(shotParams);
+ bufferSourceOutputThread->setStreamCapture(streamCapture, expBracketIdx);
+ }
+ } else {
+ msgType = CAMERA_MSG_COMPRESSED_IMAGE |
+ CAMERA_MSG_RAW_IMAGE;
+#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
+ msgType |= CAMERA_MSG_RAW_BURST;
+#endif
}
+
+ gettimeofday(&picture_start, 0);
+ camera->setParameters(params.flatten());
+ camera->takePictureWithParameters(msgType, shotParams.flatten());
}
break;
}
case 'S':
{
- createBufferOutputSource();
- if (bufferSourceOutputThread.get()) {
- if (bufferSourceOutputThread->toggleStreamCapture(expBracketIdx)) {
- setSingleExpGainPreset(shotParams, expBracketIdx, 0, 0);
- // Queue more frames initially
- shotParams.set(ShotParameters::KEY_BURST, BRACKETING_STREAM_BUFFERS);
- } else {
- setDefaultExpGainPreset(shotParams, expBracketIdx);
+ if (streamCapture) {
+ streamCapture = false;
+ setDefaultExpGainPreset(shotParams, expBracketIdx);
+ // Stop streaming
+ if (bufferSourceOutputThread.get()) {
+ bufferSourceOutputThread->setStreamCapture(streamCapture, expBracketIdx);
}
+ } else {
+ streamCapture = true;
+ setSingleExpGainPreset(shotParams, expBracketIdx, 0, 0);
+ // Queue more frames initially
+ shotParams.set(ShotParameters::KEY_BURST, BRACKETING_STREAM_BUFFERS);
}
break;
}
case 'P':
{
- int msgType = CAMERA_MSG_COMPRESSED_IMAGE |
- CAMERA_MSG_RAW_IMAGE;
-#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
- msgType |= CAMERA_MSG_RAW_BURST;
-#endif
- gettimeofday(&picture_start, 0);
- if (!bufferSourceInput.get()) {
-#ifdef ANDROID_API_JB_OR_LATER
- bufferSourceInput = new BQ_BufferSourceInput(1234, camera);
-#else
- bufferSourceInput = new ST_BufferSourceInput(1234, camera);
-#endif
- bufferSourceInput->init();
- }
+ int msgType = CAMERA_MSG_COMPRESSED_IMAGE;
+ ShotParameters reprocParams;
+ gettimeofday(&picture_start, 0);
+ createBufferInputSource();
if (bufferSourceOutputThread.get() &&
bufferSourceOutputThread->hasBuffer())
{
- CameraParameters temp = params;
- // Set pipeline to capture 2592x1944 JPEG
- temp.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG);
- temp.setPictureSize(2592, 1944);
- if (hardwareActive) camera->setParameters(temp.flatten());
+ bufferSourceOutputThread->setStreamCapture(false, expBracketIdx);
+ if (hardwareActive) camera->setParameters(params.flatten());
if (bufferSourceInput.get()) {
buffer_info_t info = bufferSourceOutputThread->popBuffer();
- bufferSourceInput->setInput(info, pictureFormatArray[pictureFormat]);
- if (hardwareActive) camera->reprocess(msgType, String8());
+ bufferSourceInput->setInput(info, pictureFormatArray[pictureFormat], reprocParams);
+ if (hardwareActive) camera->reprocess(msgType, reprocParams.flatten());
}
}
break;
@@ -3957,6 +4172,7 @@ int setOutputDirPath(cmd_args_t *cmd_args, int restart_count) {
const char *config = cmd_args->script_file_name;
char dir_name[40];
size_t count = 0;
+ char *p;
// remove just the '.txt' part of the config
while ((config[count] != '.') && ((count + 1) < sizeof(dir_name))) {
@@ -3966,8 +4182,22 @@ int setOutputDirPath(cmd_args_t *cmd_args, int restart_count) {
strncpy(dir_name, config, count);
dir_name[count] = '\0';
+ p = dir_name;
+ while (*p != '\0') {
+ if (*p == '/') {
+ printf("SDCARD_PATH is not added to the output directory.\n");
+ // Needed when camera_test script is executed using the OTC
+ strcpy(output_dir_path, "");
+ break;
+ }
+ p++;
+ }
strcat(output_dir_path, dir_name);
+ if (camera_index == 1) {
+ strcat(output_dir_path, SECONDARY_SENSOR);
+ } else if (camera_index == 2) {
+ strcat(output_dir_path, S3D_SENSOR);
+ }
}
}
diff --git a/test/CameraHal/camera_test_script.cpp b/test/CameraHal/camera_test_script.cpp
index ccbefbc..21ed7e7 100644
--- a/test/CameraHal/camera_test_script.cpp
+++ b/test/CameraHal/camera_test_script.cpp
@@ -36,6 +36,7 @@ extern sp<BufferSourceInput> bufferSourceInput;
extern CameraParameters params;
extern ShotParameters shotParams;
extern bool shotConfigFlush;
+extern bool streamCapture;
extern bool recordingMode;
extern int camera_index;
extern int rotation;
@@ -168,6 +169,12 @@ int manC = 0;
extern int manualConv ;
extern int manualConvMin ;
extern int manualConvMax ;
+extern bool faceDetectToggle;
+extern unsigned int burstCount;
+
+/** Buffer source reset */
+extern bool bufferSourceInputReset;
+extern bool bufferSourceOutputReset;
void trim_script_cmd(char *cmd) {
char *nl, *cr;
@@ -207,6 +214,8 @@ int execute_functional_script(char *script) {
//int frameR = 20;
int frameRConst = 0;
int frameRRange = 0;
+ struct CameraInfo cameraInfo;
+ bool queueEmpty = true;
LOG_FUNCTION_NAME;
@@ -348,10 +357,6 @@ int execute_functional_script(char *script) {
} else {
stopPreview();
}
- if (bufferSourceOutputThread.get()) {
- bufferSourceOutputThread->requestExit();
- bufferSourceOutputThread.clear();
- }
break;
@@ -424,12 +429,18 @@ int execute_functional_script(char *script) {
resC = strtok(NULL, "x");
heightC = atoi(resC);
params.setPictureSize(widthC,heightC);
+ a = checkSupportedParamScriptResol(capture_Array, numcaptureSize,
+ widthC, heightC, &resol_index);
+ if (a > -1) captureSizeIDX = resol_index;
}
if ( hardwareActive ) {
camera->setParameters(params.flatten());
}
}
+
+ requestBufferSourceReset();
+
break;
case '6':
@@ -539,8 +550,15 @@ int execute_functional_script(char *script) {
printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
}
- if ( hardwareActive )
+ // Do not push new parameters while tap-out buffers are still queued
+ queueEmpty = true;
+ if ( bufferSourceOutputThread.get() ) {
+ if ( 0 < bufferSourceOutputThread->hasBuffer() ) {
+ queueEmpty = false;
+ }
+ }
+ if ( hardwareActive && queueEmpty ) {
camera->setParameters(params.flatten());
+ }
break;
case '-':
@@ -714,6 +732,8 @@ int execute_functional_script(char *script) {
if ( hardwareActive )
camera->setParameters(params.flatten());
+ requestBufferSourceReset();
+
break;
case 'K':
@@ -724,8 +744,10 @@ int execute_functional_script(char *script) {
break;
case 'F':
- if ( hardwareActive )
+ if ( hardwareActive ) {
camera->sendCommand(CAMERA_CMD_START_FACE_DETECTION, 0, 0);
+ faceDetectToggle = true;
+ }
break;
@@ -739,8 +761,12 @@ int execute_functional_script(char *script) {
case 'T':
- if ( hardwareActive )
+ if ( hardwareActive ) {
camera->sendCommand(CAMERA_CMD_STOP_FACE_DETECTION, 0, 0);
+ faceDetectToggle = false;
+ }
+
+ break;
case 'O':
params.set(KEY_GLBCE, (cmd+1));
@@ -752,16 +778,26 @@ int execute_functional_script(char *script) {
case 'u':
// HQ should always be in ldc-nsf
// if not HQ, then return the ipp to its previous state
- if( !strcmp(modevalues[capture_mode], "high-quality") ) {
+ if ( !strcmp((cmd + 1), "high-quality") ) {
ippIDX_old = ippIDX;
ippIDX = 3;
params.set(KEY_IPP, ipp_mode[ippIDX]);
params.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::FALSE);
+ previewRotation = 0;
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
} else if ( !strcmp((cmd + 1), "video-mode") ) {
params.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::TRUE);
+ camera->getCameraInfo(camera_index, &cameraInfo);
+ previewRotation = (360 - cameraInfo.orientation) % 360;
+ if (previewRotation >= 0 && previewRotation <= 360) {
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
+ }
+ printf("previewRotation: %d\n", previewRotation);
} else {
ippIDX = ippIDX_old;
params.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::FALSE);
+ previewRotation = 0;
+ params.set(KEY_SENSOR_ORIENTATION, previewRotation);
}
a = checkSupportedParamScript(modevalues, nummodevalues, cmd);
if (a > -1) {
@@ -770,8 +806,25 @@ int execute_functional_script(char *script) {
printf("\nNot supported parameter %s from sensor %d\n\n", cmd + 1, camera_index);
}
- if ( hardwareActive )
+ if ( hardwareActive ) {
+ if (previewRunning) {
+ stopPreview();
+ }
camera->setParameters(params.flatten());
+ // Get parameters from capabilities for the new capture mode
+ params = camera->getParameters();
+ getSizeParametersFromCapabilities();
+ getParametersFromCapabilities();
+ // Set a 30 fps frame rate and 12MP capture resolution, if available for the new capture mode.
+ // Otherwise fall back to index 0 of fps_const_str and capture_Array, respectively.
+ frameRateIDX = getDefaultParameter("30000,30000", constCnt, fps_const_str);
+ captureSizeIDX = getDefaultParameterResol("12MP", numcaptureSize, capture_Array);
+ params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, fps_const_str[frameRateIDX]);
+ params.setPictureSize(capture_Array[captureSizeIDX]->width, capture_Array[captureSizeIDX]->height);
+ camera->setParameters(params.flatten());
+ }
+
+ requestBufferSourceReset();
break;
@@ -833,6 +886,7 @@ int execute_functional_script(char *script) {
case '#':
params.set(KEY_TI_BURST, atoi(cmd + 1));
+ burstCount = atoi(cmd + 1);
if ( hardwareActive )
camera->setParameters(params.flatten());
@@ -1146,46 +1200,45 @@ int execute_functional_script(char *script) {
case 'p':
{
- int msgType = 0;
- const char *format = params.getPictureFormat();
+ int msgType = 0;
+ const char *format = params.getPictureFormat();
- if((NULL != format) && isRawPixelFormat(format)) {
- createBufferOutputSource();
- if (bufferSourceOutputThread.get()) {
- bufferSourceOutputThread->setBuffer();
- }
- } else if(strcmp(modevalues[capture_mode], "video-mode") == 0) {
- msgType = CAMERA_MSG_COMPRESSED_IMAGE |
- CAMERA_MSG_RAW_IMAGE;
+ if((0 == strcmp(modevalues[capture_mode], "video-mode")) &&
+ (0 != strcmp(videosnapshotstr, "true"))) {
+ printf("Video Snapshot is not supported\n");
+ } else if ( hardwareActive ) {
+ if((NULL != format) && isRawPixelFormat(format)) {
+ createBufferOutputSource();
+ if (bufferSourceOutputThread.get()) {
+ bufferSourceOutputThread->setBuffer(shotParams);
+ bufferSourceOutputThread->setStreamCapture(streamCapture, expBracketIdx);
+ }
+ } else if(strcmp(modevalues[capture_mode], "video-mode") == 0) {
+ msgType = CAMERA_MSG_COMPRESSED_IMAGE |
+ CAMERA_MSG_RAW_IMAGE;
#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
- msgType |= CAMERA_MSG_RAW_BURST;
+ msgType |= CAMERA_MSG_RAW_BURST;
#endif
- } else {
- msgType = CAMERA_MSG_POSTVIEW_FRAME |
- CAMERA_MSG_RAW_IMAGE_NOTIFY |
- CAMERA_MSG_COMPRESSED_IMAGE |
- CAMERA_MSG_SHUTTER;
+ } else {
+ msgType = CAMERA_MSG_POSTVIEW_FRAME |
+ CAMERA_MSG_RAW_IMAGE_NOTIFY |
+ CAMERA_MSG_COMPRESSED_IMAGE |
+ CAMERA_MSG_SHUTTER;
#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
- msgType |= CAMERA_MSG_RAW_BURST;
+ msgType |= CAMERA_MSG_RAW_BURST;
#endif
- }
+ }
- if((0 == strcmp(modevalues[capture_mode], "video-mode")) &&
- (0 != strcmp(videosnapshotstr, "true"))) {
- printf("Video Snapshot is not supported\n");
- } else {
gettimeofday(&picture_start, 0);
- if ( hardwareActive ) {
- ret = camera->setParameters(params.flatten());
- if ( ret != NO_ERROR ) {
- printf("Error returned while setting parameters");
- break;
- }
- ret = camera->takePictureWithParameters(msgType, shotParams.flatten());
- if ( ret != NO_ERROR ) {
- printf("Error returned while taking a picture");
- break;
- }
+ ret = camera->setParameters(params.flatten());
+ if ( ret != NO_ERROR ) {
+ printf("Error returned while setting parameters");
+ break;
+ }
+ ret = camera->takePictureWithParameters(msgType, shotParams.flatten());
+ if ( ret != NO_ERROR ) {
+ printf("Error returned while taking a picture");
+ break;
}
}
break;
@@ -1193,50 +1246,42 @@ int execute_functional_script(char *script) {
case 'S':
{
- createBufferOutputSource();
- if (bufferSourceOutputThread.get()) {
- if (bufferSourceOutputThread->toggleStreamCapture(expBracketIdx)) {
- expBracketIdx = BRACKETING_IDX_STREAM;
- setSingleExpGainPreset(shotParams, expBracketIdx, 0, 0);
- // Queue more frames initially
- shotParams.set(ShotParameters::KEY_BURST, BRACKETING_STREAM_BUFFERS);
- } else {
- expBracketIdx = BRACKETING_IDX_DEFAULT;
- setDefaultExpGainPreset(shotParams, expBracketIdx);
+ if (streamCapture) {
+ streamCapture = false;
+ expBracketIdx = BRACKETING_IDX_DEFAULT;
+ setDefaultExpGainPreset(shotParams, expBracketIdx);
+ // Stop streaming
+ if (bufferSourceOutputThread.get()) {
+ bufferSourceOutputThread->setStreamCapture(streamCapture, expBracketIdx);
}
+ } else {
+ streamCapture = true;
+ expBracketIdx = BRACKETING_IDX_STREAM;
+ setSingleExpGainPreset(shotParams, expBracketIdx, 0, 0);
+ // Queue more frames initially
+ shotParams.set(ShotParameters::KEY_BURST, BRACKETING_STREAM_BUFFERS);
}
break;
}
case 'P':
{
- int msgType = CAMERA_MSG_COMPRESSED_IMAGE |
- CAMERA_MSG_RAW_IMAGE;
-#ifdef OMAP_ENHANCEMENT_BURST_CAPTURE
- msgType |= CAMERA_MSG_RAW_BURST;
-#endif
+ int msgType = CAMERA_MSG_COMPRESSED_IMAGE;
+ ShotParameters reprocParams;
+
gettimeofday(&picture_start, 0);
- if (!bufferSourceInput.get()) {
-#ifdef ANDROID_API_JB_OR_LATER
- bufferSourceInput = new BQ_BufferSourceInput(1234, camera);
-#else
- bufferSourceInput = new ST_BufferSourceInput(1234, camera);
-#endif
- bufferSourceInput->init();
- }
+
+ createBufferInputSource();
if (bufferSourceOutputThread.get() &&
bufferSourceOutputThread->hasBuffer())
{
- CameraParameters temp = params;
- // Set pipeline to capture 2592x1944 JPEG
- temp.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG);
- temp.setPictureSize(2592, 1944);
- if (hardwareActive) camera->setParameters(temp.flatten());
+ bufferSourceOutputThread->setStreamCapture(false, expBracketIdx);
+ if (hardwareActive) camera->setParameters(params.flatten());
if (bufferSourceInput.get()) {
buffer_info_t info = bufferSourceOutputThread->popBuffer();
- bufferSourceInput->setInput(info, params.getPictureFormat());
+ bufferSourceInput->setInput(info, params.getPictureFormat(), reprocParams);
if (hardwareActive) camera->reprocess(msgType, reprocParams.flatten());
}
}
@@ -1491,6 +1536,17 @@ int checkSupportedParamScriptResol(param_Array **array, int size, char *param, i
return -1;
}
+int checkSupportedParamScriptResol(param_Array **array, int size,
+ int width, int height, int *num) {
+ for (int i=0; i<size; i++) {
+ if ((width == array[i]->width) && (height == array[i]->height)) {
+ *num = i;
+ return 0;
+ }
+ }
+ return -1;
+}
+
int checkSupportedParamScriptfpsConst(int *array, int size, char *param, int *num) {
for (int i=0; i<size; i++) {
if (atoi(param + 1) == array[i]) {
diff --git a/test/CameraHal/camera_test_surfacetexture.cpp b/test/CameraHal/camera_test_surfacetexture.cpp
index 1108caf..af9c904 100644
--- a/test/CameraHal/camera_test_surfacetexture.cpp
+++ b/test/CameraHal/camera_test_surfacetexture.cpp
@@ -8,6 +8,7 @@
#include <pthread.h>
#include <string.h>
#include <climits>
+#include <math.h>
#include <gui/SurfaceTexture.h>
#include <gui/SurfaceTextureClient.h>
@@ -51,9 +52,9 @@
//temporarily define format here
#define HAL_PIXEL_FORMAT_TI_NV12 0x100
-#define HAL_PIXEL_FORMAT_TI_NV12_1D 0x102
#define HAL_PIXEL_FORMAT_TI_Y8 0x103
#define HAL_PIXEL_FORMAT_TI_Y16 0x104
+#define HAL_PIXEL_FORMAT_TI_UYVY 0x105
using namespace android;
@@ -70,10 +71,11 @@ static size_t calcBufSize(int format, int width, int height)
int buf_size;
switch (format) {
- case HAL_PIXEL_FORMAT_TI_NV12_1D:
+ case HAL_PIXEL_FORMAT_TI_NV12:
buf_size = width * height * 3 /2;
break;
case HAL_PIXEL_FORMAT_TI_Y16:
+ case HAL_PIXEL_FORMAT_TI_UYVY:
buf_size = width * height * 2;
break;
// add more formats later
@@ -85,22 +87,229 @@ static size_t calcBufSize(int format, int width, int height)
return buf_size;
}
+static unsigned int calcOffset(int format, unsigned int width, unsigned int top, unsigned int left)
+{
+ unsigned int bpp;
+
+ switch (format) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ bpp = 1;
+ break;
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ bpp = 2;
+ break;
+ // add more formats later
+ default:
+ bpp = 1;
+ break;
+ }
+
+ // Byte offset: rows above the crop times the row pitch in bytes
+ // (width * bpp), plus the horizontal pixel offset in bytes.
+ return (top * width + left) * bpp;
+}
+
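To make the byte math concrete: for a UYVY buffer (bpp = 2) that is 640 pixels wide, a crop starting at top = 4, left = 8 begins (4 * 640 + 8) * 2 = 5136 bytes into the buffer; for NV12 (bpp = 1) the same crop starts at 2568 bytes.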
static int getHalPixFormat(const char *format)
{
- int pixformat = HAL_PIXEL_FORMAT_TI_NV12_1D;
+ int pixformat = HAL_PIXEL_FORMAT_TI_NV12;
if ( NULL != format ) {
if ( strcmp(format, CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0 ) {
pixformat = HAL_PIXEL_FORMAT_TI_Y16;
} else if ( strcmp(format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ) {
- pixformat = HAL_PIXEL_FORMAT_TI_NV12_1D;
+ pixformat = HAL_PIXEL_FORMAT_TI_NV12;
+ } else if ( strcmp(format, CameraParameters::PIXEL_FORMAT_YUV422I) == 0 ) {
+ pixformat = HAL_PIXEL_FORMAT_TI_UYVY;
} else {
- pixformat = HAL_PIXEL_FORMAT_TI_NV12_1D;
+ pixformat = HAL_PIXEL_FORMAT_TI_NV12;
}
}
return pixformat;
}
+static int getUsageFromANW(int format)
+{
+ int usage = GRALLOC_USAGE_SW_READ_RARELY |
+ GRALLOC_USAGE_SW_WRITE_NEVER;
+
+ switch (format) {
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ // This usage flag indicates to gralloc we want the
+ // buffers to come from system heap
+ usage |= GRALLOC_USAGE_PRIVATE_0;
+ break;
+ default:
+ // No special flags needed
+ break;
+ }
+ return usage;
+}
+
+static status_t writeCroppedNV12(unsigned int offset,
+ unsigned int stride,
+ unsigned int bufWidth,
+ unsigned int bufHeight,
+ const Rect &crop,
+ int fd,
+ unsigned char *buffer)
+{
+ unsigned char *luma = NULL, *chroma = NULL, *src = NULL;
+ unsigned int uvoffset;
+ int write_size;
+
+ if (!buffer || !crop.isValid()) {
+ return BAD_VALUE;
+ }
+
+ src = buffer;
+ // offset to beginning of uv plane
+ uvoffset = stride * bufHeight;
+ // offset to beginning of valid region of uv plane
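+ // (the chroma plane has half as many rows, so only the row component
+ // of the luma offset is halved; the horizontal byte offset is kept
+ // because interleaved U/V bytes line up with the luma columns)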
+ uvoffset += (offset - (offset % stride)) / 2 + (offset % stride);
+
+ // start of valid luma region
+ luma = src + offset;
+ // start of valid chroma region
+ chroma = src + uvoffset;
+
+ // write luma line x line
+ unsigned int height = crop.height();
+ unsigned int width = crop.width();
+ write_size = width;
+ for (unsigned int i = 0; i < height; i++) {
+ if (write_size != write(fd, luma, width)) {
+ printf("Bad Write error (%d)%s\n",
+ errno, strerror(errno));
+ return UNKNOWN_ERROR;
+ }
+ luma += stride;
+ }
+
+ // write chroma line x line
+ height /= 2;
+ write_size = width;
+ for (unsigned int i = 0; i < height; i++) {
+ if (write_size != write(fd, chroma, width)) {
+ printf("Bad Write error (%d)%s\n",
+ errno, strerror(errno));
+ return UNKNOWN_ERROR;
+ }
+ chroma += stride;
+ }
+
+ return NO_ERROR;
+}
+
+static status_t writeCroppedUYVY(unsigned int offset,
+ unsigned int stride,
+ unsigned int bufWidth,
+ unsigned int bufHeight,
+ const Rect &crop,
+ int fd,
+ unsigned char *buffer)
+{
+ unsigned char *src = NULL;
+ int write_size;
+
+ if (!buffer || !crop.isValid()) {
+ return BAD_VALUE;
+ }
+
+ src = buffer + offset;
+ unsigned int height = crop.height();
+ unsigned int width = crop.width();
+ write_size = width*2;
+ for (unsigned int i = 0; i < height; i++) {
+ if (write_size != write(fd, src, width*2)) {
+ printf("Bad Write error (%d)%s\n",
+ errno, strerror(errno));
+ return UNKNOWN_ERROR;
+ }
+ src += stride*2;
+ }
+
+ return NO_ERROR;
+}
+
+static status_t copyCroppedNV12(unsigned int offset,
+ unsigned int strideSrc,
+ unsigned int strideDst,
+ unsigned int bufWidth,
+ unsigned int bufHeight,
+ const Rect &crop,
+ void *bufferSrc,
+ void *bufferDst)
+{
+ unsigned char *lumaSrc = NULL, *chromaSrc = NULL;
+ unsigned char *lumaDst = NULL, *chromaDst = NULL;
+ unsigned int uvoffset;
+
+ if (!bufferSrc || !bufferDst || !crop.isValid()) {
+ return BAD_VALUE;
+ }
+
+ uvoffset = strideSrc * bufHeight; // full source luma plane precedes chroma
+ uvoffset += (offset - (offset % strideSrc)) / 2 + (offset % strideSrc);
+
+ lumaSrc = static_cast<unsigned char *>(bufferSrc) + offset;
+ chromaSrc = static_cast<unsigned char *>(bufferSrc) + uvoffset;
+
+ unsigned int height = crop.height();
+ unsigned int width = crop.width();
+
+ uvoffset = strideDst * height;
+
+ lumaDst = static_cast<unsigned char *>(bufferDst);
+ chromaDst = static_cast<unsigned char *>(bufferDst) + uvoffset;
+
+ for (unsigned int i = 0; i < height; i++) {
+ memcpy(lumaDst, lumaSrc, width);
+ lumaSrc += strideSrc;
+ lumaDst += strideDst;
+ }
+
+ height /= 2;
+ for (unsigned int i = 0; i < height; i++) {
+ memcpy(chromaDst, chromaSrc, width);
+ chromaSrc += strideSrc;
+ chromaDst += strideDst;
+ }
+
+ return NO_ERROR;
+}
+
+static status_t copyCroppedPacked16(unsigned int offset,
+ unsigned int stride,
+ unsigned int bufWidth,
+ unsigned int bufHeight,
+ const Rect &crop,
+ void *bufferSrc,
+ void *bufferDst)
+{
+ unsigned char *src = NULL, *dst = NULL;
+
+ if (!bufferSrc || !bufferDst || !crop.isValid()) {
+ return BAD_VALUE;
+ }
+
+ src = static_cast<unsigned char *>(bufferSrc) + offset;
+ dst = static_cast<unsigned char *>(bufferDst);
+
+ unsigned int height = crop.height();
+ unsigned int width = crop.width();
+ for (unsigned int i = 0; i < height; i++) {
+ memcpy(dst, src, width*2);
+ src += stride*2;
+ dst += width*2;
+ }
+
+ return NO_ERROR;
+}
+
void GLSurface::initialize(int display) {
mEglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
ASSERT(EGL_SUCCESS == eglGetError());
@@ -312,6 +521,17 @@ void SurfaceTextureBase::deinit() {
mST.clear();
}
+void SurfaceTextureBase::getId(const char **name) {
+ sp<ANativeWindow> windowTapOut = mSTC;
+
+ *name = NULL;
+ if (windowTapOut.get()) {
+ windowTapOut->perform(windowTapOut.get(), NATIVE_WINDOW_GET_ID, name);
+ }
+
+ windowTapOut.clear();
+}
+
// SurfaceTexture with GL specific
void SurfaceTextureGL::initialize(int display, int tex_id) {
@@ -405,9 +625,11 @@ void SurfaceTextureGL::drawTexture() {
}
// buffer source stuff
-void BufferSourceThread::handleBuffer(sp<GraphicBuffer> &graphic_buffer, uint8_t *buffer, unsigned int count) {
+void BufferSourceThread::handleBuffer(sp<GraphicBuffer> &graphic_buffer, uint8_t *buffer,
+ unsigned int count, const Rect &crop) {
int size;
buffer_info_t info;
+ unsigned int offset = 0;
int fd = -1;
char fn[256];
@@ -434,11 +656,18 @@ void BufferSourceThread::handleBuffer(sp<GraphicBuffer> &graphic_buffer, uint8_t
info.height = graphic_buffer->getHeight();
info.format = graphic_buffer->getPixelFormat();
info.buf = graphic_buffer;
+ info.crop = crop;
{
Mutex::Autolock lock(mReturnedBuffersMutex);
if (mReturnedBuffers.size() >= kReturnedBuffersMaxCapacity) mReturnedBuffers.removeAt(0);
}
+
+ // re-calculate size and offset
+ size = calcBufSize((int) graphic_buffer->getPixelFormat(), crop.width(), crop.height());
+ offset = calcOffset((int) graphic_buffer->getPixelFormat(), info.width, crop.top, crop.left);
+
+ info.offset = offset;
mReturnedBuffers.add(info);
// Do not write buffer to file if we are streaming capture
@@ -448,11 +677,21 @@ void BufferSourceThread::handleBuffer(sp<GraphicBuffer> &graphic_buffer, uint8_t
sprintf(fn, "/sdcard/img%03d.raw", count);
fd = open(fn, O_CREAT | O_WRONLY | O_TRUNC, 0777);
if (fd >= 0) {
- if (size != write(fd, buffer, size)) {
+ if (HAL_PIXEL_FORMAT_TI_NV12 == info.format) {
+ writeCroppedNV12(offset, info.width, info.width, info.height,
+ crop, fd, buffer);
+ } else if (HAL_PIXEL_FORMAT_TI_UYVY == info.format) {
+ writeCroppedUYVY(offset, info.width, info.width, info.height,
+ crop, fd, buffer);
+ } else if (size != write(fd, buffer + offset, size)) {
printf("Bad Write int a %s error (%d)%s\n", fn, errno, strerror(errno));
}
- printf("%s: buffer=%08X, size=%d stored at %s\n",
- __FUNCTION__, (int)buffer, info.size, fn);
+ printf("%s: buffer=%08X, size=%d stored at %s\n"
+ "\tRect: top[%d] left[%d] right[%d] bottom[%d] width[%d] height[%d] offset[%d] stride[%d]\n",
+ __FUNCTION__, (int)buffer, size, fn,
+ crop.top, crop.left, crop.right, crop.bottom,
+ crop.width(), crop.height(),
+ offset, info.width);
close(fd);
} else {
printf("error opening or creating %s\n", fn);
@@ -460,83 +699,172 @@ void BufferSourceThread::handleBuffer(sp<GraphicBuffer> &graphic_buffer, uint8_t
}
}
-void BufferSourceInput::setInput(buffer_info_t bufinfo, const char *format) {
+Rect BufferSourceThread::getCrop(sp<GraphicBuffer> &graphic_buffer, const float *mtx) {
+ Rect crop(graphic_buffer->getWidth(), graphic_buffer->getHeight());
+
+ // calculate crop rectangle from transformation matrix
+ float sx, sy, tx, ty, h, w;
+ unsigned int rect_x, rect_y;
+ /* sx, 0, 0, 0,
+ 0, sy, 0, 0,
+ 0, 0, 1, 0,
+ tx, ty, 0, 1 */
+
+ sx = mtx[0];
+ sy = mtx[5];
+ tx = mtx[12];
+ ty = mtx[13];
+ w = float(graphic_buffer->getWidth());
+ h = float(graphic_buffer->getHeight());
+
+ unsigned int bottom = (unsigned int)(h - (ty * h + 1));
+ unsigned int left = (unsigned int)(tx * w - 1);
+ rect_y = (unsigned int)(fabsf(sy) * h);
+ rect_x = (unsigned int)(fabsf(sx) * w);
+
+ // handle v-flip
+ if (sy < 0.0f) {
+ bottom = h - bottom;
+ }
+
+ // handle h-flip
+ if (sx < 0.0f) {
+ left = w - left;
+ }
+
+ unsigned int top = bottom - rect_y;
+ unsigned int right = left + rect_x;
+
+ Rect updatedCrop(left, top, right, bottom);
+ if (updatedCrop.isValid()) {
+ crop = updatedCrop;
+ } else {
+ printf("Crop for buffer %d is not valid: "
+ "left=%u, top=%u, right=%u, bottom=%u. "
+ "Will use default.\n",
+ mCounter,
+ left, top, right, bottom);
+ }
+
+ return crop;
+}
+
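For reference, the matrix decoded above is the standard SurfaceTexture texture transform: |sx| and |sy| give the crop extent as a fraction of the buffer, tx/ty its offset, and a negative scale indicates a horizontal or vertical flip; the ±1 adjustments appear to compensate for the edge inset SurfaceTexture applies to avoid filtering past the crop. If the recovered rectangle is degenerate, the code deliberately falls back to the full-buffer crop.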
+void BufferSourceInput::setInput(buffer_info_t bufinfo, const char *format, ShotParameters &params) {
ANativeWindowBuffer* anb;
GraphicBufferMapper &mapper = GraphicBufferMapper::get();
- void *data = NULL;
- void *input = NULL;
- int pixformat = HAL_PIXEL_FORMAT_TI_NV12_1D;
+ int pixformat = HAL_PIXEL_FORMAT_TI_NV12;
+ size_t tapInMinUndequeued = 0;
int aligned_width, aligned_height;
- aligned_width = ALIGN_UP(bufinfo.width, ALIGN_WIDTH);
- aligned_height = bufinfo.height; //aligned_width * bufinfo.height / bufinfo.width;
- // aligned_height = ALIGN_DOWN(aligned_height, ALIGN_HEIGHT);
- printf("aligned width: %d height: %d", aligned_width, aligned_height);
- Rect bounds(bufinfo.width, bufinfo.height);
+ pixformat = bufinfo.format;
- if (mWindowTapIn.get() == 0) {
- return;
+ // Aligning is not needed for Bayer
+ if ( ( pixformat == HAL_PIXEL_FORMAT_TI_Y16 ) ||
+ ( pixformat == HAL_PIXEL_FORMAT_TI_UYVY ) ) {
+ aligned_width = bufinfo.crop.right - bufinfo.crop.left;
+ } else {
+ aligned_width = ALIGN_UP(bufinfo.crop.right - bufinfo.crop.left, ALIGN_WIDTH);
}
+ aligned_height = bufinfo.crop.bottom - bufinfo.crop.top;
+ printf("aligned width: %d height: %d \n", aligned_width, aligned_height);
- if ( NULL != format ) {
- pixformat = getHalPixFormat(format);
+ if (mWindowTapIn.get() == 0) {
+ return;
}
native_window_set_usage(mWindowTapIn.get(),
- GRALLOC_USAGE_SW_READ_RARELY |
- GRALLOC_USAGE_SW_WRITE_NEVER);
- native_window_set_buffer_count(mWindowTapIn.get(), 1);
+ getUsageFromANW(pixformat));
+ mWindowTapIn->perform(mWindowTapIn.get(),
+ NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+ &tapInMinUndequeued);
+ native_window_set_buffer_count(mWindowTapIn.get(), tapInMinUndequeued);
native_window_set_buffers_geometry(mWindowTapIn.get(),
aligned_width, aligned_height, bufinfo.format);
+
+ // If the buffer dimensions match the aligned dimensions, the buffer can
+ // be queued directly to the tap-in surface. If they differ from the
+ // aligned ones, the buffer must be copied into our own buffer so that
+ // its stride is correct.
+ if ((aligned_width != bufinfo.width) || (aligned_height != bufinfo.height) ||
+ ( pixformat == HAL_PIXEL_FORMAT_TI_Y16 ) ||
+ ( pixformat == HAL_PIXEL_FORMAT_TI_UYVY) ) {
+ void *dest[3] = { 0 };
+ void *src[3] = { 0 };
+ Rect bounds(aligned_width, aligned_height);
+
#ifdef ANDROID_API_JB_MR1_OR_LATER
- mWindowTapIn->dequeueBuffer_DEPRECATED(mWindowTapIn.get(), &anb);
+ mWindowTapIn->dequeueBuffer_DEPRECATED(mWindowTapIn.get(), &anb);
#else
- mWindowTapIn->dequeueBuffer(mWindowTapIn.get(), &anb);
+ mWindowTapIn->dequeueBuffer(mWindowTapIn.get(), &anb);
#endif
- mapper.lock(anb->handle, GRALLOC_USAGE_SW_READ_RARELY, bounds, &data);
- // copy buffer to input buffer if available
- if (bufinfo.buf.get()) {
- bufinfo.buf->lock(GRALLOC_USAGE_SW_READ_RARELY, &input);
- }
- if (input) {
- if ( HAL_PIXEL_FORMAT_TI_Y16 == pixformat ) {
- int size = calcBufSize(pixformat, bufinfo.width, bufinfo.height);
- memcpy(data, input, size);
- } else {
- if (bufinfo.width == aligned_width) {
- memcpy(data, input, bufinfo.size);
- } else {
- // need to copy line by line to adjust for stride
- uint8_t *dst = (uint8_t*) data;
- uint8_t *src = (uint8_t*) input;
- // hrmm this copy only works for NV12 and YV12
- // copy Y first
- for (int i = 0; i < aligned_height; i++) {
- memcpy(dst, src, bufinfo.width);
- dst += aligned_width;
- src += bufinfo.width;
- }
- // copy UV plane
- for (int i = 0; i < (aligned_height / 2); i++) {
- memcpy(dst, src, bufinfo.width);
- dst += aligned_width ;
- src += bufinfo.width ;
- }
+ mapper.lock(anb->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, dest);
+ // copy buffer to input buffer if available
+ if (bufinfo.buf.get()) {
+ bufinfo.buf->lock(GRALLOC_USAGE_SW_READ_OFTEN, src);
+ }
+ if (src[0]) {
+ switch (pixformat) {
+ case HAL_PIXEL_FORMAT_TI_Y16:
+ case HAL_PIXEL_FORMAT_TI_UYVY:
+ copyCroppedPacked16(bufinfo.offset,
+ bufinfo.width,
+ bufinfo.width,
+ bufinfo.height,
+ bufinfo.crop,
+ src[0],
+ dest[0]);
+ break;
+ case HAL_PIXEL_FORMAT_TI_NV12:
+ copyCroppedNV12(bufinfo.offset,
+ bufinfo.width,
+ aligned_width,
+ bufinfo.width,
+ bufinfo.height,
+ bufinfo.crop,
+ src[0],
+ dest[0]);
+ break;
+ default:
+ printf("Pixel format 0x%x not supported\n", pixformat);
+ exit(1);
+ break;
}
}
- }
- if (bufinfo.buf.get()) {
- bufinfo.buf->unlock();
- }
+ if (bufinfo.buf.get()) {
+ bufinfo.buf->unlock();
+ }
- mapper.unlock(anb->handle);
+ mapper.unlock(anb->handle);
+ } else {
+ mWindowTapIn->perform(mWindowTapIn.get(), NATIVE_WINDOW_ADD_BUFFER_SLOT, &bufinfo.buf);
+ anb = bufinfo.buf->getNativeBuffer();
+ }
#ifdef ANDROID_API_JB_MR1_OR_LATER
mWindowTapIn->queueBuffer_DEPRECATED(mWindowTapIn.get(), anb);
#else
mWindowTapIn->queueBuffer(mWindowTapIn.get(), anb);
#endif
+
+#ifndef ANDROID_API_JB_OR_LATER
+ {
+ sp<ANativeWindow> windowTapIn = mWindowTapIn;
+ const char* id = NULL;
+
+ if (windowTapIn.get()) {
+ windowTapIn->perform(windowTapIn.get(), NATIVE_WINDOW_GET_ID, &id);
+ }
+
+ if (id) {
+ params.set(KEY_TAP_IN_SURFACE, id);
+ } else {
+ params.remove(KEY_TAP_IN_SURFACE);
+ }
+
+ windowTapIn.clear();
+ }
+#endif
}
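The stride handling here matters because gralloc allocates buffers with a hardware-aligned row pitch: if, say, ALIGN_WIDTH were 32 (its value is defined elsewhere in this test), a 1000-pixel-wide NV12 crop would be padded to a 1024-byte luma stride, so queueing the producer's buffer directly would misalign every row on the consumer side. Bayer and UYVY skip the alignment above and always take the copy path instead.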
void BufferSourceThread::showMetadata(sp<IMemory> data) {
diff --git a/test/CameraHal/camera_test_surfacetexture.h b/test/CameraHal/camera_test_surfacetexture.h
index 03a0fa5..395e82d 100644
--- a/test/CameraHal/camera_test_surfacetexture.h
+++ b/test/CameraHal/camera_test_surfacetexture.h
@@ -97,6 +97,7 @@ public:
void initialize(int tex_id, EGLenum tex_target = EGL_NONE);
void deinit();
+ void getId(const char **name);
virtual sp<SurfaceTexture> getST();
@@ -133,8 +134,14 @@ public:
mSurfaceTexture->setSynchronousMode(true);
mFW = new FrameWaiter();
mSurfaceTexture->setFrameAvailableListener(mFW);
+#ifndef ANDROID_API_JB_OR_LATER
+ mCamera->setBufferSource(NULL, mSurfaceTexture);
+#endif
}
virtual ~ST_BufferSourceThread() {
+#ifndef ANDROID_API_JB_OR_LATER
+ mCamera->releaseBufferSource(NULL, mSurfaceTexture);
+#endif
mSurfaceTextureBase->deinit();
delete mSurfaceTextureBase;
}
@@ -144,6 +151,7 @@ public:
mFW->waitForFrame();
if (!mDestroying) {
+ float mtx[16] = {0.0};
mSurfaceTexture->updateTexImage();
printf("=== Metadata for buffer %d ===\n", mCounter);
#ifndef ANDROID_API_JB_OR_LATER
@@ -151,7 +159,10 @@ public:
#endif
printf("\n");
graphic_buffer = mSurfaceTexture->getCurrentBuffer();
- mDeferThread->add(graphic_buffer, mCounter++);
+ mSurfaceTexture->getTransformMatrix(mtx);
+ Rect crop = getCrop(graphic_buffer, mtx);
+
+ mDeferThread->add(graphic_buffer, crop, mCounter++);
restartCapture();
return true;
}
@@ -165,10 +176,18 @@ public:
mFW->onFrameAvailable();
}
- virtual void setBuffer() {
-#ifndef ANDROID_API_JB_OR_LATER
- mCamera->setBufferSource(NULL, mSurfaceTexture);
-#endif
+ virtual void setBuffer(android::ShotParameters &params) {
+ {
+ const char* id = NULL;
+
+ mSurfaceTextureBase->getId(&id);
+
+ if (id) {
+ params.set(KEY_TAP_OUT_SURFACES, id);
+ } else {
+ params.remove(KEY_TAP_OUT_SURFACES);
+ }
+ }
}
private:
@@ -182,29 +201,32 @@ public:
ST_BufferSourceInput(int tex_id, sp<Camera> camera) :
BufferSourceInput(camera), mTexId(tex_id) {
mSurfaceTexture = new SurfaceTextureBase();
- }
- virtual ~ST_BufferSourceInput() {
- delete mSurfaceTexture;
- }
-
- virtual void init() {
sp<SurfaceTexture> surface_texture;
mSurfaceTexture->initialize(mTexId);
surface_texture = mSurfaceTexture->getST();
surface_texture->setSynchronousMode(true);
mWindowTapIn = new SurfaceTextureClient(surface_texture);
- }
-
- virtual void setInput(buffer_info_t bufinfo, const char *format) {
- mSurfaceTexture->getST()->setDefaultBufferSize(bufinfo.width, bufinfo.height);
- BufferSourceInput::setInput(bufinfo, format);
#ifndef ANDROID_API_JB_OR_LATER
mCamera->setBufferSource(mSurfaceTexture->getST(), NULL);
#else
mCamera->setBufferSource(mSurfaceTexture->getST()->getBufferQueue(), NULL);
#endif
}
+ virtual ~ST_BufferSourceInput() {
+#ifndef ANDROID_API_JB_OR_LATER
+ mCamera->releaseBufferSource(mSurfaceTexture->getST(), NULL);
+#else
+ mCamera->releaseBufferSource(mSurfaceTexture->getST()->getBufferQueue(), NULL);
+#endif
+ delete mSurfaceTexture;
+ }
+
+ virtual void setInput(buffer_info_t bufinfo, const char *format) {
+ android::ShotParameters params;
+ mSurfaceTexture->getST()->setDefaultBufferSize(bufinfo.width, bufinfo.height);
+ BufferSourceInput::setInput(bufinfo, format, params);
+ }
private:
int mTexId;