summaryrefslogtreecommitdiffstats
path: root/camera/V4LCameraAdapter
diff options
context:
space:
mode:
authorKyle Repinski <repinski23@gmail.com>2015-01-07 08:39:28 -0600
committerZiyan <jaraidaniel@gmail.com>2015-04-11 20:24:52 +0200
commitc5f4358c48d6e50b2affb3ad6c2c0f59546f5b04 (patch)
tree74070cb6d50ec822d3ef7acdaa390cf2f94c4097 /camera/V4LCameraAdapter
parentc52c76fa1593f374173a818b4de5bd7c51903fbc (diff)
downloaddevice_samsung_tuna-c5f4358c48d6e50b2affb3ad6c2c0f59546f5b04.zip
device_samsung_tuna-c5f4358c48d6e50b2affb3ad6c2c0f59546f5b04.tar.gz
device_samsung_tuna-c5f4358c48d6e50b2affb3ad6c2c0f59546f5b04.tar.bz2
camera: Update camera HAL.
OmapZoom p-jb-release branch with 'CameraHal: Camera Capabilities query update' reverted, as well as a bunch of stuff ifdef'd out. Needs a lot of work still. At this point it's a regression, but it has to be done.
Diffstat (limited to 'camera/V4LCameraAdapter')
-rw-r--r--camera/V4LCameraAdapter/V4LCameraAdapter.cpp1165
-rw-r--r--camera/V4LCameraAdapter/V4LCapabilities.cpp346
2 files changed, 1282 insertions, 229 deletions
diff --git a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
index c365023..29c71c7 100644
--- a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
+++ b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -25,6 +25,7 @@
#include "V4LCameraAdapter.h"
#include "CameraHal.h"
#include "TICameraParameters.h"
+#include "DebugUtils.h"
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
@@ -37,6 +38,8 @@
#include <sys/select.h>
#include <linux/videodev.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
#include <cutils/properties.h>
#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
@@ -44,28 +47,250 @@ static int mDebugFps = 0;
#define Q16_OFFSET 16
-#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);}
+#define HERE(Msg) {CAMHAL_LOGEB("--=== %s===--\n", Msg);}
-namespace android {
-
-#undef LOG_TAG
-///Maintain a separate tag for V4LCameraAdapter logs to isolate issues OMX specific
-#define LOG_TAG "CameraHAL"
+namespace Ti {
+namespace Camera {
//frames skipped before recalculating the framerate
#define FPS_PERIOD 30
-Mutex gAdapterLock;
-const char *device = DEVICE;
+//define this macro to save first few raw frames when starting the preview.
+//#define SAVE_RAW_FRAMES 1
+//#define DUMP_CAPTURE_FRAME 1
+//#define PPM_PER_FRAME_CONVERSION 1
+
+//Proto Types
+static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size );
+static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height );
+static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height );
+
+android::Mutex gV4LAdapterLock;
+char device[15];
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
-status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
+/*--------------------V4L wrapper functions -------------------------------*/
+status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
+ status_t ret = NO_ERROR;
+ errno = 0;
+
+ do {
+ ret = ioctl (fd, req, argp);
+ }while (-1 == ret && EINTR == errno);
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lInitMmap(int& count) {
+ status_t ret = NO_ERROR;
+
+ //First allocate adapter internal buffers at V4L level for USB Cam
+ //These are the buffers from which we will copy the data into overlay buffers
+ /* Check if camera can handle NB_BUFFER buffers */
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
+ mVideoInfo->rb.count = count;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+ return ret;
+ }
+
+ count = mVideoInfo->rb.count;
+ for (int i = 0; i < count; i++) {
+
+ memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->mem[i] = mmap (NULL,
+ mVideoInfo->buf.length,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ mCameraHandle,
+ mVideoInfo->buf.m.offset);
+
+ CAMHAL_LOGVB(" mVideoInfo->mem[%d]=%p ; mVideoInfo->buf.length = %d", i, mVideoInfo->mem[i], mVideoInfo->buf.length);
+ if (mVideoInfo->mem[i] == MAP_FAILED) {
+ CAMHAL_LOGEB("Unable to map buffer [%d]. (%s)", i, strerror(errno));
+ return -1;
+ }
+ }
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lInitUsrPtr(int& count) {
+ status_t ret = NO_ERROR;
+
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_USERPTR;
+ mVideoInfo->rb.count = count;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed for USERPTR: %s", strerror(errno));
+ return ret;
+ }
+
+ count = mVideoInfo->rb.count;
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lStartStreaming () {
+ status_t ret = NO_ERROR;
+ enum v4l2_buf_type bufType;
+
+ if (!mVideoInfo->isStreaming) {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
+ return ret;
+ }
+ mVideoInfo->isStreaming = true;
+ }
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lStopStreaming (int nBufferCount) {
+ status_t ret = NO_ERROR;
+ enum v4l2_buf_type bufType;
+
+ if (mVideoInfo->isStreaming) {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
+ goto EXIT;
+ }
+ mVideoInfo->isStreaming = false;
+
+ /* Unmap buffers */
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+ for (int i = 0; i < nBufferCount; i++) {
+ if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0) {
+ CAMHAL_LOGEA("munmap() failed");
+ }
+ }
+
+ //free the memory allocated during REQBUFS, by setting the count=0
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
+ mVideoInfo->rb.count = 0;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+ goto EXIT;
+ }
+ }
+EXIT:
+ return ret;
+}
+
+status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_format) {
+ status_t ret = NO_ERROR;
+
+ mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ ret = v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_G_FMT Failed: %s", strerror(errno));
+ }
+
+ mVideoInfo->width = width;
+ mVideoInfo->height = height;
+ mVideoInfo->framesizeIn = (width * height << 1);
+ mVideoInfo->formatIn = DEFAULT_PIXEL_FORMAT;
+
+ mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->format.fmt.pix.width = width;
+ mVideoInfo->format.fmt.pix.height = height;
+ mVideoInfo->format.fmt.pix.pixelformat = pix_format;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_S_FMT Failed: %s", strerror(errno));
+ return ret;
+ }
+ v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
+ CAMHAL_LOGDB("VIDIOC_G_FMT : WxH = %dx%d", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height);
+ return ret;
+}
+
+status_t V4LCameraAdapter::restartPreview ()
{
- LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+ int width = 0;
+ int height = 0;
+ struct v4l2_streamparm streamParams;
+
+ //configure for preview size and pixel format.
+ mParams.getPreviewSize(&width, &height);
+
+ ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+
+ ret = v4lInitMmap(mPreviewBufferCount);
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lInitMmap Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+
+ //set frame rate
+ streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
+ streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
+ streamParams.parm.capture.timeperframe.denominator = FPS_PERIOD;
+ streamParams.parm.capture.timeperframe.numerator= 1;
+ ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_S_PARM Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+
+ for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
+ }
+ nQueued++;
+ }
+ ret = v4lStartStreaming();
+ CAMHAL_LOGDA("Ready for preview....");
+EXIT:
+ return ret;
+}
+
+/*--------------------Camera Adapter Functions-----------------------------*/
+status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
+{
char value[PROPERTY_VALUE_MAX];
+
+ LOG_FUNCTION_NAME;
property_get("debug.camera.showfps", value, "0");
mDebugFps = atoi(value);
@@ -73,115 +298,129 @@ status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
// Allocate memory for video info structure
mVideoInfo = (struct VideoInfo *) calloc (1, sizeof (struct VideoInfo));
- if(!mVideoInfo)
- {
- return NO_MEMORY;
- }
+ if(!mVideoInfo) {
+ ret = NO_MEMORY;
+ goto EXIT;
+ }
- if ((mCameraHandle = open(device, O_RDWR)) == -1)
- {
+ if ((mCameraHandle = open(device, O_RDWR) ) == -1) {
CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
- return -EINVAL;
- }
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- ret = ioctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
- if (ret < 0)
- {
+ ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
+ if (ret < 0) {
CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
- return -EINVAL;
- }
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0)
- {
+ if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
- return -EINVAL;
- }
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING))
- {
+ if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING)) {
CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
- return -EINVAL;
- }
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
// Initialize flags
mPreviewing = false;
mVideoInfo->isStreaming = false;
mRecording = false;
-
+ mCapturing = false;
+EXIT:
LOG_FUNCTION_NAME_EXIT;
-
return ret;
}
-status_t V4LCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::FrameType frameType)
{
-
status_t ret = NO_ERROR;
+ int idx = 0;
+ LOG_FUNCTION_NAME;
- if ( !mVideoInfo->isStreaming )
- {
- return NO_ERROR;
+ if ( frameType == CameraFrame::IMAGE_FRAME) { //(1 > mCapturedFrames)
+ // Signal end of image capture
+ if ( NULL != mEndImageCaptureCallback) {
+ CAMHAL_LOGDB("===========Signal End Image Capture==========");
+ mEndImageCaptureCallback(mEndCaptureData);
}
+ goto EXIT;
+ }
+ if ( !mVideoInfo->isStreaming ) {
+ goto EXIT;
+ }
- int i = mPreviewBufs.valueFor(( unsigned int )frameBuf);
- if(i<0)
- {
- return BAD_VALUE;
- }
+ idx = mPreviewBufs.valueFor(frameBuf);
+ if(idx < 0) {
+ CAMHAL_LOGEB("Wrong index = %d",idx);
+ goto EXIT;
+ }
- mVideoInfo->buf.index = i;
+ mVideoInfo->buf.index = idx;
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
- ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
if (ret < 0) {
- CAMHAL_LOGEA("Init: VIDIOC_QBUF Failed");
- return -1;
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
}
-
nQueued++;
-
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
-status_t V4LCameraAdapter::setParameters(const CameraParameters &params)
+status_t V4LCameraAdapter::setParameters(const android::CameraParameters &params)
{
- LOG_FUNCTION_NAME;
-
status_t ret = NO_ERROR;
-
int width, height;
+ struct v4l2_streamparm streamParams;
- params.getPreviewSize(&width, &height);
-
- CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);
-
- mVideoInfo->width = width;
- mVideoInfo->height = height;
- mVideoInfo->framesizeIn = (width * height << 1);
- mVideoInfo->formatIn = DEFAULT_PIXEL_FORMAT;
+ LOG_FUNCTION_NAME;
- mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->format.fmt.pix.width = width;
- mVideoInfo->format.fmt.pix.height = height;
- mVideoInfo->format.fmt.pix.pixelformat = DEFAULT_PIXEL_FORMAT;
+ if(!mPreviewing && !mCapturing) {
+ params.getPreviewSize(&width, &height);
+ CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);
- ret = ioctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
- if (ret < 0) {
- CAMHAL_LOGEB("Open: VIDIOC_S_FMT Failed: %s", strerror(errno));
- return ret;
+ ret = v4lSetFormat( width, height, DEFAULT_PIXEL_FORMAT);
+ if (ret < 0) {
+ CAMHAL_LOGEB(" VIDIOC_S_FMT Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+ //set frame rate
+ // Now its fixed to 30 FPS
+ streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
+ streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
+ streamParams.parm.capture.timeperframe.denominator = FPS_PERIOD;
+ streamParams.parm.capture.timeperframe.numerator= 1;
+ ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
+ if (ret < 0) {
+ CAMHAL_LOGEB(" VIDIOC_S_PARM Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+ int actualFps = streamParams.parm.capture.timeperframe.denominator / streamParams.parm.capture.timeperframe.numerator;
+ CAMHAL_LOGDB("Actual FPS set is : %d.", actualFps);
}
// Update the current parameter set
mParams = params;
+EXIT:
LOG_FUNCTION_NAME_EXIT;
return ret;
}
-void V4LCameraAdapter::getParameters(CameraParameters& params)
+void V4LCameraAdapter::getParameters(android::CameraParameters& params)
{
LOG_FUNCTION_NAME;
@@ -193,27 +432,37 @@ void V4LCameraAdapter::getParameters(CameraParameters& params)
///API to give the buffers to Adapter
-status_t V4LCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+status_t V4LCameraAdapter::useBuffers(CameraMode mode, CameraBuffer *bufArr, int num, size_t length, unsigned int queueable)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
switch(mode)
{
case CAMERA_PREVIEW:
+ mPreviewBufferCountQueueable = queueable;
ret = UseBuffersPreview(bufArr, num);
break;
- //@todo Insert Image capture case here
+ case CAMERA_IMAGE_CAPTURE:
+ mCaptureBufferCountQueueable = queueable;
+ ret = UseBuffersCapture(bufArr, num);
+ break;
case CAMERA_VIDEO:
//@warn Video capture is not fully supported yet
+ mPreviewBufferCountQueueable = queueable;
ret = UseBuffersPreview(bufArr, num);
break;
+ case CAMERA_MEASUREMENT:
+ break;
+
+ default:
+ break;
}
LOG_FUNCTION_NAME_EXIT;
@@ -221,172 +470,301 @@ status_t V4LCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
return ret;
}
-status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num)
-{
+status_t V4LCameraAdapter::UseBuffersCapture(CameraBuffer *bufArr, int num) {
int ret = NO_ERROR;
- if(NULL == bufArr)
- {
- return BAD_VALUE;
- }
-
- //First allocate adapter internal buffers at V4L level for USB Cam
- //These are the buffers from which we will copy the data into overlay buffers
- /* Check if camera can handle NB_BUFFER buffers */
- mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
- mVideoInfo->rb.count = num;
-
- ret = ioctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
- if (ret < 0) {
- CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
- return ret;
+ LOG_FUNCTION_NAME;
+ if(NULL == bufArr) {
+ ret = BAD_VALUE;
+ goto EXIT;
}
for (int i = 0; i < num; i++) {
+ //Associate each Camera internal buffer with the one from Overlay
+ mCaptureBufs.add(&bufArr[i], i);
+ CAMHAL_LOGDB("capture- buff [%d] = 0x%x ",i, mCaptureBufs.keyAt(i));
+ }
- memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
+ // Update the preview buffer count
+ mCaptureBufferCount = num;
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
- mVideoInfo->buf.index = i;
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+}
- ret = ioctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
- if (ret < 0) {
- CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
- return ret;
- }
+status_t V4LCameraAdapter::UseBuffersPreview(CameraBuffer *bufArr, int num)
+{
+ int ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
- mVideoInfo->mem[i] = mmap (0,
- mVideoInfo->buf.length,
- PROT_READ | PROT_WRITE,
- MAP_SHARED,
- mCameraHandle,
- mVideoInfo->buf.m.offset);
+ if(NULL == bufArr) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- if (mVideoInfo->mem[i] == MAP_FAILED) {
- CAMHAL_LOGEB("Unable to map buffer (%s)", strerror(errno));
- return -1;
+ ret = v4lInitMmap(num);
+ if (ret == NO_ERROR) {
+ for (int i = 0; i < num; i++) {
+ //Associate each Camera internal buffer with the one from Overlay
+ mPreviewBufs.add(&bufArr[i], i);
+ CAMHAL_LOGDB("Preview- buff [%d] = 0x%x ",i, mPreviewBufs.keyAt(i));
}
- uint32_t *ptr = (uint32_t*) bufArr;
+ // Update the preview buffer count
+ mPreviewBufferCount = num;
+ }
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t V4LCameraAdapter::takePicture() {
+ status_t ret = NO_ERROR;
+ int width = 0;
+ int height = 0;
+ size_t yuv422i_buff_size = 0;
+ int index = 0;
+ char *fp = NULL;
+ CameraBuffer *buffer = NULL;
+ CameraFrame frame;
- //Associate each Camera internal buffer with the one from Overlay
- mPreviewBufs.add((int)ptr[i], i);
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mCaptureBufsLock);
+ if(mCapturing) {
+ CAMHAL_LOGEA("Already Capture in Progress...");
+ ret = BAD_VALUE;
+ goto EXIT;
}
- // Update the preview buffer count
- mPreviewBufferCount = num;
+ mCapturing = true;
+ mPreviewing = false;
- return ret;
-}
+ // Stop preview streaming
+ ret = v4lStopStreaming(mPreviewBufferCount);
+ if (ret < 0 ) {
+ CAMHAL_LOGEB("v4lStopStreaming Failed: %s", strerror(errno));
+ goto EXIT;
+ }
-status_t V4LCameraAdapter::startPreview()
-{
- status_t ret = NO_ERROR;
+ //configure for capture image size and pixel format.
+ mParams.getPictureSize(&width, &height);
+ CAMHAL_LOGDB("Image Capture Size WxH = %dx%d",width,height);
+ yuv422i_buff_size = width * height * 2;
- Mutex::Autolock lock(mPreviewBufsLock);
+ ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
+ goto EXIT;
+ }
- if(mPreviewing)
- {
- return BAD_VALUE;
+ ret = v4lInitMmap(mCaptureBufferCount);
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lInitMmap Failed: %s", strerror(errno));
+ goto EXIT;
}
- for (int i = 0; i < mPreviewBufferCount; i++) {
+ for (int i = 0; i < mCaptureBufferCountQueueable; i++) {
mVideoInfo->buf.index = i;
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
- ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
if (ret < 0) {
CAMHAL_LOGEA("VIDIOC_QBUF Failed");
- return -EINVAL;
+ ret = BAD_VALUE;
+ goto EXIT;
}
-
nQueued++;
- }
+ }
- enum v4l2_buf_type bufType;
- if (!mVideoInfo->isStreaming) {
- bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ ret = v4lStartStreaming();
+ if (ret < 0) {
+ CAMHAL_LOGEB("v4lStartStreaming Failed: %s", strerror(errno));
+ goto EXIT;
+ }
- ret = ioctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
- if (ret < 0) {
- CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
- return ret;
- }
+ CAMHAL_LOGDA("Streaming started for Image Capture");
- mVideoInfo->isStreaming = true;
- }
+ //get the frame and send to encode as JPG
+ fp = this->GetFrame(index);
+ if(!fp) {
+ CAMHAL_LOGEA("!!! Captured frame is NULL !!!!");
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
+
+ CAMHAL_LOGDA("::Capture Frame received from V4L::");
+ buffer = mCaptureBufs.keyAt(index);
+ CAMHAL_LOGVB("## captureBuf[%d] = 0x%x, yuv422i_buff_size=%d", index, buffer->opaque, yuv422i_buff_size);
+
+ //copy the yuv422i data to the image buffer.
+ memcpy(buffer->opaque, fp, yuv422i_buff_size);
+
+#ifdef DUMP_CAPTURE_FRAME
+ //dump the YUV422 buffer in to a file
+ //a folder should have been created at /data/misc/camera/raw/
+ {
+ int fd =-1;
+ fd = open("/data/misc/camera/raw/captured_yuv422i_dump.yuv", O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+ if(fd < 0) {
+ CAMHAL_LOGEB("Unable to open file: %s", strerror(fd));
+ }
+ else {
+ write(fd, fp, yuv422i_buff_size );
+ close(fd);
+ CAMHAL_LOGDB("::Captured Frame dumped at /data/misc/camera/raw/captured_yuv422i_dump.yuv::");
+ }
+ }
+#endif
+
+ CAMHAL_LOGDA("::sending capture frame to encoder::");
+ frame.mFrameType = CameraFrame::IMAGE_FRAME;
+ frame.mBuffer = buffer;
+ frame.mLength = yuv422i_buff_size;
+ frame.mWidth = width;
+ frame.mHeight = height;
+ frame.mAlignment = width*2;
+ frame.mOffset = 0;
+ frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
+ frame.mFrameMask = (unsigned int)CameraFrame::IMAGE_FRAME;
+ frame.mQuirks |= CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG;
+ frame.mQuirks |= CameraFrame::FORMAT_YUV422I_YUYV;
+
+ ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
+ } else {
+ ret = sendFrameToSubscribers(&frame);
+ }
- // Create and start preview thread for receiving buffers from V4L Camera
- mPreviewThread = new PreviewThread(this);
+ // Stop streaming after image capture
+ ret = v4lStopStreaming(mCaptureBufferCount);
+ if (ret < 0 ) {
+ CAMHAL_LOGEB("v4lStopStreaming Failed: %s", strerror(errno));
+ goto EXIT;
+ }
+
+ ret = restartPreview();
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
- CAMHAL_LOGDA("Created preview thread");
+status_t V4LCameraAdapter::stopImageCapture()
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ //Release image buffers
+ if ( NULL != mReleaseImageBuffersCallback ) {
+ mReleaseImageBuffersCallback(mReleaseData);
+ }
+ mCaptureBufs.clear();
- //Update the flag to indicate we are previewing
- mPreviewing = true;
+ mCapturing = false;
+ mPreviewing = true;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
- return ret;
+status_t V4LCameraAdapter::autoFocus()
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ //autoFocus is not implemented. Just return.
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
}
-status_t V4LCameraAdapter::stopPreview()
+status_t V4LCameraAdapter::startPreview()
{
- enum v4l2_buf_type bufType;
- int ret = NO_ERROR;
+ status_t ret = NO_ERROR;
- Mutex::Autolock lock(mPreviewBufsLock);
+ LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mPreviewBufsLock);
- if(!mPreviewing)
- {
- return NO_INIT;
- }
+ if(mPreviewing) {
+ ret = BAD_VALUE;
+ goto EXIT;
+ }
- if (mVideoInfo->isStreaming) {
- bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
- ret = ioctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
if (ret < 0) {
- CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
- return ret;
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
}
+ nQueued++;
+ }
- mVideoInfo->isStreaming = false;
+ ret = v4lStartStreaming();
+
+ // Create and start preview thread for receiving buffers from V4L Camera
+ if(!mCapturing) {
+ mPreviewThread = new PreviewThread(this);
+ CAMHAL_LOGDA("Created preview thread");
}
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+ //Update the flag to indicate we are previewing
+ mPreviewing = true;
+ mCapturing = false;
+
+EXIT:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t V4LCameraAdapter::stopPreview()
+{
+ enum v4l2_buf_type bufType;
+ int ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+ android::AutoMutex lock(mStopPreviewLock);
+
+ if(!mPreviewing) {
+ return NO_INIT;
+ }
+ mPreviewing = false;
+
+ ret = v4lStopStreaming(mPreviewBufferCount);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StopStreaming: FAILED: %s", strerror(errno));
+ }
nQueued = 0;
nDequeued = 0;
-
- /* Unmap buffers */
- for (int i = 0; i < mPreviewBufferCount; i++)
- if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0)
- CAMHAL_LOGEA("Unmap failed");
+ mFramesWithEncoder = 0;
mPreviewBufs.clear();
mPreviewThread->requestExitAndWait();
mPreviewThread.clear();
+ LOG_FUNCTION_NAME_EXIT;
return ret;
-
}
char * V4LCameraAdapter::GetFrame(int &index)
{
- int ret;
+ int ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
/* DQ */
- ret = ioctl(mCameraHandle, VIDIOC_DQBUF, &mVideoInfo->buf);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_DQBUF, &mVideoInfo->buf);
if (ret < 0) {
CAMHAL_LOGEA("GetFrame: VIDIOC_DQBUF Failed");
return NULL;
@@ -395,6 +773,7 @@ char * V4LCameraAdapter::GetFrame(int &index)
index = mVideoInfo->buf.index;
+ LOG_FUNCTION_NAME_EXIT;
return (char *)mVideoInfo->mem[mVideoInfo->buf.index];
}
@@ -403,6 +782,7 @@ char * V4LCameraAdapter::GetFrame(int &index)
status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
{
status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
// Just return the current preview size, nothing more to do here.
mParams.getPreviewSize(( int * ) &width,
@@ -419,9 +799,27 @@ status_t V4LCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t buffer
return NO_ERROR;
}
-status_t V4LCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+status_t V4LCameraAdapter::getPictureBufferSize(CameraFrame *frame, size_t bufferCount)
{
- // We don't support image capture yet, safely return from here without messing up
+ int width = 0;
+ int height = 0;
+ int bytesPerPixel = 2; // for YUV422i; default pixel format
+
+ LOG_FUNCTION_NAME;
+
+ if (frame == NULL) {
+ return BAD_VALUE;
+ }
+
+ mParams.getPictureSize( &width, &height );
+ frame->mLength = width * height * bytesPerPixel;
+ frame->mWidth = width;
+ frame->mHeight = height;
+ frame->mAlignment = width * bytesPerPixel;
+
+ CAMHAL_LOGDB("Picture size: W x H = %u x %u (size=%u bytes, alignment=%u bytes)",
+ frame->mWidth, frame->mHeight, frame->mLength, frame->mAlignment);
+ LOG_FUNCTION_NAME_EXIT;
return NO_ERROR;
}
@@ -431,16 +829,17 @@ static void debugShowFPS()
static int mLastFrameCount = 0;
static nsecs_t mLastFpsTime = 0;
static float mFps = 0;
- mFrameCount++;
- if (!(mFrameCount & 0x1F)) {
- nsecs_t now = systemTime();
- nsecs_t diff = now - mLastFpsTime;
- mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
- mLastFpsTime = now;
- mLastFrameCount = mFrameCount;
- ALOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ if(mDebugFps) {
+ mFrameCount++;
+ if (!(mFrameCount & 0x1F)) {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ CAMHAL_LOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ }
}
- // XXX: mFPS has the value we want
}
status_t V4LCameraAdapter::recalculateFPS()
@@ -487,6 +886,7 @@ V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index)
LOG_FUNCTION_NAME;
// Nothing useful to do in the constructor
+ mFramesWithEncoder = 0;
LOG_FUNCTION_NAME_EXIT;
}
@@ -507,6 +907,202 @@ V4LCameraAdapter::~V4LCameraAdapter()
LOG_FUNCTION_NAME_EXIT;
}
+static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size ) {
+ //convert YUV422I yuyv to uyvy format.
+ uint32_t *bf = (uint32_t*)src;
+ uint32_t *dst = (uint32_t*)dest;
+
+ LOG_FUNCTION_NAME;
+
+ if (!src || !dest) {
+ return;
+ }
+
+ for(size_t i = 0; i < size; i = i+4)
+ {
+ dst[0] = ((bf[0] & 0x00FF00FF) << 8) | ((bf[0] & 0xFF00FF00) >> 8);
+ bf++;
+ dst++;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height ) {
+ //convert YUV422I to YUV420 NV12 format and copies directly to preview buffers (Tiler memory).
+ int stride = 4096;
+ unsigned char *bf = src;
+ unsigned char *dst_y = dest;
+ unsigned char *dst_uv = dest + ( height * stride);
+#ifdef PPM_PER_FRAME_CONVERSION
+ static int frameCount = 0;
+ static nsecs_t ppm_diff = 0;
+ nsecs_t ppm_start = systemTime();
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if (width % 16 ) {
+ for(int i = 0; i < height; i++) {
+ for(int j = 0; j < width; j++) {
+ *dst_y = *bf;
+ dst_y++;
+ bf = bf + 2;
+ }
+ dst_y += (stride - width);
+ }
+
+ bf = src;
+ bf++; //UV sample
+ for(int i = 0; i < height/2; i++) {
+ for(int j=0; j<width; j++) {
+ *dst_uv = *bf;
+ dst_uv++;
+ bf = bf + 2;
+ }
+ bf = bf + width*2;
+ dst_uv = dst_uv + (stride - width);
+ }
+ } else {
+ //neon conversion
+ for(int i = 0; i < height; i++) {
+ int n = width;
+ int skip = i & 0x1; // skip uv elements for the odd rows
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 5f \n\t"
+ "0: @ 16 pixel copy \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv.. \n\t"
+ " @ now q0 = y q1 = uv \n\t"
+ " vst1.32 {d0,d1}, [%[dst_y]]! \n\t"
+ " cmp %[skip], #0 \n\t"
+ " bne 1f \n\t"
+ " vst1.32 {d2,d3},[%[dst_uv]]! \n\t"
+ "1: @ skip odd rows for UV \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " cmp %[n], #16 \n\t"
+ " bge 0b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
+ : [src_stride] "r" (width), [skip] "r" (skip)
+ : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
+ );
+ dst_y = dst_y + (stride - width);
+ if (skip == 0) {
+ dst_uv = dst_uv + (stride - width);
+ }
+ } //end of for()
+ }
+
+#ifdef PPM_PER_FRAME_CONVERSION
+ ppm_diff += (systemTime() - ppm_start);
+ frameCount++;
+
+ if (frameCount >= 30) {
+ ppm_diff = ppm_diff / frameCount;
+ LOGD("PPM: YUV422i to NV12 Conversion(%d x %d): %llu us ( %llu ms )", width, height,
+ ns2us(ppm_diff), ns2ms(ppm_diff) );
+ ppm_diff = 0;
+ frameCount = 0;
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height ) {
+ //convert YUV422I to YUV420 NV12 format.
+ unsigned char *bf = src;
+ unsigned char *dst_y = dest;
+ unsigned char *dst_uv = dest + (width * height);
+
+ LOG_FUNCTION_NAME;
+
+ if (width % 16 ) {
+ for(int i = 0; i < height; i++) {
+ for(int j = 0; j < width; j++) {
+ *dst_y = *bf;
+ dst_y++;
+ bf = bf + 2;
+ }
+ }
+
+ bf = src;
+ bf++; //UV sample
+ for(int i = 0; i < height/2; i++) {
+ for(int j=0; j<width; j++) {
+ *dst_uv = *bf;
+ dst_uv++;
+ bf = bf + 2;
+ }
+ bf = bf + width*2;
+ }
+ } else {
+ //neon conversion
+ for(int i = 0; i < height; i++) {
+ int n = width;
+ int skip = i & 0x1; // skip uv elements for the odd rows
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 5f \n\t"
+ "0: @ 16 pixel copy \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv.. \n\t"
+ " @ now q0 = y q1 = uv \n\t"
+ " vst1.32 {d0,d1}, [%[dst_y]]! \n\t"
+ " cmp %[skip], #0 \n\t"
+ " bne 1f \n\t"
+ " vst1.32 {d2,d3},[%[dst_uv]]! \n\t"
+ "1: @ skip odd rows for UV \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " cmp %[n], #16 \n\t"
+ " bge 0b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
+ : [src_stride] "r" (width), [skip] "r" (skip)
+ : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
+ );
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+#ifdef SAVE_RAW_FRAMES
+void saveFile(unsigned char* buff, int buff_size) {
+ static int counter = 1;
+ int fd = -1;
+ char fn[256];
+
+ LOG_FUNCTION_NAME;
+ if (counter > 3) {
+ return;
+ }
+ //dump nv12 buffer
+ counter++;
+ sprintf(fn, "/data/misc/camera/raw/nv12_dump_%03d.yuv", counter);
+ CAMHAL_LOGEB("Dumping nv12 frame to a file : %s.", fn);
+
+ fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+ if(fd < 0) {
+ CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
+ return;
+ }
+
+ write(fd, buff, buff_size );
+ close(fd);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+#endif
+
/* Preview Thread */
// ---------------------------------------------------------------------------
@@ -515,63 +1111,115 @@ int V4LCameraAdapter::previewThread()
    status_t ret = NO_ERROR;
    int width, height;
    CameraFrame frame;
+    void *y_uv[2];
+    int index = 0;
+    // Destination row pitch in bytes.
+    // NOTE(review): hard-coded 4096 (TILER pitch?) -- confirm it matches the
+    // stride of the allocated preview buffers.
+    int stride = 4096;
+    char *fp = NULL;

-    if (mPreviewing)
-    {
-        int index = 0;
-        char *fp = this->GetFrame(index);
-        if(!fp)
-        {
-            return BAD_VALUE;
-        }
+    mParams.getPreviewSize(&width, &height);

-        uint8_t* ptr = (uint8_t*) mPreviewBufs.keyAt(index);
+    if (mPreviewing) {
-        int width, height;
-        uint16_t* dest = (uint16_t*)ptr;
-        uint16_t* src = (uint16_t*) fp;
-        mParams.getPreviewSize(&width, &height);
-        for(int i=0;i<height;i++)
-        {
-            for(int j=0;j<width;j++)
-            {
-                //*dest = *src;
-                //convert from YUYV to UYVY supported in Camera service
-                *dest = (((*src & 0xFF000000)>>24)<<16)|(((*src & 0x00FF0000)>>16)<<24) |
-                    (((*src & 0xFF00)>>8)<<0)|(((*src & 0x00FF)>>0)<<8);
-                src++;
-                dest++;
-            }
-            dest += 4096/2-width;
-        }
+        // Dequeue the next filled V4L2 buffer; 'index' receives its slot.
+        fp = this->GetFrame(index);
+        if(!fp) {
+            ret = BAD_VALUE;
+            goto EXIT;
+        }
+        CameraBuffer *buffer = mPreviewBufs.keyAt(index);
+        CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(buffer);
+        if (!lframe) {
+            ret = BAD_VALUE;
+            goto EXIT;
+        }
+
+        debugShowFPS();
+
+        // No one listening -- drop the frame without converting it.
+        if ( mFrameSubscribers.size() == 0 ) {
+            ret = BAD_VALUE;
+            goto EXIT;
+        }
+        y_uv[0] = (void*) lframe->mYuv[0];
+        //y_uv[1] = (void*) lframe->mYuv[1];
+        //y_uv[1] = (void*) (lframe->mYuv[0] + height*stride);
+        // Convert the dequeued YUYV frame into the NV12 display buffer.
+        // NOTE(review): y_uv[1] is never assigned (both candidate lines are
+        // commented out), so the UV value printed below is indeterminate.
+        convertYUV422ToNV12Tiler ( (unsigned char*)fp, (unsigned char*)y_uv[0], width, height);
+        CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; y= 0x%x; UV= 0x%x.",index, buffer, y_uv[0], y_uv[1] );
+
+#ifdef SAVE_RAW_FRAMES
+        unsigned char* nv12_buff = (unsigned char*) malloc(width*height*3/2);
+        //Convert yuv422i to yuv420sp(NV12) & dump the frame to a file
+        convertYUV422ToNV12 ( (unsigned char*)fp, nv12_buff, width, height);
+        saveFile( nv12_buff, ((width*height)*3/2) );
+        free (nv12_buff);
+#endif

-        mParams.getPreviewSize(&width, &height);
        frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
-        frame.mBuffer = ptr;
-        frame.mLength = width*height*2;
-        frame.mAlignment = width*2;
+        frame.mBuffer = buffer;
+        // NV12 payload: 12 bits per pixel.
+        frame.mLength = width*height*3/2;
+        frame.mAlignment = stride;
        frame.mOffset = 0;
-        frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);;
+        frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
+        frame.mFrameMask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;

-        ret = sendFrameToSubscribers(&frame);
+        if (mRecording)
+        {
+            // The video encoder also consumes this buffer; extend the ref mask.
+            frame.mFrameMask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
+            mFramesWithEncoder++;
+        }
+        ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+        if (ret != NO_ERROR) {
+            CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
+        } else {
+            ret = sendFrameToSubscribers(&frame);
        }
+    }
+EXIT:
    return ret;
}
-extern "C" CameraAdapter* CameraAdapter_Factory()
+//scan for video devices
+// Fills 'video_device_list' with the full path of every /dev entry whose
+// name begins with DEVICE_NAME ("video...") and returns the count through
+// 'num_device'.
+// NOTE(review): 'index' is never bounded against the caller's array size
+// (V4LCameraAdapter_Capabilities passes a 5-slot list), so more than five
+// video nodes would overflow it -- confirm and add a limit.
+// NOTE(review): strncat's third argument should be the space remaining in
+// the destination, not sizeof(DEVICE_NAME).
+void detectVideoDevice(char** video_device_list, int& num_device) {
+    char dir_path[20];
+    char* filename;
+    char** dev_list = video_device_list;
+    DIR *d;
+    struct dirent *dir;
+    int index = 0;
+
+    strcpy(dir_path, DEVICE_PATH);
+    d = opendir(dir_path);
+    if(d) {
+        //read each entry in the /dev/ and find if there is videox entry.
+        while ((dir = readdir(d)) != NULL) {
+            filename = dir->d_name;
+            // Compare only the leading 5 characters ("video").
+            if (strncmp(filename, DEVICE_NAME, 5) == 0) {
+                strcpy(dev_list[index],DEVICE_PATH);
+                strncat(dev_list[index],filename,sizeof(DEVICE_NAME));
+                index++;
+            }
+        } //end of while()
+        closedir(d);
+        num_device = index;
+
+        for(int i=0; i<index; i++){
+            CAMHAL_LOGDB("Video device list::dev_list[%d]= %s",i,dev_list[i]);
+        }
+    }
+}
+
+// Factory hook called by CameraHal to create a V4L camera adapter for the
+// given sensor index.  Returns NULL on allocation failure.
+extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index)
{
    CameraAdapter *adapter = NULL;
-    Mutex::Autolock lock(gAdapterLock);
+    // Serialize adapter creation across threads.
+    android::AutoMutex lock(gV4LAdapterLock);

    LOG_FUNCTION_NAME;

    adapter = new V4LCameraAdapter(sensor_index);
    if ( adapter ) {
-        CAMHAL_LOGDB("New OMX Camera adapter instance created for sensor %d",sensor_index);
+        CAMHAL_LOGDB("New V4L Camera adapter instance created for sensor %d",sensor_index);
    } else {
-        CAMHAL_LOGEA("Camera adapter create failed!");
+        CAMHAL_LOGEA("V4L Camera adapter create failed for sensor index = %d!",sensor_index);
    }

    LOG_FUNCTION_NAME_EXIT;
@@ -579,32 +1227,91 @@ extern "C" CameraAdapter* CameraAdapter_Factory()

    return adapter;
}
-extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
-                                          const unsigned int starting_camera,
-                                          const unsigned int max_camera) {
+// Detect V4L2 video-capture devices and populate one CameraProperties entry
+// per usable camera, starting at properties_array[starting_camera].
+// 'supportedCameras' receives the number of cameras added.  Devices that
+// fail to open or query are skipped; the function itself returns NO_ERROR
+// (BAD_VALUE only for a NULL properties_array).
+extern "C" status_t V4LCameraAdapter_Capabilities(
+        CameraProperties::Properties * const properties_array,
+        const int starting_camera, const int max_camera, int & supportedCameras)
+{
+    status_t ret = NO_ERROR;
+    struct v4l2_capability cap;
+    // Use -1 as the "no device open" sentinel: NULL is a pointer constant
+    // and 0 is a valid file descriptor (stdin), so initializing to NULL and
+    // unconditionally closing at EXIT could close stdin or double-close.
+    int tempHandle = -1;
    int num_cameras_supported = 0;
+    char device_list[5][15];
+    char* video_device_list[5];
+    int num_v4l_devices = 0;
+    int sensorId = 0;
    CameraProperties::Properties* properties = NULL;

    LOG_FUNCTION_NAME;

-    if(!properties_array)
-    {
-        return -EINVAL;
+    supportedCameras = 0;
+    memset((void*)&cap, 0, sizeof(v4l2_capability));
+
+    if (!properties_array) {
+        CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
+        LOG_FUNCTION_NAME_EXIT;
+        return BAD_VALUE;
    }

-    // TODO: Need to tell camera properties what other cameras we can support
-    if (starting_camera + num_cameras_supported < max_camera) {
-        num_cameras_supported++;
-        properties = properties_array + starting_camera;
-        properties->set(CameraProperties::CAMERA_NAME, "USBCamera");
+    for (int i = 0; i < 5; i++) {
+        video_device_list[i] = device_list[i];
    }
+    //look for the connected video devices
+    detectVideoDevice(video_device_list, num_v4l_devices);

-    LOG_FUNCTION_NAME_EXIT;
+    for (int i = 0; i < num_v4l_devices; i++) {
+        if ( (starting_camera + num_cameras_supported) < max_camera) {
+            sensorId = starting_camera + num_cameras_supported;
+
+            CAMHAL_LOGDB("Opening device[%d] = %s..",i, video_device_list[i]);
+            if ((tempHandle = open(video_device_list[i], O_RDWR)) == -1) {
+                CAMHAL_LOGEB("Error while opening handle to V4L2 Camera(%s): %s",video_device_list[i], strerror(errno));
+                continue;
+            }

-    return num_cameras_supported;
+            ret = ioctl (tempHandle, VIDIOC_QUERYCAP, &cap);
+            if (ret < 0) {
+                CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
+                close(tempHandle);
+                tempHandle = -1;   // reset so EXIT does not close it again
+                continue;
+            }
+
+            //check for video capture devices
+            if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
+                CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
+                close(tempHandle);
+                tempHandle = -1;
+                continue;
+            }
+
+            strcpy(device, video_device_list[i]);
+            properties = properties_array + starting_camera + num_cameras_supported;
+
+            //fetch capabilities for this camera
+            ret = V4LCameraAdapter::getCaps( sensorId, properties, tempHandle );
+            if (ret < 0) {
+                CAMHAL_LOGEA("Error while getting capabilities.");
+                close(tempHandle);
+                tempHandle = -1;
+                continue;
+            }
+
+            num_cameras_supported++;
+
+        }
+        //For now exit this loop once a valid video capture device is found.
+        //TODO: find all V4L capture devices and it capabilities
+        break;
+    }//end of for() loop
+
+    supportedCameras = num_cameras_supported;
+    CAMHAL_LOGDB("Number of V4L cameras detected =%d", num_cameras_supported);
+
+EXIT:
+    LOG_FUNCTION_NAME_EXIT;
+    // Only the success path leaves a device open at this point; every
+    // failure path above has already closed and reset its handle.
+    if (tempHandle >= 0) {
+        close(tempHandle);
+    }
+    return NO_ERROR;
}
-};
+} // namespace Camera
+} // namespace Ti
/*--------------------Camera Adapter Class ENDS here-----------------------------*/
diff --git a/camera/V4LCameraAdapter/V4LCapabilities.cpp b/camera/V4LCameraAdapter/V4LCapabilities.cpp
new file mode 100644
index 0000000..3a84268
--- /dev/null
+++ b/camera/V4LCameraAdapter/V4LCapabilities.cpp
@@ -0,0 +1,346 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file V4LCapabilities.cpp
+*
+* This file implements the V4L Capabilities feature.
+*
+*/
+
+#include "CameraHal.h"
+#include "V4LCameraAdapter.h"
+#include "ErrorUtils.h"
+#include "TICameraParameters.h"
+
+namespace Ti {
+namespace Camera {
+
+/************************************
+ * global constants and variables
+ *************************************/
+
+#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
+// Max length of a "WxH" resolution string, including the terminating NUL.
+#define MAX_RES_STRING_LENGTH 10
+#define DEFAULT_WIDTH 640
+#define DEFAULT_HEIGHT 480
+
+// Separator used when building comma-separated capability lists.
+static const char PARAM_SEP[] = ",";
+
+//Camera defaults
+const char V4LCameraAdapter::DEFAULT_PICTURE_FORMAT[] = "jpeg";
+const char V4LCameraAdapter::DEFAULT_PICTURE_SIZE[] = "640x480";
+const char V4LCameraAdapter::DEFAULT_PREVIEW_FORMAT[] = "yuv422i-yuyv";
+const char V4LCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "640x480";
+const char V4LCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6";
+const char V4LCameraAdapter::DEFAULT_FRAMERATE[] = "30";
+const char V4LCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity";
+const char * V4LCameraAdapter::DEFAULT_VSTAB = android::CameraParameters::FALSE;
+const char * V4LCameraAdapter::DEFAULT_VNF = android::CameraParameters::FALSE;
+
+
+// Translation table from V4L2 fourcc pixel formats to the corresponding
+// Android CameraParameters format strings.
+const CapPixelformat V4LCameraAdapter::mPixelformats [] = {
+    { V4L2_PIX_FMT_YUYV, android::CameraParameters::PIXEL_FORMAT_YUV422I },
+    { V4L2_PIX_FMT_JPEG, android::CameraParameters::PIXEL_FORMAT_JPEG },
+};
+
+/*****************************************
+ * internal static function declarations
+ *****************************************/
+
+/**** Utility functions to help translate V4L Caps to Parameter ****/
+
+status_t V4LCameraAdapter::insertDefaults(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps)
+{
+    // Seed 'params' with the static defaults for a USB/V4L camera: preview
+    // and picture formats/sizes, framerate, JPEG settings, facing, etc.
+    // 'caps' is accepted for signature symmetry with the other insert*
+    // helpers but is not consulted here -- all values are fixed.
+    status_t ret = NO_ERROR;
+    LOG_FUNCTION_NAME;
+
+    params->set(CameraProperties::PREVIEW_FORMAT, DEFAULT_PREVIEW_FORMAT);
+
+    params->set(CameraProperties::PICTURE_FORMAT, DEFAULT_PICTURE_FORMAT);
+    params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+    params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+    params->set(CameraProperties::PREVIEW_FRAME_RATE, DEFAULT_FRAMERATE);
+    params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS);
+    params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE);
+
+    params->set(CameraProperties::CAMERA_NAME, "USBCAMERA");
+    params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, "320x240");
+    params->set(CameraProperties::JPEG_QUALITY, "90");
+    params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, "50");
+    params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, "(30000,30000)");
+    params->set(CameraProperties::FRAMERATE_RANGE, "30000,30000");
+    params->set(CameraProperties::S3D_PRV_FRAME_LAYOUT, "none");
+    params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, "auto");
+    params->set(CameraProperties::SUPPORTED_ISO_VALUES, "auto");
+    params->set(CameraProperties::SUPPORTED_ANTIBANDING, "auto");
+    params->set(CameraProperties::SUPPORTED_EFFECTS, "none");
+    params->set(CameraProperties::SUPPORTED_IPP_MODES, "ldc-nsf");
+    params->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_FRONT);
+    params->set(CameraProperties::ORIENTATION_INDEX, 0);
+    params->set(CameraProperties::SENSOR_ORIENTATION, "0");
+    params->set(CameraProperties::VSTAB, DEFAULT_VSTAB);
+    params->set(CameraProperties::VNF, DEFAULT_VNF);
+
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+status_t V4LCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+    // Translate each enumerated V4L2 preview pixel format into its Android
+    // parameter string and publish the comma-separated list.
+    char supported[MAX_PROP_VALUE_LENGTH];
+
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+    for (int i = 0; i < caps.ulPreviewFormatCount; i++) {
+        for (unsigned int j = 0; j < ARRAY_SIZE(mPixelformats); j++) {
+            if(caps.ePreviewFormats[i] == mPixelformats[j].pixelformat ) {
+                // NOTE(review): strncat's size argument should be the space
+                // remaining in 'supported', not the total buffer length.
+                strncat (supported, mPixelformats[j].param, MAX_PROP_VALUE_LENGTH-1 );
+                strncat (supported, PARAM_SEP, 1 );
+            }
+        }
+    }
+    // YUV420P is always advertised in addition to the enumerated formats.
+    strncat(supported, android::CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
+    params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported);
+    return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertPreviewSizes(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+    // Publish the enumerated preview resolutions as a comma-separated
+    // "WxH" list; the same list backs the subsampled-sizes property.
+    char supported[MAX_PROP_VALUE_LENGTH];
+
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+    for (int i = 0; i < caps.ulPreviewResCount; i++) {
+        if (supported[0] != '\0') {
+            strncat(supported, PARAM_SEP, 1);
+        }
+        // NOTE(review): strncat's size argument should be the remaining
+        // space in 'supported', not the total buffer length.
+        strncat (supported, caps.tPreviewRes[i].param, MAX_PROP_VALUE_LENGTH-1 );
+    }
+
+    params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, supported);
+    params->set(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, supported);
+    return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertImageSizes(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+    // Publish the enumerated still-capture resolutions as a comma-separated
+    // "WxH" list.
+    char supported[MAX_PROP_VALUE_LENGTH];
+
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+    for (int i = 0; i < caps.ulCaptureResCount; i++) {
+        if (supported[0] != '\0') {
+            strncat(supported, PARAM_SEP, 1);
+        }
+        // NOTE(review): strncat's size argument should be the remaining
+        // space in 'supported', not the total buffer length.
+        strncat (supported, caps.tCaptureRes[i].param, MAX_PROP_VALUE_LENGTH-1 );
+    }
+    params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, supported);
+    return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertFrameRates(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps) {
+
+    // Publish the enumerated frame rates (integer fps values) as a
+    // comma-separated list.
+    char supported[MAX_PROP_VALUE_LENGTH];
+    char temp[10];
+
+    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
+    for (int i = 0; i < caps.ulFrameRateCount; i++) {
+        snprintf (temp, 10, "%d", caps.ulFrameRates[i] );
+        if (supported[0] != '\0') {
+            strncat(supported, PARAM_SEP, 1);
+        }
+        // NOTE(review): strncat's size argument should be the remaining
+        // space in 'supported', not the total buffer length.
+        strncat (supported, temp, MAX_PROP_VALUE_LENGTH-1 );
+    }
+
+    params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, supported);
+    return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::insertCapabilities(CameraProperties::Properties* params, V4L_TI_CAPTYPE &caps)
+{
+    // Top-level translator: publishes everything gathered in 'caps'
+    // (preview formats, picture sizes, preview sizes, frame rates), the
+    // fixed focus/picture-format capabilities, and finally the static
+    // defaults.  Stops at the first helper that fails.
+    status_t ret = NO_ERROR;
+
+    LOG_FUNCTION_NAME;
+
+    if ( NO_ERROR == ret ) {
+        ret = insertPreviewFormats(params, caps);
+    }
+
+    if ( NO_ERROR == ret ) {
+        ret = insertImageSizes(params, caps);
+    }
+
+    if ( NO_ERROR == ret ) {
+        ret = insertPreviewSizes(params, caps);
+    }
+
+    if ( NO_ERROR == ret ) {
+        ret = insertFrameRates(params, caps);
+    }
+
+    //Insert Supported Focus modes.
+    params->set(CameraProperties::SUPPORTED_FOCUS_MODES, "infinity");
+
+    params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, "jpeg");
+
+    if ( NO_ERROR == ret ) {
+        ret = insertDefaults(params, caps);
+    }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+/**
+ * Sorts the first 'count' entries of caps.tPreviewRes in ascending order of
+ * pixel area (width*height) using an in-place selection sort: each outer
+ * pass keeps the smallest remaining resolution in (w, h) and writes it to
+ * slot i.
+ */
+status_t V4LCameraAdapter::sortAscend(V4L_TI_CAPTYPE &caps, uint16_t count) {
+    size_t tempRes;
+    size_t w, h, tmpW,tmpH;
+    for (int i=0; i<count; i++) {
+        w = caps.tPreviewRes[i].width;
+        h = caps.tPreviewRes[i].height;
+        tempRes = w*h;
+        for (int j=i+1; j<count; j++) {
+            tmpW = caps.tPreviewRes[j].width;
+            tmpH = caps.tPreviewRes[j].height;
+
+            if (tempRes > (tmpW * tmpH) ) {
+                caps.tPreviewRes[j].width = w;
+                caps.tPreviewRes[j].height = h;
+                w = tmpW;
+                h = tmpH;
+                // Keep the comparison key in sync with the new candidate
+                // minimum; without this the stale (larger) area is compared
+                // on later iterations and the result is not sorted.
+                tempRes = w * h;
+            }
+        }
+        caps.tPreviewRes[i].width = w;
+        caps.tPreviewRes[i].height = h;
+
+    }
+    return NO_ERROR;
+}
+
+/*****************************************
+ * public exposed function declarations
+ *****************************************/
+
+status_t V4LCameraAdapter::getCaps(const int sensorId, CameraProperties::Properties* params,
+                                   V4L_HANDLETYPE handle) {
+    // Queries the already-opened V4L2 device for its pixel formats, discrete
+    // frame sizes and frame intervals, then publishes them into 'params' via
+    // insertCapabilities().  Sizes and rates are probed for
+    // V4L2_PIX_FMT_YUYV only (see TODOs below).
+    status_t status = NO_ERROR;
+    V4L_TI_CAPTYPE caps;
+    int i = 0;
+    int j = 0;
+    struct v4l2_fmtdesc fmtDesc;
+    struct v4l2_frmsizeenum frmSizeEnum;
+    struct v4l2_frmivalenum frmIvalEnum;
+
+    //get supported pixel formats
+    for ( i = 0; status == NO_ERROR; i++) {
+        fmtDesc.index = i;
+        fmtDesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        status = ioctl (handle, VIDIOC_ENUM_FMT, &fmtDesc);
+        if (status == NO_ERROR) {
+            CAMHAL_LOGDB("fmtDesc[%d].description::pixelformat::flags== (%s::%d::%d)",i, fmtDesc.description,fmtDesc.pixelformat,fmtDesc.flags);
+            caps.ePreviewFormats[i] = fmtDesc.pixelformat;
+        }
+    }
+    // The for-loop increments 'i' once more after the terminating (failing)
+    // VIDIOC_ENUM_FMT call, so the number of formats actually stored is
+    // i - 1; using 'i' over-counted by one.
+    caps.ulPreviewFormatCount = i - 1;
+
+    //get preview sizes & capture image sizes
+    status = NO_ERROR;
+    for ( i = 0; status == NO_ERROR; i++) {
+        frmSizeEnum.index = i;
+        //Check for frame sizes for default pixel format
+        //TODO: Check for frame sizes for all supported pixel formats
+        frmSizeEnum.pixel_format = V4L2_PIX_FMT_YUYV;
+        status = ioctl (handle, VIDIOC_ENUM_FRAMESIZES, &frmSizeEnum);
+        if (status == NO_ERROR) {
+            // Inspect .type only after a successful ioctl: the driver does
+            // not fill the struct on failure, so checking it first (as the
+            // original code did) could read an indeterminate value, break
+            // out early and leave the resolution counts unset.
+            if(frmSizeEnum.type != V4L2_FRMSIZE_TYPE_DISCRETE) {
+                break;
+            }
+            CAMHAL_LOGDB("frmSizeEnum.index[%d].width x height == (%d x %d)", i, frmSizeEnum.discrete.width, frmSizeEnum.discrete.height);
+            caps.tPreviewRes[i].width = frmSizeEnum.discrete.width;
+            caps.tPreviewRes[i].height = frmSizeEnum.discrete.height;
+            snprintf(caps.tPreviewRes[i].param, MAX_RES_STRING_LENGTH,"%dx%d",frmSizeEnum.discrete.width,frmSizeEnum.discrete.height);
+
+            caps.tCaptureRes[i].width = frmSizeEnum.discrete.width;
+            caps.tCaptureRes[i].height = frmSizeEnum.discrete.height;
+            snprintf(caps.tCaptureRes[i].param, MAX_RES_STRING_LENGTH,"%dx%d",frmSizeEnum.discrete.width,frmSizeEnum.discrete.height);
+        }
+        else {
+            // Enumeration ended: exactly 'i' discrete sizes were stored.
+            caps.ulCaptureResCount = i;
+            caps.ulPreviewResCount = i;
+        }
+    }
+    if(frmSizeEnum.type != V4L2_FRMSIZE_TYPE_DISCRETE) {
+        CAMHAL_LOGDB("\nmin_width x height = %d x %d ",frmSizeEnum.stepwise.min_width, frmSizeEnum.stepwise.min_height);
+        CAMHAL_LOGDB("\nmax_width x height = %d x %d ",frmSizeEnum.stepwise.max_width, frmSizeEnum.stepwise.max_height);
+        CAMHAL_LOGDB("\nstep width x height = %d x %d ",frmSizeEnum.stepwise.step_width,frmSizeEnum.stepwise.step_height);
+        //TODO: populate the sizes when type = V4L2_FRMSIZE_TYPE_STEPWISE
+    }
+
+    //sort the preview sizes in ascending order
+    sortAscend(caps, caps.ulPreviewResCount);
+
+    //get supported frame rates
+    // Walk resolutions from largest to smallest; stop at the first one that
+    // supports 30fps -- its rates are the ones published.
+    bool fps30 = false;
+    for ( j=caps.ulPreviewResCount-1; j >= 0; j--) {
+        CAMHAL_LOGDB(" W x H = %d x %d", caps.tPreviewRes[j].width, caps.tPreviewRes[j].height);
+        status = NO_ERROR;
+        for ( i = 0; status == NO_ERROR; i++) {
+            frmIvalEnum.index = i;
+            //Check for supported frame rates for the default pixel format.
+            frmIvalEnum.pixel_format = V4L2_PIX_FMT_YUYV;
+            frmIvalEnum.width = caps.tPreviewRes[j].width;
+            frmIvalEnum.height = caps.tPreviewRes[j].height;
+
+            status = ioctl (handle, VIDIOC_ENUM_FRAMEINTERVALS, &frmIvalEnum);
+            if (status == NO_ERROR) {
+                // As above: trust .type only after a successful ioctl.
+                if(frmIvalEnum.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
+                    break;
+                }
+                CAMHAL_LOGDB("frmIvalEnum[%d].frame rate= %d)",i, (frmIvalEnum.discrete.denominator/frmIvalEnum.discrete.numerator));
+                caps.ulFrameRates[i] = (frmIvalEnum.discrete.denominator/frmIvalEnum.discrete.numerator);
+                if (caps.ulFrameRates[i] == 30) {
+                    fps30 = true;
+                }
+            }
+            else {
+                caps.ulFrameRateCount = i;
+            }
+        }
+        if(fps30) {
+            break;
+        }
+    }
+
+    if(frmIvalEnum.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
+        //TODO: populate the frame rates when type = V4L2_FRMIVAL_TYPE_STEPWISE;
+    }
+
+    //update the preview resolution with the highest resolution which supports 30fps.
+/* // for video preview the application choose the resolution from the mediaprofiles.xml.
+   // so populating all supported preview resolution is required for video mode.
+    caps.tPreviewRes[0].width = caps.tPreviewRes[j].width;
+    caps.tPreviewRes[0].height = caps.tPreviewRes[j].height;
+    snprintf(caps.tPreviewRes[0].param, MAX_RES_STRING_LENGTH,"%dx%d",caps.tPreviewRes[j].width,caps.tPreviewRes[j].height);
+    caps.ulPreviewResCount = 1;
+*/
+    insertCapabilities (params, caps);
+    return NO_ERROR;
+}
+
+
+
+} // namespace Camera
+} // namespace Ti