author     Nipun Kwatra <nkwatra@google.com>   2010-07-27 22:21:44 -0700
committer  Nipun Kwatra <nkwatra@google.com>   2010-07-29 12:18:58 -0700
commit     c4e47d1e81c4e4403663cb911e98dbf3ada9942c
tree       137b5991ba9f4ebaf4e64a6f44479414f61ee207
parent     db205a1d75c1e9a7d0dbd8fa011335249ad6f4ac
Support for video size to be different from captured picture size.
Adding support to allow the video size to be different from the supported picture sizes. A picture size larger than the requested video size is chosen, and captured pictures are then cropped to the desired video size. Cropping is done from the center portion of the picture.

Change-Id: I6bcbe16f94b6ecbcf28b7f46826a81b4b6b8cbc3
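As a concrete illustration of the centered crop described above: the 2048x1536 picture size is the value hard-coded by this change, while the 1280x720 video size below is only an assumed example.

    // Standalone sketch of the centered-crop arithmetic used by
    // computeCropRectangleOffset() in this patch. 2048x1536 is the picture
    // size hard-coded by this change; 1280x720 is an assumed example video size.
    #include <cstdint>
    #include <cstdio>

    int main() {
        int32_t pictureWidth = 2048, pictureHeight = 1536;
        int32_t videoWidth = 1280, videoHeight = 720;  // assumption, for illustration only

        // Center the video-sized rectangle inside the captured picture.
        int32_t cropStartX = (pictureWidth - videoWidth) / 2;    // (2048 - 1280) / 2 = 384
        int32_t cropStartY = (pictureHeight - videoHeight) / 2;  // (1536 - 720) / 2 = 408

        printf("crop rect starts at (%d, %d) and spans %dx%d\n",
               cropStartX, cropStartY, videoWidth, videoHeight);
        return 0;
    }

Because both dimension differences are even here, the crop is exactly centered; with odd differences the integer division shifts the rectangle up and to the left by one pixel.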
Diffstat (limited to 'media/libstagefright/CameraSourceTimeLapse.cpp')
-rw-r--r--  media/libstagefright/CameraSourceTimeLapse.cpp  |  112
1 file changed, 93 insertions(+), 19 deletions(-)
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index c6186f6..a01450b 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -24,9 +24,13 @@
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
+#include <ui/Rect.h>
#include <utils/String8.h>
+#include "OMX_Video.h"
namespace android {
@@ -72,7 +76,11 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera,
mSkipCurrentFrame(false) {
LOGV("starting time lapse mode");
- if(mUseStillCameraForTimeLapse) {
+ mVideoWidth = width;
+ mVideoHeight = height;
+ if (mUseStillCameraForTimeLapse) {
+ setPictureSizeToClosestSupported(width, height);
+ mNeedCropping = computeCropRectangleOffset();
mMeta->setInt32(kKeyWidth, width);
mMeta->setInt32(kKeyHeight, height);
}
@@ -81,6 +89,31 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera,
CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}
+void CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
+ // TODO: Currently fixed to the highest resolution.
+ // Need to poll the camera and set accordingly.
+ mPictureWidth = 2048;
+ mPictureHeight = 1536;
+}
+
+bool CameraSourceTimeLapse::computeCropRectangleOffset() {
+ if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
+ return false;
+ }
+
+ CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
+
+ int32_t widthDifference = mPictureWidth - mVideoWidth;
+ int32_t heightDifference = mPictureHeight - mVideoHeight;
+
+ mCropRectStartX = widthDifference/2;
+ mCropRectStartY = heightDifference/2;
+
+ LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
+
+ return true;
+}
+
// static
void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
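The TODO in setPictureSizeToClosestSupported() above notes that the fixed 2048x1536 value should eventually come from the camera itself. A minimal, hedged sketch of one possible selection policy follows; it is not part of this patch, the supported-size list shown is an assumed example, and on a device the list would have to be queried from the camera (e.g. through the supported picture sizes reported by CameraParameters).

    // Hedged sketch only: pick the smallest supported picture size that still
    // covers the requested video size, so the centered crop always fits.
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct PictureSize { int32_t width; int32_t height; };

    static PictureSize pickClosestSupported(
            const std::vector<PictureSize> &supported,
            int32_t videoWidth, int32_t videoHeight) {
        PictureSize best = {0, 0};
        for (size_t i = 0; i < supported.size(); ++i) {
            const PictureSize &s = supported[i];
            bool covers = s.width >= videoWidth && s.height >= videoHeight;
            bool smaller = best.width == 0 ||
                    (int64_t)s.width * s.height < (int64_t)best.width * best.height;
            if (covers && smaller) best = s;
        }
        return best;  // {0, 0} means no supported size can hold the video
    }

    int main() {
        // Assumed example list; a real device reports its own sizes.
        std::vector<PictureSize> supported;
        supported.push_back((PictureSize){2592, 1944});
        supported.push_back((PictureSize){2048, 1536});
        supported.push_back((PictureSize){1600, 1200});
        supported.push_back((PictureSize){640, 480});

        PictureSize p = pickClosestSupported(supported, 1280, 720);
        printf("chosen picture size: %dx%d\n", p.width, p.height);  // 1600x1200
        return 0;
    }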
@@ -90,7 +123,7 @@ void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
void CameraSourceTimeLapse::threadTimeLapseEntry() {
while(mStarted) {
- if(mCameraIdle) {
+ if (mCameraIdle) {
LOGV("threadTimeLapseEntry: taking picture");
CHECK_EQ(OK, mCamera->takePicture());
mCameraIdle = false;
@@ -103,20 +136,15 @@ void CameraSourceTimeLapse::threadTimeLapseEntry() {
}
void CameraSourceTimeLapse::startCameraRecording() {
- if(mUseStillCameraForTimeLapse) {
+ if (mUseStillCameraForTimeLapse) {
LOGV("start time lapse recording using still camera");
- int32_t width;
- int32_t height;
- mMeta->findInt32(kKeyWidth, &width);
- mMeta->findInt32(kKeyHeight, &height);
-
int64_t token = IPCThreadState::self()->clearCallingIdentity();
String8 s = mCamera->getParameters();
IPCThreadState::self()->restoreCallingIdentity(token);
CameraParameters params(s);
- params.setPictureSize(width, height);
+ params.setPictureSize(mPictureWidth, mPictureHeight);
mCamera->setParameters(params.flatten());
mCameraIdle = true;
@@ -134,7 +162,7 @@ void CameraSourceTimeLapse::startCameraRecording() {
}
void CameraSourceTimeLapse::stopCameraRecording() {
- if(mUseStillCameraForTimeLapse) {
+ if (mUseStillCameraForTimeLapse) {
void *dummy;
pthread_join(mThreadTimeLapse, &dummy);
} else {
@@ -143,7 +171,7 @@ void CameraSourceTimeLapse::stopCameraRecording() {
}
void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
- if(!mUseStillCameraForTimeLapse) {
+ if (!mUseStillCameraForTimeLapse) {
mCamera->releaseRecordingFrame(frame);
}
}
@@ -158,6 +186,13 @@ sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_d
return newMemory;
}
+// Allocates IMemory of final type MemoryBase with the given size.
+sp<IMemory> allocateIMemory(size_t size) {
+ sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
+ sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
+ return newMemory;
+}
+
// static
void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
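A brief usage sketch of the allocateIMemory() helper added above. The 1280x720 frame size is only an assumed example; YUVImage::bufferSize() is the same call the crop path later in this patch uses to size the buffer, and the fragment assumes the surrounding file's includes.

    // Reserve a heap-backed IMemory large enough for one YUV420 semi-planar
    // 1280x720 frame (assumed example size), then get a writable pointer to it.
    sp<IMemory> buffer = allocateIMemory(
            YUVImage::bufferSize(YUVImage::YUV420SemiPlanar, 1280, 720));
    uint8_t *data = (uint8_t *)buffer->pointer();
    memset(data, 0, buffer->size());  // e.g. start from a zeroed frame

Unlike createIMemoryCopy(), which duplicates the camera's callback buffer byte for byte, this helper only reserves storage; the crop path fills it through a YUVCanvas.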
@@ -182,12 +217,45 @@ void CameraSourceTimeLapse::restartPreview() {
pthread_attr_destroy(&attr);
}
+sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
+ // find the YUV format
+ int32_t srcFormat;
+ CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+ YUVImage::YUVFormat yuvFormat;
+ if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ yuvFormat = YUVImage::YUV420SemiPlanar;
+ } else if (srcFormat == OMX_COLOR_FormatYUV420Planar) {
+ yuvFormat = YUVImage::YUV420Planar;
+ }
+
+ // allocate memory for cropped image and setup a canvas using it.
+ sp<IMemory> croppedImageMemory = allocateIMemory(
+ YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
+ YUVImage yuvImageCropped(yuvFormat,
+ mVideoWidth, mVideoHeight,
+ (uint8_t *)croppedImageMemory->pointer());
+ YUVCanvas yuvCanvasCrop(yuvImageCropped);
+
+ YUVImage yuvImageSource(yuvFormat,
+ mPictureWidth, mPictureHeight,
+ (uint8_t *)source_data->pointer());
+ yuvCanvasCrop.CopyImageRect(
+ Rect(mCropRectStartX, mCropRectStartY,
+ mCropRectStartX + mVideoWidth,
+ mCropRectStartY + mVideoHeight),
+ 0, 0,
+ yuvImageSource);
+
+ return croppedImageMemory;
+}
+
void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
- if(msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
+ if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
// takePicture will complete after this callback, so restart preview.
restartPreview();
+ return;
}
- if(msgType != CAMERA_MSG_RAW_IMAGE) {
+ if (msgType != CAMERA_MSG_RAW_IMAGE) {
return;
}
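One observation about the format selection at the top of cropYUVImage() above: if kKeyColorFormat were ever neither of the two YUV420 variants checked, yuvFormat would be read uninitialized. A small defensive variant, not part of this change, could look like:

    // Hedged sketch, not in this patch: default to semi-planar and assert if the
    // source format is neither of the two supported YUV420 layouts.
    YUVImage::YUVFormat yuvFormat = YUVImage::YUV420SemiPlanar;
    if (srcFormat == OMX_COLOR_FormatYUV420Planar) {
        yuvFormat = YUVImage::YUV420Planar;
    } else {
        CHECK_EQ(srcFormat, (int32_t)OMX_COLOR_FormatYUV420SemiPlanar);
    }

The Rect handed to CopyImageRect() is built as (left, top, right, bottom) with width() == right - left, so it selects exactly mVideoWidth x mVideoHeight source pixels starting at the computed crop offset and pastes them at (0, 0) in the video-sized destination image.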
@@ -200,12 +268,18 @@ void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &dat
} else {
timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
}
- sp<IMemory> dataCopy = createIMemoryCopy(data);
- dataCallbackTimestamp(timestampUs, msgType, dataCopy);
+
+ if (mNeedCropping) {
+ sp<IMemory> croppedImageData = cropYUVImage(data);
+ dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
+ } else {
+ sp<IMemory> dataCopy = createIMemoryCopy(data);
+ dataCallbackTimestamp(timestampUs, msgType, dataCopy);
+ }
}
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
- if(mSkipCurrentFrame) {
+ if (mSkipCurrentFrame) {
mSkipCurrentFrame = false;
return true;
} else {
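To make the timestamp handling in the still-camera path above concrete: the 30 fps output rate, and therefore the 33,333 us spacing shown below, is only an assumed example; mTimeBetweenTimeLapseVideoFramesUs is configured elsewhere in this class.

    // Assumed example: a 30 fps time-lapse output gives ~33,333 us between
    // successive video frames. Each captured still is stamped one fixed step
    // after the previous frame, regardless of how long the real capture took.
    int64_t timeBetweenFramesUs = 1000000LL / 30;                 // 33333
    int64_t firstFrameUs = 0;                                     // illustration only
    int64_t secondFrameUs = firstFrameUs + timeBetweenFramesUs;   // 33333
    int64_t thirdFrameUs = secondFrameUs + timeBetweenFramesUs;   // 66666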
@@ -214,8 +288,8 @@ bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
}
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
- if(!mUseStillCameraForTimeLapse) {
- if(mLastTimeLapseFrameRealTimestampUs == 0) {
+ if (!mUseStillCameraForTimeLapse) {
+ if (mLastTimeLapseFrameRealTimestampUs == 0) {
// First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
// to current time (timestampUs) and save frame data.
LOGV("dataCallbackTimestamp timelapse: initial frame");
@@ -244,7 +318,7 @@ bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
const sp<IMemory> &data) {
- if(!mUseStillCameraForTimeLapse) {
+ if (!mUseStillCameraForTimeLapse) {
mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
}
CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);