summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--include/media/stagefright/CameraSourceTimeLapse.h33
-rw-r--r--media/libstagefright/Android.mk1
-rw-r--r--media/libstagefright/CameraSourceTimeLapse.cpp112
3 files changed, 127 insertions, 19 deletions
diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h
index fa11b3e..8ea532c 100644
--- a/include/media/stagefright/CameraSourceTimeLapse.h
+++ b/include/media/stagefright/CameraSourceTimeLapse.h
@@ -49,6 +49,23 @@ private:
// If false, will use the videocamera frames instead.
bool mUseStillCameraForTimeLapse;
+ // Size of picture taken from still camera. This may be larger than the size
+ // of the video, as the still camera may not support the exact video resolution
+ // demanded. See setPictureSizeToClosestSupported().
+ int32_t mPictureWidth;
+ int32_t mPictureHeight;
+
+ // size of the encoded video.
+ int32_t mVideoWidth;
+ int32_t mVideoHeight;
+
+ // True if we need to crop the still camera image to get the video frame.
+ bool mNeedCropping;
+
+ // Start location of the cropping rectangle.
+ int32_t mCropRectStartX;
+ int32_t mCropRectStartY;
+
// Time between capture of two frames during time lapse recording
// Negative value indicates that timelapse is disabled.
int64_t mTimeBetweenTimeLapseFrameCaptureUs;
@@ -107,6 +124,22 @@ private:
virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
const sp<IMemory> &data);
+ // The still camera may not support the demanded video width and height.
+ // We look for the supported picture sizes from the still camera and
+ // choose a supported size with both dimensions no smaller than the corresponding
+ // video dimensions. The still picture will be cropped to get the video frame.
+ void setPictureSizeToClosestSupported(int32_t width, int32_t height);
+
+ // Computes the offset of the rectangle from where to start cropping the
+ // still image into the video frame. The cropped region is centered within
+ // the still image. The offset is stored in (mCropRectStartX, mCropRectStartY).
+ bool computeCropRectangleOffset();
+
+ // Crops the source data into a smaller image starting at
+ // (mCropRectStartX, mCropRectStartY) and of the size of the video frame.
+ // The cropped data is returned in a newly allocated IMemory.
+ sp<IMemory> cropYUVImage(const sp<IMemory> &source_data);
+
// When video camera is used for time lapse capture, returns true
// until enough time has passed for the next time lapse frame. When
// the frame needs to be encoded, it returns false and also modifies
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index bf5643d..0708eec 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -57,6 +57,7 @@ LOCAL_SHARED_LIBRARIES := \
libsonivox \
libvorbisidec \
libsurfaceflinger_client \
+ libstagefright_yuv \
libcamera_client
LOCAL_STATIC_LIBRARIES := \
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index c6186f6..a01450b 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -24,9 +24,13 @@
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
+#include <ui/Rect.h>
#include <utils/String8.h>
+#include "OMX_Video.h"
namespace android {
@@ -72,7 +76,11 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera,
mSkipCurrentFrame(false) {
LOGV("starting time lapse mode");
- if(mUseStillCameraForTimeLapse) {
+ mVideoWidth = width;
+ mVideoHeight = height;
+ if (mUseStillCameraForTimeLapse) {
+ setPictureSizeToClosestSupported(width, height);
+ mNeedCropping = computeCropRectangleOffset();
mMeta->setInt32(kKeyWidth, width);
mMeta->setInt32(kKeyHeight, height);
}
@@ -81,6 +89,31 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera,
CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}
+void CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
+ // TODO: Currently fixed to the highest resolution.
+ // Need to poll the camera and set accordingly.
+ mPictureWidth = 2048;
+ mPictureHeight = 1536;
+}
+
+bool CameraSourceTimeLapse::computeCropRectangleOffset() {
+ if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
+ return false;
+ }
+
+ CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
+
+ int32_t widthDifference = mPictureWidth - mVideoWidth;
+ int32_t heightDifference = mPictureHeight - mVideoHeight;
+
+ mCropRectStartX = widthDifference/2;
+ mCropRectStartY = heightDifference/2;
+
+ LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
+
+ return true;
+}
+
// static
void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
@@ -90,7 +123,7 @@ void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
void CameraSourceTimeLapse::threadTimeLapseEntry() {
while(mStarted) {
- if(mCameraIdle) {
+ if (mCameraIdle) {
LOGV("threadTimeLapseEntry: taking picture");
CHECK_EQ(OK, mCamera->takePicture());
mCameraIdle = false;
@@ -103,20 +136,15 @@ void CameraSourceTimeLapse::threadTimeLapseEntry() {
}
void CameraSourceTimeLapse::startCameraRecording() {
- if(mUseStillCameraForTimeLapse) {
+ if (mUseStillCameraForTimeLapse) {
LOGV("start time lapse recording using still camera");
- int32_t width;
- int32_t height;
- mMeta->findInt32(kKeyWidth, &width);
- mMeta->findInt32(kKeyHeight, &height);
-
int64_t token = IPCThreadState::self()->clearCallingIdentity();
String8 s = mCamera->getParameters();
IPCThreadState::self()->restoreCallingIdentity(token);
CameraParameters params(s);
- params.setPictureSize(width, height);
+ params.setPictureSize(mPictureWidth, mPictureHeight);
mCamera->setParameters(params.flatten());
mCameraIdle = true;
@@ -134,7 +162,7 @@ void CameraSourceTimeLapse::startCameraRecording() {
}
void CameraSourceTimeLapse::stopCameraRecording() {
- if(mUseStillCameraForTimeLapse) {
+ if (mUseStillCameraForTimeLapse) {
void *dummy;
pthread_join(mThreadTimeLapse, &dummy);
} else {
@@ -143,7 +171,7 @@ void CameraSourceTimeLapse::stopCameraRecording() {
}
void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
- if(!mUseStillCameraForTimeLapse) {
+ if (!mUseStillCameraForTimeLapse) {
mCamera->releaseRecordingFrame(frame);
}
}
@@ -158,6 +186,13 @@ sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_d
return newMemory;
}
+// Allocates IMemory of final type MemoryBase with the given size.
+sp<IMemory> allocateIMemory(size_t size) {
+ sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
+ sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
+ return newMemory;
+}
+
// static
void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
@@ -182,12 +217,45 @@ void CameraSourceTimeLapse::restartPreview() {
pthread_attr_destroy(&attr);
}
+sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
+ // find the YUV format
+ int32_t srcFormat;
+ CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+ YUVImage::YUVFormat yuvFormat;
+ if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ yuvFormat = YUVImage::YUV420SemiPlanar;
+ } else if (srcFormat == OMX_COLOR_FormatYUV420Planar) {
+ yuvFormat = YUVImage::YUV420Planar;
+ }
+
+ // allocate memory for cropped image and setup a canvas using it.
+ sp<IMemory> croppedImageMemory = allocateIMemory(
+ YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
+ YUVImage yuvImageCropped(yuvFormat,
+ mVideoWidth, mVideoHeight,
+ (uint8_t *)croppedImageMemory->pointer());
+ YUVCanvas yuvCanvasCrop(yuvImageCropped);
+
+ YUVImage yuvImageSource(yuvFormat,
+ mPictureWidth, mPictureHeight,
+ (uint8_t *)source_data->pointer());
+ yuvCanvasCrop.CopyImageRect(
+ Rect(mCropRectStartX, mCropRectStartY,
+ mCropRectStartX + mVideoWidth,
+ mCropRectStartY + mVideoHeight),
+ 0, 0,
+ yuvImageSource);
+
+ return croppedImageMemory;
+}
+
void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
- if(msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
+ if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
// takePicture will complete after this callback, so restart preview.
restartPreview();
+ return;
}
- if(msgType != CAMERA_MSG_RAW_IMAGE) {
+ if (msgType != CAMERA_MSG_RAW_IMAGE) {
return;
}
@@ -200,12 +268,18 @@ void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &dat
} else {
timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
}
- sp<IMemory> dataCopy = createIMemoryCopy(data);
- dataCallbackTimestamp(timestampUs, msgType, dataCopy);
+
+ if (mNeedCropping) {
+ sp<IMemory> croppedImageData = cropYUVImage(data);
+ dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
+ } else {
+ sp<IMemory> dataCopy = createIMemoryCopy(data);
+ dataCallbackTimestamp(timestampUs, msgType, dataCopy);
+ }
}
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
- if(mSkipCurrentFrame) {
+ if (mSkipCurrentFrame) {
mSkipCurrentFrame = false;
return true;
} else {
@@ -214,8 +288,8 @@ bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
}
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
- if(!mUseStillCameraForTimeLapse) {
- if(mLastTimeLapseFrameRealTimestampUs == 0) {
+ if (!mUseStillCameraForTimeLapse) {
+ if (mLastTimeLapseFrameRealTimestampUs == 0) {
// First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
// to current time (timestampUs) and save frame data.
LOGV("dataCallbackTimestamp timelapse: initial frame");
@@ -244,7 +318,7 @@ bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
const sp<IMemory> &data) {
- if(!mUseStillCameraForTimeLapse) {
+ if (!mUseStillCameraForTimeLapse) {
mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
}
CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);