author     James Dong <jdong@google.com>    2011-06-29 16:56:52 -0700
committer  James Dong <jdong@google.com>    2011-07-24 10:33:54 -0700
commit     3cecf640c4daf2df616b278bd9986018c8182908 (patch)
tree       0a9a7306e8a13af5a7149471569ca73827449aee /media/libstagefright/CameraSourceTimeLapse.cpp
parent     77882a8deb5167235ae591e49c9dcff9abb373c1 (diff)
Do not support still image capture mode for timelapse video recording
related-to-bug: 4973779

Change-Id: Ica665217ab10247b2242acc4e93d4fe9f83e3f45
Diffstat (limited to 'media/libstagefright/CameraSourceTimeLapse.cpp')
-rw-r--r--  media/libstagefright/CameraSourceTimeLapse.cpp | 389
1 file changed, 72 insertions(+), 317 deletions(-)
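
With the still-camera path removed, time-lapse recording relies entirely on the video-camera path that drops incoming frames until the configured capture interval has elapsed and then remaps the surviving frame's timestamp so the encoded clip plays back at the normal video frame rate (see skipFrameAndModifyTimeStamp() in the diff below). The following is a minimal stand-alone sketch of that idea only, with hypothetical names and simplified bookkeeping, not the actual frameworks_av code:

#include <cstdint>
#include <cstdio>

// Hypothetical illustration of time-lapse frame selection: frames arrive at
// the camera's real rate, but only one frame per captureIntervalUs is kept,
// and its timestamp is rewritten so consecutive output frames are exactly
// videoFrameDurationUs apart.
struct TimeLapseMapper {
    TimeLapseMapper(int64_t captureIntervalUs, int64_t videoFrameDurationUs)
        : captureIntervalUs(captureIntervalUs),
          videoFrameDurationUs(videoFrameDurationUs) {}

    // Returns true if the frame should be kept; rewrites *timestampUs.
    bool keepFrame(int64_t *timestampUs) {
        if (!haveFirstFrame) {
            haveFirstFrame = true;
            lastRealTimestampUs = *timestampUs;
            lastOutputTimestampUs = *timestampUs;
            return true;
        }
        if (*timestampUs < lastRealTimestampUs + captureIntervalUs) {
            return false;  // skip: not enough real time has passed yet
        }
        lastRealTimestampUs = *timestampUs;
        lastOutputTimestampUs += videoFrameDurationUs;
        *timestampUs = lastOutputTimestampUs;
        return true;
    }

    int64_t captureIntervalUs;
    int64_t videoFrameDurationUs;
    int64_t lastRealTimestampUs = 0;
    int64_t lastOutputTimestampUs = 0;
    bool haveFirstFrame = false;
};

int main() {
    TimeLapseMapper mapper(1000000 /* 1 s between captures */,
                           33333   /* ~30 fps playback */);
    // Simulate a 10 fps camera running for 5 seconds.
    for (int64_t t = 0; t <= 5000000; t += 100000) {
        int64_t ts = t;
        if (mapper.keepFrame(&ts)) {
            std::printf("kept frame: real %lld us -> output %lld us\n",
                        (long long)t, (long long)ts);
        }
    }
    return 0;
}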
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index fe78c46..1ba79e5 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -24,15 +24,10 @@
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
-#include <media/stagefright/YUVImage.h>
-#include <media/stagefright/YUVCanvas.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
-#include <ui/Rect.h>
#include <utils/String8.h>
#include <utils/Vector.h>
-#include "OMX_Video.h"
-#include <limits.h>
namespace android {
@@ -74,20 +69,14 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(
mLastTimeLapseFrameRealTimestampUs(0),
mSkipCurrentFrame(false) {
- LOGD("starting time lapse mode: %lld us", mTimeBetweenTimeLapseFrameCaptureUs);
+ LOGD("starting time lapse mode: %lld us",
+ mTimeBetweenTimeLapseFrameCaptureUs);
+
mVideoWidth = videoSize.width;
mVideoHeight = videoSize.height;
- if (trySettingVideoSize(videoSize.width, videoSize.height)) {
- mUseStillCameraForTimeLapse = false;
- } else {
- // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
- // than the fastest rate at which the still camera can take pictures.
- mUseStillCameraForTimeLapse = true;
- CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
- mNeedCropping = computeCropRectangleOffset();
- mMeta->setInt32(kKeyWidth, videoSize.width);
- mMeta->setInt32(kKeyHeight, videoSize.height);
+ if (!trySettingVideoSize(videoSize.width, videoSize.height)) {
+ mInitCheck = NO_INIT;
}
// Initialize quick stop variables.
@@ -101,24 +90,22 @@ CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}
void CameraSourceTimeLapse::startQuickReadReturns() {
+ LOGV("startQuickReadReturns");
Mutex::Autolock autoLock(mQuickStopLock);
- LOGV("Enabling quick read returns");
// Enable quick stop mode.
mQuickStop = true;
- if (mUseStillCameraForTimeLapse) {
- // wake up the thread right away.
- mTakePictureCondition.signal();
- } else {
- // Force dataCallbackTimestamp() coming from the video camera to not skip the
- // next frame as we want read() to get a get a frame right away.
- mForceRead = true;
- }
+ // Force dataCallbackTimestamp() coming from the video camera to
+ // not skip the next frame as we want read() to get a frame
+ // right away.
+ mForceRead = true;
}
-bool CameraSourceTimeLapse::trySettingVideoSize(int32_t width, int32_t height) {
- LOGV("trySettingVideoSize: %dx%d", width, height);
+bool CameraSourceTimeLapse::trySettingVideoSize(
+ int32_t width, int32_t height) {
+
+ LOGV("trySettingVideoSize");
int64_t token = IPCThreadState::self()->clearCallingIdentity();
String8 s = mCamera->getParameters();
@@ -162,53 +149,8 @@ bool CameraSourceTimeLapse::trySettingVideoSize(int32_t width, int32_t height) {
return isSuccessful;
}
-bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
- LOGV("setPictureSizeToClosestSupported: %dx%d", width, height);
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- String8 s = mCamera->getParameters();
- IPCThreadState::self()->restoreCallingIdentity(token);
-
- CameraParameters params(s);
- Vector<Size> supportedSizes;
- params.getSupportedPictureSizes(supportedSizes);
-
- int32_t minPictureSize = INT_MAX;
- for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
- int32_t pictureWidth = supportedSizes[i].width;
- int32_t pictureHeight = supportedSizes[i].height;
-
- if ((pictureWidth >= width) && (pictureHeight >= height)) {
- int32_t pictureSize = pictureWidth*pictureHeight;
- if (pictureSize < minPictureSize) {
- minPictureSize = pictureSize;
- mPictureWidth = pictureWidth;
- mPictureHeight = pictureHeight;
- }
- }
- }
- LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
- return (minPictureSize != INT_MAX);
-}
-
-bool CameraSourceTimeLapse::computeCropRectangleOffset() {
- if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
- return false;
- }
-
- CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
-
- int32_t widthDifference = mPictureWidth - mVideoWidth;
- int32_t heightDifference = mPictureHeight - mVideoHeight;
-
- mCropRectStartX = widthDifference/2;
- mCropRectStartY = heightDifference/2;
-
- LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
-
- return true;
-}
-
void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
+ LOGV("signalBufferReturned");
Mutex::Autolock autoLock(mQuickStopLock);
if (mQuickStop && (buffer == mLastReadBufferCopy)) {
buffer->setObserver(NULL);
@@ -218,7 +160,12 @@ void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
}
}
-void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
+void createMediaBufferCopy(
+ const MediaBuffer& sourceBuffer,
+ int64_t frameTime,
+ MediaBuffer **newBuffer) {
+
+ LOGV("createMediaBufferCopy");
size_t sourceSize = sourceBuffer.size();
void* sourcePointer = sourceBuffer.data();
@@ -229,6 +176,7 @@ void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, M
}
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
+ LOGV("fillLastReadBufferCopy");
int64_t frameTime;
CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
@@ -238,11 +186,12 @@ void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
status_t CameraSourceTimeLapse::read(
MediaBuffer **buffer, const ReadOptions *options) {
+ LOGV("read");
if (mLastReadBufferCopy == NULL) {
mLastReadStatus = CameraSource::read(buffer, options);
- // mQuickStop may have turned to true while read was blocked. Make a copy of
- // the buffer in that case.
+ // mQuickStop may have turned to true while read was blocked.
+ // Make a copy of the buffer in that case.
Mutex::Autolock autoLock(mQuickStopLock);
if (mQuickStop && *buffer) {
fillLastReadBufferCopy(**buffer);
@@ -255,105 +204,19 @@ status_t CameraSourceTimeLapse::read(
}
}
-// static
-void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
- CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
- source->threadTimeLapseEntry();
- return NULL;
-}
-
-void CameraSourceTimeLapse::threadTimeLapseEntry() {
- while (mStarted) {
- {
- Mutex::Autolock autoLock(mCameraIdleLock);
- if (!mCameraIdle) {
- mCameraIdleCondition.wait(mCameraIdleLock);
- }
- CHECK(mCameraIdle);
- mCameraIdle = false;
- }
-
- // Even if mQuickStop == true we need to take one more picture
- // as a read() may be blocked, waiting for a frame to get available.
- // After this takePicture, if mQuickStop == true, we can safely exit
- // this thread as read() will make a copy of this last frame and keep
- // returning it in the quick stop mode.
- Mutex::Autolock autoLock(mQuickStopLock);
- CHECK_EQ(OK, mCamera->takePicture(CAMERA_MSG_RAW_IMAGE));
- if (mQuickStop) {
- LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
- return;
- }
- mTakePictureCondition.waitRelative(mQuickStopLock,
- mTimeBetweenTimeLapseFrameCaptureUs * 1000);
- }
- LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
-}
-
-void CameraSourceTimeLapse::startCameraRecording() {
- if (mUseStillCameraForTimeLapse) {
- LOGV("start time lapse recording using still camera");
-
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- String8 s = mCamera->getParameters();
-
- CameraParameters params(s);
- params.setPictureSize(mPictureWidth, mPictureHeight);
- mCamera->setParameters(params.flatten());
- mCameraIdle = true;
- mStopWaitingForIdleCamera = false;
-
- // disable shutter sound and play the recording sound.
- mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
- mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
- IPCThreadState::self()->restoreCallingIdentity(token);
-
- // create a thread which takes pictures in a loop
- pthread_attr_t attr;
- pthread_attr_init(&attr);
- pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
-
- pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
- pthread_attr_destroy(&attr);
- } else {
- LOGV("start time lapse recording using video camera");
- CameraSource::startCameraRecording();
- }
-}
-
void CameraSourceTimeLapse::stopCameraRecording() {
- if (mUseStillCameraForTimeLapse) {
- void *dummy;
- pthread_join(mThreadTimeLapse, &dummy);
-
- // Last takePicture may still be underway. Wait for the camera to get
- // idle.
- Mutex::Autolock autoLock(mCameraIdleLock);
- mStopWaitingForIdleCamera = true;
- if (!mCameraIdle) {
- mCameraIdleCondition.wait(mCameraIdleLock);
- }
- CHECK(mCameraIdle);
- mCamera->setListener(NULL);
-
- // play the recording sound.
- mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
- } else {
- CameraSource::stopCameraRecording();
- }
+ LOGV("stopCameraRecording");
+ CameraSource::stopCameraRecording();
if (mLastReadBufferCopy) {
mLastReadBufferCopy->release();
mLastReadBufferCopy = NULL;
}
}
-void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
- if (!mUseStillCameraForTimeLapse) {
- CameraSource::releaseRecordingFrame(frame);
- }
-}
+sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(
+ const sp<IMemory> &source_data) {
-sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
+ LOGV("createIMemoryCopy");
size_t source_size = source_data->size();
void* source_pointer = source_data->pointer();
@@ -363,102 +226,8 @@ sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_d
return newMemory;
}
-// Allocates IMemory of final type MemoryBase with the given size.
-sp<IMemory> allocateIMemory(size_t size) {
- sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
- sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
- return newMemory;
-}
-
-// static
-void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
- CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
- source->threadStartPreview();
- return NULL;
-}
-
-void CameraSourceTimeLapse::threadStartPreview() {
- CHECK_EQ(OK, mCamera->startPreview());
- Mutex::Autolock autoLock(mCameraIdleLock);
- mCameraIdle = true;
- mCameraIdleCondition.signal();
-}
-
-void CameraSourceTimeLapse::restartPreview() {
- // Start this in a different thread, so that the dataCallback can return
- LOGV("restartPreview");
- pthread_attr_t attr;
- pthread_attr_init(&attr);
- pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
-
- pthread_t threadPreview;
- pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
- pthread_attr_destroy(&attr);
-}
-
-sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
- // find the YUV format
- int32_t srcFormat;
- CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
- YUVImage::YUVFormat yuvFormat;
- if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
- yuvFormat = YUVImage::YUV420SemiPlanar;
- } else {
- CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
- yuvFormat = YUVImage::YUV420Planar;
- }
-
- // allocate memory for cropped image and setup a canvas using it.
- sp<IMemory> croppedImageMemory = allocateIMemory(
- YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
- YUVImage yuvImageCropped(yuvFormat,
- mVideoWidth, mVideoHeight,
- (uint8_t *)croppedImageMemory->pointer());
- YUVCanvas yuvCanvasCrop(yuvImageCropped);
-
- YUVImage yuvImageSource(yuvFormat,
- mPictureWidth, mPictureHeight,
- (uint8_t *)source_data->pointer());
- yuvCanvasCrop.CopyImageRect(
- Rect(mCropRectStartX, mCropRectStartY,
- mCropRectStartX + mVideoWidth,
- mCropRectStartY + mVideoHeight),
- 0, 0,
- yuvImageSource);
-
- return croppedImageMemory;
-}
-
-void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
- if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
- // takePicture will complete after this callback, so restart preview.
- restartPreview();
- return;
- }
- if (msgType != CAMERA_MSG_RAW_IMAGE) {
- return;
- }
-
- LOGV("dataCallback for timelapse still frame");
- CHECK_EQ(true, mUseStillCameraForTimeLapse);
-
- int64_t timestampUs;
- if (mNumFramesReceived == 0) {
- timestampUs = mStartTimeUs;
- } else {
- timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
- }
-
- if (mNeedCropping) {
- sp<IMemory> croppedImageData = cropYUVImage(data);
- dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
- } else {
- sp<IMemory> dataCopy = createIMemoryCopy(data);
- dataCallbackTimestamp(timestampUs, msgType, dataCopy);
- }
-}
-
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
+ LOGV("skipCurrentFrame");
if (mSkipCurrentFrame) {
mSkipCurrentFrame = false;
return true;
@@ -468,72 +237,58 @@ bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
}
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
- if (!mUseStillCameraForTimeLapse) {
- if (mLastTimeLapseFrameRealTimestampUs == 0) {
- // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
- // to current time (timestampUs) and save frame data.
- LOGV("dataCallbackTimestamp timelapse: initial frame");
+ LOGV("skipFrameAndModifyTimeStamp");
+ if (mLastTimeLapseFrameRealTimestampUs == 0) {
+ // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
+ // to current time (timestampUs) and save frame data.
+ LOGV("dataCallbackTimestamp timelapse: initial frame");
- mLastTimeLapseFrameRealTimestampUs = *timestampUs;
- return false;
- }
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ return false;
+ }
- {
- Mutex::Autolock autoLock(mQuickStopLock);
-
- // mForceRead may be set to true by startQuickReadReturns(). In that
- // case don't skip this frame.
- if (mForceRead) {
- LOGV("dataCallbackTimestamp timelapse: forced read");
- mForceRead = false;
- *timestampUs =
- mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
- return false;
- }
- }
+ {
+ Mutex::Autolock autoLock(mQuickStopLock);
- // Workaround to bypass the first 2 input frames for skipping.
- // The first 2 output frames from the encoder are: decoder specific info and
- // the compressed video frame data for the first input video frame.
- if (mNumFramesEncoded >= 1 && *timestampUs <
- (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
- // Skip all frames from last encoded frame until
- // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
- // Tell the camera to release its recording frame and return.
- LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
- return true;
- } else {
- // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
- // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
- // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
- // of the last encoded frame's time stamp.
- LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
-
- mLastTimeLapseFrameRealTimestampUs = *timestampUs;
- *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ // mForceRead may be set to true by startQuickReadReturns(). In that
+ // case don't skip this frame.
+ if (mForceRead) {
+ LOGV("dataCallbackTimestamp timelapse: forced read");
+ mForceRead = false;
+ *timestampUs =
+ mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
return false;
}
}
+
+ // Workaround to bypass the first 2 input frames for skipping.
+ // The first 2 output frames from the encoder are: decoder specific info and
+ // the compressed video frame data for the first input video frame.
+ if (mNumFramesEncoded >= 1 && *timestampUs <
+ (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
+ // Skip all frames from last encoded frame until
+ // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
+ // Tell the camera to release its recording frame and return.
+ LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
+ return true;
+ } else {
+ // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
+ // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
+ // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
+ // of the last encoded frame's time stamp.
+ LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
+
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ return false;
+ }
return false;
}
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
const sp<IMemory> &data) {
- if (!mUseStillCameraForTimeLapse) {
- mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
- } else {
- Mutex::Autolock autoLock(mCameraIdleLock);
- // If we are using the still camera and stop() has been called, it may
- // be waiting for the camera to get idle. In that case return
- // immediately. Calling CameraSource::dataCallbackTimestamp() will lead
- // to a deadlock since it tries to access CameraSource::mLock which in
- // this case is held by CameraSource::stop() currently waiting for the
- // camera to get idle. And camera will not get idle until this call
- // returns.
- if (mStopWaitingForIdleCamera) {
- return;
- }
- }
+ LOGV("dataCallbackTimestamp");
+ mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}
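
The quick-stop plumbing retained in this file (startQuickReadReturns(), fillLastReadBufferCopy(), read(), signalBufferReturned()) boils down to caching a deep copy of the last frame handed to the caller and serving that copy on every later read(), so stopping never blocks on a new camera frame. Below is a rough stand-alone illustration of that pattern under those assumptions, using hypothetical types rather than the MediaBuffer/IMemory classes:

#include <cstdint>
#include <cstdio>
#include <memory>
#include <vector>

// Hypothetical "quick read returns" source: once quick-stop is requested,
// the source keeps a private copy of the last frame it delivered and keeps
// returning that copy, so callers are never left waiting on the camera.
class QuickStopSource {
public:
    void requestQuickStop() { quickStop_ = true; }

    // Returns the next frame, or the cached copy once quick-stop is active.
    std::shared_ptr<std::vector<uint8_t>> read() {
        if (lastCopy_) {
            return lastCopy_;  // keep serving the cached frame
        }
        auto frame = captureFrame();
        if (quickStop_ && frame) {
            // Deep-copy so the original buffer can be released to the camera.
            lastCopy_ = std::make_shared<std::vector<uint8_t>>(*frame);
        }
        return frame;
    }

private:
    std::shared_ptr<std::vector<uint8_t>> captureFrame() {
        // Stand-in for a blocking camera read (one YUV420 VGA frame of zeros).
        return std::make_shared<std::vector<uint8_t>>(640 * 480 * 3 / 2, 0);
    }

    bool quickStop_ = false;
    std::shared_ptr<std::vector<uint8_t>> lastCopy_;
};

int main() {
    QuickStopSource source;
    source.requestQuickStop();
    auto first = source.read();   // real frame; a copy is cached
    auto again = source.read();   // served from the cached copy, no blocking
    std::printf("first=%zu bytes, again=%zu bytes (cached copy)\n",
                first->size(), again->size());
    return 0;
}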