From 3cecf640c4daf2df616b278bd9986018c8182908 Mon Sep 17 00:00:00 2001 From: James Dong Date: Wed, 29 Jun 2011 16:56:52 -0700 Subject: Do not support still image capture mode for timelapse video recording related-to-bug: 4973779 Change-Id: Ica665217ab10247b2242acc4e93d4fe9f83e3f45 --- media/libmedia/IMediaRecorder.cpp | 17 - media/libmedia/mediarecorder.cpp | 27 -- .../libmediaplayerservice/MediaRecorderClient.cpp | 11 - media/libmediaplayerservice/MediaRecorderClient.h | 1 - .../libmediaplayerservice/StagefrightRecorder.cpp | 177 +--------- media/libmediaplayerservice/StagefrightRecorder.h | 15 +- media/libstagefright/CameraSourceTimeLapse.cpp | 389 ++++----------------- 7 files changed, 79 insertions(+), 558 deletions(-) (limited to 'media') diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp index 7e44c29..38e111e 100644 --- a/media/libmedia/IMediaRecorder.cpp +++ b/media/libmedia/IMediaRecorder.cpp @@ -46,7 +46,6 @@ enum { SET_AUDIO_ENCODER, SET_OUTPUT_FILE_PATH, SET_OUTPUT_FILE_FD, - SET_OUTPUT_FILE_AUXILIARY_FD, SET_VIDEO_SIZE, SET_VIDEO_FRAMERATE, SET_PARAMETERS, @@ -177,15 +176,6 @@ public: return reply.readInt32(); } - status_t setOutputFileAuxiliary(int fd) { - LOGV("setOutputFileAuxiliary(%d)", fd); - Parcel data, reply; - data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor()); - data.writeFileDescriptor(fd); - remote()->transact(SET_OUTPUT_FILE_AUXILIARY_FD, data, &reply); - return reply.readInt32(); - } - status_t setVideoSize(int width, int height) { LOGV("setVideoSize(%dx%d)", width, height); @@ -404,13 +394,6 @@ status_t BnMediaRecorder::onTransact( ::close(fd); return NO_ERROR; } break; - case SET_OUTPUT_FILE_AUXILIARY_FD: { - LOGV("SET_OUTPUT_FILE_AUXILIARY_FD"); - CHECK_INTERFACE(IMediaRecorder, data, reply); - int fd = dup(data.readFileDescriptor()); - reply->writeInt32(setOutputFileAuxiliary(fd)); - return NO_ERROR; - } break; case SET_VIDEO_SIZE: { LOGV("SET_VIDEO_SIZE"); CHECK_INTERFACE(IMediaRecorder, data, reply); diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp index fab674c..11d281f 100644 --- a/media/libmedia/mediarecorder.cpp +++ b/media/libmedia/mediarecorder.cpp @@ -322,32 +322,6 @@ status_t MediaRecorder::setOutputFile(int fd, int64_t offset, int64_t length) return ret; } -status_t MediaRecorder::setOutputFileAuxiliary(int fd) -{ - LOGV("setOutputFileAuxiliary(%d)", fd); - if(mMediaRecorder == NULL) { - LOGE("media recorder is not initialized yet"); - return INVALID_OPERATION; - } - if (mIsAuxiliaryOutputFileSet) { - LOGE("output file has already been set"); - return INVALID_OPERATION; - } - if (!(mCurrentState & MEDIA_RECORDER_DATASOURCE_CONFIGURED)) { - LOGE("setOutputFile called in an invalid state(%d)", mCurrentState); - return INVALID_OPERATION; - } - - status_t ret = mMediaRecorder->setOutputFileAuxiliary(fd); - if (OK != ret) { - LOGV("setOutputFileAuxiliary failed: %d", ret); - mCurrentState = MEDIA_RECORDER_ERROR; - return ret; - } - mIsAuxiliaryOutputFileSet = true; - return ret; -} - status_t MediaRecorder::setVideoSize(int width, int height) { LOGV("setVideoSize(%d, %d)", width, height); @@ -629,7 +603,6 @@ void MediaRecorder::doCleanUp() mIsAudioEncoderSet = false; mIsVideoEncoderSet = false; mIsOutputFileSet = false; - mIsAuxiliaryOutputFileSet = false; } // Release should be OK in any state diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp index 905b885..6f80b35 100644 --- 
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -178,17 +178,6 @@ status_t MediaRecorderClient::setOutputFile(int fd, int64_t offset, int64_t leng
     return mRecorder->setOutputFile(fd, offset, length);
 }
 
-status_t MediaRecorderClient::setOutputFileAuxiliary(int fd)
-{
-    LOGV("setOutputFileAuxiliary(%d)", fd);
-    Mutex::Autolock lock(mLock);
-    if (mRecorder == NULL) {
-        LOGE("recorder is not initialized");
-        return NO_INIT;
-    }
-    return mRecorder->setOutputFileAuxiliary(fd);
-}
-
 status_t MediaRecorderClient::setVideoSize(int width, int height)
 {
     LOGV("setVideoSize(%dx%d)", width, height);
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index c87a3c0..c9ccf22 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -41,7 +41,6 @@ public:
     virtual status_t setOutputFile(const char* path);
     virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
-    virtual status_t setOutputFileAuxiliary(int fd);
     virtual status_t setVideoSize(int width, int height);
     virtual status_t setVideoFrameRate(int frames_per_second);
     virtual status_t setParameters(const String8& params);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 6427bb7..6fdb726 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -28,9 +28,7 @@
 #include
 #include
 #include
-#include
 #include
-#include
 #include
 #include
 #include
@@ -67,8 +65,8 @@ static void addBatteryData(uint32_t params) {
 
 StagefrightRecorder::StagefrightRecorder()
-    : mWriter(NULL), mWriterAux(NULL),
-      mOutputFd(-1), mOutputFdAux(-1),
+    : mWriter(NULL),
+      mOutputFd(-1),
       mAudioSource(AUDIO_SOURCE_CNT),
       mVideoSource(VIDEO_SOURCE_LIST_END),
       mStarted(false), mSurfaceMediaSource(NULL) {
 
@@ -259,24 +257,6 @@ status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t leng
     return OK;
 }
 
-status_t StagefrightRecorder::setOutputFileAuxiliary(int fd) {
-    LOGV("setOutputFileAuxiliary: %d", fd);
-
-    if (fd < 0) {
-        LOGE("Invalid file descriptor: %d", fd);
-        return -EBADF;
-    }
-
-    mCaptureAuxVideo = true;
-
-    if (mOutputFdAux >= 0) {
-        ::close(mOutputFdAux);
-    }
-    mOutputFdAux = dup(fd);
-
-    return OK;
-}
-
 // Attempt to parse an int64 literal optionally surrounded by whitespace,
 // returns true on success, false otherwise.
 static bool safe_strtoi64(const char *s, int64_t *val) {
@@ -573,42 +553,6 @@ status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t t
     return OK;
 }
 
-status_t StagefrightRecorder::setParamAuxVideoWidth(int32_t width) {
-    LOGV("setParamAuxVideoWidth : %d", width);
-
-    if (width <= 0) {
-        LOGE("Width (%d) is not positive", width);
-        return BAD_VALUE;
-    }
-
-    mAuxVideoWidth = width;
-    return OK;
-}
-
-status_t StagefrightRecorder::setParamAuxVideoHeight(int32_t height) {
-    LOGV("setParamAuxVideoHeight : %d", height);
-
-    if (height <= 0) {
-        LOGE("Height (%d) is not positive", height);
-        return BAD_VALUE;
-    }
-
-    mAuxVideoHeight = height;
-    return OK;
-}
-
-status_t StagefrightRecorder::setParamAuxVideoEncodingBitRate(int32_t bitRate) {
-    LOGV("StagefrightRecorder::setParamAuxVideoEncodingBitRate: %d", bitRate);
-
-    if (bitRate <= 0) {
-        LOGE("Invalid video encoding bit rate: %d", bitRate);
-        return BAD_VALUE;
-    }
-
-    mAuxVideoBitRate = bitRate;
-    return OK;
-}
-
 status_t StagefrightRecorder::setParamGeoDataLongitude(
         int32_t longitudex10000) {
 
@@ -738,21 +682,6 @@ status_t StagefrightRecorder::setParameter(
             return setParamTimeBetweenTimeLapseFrameCapture(
                     1000LL * timeBetweenTimeLapseFrameCaptureMs);
         }
-    } else if (key == "video-aux-param-width") {
-        int32_t auxWidth;
-        if (safe_strtoi32(value.string(), &auxWidth)) {
-            return setParamAuxVideoWidth(auxWidth);
-        }
-    } else if (key == "video-aux-param-height") {
-        int32_t auxHeight;
-        if (safe_strtoi32(value.string(), &auxHeight)) {
-            return setParamAuxVideoHeight(auxHeight);
-        }
-    } else if (key == "video-aux-param-encoding-bitrate") {
-        int32_t auxVideoBitRate;
-        if (safe_strtoi32(value.string(), &auxVideoBitRate)) {
-            return setParamAuxVideoEncodingBitRate(auxVideoBitRate);
-        }
     } else {
         LOGE("setParameter: failed to find key %s", key.string());
     }
@@ -1517,7 +1446,6 @@ status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
 }
 
 status_t StagefrightRecorder::setupMPEG4Recording(
-        bool useSplitCameraSource,
         int outputFd,
         int32_t videoWidth, int32_t videoHeight,
         int32_t videoBitRate,
@@ -1531,28 +1459,7 @@
     if (mVideoSource < VIDEO_SOURCE_LIST_END) {
 
         sp<MediaSource> mediaSource;
-        if (useSplitCameraSource) {
-            // TODO: Check if there is a better way to handle this
-            if (mVideoSource == VIDEO_SOURCE_GRALLOC_BUFFER) {
-                LOGE("Cannot use split camera when encoding frames");
-                return INVALID_OPERATION;
-            }
-            LOGV("Using Split camera source");
-            mediaSource = mCameraSourceSplitter->createClient();
-        } else {
-            err = setupMediaSource(&mediaSource);
-        }
-
-        if ((videoWidth != mVideoWidth) || (videoHeight != mVideoHeight)) {
-            // TODO: Might be able to handle downsampling even if using GRAlloc
-            if (mVideoSource == VIDEO_SOURCE_GRALLOC_BUFFER) {
-                LOGE("Cannot change size or Downsample when encoding frames");
-                return INVALID_OPERATION;
-            }
-            // Use downsampling from the original source.
-            mediaSource =
-                new VideoSourceDownSampler(mediaSource, videoWidth, videoHeight);
-        }
+        err = setupMediaSource(&mediaSource);
         if (err != OK) {
             return err;
         }
@@ -1620,24 +1527,8 @@ void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalB
 }
 
 status_t StagefrightRecorder::startMPEG4Recording() {
-    if (mCaptureAuxVideo) {
-        if (!mCaptureTimeLapse) {
-            LOGE("Auxiliary video can be captured only in time lapse mode");
-            return UNKNOWN_ERROR;
-        }
-        LOGV("Creating MediaSourceSplitter");
-        sp<CameraSource> cameraSource;
-        status_t err = setupCameraSource(&cameraSource);
-        if (err != OK) {
-            return err;
-        }
-        mCameraSourceSplitter = new MediaSourceSplitter(cameraSource);
-    } else {
-        mCameraSourceSplitter = NULL;
-    }
-
     int32_t totalBitRate;
-    status_t err = setupMPEG4Recording(mCaptureAuxVideo,
+    status_t err = setupMPEG4Recording(
             mOutputFd, mVideoWidth, mVideoHeight,
             mVideoBitRate, &totalBitRate, &mWriter);
     if (err != OK) {
         return err;
@@ -1653,33 +1544,6 @@ status_t StagefrightRecorder::startMPEG4Recording() {
         return err;
     }
 
-    if (mCaptureAuxVideo) {
-        CHECK(mOutputFdAux >= 0);
-        if (mWriterAux != NULL) {
-            LOGE("Auxiliary File writer is not avaialble");
-            return UNKNOWN_ERROR;
-        }
-        if ((mAuxVideoWidth > mVideoWidth) || (mAuxVideoHeight > mVideoHeight) ||
-            ((mAuxVideoWidth == mVideoWidth) && mAuxVideoHeight == mVideoHeight)) {
-            LOGE("Auxiliary video size (%d x %d) same or larger than the main video size (%d x %d)",
-                mAuxVideoWidth, mAuxVideoHeight, mVideoWidth, mVideoHeight);
-            return UNKNOWN_ERROR;
-        }
-
-        int32_t totalBitrateAux;
-        err = setupMPEG4Recording(mCaptureAuxVideo,
-                mOutputFdAux, mAuxVideoWidth, mAuxVideoHeight,
-                mAuxVideoBitRate, &totalBitrateAux, &mWriterAux);
-        if (err != OK) {
-            return err;
-        }
-
-        sp<MetaData> metaAux = new MetaData;
-        setupMPEG4MetaData(startTimeUs, totalBitrateAux, &metaAux);
-
-        return mWriterAux->start(metaAux.get());
-    }
-
     return OK;
 }
 
@@ -1690,13 +1554,6 @@ status_t StagefrightRecorder::pause() {
     }
     mWriter->pause();
 
-    if (mCaptureAuxVideo) {
-        if (mWriterAux == NULL) {
-            return UNKNOWN_ERROR;
-        }
-        mWriterAux->pause();
-    }
-
     if (mStarted) {
         mStarted = false;
 
@@ -1724,13 +1581,6 @@ status_t StagefrightRecorder::stop() {
         mCameraSourceTimeLapse = NULL;
     }
 
-    if (mCaptureAuxVideo) {
-        if (mWriterAux != NULL) {
-            mWriterAux->stop();
-            mWriterAux.clear();
-        }
-    }
-
     if (mWriter != NULL) {
         err = mWriter->stop();
         mWriter.clear();
@@ -1741,13 +1591,6 @@
         mOutputFd = -1;
     }
 
-    if (mCaptureAuxVideo) {
-        if (mOutputFdAux >= 0) {
-            ::close(mOutputFdAux);
-            mOutputFdAux = -1;
-        }
-    }
-
     if (mStarted) {
         mStarted = false;
 
@@ -1787,11 +1630,8 @@ status_t StagefrightRecorder::reset() {
     mVideoEncoder = VIDEO_ENCODER_H263;
     mVideoWidth = 176;
     mVideoHeight = 144;
-    mAuxVideoWidth = 176;
-    mAuxVideoHeight = 144;
     mFrameRate = -1;
     mVideoBitRate = 192000;
-    mAuxVideoBitRate = 192000;
     mSampleRate = 8000;
     mAudioChannels = 1;
     mAudioBitRate = 12200;
@@ -1811,8 +1651,6 @@
     mTrackEveryTimeDurationUs = 0;
     mCaptureTimeLapse = false;
     mTimeBetweenTimeLapseFrameCaptureUs = -1;
-    mCaptureAuxVideo = false;
-    mCameraSourceSplitter = NULL;
     mCameraSourceTimeLapse = NULL;
     mIsMetaDataStoredInVideoBuffers = false;
     mEncoderProfiles = MediaProfiles::getInstance();
@@ -1821,7 +1659,6 @@
     mLongitudex10000 = -3600000;
 
     mOutputFd = -1;
-    mOutputFdAux = -1;
 
     return OK;
 }
@@ -1858,8 +1695,6 @@ status_t StagefrightRecorder::dump(
     snprintf(buffer, SIZE, " Recorder: %p\n", this);
     snprintf(buffer, SIZE, " Output file (fd %d):\n", mOutputFd);
     result.append(buffer);
-    snprintf(buffer, SIZE, " Output file Auxiliary (fd %d):\n", mOutputFdAux);
-    result.append(buffer);
     snprintf(buffer, SIZE, " File format: %d\n", mOutputFormat);
     result.append(buffer);
     snprintf(buffer, SIZE, " Max file size (bytes): %lld\n", mMaxFileSizeBytes);
@@ -1904,14 +1739,10 @@ status_t StagefrightRecorder::dump(
     result.append(buffer);
     snprintf(buffer, SIZE, " Frame size (pixels): %dx%d\n", mVideoWidth, mVideoHeight);
     result.append(buffer);
-    snprintf(buffer, SIZE, " Aux Frame size (pixels): %dx%d\n", mAuxVideoWidth, mAuxVideoHeight);
-    result.append(buffer);
     snprintf(buffer, SIZE, " Frame rate (fps): %d\n", mFrameRate);
     result.append(buffer);
     snprintf(buffer, SIZE, " Bit rate (bps): %d\n", mVideoBitRate);
     result.append(buffer);
-    snprintf(buffer, SIZE, " Aux Bit rate (bps): %d\n", mAuxVideoBitRate);
-    result.append(buffer);
     ::write(fd, result.string(), result.size());
     return OK;
 }
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 1618b92..5c5f05c 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -30,7 +30,6 @@ class Camera;
 class ICameraRecordingProxy;
 class CameraSource;
 class CameraSourceTimeLapse;
-class MediaSourceSplitter;
 struct MediaSource;
 struct MediaWriter;
 class MetaData;
@@ -55,7 +54,6 @@ struct StagefrightRecorder : public MediaRecorderBase {
     virtual status_t setPreviewSurface(const sp<Surface>& surface);
     virtual status_t setOutputFile(const char *path);
     virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
-    virtual status_t setOutputFileAuxiliary(int fd);
     virtual status_t setParameters(const String8& params);
     virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
     virtual status_t prepare();
@@ -74,8 +72,8 @@ private:
     sp<ICameraRecordingProxy> mCameraProxy;
     sp<Surface> mPreviewSurface;
    sp<IMediaRecorderClient> mListener;
-    sp<MediaWriter> mWriter, mWriterAux;
-    int mOutputFd, mOutputFdAux;
+    sp<MediaWriter> mWriter;
+    int mOutputFd;
     sp<AudioSource> mAudioSourceNode;
 
     audio_source_t mAudioSource;
@@ -85,9 +83,8 @@ private:
     video_encoder mVideoEncoder;
     bool mUse64BitFileOffset;
     int32_t mVideoWidth, mVideoHeight;
-    int32_t mAuxVideoWidth, mAuxVideoHeight;
     int32_t mFrameRate;
-    int32_t mVideoBitRate, mAuxVideoBitRate;
+    int32_t mVideoBitRate;
     int32_t mAudioBitRate;
     int32_t mAudioChannels;
     int32_t mSampleRate;
@@ -109,8 +106,6 @@ private:
     bool mCaptureTimeLapse;
     int64_t mTimeBetweenTimeLapseFrameCaptureUs;
-    bool mCaptureAuxVideo;
-    sp<MediaSourceSplitter> mCameraSourceSplitter;
     sp<CameraSourceTimeLapse> mCameraSourceTimeLapse;
 
@@ -127,7 +122,6 @@ private:
     sp<SurfaceMediaSource> mSurfaceMediaSource;
 
     status_t setupMPEG4Recording(
-        bool useSplitCameraSource,
         int outputFd,
         int32_t videoWidth, int32_t videoHeight,
         int32_t videoBitRate,
@@ -166,9 +160,6 @@ private:
     status_t setParamAudioTimeScale(int32_t timeScale);
     status_t setParamTimeLapseEnable(int32_t timeLapseEnable);
     status_t setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs);
-    status_t setParamAuxVideoHeight(int32_t height);
-    status_t setParamAuxVideoWidth(int32_t width);
-    status_t setParamAuxVideoEncodingBitRate(int32_t bitRate);
     status_t setParamVideoEncodingBitRate(int32_t bitRate);
     status_t setParamVideoIFramesInterval(int32_t seconds);
     status_t setParamVideoEncoderProfile(int32_t profile);
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index fe78c46..1ba79e5 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -24,15 +24,10 @@
 #include
 #include
 #include
-#include
-#include
 #include
 #include
-#include
 #include
 #include
-#include "OMX_Video.h"
-#include
 
 namespace android {
 
@@ -74,20 +69,14 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(
       mLastTimeLapseFrameRealTimestampUs(0),
       mSkipCurrentFrame(false) {
 
-    LOGD("starting time lapse mode: %lld us", mTimeBetweenTimeLapseFrameCaptureUs);
+    LOGD("starting time lapse mode: %lld us",
+        mTimeBetweenTimeLapseFrameCaptureUs);
+
     mVideoWidth = videoSize.width;
     mVideoHeight = videoSize.height;
 
-    if (trySettingVideoSize(videoSize.width, videoSize.height)) {
-        mUseStillCameraForTimeLapse = false;
-    } else {
-        // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
-        // than the fastest rate at which the still camera can take pictures.
-        mUseStillCameraForTimeLapse = true;
-        CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
-        mNeedCropping = computeCropRectangleOffset();
-        mMeta->setInt32(kKeyWidth, videoSize.width);
-        mMeta->setInt32(kKeyHeight, videoSize.height);
+    if (!trySettingVideoSize(videoSize.width, videoSize.height)) {
+        mInitCheck = NO_INIT;
     }
 
     // Initialize quick stop variables.
@@ -101,24 +90,22 @@ CameraSourceTimeLapse::~CameraSourceTimeLapse() {
 }
 
 void CameraSourceTimeLapse::startQuickReadReturns() {
+    LOGV("startQuickReadReturns");
     Mutex::Autolock autoLock(mQuickStopLock);
-    LOGV("Enabling quick read returns");
 
     // Enable quick stop mode.
     mQuickStop = true;
 
-    if (mUseStillCameraForTimeLapse) {
-        // wake up the thread right away.
-        mTakePictureCondition.signal();
-    } else {
-        // Force dataCallbackTimestamp() coming from the video camera to not skip the
-        // next frame as we want read() to get a get a frame right away.
-        mForceRead = true;
-    }
+    // Force dataCallbackTimestamp() coming from the video camera to
+    // not skip the next frame as we want read() to get a get a frame
+    // right away.
+    mForceRead = true;
 }
 
-bool CameraSourceTimeLapse::trySettingVideoSize(int32_t width, int32_t height) {
-    LOGV("trySettingVideoSize: %dx%d", width, height);
+bool CameraSourceTimeLapse::trySettingVideoSize(
+        int32_t width, int32_t height) {
+
+    LOGV("trySettingVideoSize");
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
     String8 s = mCamera->getParameters();
 
@@ -162,53 +149,8 @@ bool CameraSourceTimeLapse::trySettingVideoSize(int32_t width, int32_t height) {
     return isSuccessful;
 }
 
-bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
-    LOGV("setPictureSizeToClosestSupported: %dx%d", width, height);
-    int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    String8 s = mCamera->getParameters();
-    IPCThreadState::self()->restoreCallingIdentity(token);
-
-    CameraParameters params(s);
-    Vector<Size> supportedSizes;
-    params.getSupportedPictureSizes(supportedSizes);
-
-    int32_t minPictureSize = INT_MAX;
-    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
-        int32_t pictureWidth = supportedSizes[i].width;
-        int32_t pictureHeight = supportedSizes[i].height;
-
-        if ((pictureWidth >= width) && (pictureHeight >= height)) {
-            int32_t pictureSize = pictureWidth*pictureHeight;
-            if (pictureSize < minPictureSize) {
-                minPictureSize = pictureSize;
-                mPictureWidth = pictureWidth;
-                mPictureHeight = pictureHeight;
-            }
-        }
-    }
-    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
-    return (minPictureSize != INT_MAX);
-}
-
-bool CameraSourceTimeLapse::computeCropRectangleOffset() {
-    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
-        return false;
-    }
-
-    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
-
-    int32_t widthDifference = mPictureWidth - mVideoWidth;
-    int32_t heightDifference = mPictureHeight - mVideoHeight;
-
-    mCropRectStartX = widthDifference/2;
-    mCropRectStartY = heightDifference/2;
-
-    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
-
-    return true;
-}
-
 void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
+    LOGV("signalBufferReturned");
     Mutex::Autolock autoLock(mQuickStopLock);
     if (mQuickStop && (buffer == mLastReadBufferCopy)) {
         buffer->setObserver(NULL);
@@ -218,7 +160,12 @@ void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
     }
 }
 
-void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
+void createMediaBufferCopy(
+        const MediaBuffer& sourceBuffer,
+        int64_t frameTime,
+        MediaBuffer **newBuffer) {
+
+    LOGV("createMediaBufferCopy");
     size_t sourceSize = sourceBuffer.size();
     void* sourcePointer = sourceBuffer.data();
 
@@ -229,6 +176,7 @@
 }
 
 void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
+    LOGV("fillLastReadBufferCopy");
     int64_t frameTime;
     CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
     createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
@@ -238,11 +186,12 @@ status_t CameraSourceTimeLapse::read(
         MediaBuffer **buffer, const ReadOptions *options) {
+    LOGV("read");
     if (mLastReadBufferCopy == NULL) {
         mLastReadStatus = CameraSource::read(buffer, options);
 
-        // mQuickStop may have turned to true while read was blocked. Make a copy of
-        // the buffer in that case.
+        // mQuickStop may have turned to true while read was blocked.
+        // Make a copy of the buffer in that case.
         Mutex::Autolock autoLock(mQuickStopLock);
         if (mQuickStop && *buffer) {
             fillLastReadBufferCopy(**buffer);
@@ -255,105 +204,19 @@ status_t CameraSourceTimeLapse::read(
     }
 }
 
-// static
-void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
-    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
-    source->threadTimeLapseEntry();
-    return NULL;
-}
-
-void CameraSourceTimeLapse::threadTimeLapseEntry() {
-    while (mStarted) {
-        {
-            Mutex::Autolock autoLock(mCameraIdleLock);
-            if (!mCameraIdle) {
-                mCameraIdleCondition.wait(mCameraIdleLock);
-            }
-            CHECK(mCameraIdle);
-            mCameraIdle = false;
-        }
-
-        // Even if mQuickStop == true we need to take one more picture
-        // as a read() may be blocked, waiting for a frame to get available.
-        // After this takePicture, if mQuickStop == true, we can safely exit
-        // this thread as read() will make a copy of this last frame and keep
-        // returning it in the quick stop mode.
-        Mutex::Autolock autoLock(mQuickStopLock);
-        CHECK_EQ(OK, mCamera->takePicture(CAMERA_MSG_RAW_IMAGE));
-        if (mQuickStop) {
-            LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
-            return;
-        }
-        mTakePictureCondition.waitRelative(mQuickStopLock,
-                mTimeBetweenTimeLapseFrameCaptureUs * 1000);
-    }
-    LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
-}
-
-void CameraSourceTimeLapse::startCameraRecording() {
-    if (mUseStillCameraForTimeLapse) {
-        LOGV("start time lapse recording using still camera");
-
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
-        String8 s = mCamera->getParameters();
-
-        CameraParameters params(s);
-        params.setPictureSize(mPictureWidth, mPictureHeight);
-        mCamera->setParameters(params.flatten());
-        mCameraIdle = true;
-        mStopWaitingForIdleCamera = false;
-
-        // disable shutter sound and play the recording sound.
-        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
-        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
-        IPCThreadState::self()->restoreCallingIdentity(token);
-
-        // create a thread which takes pictures in a loop
-        pthread_attr_t attr;
-        pthread_attr_init(&attr);
-        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
-
-        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
-        pthread_attr_destroy(&attr);
-    } else {
-        LOGV("start time lapse recording using video camera");
-        CameraSource::startCameraRecording();
-    }
-}
-
 void CameraSourceTimeLapse::stopCameraRecording() {
-    if (mUseStillCameraForTimeLapse) {
-        void *dummy;
-        pthread_join(mThreadTimeLapse, &dummy);
-
-        // Last takePicture may still be underway. Wait for the camera to get
-        // idle.
-        Mutex::Autolock autoLock(mCameraIdleLock);
-        mStopWaitingForIdleCamera = true;
-        if (!mCameraIdle) {
-            mCameraIdleCondition.wait(mCameraIdleLock);
-        }
-        CHECK(mCameraIdle);
-        mCamera->setListener(NULL);
-
-        // play the recording sound.
-        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
-    } else {
-        CameraSource::stopCameraRecording();
-    }
+    LOGV("stopCameraRecording");
+    CameraSource::stopCameraRecording();
     if (mLastReadBufferCopy) {
         mLastReadBufferCopy->release();
         mLastReadBufferCopy = NULL;
     }
 }
 
-void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
-    if (!mUseStillCameraForTimeLapse) {
-        CameraSource::releaseRecordingFrame(frame);
-    }
-}
+sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(
+        const sp<IMemory> &source_data) {
 
-sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
+    LOGV("createIMemoryCopy");
     size_t source_size = source_data->size();
     void* source_pointer = source_data->pointer();
 
@@ -363,102 +226,8 @@ sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_d
     return newMemory;
 }
 
-// Allocates IMemory of final type MemoryBase with the given size.
-sp<IMemory> allocateIMemory(size_t size) {
-    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
-    sp<IMemory> newMemory = new MemoryBase(newMemoryHeap, 0, size);
-    return newMemory;
-}
-
-// static
-void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
-    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
-    source->threadStartPreview();
-    return NULL;
-}
-
-void CameraSourceTimeLapse::threadStartPreview() {
-    CHECK_EQ(OK, mCamera->startPreview());
-    Mutex::Autolock autoLock(mCameraIdleLock);
-    mCameraIdle = true;
-    mCameraIdleCondition.signal();
-}
-
-void CameraSourceTimeLapse::restartPreview() {
-    // Start this in a different thread, so that the dataCallback can return
-    LOGV("restartPreview");
-    pthread_attr_t attr;
-    pthread_attr_init(&attr);
-    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
-
-    pthread_t threadPreview;
-    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
-    pthread_attr_destroy(&attr);
-}
-
-sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
-    // find the YUV format
-    int32_t srcFormat;
-    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
-    YUVImage::YUVFormat yuvFormat;
-    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
-        yuvFormat = YUVImage::YUV420SemiPlanar;
-    } else {
-        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
-        yuvFormat = YUVImage::YUV420Planar;
-    }
-
-    // allocate memory for cropped image and setup a canvas using it.
-    sp<IMemory> croppedImageMemory = allocateIMemory(
-            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
-    YUVImage yuvImageCropped(yuvFormat,
-            mVideoWidth, mVideoHeight,
-            (uint8_t *)croppedImageMemory->pointer());
-    YUVCanvas yuvCanvasCrop(yuvImageCropped);
-
-    YUVImage yuvImageSource(yuvFormat,
-            mPictureWidth, mPictureHeight,
-            (uint8_t *)source_data->pointer());
-    yuvCanvasCrop.CopyImageRect(
-            Rect(mCropRectStartX, mCropRectStartY,
-                mCropRectStartX + mVideoWidth,
-                mCropRectStartY + mVideoHeight),
-            0, 0,
-            yuvImageSource);
-
-    return croppedImageMemory;
-}
-
-void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
-    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
-        // takePicture will complete after this callback, so restart preview.
-        restartPreview();
-        return;
-    }
-    if (msgType != CAMERA_MSG_RAW_IMAGE) {
-        return;
-    }
-
-    LOGV("dataCallback for timelapse still frame");
-    CHECK_EQ(true, mUseStillCameraForTimeLapse);
-
-    int64_t timestampUs;
-    if (mNumFramesReceived == 0) {
-        timestampUs = mStartTimeUs;
-    } else {
-        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
-    }
-
-    if (mNeedCropping) {
-        sp<IMemory> croppedImageData = cropYUVImage(data);
-        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
-    } else {
-        sp<IMemory> dataCopy = createIMemoryCopy(data);
-        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
-    }
-}
-
 bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
+    LOGV("skipCurrentFrame");
     if (mSkipCurrentFrame) {
         mSkipCurrentFrame = false;
         return true;
@@ -468,72 +237,58 @@ bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
 }
 
 bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
-    if (!mUseStillCameraForTimeLapse) {
-        if (mLastTimeLapseFrameRealTimestampUs == 0) {
-            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
-            // to current time (timestampUs) and save frame data.
-            LOGV("dataCallbackTimestamp timelapse: initial frame");
+    LOGV("skipFrameAndModifyTimeStamp");
+    if (mLastTimeLapseFrameRealTimestampUs == 0) {
+        // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
+        // to current time (timestampUs) and save frame data.
+        LOGV("dataCallbackTimestamp timelapse: initial frame");
 
-            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
-            return false;
-        }
+        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+        return false;
+    }
 
-        {
-            Mutex::Autolock autoLock(mQuickStopLock);
-
-            // mForceRead may be set to true by startQuickReadReturns(). In that
-            // case don't skip this frame.
-            if (mForceRead) {
-                LOGV("dataCallbackTimestamp timelapse: forced read");
-                mForceRead = false;
-                *timestampUs =
-                    mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
-                return false;
-            }
-        }
+    {
+        Mutex::Autolock autoLock(mQuickStopLock);
 
-        // Workaround to bypass the first 2 input frames for skipping.
-        // The first 2 output frames from the encoder are: decoder specific info and
-        // the compressed video frame data for the first input video frame.
-        if (mNumFramesEncoded >= 1 && *timestampUs <
-            (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
-            // Skip all frames from last encoded frame until
-            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
-            // Tell the camera to release its recording frame and return.
-            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
-            return true;
-        } else {
-            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
-            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
-            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
-            //   of the last encoded frame's time stamp.
-            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
-
-            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
-            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+        // mForceRead may be set to true by startQuickReadReturns(). In that
+        // case don't skip this frame.
+        if (mForceRead) {
+            LOGV("dataCallbackTimestamp timelapse: forced read");
+            mForceRead = false;
+            *timestampUs =
+                mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
             return false;
         }
     }
+
+    // Workaround to bypass the first 2 input frames for skipping.
+    // The first 2 output frames from the encoder are: decoder specific info and
+    // the compressed video frame data for the first input video frame.
+    if (mNumFramesEncoded >= 1 && *timestampUs <
+        (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
+        // Skip all frames from last encoded frame until
+        // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
+        // Tell the camera to release its recording frame and return.
+        LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
+        return true;
+    } else {
+        // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
+        // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
+        // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
+        //   of the last encoded frame's time stamp.
+        LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
+
+        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+        *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+        return false;
+    }
     return false;
 }
 
 void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
             const sp<IMemory> &data) {
-    if (!mUseStillCameraForTimeLapse) {
-        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
-    } else {
-        Mutex::Autolock autoLock(mCameraIdleLock);
-        // If we are using the still camera and stop() has been called, it may
-        // be waiting for the camera to get idle. In that case return
-        // immediately. Calling CameraSource::dataCallbackTimestamp() will lead
-        // to a deadlock since it tries to access CameraSource::mLock which in
-        // this case is held by CameraSource::stop() currently waiting for the
-        // camera to get idle. And camera will not get idle until this call
-        // returns.
-        if (mStopWaitingForIdleCamera) {
-            return;
-        }
-    }
+    LOGV("dataCallbackTimestamp");
+    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
     CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
 }
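
With the still-camera path removed, all time-lapse behavior reduces to the frame selection and re-stamping done in skipFrameAndModifyTimeStamp() above: a frame is kept only once mTimeBetweenTimeLapseFrameCaptureUs of real time has passed since the last kept frame, and each kept frame is re-stamped one playback frame interval (mTimeBetweenTimeLapseVideoFramesUs) after the previous output timestamp. The following is a self-contained sketch of just that arithmetic, with the member state made explicit; it is a hypothetical helper for illustration, not code from this tree, and it omits the quick-stop and first-two-frame workarounds handled by the real implementation.

    // Simplified model of the retained time-lapse frame selection/re-stamping.
    // Hypothetical helper; field names only mirror the CameraSourceTimeLapse members.
    #include <stdint.h>

    struct TimeLapseModel {
        int64_t lastRealUs;         // mLastTimeLapseFrameRealTimestampUs (0 = nothing kept yet)
        int64_t lastOutputUs;       // timestamp of the last frame handed to the encoder
        int64_t captureIntervalUs;  // mTimeBetweenTimeLapseFrameCaptureUs
        int64_t videoFrameUs;       // mTimeBetweenTimeLapseVideoFramesUs (1e6 / playback fps)
    };

    // Returns true if the incoming frame should be dropped; otherwise *outUs
    // receives the artificial timestamp that compresses real time into video time.
    static bool processFrame(TimeLapseModel *m, int64_t realUs, int64_t *outUs) {
        if (m->lastRealUs == 0) {                // first frame: always keep, timestamp unchanged
            m->lastRealUs = realUs;
            m->lastOutputUs = realUs;
            *outUs = realUs;
            return false;
        }
        if (realUs < m->lastRealUs + m->captureIntervalUs) {
            return true;                         // not enough real time has elapsed: skip
        }
        m->lastRealUs = realUs;                  // keep this frame...
        m->lastOutputUs += m->videoFrameUs;      // ...but stamp it one playback frame later
        *outUs = m->lastOutputUs;
        return false;
    }

For example, with captureIntervalUs of 5 seconds and videoFrameUs of 33333 us (30 fps playback), roughly one frame per five seconds of wall-clock time is kept and the resulting clip plays back about 150x faster than real time.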
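At the client level, nothing in the remaining time-lapse path needs an auxiliary output anymore; a recording is configured through the regular MediaRecorder calls plus setParameters(). The sketch below shows one plausible way to drive the retained path from native code. It is an assumption-laden illustration: the parameter key strings are inferred from the setParamTimeLapseEnable()/setParamTimeBetweenTimeLapseFrameCapture() handlers kept by this patch (the interval value is in milliseconds, per the 1000LL * timeBetweenTimeLapseFrameCaptureMs conversion retained above), and camera/preview-surface setup is omitted.

    // Hedged usage sketch (not part of the patch): time-lapse recording after
    // the auxiliary-output and still-image paths are removed. Parameter key
    // names below are assumed, not spelled out by this diff.
    #include <media/mediarecorder.h>
    #include <utils/String8.h>

    using namespace android;

    static status_t startTimeLapseRecording(int outputFd) {
        sp<MediaRecorder> recorder = new MediaRecorder();

        recorder->setVideoSource(VIDEO_SOURCE_CAMERA);
        recorder->setOutputFormat(OUTPUT_FORMAT_MPEG_4);
        recorder->setVideoEncoder(VIDEO_ENCODER_H264);
        recorder->setVideoSize(1280, 720);
        recorder->setVideoFrameRate(30);
        recorder->setOutputFile(outputFd, 0, 0);

        // Enable time lapse and capture one frame every 500 ms (assumed keys,
        // value in milliseconds).
        recorder->setParameters(String8("time-lapse-enable=1"));
        recorder->setParameters(String8("time-between-time-lapse-frame-capture=500"));

        // setOutputFileAuxiliary() and the "video-aux-param-*" keys no longer exist.

        status_t err = recorder->prepare();
        if (err != OK) {
            return err;
        }
        return recorder->start();
    }

The only client-visible change from this commit is therefore the loss of the auxiliary (downsampled second stream) output; the time-lapse controls themselves are untouched.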