author     Steve Kondik <steve@cyngn.com>    2014-01-31 22:08:54 +0800
committer  Steve Kondik <steve@cyngn.com>    2015-11-07 12:03:16 -0800
commit     e79ee6494c2a1f2d3b1f1b1393ca85beee41a29d (patch)
tree       26430e8fe59a53da3d0161b6123b575bef05b49e /media/libstagefright
parent     5d9ec7585ad39f73690fddbd864f3e3f5c1bde4d (diff)
stagefright: Squashed commit of pause/resume features
Add 2 APIs (suspend/resume) in MediaPlayer
- suspend() pauses the player and releases all of the decoders, replacing
  release(), which tears down the whole player.
- resume() re-initializes the decoders; start() is then called to restart
  streaming playback.
- Add a check in AwesomePlayer::onVideoEvent() to make sure the first seek
  operation always seeks to the next I-frame.

Change-Id: Ie4c82906a2a056378119921a656128ebdc1007c4

audio: Add pause support for hardware OMX component
- The ADSP does not enter its sleep state after WMA playback is paused and
  power is suspended.
- There is no support for NT session pause with a hardware component; the NT
  session needs to be paused to put the ADSP into power collapse.
- Add pause support in stagefright so the device can enter suspend mode, and
  add intermediate states to avoid concurrency issues between read and pause.

Change-Id: I41b946b8c8805e6ee303646b63513b5b16514ef6

libstagefright: Drain input buffer on resume
- Buffers returned by the codec while paused are not drained. When the codec
  is resumed, these buffers are not drained until the next flush, which can
  cause timeouts.
- Drain input buffers for software decoders when resuming.

Change-Id: Ida2ab1d5dc3a1910accdd6fb89548262a912d8e7
CRs-Fixed: 569585, 574967

libstagefright: camcorder pause-resume implementation
- Add a pause/resume feature to the camcorder so the user can pause recording
  and resume later, producing a single recorded clip.

Change-Id: Id19c45ae5bb85265aa4d5304b160ebf119d9575a

libstagefright: support pause/resume for timelapse recording
- Modify the timestamp calculation mechanism in CameraSourceTimeLapse in order
  to support pause/resume.

Change-Id: Icb02ea798b0b807ffb7ada2d1ef5b2414b74edfb
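For orientation before the diffstat: a minimal sketch of how a player client might drive the new calls. It assumes a raw AwesomePlayer pointer and the existing play() entry point; the MediaPlayer/StagefrightPlayer plumbing that would normally route suspend()/resume() is outside this commit, so the helpers below are illustrative, not shipped API.

    // Sketch only: background/foreground handling with the new suspend()/resume().
    #include "include/AwesomePlayer.h"  // in-tree private header of libstagefright

    using namespace android;

    status_t onEnterBackground(AwesomePlayer *player) {
        // suspend() pauses playback and tears down decoders/renderers so the
        // device can sleep, instead of releasing the whole player.
        return player->suspend();
    }

    status_t onReturnToForeground(AwesomePlayer *player) {
        status_t err = player->resume();  // re-init decoders, reconnect the cached source
        if (err != OK) {
            return err;
        }
        return player->play();            // restart streaming playback
    }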
Diffstat (limited to 'media/libstagefright')
-rw-r--r--   media/libstagefright/AudioPlayer.cpp             30
-rw-r--r--   media/libstagefright/AudioSource.cpp             22
-rw-r--r--   media/libstagefright/AwesomePlayer.cpp           98
-rw-r--r--   media/libstagefright/CameraSource.cpp            41
-rw-r--r--   media/libstagefright/CameraSourceTimeLapse.cpp    9
-rw-r--r--   media/libstagefright/MPEG4Writer.cpp             13
-rw-r--r--   media/libstagefright/NuCachedSource2.cpp         33
-rw-r--r--   media/libstagefright/OMXCodec.cpp                92
-rw-r--r--   media/libstagefright/include/AwesomePlayer.h      5
-rw-r--r--   media/libstagefright/include/NuCachedSource2.h    5
10 files changed, 315 insertions, 33 deletions
diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp
index dd9d393..1ff5d4f 100644
--- a/media/libstagefright/AudioPlayer.cpp
+++ b/media/libstagefright/AudioPlayer.cpp
@@ -54,6 +54,7 @@ AudioPlayer::AudioPlayer(
mFinalStatus(OK),
mSeekTimeUs(0),
mStarted(false),
+ mSourcePaused(false),
mIsFirstBuffer(false),
mFirstBufferResult(OK),
mFirstBuffer(NULL),
@@ -62,7 +63,8 @@ AudioPlayer::AudioPlayer(
mPinnedTimeUs(-1ll),
mPlaying(false),
mStartPosUs(0),
- mCreateFlags(flags) {
+ mCreateFlags(flags),
+ mPauseRequired(false) {
}
AudioPlayer::~AudioPlayer() {
@@ -82,6 +84,7 @@ status_t AudioPlayer::start(bool sourceAlreadyStarted) {
status_t err;
if (!sourceAlreadyStarted) {
+ mSourcePaused = false;
err = mSource->start();
if (err != OK) {
@@ -257,13 +260,16 @@ status_t AudioPlayer::start(bool sourceAlreadyStarted) {
mStarted = true;
mPlaying = true;
mPinnedTimeUs = -1ll;
-
+ const char *componentName;
+ if (!(format->findCString(kKeyDecoderComponent, &componentName))) {
+ componentName = "none";
+ }
+ mPauseRequired = !strncmp(componentName, "OMX.qcom.", 9);
return OK;
}
void AudioPlayer::pause(bool playPendingSamples) {
CHECK(mStarted);
-
if (playPendingSamples) {
if (mAudioSink.get() != NULL) {
mAudioSink->stop();
@@ -284,10 +290,21 @@ void AudioPlayer::pause(bool playPendingSamples) {
}
mPlaying = false;
+ CHECK(mSource != NULL);
+ if (mPauseRequired) {
+ if (mSource->pause() == OK) {
+ mSourcePaused = true;
+ }
+ }
}
status_t AudioPlayer::resume() {
CHECK(mStarted);
+ CHECK(mSource != NULL);
+ if (mSourcePaused == true) {
+ mSourcePaused = false;
+ mSource->start();
+ }
status_t err;
if (mAudioSink.get() != NULL) {
@@ -349,7 +366,7 @@ void AudioPlayer::reset() {
mInputBuffer->release();
mInputBuffer = NULL;
}
-
+ mSourcePaused = false;
mSource->stop();
// The following hack is necessary to ensure that the OMX
@@ -379,6 +396,7 @@ void AudioPlayer::reset() {
mStarted = false;
mPlaying = false;
mStartPosUs = 0;
+ mPauseRequired = false;
}
// static
@@ -549,6 +567,10 @@ size_t AudioPlayer::fillBuffer(void *data, size_t size) {
mIsFirstBuffer = false;
} else {
err = mSource->read(&mInputBuffer, &options);
+ if (err == OK && mInputBuffer == NULL && mSourcePaused) {
+ ALOGV("mSourcePaused, return 0 from fillBuffer");
+ return 0;
+ }
}
CHECK((err == OK && mInputBuffer != NULL)
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index dc9c37b..db08476 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -58,7 +58,8 @@ AudioSource::AudioSource(
mPrevSampleTimeUs(0),
mFirstSampleTimeUs(-1ll),
mNumFramesReceived(0),
- mNumClientOwnedBuffers(0) {
+ mNumClientOwnedBuffers(0),
+ mRecPaused(false) {
ALOGV("sampleRate: %u, outSampleRate: %u, channelCount: %u",
sampleRate, outSampleRate, channelCount);
CHECK(channelCount == 1 || channelCount == 2 || channelCount == 6);
@@ -109,6 +110,11 @@ status_t AudioSource::initCheck() const {
status_t AudioSource::start(MetaData *params) {
Mutex::Autolock autoLock(mLock);
+ if (mRecPaused) {
+ mRecPaused = false;
+ return OK;
+ }
+
if (mStarted) {
return UNKNOWN_ERROR;
}
@@ -138,6 +144,12 @@ status_t AudioSource::start(MetaData *params) {
return err;
}
+status_t AudioSource::pause() {
+ ALOGV("AudioSource::Pause");
+ mRecPaused = true;
+ return OK;
+}
+
void AudioSource::releaseQueuedFrames_l() {
ALOGV("releaseQueuedFrames_l");
List<MediaBuffer *>::iterator it;
@@ -368,6 +380,14 @@ status_t AudioSource::dataCallback(const AudioRecord::Buffer& audioBuffer) {
}
void AudioSource::queueInputBuffer_l(MediaBuffer *buffer, int64_t timeUs) {
+ if (mRecPaused) {
+ if (!mBuffersReceived.empty()) {
+ releaseQueuedFrames_l();
+ }
+ buffer->release();
+ return;
+ }
+
const size_t bufferSize = buffer->range_length();
const size_t frameSize = mRecord->frameSize();
const int64_t timestampUs =
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 3cd0b0e..778dfa5 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -216,7 +216,8 @@ AwesomePlayer::AwesomePlayer()
mLastVideoTimeUs(-1),
mTextDriver(NULL),
mOffloadAudio(false),
- mAudioTearDown(false) {
+ mAudioTearDown(false),
+ mIsFirstFrameAfterResume(false) {
CHECK_EQ(mClient.connect(), (status_t)OK);
DataSource::RegisterDefaultSniffers();
@@ -1804,11 +1805,18 @@ void AwesomePlayer::onVideoEvent() {
if (mSeeking != NO_SEEK) {
ALOGV("seeking to %" PRId64 " us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);
+ MediaSource::ReadOptions::SeekMode seekmode = (mSeeking == SEEK_VIDEO_ONLY)
+ ? MediaSource::ReadOptions::SEEK_NEXT_SYNC
+ : MediaSource::ReadOptions::SEEK_CLOSEST_SYNC;
+ // Seek to the next key-frame after resume for http streaming
+ if (mCachedSource != NULL && mIsFirstFrameAfterResume) {
+ seekmode = MediaSource::ReadOptions::SEEK_NEXT_SYNC;
+ mIsFirstFrameAfterResume = false;
+ }
+
options.setSeekTo(
mSeekTimeUs,
- mSeeking == SEEK_VIDEO_ONLY
- ? MediaSource::ReadOptions::SEEK_NEXT_SYNC
- : MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
+ seekmode);
}
for (;;) {
status_t err = mVideoSource->read(&mVideoBuffer, &options);
@@ -3044,4 +3052,86 @@ void AwesomePlayer::onAudioTearDownEvent() {
beginPrepareAsync_l();
}
+// suspend() will release the decoders, the renderers and the buffers allocated for decoders
+// Releasing decoders eliminates draining power in suspended state.
+status_t AwesomePlayer::suspend() {
+ ALOGV("suspend()");
+ Mutex::Autolock autoLock(mLock);
+
+ // Set PAUSE to DrmManagerClient which will be set START in play_l()
+ if (mDecryptHandle != NULL) {
+ mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+ Playback::PAUSE, 0);
+ }
+
+ cancelPlayerEvents();
+ if (mQueueStarted) {
+ mQueue.stop();
+ mQueueStarted = false;
+ }
+
+ // Shutdown audio decoder first
+ if ((mAudioPlayer == NULL || !(mFlags & AUDIOPLAYER_STARTED))
+ && mAudioSource != NULL) {
+ mAudioSource->stop();
+ }
+ mAudioSource.clear();
+ mOmxSource.clear();
+ delete mAudioPlayer;
+ mAudioPlayer = NULL;
+ modifyFlags(AUDIO_RUNNING | AUDIOPLAYER_STARTED, CLEAR);
+
+ // Shutdown the video decoder
+ mVideoRenderer.clear();
+ if (mVideoSource != NULL) {
+ shutdownVideoDecoder_l();
+ }
+ modifyFlags(PLAYING, CLEAR);
+ mVideoRenderingStarted = false;
+
+ // Disconnect the source
+ if (mCachedSource != NULL) {
+ status_t err = mCachedSource->disconnectWhileSuspend();
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ return OK;
+}
+
+status_t AwesomePlayer::resume() {
+ ALOGV("resume()");
+ Mutex::Autolock autoLock(mLock);
+
+ // Reconnect the source
+ status_t err = mCachedSource->connectWhileResume();
+ if (err != OK) {
+ return err;
+ }
+
+ if (mVideoTrack != NULL && mVideoSource == NULL) {
+ status_t err = initVideoDecoder();
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ if (mAudioTrack != NULL && mAudioSource == NULL) {
+ status_t err = initAudioDecoder();
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ mIsFirstFrameAfterResume = true;
+
+ if (!mQueueStarted) {
+ mQueue.start();
+ mQueueStarted = true;
+ }
+
+ return OK;
+}
+
} // namespace android
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index aea0f13..7652fe2 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -199,7 +199,11 @@ CameraSource::CameraSource(
mNumFramesDropped(0),
mNumGlitches(0),
mGlitchDurationThresholdUs(200000),
- mCollectStats(false) {
+ mCollectStats(false),
+ mPauseAdjTimeUs(0),
+ mPauseStartTimeUs(0),
+ mPauseEndTimeUs(0),
+ mRecPause(false) {
mVideoSize.width = -1;
mVideoSize.height = -1;
@@ -669,6 +673,14 @@ status_t CameraSource::startCameraRecording() {
status_t CameraSource::start(MetaData *meta) {
ALOGV("start");
+ if(mRecPause) {
+ mRecPause = false;
+ mPauseAdjTimeUs = mPauseEndTimeUs - mPauseStartTimeUs;
+ ALOGV("resume : mPause Adj / End / Start : %lld / %lld / %lld us",
+ mPauseAdjTimeUs, mPauseEndTimeUs, mPauseStartTimeUs);
+ return OK;
+ }
+
CHECK(!mStarted);
if (mInitCheck != OK) {
ALOGE("CameraSource is not initialized yet");
@@ -682,6 +694,10 @@ status_t CameraSource::start(MetaData *meta) {
}
mStartTimeUs = 0;
+ mRecPause = false;
+ mPauseAdjTimeUs = 0;
+ mPauseStartTimeUs = 0;
+ mPauseEndTimeUs = 0;
mNumInputBuffers = 0;
mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
mEncoderDataSpace = HAL_DATASPACE_BT709;
@@ -715,6 +731,16 @@ status_t CameraSource::start(MetaData *meta) {
return err;
}
+status_t CameraSource::pause() {
+ mRecPause = true;
+ mPauseStartTimeUs = mLastFrameTimestampUs;
+ //record the end time too, or there is a risk the end time is 0
+ mPauseEndTimeUs = mLastFrameTimestampUs;
+ ALOGV("pause : mPauseStart %lld us, #Queued Frames : %d",
+ mPauseStartTimeUs, mFramesReceived.size());
+ return OK;
+}
+
void CameraSource::stopCameraRecording() {
ALOGV("stopCameraRecording");
if (mCameraFlags & FLAGS_HOT_CAMERA) {
@@ -916,6 +942,19 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
return;
}
+ if (mRecPause == true) {
+ if(!mFramesReceived.empty()) {
+ ALOGV("releaseQueuedFrames - #Queued Frames : %d", mFramesReceived.size());
+ releaseQueuedFrames();
+ }
+ ALOGV("release One Video Frame for Pause : %lld us", timestampUs);
+ releaseOneRecordingFrame(data);
+ mPauseEndTimeUs = timestampUs;
+ return;
+ }
+ timestampUs -= mPauseAdjTimeUs;
+ ALOGV("dataCallbackTimestamp: AdjTimestamp %lld us", timestampUs);
+
if (mNumFramesReceived > 0) {
if (timestampUs <= mLastFrameTimestampUs) {
ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 926e95c..53815bd 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -78,6 +78,7 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(
storeMetaDataInVideoBuffers),
mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
mLastTimeLapseFrameRealTimestampUs(0),
+ mLastTimeLapseFrameTimeStampUs(0),
mSkipCurrentFrame(false) {
mTimeBetweenFrameCaptureUs = timeBetweenFrameCaptureUs;
@@ -252,6 +253,7 @@ bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
ALOGV("dataCallbackTimestamp timelapse: initial frame");
mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ mLastTimeLapseFrameTimeStampUs = *timestampUs;
return false;
}
@@ -263,8 +265,10 @@ bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
if (mForceRead) {
ALOGV("dataCallbackTimestamp timelapse: forced read");
mForceRead = false;
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
*timestampUs =
- mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ mLastTimeLapseFrameTimeStampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ mLastTimeLapseFrameTimeStampUs = *timestampUs;
// Really make sure that this video recording frame will not be dropped.
if (*timestampUs < mStartTimeUs) {
@@ -294,7 +298,8 @@ bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
ALOGV("dataCallbackTimestamp timelapse: got timelapse frame");
mLastTimeLapseFrameRealTimestampUs = *timestampUs;
- *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ *timestampUs = mLastTimeLapseFrameTimeStampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ mLastTimeLapseFrameTimeStampUs = *timestampUs;
// Update start-time once the captured-time reaches the expected start-time.
// Not doing so will result in CameraSource always dropping frames since
// updated-timestamp will never intersect start-timestamp
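The time-lapse change above replaces the dependence on mLastFrameTimestampUs (now pause-adjusted inside CameraSource) with a dedicated mLastTimeLapseFrameTimeStampUs, so each kept frame is stamped exactly one video-frame interval after the previous kept frame, regardless of how much real or paused time passed in between. A small illustration of that rule (values in the comment are assumed, not from the diff):

    // Sketch only: time-lapse output timestamps advance by a fixed interval per kept frame.
    // For a 30 fps clip, timeBetweenTimeLapseVideoFramesUs = 1e6 / 30 ~ 33333 us, so frames
    // captured minutes apart (or across a pause) still land 33.3 ms apart in the output.
    #include <stdint.h>

    int64_t nextTimeLapseTimestampUs(int64_t lastTimeLapseFrameTimeStampUs,
                                     int64_t timeBetweenTimeLapseVideoFramesUs) {
        return lastTimeLapseFrameTimeStampUs + timeBetweenTimeLapseVideoFramesUs;
    }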
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index cb9df29..926833f 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -2341,19 +2341,6 @@ status_t MPEG4Writer::Track::threadEntry() {
#if 0
if (mResumed) {
- int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs;
- if (WARN_UNLESS(durExcludingEarlierPausesUs >= 0ll, "for %s track", trackName)) {
- copy->release();
- return ERROR_MALFORMED;
- }
-
- int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs;
- if (WARN_UNLESS(pausedDurationUs >= lastDurationUs, "for %s track", trackName)) {
- copy->release();
- return ERROR_MALFORMED;
- }
-
- previousPausedDurationUs += pausedDurationUs - lastDurationUs;
mResumed = false;
}
#endif
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index d6255d6..f72acf7 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -197,7 +197,8 @@ NuCachedSource2::NuCachedSource2(
mHighwaterThresholdBytes(kDefaultHighWaterThreshold),
mLowwaterThresholdBytes(kDefaultLowWaterThreshold),
mKeepAliveIntervalUs(kDefaultKeepAliveIntervalUs),
- mDisconnectAtHighwatermark(disconnectAtHighwatermark) {
+ mDisconnectAtHighwatermark(disconnectAtHighwatermark),
+ mSuspended(false) {
// We are NOT going to support disconnect-at-highwatermark indefinitely
// and we are not guaranteeing support for client-specified cache
// parameters. Both of these are temporary measures to solve a specific
@@ -332,7 +333,7 @@ void NuCachedSource2::fetchInternal() {
}
}
- if (reconnect) {
+ if (reconnect && !mSuspended) {
status_t err =
mSource->reconnectAtOffset(mCacheOffset + mCache->totalSize());
@@ -442,6 +443,13 @@ void NuCachedSource2::onFetch() {
delayUs = 100000ll;
}
+ if (mSuspended) {
+ static_cast<HTTPBase *>(mSource.get())->disconnect();
+ mFinalStatus = -EAGAIN;
+ return;
+ }
+
+
(new AMessage(kWhatFetchMore, mReflector))->post(delayUs);
}
@@ -771,4 +779,25 @@ void NuCachedSource2::RemoveCacheSpecificHeaders(
}
}
+status_t NuCachedSource2::disconnectWhileSuspend() {
+ if (mSource != NULL) {
+ static_cast<HTTPBase *>(mSource.get())->disconnect();
+ mFinalStatus = -EAGAIN;
+ mSuspended = true;
+ } else {
+ return ERROR_UNSUPPORTED;
+ }
+
+ return OK;
+}
+
+status_t NuCachedSource2::connectWhileResume() {
+ mSuspended = false;
+
+ // Begin to connect again and fetch more data
+ (new AMessage(kWhatFetchMore, mReflector))->post();
+
+ return OK;
+}
+
} // namespace android
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index b2bfceb..3ec02d4 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -1614,6 +1614,8 @@ bool OMXCodec::isIntermediateState(State state) {
return state == LOADED_TO_IDLE
|| state == IDLE_TO_EXECUTING
|| state == EXECUTING_TO_IDLE
+ || state == PAUSING
+ || state == FLUSHING
|| state == IDLE_TO_LOADED
|| state == RECONFIGURING;
}
@@ -2665,6 +2667,14 @@ void OMXCodec::onStateChange(OMX_STATETYPE newState) {
break;
}
+ case OMX_StatePause:
+ {
+ CODEC_LOGV("Now paused.");
+ CHECK_EQ((int)mState, (int)PAUSING);
+ setState(PAUSED);
+ break;
+ }
+
case OMX_StateInvalid:
{
setState(ERROR);
@@ -2780,7 +2790,7 @@ void OMXCodec::onPortSettingsChanged(OMX_U32 portIndex) {
bool OMXCodec::flushPortAsync(OMX_U32 portIndex) {
CHECK(mState == EXECUTING || mState == RECONFIGURING
- || mState == EXECUTING_TO_IDLE);
+ || mState == EXECUTING_TO_IDLE || mState == FLUSHING);
CODEC_LOGV("flushPortAsync(%u): we own %zu out of %zu buffers already.",
portIndex, countBuffersWeOwn(mPortBuffers[portIndex]),
@@ -2830,7 +2840,7 @@ status_t OMXCodec::enablePortAsync(OMX_U32 portIndex) {
}
void OMXCodec::fillOutputBuffers() {
- CHECK_EQ((int)mState, (int)EXECUTING);
+ CHECK(mState == EXECUTING || mState == FLUSHING);
// This is a workaround for some decoders not properly reporting
// end-of-output-stream. If we own all input buffers and also own
@@ -2857,7 +2867,7 @@ void OMXCodec::fillOutputBuffers() {
}
void OMXCodec::drainInputBuffers() {
- CHECK(mState == EXECUTING || mState == RECONFIGURING);
+ CHECK(mState == EXECUTING || mState == RECONFIGURING || mState == FLUSHING);
if (mFlags & kUseSecureInputBuffers) {
Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
@@ -3604,6 +3614,11 @@ void OMXCodec::clearCodecSpecificData() {
status_t OMXCodec::start(MetaData *meta) {
Mutex::Autolock autoLock(mLock);
+ if (mPaused) {
+ status_t err = resumeLocked(true);
+ return err;
+ }
+
if (mState != LOADED) {
CODEC_LOGE("called start in the unexpected state: %d", mState);
return UNKNOWN_ERROR;
@@ -3714,6 +3729,7 @@ status_t OMXCodec::stopOmxComponent_l() {
isError = true;
}
+ case PAUSED:
case EXECUTING:
{
setState(EXECUTING_TO_IDLE);
@@ -3785,6 +3801,14 @@ status_t OMXCodec::read(
Mutex::Autolock autoLock(mLock);
+ if (mPaused) {
+ err = resumeLocked(false);
+ if(err != OK) {
+ CODEC_LOGE("Failed to restart codec err= %d", err);
+ return err;
+ }
+ }
+
if (mState != EXECUTING && mState != RECONFIGURING) {
return UNKNOWN_ERROR;
}
@@ -3841,6 +3865,8 @@ status_t OMXCodec::read(
mFilledBuffers.clear();
CHECK_EQ((int)mState, (int)EXECUTING);
+ //DSP supports flushing of ports simultaneously. Flushing individual port is not supported.
+ setState(FLUSHING);
bool emulateInputFlushCompletion = !flushPortAsync(kPortIndexInput);
bool emulateOutputFlushCompletion = !flushPortAsync(kPortIndexOutput);
@@ -3870,6 +3896,11 @@ status_t OMXCodec::read(
return UNKNOWN_ERROR;
}
+ if (seeking) {
+ CHECK_EQ((int)mState, (int)FLUSHING);
+ setState(EXECUTING);
+ }
+
if (mFilledBuffers.empty()) {
return mSignalledEOS ? mFinalStatus : ERROR_END_OF_STREAM;
}
@@ -4303,11 +4334,60 @@ void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
}
status_t OMXCodec::pause() {
- Mutex::Autolock autoLock(mLock);
+ CODEC_LOGV("pause mState=%d", mState);
+
+ Mutex::Autolock autoLock(mLock);
+
+ if (mState != EXECUTING) {
+ return UNKNOWN_ERROR;
+ }
+
+ while (isIntermediateState(mState)) {
+ mAsyncCompletion.wait(mLock);
+ }
+ if (!strncmp(mComponentName, "OMX.qcom.", 9)) {
+ status_t err = mOMX->sendCommand(mNode,
+ OMX_CommandStateSet, OMX_StatePause);
+ CHECK_EQ(err, (status_t)OK);
+ setState(PAUSING);
+
+ mPaused = true;
+ while (mState != PAUSED && mState != ERROR) {
+ mAsyncCompletion.wait(mLock);
+ }
+ return mState == ERROR ? UNKNOWN_ERROR : OK;
+ } else {
+ mPaused = true;
+ return OK;
+ }
- mPaused = true;
+}
- return OK;
+status_t OMXCodec::resumeLocked(bool drainInputBuf) {
+ CODEC_LOGV("resume mState=%d", mState);
+
+ if (!strncmp(mComponentName, "OMX.qcom.", 9)) {
+ while (isIntermediateState(mState)) {
+ mAsyncCompletion.wait(mLock);
+ }
+ CHECK_EQ(mState, (status_t)PAUSED);
+ status_t err = mOMX->sendCommand(mNode,
+ OMX_CommandStateSet, OMX_StateExecuting);
+ CHECK_EQ(err, (status_t)OK);
+ setState(IDLE_TO_EXECUTING);
+ mPaused = false;
+ while (mState != EXECUTING && mState != ERROR) {
+ mAsyncCompletion.wait(mLock);
+ }
+ if(drainInputBuf)
+ drainInputBuffers();
+ return mState == ERROR ? UNKNOWN_ERROR : OK;
+ } else { // SW Codec
+ mPaused = false;
+ if(drainInputBuf)
+ drainInputBuffers();
+ return OK;
+ }
}
////////////////////////////////////////////////////////////////////////////////
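The pause path above only applies the OMX state transition to hardware ("OMX.qcom.*") components; software codecs simply latch mPaused and resume on the next start() or read(). A hedged sketch of how a client such as AudioPlayer (earlier in this commit) is expected to exercise it; 'codec' stands for the OMXCodec-backed MediaSource and error handling is simplified:

    // Sketch only: driving OMXCodec pause/resume from a client such as AudioPlayer.
    #include <media/stagefright/MediaSource.h>

    using namespace android;

    status_t pauseThenResume(const sp<MediaSource> &codec) {
        // For OMX.qcom.* components, pause() issues OMX_CommandStateSet(OMX_StatePause)
        // and waits for EXECUTING -> PAUSING -> PAUSED; SW codecs only set an internal flag.
        status_t err = codec->pause();
        if (err != OK) {
            return err;
        }
        // ... device may suspend here; the ADSP can power-collapse while paused ...

        // start() on a paused codec takes the resumeLocked(true) path: back to EXECUTING,
        // then drainInputBuffers() so buffers held across the pause are resubmitted.
        return codec->start();
    }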
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index c72f9f6..1a8e6c8 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -109,6 +109,9 @@ struct AwesomePlayer {
void postAudioTearDown();
status_t dump(int fd, const Vector<String16> &args) const;
+ status_t suspend();
+ status_t resume();
+
private:
friend struct AwesomeEvent;
friend struct PreviewPlayer;
@@ -356,6 +359,8 @@ private:
bool mAudioTearDownWasPlaying;
int64_t mAudioTearDownPosition;
+ bool mIsFirstFrameAfterResume;
+
status_t setVideoScalingMode(int32_t mode);
status_t setVideoScalingMode_l(int32_t mode);
status_t getTrackInfo(Parcel* reply) const;
diff --git a/media/libstagefright/include/NuCachedSource2.h b/media/libstagefright/include/NuCachedSource2.h
index afa91ae..1f282ca 100644
--- a/media/libstagefright/include/NuCachedSource2.h
+++ b/media/libstagefright/include/NuCachedSource2.h
@@ -66,6 +66,9 @@ struct NuCachedSource2 : public DataSource {
String8 *cacheConfig,
bool *disconnectAtHighwatermark);
+ virtual status_t disconnectWhileSuspend();
+ virtual status_t connectWhileResume();
+
protected:
virtual ~NuCachedSource2();
@@ -123,6 +126,8 @@ protected:
bool mDisconnectAtHighwatermark;
+ bool mSuspended;
+
void onMessageReceived(const sp<AMessage> &msg);
void onFetch();
void onRead(const sp<AMessage> &msg);