Diffstat (limited to 'media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp')
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp | 554
1 file changed, 252 insertions(+), 302 deletions(-)
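
The patch below replaces the renderer's hand-maintained anchor bookkeeping (mAnchorTimeRealUs, mPauseStartedTimeRealUs, mPausePositionMediaTimeUs and friends) with a shared MediaClock. As a quick orientation only — this sketch is not part of the patch, its values are placeholders, and it uses only the MediaClock calls that appear in the hunks below — the pattern the change adopts looks roughly like this:

    // Illustrative sketch, not part of the patch. Shows how the renderer now
    // derives playback position from MediaClock instead of its own anchor math.
    #include <media/stagefright/MediaClock.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <utils/Errors.h>

    using namespace android;

    void mediaClockSketch() {
        sp<MediaClock> clock = new MediaClock;

        // First audio buffer: record where media time starts.
        clock->setStartingTimeMedia(0 /* mediaUs, placeholder */);

        // When writing a buffer with a known media time, anchor the clock:
        // "real time nowUs corresponds to media time nowMediaUs", and the
        // clock may not run past maxMediaUs (the last media time queued).
        int64_t nowUs = ALooper::GetNowUs();
        int64_t nowMediaUs = 0;       // e.g. mediaTimeUs - pending playout duration
        int64_t maxMediaUs = 100000;  // placeholder: last queued media time
        clock->updateAnchor(nowMediaUs, nowUs, maxMediaUs);
        clock->updateMaxTimeMedia(maxMediaUs);

        // Pause/resume and rate changes become rate changes on the clock.
        clock->setPlaybackRate(0.0f);  // pause
        clock->setPlaybackRate(1.0f);  // resume at normal speed

        // Position queries no longer need renderer-side bookkeeping.
        int64_t positionUs;
        if (clock->getMediaTime(ALooper::GetNowUs(), &positionUs) == OK) {
            // positionUs is the current media position in microseconds.
        }

        // Mapping a media time back to a real time (used to schedule video):
        int64_t realUs;
        (void)clock->getRealTimeFor(0 /* targetMediaUs, placeholder */, &realUs);

        // A flush simply clears the anchor.
        clock->clearAnchor();
    }
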
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 25225a8..a2ec51c 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -25,6 +25,7 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/AWakeLock.h>
+#include <media/stagefright/MediaClock.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
@@ -63,22 +64,19 @@ NuPlayer::Renderer::Renderer(
mDrainVideoQueuePending(false),
mAudioQueueGeneration(0),
mVideoQueueGeneration(0),
+ mAudioDrainGeneration(0),
+ mVideoDrainGeneration(0),
+ mPlaybackRate(1.0),
mAudioFirstAnchorTimeMediaUs(-1),
mAnchorTimeMediaUs(-1),
- mAnchorTimeRealUs(-1),
mAnchorNumFramesWritten(-1),
- mAnchorMaxMediaUs(-1),
mVideoLateByUs(0ll),
mHasAudio(false),
mHasVideo(false),
- mPauseStartedTimeRealUs(-1),
- mFlushingAudio(false),
- mFlushingVideo(false),
mNotifyCompleteAudio(false),
mNotifyCompleteVideo(false),
mSyncQueues(false),
mPaused(false),
- mPausePositionMediaTimeUs(-1),
mVideoSampleReceived(false),
mVideoRenderingStarted(false),
mVideoRenderingStartGeneration(0),
@@ -90,7 +88,7 @@ NuPlayer::Renderer::Renderer(
mTotalBuffersQueued(0),
mLastAudioBufferDrained(0),
mWakeLock(new AWakeLock()) {
-
+ mMediaClock = new MediaClock;
}
NuPlayer::Renderer::~Renderer() {
@@ -105,7 +103,8 @@ void NuPlayer::Renderer::queueBuffer(
bool audio,
const sp<ABuffer> &buffer,
const sp<AMessage> &notifyConsumed) {
- sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
+ sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
+ msg->setInt32("queueGeneration", getQueueGeneration(audio));
msg->setInt32("audio", static_cast<int32_t>(audio));
msg->setBuffer("buffer", buffer);
msg->setMessage("notifyConsumed", notifyConsumed);
@@ -115,199 +114,108 @@ void NuPlayer::Renderer::queueBuffer(
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
CHECK_NE(finalResult, (status_t)OK);
- sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
+ sp<AMessage> msg = new AMessage(kWhatQueueEOS, this);
+ msg->setInt32("queueGeneration", getQueueGeneration(audio));
msg->setInt32("audio", static_cast<int32_t>(audio));
msg->setInt32("finalResult", finalResult);
msg->post();
}
+void NuPlayer::Renderer::setPlaybackRate(float rate) {
+ sp<AMessage> msg = new AMessage(kWhatSetRate, this);
+ msg->setFloat("rate", rate);
+ msg->post();
+}
+
void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
{
- Mutex::Autolock autoLock(mFlushLock);
+ Mutex::Autolock autoLock(mLock);
if (audio) {
mNotifyCompleteAudio |= notifyComplete;
- if (mFlushingAudio) {
- return;
- }
- mFlushingAudio = true;
+ ++mAudioQueueGeneration;
+ ++mAudioDrainGeneration;
} else {
mNotifyCompleteVideo |= notifyComplete;
- if (mFlushingVideo) {
- return;
- }
- mFlushingVideo = true;
+ ++mVideoQueueGeneration;
+ ++mVideoDrainGeneration;
}
+
+ clearAnchorTime_l();
+ clearAudioFirstAnchorTime_l();
+ mVideoLateByUs = 0;
+ mSyncQueues = false;
}
- sp<AMessage> msg = new AMessage(kWhatFlush, id());
+ sp<AMessage> msg = new AMessage(kWhatFlush, this);
msg->setInt32("audio", static_cast<int32_t>(audio));
msg->post();
}
void NuPlayer::Renderer::signalTimeDiscontinuity() {
- Mutex::Autolock autoLock(mLock);
- // CHECK(mAudioQueue.empty());
- // CHECK(mVideoQueue.empty());
- setAudioFirstAnchorTime(-1);
- setAnchorTime(-1, -1);
- setVideoLateByUs(0);
- mSyncQueues = false;
-}
-
-void NuPlayer::Renderer::signalAudioSinkChanged() {
- (new AMessage(kWhatAudioSinkChanged, id()))->post();
}
void NuPlayer::Renderer::signalDisableOffloadAudio() {
- (new AMessage(kWhatDisableOffloadAudio, id()))->post();
+ (new AMessage(kWhatDisableOffloadAudio, this))->post();
}
void NuPlayer::Renderer::signalEnableOffloadAudio() {
- (new AMessage(kWhatEnableOffloadAudio, id()))->post();
+ (new AMessage(kWhatEnableOffloadAudio, this))->post();
}
void NuPlayer::Renderer::pause() {
- (new AMessage(kWhatPause, id()))->post();
+ (new AMessage(kWhatPause, this))->post();
}
void NuPlayer::Renderer::resume() {
- (new AMessage(kWhatResume, id()))->post();
+ (new AMessage(kWhatResume, this))->post();
}
void NuPlayer::Renderer::setVideoFrameRate(float fps) {
- sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
+ sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, this);
msg->setFloat("frame-rate", fps);
msg->post();
}
-// Called on any threads, except renderer's thread.
-status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
- {
- Mutex::Autolock autoLock(mLock);
- int64_t currentPositionUs;
- if (getCurrentPositionIfPaused_l(&currentPositionUs)) {
- *mediaUs = currentPositionUs;
- return OK;
- }
- }
- return getCurrentPositionFromAnchor(mediaUs, ALooper::GetNowUs());
-}
-
-// Called on only renderer's thread.
-status_t NuPlayer::Renderer::getCurrentPositionOnLooper(int64_t *mediaUs) {
- return getCurrentPositionOnLooper(mediaUs, ALooper::GetNowUs());
-}
-
-// Called on only renderer's thread.
-// Since mPaused and mPausePositionMediaTimeUs are changed only on renderer's
-// thread, no need to acquire mLock.
-status_t NuPlayer::Renderer::getCurrentPositionOnLooper(
- int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
- int64_t currentPositionUs;
- if (getCurrentPositionIfPaused_l(&currentPositionUs)) {
- *mediaUs = currentPositionUs;
- return OK;
- }
- return getCurrentPositionFromAnchor(mediaUs, nowUs, allowPastQueuedVideo);
-}
-
-// Called either with mLock acquired or on renderer's thread.
-bool NuPlayer::Renderer::getCurrentPositionIfPaused_l(int64_t *mediaUs) {
- if (!mPaused || mPausePositionMediaTimeUs < 0ll) {
- return false;
- }
- *mediaUs = mPausePositionMediaTimeUs;
- return true;
-}
-
// Called on any threads.
-status_t NuPlayer::Renderer::getCurrentPositionFromAnchor(
- int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
- Mutex::Autolock autoLock(mTimeLock);
- if (!mHasAudio && !mHasVideo) {
- return NO_INIT;
- }
-
- if (mAnchorTimeMediaUs < 0) {
- return NO_INIT;
- }
-
- int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
-
- if (mPauseStartedTimeRealUs != -1) {
- positionUs -= (nowUs - mPauseStartedTimeRealUs);
- }
-
- // limit position to the last queued media time (for video only stream
- // position will be discrete as we don't know how long each frame lasts)
- if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) {
- if (positionUs > mAnchorMaxMediaUs) {
- positionUs = mAnchorMaxMediaUs;
- }
- }
-
- if (positionUs < mAudioFirstAnchorTimeMediaUs) {
- positionUs = mAudioFirstAnchorTimeMediaUs;
- }
-
- *mediaUs = (positionUs <= 0) ? 0 : positionUs;
- return OK;
-}
-
-void NuPlayer::Renderer::setHasMedia(bool audio) {
- Mutex::Autolock autoLock(mTimeLock);
- if (audio) {
- mHasAudio = true;
- } else {
- mHasVideo = true;
- }
+status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
+ return mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
}
-void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
- Mutex::Autolock autoLock(mTimeLock);
- mAudioFirstAnchorTimeMediaUs = mediaUs;
+void NuPlayer::Renderer::clearAudioFirstAnchorTime_l() {
+ mAudioFirstAnchorTimeMediaUs = -1;
+ mMediaClock->setStartingTimeMedia(-1);
}
-void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
- Mutex::Autolock autoLock(mTimeLock);
+void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) {
if (mAudioFirstAnchorTimeMediaUs == -1) {
mAudioFirstAnchorTimeMediaUs = mediaUs;
+ mMediaClock->setStartingTimeMedia(mediaUs);
}
}
-void NuPlayer::Renderer::setAnchorTime(
- int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) {
- Mutex::Autolock autoLock(mTimeLock);
- mAnchorTimeMediaUs = mediaUs;
- mAnchorTimeRealUs = realUs;
- mAnchorNumFramesWritten = numFramesWritten;
- if (resume) {
- mPauseStartedTimeRealUs = -1;
- }
+void NuPlayer::Renderer::clearAnchorTime_l() {
+ mMediaClock->clearAnchor();
+ mAnchorTimeMediaUs = -1;
+ mAnchorNumFramesWritten = -1;
}
void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
- Mutex::Autolock autoLock(mTimeLock);
+ Mutex::Autolock autoLock(mLock);
mVideoLateByUs = lateUs;
}
int64_t NuPlayer::Renderer::getVideoLateByUs() {
- Mutex::Autolock autoLock(mTimeLock);
+ Mutex::Autolock autoLock(mLock);
return mVideoLateByUs;
}
-void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
- Mutex::Autolock autoLock(mTimeLock);
- mPauseStartedTimeRealUs = realUs;
-}
-
status_t NuPlayer::Renderer::openAudioSink(
const sp<AMessage> &format,
bool offloadOnly,
bool hasVideo,
uint32_t flags,
bool *isOffloaded) {
- sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id());
+ sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, this);
msg->setMessage("format", format);
msg->setInt32("offload-only", offloadOnly);
msg->setInt32("has-video", hasVideo);
@@ -328,7 +236,7 @@ status_t NuPlayer::Renderer::openAudioSink(
}
void NuPlayer::Renderer::closeAudioSink() {
- sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id());
+ sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, this);
sp<AMessage> response;
msg->postAndAwaitResponse(&response);
@@ -356,7 +264,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
response->setInt32("err", err);
response->setInt32("offload", offloadingAudio());
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
response->postReply(replyID);
@@ -365,7 +273,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatCloseAudioSink:
{
- uint32_t replyID;
+ sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
onCloseAudioSink();
@@ -384,8 +292,8 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatDrainAudioQueue:
{
int32_t generation;
- CHECK(msg->findInt32("generation", &generation));
- if (generation != mAudioQueueGeneration) {
+ CHECK(msg->findInt32("drainGeneration", &generation));
+ if (generation != getDrainGeneration(true /* audio */)) {
break;
}
@@ -407,9 +315,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
// Let's give it more data after about half that time
// has elapsed.
- // kWhatDrainAudioQueue is used for non-offloading mode,
- // and mLock is used only for offloading mode. Therefore,
- // no need to acquire mLock here.
+ Mutex::Autolock autoLock(mLock);
postDrainAudioQueue_l(delayUs / 2);
}
break;
@@ -418,8 +324,8 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatDrainVideoQueue:
{
int32_t generation;
- CHECK(msg->findInt32("generation", &generation));
- if (generation != mVideoQueueGeneration) {
+ CHECK(msg->findInt32("drainGeneration", &generation));
+ if (generation != getDrainGeneration(false /* audio */)) {
break;
}
@@ -427,22 +333,20 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
onDrainVideoQueue();
- Mutex::Autolock autoLock(mLock);
- postDrainVideoQueue_l();
+ postDrainVideoQueue();
break;
}
case kWhatPostDrainVideoQueue:
{
int32_t generation;
- CHECK(msg->findInt32("generation", &generation));
- if (generation != mVideoQueueGeneration) {
+ CHECK(msg->findInt32("drainGeneration", &generation));
+ if (generation != getDrainGeneration(false /* audio */)) {
break;
}
mDrainVideoQueuePending = false;
- Mutex::Autolock autoLock(mLock);
- postDrainVideoQueue_l();
+ postDrainVideoQueue();
break;
}
@@ -458,15 +362,19 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case kWhatFlush:
+ case kWhatSetRate:
{
- onFlush(msg);
+ CHECK(msg->findFloat("rate", &mPlaybackRate));
+ int32_t ratePermille = (int32_t)(0.5f + 1000 * mPlaybackRate);
+ mPlaybackRate = ratePermille / 1000.0f;
+ mMediaClock->setPlaybackRate(mPlaybackRate);
+ mAudioSink->setPlaybackRatePermille(ratePermille);
break;
}
- case kWhatAudioSinkChanged:
+ case kWhatFlush:
{
- onAudioSinkChanged();
+ onFlush(msg);
break;
}
@@ -511,7 +419,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatAudioOffloadPauseTimeout:
{
int32_t generation;
- CHECK(msg->findInt32("generation", &generation));
+ CHECK(msg->findInt32("drainGeneration", &generation));
if (generation != mAudioOffloadPauseTimeoutGeneration) {
break;
}
@@ -538,19 +446,19 @@ void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
}
mDrainAudioQueuePending = true;
- sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
- msg->setInt32("generation", mAudioQueueGeneration);
+ sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this);
+ msg->setInt32("drainGeneration", mAudioDrainGeneration);
msg->post(delayUs);
}
-void NuPlayer::Renderer::prepareForMediaRenderingStart() {
- mAudioRenderingStartGeneration = mAudioQueueGeneration;
- mVideoRenderingStartGeneration = mVideoQueueGeneration;
+void NuPlayer::Renderer::prepareForMediaRenderingStart_l() {
+ mAudioRenderingStartGeneration = mAudioDrainGeneration;
+ mVideoRenderingStartGeneration = mVideoDrainGeneration;
}
-void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
- if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
- mAudioRenderingStartGeneration == mAudioQueueGeneration) {
+void NuPlayer::Renderer::notifyIfMediaRenderingStarted_l() {
+ if (mVideoRenderingStartGeneration == mVideoDrainGeneration &&
+ mAudioRenderingStartGeneration == mAudioDrainGeneration) {
mVideoRenderingStartGeneration = -1;
mAudioRenderingStartGeneration = -1;
@@ -618,7 +526,7 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
- setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
+ setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
}
size_t copy = entry->mBuffer->size() - entry->mOffset;
@@ -638,34 +546,45 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
entry = NULL;
}
sizeCopied += copy;
- notifyIfMediaRenderingStarted();
+
+ notifyIfMediaRenderingStarted_l();
}
if (mAudioFirstAnchorTimeMediaUs >= 0) {
int64_t nowUs = ALooper::GetNowUs();
- setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs));
+ int64_t nowMediaUs =
+ mAudioFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs);
+ // we don't know how much data we are queueing for offloaded tracks.
+ mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX);
}
- // we don't know how much data we are queueing for offloaded tracks
- mAnchorMaxMediaUs = -1;
-
if (hasEOS) {
- (new AMessage(kWhatStopAudioSink, id()))->post();
+ (new AMessage(kWhatStopAudioSink, this))->post();
}
return sizeCopied;
}
bool NuPlayer::Renderer::onDrainAudioQueue() {
+ // TODO: This call to getPosition checks if AudioTrack has been created
+ // in AudioSink before draining audio. If AudioTrack doesn't exist, then
+ // CHECKs on getPosition will fail.
+ // We still need to figure out why AudioTrack is not created when
+ // this function is called. One possible reason could be leftover
+ // audio. Another possible place is to check whether decoder
+ // has received INFO_FORMAT_CHANGED as the first buffer since
+ // AudioSink is opened there, and possible interactions with flush
+ // immediately after start. Investigate error message
+ // "vorbis_dsp_synthesis returned -135", along with RTSP.
uint32_t numFramesPlayed;
if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
return false;
}
+#if 0
ssize_t numFramesAvailableToWrite =
mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
-#if 0
if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
ALOGI("audio sink underrun");
} else {
@@ -674,10 +593,7 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
}
#endif
- size_t numBytesAvailableToWrite =
- numFramesAvailableToWrite * mAudioSink->frameSize();
-
- while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
+ while (!mAudioQueue.empty()) {
QueueEntry *entry = &*mAudioQueue.begin();
mLastAudioBufferDrained = entry->mBufferOrdinal;
@@ -710,14 +626,16 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
}
size_t copy = entry->mBuffer->size() - entry->mOffset;
- if (copy > numBytesAvailableToWrite) {
- copy = numBytesAvailableToWrite;
- }
- ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
+ ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset,
+ copy, false /* blocking */);
if (written < 0) {
// An error in AudioSink write. Perhaps the AudioSink was not properly opened.
- ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+ if (written == WOULD_BLOCK) {
+ ALOGW("AudioSink write would block when writing %zu bytes", copy);
+ } else {
+ ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+ }
break;
}
@@ -729,73 +647,92 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
entry = NULL;
}
- numBytesAvailableToWrite -= written;
size_t copiedFrames = written / mAudioSink->frameSize();
mNumFramesWritten += copiedFrames;
- notifyIfMediaRenderingStarted();
+ {
+ Mutex::Autolock autoLock(mLock);
+ notifyIfMediaRenderingStarted_l();
+ }
if (written != (ssize_t)copy) {
// A short count was received from AudioSink::write()
//
- // AudioSink write should block until exactly the number of bytes are delivered.
- // But it may return with a short count (without an error) when:
+ // AudioSink write is called in non-blocking mode.
+ // It may return with a short count when:
//
// 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
- // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
+ // 2) The data to be copied exceeds the available buffer in AudioSink.
+ // 3) An error occurs and data has been partially copied to the buffer in AudioSink.
+ // 4) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
// (Case 1)
// Must be a multiple of the frame size. If it is not a multiple of a frame size, it
// needs to fail, as we should not carry over fractional frames between calls.
CHECK_EQ(copy % mAudioSink->frameSize(), 0);
- // (Case 2)
+ // (Case 2, 3, 4)
// Return early to the caller.
// Beware of calling immediately again as this may busy-loop if you are not careful.
- ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
+ ALOGV("AudioSink write short frame count %zd < %zu", written, copy);
break;
}
}
- mAnchorMaxMediaUs =
- mAnchorTimeMediaUs +
- (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
- * 1000LL * mAudioSink->msecsPerFrame());
+ int64_t maxTimeMedia;
+ {
+ Mutex::Autolock autoLock(mLock);
+ maxTimeMedia =
+ mAnchorTimeMediaUs +
+ (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
+ * 1000LL * mAudioSink->msecsPerFrame());
+ }
+ mMediaClock->updateMaxTimeMedia(maxTimeMedia);
return !mAudioQueue.empty();
}
+int64_t NuPlayer::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
+ int32_t sampleRate = offloadingAudio() ?
+ mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate;
+ // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
+ return (int64_t)((int32_t)numFrames * 1000000LL / sampleRate);
+}
+
+// Calculate duration of pending samples if played at normal rate (i.e., 1.0).
int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
- int64_t writtenAudioDurationUs =
- mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
+ int64_t writtenAudioDurationUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
}
int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
- int64_t currentPositionUs;
- if (mPaused || getCurrentPositionOnLooper(
- &currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) {
- // If failed to get current position, e.g. due to audio clock is not ready, then just
- // play out video immediately without delay.
+ int64_t realUs;
+ if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
+ // If failed to get current position, e.g. due to audio clock is
+ // not ready, then just play out video immediately without delay.
return nowUs;
}
- return (mediaTimeUs - currentPositionUs) + nowUs;
+ return realUs;
}
void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
+ Mutex::Autolock autoLock(mLock);
// TRICKY: vorbis decoder generates multiple frames with the same
// timestamp, so only update on the first frame with a given timestamp
if (mediaTimeUs == mAnchorTimeMediaUs) {
return;
}
- setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
+ setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
int64_t nowUs = ALooper::GetNowUs();
- setAnchorTime(
- mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten);
+ int64_t nowMediaUs = mediaTimeUs - getPendingAudioPlayoutDurationUs(nowUs);
+ mMediaClock->updateAnchor(nowMediaUs, nowUs, mediaTimeUs);
+ mAnchorNumFramesWritten = mNumFramesWritten;
+ mAnchorTimeMediaUs = mediaTimeUs;
}
-void NuPlayer::Renderer::postDrainVideoQueue_l() {
+// Called without mLock acquired.
+void NuPlayer::Renderer::postDrainVideoQueue() {
if (mDrainVideoQueuePending
- || mSyncQueues
+ || getSyncQueues()
|| (mPaused && mVideoSampleReceived)) {
return;
}
@@ -806,8 +743,8 @@ void NuPlayer::Renderer::postDrainVideoQueue_l() {
QueueEntry &entry = *mVideoQueue.begin();
- sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
- msg->setInt32("generation", mVideoQueueGeneration);
+ sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
+ msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));
if (entry.mBuffer == NULL) {
// EOS doesn't carry a timestamp.
@@ -827,16 +764,19 @@ void NuPlayer::Renderer::postDrainVideoQueue_l() {
int64_t mediaTimeUs;
CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
- if (mAnchorTimeMediaUs < 0) {
- setAnchorTime(mediaTimeUs, nowUs);
- mPausePositionMediaTimeUs = mediaTimeUs;
- mAnchorMaxMediaUs = mediaTimeUs;
- realTimeUs = nowUs;
- } else {
- realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
+ {
+ Mutex::Autolock autoLock(mLock);
+ if (mAnchorTimeMediaUs < 0) {
+ mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
+ mAnchorTimeMediaUs = mediaTimeUs;
+ realTimeUs = nowUs;
+ } else {
+ realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
+ }
}
if (!mHasAudio) {
- mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps
+ // smooth out videos >= 10fps
+ mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000);
}
// Heuristics to handle situation when media time changed without a
@@ -915,16 +855,19 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
ALOGV("video late by %lld us (%.2f secs)",
mVideoLateByUs, mVideoLateByUs / 1E6);
} else {
+ int64_t mediaUs = 0;
+ mMediaClock->getMediaTime(realTimeUs, &mediaUs);
ALOGV("rendering video at media time %.2f secs",
(mFlags & FLAG_REAL_TIME ? realTimeUs :
- (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
+ mediaUs) / 1E6);
}
} else {
setVideoLateByUs(0);
if (!mVideoSampleReceived && !mHasAudio) {
// This will ensure that the first frame after a flush won't be used as anchor
// when renderer is in paused state, because resume can happen any time after seek.
- setAnchorTime(-1, -1);
+ Mutex::Autolock autoLock(mLock);
+ clearAnchorTime_l();
}
}
@@ -941,7 +884,8 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
mVideoRenderingStarted = true;
notifyVideoRenderingStart();
}
- notifyIfMediaRenderingStarted();
+ Mutex::Autolock autoLock(mLock);
+ notifyIfMediaRenderingStarted_l();
}
}
@@ -960,14 +904,22 @@ void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t del
}
void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
- (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
+ (new AMessage(kWhatAudioOffloadTearDown, this))->post();
}
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
- setHasMedia(audio);
+ if (dropBufferIfStale(audio, msg)) {
+ return;
+ }
+
+ if (audio) {
+ mHasAudio = true;
+ } else {
+ mHasVideo = true;
+ }
if (mHasVideo) {
if (mVideoScheduler == NULL) {
@@ -976,10 +928,6 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
}
}
- if (dropBufferWhileFlushing(audio, msg)) {
- return;
- }
-
sp<ABuffer> buffer;
CHECK(msg->findBuffer("buffer", &buffer));
@@ -993,15 +941,16 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
entry.mFinalResult = OK;
entry.mBufferOrdinal = ++mTotalBuffersQueued;
- Mutex::Autolock autoLock(mLock);
if (audio) {
+ Mutex::Autolock autoLock(mLock);
mAudioQueue.push_back(entry);
postDrainAudioQueue_l();
} else {
mVideoQueue.push_back(entry);
- postDrainVideoQueue_l();
+ postDrainVideoQueue();
}
+ Mutex::Autolock autoLock(mLock);
if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
return;
}
@@ -1050,7 +999,9 @@ void NuPlayer::Renderer::syncQueuesDone_l() {
}
if (!mVideoQueue.empty()) {
- postDrainVideoQueue_l();
+ mLock.unlock();
+ postDrainVideoQueue();
+ mLock.lock();
}
}
@@ -1058,7 +1009,7 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
- if (dropBufferWhileFlushing(audio, msg)) {
+ if (dropBufferIfStale(audio, msg)) {
return;
}
@@ -1069,19 +1020,20 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
entry.mOffset = 0;
entry.mFinalResult = finalResult;
- Mutex::Autolock autoLock(mLock);
if (audio) {
+ Mutex::Autolock autoLock(mLock);
if (mAudioQueue.empty() && mSyncQueues) {
syncQueuesDone_l();
}
mAudioQueue.push_back(entry);
postDrainAudioQueue_l();
} else {
- if (mVideoQueue.empty() && mSyncQueues) {
+ if (mVideoQueue.empty() && getSyncQueues()) {
+ Mutex::Autolock autoLock(mLock);
syncQueuesDone_l();
}
mVideoQueue.push_back(entry);
- postDrainVideoQueue_l();
+ postDrainVideoQueue();
}
}
@@ -1090,31 +1042,25 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
CHECK(msg->findInt32("audio", &audio));
{
- Mutex::Autolock autoLock(mFlushLock);
+ Mutex::Autolock autoLock(mLock);
if (audio) {
- mFlushingAudio = false;
notifyComplete = mNotifyCompleteAudio;
mNotifyCompleteAudio = false;
} else {
- mFlushingVideo = false;
notifyComplete = mNotifyCompleteVideo;
mNotifyCompleteVideo = false;
}
- }
- // If we're currently syncing the queues, i.e. dropping audio while
- // aligning the first audio/video buffer times and only one of the
- // two queues has data, we may starve that queue by not requesting
- // more buffers from the decoder. If the other source then encounters
- // a discontinuity that leads to flushing, we'll never find the
- // corresponding discontinuity on the other queue.
- // Therefore we'll stop syncing the queues if at least one of them
- // is flushed.
- {
- Mutex::Autolock autoLock(mLock);
- syncQueuesDone_l();
- setPauseStartedTimeRealUs(-1);
- setAnchorTime(-1, -1);
+ // If we're currently syncing the queues, i.e. dropping audio while
+ // aligning the first audio/video buffer times and only one of the
+ // two queues has data, we may starve that queue by not requesting
+ // more buffers from the decoder. If the other source then encounters
+ // a discontinuity that leads to flushing, we'll never find the
+ // corresponding discontinuity on the other queue.
+ // Therefore we'll stop syncing the queues if at least one of them
+ // is flushed.
+ syncQueuesDone_l();
+ clearAnchorTime_l();
}
ALOGV("flushing %s", audio ? "audio" : "video");
@@ -1123,11 +1069,11 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
Mutex::Autolock autoLock(mLock);
flushQueue(&mAudioQueue);
- ++mAudioQueueGeneration;
- prepareForMediaRenderingStart();
+ ++mAudioDrainGeneration;
+ prepareForMediaRenderingStart_l();
if (offloadingAudio()) {
- setAudioFirstAnchorTime(-1);
+ clearAudioFirstAnchorTime_l();
}
}
@@ -1142,13 +1088,14 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
flushQueue(&mVideoQueue);
mDrainVideoQueuePending = false;
- ++mVideoQueueGeneration;
if (mVideoScheduler != NULL) {
mVideoScheduler->restart();
}
- prepareForMediaRenderingStart();
+ Mutex::Autolock autoLock(mLock);
+ ++mVideoDrainGeneration;
+ prepareForMediaRenderingStart_l();
}
mVideoSampleReceived = false;
@@ -1178,20 +1125,12 @@ void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
notify->post();
}
-bool NuPlayer::Renderer::dropBufferWhileFlushing(
+bool NuPlayer::Renderer::dropBufferIfStale(
bool audio, const sp<AMessage> &msg) {
- bool flushing = false;
-
- {
- Mutex::Autolock autoLock(mFlushLock);
- if (audio) {
- flushing = mFlushingAudio;
- } else {
- flushing = mFlushingVideo;
- }
- }
+ int32_t queueGeneration;
+ CHECK(msg->findInt32("queueGeneration", &queueGeneration));
- if (!flushing) {
+ if (queueGeneration == getQueueGeneration(audio)) {
return false;
}
@@ -1209,7 +1148,10 @@ void NuPlayer::Renderer::onAudioSinkChanged() {
}
CHECK(!mDrainAudioQueuePending);
mNumFramesWritten = 0;
- mAnchorNumFramesWritten = -1;
+ {
+ Mutex::Autolock autoLock(mLock);
+ mAnchorNumFramesWritten = -1;
+ }
uint32_t written;
if (mAudioSink->getFramesWritten(&written) == OK) {
mNumFramesWritten = written;
@@ -1219,13 +1161,13 @@ void NuPlayer::Renderer::onAudioSinkChanged() {
void NuPlayer::Renderer::onDisableOffloadAudio() {
Mutex::Autolock autoLock(mLock);
mFlags &= ~FLAG_OFFLOAD_AUDIO;
- ++mAudioQueueGeneration;
+ ++mAudioDrainGeneration;
}
void NuPlayer::Renderer::onEnableOffloadAudio() {
Mutex::Autolock autoLock(mLock);
mFlags |= FLAG_OFFLOAD_AUDIO;
- ++mAudioQueueGeneration;
+ ++mAudioDrainGeneration;
}
void NuPlayer::Renderer::onPause() {
@@ -1234,25 +1176,13 @@ void NuPlayer::Renderer::onPause() {
return;
}
int64_t currentPositionUs;
- int64_t pausePositionMediaTimeUs;
- if (getCurrentPositionFromAnchor(
- &currentPositionUs, ALooper::GetNowUs()) == OK) {
- pausePositionMediaTimeUs = currentPositionUs;
- } else {
- // Set paused position to -1 (unavailabe) if we don't have anchor time
- // This could happen if client does a seekTo() immediately followed by
- // pause(). Renderer will be flushed with anchor time cleared. We don't
- // want to leave stale value in mPausePositionMediaTimeUs.
- pausePositionMediaTimeUs = -1;
- }
{
Mutex::Autolock autoLock(mLock);
- mPausePositionMediaTimeUs = pausePositionMediaTimeUs;
- ++mAudioQueueGeneration;
- ++mVideoQueueGeneration;
- prepareForMediaRenderingStart();
+ ++mAudioDrainGeneration;
+ ++mVideoDrainGeneration;
+ prepareForMediaRenderingStart_l();
mPaused = true;
- setPauseStartedTimeRealUs(ALooper::GetNowUs());
+ mMediaClock->setPlaybackRate(0.0);
}
mDrainAudioQueuePending = false;
@@ -1277,21 +1207,18 @@ void NuPlayer::Renderer::onResume() {
mAudioSink->start();
}
- Mutex::Autolock autoLock(mLock);
- mPaused = false;
- if (mPauseStartedTimeRealUs != -1) {
- int64_t newAnchorRealUs =
- mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
- setAnchorTime(
- mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */);
- }
+ {
+ Mutex::Autolock autoLock(mLock);
+ mPaused = false;
+ mMediaClock->setPlaybackRate(mPlaybackRate);
- if (!mAudioQueue.empty()) {
- postDrainAudioQueue_l();
+ if (!mAudioQueue.empty()) {
+ postDrainAudioQueue_l();
+ }
}
if (!mVideoQueue.empty()) {
- postDrainVideoQueue_l();
+ postDrainVideoQueue();
}
}
@@ -1302,6 +1229,21 @@ void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
mVideoScheduler->init(fps);
}
+int32_t NuPlayer::Renderer::getQueueGeneration(bool audio) {
+ Mutex::Autolock autoLock(mLock);
+ return (audio ? mAudioQueueGeneration : mVideoQueueGeneration);
+}
+
+int32_t NuPlayer::Renderer::getDrainGeneration(bool audio) {
+ Mutex::Autolock autoLock(mLock);
+ return (audio ? mAudioDrainGeneration : mVideoDrainGeneration);
+}
+
+bool NuPlayer::Renderer::getSyncQueues() {
+ Mutex::Autolock autoLock(mLock);
+ return mSyncQueues;
+}
+
// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
// as it acquires locks and may query the audio driver.
//
@@ -1309,6 +1251,7 @@ void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
// accessing getTimestamp() or getPosition() every time a data buffer with
// a media time is received.
//
+// Calculate duration of played samples if played at normal rate (i.e., 1.0).
int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
uint32_t numFramesPlayed;
int64_t numFramesPlayedAt;
@@ -1346,9 +1289,8 @@ int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
//ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
}
- // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
//CHECK_EQ(numFramesPlayed & (1 << 31), 0); // can't be negative until 12.4 hrs, test
- int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame())
+ int64_t durationUs = getDurationUsIfPlayedAtSampleRate(numFramesPlayed)
+ nowUs - numFramesPlayedAt;
if (durationUs < 0) {
// Occurs when numFramesPlayed position is very small and the following:
@@ -1373,7 +1315,7 @@ void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reaso
mAudioOffloadTornDown = true;
int64_t currentPositionUs;
- if (getCurrentPositionOnLooper(&currentPositionUs) != OK) {
+ if (getCurrentPosition(&currentPositionUs) != OK) {
currentPositionUs = 0;
}
@@ -1390,8 +1332,8 @@ void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reaso
void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
if (offloadingAudio()) {
mWakeLock->acquire();
- sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
- msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
+ sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this);
+ msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);
msg->post(kOffloadPauseMaxUs);
}
}
@@ -1487,6 +1429,10 @@ status_t NuPlayer::Renderer::onOpenAudioSink(
&offloadInfo);
if (err == OK) {
+ if (mPlaybackRate != 1.0) {
+ mAudioSink->setPlaybackRatePermille(
+ (int32_t)(mPlaybackRate * 1000 + 0.5f));
+ }
// If the playback is offloaded to h/w, we pass
// the HAL some metadata information.
// We don't want to do this for PCM because it
@@ -1542,6 +1488,10 @@ status_t NuPlayer::Renderer::onOpenAudioSink(
return err;
}
mCurrentPcmInfo = info;
+ if (mPlaybackRate != 1.0) {
+ mAudioSink->setPlaybackRatePermille(
+ (int32_t)(mPlaybackRate * 1000 + 0.5f));
+ }
mAudioSink->start();
}
if (audioSinkChanged) {