path: root/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
Diffstat (limited to 'media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp')
-rw-r--r-- media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp | 617
1 file changed, 507 insertions(+), 110 deletions(-)
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index bf5271e..d6bf1de 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -20,23 +20,42 @@
#include "NuPlayerRenderer.h"
-#include "SoftwareRenderer.h"
+#include <cutils/properties.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+
+#include <VideoFrameScheduler.h>
+
+#include <inttypes.h>
namespace android {
+// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
+// is closed to allow the audio DSP to power down.
+static const int64_t kOffloadPauseMaxUs = 60000000ll;
+
// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
+static bool sFrameAccurateAVsync = false;
+
+static void readProperties() {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("persist.sys.media.avsync", value, NULL)) {
+ sFrameAccurateAVsync =
+ !strcmp("1", value) || !strcasecmp("true", value);
+ }
+}
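
(Note: readProperties() is called from the constructor and again in onResume(), so, assuming the standard Android property tooling, frame-accurate AV sync can be toggled on a device with `adb shell setprop persist.sys.media.avsync 1` and is picked up on the next resume.)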
+
NuPlayer::Renderer::Renderer(
const sp<MediaPlayerBase::AudioSink> &sink,
const sp<AMessage> &notify,
uint32_t flags)
: mAudioSink(sink),
- mSoftRenderer(NULL),
mNotify(notify),
mFlags(flags),
mNumFramesWritten(0),
@@ -44,6 +63,7 @@ NuPlayer::Renderer::Renderer(
mDrainVideoQueuePending(false),
mAudioQueueGeneration(0),
mVideoQueueGeneration(0),
+ mFirstAnchorTimeMediaUs(-1),
mAnchorTimeMediaUs(-1),
mAnchorTimeRealUs(-1),
mFlushingAudio(false),
@@ -52,20 +72,23 @@ NuPlayer::Renderer::Renderer(
mHasVideo(false),
mSyncQueues(false),
mPaused(false),
+ mVideoSampleReceived(false),
mVideoRenderingStarted(false),
mVideoRenderingStartGeneration(0),
mAudioRenderingStartGeneration(0),
mLastPositionUpdateUs(-1ll),
- mVideoLateByUs(0ll) {
+ mVideoLateByUs(0ll),
+ mAudioOffloadPauseTimeoutGeneration(0),
+ mAudioOffloadTornDown(false) {
+ readProperties();
}
NuPlayer::Renderer::~Renderer() {
- delete mSoftRenderer;
-}
-
-void NuPlayer::Renderer::setSoftRenderer(SoftwareRenderer *softRenderer) {
- delete mSoftRenderer;
- mSoftRenderer = softRenderer;
+ if (offloadingAudio()) {
+ mAudioSink->stop();
+ mAudioSink->flush();
+ mAudioSink->close();
+ }
}
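
(The replaced destructor body mirrors the offload teardown path added later in this change: stop() halts the offloaded track, flush() drops any queued compressed data, and close() releases the AudioSink so, per the kOffloadPauseMaxUs comment above, the audio DSP is not kept powered after the renderer is destroyed.)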
void NuPlayer::Renderer::queueBuffer(
@@ -92,10 +115,14 @@ void NuPlayer::Renderer::flush(bool audio) {
{
Mutex::Autolock autoLock(mFlushLock);
if (audio) {
- CHECK(!mFlushingAudio);
+ if (mFlushingAudio) {
+ return;
+ }
mFlushingAudio = true;
} else {
- CHECK(!mFlushingVideo);
+ if (mFlushingVideo) {
+ return;
+ }
mFlushingVideo = true;
}
}
@@ -106,13 +133,23 @@ void NuPlayer::Renderer::flush(bool audio) {
}
void NuPlayer::Renderer::signalTimeDiscontinuity() {
+ Mutex::Autolock autoLock(mLock);
// CHECK(mAudioQueue.empty());
// CHECK(mVideoQueue.empty());
+ mFirstAnchorTimeMediaUs = -1;
mAnchorTimeMediaUs = -1;
mAnchorTimeRealUs = -1;
mSyncQueues = false;
}
+void NuPlayer::Renderer::signalAudioSinkChanged() {
+ (new AMessage(kWhatAudioSinkChanged, id()))->post();
+}
+
+void NuPlayer::Renderer::signalDisableOffloadAudio() {
+ (new AMessage(kWhatDisableOffloadAudio, id()))->post();
+}
+
void NuPlayer::Renderer::pause() {
(new AMessage(kWhatPause, id()))->post();
}
@@ -121,8 +158,20 @@ void NuPlayer::Renderer::resume() {
(new AMessage(kWhatResume, id()))->post();
}
+void NuPlayer::Renderer::setVideoFrameRate(float fps) {
+ sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
+ msg->setFloat("frame-rate", fps);
+ msg->post();
+}
+
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
+ case kWhatStopAudioSink:
+ {
+ mAudioSink->stop();
+ break;
+ }
+
case kWhatDrainAudioQueue:
{
int32_t generation;
@@ -149,7 +198,10 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
// Let's give it more data after about half that time
// has elapsed.
- postDrainAudioQueue(delayUs / 2);
+ // kWhatDrainAudioQueue is used for non-offloading mode,
+ // and mLock is used only for offloading mode. Therefore,
+ // no need to acquire mLock here.
+ postDrainAudioQueue_l(delayUs / 2);
}
break;
}
@@ -194,6 +246,12 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatDisableOffloadAudio:
+ {
+ onDisableOffloadAudio();
+ break;
+ }
+
case kWhatPause:
{
onPause();
@@ -206,14 +264,41 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatSetVideoFrameRate:
+ {
+ float fps;
+ CHECK(msg->findFloat("frame-rate", &fps));
+ onSetVideoFrameRate(fps);
+ break;
+ }
+
+ case kWhatAudioOffloadTearDown:
+ {
+ onAudioOffloadTearDown(kDueToError);
+ break;
+ }
+
+ case kWhatAudioOffloadPauseTimeout:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ if (generation != mAudioOffloadPauseTimeoutGeneration) {
+ break;
+ }
+ ALOGV("Audio Offload tear down due to pause timeout.");
+ onAudioOffloadTearDown(kDueToTimeout);
+ break;
+ }
+
default:
TRESPASS();
break;
}
}
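
The kWhatAudioOffloadPauseTimeout case above relies on a generation counter to invalidate delayed messages: cancelAudioOffloadPauseTimeout() (near the end of this change) only bumps mAudioOffloadPauseTimeoutGeneration, so a timeout that was already posted is recognized as stale when it is finally delivered. A minimal standalone sketch of the pattern, with plain C++ standing in for ALooper/AMessage and all names hypothetical:

```cpp
#include <cstdint>
#include <cstdio>

// Sketch of the generation-counter idiom used for the offload pause timeout.
struct PauseTimeout {
    int32_t generation = 0;

    // Arming a timeout captures the current generation into the "message".
    int32_t arm() const { return generation; }

    // Cancelling never races with delivery: it just bumps the counter,
    // turning any in-flight message into a no-op.
    void cancel() { ++generation; }

    // Delivery handler, analogous to the kWhatAudioOffloadPauseTimeout case.
    void fire(int32_t msgGeneration) const {
        if (msgGeneration != generation) {
            std::printf("stale timeout ignored\n");
            return;
        }
        std::printf("timeout still valid: tear down audio offload\n");
    }
};

int main() {
    PauseTimeout t;
    int32_t g = t.arm();  // pause(): schedule the tear-down
    t.cancel();           // resume(): invalidate it
    t.fire(g);            // late delivery is dropped harmlessly
}
```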
-void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
- if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
+void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
+ if (mDrainAudioQueuePending || mSyncQueues || mPaused
+ || offloadingAudio()) {
return;
}
@@ -227,10 +312,6 @@ void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
msg->post(delayUs);
}
-void NuPlayer::Renderer::signalAudioSinkChanged() {
- (new AMessage(kWhatAudioSinkChanged, id()))->post();
-}
-
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
mAudioRenderingStartGeneration = mAudioQueueGeneration;
mVideoRenderingStartGeneration = mVideoQueueGeneration;
@@ -248,6 +329,103 @@ void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
}
}
+// static
+size_t NuPlayer::Renderer::AudioSinkCallback(
+ MediaPlayerBase::AudioSink * /* audioSink */,
+ void *buffer,
+ size_t size,
+ void *cookie,
+ MediaPlayerBase::AudioSink::cb_event_t event) {
+ NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
+
+ switch (event) {
+ case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
+ {
+ return me->fillAudioBuffer(buffer, size);
+ break;
+ }
+
+ case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
+ {
+ me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
+ break;
+ }
+
+ case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
+ {
+ me->notifyAudioOffloadTearDown();
+ break;
+ }
+ }
+
+ return 0;
+}
+
+size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (!offloadingAudio() || mPaused) {
+ return 0;
+ }
+
+ bool hasEOS = false;
+
+ size_t sizeCopied = 0;
+ bool firstEntry = true;
+ while (sizeCopied < size && !mAudioQueue.empty()) {
+ QueueEntry *entry = &*mAudioQueue.begin();
+
+ if (entry->mBuffer == NULL) { // EOS
+ hasEOS = true;
+ mAudioQueue.erase(mAudioQueue.begin());
+ entry = NULL;
+ break;
+ }
+
+ if (firstEntry && entry->mOffset == 0) {
+ firstEntry = false;
+ int64_t mediaTimeUs;
+ CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+ ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
+ if (mFirstAnchorTimeMediaUs == -1) {
+ mFirstAnchorTimeMediaUs = mediaTimeUs;
+ }
+
+ int64_t nowUs = ALooper::GetNowUs();
+ mAnchorTimeMediaUs =
+ mFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs);
+ mAnchorTimeRealUs = nowUs;
+
+ notifyPosition();
+ }
+
+ size_t copy = entry->mBuffer->size() - entry->mOffset;
+ size_t sizeRemaining = size - sizeCopied;
+ if (copy > sizeRemaining) {
+ copy = sizeRemaining;
+ }
+
+ memcpy((char *)buffer + sizeCopied,
+ entry->mBuffer->data() + entry->mOffset,
+ copy);
+
+ entry->mOffset += copy;
+ if (entry->mOffset == entry->mBuffer->size()) {
+ entry->mNotifyConsumed->post();
+ mAudioQueue.erase(mAudioQueue.begin());
+ entry = NULL;
+ }
+ sizeCopied += copy;
+ notifyIfMediaRenderingStarted();
+ }
+
+ if (hasEOS) {
+ (new AMessage(kWhatStopAudioSink, id()))->post();
+ }
+
+ return sizeCopied;
+}
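
fillAudioBuffer() above is the pull half of offloaded playback: the AudioSink's callback thread requests up to `size` bytes and the renderer drains the front of mAudioQueue, keeping a per-entry offset so a single buffer can straddle several callbacks. A distilled, standalone version of just the copy loop (STL types standing in for ABuffer and List; names hypothetical):

```cpp
#include <algorithm>
#include <cstring>
#include <deque>
#include <vector>

struct Entry {
    std::vector<char> data;
    size_t offset = 0;  // how much of this buffer earlier callbacks consumed
};

// Copy as much queued data as fits; a short return is fine, since the sink
// simply calls back again when it needs more.
size_t fill(std::deque<Entry> &queue, void *out, size_t size) {
    size_t copied = 0;
    while (copied < size && !queue.empty()) {
        Entry &e = queue.front();
        size_t copy = std::min(e.data.size() - e.offset, size - copied);
        std::memcpy(static_cast<char *>(out) + copied,
                    e.data.data() + e.offset, copy);
        e.offset += copy;
        copied += copy;
        if (e.offset == e.data.size()) {
            // Fully consumed; the real code also posts mNotifyConsumed here.
            queue.pop_front();
        }
    }
    return copied;
}

int main() {
    std::deque<Entry> q(1);
    q.front().data.resize(100);
    char out[64];
    fill(q, out, sizeof(out));  // copies 64 bytes, leaves 36 pending
}
```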
+
bool NuPlayer::Renderer::onDrainAudioQueue() {
uint32_t numFramesPlayed;
if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
@@ -274,8 +452,11 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
if (entry->mBuffer == NULL) {
// EOS
-
- notifyEOS(true /* audio */, entry->mFinalResult);
+ int64_t postEOSDelayUs = 0;
+ if (mAudioSink->needsTrailingPadding()) {
+ postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
+ }
+ notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
mAudioQueue.erase(mAudioQueue.begin());
entry = NULL;
@@ -285,26 +466,16 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
if (entry->mOffset == 0) {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
-
ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
-
+ if (mFirstAnchorTimeMediaUs == -1) {
+ mFirstAnchorTimeMediaUs = mediaTimeUs;
+ }
mAnchorTimeMediaUs = mediaTimeUs;
- uint32_t numFramesPlayed;
- CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
-
- uint32_t numFramesPendingPlayout =
- mNumFramesWritten - numFramesPlayed;
-
- int64_t realTimeOffsetUs =
- (mAudioSink->latency() / 2 /* XXX */
- + numFramesPendingPlayout
- * mAudioSink->msecsPerFrame()) * 1000ll;
+ int64_t nowUs = ALooper::GetNowUs();
+ mAnchorTimeRealUs = nowUs + getPendingAudioPlayoutDurationUs(nowUs);
- // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);
-
- mAnchorTimeRealUs =
- ALooper::GetNowUs() + realTimeOffsetUs;
+ notifyPosition();
}
size_t copy = entry->mBuffer->size() - entry->mOffset;
@@ -312,11 +483,13 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
copy = numBytesAvailableToWrite;
}
- CHECK_EQ(mAudioSink->write(
- entry->mBuffer->data() + entry->mOffset, copy),
- (ssize_t)copy);
+ ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
+ if (written < 0) {
+ // An error in AudioSink write is fatal here.
+ LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+ }
- entry->mOffset += copy;
+ entry->mOffset += written;
if (entry->mOffset == entry->mBuffer->size()) {
entry->mNotifyConsumed->post();
mAudioQueue.erase(mAudioQueue.begin());
@@ -324,20 +497,46 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
entry = NULL;
}
- numBytesAvailableToWrite -= copy;
- size_t copiedFrames = copy / mAudioSink->frameSize();
+ numBytesAvailableToWrite -= written;
+ size_t copiedFrames = written / mAudioSink->frameSize();
mNumFramesWritten += copiedFrames;
notifyIfMediaRenderingStarted();
- }
-
- notifyPosition();
+ if (written != (ssize_t)copy) {
+ // A short count was received from AudioSink::write()
+ //
+ // AudioSink write should block until exactly the number of bytes are delivered.
+ // But it may return with a short count (without an error) when:
+ //
+ // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
+ // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
+
+ // (Case 1)
+ // Must be a multiple of the frame size. If it is not a multiple of a frame size, it
+ // needs to fail, as we should not carry over fractional frames between calls.
+ CHECK_EQ(copy % mAudioSink->frameSize(), 0);
+
+ // (Case 2)
+ // Return early to the caller.
+ // Beware of calling immediately again as this may busy-loop if you are not careful.
+ ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
+ break;
+ }
+ }
return !mAudioQueue.empty();
}
+int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
+ int64_t writtenAudioDurationUs =
+ mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
+ return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
+}
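
The helper above is simple bookkeeping: pendingUs = framesWritten × usPerFrame − playedOutUs. For instance, at 48 kHz (msecsPerFrame ≈ 0.0208), 96,000 written frames represent 2,000,000 us; if getPlayedOutAudioDurationUs() reports 1,400,000 us, about 600,000 us of audio is still queued. That pending duration is what onDrainAudioQueue() now adds to the real-time anchor, and what delays the EOS notification when the sink needs trailing padding.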
+
void NuPlayer::Renderer::postDrainVideoQueue() {
- if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
+ if (mDrainVideoQueuePending
+ || mSyncQueues
+ || (mPaused && mVideoSampleReceived)) {
return;
}
@@ -350,36 +549,53 @@ void NuPlayer::Renderer::postDrainVideoQueue() {
sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
msg->setInt32("generation", mVideoQueueGeneration);
- int64_t delayUs;
-
if (entry.mBuffer == NULL) {
// EOS doesn't carry a timestamp.
- delayUs = 0;
- } else if (mFlags & FLAG_REAL_TIME) {
+ msg->post();
+ mDrainVideoQueuePending = true;
+ return;
+ }
+
+ int64_t delayUs;
+ int64_t nowUs = ALooper::GetNowUs();
+ int64_t realTimeUs;
+ if (mFlags & FLAG_REAL_TIME) {
int64_t mediaTimeUs;
CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
-
- delayUs = mediaTimeUs - ALooper::GetNowUs();
+ realTimeUs = mediaTimeUs;
} else {
int64_t mediaTimeUs;
CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+ if (mFirstAnchorTimeMediaUs == -1 && !mHasAudio) {
+ mFirstAnchorTimeMediaUs = mediaTimeUs;
+ }
if (mAnchorTimeMediaUs < 0) {
- delayUs = 0;
-
if (!mHasAudio) {
mAnchorTimeMediaUs = mediaTimeUs;
- mAnchorTimeRealUs = ALooper::GetNowUs();
+ mAnchorTimeRealUs = nowUs;
+ notifyPosition();
}
+ realTimeUs = nowUs;
} else {
- int64_t realTimeUs =
+ realTimeUs =
(mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;
-
- delayUs = realTimeUs - ALooper::GetNowUs();
}
}
- msg->post(delayUs);
+ realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
+ int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
+
+ delayUs = realTimeUs - nowUs;
+
+ ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
+ // post 2 display refreshes before rendering is due
+ // FIXME currently this increases power consumption, so unless frame-accurate
+ // AV sync is requested, post closer to required render time (at 0.63 vsyncs)
+ if (!sFrameAccurateAVsync) {
+ twoVsyncsUs >>= 4;
+ }
+ msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
mDrainVideoQueuePending = true;
}
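
For scale, assuming a 60 Hz panel where getVsyncPeriod() returns about 16,667 us: twoVsyncsUs starts at roughly 33,333 us, so with frame-accurate AV sync enabled a frame due in 50,000 us is delivered about 33 ms early, giving the scheduler two refreshes of slack. By default the >>= 4 shrinks that lead to about 2,083 us (an eighth of a vsync), trading scheduling slack for the lower power draw the FIXME mentions.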
@@ -400,8 +616,6 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
entry = NULL;
mVideoLateByUs = 0ll;
-
- notifyPosition();
return;
}
@@ -415,34 +629,39 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
}
- mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
- bool tooLate = (mVideoLateByUs > 40000);
+ bool tooLate = false;
- if (tooLate) {
- ALOGV("video late by %lld us (%.2f secs)",
- mVideoLateByUs, mVideoLateByUs / 1E6);
- } else {
- ALOGV("rendering video at media time %.2f secs",
- (mFlags & FLAG_REAL_TIME ? realTimeUs :
- (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
- if (mSoftRenderer != NULL) {
- mSoftRenderer->render(entry->mBuffer->data(), entry->mBuffer->size(), NULL);
+ if (!mPaused) {
+ mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
+ tooLate = (mVideoLateByUs > 40000);
+
+ if (tooLate) {
+ ALOGV("video late by %lld us (%.2f secs)",
+ mVideoLateByUs, mVideoLateByUs / 1E6);
+ } else {
+ ALOGV("rendering video at media time %.2f secs",
+ (mFlags & FLAG_REAL_TIME ? realTimeUs :
+ (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
}
+ } else {
+ mVideoLateByUs = 0ll;
}
+ entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
entry->mNotifyConsumed->setInt32("render", !tooLate);
entry->mNotifyConsumed->post();
mVideoQueue.erase(mVideoQueue.begin());
entry = NULL;
- if (!mVideoRenderingStarted) {
- mVideoRenderingStarted = true;
- notifyVideoRenderingStart();
- }
+ mVideoSampleReceived = true;
- notifyIfMediaRenderingStarted();
-
- notifyPosition();
+ if (!mPaused) {
+ if (!mVideoRenderingStarted) {
+ mVideoRenderingStarted = true;
+ notifyVideoRenderingStart();
+ }
+ notifyIfMediaRenderingStarted();
+ }
}
void NuPlayer::Renderer::notifyVideoRenderingStart() {
@@ -451,12 +670,16 @@ void NuPlayer::Renderer::notifyVideoRenderingStart() {
notify->post();
}
-void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
+void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatEOS);
notify->setInt32("audio", static_cast<int32_t>(audio));
notify->setInt32("finalResult", finalResult);
- notify->post();
+ notify->post(delayUs);
+}
+
+void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
+ (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
@@ -467,6 +690,10 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
mHasAudio = true;
} else {
mHasVideo = true;
+ if (mVideoScheduler == NULL) {
+ mVideoScheduler = new VideoFrameScheduler();
+ mVideoScheduler->init();
+ }
}
if (dropBufferWhileFlushing(audio, msg)) {
@@ -486,13 +713,15 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
entry.mFinalResult = OK;
if (audio) {
+ Mutex::Autolock autoLock(mLock);
mAudioQueue.push_back(entry);
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
} else {
mVideoQueue.push_back(entry);
postDrainVideoQueue();
}
+ Mutex::Autolock autoLock(mLock);
if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
return;
}
@@ -502,7 +731,7 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
// EOS signalled on either queue.
- syncQueuesDone();
+ syncQueuesDone_l();
return;
}
@@ -526,10 +755,10 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
return;
}
- syncQueuesDone();
+ syncQueuesDone_l();
}
-void NuPlayer::Renderer::syncQueuesDone() {
+void NuPlayer::Renderer::syncQueuesDone_l() {
if (!mSyncQueues) {
return;
}
@@ -537,7 +766,7 @@ void NuPlayer::Renderer::syncQueuesDone() {
mSyncQueues = false;
if (!mAudioQueue.empty()) {
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
}
if (!mVideoQueue.empty()) {
@@ -561,14 +790,16 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
entry.mFinalResult = finalResult;
if (audio) {
+ Mutex::Autolock autoLock(mLock);
if (mAudioQueue.empty() && mSyncQueues) {
- syncQueuesDone();
+ syncQueuesDone_l();
}
mAudioQueue.push_back(entry);
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
} else {
if (mVideoQueue.empty() && mSyncQueues) {
- syncQueuesDone();
+ Mutex::Autolock autoLock(mLock);
+ syncQueuesDone_l();
}
mVideoQueue.push_back(entry);
postDrainVideoQueue();
@@ -579,6 +810,15 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
+ {
+ Mutex::Autolock autoLock(mFlushLock);
+ if (audio) {
+ mFlushingAudio = false;
+ } else {
+ mFlushingVideo = false;
+ }
+ }
+
// If we're currently syncing the queues, i.e. dropping audio while
// aligning the first audio/video buffer times and only one of the
// two queues has data, we may starve that queue by not requesting
@@ -587,31 +827,46 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
// corresponding discontinuity on the other queue.
// Therefore we'll stop syncing the queues if at least one of them
// is flushed.
- syncQueuesDone();
+ {
+ Mutex::Autolock autoLock(mLock);
+ syncQueuesDone_l();
+ }
ALOGV("flushing %s", audio ? "audio" : "video");
if (audio) {
- flushQueue(&mAudioQueue);
+ {
+ Mutex::Autolock autoLock(mLock);
+ flushQueue(&mAudioQueue);
- Mutex::Autolock autoLock(mFlushLock);
- mFlushingAudio = false;
+ ++mAudioQueueGeneration;
+ prepareForMediaRenderingStart();
+
+ if (offloadingAudio()) {
+ mFirstAnchorTimeMediaUs = -1;
+ }
+ }
mDrainAudioQueuePending = false;
- ++mAudioQueueGeneration;
- prepareForMediaRenderingStart();
+ if (offloadingAudio()) {
+ mAudioSink->pause();
+ mAudioSink->flush();
+ mAudioSink->start();
+ }
} else {
flushQueue(&mVideoQueue);
- Mutex::Autolock autoLock(mFlushLock);
- mFlushingVideo = false;
-
mDrainVideoQueuePending = false;
++mVideoQueueGeneration;
+ if (mVideoScheduler != NULL) {
+ mVideoScheduler->restart();
+ }
+
prepareForMediaRenderingStart();
}
+ mVideoSampleReceived = false;
notifyFlushComplete(audio);
}
@@ -661,6 +916,9 @@ bool NuPlayer::Renderer::dropBufferWhileFlushing(
}
void NuPlayer::Renderer::onAudioSinkChanged() {
+ if (offloadingAudio()) {
+ return;
+ }
CHECK(!mDrainAudioQueuePending);
mNumFramesWritten = 0;
uint32_t written;
@@ -669,10 +927,18 @@ void NuPlayer::Renderer::onAudioSinkChanged() {
}
}
+void NuPlayer::Renderer::onDisableOffloadAudio() {
+ Mutex::Autolock autoLock(mLock);
+ mFlags &= ~FLAG_OFFLOAD_AUDIO;
+ ++mAudioQueueGeneration;
+}
+
void NuPlayer::Renderer::notifyPosition() {
- if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
- return;
- }
+ // notifyPosition() must be called only after setting mAnchorTimeRealUs
+ // and mAnchorTimeMediaUs, and must not be paused as it extrapolates position.
+ //CHECK_GE(mAnchorTimeRealUs, 0);
+ //CHECK_GE(mAnchorTimeMediaUs, 0);
+ //CHECK(!mPaused || !mHasAudio); // video-only does display in paused mode.
int64_t nowUs = ALooper::GetNowUs();
@@ -684,6 +950,18 @@ void NuPlayer::Renderer::notifyPosition() {
int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
+ //ALOGD("notifyPosition: positionUs(%lld) nowUs(%lld) mAnchorTimeRealUs(%lld)"
+ // " mAnchorTimeMediaUs(%lld) mFirstAnchorTimeMediaUs(%lld)",
+ // (long long)positionUs, (long long)nowUs, (long long)mAnchorTimeRealUs,
+ // (long long)mAnchorTimeMediaUs, (long long)mFirstAnchorTimeMediaUs);
+
+ // Due to adding the latency to mAnchorTimeRealUs in onDrainAudioQueue(),
+ // positionUs may be less than the first media time. This is avoided
+ // here to prevent potential retrograde motion of the position bar
+ // when starting up after a seek.
+ if (positionUs < mFirstAnchorTimeMediaUs) {
+ positionUs = mFirstAnchorTimeMediaUs;
+ }
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatPosition);
notify->setInt64("positionUs", positionUs);
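
Concretely: onDrainAudioQueue() sets mAnchorTimeRealUs to nowUs plus the pending playout duration, so immediately afterwards (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs can evaluate to slightly less than the first buffer's media time. Clamping to mFirstAnchorTimeMediaUs keeps the reported position, and therefore the progress bar, from stepping backward right after a seek.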
@@ -692,39 +970,47 @@ void NuPlayer::Renderer::notifyPosition() {
}
void NuPlayer::Renderer::onPause() {
- CHECK(!mPaused);
+ if (mPaused) {
+ ALOGW("Renderer::onPause() called while already paused!");
+ return;
+ }
+ {
+ Mutex::Autolock autoLock(mLock);
+ ++mAudioQueueGeneration;
+ ++mVideoQueueGeneration;
+ prepareForMediaRenderingStart();
+ mPaused = true;
+ }
mDrainAudioQueuePending = false;
- ++mAudioQueueGeneration;
-
mDrainVideoQueuePending = false;
- ++mVideoQueueGeneration;
-
- prepareForMediaRenderingStart();
if (mHasAudio) {
mAudioSink->pause();
+ startAudioOffloadPauseTimeout();
}
ALOGV("now paused audio queue has %d entries, video has %d entries",
mAudioQueue.size(), mVideoQueue.size());
-
- mPaused = true;
}
void NuPlayer::Renderer::onResume() {
+ readProperties();
+
if (!mPaused) {
return;
}
if (mHasAudio) {
+ cancelAudioOffloadPauseTimeout();
mAudioSink->start();
}
+ Mutex::Autolock autoLock(mLock);
mPaused = false;
if (!mAudioQueue.empty()) {
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
}
if (!mVideoQueue.empty()) {
@@ -732,5 +1018,116 @@ void NuPlayer::Renderer::onResume() {
}
}
+void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
+ if (mVideoScheduler == NULL) {
+ mVideoScheduler = new VideoFrameScheduler();
+ }
+ mVideoScheduler->init(fps);
+}
+
+// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
+// as it acquires locks and may query the audio driver.
+//
+// Some calls are not needed since notifyPosition() doesn't always deliver a message.
+// Some calls could conceivably retrieve extrapolated data instead of
+// accessing getTimestamp() or getPosition() every time a data buffer with
+// a media time is received.
+//
+int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
+ uint32_t numFramesPlayed;
+ int64_t numFramesPlayedAt;
+ AudioTimestamp ts;
+ static const int64_t kStaleTimestamp100ms = 100000;
+
+ status_t res = mAudioSink->getTimestamp(ts);
+ if (res == OK) { // case 1: mixing audio tracks and offloaded tracks.
+ numFramesPlayed = ts.mPosition;
+ numFramesPlayedAt =
+ ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
+ const int64_t timestampAge = nowUs - numFramesPlayedAt;
+ if (timestampAge > kStaleTimestamp100ms) {
+ // This is an audio FIXME.
+ // getTimestamp returns a timestamp which may come from audio mixing threads.
+ // After pausing, the MixerThread may go idle, thus the mTime estimate may
+ // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
+ // the max latency should be about 25ms with an average around 12ms (to be verified).
+ // For safety we use 100ms.
+ ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
+ (long long)nowUs, (long long)numFramesPlayedAt);
+ numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
+ }
+ //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
+ } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
+ numFramesPlayed = 0;
+ numFramesPlayedAt = nowUs;
+ //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
+ // numFramesPlayed, (long long)numFramesPlayedAt);
+ } else { // case 3: transitory at new track or audio fast tracks.
+ res = mAudioSink->getPosition(&numFramesPlayed);
+ CHECK_EQ(res, (status_t)OK);
+ numFramesPlayedAt = nowUs;
+ numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
+ //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
+ }
+
+ // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
+ //CHECK_EQ(numFramesPlayed & (1 << 31), 0); // can't be negative until 12.4 hrs, test
+ int64_t durationUs = (int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()
+ + nowUs - numFramesPlayedAt;
+ if (durationUs < 0) {
+ // Occurs when numFramesPlayed position is very small and the following:
+ // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
+ // numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
+ // (2) In case 3, using getPosition and adding mAudioSink->latency() to
+ // numFramesPlayedAt, by a time amount greater than numFramesPlayed.
+ //
+ // Both of these are transitory conditions.
+ ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs);
+ durationUs = 0;
+ }
+ ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
+ (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
+ return durationUs;
+}
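
A worked example of the common getTimestamp() path: with ts.mPosition = 96,000 frames at 48 kHz (about 2,000,000 us) and a timestamp snapshot taken 10,000 us before nowUs, the function returns roughly 2,010,000 us. The nowUs - numFramesPlayedAt term extrapolates forward from the last snapshot, and the 100 ms staleness cap bounds that extrapolation when the mixer thread has gone idle after a pause.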
+
+void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) {
+ if (mAudioOffloadTornDown) {
+ return;
+ }
+ mAudioOffloadTornDown = true;
+
+ int64_t firstAudioTimeUs;
+ {
+ Mutex::Autolock autoLock(mLock);
+ firstAudioTimeUs = mFirstAnchorTimeMediaUs;
+ }
+
+ int64_t currentPositionUs =
+ firstAudioTimeUs + getPlayedOutAudioDurationUs(ALooper::GetNowUs());
+
+ mAudioSink->stop();
+ mAudioSink->flush();
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatAudioOffloadTearDown);
+ notify->setInt64("positionUs", currentPositionUs);
+ notify->setInt32("reason", reason);
+ notify->post();
+}
+
+void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
+ if (offloadingAudio()) {
+ sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
+ msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
+ msg->post(kOffloadPauseMaxUs);
+ }
+}
+
+void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
+ if (offloadingAudio()) {
+ ++mAudioOffloadPauseTimeoutGeneration;
+ }
+}
+
} // namespace android