Diffstat (limited to 'media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp')
 media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp | 406 +++++++++++-----
 1 file changed, 316 insertions(+), 90 deletions(-)
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index bf5271e..067784b 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -20,11 +20,13 @@
#include "NuPlayerRenderer.h"
-#include "SoftwareRenderer.h"
-
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+
+#include <inttypes.h>
namespace android {
@@ -36,7 +38,6 @@ NuPlayer::Renderer::Renderer(
const sp<AMessage> &notify,
uint32_t flags)
: mAudioSink(sink),
- mSoftRenderer(NULL),
mNotify(notify),
mFlags(flags),
mNumFramesWritten(0),
@@ -44,6 +45,7 @@ NuPlayer::Renderer::Renderer(
mDrainVideoQueuePending(false),
mAudioQueueGeneration(0),
mVideoQueueGeneration(0),
+ mFirstAudioTimeUs(-1),
mAnchorTimeMediaUs(-1),
mAnchorTimeRealUs(-1),
mFlushingAudio(false),
@@ -56,16 +58,16 @@ NuPlayer::Renderer::Renderer(
mVideoRenderingStartGeneration(0),
mAudioRenderingStartGeneration(0),
mLastPositionUpdateUs(-1ll),
- mVideoLateByUs(0ll) {
+ mVideoLateByUs(0ll),
+ mVideoSampleReceived(false) {
}
NuPlayer::Renderer::~Renderer() {
- delete mSoftRenderer;
-}
-
-void NuPlayer::Renderer::setSoftRenderer(SoftwareRenderer *softRenderer) {
- delete mSoftRenderer;
- mSoftRenderer = softRenderer;
+ if (offloadingAudio()) {
+ mAudioSink->stop();
+ mAudioSink->flush();
+ mAudioSink->close();
+ }
}
void NuPlayer::Renderer::queueBuffer(
@@ -92,10 +94,14 @@ void NuPlayer::Renderer::flush(bool audio) {
{
Mutex::Autolock autoLock(mFlushLock);
if (audio) {
- CHECK(!mFlushingAudio);
+ if (mFlushingAudio) {
+ return;
+ }
mFlushingAudio = true;
} else {
- CHECK(!mFlushingVideo);
+ if (mFlushingVideo) {
+ return;
+ }
mFlushingVideo = true;
}
}
@@ -106,6 +112,7 @@ void NuPlayer::Renderer::flush(bool audio) {
}
void NuPlayer::Renderer::signalTimeDiscontinuity() {
+ Mutex::Autolock autoLock(mLock);
// CHECK(mAudioQueue.empty());
// CHECK(mVideoQueue.empty());
mAnchorTimeMediaUs = -1;
@@ -113,6 +120,14 @@ void NuPlayer::Renderer::signalTimeDiscontinuity() {
mSyncQueues = false;
}
+void NuPlayer::Renderer::signalAudioSinkChanged() {
+ (new AMessage(kWhatAudioSinkChanged, id()))->post();
+}
+
+void NuPlayer::Renderer::signalDisableOffloadAudio() {
+ (new AMessage(kWhatDisableOffloadAudio, id()))->post();
+}
+
void NuPlayer::Renderer::pause() {
(new AMessage(kWhatPause, id()))->post();
}
@@ -123,6 +138,12 @@ void NuPlayer::Renderer::resume() {
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
+ case kWhatStopAudioSink:
+ {
+ mAudioSink->stop();
+ break;
+ }
+
case kWhatDrainAudioQueue:
{
int32_t generation;
@@ -149,7 +170,10 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
// Let's give it more data after about half that time
// has elapsed.
- postDrainAudioQueue(delayUs / 2);
+ // kWhatDrainAudioQueue is used for non-offloading mode,
+ // and mLock is used only for offloading mode. Therefore,
+ // no need to acquire mLock here.
+ postDrainAudioQueue_l(delayUs / 2);
}
break;
}
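The kWhatDrainAudioQueue handler above reads a generation number from the message; elsewhere in this patch the stored mAudioQueueGeneration / mVideoQueueGeneration counters are bumped on flush, pause, and offload disable, so drain messages posted before those events are recognized as stale and ignored. A minimal standalone sketch of that pattern, with illustrative names that are not the renderer's actual symbols:

```cpp
// Sketch of the generation-check pattern used by the drain handlers.
// "DrainMsg" and "QueueOwner" are illustrative names only.
#include <cstdint>
#include <cstdio>

struct DrainMsg { int32_t generation; };

struct QueueOwner {
    int32_t mQueueGeneration = 0;

    // Posting side: stamp the message with the current generation.
    DrainMsg makeDrainMsg() const { return DrainMsg{mQueueGeneration}; }

    // A flush/pause bumps the generation, invalidating in-flight messages.
    void flush() { ++mQueueGeneration; }

    // Receiving side: ignore messages from an older generation.
    void onDrain(const DrainMsg &msg) {
        if (msg.generation != mQueueGeneration) {
            std::printf("stale drain request, dropping\n");
            return;
        }
        std::printf("draining queue\n");
    }
};

int main() {
    QueueOwner owner;
    DrainMsg m = owner.makeDrainMsg();
    owner.flush();    // e.g. a flush ran and bumped the generation
    owner.onDrain(m); // prints "stale drain request, dropping"
}
```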
@@ -194,6 +218,12 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatDisableOffloadAudio:
+ {
+ onDisableOffloadAudio();
+ break;
+ }
+
case kWhatPause:
{
onPause();
@@ -206,14 +236,21 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatAudioOffloadTearDown:
+ {
+ onAudioOffloadTearDown();
+ break;
+ }
+
default:
TRESPASS();
break;
}
}
-void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
- if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
+void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
+ if (mDrainAudioQueuePending || mSyncQueues || mPaused
+ || offloadingAudio()) {
return;
}
@@ -227,10 +264,6 @@ void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
msg->post(delayUs);
}
-void NuPlayer::Renderer::signalAudioSinkChanged() {
- (new AMessage(kWhatAudioSinkChanged, id()))->post();
-}
-
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
mAudioRenderingStartGeneration = mAudioQueueGeneration;
mVideoRenderingStartGeneration = mVideoQueueGeneration;
@@ -248,6 +281,110 @@ void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
}
}
+// static
+size_t NuPlayer::Renderer::AudioSinkCallback(
+ MediaPlayerBase::AudioSink * /* audioSink */,
+ void *buffer,
+ size_t size,
+ void *cookie,
+ MediaPlayerBase::AudioSink::cb_event_t event) {
+ NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
+
+ switch (event) {
+ case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
+ {
+ return me->fillAudioBuffer(buffer, size);
+ break;
+ }
+
+ case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
+ {
+ me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
+ break;
+ }
+
+ case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
+ {
+ me->notifyAudioOffloadTearDown();
+ break;
+ }
+ }
+
+ return 0;
+}
+
+size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (!offloadingAudio() || mPaused) {
+ return 0;
+ }
+
+ bool hasEOS = false;
+
+ size_t sizeCopied = 0;
+ bool firstEntry = true;
+ while (sizeCopied < size && !mAudioQueue.empty()) {
+ QueueEntry *entry = &*mAudioQueue.begin();
+
+ if (entry->mBuffer == NULL) { // EOS
+ hasEOS = true;
+ mAudioQueue.erase(mAudioQueue.begin());
+ entry = NULL;
+ break;
+ }
+
+ if (firstEntry && entry->mOffset == 0) {
+ firstEntry = false;
+ int64_t mediaTimeUs;
+ CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+ ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
+ if (mFirstAudioTimeUs == -1) {
+ mFirstAudioTimeUs = mediaTimeUs;
+ }
+
+ uint32_t numFramesPlayed;
+ CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
+
+ // TODO: figure out how to calculate initial latency.
+ // Otherwise, the initial time is not correct till the first sample
+ // is played.
+ mAnchorTimeMediaUs = mFirstAudioTimeUs
+ + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;
+ mAnchorTimeRealUs = ALooper::GetNowUs();
+ }
+
+ size_t copy = entry->mBuffer->size() - entry->mOffset;
+ size_t sizeRemaining = size - sizeCopied;
+ if (copy > sizeRemaining) {
+ copy = sizeRemaining;
+ }
+
+ memcpy((char *)buffer + sizeCopied,
+ entry->mBuffer->data() + entry->mOffset,
+ copy);
+
+ entry->mOffset += copy;
+ if (entry->mOffset == entry->mBuffer->size()) {
+ entry->mNotifyConsumed->post();
+ mAudioQueue.erase(mAudioQueue.begin());
+ entry = NULL;
+ }
+ sizeCopied += copy;
+ notifyIfMediaRenderingStarted();
+ }
+
+ if (sizeCopied != 0) {
+ notifyPosition();
+ }
+
+ if (hasEOS) {
+ (new AMessage(kWhatStopAudioSink, id()))->post();
+ }
+
+ return sizeCopied;
+}
+
bool NuPlayer::Renderer::onDrainAudioQueue() {
uint32_t numFramesPlayed;
if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
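In offload mode, fillAudioBuffer() above derives the shared clock anchor from the first queued audio timestamp plus the sink's reported playback position: mAnchorTimeMediaUs = mFirstAudioTimeUs + (numFramesPlayed * msecsPerFrame()) * 1000. A worked example of that computation; the sample rate and timestamps below are assumptions for illustration, not values from the patch:

```cpp
// Worked example of the offload-mode anchor computation from fillAudioBuffer().
// All values below are assumptions for illustration, not taken from the patch.
#include <cstdint>
#include <cstdio>

int main() {
    const int64_t  firstAudioTimeUs = 2000000; // media time of the first queued buffer: 2.0 s
    const uint32_t numFramesPlayed  = 24000;   // frames the AudioSink reports as played
    const int64_t  sampleRate       = 48000;   // assumed sink rate; msecsPerFrame() == 1000.0/48000

    // Equivalent to: mFirstAudioTimeUs + (numFramesPlayed * msecsPerFrame()) * 1000,
    // written in integer math so the example is exact.
    int64_t anchorTimeMediaUs =
            firstAudioTimeUs + (int64_t)numFramesPlayed * 1000000 / sampleRate;

    std::printf("anchor media time = %lld us\n", (long long)anchorTimeMediaUs);
    // Prints 2500000 us: the sink has played 0.5 s of a stream whose first
    // sample was stamped at media time 2.0 s, so "now" maps to media time 2.5 s.
}
```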
@@ -274,8 +411,11 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
if (entry->mBuffer == NULL) {
// EOS
-
- notifyEOS(true /* audio */, entry->mFinalResult);
+ int64_t postEOSDelayUs = 0;
+ if (mAudioSink->needsTrailingPadding()) {
+ postEOSDelayUs = getAudioPendingPlayoutUs() + 1000 * mAudioSink->latency();
+ }
+ notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
mAudioQueue.erase(mAudioQueue.begin());
entry = NULL;
@@ -285,26 +425,11 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
if (entry->mOffset == 0) {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
-
ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
-
mAnchorTimeMediaUs = mediaTimeUs;
- uint32_t numFramesPlayed;
- CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
-
- uint32_t numFramesPendingPlayout =
- mNumFramesWritten - numFramesPlayed;
-
- int64_t realTimeOffsetUs =
- (mAudioSink->latency() / 2 /* XXX */
- + numFramesPendingPlayout
- * mAudioSink->msecsPerFrame()) * 1000ll;
-
- // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);
-
- mAnchorTimeRealUs =
- ALooper::GetNowUs() + realTimeOffsetUs;
+ mAnchorTimeRealUs = ALooper::GetNowUs()
+ + getAudioPendingPlayoutUs() + 1000 * mAudioSink->latency() / 2;
}
size_t copy = entry->mBuffer->size() - entry->mOffset;
@@ -312,11 +437,13 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
copy = numBytesAvailableToWrite;
}
- CHECK_EQ(mAudioSink->write(
- entry->mBuffer->data() + entry->mOffset, copy),
- (ssize_t)copy);
+ ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
+ if (written < 0) {
+ // An error in AudioSink write is fatal here.
+ LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+ }
- entry->mOffset += copy;
+ entry->mOffset += written;
if (entry->mOffset == entry->mBuffer->size()) {
entry->mNotifyConsumed->post();
mAudioQueue.erase(mAudioQueue.begin());
@@ -324,20 +451,50 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
entry = NULL;
}
- numBytesAvailableToWrite -= copy;
- size_t copiedFrames = copy / mAudioSink->frameSize();
+ numBytesAvailableToWrite -= written;
+ size_t copiedFrames = written / mAudioSink->frameSize();
mNumFramesWritten += copiedFrames;
notifyIfMediaRenderingStarted();
- }
+ if (written != (ssize_t)copy) {
+ // A short count was received from AudioSink::write()
+ //
+ // AudioSink write should block until exactly the number of bytes are delivered.
+ // But it may return with a short count (without an error) when:
+ //
+ // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
+ // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
+
+ // (Case 1)
+ // Must be a multiple of the frame size. If it is not a multiple of a frame size, it
+ // needs to fail, as we should not carry over fractional frames between calls.
+ CHECK_EQ(copy % mAudioSink->frameSize(), 0);
+
+ // (Case 2)
+ // Return early to the caller.
+ // Beware of calling immediately again as this may busy-loop if you are not careful.
+ ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
+ break;
+ }
+ }
notifyPosition();
return !mAudioQueue.empty();
}
+int64_t NuPlayer::Renderer::getAudioPendingPlayoutUs() {
+ uint32_t numFramesPlayed;
+ CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
+
+ uint32_t numFramesPendingPlayout = mNumFramesWritten - numFramesPlayed;
+ return numFramesPendingPlayout * mAudioSink->msecsPerFrame() * 1000;
+}
+
void NuPlayer::Renderer::postDrainVideoQueue() {
- if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
+ if (mDrainVideoQueuePending
+ || mSyncQueues
+ || (mPaused && mVideoSampleReceived)) {
return;
}
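getAudioPendingPlayoutUs(), added above, converts the frames that have been written to the AudioSink but not yet played into microseconds. onDrainAudioQueue() uses it both to place the real-time anchor and, when the sink needsTrailingPadding(), to delay the EOS notification until the buffered audio has actually drained. A worked example with assumed numbers, not values from the patch:

```cpp
// Worked example of getAudioPendingPlayoutUs() and the post-EOS delay used in
// onDrainAudioQueue(). The numbers are assumptions for illustration only.
#include <cstdint>
#include <cstdio>

int main() {
    const uint32_t numFramesWritten = 96000;  // frames handed to the AudioSink so far
    const uint32_t numFramesPlayed  = 91200;  // frames the sink reports as played
    const int64_t  sampleRate       = 48000;  // assumed; msecsPerFrame() == 1000.0/48000
    const int64_t  sinkLatencyMs    = 80;     // assumed AudioSink::latency() value

    // getAudioPendingPlayoutUs(): frames written but not yet played, in microseconds.
    // Equivalent to numFramesPendingPlayout * msecsPerFrame() * 1000, in integer math.
    uint32_t pendingFrames    = numFramesWritten - numFramesPlayed;             // 4800
    int64_t  pendingPlayoutUs = (int64_t)pendingFrames * 1000000 / sampleRate;  // 100000 us

    // When the sink needsTrailingPadding(), EOS is reported only after the queued
    // data plus the sink latency have drained, so the player does not signal EOS
    // while audio is still audible.
    int64_t postEOSDelayUs = pendingPlayoutUs + 1000 * sinkLatencyMs;

    std::printf("pending playout = %lld us, post-EOS delay = %lld us\n",
                (long long)pendingPlayoutUs, (long long)postEOSDelayUs);
    // pending playout = 100000 us, post-EOS delay = 180000 us
}
```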
@@ -379,6 +536,7 @@ void NuPlayer::Renderer::postDrainVideoQueue() {
}
}
+ ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
msg->post(delayUs);
mDrainVideoQueuePending = true;
@@ -415,19 +573,22 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
}
- mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
- bool tooLate = (mVideoLateByUs > 40000);
+ bool tooLate = false;
- if (tooLate) {
- ALOGV("video late by %lld us (%.2f secs)",
- mVideoLateByUs, mVideoLateByUs / 1E6);
- } else {
- ALOGV("rendering video at media time %.2f secs",
- (mFlags & FLAG_REAL_TIME ? realTimeUs :
- (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
- if (mSoftRenderer != NULL) {
- mSoftRenderer->render(entry->mBuffer->data(), entry->mBuffer->size(), NULL);
+ if (!mPaused) {
+ mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
+ tooLate = (mVideoLateByUs > 40000);
+
+ if (tooLate) {
+ ALOGV("video late by %lld us (%.2f secs)",
+ mVideoLateByUs, mVideoLateByUs / 1E6);
+ } else {
+ ALOGV("rendering video at media time %.2f secs",
+ (mFlags & FLAG_REAL_TIME ? realTimeUs :
+ (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
}
+ } else {
+ mVideoLateByUs = 0ll;
}
entry->mNotifyConsumed->setInt32("render", !tooLate);
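With this change, onDrainVideoQueue() computes lateness only while playing: the frame's media time is mapped to real time via the audio-derived anchor, and a frame more than 40 ms behind schedule is marked render=false and dropped. A worked example of that mapping and drop decision; every timestamp below is an assumption for illustration:

```cpp
// Worked example of the A/V sync lateness check in onDrainVideoQueue().
// All timestamps are assumptions for illustration, not from the patch.
#include <cstdint>
#include <cstdio>

int main() {
    // Anchor established by the audio path: media time 2.0 s corresponded to
    // real (system) time 10.0 s.
    const int64_t anchorTimeMediaUs = 2000000;
    const int64_t anchorTimeRealUs  = 10000000;

    const int64_t mediaTimeUs = 2200000;   // this video frame's timestamp
    const int64_t nowUs       = 10250000;  // assumed ALooper::GetNowUs() value

    // Same mapping as the renderer: when this frame *should* appear on screen.
    int64_t realTimeUs    = mediaTimeUs - anchorTimeMediaUs + anchorTimeRealUs; // 10.200 s
    int64_t videoLateByUs = nowUs - realTimeUs;                                 // 50000 us

    bool tooLate = (videoLateByUs > 40000);  // 40 ms threshold from the patch
    std::printf("late by %lld us -> %s\n", (long long)videoLateByUs,
                tooLate ? "drop (render=false)" : "render");
    // late by 50000 us -> drop (render=false)
}
```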
@@ -435,12 +596,15 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
mVideoQueue.erase(mVideoQueue.begin());
entry = NULL;
- if (!mVideoRenderingStarted) {
- mVideoRenderingStarted = true;
- notifyVideoRenderingStart();
- }
+ mVideoSampleReceived = true;
- notifyIfMediaRenderingStarted();
+ if (!mPaused) {
+ if (!mVideoRenderingStarted) {
+ mVideoRenderingStarted = true;
+ notifyVideoRenderingStart();
+ }
+ notifyIfMediaRenderingStarted();
+ }
notifyPosition();
}
@@ -451,12 +615,16 @@ void NuPlayer::Renderer::notifyVideoRenderingStart() {
notify->post();
}
-void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
+void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatEOS);
notify->setInt32("audio", static_cast<int32_t>(audio));
notify->setInt32("finalResult", finalResult);
- notify->post();
+ notify->post(delayUs);
+}
+
+void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
+ (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
@@ -486,13 +654,15 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
entry.mFinalResult = OK;
if (audio) {
+ Mutex::Autolock autoLock(mLock);
mAudioQueue.push_back(entry);
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
} else {
mVideoQueue.push_back(entry);
postDrainVideoQueue();
}
+ Mutex::Autolock autoLock(mLock);
if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
return;
}
@@ -502,7 +672,7 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
// EOS signalled on either queue.
- syncQueuesDone();
+ syncQueuesDone_l();
return;
}
@@ -526,10 +696,10 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
return;
}
- syncQueuesDone();
+ syncQueuesDone_l();
}
-void NuPlayer::Renderer::syncQueuesDone() {
+void NuPlayer::Renderer::syncQueuesDone_l() {
if (!mSyncQueues) {
return;
}
@@ -537,7 +707,7 @@ void NuPlayer::Renderer::syncQueuesDone() {
mSyncQueues = false;
if (!mAudioQueue.empty()) {
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
}
if (!mVideoQueue.empty()) {
@@ -561,14 +731,16 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
entry.mFinalResult = finalResult;
if (audio) {
+ Mutex::Autolock autoLock(mLock);
if (mAudioQueue.empty() && mSyncQueues) {
- syncQueuesDone();
+ syncQueuesDone_l();
}
mAudioQueue.push_back(entry);
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
} else {
if (mVideoQueue.empty() && mSyncQueues) {
- syncQueuesDone();
+ Mutex::Autolock autoLock(mLock);
+ syncQueuesDone_l();
}
mVideoQueue.push_back(entry);
postDrainVideoQueue();
@@ -579,6 +751,15 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
+ {
+ Mutex::Autolock autoLock(mFlushLock);
+ if (audio) {
+ mFlushingAudio = false;
+ } else {
+ mFlushingVideo = false;
+ }
+ }
+
// If we're currently syncing the queues, i.e. dropping audio while
// aligning the first audio/video buffer times and only one of the
// two queues has data, we may starve that queue by not requesting
@@ -587,31 +768,42 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
// corresponding discontinuity on the other queue.
// Therefore we'll stop syncing the queues if at least one of them
// is flushed.
- syncQueuesDone();
+ {
+ Mutex::Autolock autoLock(mLock);
+ syncQueuesDone_l();
+ }
ALOGV("flushing %s", audio ? "audio" : "video");
if (audio) {
- flushQueue(&mAudioQueue);
+ {
+ Mutex::Autolock autoLock(mLock);
+ flushQueue(&mAudioQueue);
- Mutex::Autolock autoLock(mFlushLock);
- mFlushingAudio = false;
+ ++mAudioQueueGeneration;
+ prepareForMediaRenderingStart();
+
+ if (offloadingAudio()) {
+ mFirstAudioTimeUs = -1;
+ }
+ }
mDrainAudioQueuePending = false;
- ++mAudioQueueGeneration;
- prepareForMediaRenderingStart();
+ if (offloadingAudio()) {
+ mAudioSink->pause();
+ mAudioSink->flush();
+ mAudioSink->start();
+ }
} else {
flushQueue(&mVideoQueue);
- Mutex::Autolock autoLock(mFlushLock);
- mFlushingVideo = false;
-
mDrainVideoQueuePending = false;
++mVideoQueueGeneration;
prepareForMediaRenderingStart();
}
+ mVideoSampleReceived = false;
notifyFlushComplete(audio);
}
@@ -661,6 +853,9 @@ bool NuPlayer::Renderer::dropBufferWhileFlushing(
}
void NuPlayer::Renderer::onAudioSinkChanged() {
+ if (offloadingAudio()) {
+ return;
+ }
CHECK(!mDrainAudioQueuePending);
mNumFramesWritten = 0;
uint32_t written;
@@ -669,6 +864,12 @@ void NuPlayer::Renderer::onAudioSinkChanged() {
}
}
+void NuPlayer::Renderer::onDisableOffloadAudio() {
+ Mutex::Autolock autoLock(mLock);
+ mFlags &= ~FLAG_OFFLOAD_AUDIO;
+ ++mAudioQueueGeneration;
+}
+
void NuPlayer::Renderer::notifyPosition() {
if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
return;
@@ -692,15 +893,20 @@ void NuPlayer::Renderer::notifyPosition() {
}
void NuPlayer::Renderer::onPause() {
- CHECK(!mPaused);
+ if (mPaused) {
+ ALOGW("Renderer::onPause() called while already paused!");
+ return;
+ }
+ {
+ Mutex::Autolock autoLock(mLock);
+ ++mAudioQueueGeneration;
+ ++mVideoQueueGeneration;
+ prepareForMediaRenderingStart();
+ mPaused = true;
+ }
mDrainAudioQueuePending = false;
- ++mAudioQueueGeneration;
-
mDrainVideoQueuePending = false;
- ++mVideoQueueGeneration;
-
- prepareForMediaRenderingStart();
if (mHasAudio) {
mAudioSink->pause();
@@ -708,8 +914,6 @@ void NuPlayer::Renderer::onPause() {
ALOGV("now paused audio queue has %d entries, video has %d entries",
mAudioQueue.size(), mVideoQueue.size());
-
- mPaused = true;
}
void NuPlayer::Renderer::onResume() {
@@ -721,10 +925,11 @@ void NuPlayer::Renderer::onResume() {
mAudioSink->start();
}
+ Mutex::Autolock autoLock(mLock);
mPaused = false;
if (!mAudioQueue.empty()) {
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
}
if (!mVideoQueue.empty()) {
@@ -732,5 +937,26 @@ void NuPlayer::Renderer::onResume() {
}
}
+void NuPlayer::Renderer::onAudioOffloadTearDown() {
+ uint32_t numFramesPlayed;
+ CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
+
+ int64_t firstAudioTimeUs;
+ {
+ Mutex::Autolock autoLock(mLock);
+ firstAudioTimeUs = mFirstAudioTimeUs;
+ }
+ int64_t currentPositionUs = firstAudioTimeUs
+ + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;
+
+ mAudioSink->stop();
+ mAudioSink->flush();
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatAudioOffloadTearDown);
+ notify->setInt64("positionUs", currentPositionUs);
+ notify->post();
+}
+
} // namespace android