Diffstat (limited to 'media/libmediaplayerservice/nuplayer')
-rw-r--r--  media/libmediaplayerservice/nuplayer/Android.mk                     |    4
-rw-r--r--  media/libmediaplayerservice/nuplayer/GenericSource.cpp              | 1226
-rw-r--r--  media/libmediaplayerservice/nuplayer/GenericSource.h                |  143
-rw-r--r--  media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp             |   38
-rw-r--r--  media/libmediaplayerservice/nuplayer/HTTPLiveSource.h               |   11
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.cpp                   | 1341
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.h                     |   75
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp            | 1036
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h              |  101
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp |  266
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h   |   85
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp             |  234
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDriver.h               |   17
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp           |  959
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h             |  115
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerSource.h               |   31
-rw-r--r--  media/libmediaplayerservice/nuplayer/RTSPSource.cpp                 |   24
-rw-r--r--  media/libmediaplayerservice/nuplayer/RTSPSource.h                   |    5
-rw-r--r--  media/libmediaplayerservice/nuplayer/StreamingSource.cpp            |    2
-rw-r--r--  media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp              |  144
-rw-r--r--  media/libmediaplayerservice/nuplayer/mp4/MP4Source.h                |   53
21 files changed, 4899 insertions, 1011 deletions
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
index f946c1c..676c0a6 100644
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -6,18 +6,20 @@ LOCAL_SRC_FILES:= \
HTTPLiveSource.cpp \
NuPlayer.cpp \
NuPlayerDecoder.cpp \
+ NuPlayerDecoderPassThrough.cpp \
NuPlayerDriver.cpp \
NuPlayerRenderer.cpp \
NuPlayerStreamListener.cpp \
RTSPSource.cpp \
StreamingSource.cpp \
- mp4/MP4Source.cpp \
LOCAL_C_INCLUDES := \
$(TOP)/frameworks/av/media/libstagefright/httplive \
$(TOP)/frameworks/av/media/libstagefright/include \
$(TOP)/frameworks/av/media/libstagefright/mpeg2ts \
$(TOP)/frameworks/av/media/libstagefright/rtsp \
+ $(TOP)/frameworks/av/media/libstagefright/timedtext \
+ $(TOP)/frameworks/av/media/libmediaplayerservice \
$(TOP)/frameworks/native/include/media/openmax
LOCAL_MODULE:= libstagefright_nuplayer
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index b04e7a6..6859a1a 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -14,10 +14,14 @@
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
+#define LOG_TAG "GenericSource"
+
#include "GenericSource.h"
#include "AnotherPacketSource.h"
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -28,57 +32,158 @@
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include "../../libstagefright/include/DRMExtractor.h"
+#include "../../libstagefright/include/NuCachedSource2.h"
+#include "../../libstagefright/include/WVMExtractor.h"
+#include "../../libstagefright/include/HTTPBase.h"
namespace android {
NuPlayer::GenericSource::GenericSource(
const sp<AMessage> &notify,
- const char *url,
- const KeyedVector<String8, String8> *headers,
bool uidValid,
uid_t uid)
: Source(notify),
+ mFetchSubtitleDataGeneration(0),
+ mFetchTimedTextDataGeneration(0),
mDurationUs(0ll),
- mAudioIsVorbis(false) {
+ mAudioIsVorbis(false),
+ mIsWidevine(false),
+ mUIDValid(uidValid),
+ mUID(uid),
+ mDrmManagerClient(NULL),
+ mMetaDataSize(-1ll),
+ mBitrate(-1ll),
+ mPollBufferingGeneration(0),
+ mPendingReadBufferTypes(0) {
+ resetDataSource();
DataSource::RegisterDefaultSniffers();
+}
+
+void NuPlayer::GenericSource::resetDataSource() {
+ mAudioTimeUs = 0;
+ mVideoTimeUs = 0;
+ mHTTPService.clear();
+ mHttpSource.clear();
+ mUri.clear();
+ mUriHeaders.clear();
+ mFd = -1;
+ mOffset = 0;
+ mLength = 0;
+ setDrmPlaybackStatusIfNeeded(Playback::STOP, 0);
+ mDecryptHandle = NULL;
+ mDrmManagerClient = NULL;
+ mStarted = false;
+ mStopRead = true;
+}
- sp<DataSource> dataSource =
- DataSource::CreateFromURI(url, headers);
- CHECK(dataSource != NULL);
+status_t NuPlayer::GenericSource::setDataSource(
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers) {
+ resetDataSource();
+
+ mHTTPService = httpService;
+ mUri = url;
- initFromDataSource(dataSource);
+ if (headers) {
+ mUriHeaders = *headers;
+ }
+
+ // delay data source creation to prepareAsync() to avoid blocking
+ // the calling thread in setDataSource for any significant time.
+ return OK;
}
-NuPlayer::GenericSource::GenericSource(
- const sp<AMessage> &notify,
- int fd, int64_t offset, int64_t length)
- : Source(notify),
- mDurationUs(0ll),
- mAudioIsVorbis(false) {
- DataSource::RegisterDefaultSniffers();
+status_t NuPlayer::GenericSource::setDataSource(
+ int fd, int64_t offset, int64_t length) {
+ resetDataSource();
- sp<DataSource> dataSource = new FileSource(dup(fd), offset, length);
+ mFd = dup(fd);
+ mOffset = offset;
+ mLength = length;
- initFromDataSource(dataSource);
+ // delay data source creation to prepareAsync() to avoid blocking
+ // the calling thread in setDataSource for any significant time.
+ return OK;
+}
+
+sp<MetaData> NuPlayer::GenericSource::getFileFormatMeta() const {
+ return mFileMeta;
}
-void NuPlayer::GenericSource::initFromDataSource(
- const sp<DataSource> &dataSource) {
- sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
+status_t NuPlayer::GenericSource::initFromDataSource() {
+ sp<MediaExtractor> extractor;
- CHECK(extractor != NULL);
+ CHECK(mDataSource != NULL);
+
+ if (mIsWidevine) {
+ String8 mimeType;
+ float confidence;
+ sp<AMessage> dummy;
+ bool success;
+
+ success = SniffWVM(mDataSource, &mimeType, &confidence, &dummy);
+ if (!success
+ || strcasecmp(
+ mimeType.string(), MEDIA_MIMETYPE_CONTAINER_WVM)) {
+ ALOGE("unsupported widevine mime: %s", mimeType.string());
+ return UNKNOWN_ERROR;
+ }
+
+ mWVMExtractor = new WVMExtractor(mDataSource);
+ mWVMExtractor->setAdaptiveStreamingMode(true);
+ if (mUIDValid) {
+ mWVMExtractor->setUID(mUID);
+ }
+ extractor = mWVMExtractor;
+ } else {
+ extractor = MediaExtractor::Create(mDataSource,
+ mSniffedMIME.empty() ? NULL: mSniffedMIME.c_str());
+ }
+
+ if (extractor == NULL) {
+ return UNKNOWN_ERROR;
+ }
+
+ if (extractor->getDrmFlag()) {
+ checkDrmStatus(mDataSource);
+ }
+
+ mFileMeta = extractor->getMetaData();
+ if (mFileMeta != NULL) {
+ int64_t duration;
+ if (mFileMeta->findInt64(kKeyDuration, &duration)) {
+ mDurationUs = duration;
+ }
+ }
+
+ int32_t totalBitrate = 0;
+
+ size_t numtracks = extractor->countTracks();
+ if (numtracks == 0) {
+ return UNKNOWN_ERROR;
+ }
+
+ for (size_t i = 0; i < numtracks; ++i) {
+ sp<MediaSource> track = extractor->getTrack(i);
- for (size_t i = 0; i < extractor->countTracks(); ++i) {
sp<MetaData> meta = extractor->getTrackMetaData(i);
const char *mime;
CHECK(meta->findCString(kKeyMIMEType, &mime));
- sp<MediaSource> track;
-
+ // Do the string compare immediately with "mime";
+ // we can't assume "mime" would stay valid after another
+ // extractor operation, since some extractors might modify meta
+ // during getTrack() and make it invalid.
if (!strncasecmp(mime, "audio/", 6)) {
if (mAudioTrack.mSource == NULL) {
- mAudioTrack.mSource = track = extractor->getTrack(i);
+ mAudioTrack.mIndex = i;
+ mAudioTrack.mSource = track;
+ mAudioTrack.mPackets =
+ new AnotherPacketSource(mAudioTrack.mSource->getFormat());
if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
mAudioIsVorbis = true;
@@ -88,37 +193,175 @@ void NuPlayer::GenericSource::initFromDataSource(
}
} else if (!strncasecmp(mime, "video/", 6)) {
if (mVideoTrack.mSource == NULL) {
- mVideoTrack.mSource = track = extractor->getTrack(i);
+ mVideoTrack.mIndex = i;
+ mVideoTrack.mSource = track;
+ mVideoTrack.mPackets =
+ new AnotherPacketSource(mVideoTrack.mSource->getFormat());
+
+ // check if the source requires secure buffers
+ int32_t secure;
+ if (meta->findInt32(kKeyRequiresSecureBuffers, &secure)
+ && secure) {
+ mIsWidevine = true;
+ if (mUIDValid) {
+ extractor->setUID(mUID);
+ }
+ }
}
}
if (track != NULL) {
+ mSources.push(track);
int64_t durationUs;
if (meta->findInt64(kKeyDuration, &durationUs)) {
if (durationUs > mDurationUs) {
mDurationUs = durationUs;
}
}
+
+ int32_t bitrate;
+ if (totalBitrate >= 0 && meta->findInt32(kKeyBitRate, &bitrate)) {
+ totalBitrate += bitrate;
+ } else {
+ totalBitrate = -1;
+ }
+ }
+ }
+
+ mBitrate = totalBitrate;
+
+ return OK;
+}
+
+void NuPlayer::GenericSource::checkDrmStatus(const sp<DataSource>& dataSource) {
+ dataSource->getDrmInfo(mDecryptHandle, &mDrmManagerClient);
+ if (mDecryptHandle != NULL) {
+ CHECK(mDrmManagerClient);
+ if (RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
+ sp<AMessage> msg = dupNotify();
+ msg->setInt32("what", kWhatDrmNoLicense);
+ msg->post();
}
}
}
+int64_t NuPlayer::GenericSource::getLastReadPosition() {
+ if (mAudioTrack.mSource != NULL) {
+ return mAudioTimeUs;
+ } else if (mVideoTrack.mSource != NULL) {
+ return mVideoTimeUs;
+ } else {
+ return 0;
+ }
+}
+
+status_t NuPlayer::GenericSource::setBuffers(
+ bool audio, Vector<MediaBuffer *> &buffers) {
+ if (mIsWidevine && !audio) {
+ return mVideoTrack.mSource->setBuffers(buffers);
+ }
+ return INVALID_OPERATION;
+}
+
NuPlayer::GenericSource::~GenericSource() {
+ if (mLooper != NULL) {
+ mLooper->unregisterHandler(id());
+ mLooper->stop();
+ }
}
void NuPlayer::GenericSource::prepareAsync() {
- if (mVideoTrack.mSource != NULL) {
- sp<MetaData> meta = mVideoTrack.mSource->getFormat();
+ if (mLooper == NULL) {
+ mLooper = new ALooper;
+ mLooper->setName("generic");
+ mLooper->start();
+
+ mLooper->registerHandler(this);
+ }
+
+ sp<AMessage> msg = new AMessage(kWhatPrepareAsync, id());
+ msg->post();
+}
+
+void NuPlayer::GenericSource::onPrepareAsync() {
+ // delayed data source creation
+ if (mDataSource == NULL) {
+ if (!mUri.empty()) {
+ const char* uri = mUri.c_str();
+ mIsWidevine = !strncasecmp(uri, "widevine://", 11);
+
+ if (!strncasecmp("http://", uri, 7)
+ || !strncasecmp("https://", uri, 8)
+ || mIsWidevine) {
+ mHttpSource = DataSource::CreateMediaHTTP(mHTTPService);
+ if (mHttpSource == NULL) {
+ ALOGE("Failed to create http source!");
+ notifyPreparedAndCleanup(UNKNOWN_ERROR);
+ return;
+ }
+ }
+
+ mDataSource = DataSource::CreateFromURI(
+ mHTTPService, uri, &mUriHeaders, &mContentType,
+ static_cast<HTTPBase *>(mHttpSource.get()));
+ } else {
+ // set to false first; if the extractor
+ // comes back as secure, set it to true.
+ mIsWidevine = false;
+
+ mDataSource = new FileSource(mFd, mOffset, mLength);
+ }
+
+ if (mDataSource == NULL) {
+ ALOGE("Failed to create data source!");
+ notifyPreparedAndCleanup(UNKNOWN_ERROR);
+ return;
+ }
+
+ if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
+ mCachedSource = static_cast<NuCachedSource2 *>(mDataSource.get());
+ }
+
+ if (mIsWidevine || mCachedSource != NULL) {
+ schedulePollBuffering();
+ }
+ }
+
+ // check initial caching status
+ status_t err = prefillCacheIfNecessary();
+ if (err != OK) {
+ if (err == -EAGAIN) {
+ (new AMessage(kWhatPrepareAsync, id()))->post(200000);
+ } else {
+ ALOGE("Failed to prefill data cache!");
+ notifyPreparedAndCleanup(UNKNOWN_ERROR);
+ }
+ return;
+ }
- int32_t width, height;
- CHECK(meta->findInt32(kKeyWidth, &width));
- CHECK(meta->findInt32(kKeyHeight, &height));
+ // init extractor from data source
+ err = initFromDataSource();
- notifyVideoSizeChanged(width, height);
+ if (err != OK) {
+ ALOGE("Failed to init from data source!");
+ notifyPreparedAndCleanup(err);
+ return;
+ }
+
+ if (mVideoTrack.mSource != NULL) {
+ sp<MetaData> meta = doGetFormatMeta(false /* audio */);
+ sp<AMessage> msg = new AMessage;
+ err = convertMetaDataToMessage(meta, &msg);
+ if (err != OK) {
+ notifyPreparedAndCleanup(err);
+ return;
+ }
+ notifyVideoSizeChanged(msg);
}
notifyFlagsChanged(
- FLAG_CAN_PAUSE
+ (mIsWidevine ? FLAG_SECURE : 0)
+ | FLAG_CAN_PAUSE
| FLAG_CAN_SEEK_BACKWARD
| FLAG_CAN_SEEK_FORWARD
| FLAG_CAN_SEEK);
@@ -126,33 +369,455 @@ void NuPlayer::GenericSource::prepareAsync() {
notifyPrepared();
}
+void NuPlayer::GenericSource::notifyPreparedAndCleanup(status_t err) {
+ if (err != OK) {
+ mMetaDataSize = -1ll;
+ mContentType = "";
+ mSniffedMIME = "";
+ mDataSource.clear();
+ mCachedSource.clear();
+ mHttpSource.clear();
+
+ cancelPollBuffering();
+ }
+ notifyPrepared(err);
+}
+
+status_t NuPlayer::GenericSource::prefillCacheIfNecessary() {
+ CHECK(mDataSource != NULL);
+
+ if (mCachedSource == NULL) {
+ // no prefill if the data source is not cached
+ return OK;
+ }
+
+ // We don't do this for streams that appear to be audio-only,
+ // so that even low-bandwidth streams start playing back
+ // fairly instantly.
+ if (!strncasecmp(mContentType.string(), "audio/", 6)) {
+ return OK;
+ }
+
+ // We're going to prefill the cache before trying to instantiate
+ // the extractor below, as the latter is an operation that otherwise
+ // could block on the datasource for a significant amount of time.
+ // During that time we'd be unable to abort the preparation phase
+ // without this prefill.
+
+ // Initially make sure we have at least 192 KB for the sniff
+ // to complete without blocking.
+ static const size_t kMinBytesForSniffing = 192 * 1024;
+ static const size_t kDefaultMetaSize = 200000;
+
+ status_t finalStatus;
+
+ size_t cachedDataRemaining =
+ mCachedSource->approxDataRemaining(&finalStatus);
+
+ if (finalStatus != OK || (mMetaDataSize >= 0
+ && (off64_t)cachedDataRemaining >= mMetaDataSize)) {
+ ALOGV("stop caching, status %d, "
+ "metaDataSize %lld, cachedDataRemaining %zu",
+ finalStatus, mMetaDataSize, cachedDataRemaining);
+ return OK;
+ }
+
+ ALOGV("now cached %zu bytes of data", cachedDataRemaining);
+
+ if (mMetaDataSize < 0
+ && cachedDataRemaining >= kMinBytesForSniffing) {
+ String8 tmp;
+ float confidence;
+ sp<AMessage> meta;
+ if (!mCachedSource->sniff(&tmp, &confidence, &meta)) {
+ return UNKNOWN_ERROR;
+ }
+
+ // We successfully identified the file's extractor;
+ // remember this mime type so we don't have to
+ // sniff it again when we call MediaExtractor::Create().
+ mSniffedMIME = tmp.string();
+
+ if (meta == NULL
+ || !meta->findInt64("meta-data-size",
+ reinterpret_cast<int64_t*>(&mMetaDataSize))) {
+ mMetaDataSize = kDefaultMetaSize;
+ }
+
+ if (mMetaDataSize < 0ll) {
+ ALOGE("invalid metaDataSize = %lld bytes", mMetaDataSize);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ return -EAGAIN;
+}
+
void NuPlayer::GenericSource::start() {
ALOGI("start");
+ mStopRead = false;
if (mAudioTrack.mSource != NULL) {
CHECK_EQ(mAudioTrack.mSource->start(), (status_t)OK);
- mAudioTrack.mPackets =
- new AnotherPacketSource(mAudioTrack.mSource->getFormat());
-
- readBuffer(true /* audio */);
+ postReadBuffer(MEDIA_TRACK_TYPE_AUDIO);
}
if (mVideoTrack.mSource != NULL) {
CHECK_EQ(mVideoTrack.mSource->start(), (status_t)OK);
- mVideoTrack.mPackets =
- new AnotherPacketSource(mVideoTrack.mSource->getFormat());
+ postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
+ }
- readBuffer(false /* audio */);
+ setDrmPlaybackStatusIfNeeded(Playback::START, getLastReadPosition() / 1000);
+ mStarted = true;
+}
+
+void NuPlayer::GenericSource::stop() {
+ // nothing to do, just account for DRM playback status
+ setDrmPlaybackStatusIfNeeded(Playback::STOP, 0);
+ mStarted = false;
+ if (mIsWidevine) {
+ // For a widevine source we need to prevent any further reads.
+ sp<AMessage> msg = new AMessage(kWhatStopWidevine, id());
+ sp<AMessage> response;
+ (void) msg->postAndAwaitResponse(&response);
}
}
+void NuPlayer::GenericSource::pause() {
+ // nothing to do, just account for DRM playback status
+ setDrmPlaybackStatusIfNeeded(Playback::PAUSE, 0);
+ mStarted = false;
+}
+
+void NuPlayer::GenericSource::resume() {
+ // nothing to do, just account for DRM playback status
+ setDrmPlaybackStatusIfNeeded(Playback::START, getLastReadPosition() / 1000);
+ mStarted = true;
+}
+
+void NuPlayer::GenericSource::disconnect() {
+ if (mDataSource != NULL) {
+ // disconnect data source
+ if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
+ static_cast<NuCachedSource2 *>(mDataSource.get())->disconnect();
+ }
+ } else if (mHttpSource != NULL) {
+ static_cast<HTTPBase *>(mHttpSource.get())->disconnect();
+ }
+}
+
+void NuPlayer::GenericSource::setDrmPlaybackStatusIfNeeded(int playbackStatus, int64_t position) {
+ if (mDecryptHandle != NULL) {
+ mDrmManagerClient->setPlaybackStatus(mDecryptHandle, playbackStatus, position);
+ }
+ mSubtitleTrack.mPackets = new AnotherPacketSource(NULL);
+ mTimedTextTrack.mPackets = new AnotherPacketSource(NULL);
+}
+
status_t NuPlayer::GenericSource::feedMoreTSData() {
return OK;
}
+void NuPlayer::GenericSource::schedulePollBuffering() {
+ sp<AMessage> msg = new AMessage(kWhatPollBuffering, id());
+ msg->setInt32("generation", mPollBufferingGeneration);
+ msg->post(1000000ll);
+}
+
+void NuPlayer::GenericSource::cancelPollBuffering() {
+ ++mPollBufferingGeneration;
+}
+
+void NuPlayer::GenericSource::notifyBufferingUpdate(int percentage) {
+ sp<AMessage> msg = dupNotify();
+ msg->setInt32("what", kWhatBufferingUpdate);
+ msg->setInt32("percentage", percentage);
+ msg->post();
+}
+
+void NuPlayer::GenericSource::onPollBuffering() {
+ status_t finalStatus = UNKNOWN_ERROR;
+ int64_t cachedDurationUs = 0ll;
+
+ if (mCachedSource != NULL) {
+ size_t cachedDataRemaining =
+ mCachedSource->approxDataRemaining(&finalStatus);
+
+ if (finalStatus == OK) {
+ off64_t size;
+ int64_t bitrate = 0ll;
+ if (mDurationUs > 0 && mCachedSource->getSize(&size) == OK) {
+ bitrate = size * 8000000ll / mDurationUs;
+ } else if (mBitrate > 0) {
+ bitrate = mBitrate;
+ }
+ if (bitrate > 0) {
+ cachedDurationUs = cachedDataRemaining * 8000000ll / bitrate;
+ }
+ }
+ } else if (mWVMExtractor != NULL) {
+ cachedDurationUs
+ = mWVMExtractor->getCachedDurationUs(&finalStatus);
+ }
+
+ if (finalStatus == ERROR_END_OF_STREAM) {
+ notifyBufferingUpdate(100);
+ cancelPollBuffering();
+ return;
+ } else if (cachedDurationUs > 0ll && mDurationUs > 0ll) {
+ int percentage = 100.0 * cachedDurationUs / mDurationUs;
+ if (percentage > 100) {
+ percentage = 100;
+ }
+
+ notifyBufferingUpdate(percentage);
+ }
+
+ schedulePollBuffering();
+}
+
+
+void NuPlayer::GenericSource::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatPrepareAsync:
+ {
+ onPrepareAsync();
+ break;
+ }
+ case kWhatFetchSubtitleData:
+ {
+ fetchTextData(kWhatSendSubtitleData, MEDIA_TRACK_TYPE_SUBTITLE,
+ mFetchSubtitleDataGeneration, mSubtitleTrack.mPackets, msg);
+ break;
+ }
+
+ case kWhatFetchTimedTextData:
+ {
+ fetchTextData(kWhatSendTimedTextData, MEDIA_TRACK_TYPE_TIMEDTEXT,
+ mFetchTimedTextDataGeneration, mTimedTextTrack.mPackets, msg);
+ break;
+ }
+
+ case kWhatSendSubtitleData:
+ {
+ sendTextData(kWhatSubtitleData, MEDIA_TRACK_TYPE_SUBTITLE,
+ mFetchSubtitleDataGeneration, mSubtitleTrack.mPackets, msg);
+ break;
+ }
+
+ case kWhatSendTimedTextData:
+ {
+ sendTextData(kWhatTimedTextData, MEDIA_TRACK_TYPE_TIMEDTEXT,
+ mFetchTimedTextDataGeneration, mTimedTextTrack.mPackets, msg);
+ break;
+ }
+
+ case kWhatChangeAVSource:
+ {
+ int32_t trackIndex;
+ CHECK(msg->findInt32("trackIndex", &trackIndex));
+ const sp<MediaSource> source = mSources.itemAt(trackIndex);
+
+ Track* track;
+ const char *mime;
+ media_track_type trackType, counterpartType;
+ sp<MetaData> meta = source->getFormat();
+ meta->findCString(kKeyMIMEType, &mime);
+ if (!strncasecmp(mime, "audio/", 6)) {
+ track = &mAudioTrack;
+ trackType = MEDIA_TRACK_TYPE_AUDIO;
+ counterpartType = MEDIA_TRACK_TYPE_VIDEO;
+ } else {
+ CHECK(!strncasecmp(mime, "video/", 6));
+ track = &mVideoTrack;
+ trackType = MEDIA_TRACK_TYPE_VIDEO;
+ counterpartType = MEDIA_TRACK_TYPE_AUDIO;
+ }
+
+
+ if (track->mSource != NULL) {
+ track->mSource->stop();
+ }
+ track->mSource = source;
+ track->mSource->start();
+ track->mIndex = trackIndex;
+
+ status_t avail;
+ if (!track->mPackets->hasBufferAvailable(&avail)) {
+ // sync from other source
+ TRESPASS();
+ break;
+ }
+
+ int64_t timeUs, actualTimeUs;
+ const bool formatChange = true;
+ sp<AMessage> latestMeta = track->mPackets->getLatestEnqueuedMeta();
+ CHECK(latestMeta != NULL && latestMeta->findInt64("timeUs", &timeUs));
+ readBuffer(trackType, timeUs, &actualTimeUs, formatChange);
+ readBuffer(counterpartType, -1, NULL, formatChange);
+ ALOGV("timeUs %lld actualTimeUs %lld", timeUs, actualTimeUs);
+
+ break;
+ }
+ case kWhatPollBuffering:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ if (generation == mPollBufferingGeneration) {
+ onPollBuffering();
+ }
+ break;
+ }
+
+ case kWhatGetFormat:
+ {
+ onGetFormatMeta(msg);
+ break;
+ }
+
+ case kWhatGetSelectedTrack:
+ {
+ onGetSelectedTrack(msg);
+ break;
+ }
+
+ case kWhatSelectTrack:
+ {
+ onSelectTrack(msg);
+ break;
+ }
+
+ case kWhatSeek:
+ {
+ onSeek(msg);
+ break;
+ }
+
+ case kWhatReadBuffer:
+ {
+ onReadBuffer(msg);
+ break;
+ }
+
+ case kWhatStopWidevine:
+ {
+ // mStopRead is only used for Widevine to prevent the video source
+ // from being read while the associated video decoder is shutting down.
+ mStopRead = true;
+ if (mVideoTrack.mSource != NULL) {
+ mVideoTrack.mPackets->clear();
+ }
+ sp<AMessage> response = new AMessage;
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+ break;
+ }
+ default:
+ Source::onMessageReceived(msg);
+ break;
+ }
+}
+
+void NuPlayer::GenericSource::fetchTextData(
+ uint32_t sendWhat,
+ media_track_type type,
+ int32_t curGen,
+ sp<AnotherPacketSource> packets,
+ sp<AMessage> msg) {
+ int32_t msgGeneration;
+ CHECK(msg->findInt32("generation", &msgGeneration));
+ if (msgGeneration != curGen) {
+ // stale
+ return;
+ }
+
+ int32_t avail;
+ if (packets->hasBufferAvailable(&avail)) {
+ return;
+ }
+
+ int64_t timeUs;
+ CHECK(msg->findInt64("timeUs", &timeUs));
+
+ int64_t subTimeUs;
+ readBuffer(type, timeUs, &subTimeUs);
+
+ int64_t delayUs = subTimeUs - timeUs;
+ if (msg->what() == kWhatFetchSubtitleData) {
+ const int64_t oneSecUs = 1000000ll;
+ delayUs -= oneSecUs;
+ }
+ sp<AMessage> msg2 = new AMessage(sendWhat, id());
+ msg2->setInt32("generation", msgGeneration);
+ msg2->post(delayUs < 0 ? 0 : delayUs);
+}
+
+void NuPlayer::GenericSource::sendTextData(
+ uint32_t what,
+ media_track_type type,
+ int32_t curGen,
+ sp<AnotherPacketSource> packets,
+ sp<AMessage> msg) {
+ int32_t msgGeneration;
+ CHECK(msg->findInt32("generation", &msgGeneration));
+ if (msgGeneration != curGen) {
+ // stale
+ return;
+ }
+
+ int64_t subTimeUs;
+ if (packets->nextBufferTime(&subTimeUs) != OK) {
+ return;
+ }
+
+ int64_t nextSubTimeUs;
+ readBuffer(type, -1, &nextSubTimeUs);
+
+ sp<ABuffer> buffer;
+ status_t dequeueStatus = packets->dequeueAccessUnit(&buffer);
+ if (dequeueStatus == OK) {
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", what);
+ notify->setBuffer("buffer", buffer);
+ notify->post();
+
+ const int64_t delayUs = nextSubTimeUs - subTimeUs;
+ msg->post(delayUs < 0 ? 0 : delayUs);
+ }
+}
+
sp<MetaData> NuPlayer::GenericSource::getFormatMeta(bool audio) {
+ sp<AMessage> msg = new AMessage(kWhatGetFormat, id());
+ msg->setInt32("audio", audio);
+
+ sp<AMessage> response;
+ void *format;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findPointer("format", &format));
+ return (MetaData *)format;
+ } else {
+ return NULL;
+ }
+}
+
+void NuPlayer::GenericSource::onGetFormatMeta(sp<AMessage> msg) const {
+ int32_t audio;
+ CHECK(msg->findInt32("audio", &audio));
+
+ sp<AMessage> response = new AMessage;
+ sp<MetaData> format = doGetFormatMeta(audio);
+ response->setPointer("format", format.get());
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+}
+
+sp<MetaData> NuPlayer::GenericSource::doGetFormatMeta(bool audio) const {
sp<MediaSource> source = audio ? mAudioTrack.mSource : mVideoTrack.mSource;
if (source == NULL) {
@@ -170,14 +835,53 @@ status_t NuPlayer::GenericSource::dequeueAccessUnit(
return -EWOULDBLOCK;
}
+ if (mIsWidevine && !audio) {
+ // try to read a buffer as we may not have been able to read one the last time
+ postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
+ }
+
status_t finalResult;
if (!track->mPackets->hasBufferAvailable(&finalResult)) {
- return finalResult == OK ? -EWOULDBLOCK : finalResult;
+ return (finalResult == OK ? -EWOULDBLOCK : finalResult);
}
status_t result = track->mPackets->dequeueAccessUnit(accessUnit);
- readBuffer(audio, -1ll);
+ if (!track->mPackets->hasBufferAvailable(&finalResult)) {
+ postReadBuffer(audio? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+ }
+
+ if (result != OK) {
+ if (mSubtitleTrack.mSource != NULL) {
+ mSubtitleTrack.mPackets->clear();
+ mFetchSubtitleDataGeneration++;
+ }
+ if (mTimedTextTrack.mSource != NULL) {
+ mTimedTextTrack.mPackets->clear();
+ mFetchTimedTextDataGeneration++;
+ }
+ return result;
+ }
+
+ int64_t timeUs;
+ status_t eosResult; // ignored
+ CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
+
+ if (mSubtitleTrack.mSource != NULL
+ && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) {
+ sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, id());
+ msg->setInt64("timeUs", timeUs);
+ msg->setInt32("generation", mFetchSubtitleDataGeneration);
+ msg->post();
+ }
+
+ if (mTimedTextTrack.mSource != NULL
+ && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) {
+ sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, id());
+ msg->setInt64("timeUs", timeUs);
+ msg->setInt32("generation", mFetchTimedTextDataGeneration);
+ msg->post();
+ }
return result;
}
@@ -187,25 +891,387 @@ status_t NuPlayer::GenericSource::getDuration(int64_t *durationUs) {
return OK;
}
+size_t NuPlayer::GenericSource::getTrackCount() const {
+ return mSources.size();
+}
+
+sp<AMessage> NuPlayer::GenericSource::getTrackInfo(size_t trackIndex) const {
+ size_t trackCount = mSources.size();
+ if (trackIndex >= trackCount) {
+ return NULL;
+ }
+
+ sp<AMessage> format = new AMessage();
+ sp<MetaData> meta = mSources.itemAt(trackIndex)->getFormat();
+
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ int32_t trackType;
+ if (!strncasecmp(mime, "video/", 6)) {
+ trackType = MEDIA_TRACK_TYPE_VIDEO;
+ } else if (!strncasecmp(mime, "audio/", 6)) {
+ trackType = MEDIA_TRACK_TYPE_AUDIO;
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
+ trackType = MEDIA_TRACK_TYPE_TIMEDTEXT;
+ } else {
+ trackType = MEDIA_TRACK_TYPE_UNKNOWN;
+ }
+ format->setInt32("type", trackType);
+
+ const char *lang;
+ if (!meta->findCString(kKeyMediaLanguage, &lang)) {
+ lang = "und";
+ }
+ format->setString("language", lang);
+
+ if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+ format->setString("mime", mime);
+
+ int32_t isAutoselect = 1, isDefault = 0, isForced = 0;
+ meta->findInt32(kKeyTrackIsAutoselect, &isAutoselect);
+ meta->findInt32(kKeyTrackIsDefault, &isDefault);
+ meta->findInt32(kKeyTrackIsForced, &isForced);
+
+ format->setInt32("auto", !!isAutoselect);
+ format->setInt32("default", !!isDefault);
+ format->setInt32("forced", !!isForced);
+ }
+
+ return format;
+}
+
+ssize_t NuPlayer::GenericSource::getSelectedTrack(media_track_type type) const {
+ sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, id());
+ msg->setInt32("type", type);
+
+ sp<AMessage> response;
+ int32_t index;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("index", &index));
+ return index;
+ } else {
+ return -1;
+ }
+}
+
+void NuPlayer::GenericSource::onGetSelectedTrack(sp<AMessage> msg) const {
+ int32_t tmpType;
+ CHECK(msg->findInt32("type", &tmpType));
+ media_track_type type = (media_track_type)tmpType;
+
+ sp<AMessage> response = new AMessage;
+ ssize_t index = doGetSelectedTrack(type);
+ response->setInt32("index", index);
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+}
+
+ssize_t NuPlayer::GenericSource::doGetSelectedTrack(media_track_type type) const {
+ const Track *track = NULL;
+ switch (type) {
+ case MEDIA_TRACK_TYPE_VIDEO:
+ track = &mVideoTrack;
+ break;
+ case MEDIA_TRACK_TYPE_AUDIO:
+ track = &mAudioTrack;
+ break;
+ case MEDIA_TRACK_TYPE_TIMEDTEXT:
+ track = &mTimedTextTrack;
+ break;
+ case MEDIA_TRACK_TYPE_SUBTITLE:
+ track = &mSubtitleTrack;
+ break;
+ default:
+ break;
+ }
+
+ if (track != NULL && track->mSource != NULL) {
+ return track->mIndex;
+ }
+
+ return -1;
+}
+
+status_t NuPlayer::GenericSource::selectTrack(size_t trackIndex, bool select) {
+ ALOGV("%s track: %zu", select ? "select" : "deselect", trackIndex);
+ sp<AMessage> msg = new AMessage(kWhatSelectTrack, id());
+ msg->setInt32("trackIndex", trackIndex);
+ msg->setInt32("select", select);
+
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ }
+
+ return err;
+}
+
+void NuPlayer::GenericSource::onSelectTrack(sp<AMessage> msg) {
+ int32_t trackIndex, select;
+ CHECK(msg->findInt32("trackIndex", &trackIndex));
+ CHECK(msg->findInt32("select", &select));
+
+ sp<AMessage> response = new AMessage;
+ status_t err = doSelectTrack(trackIndex, select);
+ response->setInt32("err", err);
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+}
+
+status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select) {
+ if (trackIndex >= mSources.size()) {
+ return BAD_INDEX;
+ }
+
+ if (!select) {
+ Track* track = NULL;
+ if (mSubtitleTrack.mSource != NULL && trackIndex == mSubtitleTrack.mIndex) {
+ track = &mSubtitleTrack;
+ mFetchSubtitleDataGeneration++;
+ } else if (mTimedTextTrack.mSource != NULL && trackIndex == mTimedTextTrack.mIndex) {
+ track = &mTimedTextTrack;
+ mFetchTimedTextDataGeneration++;
+ }
+ if (track == NULL) {
+ return INVALID_OPERATION;
+ }
+ track->mSource->stop();
+ track->mSource = NULL;
+ track->mPackets->clear();
+ return OK;
+ }
+
+ const sp<MediaSource> source = mSources.itemAt(trackIndex);
+ sp<MetaData> meta = source->getFormat();
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+ if (!strncasecmp(mime, "text/", 5)) {
+ bool isSubtitle = strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP);
+ Track *track = isSubtitle ? &mSubtitleTrack : &mTimedTextTrack;
+ if (track->mSource != NULL && track->mIndex == trackIndex) {
+ return OK;
+ }
+ track->mIndex = trackIndex;
+ if (track->mSource != NULL) {
+ track->mSource->stop();
+ }
+ track->mSource = mSources.itemAt(trackIndex);
+ track->mSource->start();
+ if (track->mPackets == NULL) {
+ track->mPackets = new AnotherPacketSource(track->mSource->getFormat());
+ } else {
+ track->mPackets->clear();
+ track->mPackets->setFormat(track->mSource->getFormat());
+
+ }
+
+ if (isSubtitle) {
+ mFetchSubtitleDataGeneration++;
+ } else {
+ mFetchTimedTextDataGeneration++;
+ }
+
+ return OK;
+ } else if (!strncasecmp(mime, "audio/", 6) || !strncasecmp(mime, "video/", 6)) {
+ bool audio = !strncasecmp(mime, "audio/", 6);
+ Track *track = audio ? &mAudioTrack : &mVideoTrack;
+ if (track->mSource != NULL && track->mIndex == trackIndex) {
+ return OK;
+ }
+
+ sp<AMessage> msg = new AMessage(kWhatChangeAVSource, id());
+ msg->setInt32("trackIndex", trackIndex);
+ msg->post();
+ return OK;
+ }
+
+ return INVALID_OPERATION;
+}
+
status_t NuPlayer::GenericSource::seekTo(int64_t seekTimeUs) {
+ sp<AMessage> msg = new AMessage(kWhatSeek, id());
+ msg->setInt64("seekTimeUs", seekTimeUs);
+
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ }
+
+ return err;
+}
+
+void NuPlayer::GenericSource::onSeek(sp<AMessage> msg) {
+ int64_t seekTimeUs;
+ CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
+
+ sp<AMessage> response = new AMessage;
+ status_t err = doSeek(seekTimeUs);
+ response->setInt32("err", err);
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+}
+
+status_t NuPlayer::GenericSource::doSeek(int64_t seekTimeUs) {
+ // If the Widevine source is stopped, do not attempt to read any
+ // more buffers.
+ if (mStopRead) {
+ return INVALID_OPERATION;
+ }
if (mVideoTrack.mSource != NULL) {
int64_t actualTimeUs;
- readBuffer(false /* audio */, seekTimeUs, &actualTimeUs);
+ readBuffer(MEDIA_TRACK_TYPE_VIDEO, seekTimeUs, &actualTimeUs);
seekTimeUs = actualTimeUs;
}
if (mAudioTrack.mSource != NULL) {
- readBuffer(true /* audio */, seekTimeUs);
+ readBuffer(MEDIA_TRACK_TYPE_AUDIO, seekTimeUs);
}
+ setDrmPlaybackStatusIfNeeded(Playback::START, seekTimeUs / 1000);
+ if (!mStarted) {
+ setDrmPlaybackStatusIfNeeded(Playback::PAUSE, 0);
+ }
return OK;
}
+sp<ABuffer> NuPlayer::GenericSource::mediaBufferToABuffer(
+ MediaBuffer* mb,
+ media_track_type trackType,
+ int64_t *actualTimeUs) {
+ bool audio = trackType == MEDIA_TRACK_TYPE_AUDIO;
+ size_t outLength = mb->range_length();
+
+ if (audio && mAudioIsVorbis) {
+ outLength += sizeof(int32_t);
+ }
+
+ sp<ABuffer> ab;
+ if (mIsWidevine && !audio) {
+ // data is already provided in the buffer
+ ab = new ABuffer(NULL, mb->range_length());
+ mb->add_ref();
+ ab->setMediaBufferBase(mb);
+ } else {
+ ab = new ABuffer(outLength);
+ memcpy(ab->data(),
+ (const uint8_t *)mb->data() + mb->range_offset(),
+ mb->range_length());
+ }
+
+ if (audio && mAudioIsVorbis) {
+ int32_t numPageSamples;
+ if (!mb->meta_data()->findInt32(kKeyValidSamples, &numPageSamples)) {
+ numPageSamples = -1;
+ }
+
+ uint8_t* abEnd = ab->data() + mb->range_length();
+ memcpy(abEnd, &numPageSamples, sizeof(numPageSamples));
+ }
+
+ sp<AMessage> meta = ab->meta();
+
+ int64_t timeUs;
+ CHECK(mb->meta_data()->findInt64(kKeyTime, &timeUs));
+ meta->setInt64("timeUs", timeUs);
+
+ if (trackType == MEDIA_TRACK_TYPE_TIMEDTEXT) {
+ const char *mime;
+ CHECK(mTimedTextTrack.mSource != NULL
+ && mTimedTextTrack.mSource->getFormat()->findCString(kKeyMIMEType, &mime));
+ meta->setString("mime", mime);
+ }
+
+ int64_t durationUs;
+ if (mb->meta_data()->findInt64(kKeyDuration, &durationUs)) {
+ meta->setInt64("durationUs", durationUs);
+ }
+
+ if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+ meta->setInt32("trackIndex", mSubtitleTrack.mIndex);
+ }
+
+ if (actualTimeUs) {
+ *actualTimeUs = timeUs;
+ }
+
+ mb->release();
+ mb = NULL;
+
+ return ab;
+}
+
+void NuPlayer::GenericSource::postReadBuffer(media_track_type trackType) {
+ Mutex::Autolock _l(mReadBufferLock);
+
+ if ((mPendingReadBufferTypes & (1 << trackType)) == 0) {
+ mPendingReadBufferTypes |= (1 << trackType);
+ sp<AMessage> msg = new AMessage(kWhatReadBuffer, id());
+ msg->setInt32("trackType", trackType);
+ msg->post();
+ }
+}
+
+void NuPlayer::GenericSource::onReadBuffer(sp<AMessage> msg) {
+ int32_t tmpType;
+ CHECK(msg->findInt32("trackType", &tmpType));
+ media_track_type trackType = (media_track_type)tmpType;
+ {
+ // only protect the variable change, as readBuffer may
+ // take considerable time. This may result in one extra
+ // read being processed, but that is benign.
+ Mutex::Autolock _l(mReadBufferLock);
+ mPendingReadBufferTypes &= ~(1 << trackType);
+ }
+ readBuffer(trackType);
+}
+
void NuPlayer::GenericSource::readBuffer(
- bool audio, int64_t seekTimeUs, int64_t *actualTimeUs) {
- Track *track = audio ? &mAudioTrack : &mVideoTrack;
- CHECK(track->mSource != NULL);
+ media_track_type trackType, int64_t seekTimeUs, int64_t *actualTimeUs, bool formatChange) {
+ // Do not read data if Widevine source is stopped
+ if (mStopRead) {
+ return;
+ }
+ Track *track;
+ size_t maxBuffers = 1;
+ switch (trackType) {
+ case MEDIA_TRACK_TYPE_VIDEO:
+ track = &mVideoTrack;
+ if (mIsWidevine) {
+ maxBuffers = 2;
+ }
+ break;
+ case MEDIA_TRACK_TYPE_AUDIO:
+ track = &mAudioTrack;
+ if (mIsWidevine) {
+ maxBuffers = 8;
+ } else {
+ maxBuffers = 64;
+ }
+ break;
+ case MEDIA_TRACK_TYPE_SUBTITLE:
+ track = &mSubtitleTrack;
+ break;
+ case MEDIA_TRACK_TYPE_TIMEDTEXT:
+ track = &mTimedTextTrack;
+ break;
+ default:
+ TRESPASS();
+ }
+
+ if (track->mSource == NULL) {
+ return;
+ }
if (actualTimeUs) {
*actualTimeUs = seekTimeUs;
@@ -216,64 +1282,56 @@ void NuPlayer::GenericSource::readBuffer(
bool seeking = false;
if (seekTimeUs >= 0) {
- options.setSeekTo(seekTimeUs);
+ options.setSeekTo(seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
seeking = true;
}
- for (;;) {
+ if (mIsWidevine && trackType != MEDIA_TRACK_TYPE_AUDIO) {
+ options.setNonBlocking();
+ }
+
+ for (size_t numBuffers = 0; numBuffers < maxBuffers; ) {
MediaBuffer *mbuf;
status_t err = track->mSource->read(&mbuf, &options);
options.clearSeekTo();
if (err == OK) {
- size_t outLength = mbuf->range_length();
-
- if (audio && mAudioIsVorbis) {
- outLength += sizeof(int32_t);
- }
-
- sp<ABuffer> buffer = new ABuffer(outLength);
-
- memcpy(buffer->data(),
- (const uint8_t *)mbuf->data() + mbuf->range_offset(),
- mbuf->range_length());
-
- if (audio && mAudioIsVorbis) {
- int32_t numPageSamples;
- if (!mbuf->meta_data()->findInt32(
- kKeyValidSamples, &numPageSamples)) {
- numPageSamples = -1;
- }
-
- memcpy(buffer->data() + mbuf->range_length(),
- &numPageSamples,
- sizeof(numPageSamples));
- }
-
int64_t timeUs;
CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
-
- buffer->meta()->setInt64("timeUs", timeUs);
-
- if (actualTimeUs) {
- *actualTimeUs = timeUs;
+ if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+ mAudioTimeUs = timeUs;
+ } else if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
+ mVideoTimeUs = timeUs;
}
- mbuf->release();
- mbuf = NULL;
-
- if (seeking) {
- track->mPackets->queueDiscontinuity(
- ATSParser::DISCONTINUITY_SEEK, NULL);
+ // formatChange && seeking: track whose source is changed during selection
+ // formatChange && !seeking: track whose source is not changed during selection
+ // !formatChange: normal seek
+ if ((seeking || formatChange)
+ && (trackType == MEDIA_TRACK_TYPE_AUDIO
+ || trackType == MEDIA_TRACK_TYPE_VIDEO)) {
+ ATSParser::DiscontinuityType type = formatChange
+ ? (seeking
+ ? ATSParser::DISCONTINUITY_FORMATCHANGE
+ : ATSParser::DISCONTINUITY_NONE)
+ : ATSParser::DISCONTINUITY_SEEK;
+ track->mPackets->queueDiscontinuity(type, NULL, true /* discard */);
}
+ sp<ABuffer> buffer = mediaBufferToABuffer(mbuf, trackType, actualTimeUs);
track->mPackets->queueAccessUnit(buffer);
+ formatChange = false;
+ seeking = false;
+ ++numBuffers;
+ } else if (err == WOULD_BLOCK) {
break;
} else if (err == INFO_FORMAT_CHANGED) {
#if 0
track->mPackets->queueDiscontinuity(
- ATSParser::DISCONTINUITY_FORMATCHANGE, NULL);
+ ATSParser::DISCONTINUITY_FORMATCHANGE,
+ NULL,
+ false /* discard */);
#endif
} else {
track->mPackets->signalEOS(err);
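A note on the buffering math in onPollBuffering() above: it derives a bitrate from the file size and duration (falling back to the track-level mBitrate), converts the cached byte count into a cached duration, and clamps the resulting percentage at 100. Below is a minimal standalone sketch of that estimate; the function and parameter names are illustrative, not part of the NuPlayer API.

#include <cstdint>
#include <cstdio>

// Mirror the estimate in GenericSource::onPollBuffering(): bytes -> bits,
// bits / bitrate -> cached duration, cached duration / total duration -> %.
static int estimateBufferingPercent(
        int64_t cachedBytes, int64_t fileSizeBytes,
        int64_t durationUs, int64_t fallbackBitrate) {
    int64_t bitrate = 0;                                   // bits per second
    if (durationUs > 0 && fileSizeBytes > 0) {
        bitrate = fileSizeBytes * 8000000ll / durationUs;
    } else if (fallbackBitrate > 0) {
        bitrate = fallbackBitrate;
    }
    if (bitrate <= 0 || durationUs <= 0) {
        return -1;                                         // not enough information
    }
    int64_t cachedDurationUs = cachedBytes * 8000000ll / bitrate;
    int percent = (int)(100.0 * cachedDurationUs / durationUs);
    return percent > 100 ? 100 : percent;
}

int main() {
    // 2 MB cached out of a 10 MB file that plays for 60 seconds: ~20%.
    printf("%d%%\n", estimateBufferingPercent(
            2 * 1024 * 1024, 10 * 1024 * 1024, 60 * 1000000ll, -1));
    return 0;
}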
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index 2da680c..f8601ea 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -23,58 +23,175 @@
#include "ATSParser.h"
+#include <media/mediaplayer.h>
+
namespace android {
+class DecryptHandle;
+class DrmManagerClient;
struct AnotherPacketSource;
struct ARTSPController;
struct DataSource;
+struct IMediaHTTPService;
struct MediaSource;
+class MediaBuffer;
+struct NuCachedSource2;
+struct WVMExtractor;
struct NuPlayer::GenericSource : public NuPlayer::Source {
- GenericSource(
- const sp<AMessage> &notify,
+ GenericSource(const sp<AMessage> &notify, bool uidValid, uid_t uid);
+
+ status_t setDataSource(
+ const sp<IMediaHTTPService> &httpService,
const char *url,
- const KeyedVector<String8, String8> *headers,
- bool uidValid = false,
- uid_t uid = 0);
+ const KeyedVector<String8, String8> *headers);
- GenericSource(
- const sp<AMessage> &notify,
- int fd, int64_t offset, int64_t length);
+ status_t setDataSource(int fd, int64_t offset, int64_t length);
virtual void prepareAsync();
virtual void start();
+ virtual void stop();
+ virtual void pause();
+ virtual void resume();
+
+ virtual void disconnect();
virtual status_t feedMoreTSData();
+ virtual sp<MetaData> getFileFormatMeta() const;
+
virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
virtual status_t getDuration(int64_t *durationUs);
+ virtual size_t getTrackCount() const;
+ virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
+ virtual ssize_t getSelectedTrack(media_track_type type) const;
+ virtual status_t selectTrack(size_t trackIndex, bool select);
virtual status_t seekTo(int64_t seekTimeUs);
+ virtual status_t setBuffers(bool audio, Vector<MediaBuffer *> &buffers);
+
protected:
virtual ~GenericSource();
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
virtual sp<MetaData> getFormatMeta(bool audio);
private:
+ enum {
+ kWhatPrepareAsync,
+ kWhatFetchSubtitleData,
+ kWhatFetchTimedTextData,
+ kWhatSendSubtitleData,
+ kWhatSendTimedTextData,
+ kWhatChangeAVSource,
+ kWhatPollBuffering,
+ kWhatGetFormat,
+ kWhatGetSelectedTrack,
+ kWhatSelectTrack,
+ kWhatSeek,
+ kWhatReadBuffer,
+ kWhatStopWidevine,
+ };
+
+ Vector<sp<MediaSource> > mSources;
+
struct Track {
+ size_t mIndex;
sp<MediaSource> mSource;
sp<AnotherPacketSource> mPackets;
};
Track mAudioTrack;
+ int64_t mAudioTimeUs;
Track mVideoTrack;
+ int64_t mVideoTimeUs;
+ Track mSubtitleTrack;
+ Track mTimedTextTrack;
+ int32_t mFetchSubtitleDataGeneration;
+ int32_t mFetchTimedTextDataGeneration;
int64_t mDurationUs;
bool mAudioIsVorbis;
-
- void initFromDataSource(const sp<DataSource> &dataSource);
-
+ bool mIsWidevine;
+ bool mUIDValid;
+ uid_t mUID;
+ sp<IMediaHTTPService> mHTTPService;
+ AString mUri;
+ KeyedVector<String8, String8> mUriHeaders;
+ int mFd;
+ int64_t mOffset;
+ int64_t mLength;
+
+ sp<DataSource> mDataSource;
+ sp<NuCachedSource2> mCachedSource;
+ sp<DataSource> mHttpSource;
+ sp<WVMExtractor> mWVMExtractor;
+ sp<MetaData> mFileMeta;
+ DrmManagerClient *mDrmManagerClient;
+ sp<DecryptHandle> mDecryptHandle;
+ bool mStarted;
+ bool mStopRead;
+ String8 mContentType;
+ AString mSniffedMIME;
+ off64_t mMetaDataSize;
+ int64_t mBitrate;
+ int32_t mPollBufferingGeneration;
+ uint32_t mPendingReadBufferTypes;
+ mutable Mutex mReadBufferLock;
+
+ sp<ALooper> mLooper;
+
+ void resetDataSource();
+
+ status_t initFromDataSource();
+ void checkDrmStatus(const sp<DataSource>& dataSource);
+ int64_t getLastReadPosition();
+ void setDrmPlaybackStatusIfNeeded(int playbackStatus, int64_t position);
+
+ status_t prefillCacheIfNecessary();
+
+ void notifyPreparedAndCleanup(status_t err);
+
+ void onGetFormatMeta(sp<AMessage> msg) const;
+ sp<MetaData> doGetFormatMeta(bool audio) const;
+
+ void onGetSelectedTrack(sp<AMessage> msg) const;
+ ssize_t doGetSelectedTrack(media_track_type type) const;
+
+ void onSelectTrack(sp<AMessage> msg);
+ status_t doSelectTrack(size_t trackIndex, bool select);
+
+ void onSeek(sp<AMessage> msg);
+ status_t doSeek(int64_t seekTimeUs);
+
+ void onPrepareAsync();
+
+ void fetchTextData(
+ uint32_t what, media_track_type type,
+ int32_t curGen, sp<AnotherPacketSource> packets, sp<AMessage> msg);
+
+ void sendTextData(
+ uint32_t what, media_track_type type,
+ int32_t curGen, sp<AnotherPacketSource> packets, sp<AMessage> msg);
+
+ sp<ABuffer> mediaBufferToABuffer(
+ MediaBuffer *mbuf,
+ media_track_type trackType,
+ int64_t *actualTimeUs = NULL);
+
+ void postReadBuffer(media_track_type trackType);
+ void onReadBuffer(sp<AMessage> msg);
void readBuffer(
- bool audio,
- int64_t seekTimeUs = -1ll, int64_t *actualTimeUs = NULL);
+ media_track_type trackType,
+ int64_t seekTimeUs = -1ll, int64_t *actualTimeUs = NULL, bool formatChange = false);
+
+ void schedulePollBuffering();
+ void cancelPollBuffering();
+ void onPollBuffering();
+ void notifyBufferingUpdate(int percentage);
DISALLOW_EVIL_CONSTRUCTORS(GenericSource);
};
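postReadBuffer()/onReadBuffer() declared above coalesce read requests through the mPendingReadBufferTypes bitmask guarded by mReadBufferLock: a request for a track type that is already pending posts nothing, and the bit is cleared before the potentially slow readBuffer() call, so at worst one extra read is processed. A standalone sketch of that idea using only the standard library; the type and member names here are stand-ins, not the actual classes.

#include <cstdint>
#include <cstdio>
#include <mutex>

enum TrackType { kAudio = 1, kVideo = 2 };   // illustrative track-type values

struct ReadScheduler {
    std::mutex lock;
    uint32_t pendingTypes = 0;

    // Returns true if a new read should be queued; a request for a type
    // that is already pending is coalesced into the queued one.
    bool post(TrackType type) {
        std::lock_guard<std::mutex> guard(lock);
        if (pendingTypes & (1u << type)) {
            return false;
        }
        pendingTypes |= (1u << type);
        return true;
    }

    // Clear the pending bit first, then do the slow work outside the lock,
    // mirroring the comment in onReadBuffer(): one extra read is benign.
    void onRead(TrackType type) {
        {
            std::lock_guard<std::mutex> guard(lock);
            pendingTypes &= ~(1u << type);
        }
        // ... the actual readBuffer(type) equivalent would run here ...
    }
};

int main() {
    ReadScheduler sched;
    bool first = sched.post(kAudio);
    bool second = sched.post(kAudio);   // coalesced with the first request
    printf("%d %d\n", first, second);   // prints: 1 0
    sched.onRead(kAudio);
    printf("%d\n", sched.post(kAudio)); // prints: 1
    return 0;
}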
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index 510dcc9..a003c81 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -24,6 +24,7 @@
#include "LiveDataSource.h"
#include "LiveSession.h"
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -34,13 +35,12 @@ namespace android {
NuPlayer::HTTPLiveSource::HTTPLiveSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
- const KeyedVector<String8, String8> *headers,
- bool uidValid, uid_t uid)
+ const KeyedVector<String8, String8> *headers)
: Source(notify),
+ mHTTPService(httpService),
mURL(url),
- mUIDValid(uidValid),
- mUID(uid),
mFlags(0),
mFinalResult(OK),
mOffset(0),
@@ -62,25 +62,31 @@ NuPlayer::HTTPLiveSource::HTTPLiveSource(
NuPlayer::HTTPLiveSource::~HTTPLiveSource() {
if (mLiveSession != NULL) {
mLiveSession->disconnect();
- mLiveSession.clear();
+ mLiveLooper->unregisterHandler(mLiveSession->id());
+ mLiveLooper->unregisterHandler(id());
mLiveLooper->stop();
+
+ mLiveSession.clear();
mLiveLooper.clear();
}
}
void NuPlayer::HTTPLiveSource::prepareAsync() {
- mLiveLooper = new ALooper;
- mLiveLooper->setName("http live");
- mLiveLooper->start();
+ if (mLiveLooper == NULL) {
+ mLiveLooper = new ALooper;
+ mLiveLooper->setName("http live");
+ mLiveLooper->start();
+
+ mLiveLooper->registerHandler(this);
+ }
sp<AMessage> notify = new AMessage(kWhatSessionNotify, id());
mLiveSession = new LiveSession(
notify,
(mFlags & kFlagIncognito) ? LiveSession::kFlagIncognito : 0,
- mUIDValid,
- mUID);
+ mHTTPService);
mLiveLooper->registerHandler(mLiveSession);
@@ -121,8 +127,12 @@ status_t NuPlayer::HTTPLiveSource::getDuration(int64_t *durationUs) {
return mLiveSession->getDuration(durationUs);
}
-status_t NuPlayer::HTTPLiveSource::getTrackInfo(Parcel *reply) const {
- return mLiveSession->getTrackInfo(reply);
+size_t NuPlayer::HTTPLiveSource::getTrackCount() const {
+ return mLiveSession->getTrackCount();
+}
+
+sp<AMessage> NuPlayer::HTTPLiveSource::getTrackInfo(size_t trackIndex) const {
+ return mLiveSession->getTrackInfo(trackIndex);
}
status_t NuPlayer::HTTPLiveSource::selectTrack(size_t trackIndex, bool select) {
@@ -207,9 +217,9 @@ void NuPlayer::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) {
int32_t height;
if (format != NULL &&
format->findInt32("width", &width) && format->findInt32("height", &height)) {
- notifyVideoSizeChanged(width, height);
+ notifyVideoSizeChanged(format);
} else {
- notifyVideoSizeChanged(0, 0);
+ notifyVideoSizeChanged();
}
uint32_t flags = FLAG_CAN_PAUSE;
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
index bcc3f8b..6b5f6af 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
@@ -28,10 +28,9 @@ struct LiveSession;
struct NuPlayer::HTTPLiveSource : public NuPlayer::Source {
HTTPLiveSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
- const KeyedVector<String8, String8> *headers,
- bool uidValid = false,
- uid_t uid = 0);
+ const KeyedVector<String8, String8> *headers);
virtual void prepareAsync();
virtual void start();
@@ -41,7 +40,8 @@ struct NuPlayer::HTTPLiveSource : public NuPlayer::Source {
virtual status_t feedMoreTSData();
virtual status_t getDuration(int64_t *durationUs);
- virtual status_t getTrackInfo(Parcel *reply) const;
+ virtual size_t getTrackCount() const;
+ virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
virtual status_t selectTrack(size_t trackIndex, bool select);
virtual status_t seekTo(int64_t seekTimeUs);
@@ -61,10 +61,9 @@ private:
kWhatFetchSubtitleData,
};
+ sp<IMediaHTTPService> mHTTPService;
AString mURL;
KeyedVector<String8, String8> mExtraHeaders;
- bool mUIDValid;
- uid_t mUID;
uint32_t mFlags;
status_t mFinalResult;
off64_t mOffset;
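Several handlers added in GenericSource above (kWhatPollBuffering, the subtitle and timed-text fetches) guard against stale messages with a generation counter: cancelling is just an increment, and any message that recorded an older generation is dropped when it arrives. A minimal standalone sketch of that pattern, with a plain vector standing in for the ALooper message queue:

#include <cstdint>
#include <cstdio>
#include <vector>

// Each queued "message" snapshots the generation it was posted with.
struct Message { int32_t generation; };

struct Poller {
    int32_t generation = 0;
    std::vector<Message> queue;

    void schedule() { queue.push_back(Message{generation}); }
    void cancel()   { ++generation; }   // older messages become stale

    void drain() {
        for (const Message &msg : queue) {
            if (msg.generation != generation) {
                continue;               // stale: posted before the last cancel()
            }
            printf("handling message from generation %d\n", msg.generation);
        }
        queue.clear();
    }
};

int main() {
    Poller p;
    p.schedule();   // will be dropped as stale
    p.cancel();
    p.schedule();   // matches the current generation and is handled
    p.drain();
    return 0;
}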
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 25d55a3..a63a940 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -22,24 +22,22 @@
#include "HTTPLiveSource.h"
#include "NuPlayerDecoder.h"
+#include "NuPlayerDecoderPassThrough.h"
#include "NuPlayerDriver.h"
#include "NuPlayerRenderer.h"
#include "NuPlayerSource.h"
#include "RTSPSource.h"
#include "StreamingSource.h"
#include "GenericSource.h"
-#include "mp4/MP4Source.h"
+#include "TextDescriptions.h"
#include "ATSParser.h"
-#include "SoftwareRenderer.h"
-
-#include <cutils/properties.h> // for property_get
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
@@ -52,6 +50,10 @@
namespace android {
+// TODO optimize buffer size for power consumption
+// The offload read buffer size is 32 KB but 24 KB uses less power.
+const size_t NuPlayer::kAggregateBufferSizeBytes = 24 * 1024;
+
struct NuPlayer::Action : public RefBase {
Action() {}
@@ -62,16 +64,18 @@ private:
};
struct NuPlayer::SeekAction : public Action {
- SeekAction(int64_t seekTimeUs)
- : mSeekTimeUs(seekTimeUs) {
+ SeekAction(int64_t seekTimeUs, bool needNotify)
+ : mSeekTimeUs(seekTimeUs),
+ mNeedNotify(needNotify) {
}
virtual void execute(NuPlayer *player) {
- player->performSeek(mSeekTimeUs);
+ player->performSeek(mSeekTimeUs, mNeedNotify);
}
private:
int64_t mSeekTimeUs;
+ bool mNeedNotify;
DISALLOW_EVIL_CONSTRUCTORS(SeekAction);
};
@@ -148,22 +152,26 @@ NuPlayer::NuPlayer()
: mUIDValid(false),
mSourceFlags(0),
mVideoIsAVC(false),
- mNeedsSwRenderer(false),
+ mOffloadAudio(false),
+ mAudioDecoderGeneration(0),
+ mVideoDecoderGeneration(0),
+ mRendererGeneration(0),
mAudioEOS(false),
mVideoEOS(false),
mScanSourcesPending(false),
mScanSourcesGeneration(0),
mPollDurationGeneration(0),
+ mTimedTextGeneration(0),
mTimeDiscontinuityPending(false),
mFlushingAudio(NONE),
mFlushingVideo(NONE),
mSkipRenderingAudioUntilMediaTimeUs(-1ll),
mSkipRenderingVideoUntilMediaTimeUs(-1ll),
- mVideoLateByUs(0ll),
mNumFramesTotal(0ll),
mNumFramesDropped(0ll),
mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW),
mStarted(false) {
+ clearFlushComplete();
}
NuPlayer::~NuPlayer() {
@@ -183,14 +191,7 @@ void NuPlayer::setDataSourceAsync(const sp<IStreamSource> &source) {
sp<AMessage> notify = new AMessage(kWhatSourceNotify, id());
- char prop[PROPERTY_VALUE_MAX];
- if (property_get("media.stagefright.use-mp4source", prop, NULL)
- && (!strcmp(prop, "1") || !strcasecmp(prop, "true"))) {
- msg->setObject("source", new MP4Source(notify, source));
- } else {
- msg->setObject("source", new StreamingSource(notify, source));
- }
-
+ msg->setObject("source", new StreamingSource(notify, source));
msg->post();
}
@@ -212,7 +213,10 @@ static bool IsHTTPLiveURL(const char *url) {
}
void NuPlayer::setDataSourceAsync(
- const char *url, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers) {
+
sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
size_t len = strlen(url);
@@ -220,18 +224,31 @@ void NuPlayer::setDataSourceAsync(
sp<Source> source;
if (IsHTTPLiveURL(url)) {
- source = new HTTPLiveSource(notify, url, headers, mUIDValid, mUID);
+ source = new HTTPLiveSource(notify, httpService, url, headers);
} else if (!strncasecmp(url, "rtsp://", 7)) {
- source = new RTSPSource(notify, url, headers, mUIDValid, mUID);
+ source = new RTSPSource(
+ notify, httpService, url, headers, mUIDValid, mUID);
} else if ((!strncasecmp(url, "http://", 7)
|| !strncasecmp(url, "https://", 8))
&& ((len >= 4 && !strcasecmp(".sdp", &url[len - 4]))
|| strstr(url, ".sdp?"))) {
- source = new RTSPSource(notify, url, headers, mUIDValid, mUID, true);
+ source = new RTSPSource(
+ notify, httpService, url, headers, mUIDValid, mUID, true);
} else {
- source = new GenericSource(notify, url, headers, mUIDValid, mUID);
+ sp<GenericSource> genericSource =
+ new GenericSource(notify, mUIDValid, mUID);
+ // Don't set FLAG_SECURE on mSourceFlags here for widevine.
+ // The correct flags will be updated in Source::kWhatFlagsChanged
+ // handler when GenericSource is prepared.
+
+ status_t err = genericSource->setDataSource(httpService, url, headers);
+
+ if (err == OK) {
+ source = genericSource;
+ } else {
+ ALOGE("Failed to set data source!");
+ }
}
-
msg->setObject("source", source);
msg->post();
}
@@ -241,7 +258,16 @@ void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) {
sp<AMessage> notify = new AMessage(kWhatSourceNotify, id());
- sp<Source> source = new GenericSource(notify, fd, offset, length);
+ sp<GenericSource> source =
+ new GenericSource(notify, mUIDValid, mUID);
+
+ status_t err = source->setDataSource(fd, offset, length);
+
+ if (err != OK) {
+ ALOGE("Failed to set data source!");
+ source = NULL;
+ }
+
msg->setObject("source", source);
msg->post();
}
@@ -260,7 +286,7 @@ void NuPlayer::setVideoSurfaceTextureAsync(
msg->setObject(
"native-window",
new NativeWindowWrapper(
- new Surface(bufferProducer)));
+ new Surface(bufferProducer, true /* controlledByApp */)));
}
msg->post();
@@ -285,32 +311,52 @@ void NuPlayer::resume() {
}
void NuPlayer::resetAsync() {
+ if (mSource != NULL) {
+ // During a reset, the data source might already be unresponsive; we need to
+ // disconnect explicitly so that reads exit promptly.
+ // We can't queue the disconnect request to the looper, as it might be
+ // queued behind a stuck read and never get processed.
+ // Doing the disconnect outside the looper allows the pending reads to exit
+ // (either successfully or with error).
+ mSource->disconnect();
+ }
+
(new AMessage(kWhatReset, id()))->post();
}
-void NuPlayer::seekToAsync(int64_t seekTimeUs) {
+void NuPlayer::seekToAsync(int64_t seekTimeUs, bool needNotify) {
sp<AMessage> msg = new AMessage(kWhatSeek, id());
msg->setInt64("seekTimeUs", seekTimeUs);
+ msg->setInt32("needNotify", needNotify);
msg->post();
}
-// static
-bool NuPlayer::IsFlushingState(FlushStatus state, bool *needShutdown) {
- switch (state) {
- case FLUSHING_DECODER:
- if (needShutdown != NULL) {
- *needShutdown = false;
- }
- return true;
- case FLUSHING_DECODER_SHUTDOWN:
- if (needShutdown != NULL) {
- *needShutdown = true;
- }
- return true;
+void NuPlayer::writeTrackInfo(
+ Parcel* reply, const sp<AMessage> format) const {
+ int32_t trackType;
+ CHECK(format->findInt32("type", &trackType));
- default:
- return false;
+ AString lang;
+ CHECK(format->findString("language", &lang));
+
+ reply->writeInt32(2); // write something non-zero
+ reply->writeInt32(trackType);
+ reply->writeString16(String16(lang.c_str()));
+
+ if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+
+ int32_t isAuto, isDefault, isForced;
+ CHECK(format->findInt32("auto", &isAuto));
+ CHECK(format->findInt32("default", &isDefault));
+ CHECK(format->findInt32("forced", &isForced));
+
+ reply->writeString16(String16(mime.c_str()));
+ reply->writeInt32(isAuto);
+ reply->writeInt32(isDefault);
+ reply->writeInt32(isForced);
}
}
@@ -322,17 +368,19 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
CHECK(mSource == NULL);
+ status_t err = OK;
sp<RefBase> obj;
CHECK(msg->findObject("source", &obj));
-
- mSource = static_cast<Source *>(obj.get());
-
- looper()->registerHandler(mSource);
+ if (obj != NULL) {
+ mSource = static_cast<Source *>(obj.get());
+ } else {
+ err = UNKNOWN_ERROR;
+ }
CHECK(mDriver != NULL);
sp<NuPlayerDriver> driver = mDriver.promote();
if (driver != NULL) {
- driver->notifySetDataSourceCompleted(OK);
+ driver->notifySetDataSourceCompleted(err);
}
break;
}
@@ -348,16 +396,58 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
+ Parcel* reply;
+ CHECK(msg->findPointer("reply", (void**)&reply));
+
+ size_t inbandTracks = 0;
+ if (mSource != NULL) {
+ inbandTracks = mSource->getTrackCount();
+ }
+
+ size_t ccTracks = 0;
+ if (mCCDecoder != NULL) {
+ ccTracks = mCCDecoder->getTrackCount();
+ }
+
+ // total track count
+ reply->writeInt32(inbandTracks + ccTracks);
+
+ // write inband tracks
+ for (size_t i = 0; i < inbandTracks; ++i) {
+ writeTrackInfo(reply, mSource->getTrackInfo(i));
+ }
+
+ // write CC track
+ for (size_t i = 0; i < ccTracks; ++i) {
+ writeTrackInfo(reply, mCCDecoder->getTrackInfo(i));
+ }
+
+ sp<AMessage> response = new AMessage;
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatGetSelectedTrack:
+ {
status_t err = INVALID_OPERATION;
if (mSource != NULL) {
+ err = OK;
+
+ int32_t type32;
+ CHECK(msg->findInt32("type", (int32_t*)&type32));
+ media_track_type type = (media_track_type)type32;
+ ssize_t selectedTrack = mSource->getSelectedTrack(type);
+
Parcel* reply;
CHECK(msg->findPointer("reply", (void**)&reply));
- err = mSource->getTrackInfo(reply);
+ reply->writeInt32(selectedTrack);
}
sp<AMessage> response = new AMessage;
response->setInt32("err", err);
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
response->postReply(replyID);
break;
}
@@ -367,13 +457,40 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
+ size_t trackIndex;
+ int32_t select;
+ CHECK(msg->findSize("trackIndex", &trackIndex));
+ CHECK(msg->findInt32("select", &select));
+
status_t err = INVALID_OPERATION;
+
+ size_t inbandTracks = 0;
if (mSource != NULL) {
- size_t trackIndex;
- int32_t select;
- CHECK(msg->findSize("trackIndex", &trackIndex));
- CHECK(msg->findInt32("select", &select));
+ inbandTracks = mSource->getTrackCount();
+ }
+ size_t ccTracks = 0;
+ if (mCCDecoder != NULL) {
+ ccTracks = mCCDecoder->getTrackCount();
+ }
+
+ if (trackIndex < inbandTracks) {
err = mSource->selectTrack(trackIndex, select);
+
+ if (!select && err == OK) {
+ int32_t type;
+ sp<AMessage> info = mSource->getTrackInfo(trackIndex);
+ if (info != NULL
+ && info->findInt32("type", &type)
+ && type == MEDIA_TRACK_TYPE_TIMEDTEXT) {
+ ++mTimedTextGeneration;
+ }
+ }
+ } else {
+ trackIndex -= inbandTracks;
+
+ if (trackIndex < ccTracks) {
+ err = mCCDecoder->selectTrack(trackIndex, select);
+ }
}
sp<AMessage> response = new AMessage;
@@ -421,6 +538,18 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
static_cast<NativeWindowWrapper *>(obj.get())));
if (obj != NULL) {
+ if (mStarted && mSource->getFormat(false /* audio */) != NULL) {
+ // Issue a seek to refresh the video screen, but only if playback has
+ // started; otherwise the extractor may not be started yet and will assert.
+ // If no video decoder is set (e.g. audio-only playback), skip the seek
+ // since it is not needed.
+ int64_t currentPositionUs = 0;
+ if (getCurrentPosition(&currentPositionUs) == OK) {
+ mDeferredActions.push_back(
+ new SeekAction(currentPositionUs, false /* needNotify */));
+ }
+ }
+
// If there is a new surface texture, instantiate decoders
// again if possible.
mDeferredActions.push_back(
@@ -447,16 +576,26 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
ALOGV("kWhatStart");
mVideoIsAVC = false;
- mNeedsSwRenderer = false;
+ mOffloadAudio = false;
mAudioEOS = false;
mVideoEOS = false;
mSkipRenderingAudioUntilMediaTimeUs = -1;
mSkipRenderingVideoUntilMediaTimeUs = -1;
- mVideoLateByUs = 0;
mNumFramesTotal = 0;
mNumFramesDropped = 0;
mStarted = true;
+ /* instantiate decoders now for secure playback */
+ if (mSourceFlags & Source::FLAG_SECURE) {
+ if (mNativeWindow != NULL) {
+ instantiateDecoder(false, &mVideoDecoder);
+ }
+
+ if (mAudioSink != NULL) {
+ instantiateDecoder(true, &mAudioDecoder);
+ }
+ }
+
mSource->start();
uint32_t flags = 0;
@@ -465,12 +604,37 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
flags |= Renderer::FLAG_REAL_TIME;
}
- mRenderer = new Renderer(
- mAudioSink,
- new AMessage(kWhatRendererNotify, id()),
- flags);
+ sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+ audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+ if (mAudioSink != NULL) {
+ streamType = mAudioSink->getAudioStreamType();
+ }
+
+ sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
+
+ mOffloadAudio =
+ canOffloadStream(audioMeta, (videoFormat != NULL),
+ true /* is_streaming */, streamType);
+ if (mOffloadAudio) {
+ flags |= Renderer::FLAG_OFFLOAD_AUDIO;
+ }
- looper()->registerHandler(mRenderer);
+ sp<AMessage> notify = new AMessage(kWhatRendererNotify, id());
+ ++mRendererGeneration;
+ notify->setInt32("generation", mRendererGeneration);
+ mRenderer = new Renderer(mAudioSink, notify, flags);
+
+ mRendererLooper = new ALooper;
+ mRendererLooper->setName("NuPlayerRenderer");
+ mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ mRendererLooper->registerHandler(mRenderer);
+
+ sp<MetaData> meta = getFileMeta();
+ int32_t rate;
+ if (meta != NULL
+ && meta->findInt32(kKeyFrameRate, &rate) && rate > 0) {
+ mRenderer->setVideoFrameRate(rate);
+ }
postScanSources();
break;
@@ -493,11 +657,18 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
bool mHadAnySourcesBefore =
(mAudioDecoder != NULL) || (mVideoDecoder != NULL);
+ // Initialize video before audio because successful initialization of
+ // video may change the deep buffer mode of the audio sink.
if (mNativeWindow != NULL) {
instantiateDecoder(false, &mVideoDecoder);
}
if (mAudioSink != NULL) {
+ if (mOffloadAudio) {
+ // open audio sink early under offload mode.
+ sp<AMessage> format = mSource->getFormat(true /*audio*/);
+ openAudioSink(format, true /*offloadOnly*/);
+ }
instantiateDecoder(true, &mAudioDecoder);
}
@@ -538,24 +709,40 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
{
bool audio = msg->what() == kWhatAudioNotify;
- sp<AMessage> codecRequest;
- CHECK(msg->findMessage("codec-request", &codecRequest));
+ int32_t currentDecoderGeneration =
+ (audio? mAudioDecoderGeneration : mVideoDecoderGeneration);
+ int32_t requesterGeneration = currentDecoderGeneration - 1;
+ CHECK(msg->findInt32("generation", &requesterGeneration));
+
+ if (requesterGeneration != currentDecoderGeneration) {
+ ALOGV("got message from old %s decoder, generation(%d:%d)",
+ audio ? "audio" : "video", requesterGeneration,
+ currentDecoderGeneration);
+ sp<AMessage> reply;
+ if (!(msg->findMessage("reply", &reply))) {
+ return;
+ }
+
+ reply->setInt32("err", INFO_DISCONTINUITY);
+ reply->post();
+ return;
+ }
int32_t what;
- CHECK(codecRequest->findInt32("what", &what));
+ CHECK(msg->findInt32("what", &what));
- if (what == ACodec::kWhatFillThisBuffer) {
+ if (what == Decoder::kWhatFillThisBuffer) {
status_t err = feedDecoderInputData(
- audio, codecRequest);
+ audio, msg);
if (err == -EWOULDBLOCK) {
if (mSource->feedMoreTSData() == OK) {
- msg->post(10000ll);
+ msg->post(10 * 1000ll);
}
}
- } else if (what == ACodec::kWhatEOS) {
+ } else if (what == Decoder::kWhatEOS) {
int32_t err;
- CHECK(codecRequest->findInt32("err", &err));
+ CHECK(msg->findInt32("err", &err));
if (err == ERROR_END_OF_STREAM) {
ALOGV("got %s decoder EOS", audio ? "audio" : "video");
@@ -566,170 +753,91 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
mRenderer->queueEOS(audio, err);
- } else if (what == ACodec::kWhatFlushCompleted) {
- bool needShutdown;
-
- if (audio) {
- CHECK(IsFlushingState(mFlushingAudio, &needShutdown));
- mFlushingAudio = FLUSHED;
- } else {
- CHECK(IsFlushingState(mFlushingVideo, &needShutdown));
- mFlushingVideo = FLUSHED;
-
- mVideoLateByUs = 0;
- }
-
+ } else if (what == Decoder::kWhatFlushCompleted) {
ALOGV("decoder %s flush completed", audio ? "audio" : "video");
- if (needShutdown) {
- ALOGV("initiating %s decoder shutdown",
- audio ? "audio" : "video");
-
- (audio ? mAudioDecoder : mVideoDecoder)->initiateShutdown();
-
- if (audio) {
- mFlushingAudio = SHUTTING_DOWN_DECODER;
- } else {
- mFlushingVideo = SHUTTING_DOWN_DECODER;
- }
- }
-
+ handleFlushComplete(audio, true /* isDecoder */);
finishFlushIfPossible();
- } else if (what == ACodec::kWhatOutputFormatChanged) {
- if (audio) {
- int32_t numChannels;
- CHECK(codecRequest->findInt32(
- "channel-count", &numChannels));
-
- int32_t sampleRate;
- CHECK(codecRequest->findInt32("sample-rate", &sampleRate));
-
- ALOGV("Audio output format changed to %d Hz, %d channels",
- sampleRate, numChannels);
-
- mAudioSink->close();
-
- audio_output_flags_t flags;
- int64_t durationUs;
- // FIXME: we should handle the case where the video decoder
- // is created after we receive the format change indication.
- // Current code will just make that we select deep buffer
- // with video which should not be a problem as it should
- // not prevent from keeping A/V sync.
- if (mVideoDecoder == NULL &&
- mSource->getDuration(&durationUs) == OK &&
- durationUs
- > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
- flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
- } else {
- flags = AUDIO_OUTPUT_FLAG_NONE;
- }
+ } else if (what == Decoder::kWhatOutputFormatChanged) {
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
- int32_t channelMask;
- if (!codecRequest->findInt32("channel-mask", &channelMask)) {
- channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
- }
-
- CHECK_EQ(mAudioSink->open(
- sampleRate,
- numChannels,
- (audio_channel_mask_t)channelMask,
- AUDIO_FORMAT_PCM_16_BIT,
- 8 /* bufferCount */,
- NULL,
- NULL,
- flags),
- (status_t)OK);
- mAudioSink->start();
-
- mRenderer->signalAudioSinkChanged();
+ if (audio) {
+ openAudioSink(format, false /*offloadOnly*/);
} else {
// video
+ sp<AMessage> inputFormat =
+ mSource->getFormat(false /* audio */);
- int32_t width, height;
- CHECK(codecRequest->findInt32("width", &width));
- CHECK(codecRequest->findInt32("height", &height));
-
- int32_t cropLeft, cropTop, cropRight, cropBottom;
- CHECK(codecRequest->findRect(
- "crop",
- &cropLeft, &cropTop, &cropRight, &cropBottom));
-
- int32_t displayWidth = cropRight - cropLeft + 1;
- int32_t displayHeight = cropBottom - cropTop + 1;
-
- ALOGV("Video output format changed to %d x %d "
- "(crop: %d x %d @ (%d, %d))",
- width, height,
- displayWidth,
- displayHeight,
- cropLeft, cropTop);
-
- sp<AMessage> videoInputFormat =
- mSource->getFormat(false /* audio */);
-
- // Take into account sample aspect ratio if necessary:
- int32_t sarWidth, sarHeight;
- if (videoInputFormat->findInt32("sar-width", &sarWidth)
- && videoInputFormat->findInt32(
- "sar-height", &sarHeight)) {
- ALOGV("Sample aspect ratio %d : %d",
- sarWidth, sarHeight);
-
- displayWidth = (displayWidth * sarWidth) / sarHeight;
-
- ALOGV("display dimensions %d x %d",
- displayWidth, displayHeight);
- }
-
- notifyListener(
- MEDIA_SET_VIDEO_SIZE, displayWidth, displayHeight);
-
- if (mNeedsSwRenderer && mNativeWindow != NULL) {
- int32_t colorFormat;
- CHECK(codecRequest->findInt32("color-format", &colorFormat));
-
- sp<MetaData> meta = new MetaData;
- meta->setInt32(kKeyWidth, width);
- meta->setInt32(kKeyHeight, height);
- meta->setRect(kKeyCropRect, cropLeft, cropTop, cropRight, cropBottom);
- meta->setInt32(kKeyColorFormat, colorFormat);
-
- mRenderer->setSoftRenderer(
- new SoftwareRenderer(mNativeWindow->getNativeWindow(), meta));
- }
+ updateVideoSize(inputFormat, format);
}
- } else if (what == ACodec::kWhatShutdownCompleted) {
+ } else if (what == Decoder::kWhatShutdownCompleted) {
ALOGV("%s shutdown completed", audio ? "audio" : "video");
if (audio) {
mAudioDecoder.clear();
+ ++mAudioDecoderGeneration;
CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
mFlushingAudio = SHUT_DOWN;
} else {
mVideoDecoder.clear();
+ ++mVideoDecoderGeneration;
CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
mFlushingVideo = SHUT_DOWN;
}
finishFlushIfPossible();
- } else if (what == ACodec::kWhatError) {
- ALOGE("Received error from %s decoder, aborting playback.",
- audio ? "audio" : "video");
-
- mRenderer->queueEOS(audio, UNKNOWN_ERROR);
- } else if (what == ACodec::kWhatDrainThisBuffer) {
- renderBuffer(audio, codecRequest);
- } else if (what == ACodec::kWhatComponentAllocated) {
- if (!audio) {
- AString name;
- CHECK(codecRequest->findString("componentName", &name));
- mNeedsSwRenderer = name.startsWith("OMX.google.");
+ } else if (what == Decoder::kWhatError) {
+ status_t err;
+ if (!msg->findInt32("err", &err) || err == OK) {
+ err = UNKNOWN_ERROR;
+ }
+
+ // Decoder errors can originate from the Source (e.g. during streaming),
+ // from decoding a corrupted bitstream, or from other MediaCodec
+ // operations on the decoder (e.g. an ongoing reset or seek).
+ //
+ // We try to gracefully shut down the affected decoder if possible,
+ // rather than forcing the shutdown with something similar to
+ // performReset(). This approach could hang if MediaCodec calls
+ // block after an error, but they typically return INVALID_OPERATION
+ // instead of blocking.
+
+ FlushStatus *flushing = audio ? &mFlushingAudio : &mFlushingVideo;
+ ALOGE("received error(%#x) from %s decoder, flushing(%d), now shutting down",
+ err, audio ? "audio" : "video", *flushing);
+
+ switch (*flushing) {
+ case NONE:
+ mDeferredActions.push_back(
+ new ShutdownDecoderAction(audio, !audio /* video */));
+ processDeferredActions();
+ break;
+ case FLUSHING_DECODER:
+ *flushing = FLUSHING_DECODER_SHUTDOWN; // initiate shutdown after flush.
+ break; // Wait for flush to complete.
+ case FLUSHING_DECODER_SHUTDOWN:
+ break; // Wait for flush to complete.
+ case SHUTTING_DOWN_DECODER:
+ break; // Wait for shutdown to complete.
+ case FLUSHED:
+ // Widevine source reads must stop before releasing the video decoder.
+ if (!audio && mSource != NULL && mSourceFlags & Source::FLAG_SECURE) {
+ mSource->stop();
+ }
+ getDecoder(audio)->initiateShutdown(); // In the middle of a seek.
+ *flushing = SHUTTING_DOWN_DECODER; // Shut down.
+ break;
+ case SHUT_DOWN:
+ finishFlushIfPossible(); // Should not occur.
+ break; // Finish anyway.
}
- } else if (what != ACodec::kWhatComponentConfigured
- && what != ACodec::kWhatBuffersAllocated) {
- ALOGV("Unhandled codec notification %d '%c%c%c%c'.",
+ notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
+ } else if (what == Decoder::kWhatDrainThisBuffer) {
+ renderBuffer(audio, msg);
+ } else {
+ ALOGV("Unhandled decoder notification %d '%c%c%c%c'.",
what,
what >> 24,
(what >> 16) & 0xff,
@@ -742,6 +850,14 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatRendererNotify:
{
+ int32_t requesterGeneration = mRendererGeneration - 1;
+ CHECK(msg->findInt32("generation", &requesterGeneration));
+ if (requesterGeneration != mRendererGeneration) {
+ ALOGV("got message from old renderer, generation(%d:%d)",
+ requesterGeneration, mRendererGeneration);
+ return;
+ }
+
int32_t what;
CHECK(msg->findInt32("what", &what));
@@ -772,31 +888,38 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
&& (mVideoEOS || mVideoDecoder == NULL)) {
notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
}
- } else if (what == Renderer::kWhatPosition) {
- int64_t positionUs;
- CHECK(msg->findInt64("positionUs", &positionUs));
-
- CHECK(msg->findInt64("videoLateByUs", &mVideoLateByUs));
-
- if (mDriver != NULL) {
- sp<NuPlayerDriver> driver = mDriver.promote();
- if (driver != NULL) {
- driver->notifyPosition(positionUs);
-
- driver->notifyFrameStats(
- mNumFramesTotal, mNumFramesDropped);
- }
- }
} else if (what == Renderer::kWhatFlushComplete) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
+ handleFlushComplete(audio, false /* isDecoder */);
+ finishFlushIfPossible();
} else if (what == Renderer::kWhatVideoRenderingStart) {
notifyListener(MEDIA_INFO, MEDIA_INFO_RENDERING_START, 0);
} else if (what == Renderer::kWhatMediaRenderingStart) {
ALOGV("media rendering started");
notifyListener(MEDIA_STARTED, 0, 0);
+ } else if (what == Renderer::kWhatAudioOffloadTearDown) {
+ ALOGV("Tear down audio offload, fall back to s/w path");
+ int64_t positionUs;
+ CHECK(msg->findInt64("positionUs", &positionUs));
+ int32_t reason;
+ CHECK(msg->findInt32("reason", &reason));
+ closeAudioSink();
+ mAudioDecoder.clear();
+ ++mAudioDecoderGeneration;
+ mRenderer->flush(true /* audio */);
+ if (mVideoDecoder != NULL) {
+ mRenderer->flush(false /* audio */);
+ }
+ mRenderer->signalDisableOffloadAudio();
+ mOffloadAudio = false;
+
+ performSeek(positionUs, false /* needNotify */);
+ if (reason == Renderer::kDueToError) {
+ instantiateDecoder(true /* audio */, &mAudioDecoder);
+ }
}
break;
}
@@ -824,14 +947,18 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatSeek:
{
int64_t seekTimeUs;
+ int32_t needNotify;
CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
+ CHECK(msg->findInt32("needNotify", &needNotify));
- ALOGV("kWhatSeek seekTimeUs=%lld us", seekTimeUs);
+ ALOGV("kWhatSeek seekTimeUs=%lld us, needNotify=%d",
+ seekTimeUs, needNotify);
mDeferredActions.push_back(
new SimpleAction(&NuPlayer::performDecoderFlush));
- mDeferredActions.push_back(new SeekAction(seekTimeUs));
+ mDeferredActions.push_back(
+ new SeekAction(seekTimeUs, needNotify));
processDeferredActions();
break;
@@ -839,17 +966,36 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatPause:
{
- CHECK(mRenderer != NULL);
- mSource->pause();
- mRenderer->pause();
+ if (mSource != NULL) {
+ mSource->pause();
+ } else {
+ ALOGW("pause called when source is gone or not set");
+ }
+ if (mRenderer != NULL) {
+ mRenderer->pause();
+ } else {
+ ALOGW("pause called when renderer is gone or not set");
+ }
break;
}
case kWhatResume:
{
- CHECK(mRenderer != NULL);
- mSource->resume();
- mRenderer->resume();
+ if (mSource != NULL) {
+ mSource->resume();
+ } else {
+ ALOGW("resume called when source is gone or not set");
+ }
+ // |mAudioDecoder| may have been released due to the pause timeout, so re-create it if
+ // needed.
+ if (audioDecoderStillNeeded() && mAudioDecoder == NULL) {
+ instantiateDecoder(true /* audio */, &mAudioDecoder);
+ }
+ if (mRenderer != NULL) {
+ mRenderer->resume();
+ } else {
+ ALOGW("resume called when renderer is gone or not set");
+ }
break;
}
@@ -859,39 +1005,96 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatClosedCaptionNotify:
+ {
+ onClosedCaptionNotify(msg);
+ break;
+ }
+
default:
TRESPASS();
break;
}
}
+bool NuPlayer::audioDecoderStillNeeded() {
+ // The audio decoder is no longer needed once it is shut down or shutting down.
+ return ((mFlushingAudio != SHUT_DOWN) && (mFlushingAudio != SHUTTING_DOWN_DECODER));
+}
+
+void NuPlayer::handleFlushComplete(bool audio, bool isDecoder) {
+ // We wait for both the decoder flush and the renderer flush to complete
+ // before entering either the FLUSHED or the SHUTTING_DOWN_DECODER state.
+
+ mFlushComplete[audio][isDecoder] = true;
+ if (!mFlushComplete[audio][!isDecoder]) {
+ return;
+ }
+
+ FlushStatus *state = audio ? &mFlushingAudio : &mFlushingVideo;
+ switch (*state) {
+ case FLUSHING_DECODER:
+ {
+ *state = FLUSHED;
+ break;
+ }
+
+ case FLUSHING_DECODER_SHUTDOWN:
+ {
+ *state = SHUTTING_DOWN_DECODER;
+
+ ALOGV("initiating %s decoder shutdown", audio ? "audio" : "video");
+ if (!audio) {
+ // Widevine source reads must stop before releasing the video decoder.
+ if (mSource != NULL && mSourceFlags & Source::FLAG_SECURE) {
+ mSource->stop();
+ }
+ }
+ getDecoder(audio)->initiateShutdown();
+ break;
+ }
+
+ default:
+ // Decoder flush completions should only occur while in a flushing state.
+ LOG_ALWAYS_FATAL_IF(isDecoder, "decoder flush in invalid state %d", *state);
+ break;
+ }
+}
+
void NuPlayer::finishFlushIfPossible() {
- if (mFlushingAudio != FLUSHED && mFlushingAudio != SHUT_DOWN) {
+ if (mFlushingAudio != NONE && mFlushingAudio != FLUSHED
+ && mFlushingAudio != SHUT_DOWN) {
return;
}
- if (mFlushingVideo != FLUSHED && mFlushingVideo != SHUT_DOWN) {
+ if (mFlushingVideo != NONE && mFlushingVideo != FLUSHED
+ && mFlushingVideo != SHUT_DOWN) {
return;
}
ALOGV("both audio and video are flushed now.");
+ mPendingAudioAccessUnit.clear();
+ mAggregateBuffer.clear();
+
if (mTimeDiscontinuityPending) {
mRenderer->signalTimeDiscontinuity();
mTimeDiscontinuityPending = false;
}
- if (mAudioDecoder != NULL) {
+ if (mAudioDecoder != NULL && mFlushingAudio == FLUSHED) {
mAudioDecoder->signalResume();
}
- if (mVideoDecoder != NULL) {
+ if (mVideoDecoder != NULL && mFlushingVideo == FLUSHED) {
mVideoDecoder->signalResume();
}
mFlushingAudio = NONE;
mFlushingVideo = NONE;
+ clearFlushComplete();
+
processDeferredActions();
}
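handleFlushComplete() and finishFlushIfPossible() above coordinate two independent completion signals per stream (decoder flush and renderer flush) before advancing the flush state machine. A condensed sketch of that bookkeeping with the same [audio][isDecoder] indexing but deliberately simplified states; illustrative only:

// Condensed sketch of the flush bookkeeping; states and names simplified.
#include <cstdio>

enum FlushStatus { NONE, FLUSHING_DECODER, FLUSHED };

struct FlushTracker {
    bool complete[2][2] = {};                  // [audio][isDecoder]
    FlushStatus state[2] = { NONE, NONE };     // [audio]

    void onFlushComplete(bool audio, bool isDecoder) {
        complete[audio][isDecoder] = true;
        // Advance only once both the decoder and the renderer reported back.
        if (complete[audio][!isDecoder] && state[audio] == FLUSHING_DECODER) {
            state[audio] = FLUSHED;
            std::printf("%s fully flushed\n", audio ? "audio" : "video");
        }
    }
};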
@@ -907,6 +1110,38 @@ void NuPlayer::postScanSources() {
mScanSourcesPending = true;
}
+void NuPlayer::openAudioSink(const sp<AMessage> &format, bool offloadOnly) {
+ uint32_t flags;
+ int64_t durationUs;
+ bool hasVideo = (mVideoDecoder != NULL);
+ // FIXME: we should handle the case where the video decoder
+ // is created after we receive the format change indication.
+ // The current code will simply select deep buffer even with video,
+ // which should not be a problem, as it should not prevent A/V sync
+ // from being maintained.
+ if (hasVideo &&
+ mSource->getDuration(&durationUs) == OK &&
+ durationUs
+ > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
+ flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+ } else {
+ flags = AUDIO_OUTPUT_FLAG_NONE;
+ }
+
+ mOffloadAudio = mRenderer->openAudioSink(
+ format, offloadOnly, hasVideo, flags);
+
+ if (mOffloadAudio) {
+ sp<MetaData> audioMeta =
+ mSource->getFormatMeta(true /* audio */);
+ sendMetaDataToHal(mAudioSink, audioMeta);
+ }
+}
+
+void NuPlayer::closeAudioSink() {
+ mRenderer->closeAudioSink();
+}
+
status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
if (*decoder != NULL) {
return OK;
@@ -922,18 +1157,57 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
AString mime;
CHECK(format->findString("mime", &mime));
mVideoIsAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str());
- }
- sp<AMessage> notify =
- new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
- id());
+ sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, id());
+ mCCDecoder = new CCDecoder(ccNotify);
+
+ if (mSourceFlags & Source::FLAG_SECURE) {
+ format->setInt32("secure", true);
+ }
+ }
- *decoder = audio ? new Decoder(notify) :
- new Decoder(notify, mNativeWindow);
- looper()->registerHandler(*decoder);
+ if (audio) {
+ sp<AMessage> notify = new AMessage(kWhatAudioNotify, id());
+ ++mAudioDecoderGeneration;
+ notify->setInt32("generation", mAudioDecoderGeneration);
+
+ if (mOffloadAudio) {
+ *decoder = new DecoderPassThrough(notify);
+ } else {
+ *decoder = new Decoder(notify);
+ }
+ } else {
+ sp<AMessage> notify = new AMessage(kWhatVideoNotify, id());
+ ++mVideoDecoderGeneration;
+ notify->setInt32("generation", mVideoDecoderGeneration);
+ *decoder = new Decoder(notify, mNativeWindow);
+ }
+ (*decoder)->init();
(*decoder)->configure(format);
+ // allocate buffers to decrypt widevine source buffers
+ if (!audio && (mSourceFlags & Source::FLAG_SECURE)) {
+ Vector<sp<ABuffer> > inputBufs;
+ CHECK_EQ((*decoder)->getInputBuffers(&inputBufs), (status_t)OK);
+
+ Vector<MediaBuffer *> mediaBufs;
+ for (size_t i = 0; i < inputBufs.size(); i++) {
+ const sp<ABuffer> &buffer = inputBufs[i];
+ MediaBuffer *mbuf = new MediaBuffer(buffer->data(), buffer->size());
+ mediaBufs.push(mbuf);
+ }
+
+ status_t err = mSource->setBuffers(audio, mediaBufs);
+ if (err != OK) {
+ for (size_t i = 0; i < mediaBufs.size(); ++i) {
+ mediaBufs[i]->release();
+ }
+ mediaBufs.clear();
+ ALOGE("Secure source didn't support secure mediaBufs.");
+ return err;
+ }
+ }
return OK;
}
@@ -941,8 +1215,9 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
sp<AMessage> reply;
CHECK(msg->findMessage("reply", &reply));
- if ((audio && IsFlushingState(mFlushingAudio))
- || (!audio && IsFlushingState(mFlushingVideo))) {
+ if ((audio && mFlushingAudio != NONE)
+ || (!audio && mFlushingVideo != NONE)
+ || mSource == NULL) {
reply->setInt32("err", INFO_DISCONTINUITY);
reply->post();
return OK;
@@ -950,14 +1225,37 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
sp<ABuffer> accessUnit;
+ // Aggregate smaller buffers into a larger buffer.
+ // The goal is to reduce power consumption.
+ // Note this will not work if the decoder requires one frame per buffer.
+ bool doBufferAggregation = (audio && mOffloadAudio);
+ bool needMoreData = false;
+
bool dropAccessUnit;
do {
- status_t err = mSource->dequeueAccessUnit(audio, &accessUnit);
+ status_t err;
+ // Did we save an accessUnit earlier because of a discontinuity?
+ if (audio && (mPendingAudioAccessUnit != NULL)) {
+ accessUnit = mPendingAudioAccessUnit;
+ mPendingAudioAccessUnit.clear();
+ err = mPendingAudioErr;
+ ALOGV("feedDecoderInputData() use mPendingAudioAccessUnit");
+ } else {
+ err = mSource->dequeueAccessUnit(audio, &accessUnit);
+ }
if (err == -EWOULDBLOCK) {
return err;
} else if (err != OK) {
if (err == INFO_DISCONTINUITY) {
+ if (doBufferAggregation && (mAggregateBuffer != NULL)) {
+ // We already have some data so save this for later.
+ mPendingAudioErr = err;
+ mPendingAudioAccessUnit = accessUnit;
+ accessUnit.clear();
+ ALOGD("feedDecoderInputData() save discontinuity for later");
+ break;
+ }
int32_t type;
CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
@@ -1002,34 +1300,44 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
mTimeDiscontinuityPending =
mTimeDiscontinuityPending || timeChange;
- if (formatChange || timeChange) {
- if (mFlushingAudio == NONE && mFlushingVideo == NONE) {
- // And we'll resume scanning sources once we're done
- // flushing.
- mDeferredActions.push_front(
- new SimpleAction(
- &NuPlayer::performScanSources));
- }
+ bool seamlessFormatChange = false;
+ sp<AMessage> newFormat = mSource->getFormat(audio);
+ if (formatChange) {
+ seamlessFormatChange =
+ getDecoder(audio)->supportsSeamlessFormatChange(newFormat);
+ // treat seamless format change separately
+ formatChange = !seamlessFormatChange;
+ }
+ bool shutdownOrFlush = formatChange || timeChange;
+
+ // We want to queue up scan-sources only once per discontinuity.
+ // We control this by doing it only if neither audio nor video is
+ // flushing or shutting down. (After handling the first discontinuity,
+ // one of the flushing states will no longer be NONE.)
+ // No need to scan sources if this discontinuity does not result
+ // in a flush or shutdown, as the flushing state will stay NONE.
+ if (mFlushingAudio == NONE && mFlushingVideo == NONE &&
+ shutdownOrFlush) {
+ // And we'll resume scanning sources once we're done
+ // flushing.
+ mDeferredActions.push_front(
+ new SimpleAction(
+ &NuPlayer::performScanSources));
+ }
- sp<AMessage> newFormat = mSource->getFormat(audio);
- sp<Decoder> &decoder = audio ? mAudioDecoder : mVideoDecoder;
- if (formatChange && !decoder->supportsSeamlessFormatChange(newFormat)) {
- flushDecoder(audio, /* needShutdown = */ true);
- } else {
- flushDecoder(audio, /* needShutdown = */ false);
- err = OK;
- }
+ if (formatChange /* not seamless */) {
+ // must change decoder
+ flushDecoder(audio, /* needShutdown = */ true);
+ } else if (timeChange) {
+ // need to flush
+ flushDecoder(audio, /* needShutdown = */ false, newFormat);
+ err = OK;
+ } else if (seamlessFormatChange) {
+ // reuse existing decoder and don't flush
+ updateDecoderFormatWithoutFlush(audio, newFormat);
+ err = OK;
} else {
// This stream is unaffected by the discontinuity
-
- if (audio) {
- mFlushingAudio = FLUSHED;
- } else {
- mFlushingVideo = FLUSHED;
- }
-
- finishFlushIfPossible();
-
return -EWOULDBLOCK;
}
}
@@ -1045,13 +1353,58 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
dropAccessUnit = false;
if (!audio
- && mVideoLateByUs > 100000ll
+ && !(mSourceFlags & Source::FLAG_SECURE)
+ && mRenderer->getVideoLateByUs() > 100000ll
&& mVideoIsAVC
&& !IsAVCReferenceFrame(accessUnit)) {
dropAccessUnit = true;
++mNumFramesDropped;
}
- } while (dropAccessUnit);
+
+ size_t smallSize = accessUnit->size();
+ needMoreData = false;
+ if (doBufferAggregation && (mAggregateBuffer == NULL)
+ // Don't bother if only room for a few small buffers.
+ && (smallSize < (kAggregateBufferSizeBytes / 3))) {
+ // Create a larger buffer for combining smaller buffers from the extractor.
+ mAggregateBuffer = new ABuffer(kAggregateBufferSizeBytes);
+ mAggregateBuffer->setRange(0, 0); // start empty
+ }
+
+ if (doBufferAggregation && (mAggregateBuffer != NULL)) {
+ int64_t timeUs;
+ int64_t dummy;
+ bool smallTimestampValid = accessUnit->meta()->findInt64("timeUs", &timeUs);
+ bool bigTimestampValid = mAggregateBuffer->meta()->findInt64("timeUs", &dummy);
+ // Will the smaller buffer fit?
+ size_t bigSize = mAggregateBuffer->size();
+ size_t roomLeft = mAggregateBuffer->capacity() - bigSize;
+ // Should we save this small buffer for the next big buffer?
+ // If the first small buffer did not have a timestamp then save
+ // any buffer that does have a timestamp until the next big buffer.
+ if ((smallSize > roomLeft)
+ || (!bigTimestampValid && (bigSize > 0) && smallTimestampValid)) {
+ mPendingAudioErr = err;
+ mPendingAudioAccessUnit = accessUnit;
+ accessUnit.clear();
+ } else {
+ // Grab time from first small buffer if available.
+ if ((bigSize == 0) && smallTimestampValid) {
+ mAggregateBuffer->meta()->setInt64("timeUs", timeUs);
+ }
+ // Append small buffer to the bigger buffer.
+ memcpy(mAggregateBuffer->base() + bigSize, accessUnit->data(), smallSize);
+ bigSize += smallSize;
+ mAggregateBuffer->setRange(0, bigSize);
+
+ // Keep looping until we run out of room in the mAggregateBuffer.
+ needMoreData = true;
+
+ ALOGV("feedDecoderInputData() smallSize = %zu, bigSize = %zu, capacity = %zu",
+ smallSize, bigSize, mAggregateBuffer->capacity());
+ }
+ }
+ } while (dropAccessUnit || needMoreData);
// ALOGV("returned a valid buffer of %s data", audio ? "audio" : "video");
@@ -1063,7 +1416,19 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
mediaTimeUs / 1E6);
#endif
- reply->setBuffer("buffer", accessUnit);
+ if (!audio) {
+ mCCDecoder->decode(accessUnit);
+ }
+
+ if (doBufferAggregation && (mAggregateBuffer != NULL)) {
+ ALOGV("feedDecoderInputData() reply with aggregated buffer, %zu",
+ mAggregateBuffer->size());
+ reply->setBuffer("buffer", mAggregateBuffer);
+ mAggregateBuffer.clear();
+ } else {
+ reply->setBuffer("buffer", accessUnit);
+ }
+
reply->post();
return OK;
@@ -1075,7 +1440,8 @@ void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
sp<AMessage> reply;
CHECK(msg->findMessage("reply", &reply));
- if (IsFlushingState(audio ? mFlushingAudio : mFlushingVideo)) {
+ if ((audio && mFlushingAudio != NONE)
+ || (!audio && mFlushingVideo != NONE)) {
// We're currently attempting to flush the decoder, in order
// to complete this, the decoder wants all its buffers back,
// so we don't want any output buffers it sent us (from before
@@ -1091,14 +1457,15 @@ void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
sp<ABuffer> buffer;
CHECK(msg->findBuffer("buffer", &buffer));
+ int64_t mediaTimeUs;
+ CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
int64_t &skipUntilMediaTimeUs =
audio
? mSkipRenderingAudioUntilMediaTimeUs
: mSkipRenderingVideoUntilMediaTimeUs;
if (skipUntilMediaTimeUs >= 0) {
- int64_t mediaTimeUs;
- CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs));
if (mediaTimeUs < skipUntilMediaTimeUs) {
ALOGV("dropping %s buffer at time %lld as requested.",
@@ -1112,9 +1479,79 @@ void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
skipUntilMediaTimeUs = -1;
}
+ if (!audio && mCCDecoder->isSelected()) {
+ mCCDecoder->display(mediaTimeUs);
+ }
+
mRenderer->queueBuffer(audio, buffer, reply);
}
+void NuPlayer::updateVideoSize(
+ const sp<AMessage> &inputFormat,
+ const sp<AMessage> &outputFormat) {
+ if (inputFormat == NULL) {
+ ALOGW("Unknown video size, reporting 0x0!");
+ notifyListener(MEDIA_SET_VIDEO_SIZE, 0, 0);
+ return;
+ }
+
+ int32_t displayWidth, displayHeight;
+ int32_t cropLeft, cropTop, cropRight, cropBottom;
+
+ if (outputFormat != NULL) {
+ int32_t width, height;
+ CHECK(outputFormat->findInt32("width", &width));
+ CHECK(outputFormat->findInt32("height", &height));
+
+ int32_t cropLeft, cropTop, cropRight, cropBottom;
+ CHECK(outputFormat->findRect(
+ "crop",
+ &cropLeft, &cropTop, &cropRight, &cropBottom));
+
+ displayWidth = cropRight - cropLeft + 1;
+ displayHeight = cropBottom - cropTop + 1;
+
+ ALOGV("Video output format changed to %d x %d "
+ "(crop: %d x %d @ (%d, %d))",
+ width, height,
+ displayWidth,
+ displayHeight,
+ cropLeft, cropTop);
+ } else {
+ CHECK(inputFormat->findInt32("width", &displayWidth));
+ CHECK(inputFormat->findInt32("height", &displayHeight));
+
+ ALOGV("Video input format %d x %d", displayWidth, displayHeight);
+ }
+
+ // Take into account sample aspect ratio if necessary:
+ int32_t sarWidth, sarHeight;
+ if (inputFormat->findInt32("sar-width", &sarWidth)
+ && inputFormat->findInt32("sar-height", &sarHeight)) {
+ ALOGV("Sample aspect ratio %d : %d", sarWidth, sarHeight);
+
+ displayWidth = (displayWidth * sarWidth) / sarHeight;
+
+ ALOGV("display dimensions %d x %d", displayWidth, displayHeight);
+ }
+
+ int32_t rotationDegrees;
+ if (!inputFormat->findInt32("rotation-degrees", &rotationDegrees)) {
+ rotationDegrees = 0;
+ }
+
+ if (rotationDegrees == 90 || rotationDegrees == 270) {
+ int32_t tmp = displayWidth;
+ displayWidth = displayHeight;
+ displayHeight = tmp;
+ }
+
+ notifyListener(
+ MEDIA_SET_VIDEO_SIZE,
+ displayWidth,
+ displayHeight);
+}
+
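updateVideoSize() above reduces to a small pure computation: the crop rectangle gives the display size, the sample aspect ratio scales the width, and a 90/270-degree rotation swaps the dimensions. The same arithmetic as a standalone helper, shown only for illustration rather than as a framework API:

// Pure-function version of the display-size computation above.
// e.g. computeDisplaySize(0, 0, 719, 479, 0, 0, 90) -> {480, 720}
#include <cstdint>

struct DisplaySize { int32_t width; int32_t height; };

static DisplaySize computeDisplaySize(
        int32_t cropLeft, int32_t cropTop, int32_t cropRight, int32_t cropBottom,
        int32_t sarWidth, int32_t sarHeight,      // pass 0,0 if unknown
        int32_t rotationDegrees) {
    int32_t w = cropRight - cropLeft + 1;
    int32_t h = cropBottom - cropTop + 1;

    if (sarWidth > 0 && sarHeight > 0) {
        w = (w * sarWidth) / sarHeight;           // apply sample aspect ratio
    }
    if (rotationDegrees == 90 || rotationDegrees == 270) {
        int32_t tmp = w; w = h; h = tmp;          // portrait/landscape swap
    }
    return { w, h };
}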
void NuPlayer::notifyListener(int msg, int ext1, int ext2, const Parcel *in) {
if (mDriver == NULL) {
return;
@@ -1129,63 +1566,72 @@ void NuPlayer::notifyListener(int msg, int ext1, int ext2, const Parcel *in) {
driver->notifyListener(msg, ext1, ext2, in);
}
-void NuPlayer::flushDecoder(bool audio, bool needShutdown) {
+void NuPlayer::flushDecoder(
+ bool audio, bool needShutdown, const sp<AMessage> &newFormat) {
ALOGV("[%s] flushDecoder needShutdown=%d",
audio ? "audio" : "video", needShutdown);
- if ((audio && mAudioDecoder == NULL) || (!audio && mVideoDecoder == NULL)) {
+ const sp<Decoder> &decoder = getDecoder(audio);
+ if (decoder == NULL) {
ALOGI("flushDecoder %s without decoder present",
audio ? "audio" : "video");
+ return;
}
// Make sure we don't continue to scan sources until we finish flushing.
++mScanSourcesGeneration;
mScanSourcesPending = false;
- (audio ? mAudioDecoder : mVideoDecoder)->signalFlush();
+ decoder->signalFlush(newFormat);
mRenderer->flush(audio);
FlushStatus newStatus =
needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER;
+ mFlushComplete[audio][false /* isDecoder */] = false;
+ mFlushComplete[audio][true /* isDecoder */] = false;
if (audio) {
- CHECK(mFlushingAudio == NONE
- || mFlushingAudio == AWAITING_DISCONTINUITY);
-
+ ALOGE_IF(mFlushingAudio != NONE,
+ "audio flushDecoder() is called in state %d", mFlushingAudio);
mFlushingAudio = newStatus;
-
- if (mFlushingVideo == NONE) {
- mFlushingVideo = (mVideoDecoder != NULL)
- ? AWAITING_DISCONTINUITY
- : FLUSHED;
- }
} else {
- CHECK(mFlushingVideo == NONE
- || mFlushingVideo == AWAITING_DISCONTINUITY);
-
+ ALOGE_IF(mFlushingVideo != NONE,
+ "video flushDecoder() is called in state %d", mFlushingVideo);
mFlushingVideo = newStatus;
- if (mFlushingAudio == NONE) {
- mFlushingAudio = (mAudioDecoder != NULL)
- ? AWAITING_DISCONTINUITY
- : FLUSHED;
+ if (mCCDecoder != NULL) {
+ mCCDecoder->flush();
}
}
}
-sp<AMessage> NuPlayer::Source::getFormat(bool audio) {
- sp<MetaData> meta = getFormatMeta(audio);
+void NuPlayer::updateDecoderFormatWithoutFlush(
+ bool audio, const sp<AMessage> &format) {
+ ALOGV("[%s] updateDecoderFormatWithoutFlush", audio ? "audio" : "video");
- if (meta == NULL) {
- return NULL;
+ const sp<Decoder> &decoder = getDecoder(audio);
+ if (decoder == NULL) {
+ ALOGI("updateDecoderFormatWithoutFlush %s without decoder present",
+ audio ? "audio" : "video");
+ return;
}
- sp<AMessage> msg = new AMessage;
+ decoder->signalUpdateFormat(format);
+}
- if(convertMetaDataToMessage(meta, &msg) == OK) {
- return msg;
- }
- return NULL;
+void NuPlayer::queueDecoderShutdown(
+ bool audio, bool video, const sp<AMessage> &reply) {
+ ALOGI("queueDecoderShutdown audio=%d, video=%d", audio, video);
+
+ mDeferredActions.push_back(
+ new ShutdownDecoderAction(audio, video));
+
+ mDeferredActions.push_back(
+ new SimpleAction(&NuPlayer::performScanSources));
+
+ mDeferredActions.push_back(new PostMessageAction(reply));
+
+ processDeferredActions();
}
status_t NuPlayer::setVideoScalingMode(int32_t mode) {
@@ -1211,6 +1657,19 @@ status_t NuPlayer::getTrackInfo(Parcel* reply) const {
return err;
}
+status_t NuPlayer::getSelectedTrack(int32_t type, Parcel* reply) const {
+ sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, id());
+ msg->setPointer("reply", reply);
+ msg->setInt32("type", type);
+
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ }
+ return err;
+}
+
status_t NuPlayer::selectTrack(size_t trackIndex, bool select) {
sp<AMessage> msg = new AMessage(kWhatSelectTrack, id());
msg->setSize("trackIndex", trackIndex);
@@ -1219,9 +1678,35 @@ status_t NuPlayer::selectTrack(size_t trackIndex, bool select) {
sp<AMessage> response;
status_t err = msg->postAndAwaitResponse(&response);
+ if (err != OK) {
+ return err;
+ }
+
+ if (!response->findInt32("err", &err)) {
+ err = OK;
+ }
+
return err;
}
+status_t NuPlayer::getCurrentPosition(int64_t *mediaUs) {
+ sp<Renderer> renderer = mRenderer;
+ if (renderer == NULL) {
+ return NO_INIT;
+ }
+
+ return renderer->getCurrentPosition(mediaUs);
+}
+
+void NuPlayer::getStats(int64_t *numFramesTotal, int64_t *numFramesDropped) {
+ *numFramesTotal = mNumFramesTotal;
+ *numFramesDropped = mNumFramesDropped;
+}
+
+sp<MetaData> NuPlayer::getFileMeta() {
+ return mSource->getFileFormatMeta();
+}
+
void NuPlayer::schedulePollDuration() {
sp<AMessage> msg = new AMessage(kWhatPollDuration, id());
msg->setInt32("generation", mPollDurationGeneration);
@@ -1238,18 +1723,6 @@ void NuPlayer::processDeferredActions() {
// an intermediate state, i.e. one or more decoders are currently
// flushing or shutting down.
- if (mRenderer != NULL) {
- // There's an edge case where the renderer owns all output
- // buffers and is paused, therefore the decoder will not read
- // more input data and will never encounter the matching
- // discontinuity. To avoid this, we resume the renderer.
-
- if (mFlushingAudio == AWAITING_DISCONTINUITY
- || mFlushingVideo == AWAITING_DISCONTINUITY) {
- mRenderer->resume();
- }
- }
-
if (mFlushingAudio != NONE || mFlushingVideo != NONE) {
// We're currently flushing, postpone the reset until that's
// completed.
@@ -1267,18 +1740,29 @@ void NuPlayer::processDeferredActions() {
}
}
-void NuPlayer::performSeek(int64_t seekTimeUs) {
- ALOGV("performSeek seekTimeUs=%lld us (%.2f secs)",
+void NuPlayer::performSeek(int64_t seekTimeUs, bool needNotify) {
+ ALOGV("performSeek seekTimeUs=%lld us (%.2f secs), needNotify(%d)",
seekTimeUs,
- seekTimeUs / 1E6);
-
+ seekTimeUs / 1E6,
+ needNotify);
+
+ if (mSource == NULL) {
+ // This happens when reset occurs right before the loop mode
+ // asynchronously seeks to the start of the stream.
+ LOG_ALWAYS_FATAL_IF(mAudioDecoder != NULL || mVideoDecoder != NULL,
+ "mSource is NULL and decoders not NULL audio(%p) video(%p)",
+ mAudioDecoder.get(), mVideoDecoder.get());
+ return;
+ }
mSource->seekTo(seekTimeUs);
+ ++mTimedTextGeneration;
if (mDriver != NULL) {
sp<NuPlayerDriver> driver = mDriver.promote();
if (driver != NULL) {
- driver->notifyPosition(seekTimeUs);
- driver->notifySeekComplete();
+ if (needNotify) {
+ driver->notifySeekComplete();
+ }
}
}
@@ -1313,14 +1797,6 @@ void NuPlayer::performDecoderShutdown(bool audio, bool video) {
mTimeDiscontinuityPending = true;
- if (mFlushingAudio == NONE && (!audio || mAudioDecoder == NULL)) {
- mFlushingAudio = FLUSHED;
- }
-
- if (mFlushingVideo == NONE && (!video || mVideoDecoder == NULL)) {
- mFlushingVideo = FLUSHED;
- }
-
if (audio && mAudioDecoder != NULL) {
flushDecoder(true /* audio */, true /* needShutdown */);
}
@@ -1341,13 +1817,19 @@ void NuPlayer::performReset() {
++mScanSourcesGeneration;
mScanSourcesPending = false;
+ if (mRendererLooper != NULL) {
+ if (mRenderer != NULL) {
+ mRendererLooper->unregisterHandler(mRenderer->id());
+ }
+ mRendererLooper->stop();
+ mRendererLooper.clear();
+ }
mRenderer.clear();
+ ++mRendererGeneration;
if (mSource != NULL) {
mSource->stop();
- looper()->unregisterHandler(mSource->id());
-
mSource.clear();
}
@@ -1408,16 +1890,15 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
sp<NuPlayerDriver> driver = mDriver.promote();
if (driver != NULL) {
- driver->notifyPrepareCompleted(err);
- }
-
- int64_t durationUs;
- if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
- sp<NuPlayerDriver> driver = mDriver.promote();
- if (driver != NULL) {
+ // Notify the duration first, so that it's definitely set when
+ // the app receives the "prepare complete" callback.
+ int64_t durationUs;
+ if (mSource->getDuration(&durationUs) == OK) {
driver->notifyDuration(durationUs);
}
+ driver->notifyPrepareCompleted(err);
}
+
break;
}
@@ -1446,11 +1927,19 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
case Source::kWhatVideoSizeChanged:
{
- int32_t width, height;
- CHECK(msg->findInt32("width", &width));
- CHECK(msg->findInt32("height", &height));
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
- notifyListener(MEDIA_SET_VIDEO_SIZE, width, height);
+ updateVideoSize(format);
+ break;
+ }
+
+ case Source::kWhatBufferingUpdate:
+ {
+ int32_t percentage;
+ CHECK(msg->findInt32("percentage", &percentage));
+
+ notifyListener(MEDIA_BUFFERING_UPDATE, percentage, 0);
break;
}
@@ -1471,21 +1960,40 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
sp<ABuffer> buffer;
CHECK(msg->findBuffer("buffer", &buffer));
- int32_t trackIndex;
- int64_t timeUs, durationUs;
- CHECK(buffer->meta()->findInt32("trackIndex", &trackIndex));
- CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
- CHECK(buffer->meta()->findInt64("durationUs", &durationUs));
+ sendSubtitleData(buffer, 0 /* baseIndex */);
+ break;
+ }
+
+ case Source::kWhatTimedTextData:
+ {
+ int32_t generation;
+ if (msg->findInt32("generation", &generation)
+ && generation != mTimedTextGeneration) {
+ break;
+ }
+
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
- Parcel in;
- in.writeInt32(trackIndex);
- in.writeInt64(timeUs);
- in.writeInt64(durationUs);
- in.writeInt32(buffer->size());
- in.writeInt32(buffer->size());
- in.write(buffer->data(), buffer->size());
+ sp<NuPlayerDriver> driver = mDriver.promote();
+ if (driver == NULL) {
+ break;
+ }
- notifyListener(MEDIA_SUBTITLE_DATA, 0, 0, &in);
+ int posMs;
+ int64_t timeUs, posUs;
+ driver->getCurrentPosition(&posMs);
+ posUs = posMs * 1000;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+ if (posUs < timeUs) {
+ if (!msg->findInt32("generation", &generation)) {
+ msg->setInt32("generation", mTimedTextGeneration);
+ }
+ msg->post(timeUs - posUs);
+ } else {
+ sendTimedTextData(buffer);
+ }
break;
}
@@ -1502,13 +2010,112 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
break;
}
+ case Source::kWhatDrmNoLicense:
+ {
+ notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_DRM_NO_LICENSE);
+ break;
+ }
+
default:
TRESPASS();
}
}
+void NuPlayer::onClosedCaptionNotify(const sp<AMessage> &msg) {
+ int32_t what;
+ CHECK(msg->findInt32("what", &what));
+
+ switch (what) {
+ case NuPlayer::CCDecoder::kWhatClosedCaptionData:
+ {
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
+
+ size_t inbandTracks = 0;
+ if (mSource != NULL) {
+ inbandTracks = mSource->getTrackCount();
+ }
+
+ sendSubtitleData(buffer, inbandTracks);
+ break;
+ }
+
+ case NuPlayer::CCDecoder::kWhatTrackAdded:
+ {
+ notifyListener(MEDIA_INFO, MEDIA_INFO_METADATA_UPDATE, 0);
+
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+
+
+}
+
+void NuPlayer::sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex) {
+ int32_t trackIndex;
+ int64_t timeUs, durationUs;
+ CHECK(buffer->meta()->findInt32("trackIndex", &trackIndex));
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+ CHECK(buffer->meta()->findInt64("durationUs", &durationUs));
+
+ Parcel in;
+ in.writeInt32(trackIndex + baseIndex);
+ in.writeInt64(timeUs);
+ in.writeInt64(durationUs);
+ in.writeInt32(buffer->size());
+ in.writeInt32(buffer->size());
+ in.write(buffer->data(), buffer->size());
+
+ notifyListener(MEDIA_SUBTITLE_DATA, 0, 0, &in);
+}
+
+void NuPlayer::sendTimedTextData(const sp<ABuffer> &buffer) {
+ const void *data;
+ size_t size = 0;
+ int64_t timeUs;
+ int32_t flag = TextDescriptions::LOCAL_DESCRIPTIONS;
+
+ AString mime;
+ CHECK(buffer->meta()->findString("mime", &mime));
+ CHECK(strcasecmp(mime.c_str(), MEDIA_MIMETYPE_TEXT_3GPP) == 0);
+
+ data = buffer->data();
+ size = buffer->size();
+
+ Parcel parcel;
+ if (size > 0) {
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+ flag |= TextDescriptions::IN_BAND_TEXT_3GPP;
+ TextDescriptions::getParcelOfDescriptions(
+ (const uint8_t *)data, size, flag, timeUs / 1000, &parcel);
+ }
+
+ if ((parcel.dataSize() > 0)) {
+ notifyListener(MEDIA_TIMED_TEXT, 0, 0, &parcel);
+ } else { // send an empty timed text
+ notifyListener(MEDIA_TIMED_TEXT, 0, 0);
+ }
+}
////////////////////////////////////////////////////////////////////////////////
+sp<AMessage> NuPlayer::Source::getFormat(bool audio) {
+ sp<MetaData> meta = getFormatMeta(audio);
+
+ if (meta == NULL) {
+ return NULL;
+ }
+
+ sp<AMessage> msg = new AMessage;
+
+ if (convertMetaDataToMessage(meta, &msg) == OK) {
+ return msg;
+ }
+ return NULL;
+}
+
void NuPlayer::Source::notifyFlagsChanged(uint32_t flags) {
sp<AMessage> notify = dupNotify();
notify->setInt32("what", kWhatFlagsChanged);
@@ -1516,11 +2123,10 @@ void NuPlayer::Source::notifyFlagsChanged(uint32_t flags) {
notify->post();
}
-void NuPlayer::Source::notifyVideoSizeChanged(int32_t width, int32_t height) {
+void NuPlayer::Source::notifyVideoSizeChanged(const sp<AMessage> &format) {
sp<AMessage> notify = dupNotify();
notify->setInt32("what", kWhatVideoSizeChanged);
- notify->setInt32("width", width);
- notify->setInt32("height", height);
+ notify->setMessage("format", format);
notify->post();
}
@@ -1531,23 +2137,8 @@ void NuPlayer::Source::notifyPrepared(status_t err) {
notify->post();
}
-void NuPlayer::Source::onMessageReceived(const sp<AMessage> &msg) {
+void NuPlayer::Source::onMessageReceived(const sp<AMessage> & /* msg */) {
TRESPASS();
}
-void NuPlayer::queueDecoderShutdown(
- bool audio, bool video, const sp<AMessage> &reply) {
- ALOGI("queueDecoderShutdown audio=%d, video=%d", audio, video);
-
- mDeferredActions.push_back(
- new ShutdownDecoderAction(audio, video));
-
- mDeferredActions.push_back(
- new SimpleAction(&NuPlayer::performScanSources));
-
- mDeferredActions.push_back(new PostMessageAction(reply));
-
- processDeferredActions();
-}
-
} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 590e1f2..d6120d2 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -24,7 +24,8 @@
namespace android {
-struct ACodec;
+struct ABuffer;
+struct AMessage;
struct MetaData;
struct NuPlayerDriver;
@@ -38,7 +39,9 @@ struct NuPlayer : public AHandler {
void setDataSourceAsync(const sp<IStreamSource> &source);
void setDataSourceAsync(
- const char *url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers);
void setDataSourceAsync(int fd, int64_t offset, int64_t length);
@@ -56,12 +59,20 @@ struct NuPlayer : public AHandler {
// Will notify the driver through "notifyResetComplete" once finished.
void resetAsync();
- // Will notify the driver through "notifySeekComplete" once finished.
- void seekToAsync(int64_t seekTimeUs);
+ // Will notify the driver through "notifySeekComplete" once finished
+ // and needNotify is true.
+ void seekToAsync(int64_t seekTimeUs, bool needNotify = false);
status_t setVideoScalingMode(int32_t mode);
status_t getTrackInfo(Parcel* reply) const;
+ status_t getSelectedTrack(int32_t type, Parcel* reply) const;
status_t selectTrack(size_t trackIndex, bool select);
+ status_t getCurrentPosition(int64_t *mediaUs);
+ void getStats(int64_t *mNumFramesTotal, int64_t *mNumFramesDropped);
+
+ sp<MetaData> getFileMeta();
+
+ static const size_t kAggregateBufferSizeBytes;
protected:
virtual ~NuPlayer();
@@ -74,6 +85,8 @@ public:
private:
struct Decoder;
+ struct DecoderPassThrough;
+ struct CCDecoder;
struct GenericSource;
struct HTTPLiveSource;
struct Renderer;
@@ -96,6 +109,7 @@ private:
kWhatScanSources = 'scan',
kWhatVideoNotify = 'vidN',
kWhatAudioNotify = 'audN',
+ kWhatClosedCaptionNotify = 'capN',
kWhatRendererNotify = 'renN',
kWhatReset = 'rset',
kWhatSeek = 'seek',
@@ -104,6 +118,7 @@ private:
kWhatPollDuration = 'polD',
kWhatSourceNotify = 'srcN',
kWhatGetTrackInfo = 'gTrI',
+ kWhatGetSelectedTrack = 'gSel',
kWhatSelectTrack = 'selT',
};
@@ -116,9 +131,14 @@ private:
sp<MediaPlayerBase::AudioSink> mAudioSink;
sp<Decoder> mVideoDecoder;
bool mVideoIsAVC;
- bool mNeedsSwRenderer;
+ bool mOffloadAudio;
sp<Decoder> mAudioDecoder;
+ sp<CCDecoder> mCCDecoder;
sp<Renderer> mRenderer;
+ sp<ALooper> mRendererLooper;
+ int32_t mAudioDecoderGeneration;
+ int32_t mVideoDecoderGeneration;
+ int32_t mRendererGeneration;
List<sp<Action> > mDeferredActions;
@@ -129,10 +149,10 @@ private:
int32_t mScanSourcesGeneration;
int32_t mPollDurationGeneration;
+ int32_t mTimedTextGeneration;
enum FlushStatus {
NONE,
- AWAITING_DISCONTINUITY,
FLUSHING_DECODER,
FLUSHING_DECODER_SHUTDOWN,
SHUTTING_DOWN_DECODER,
@@ -144,31 +164,60 @@ private:
// notion of time has changed.
bool mTimeDiscontinuityPending;
+ // Status of flush responses from the decoder and renderer.
+ bool mFlushComplete[2][2];
+
+ // Used by feedDecoderInputData to aggregate small buffers into
+ // one large buffer.
+ sp<ABuffer> mPendingAudioAccessUnit;
+ status_t mPendingAudioErr;
+ sp<ABuffer> mAggregateBuffer;
+
FlushStatus mFlushingAudio;
FlushStatus mFlushingVideo;
int64_t mSkipRenderingAudioUntilMediaTimeUs;
int64_t mSkipRenderingVideoUntilMediaTimeUs;
- int64_t mVideoLateByUs;
int64_t mNumFramesTotal, mNumFramesDropped;
int32_t mVideoScalingMode;
bool mStarted;
+ inline const sp<Decoder> &getDecoder(bool audio) {
+ return audio ? mAudioDecoder : mVideoDecoder;
+ }
+
+ inline void clearFlushComplete() {
+ mFlushComplete[0][0] = false;
+ mFlushComplete[0][1] = false;
+ mFlushComplete[1][0] = false;
+ mFlushComplete[1][1] = false;
+ }
+
+ void openAudioSink(const sp<AMessage> &format, bool offloadOnly);
+ void closeAudioSink();
+
status_t instantiateDecoder(bool audio, sp<Decoder> *decoder);
+ void updateVideoSize(
+ const sp<AMessage> &inputFormat,
+ const sp<AMessage> &outputFormat = NULL);
+
status_t feedDecoderInputData(bool audio, const sp<AMessage> &msg);
void renderBuffer(bool audio, const sp<AMessage> &msg);
void notifyListener(int msg, int ext1, int ext2, const Parcel *in = NULL);
+ void handleFlushComplete(bool audio, bool isDecoder);
void finishFlushIfPossible();
- void flushDecoder(bool audio, bool needShutdown);
+ bool audioDecoderStillNeeded();
- static bool IsFlushingState(FlushStatus state, bool *needShutdown = NULL);
+ void flushDecoder(
+ bool audio, bool needShutdown, const sp<AMessage> &newFormat = NULL);
+ void updateDecoderFormatWithoutFlush(bool audio, const sp<AMessage> &format);
void postScanSources();
@@ -177,7 +226,7 @@ private:
void processDeferredActions();
- void performSeek(int64_t seekTimeUs);
+ void performSeek(int64_t seekTimeUs, bool needNotify);
void performDecoderFlush();
void performDecoderShutdown(bool audio, bool video);
void performReset();
@@ -185,10 +234,16 @@ private:
void performSetSurface(const sp<NativeWindowWrapper> &wrapper);
void onSourceNotify(const sp<AMessage> &msg);
+ void onClosedCaptionNotify(const sp<AMessage> &msg);
void queueDecoderShutdown(
bool audio, bool video, const sp<AMessage> &reply);
+ void sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex);
+ void sendTimedTextData(const sp<ABuffer> &buffer);
+
+ void writeTrackInfo(Parcel* reply, const sp<AMessage> format) const;
+
DISALLOW_EVIL_CONSTRUCTORS(NuPlayer);
};
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 2423fd5..27f6131 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -17,14 +17,19 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerDecoder"
#include <utils/Log.h>
+#include <inttypes.h>
#include "NuPlayerDecoder.h"
+#include <media/ICrypto.h>
+#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
namespace android {
@@ -32,120 +37,674 @@ NuPlayer::Decoder::Decoder(
const sp<AMessage> &notify,
const sp<NativeWindowWrapper> &nativeWindow)
: mNotify(notify),
- mNativeWindow(nativeWindow) {
+ mNativeWindow(nativeWindow),
+ mBufferGeneration(0),
+ mPaused(true),
+ mComponentName("decoder") {
+ // Every decoder has its own looper because MediaCodec operations
+ // are blocking, but NuPlayer needs asynchronous operations.
+ mDecoderLooper = new ALooper;
+ mDecoderLooper->setName("NPDecoder");
+ mDecoderLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+
+ mCodecLooper = new ALooper;
+ mCodecLooper->setName("NPDecoder-CL");
+ mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
}
NuPlayer::Decoder::~Decoder() {
+ mDecoderLooper->unregisterHandler(id());
+ mDecoderLooper->stop();
+
+ releaseAndResetMediaBuffers();
}
-void NuPlayer::Decoder::configure(const sp<AMessage> &format) {
+static
+status_t PostAndAwaitResponse(
+ const sp<AMessage> &msg, sp<AMessage> *response) {
+ status_t err = msg->postAndAwaitResponse(response);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!(*response)->findInt32("err", &err)) {
+ err = OK;
+ }
+
+ return err;
+}
+
+void NuPlayer::Decoder::rememberCodecSpecificData(const sp<AMessage> &format) {
+ mCSDsForCurrentFormat.clear();
+ for (int32_t i = 0; ; ++i) {
+ AString tag = "csd-";
+ tag.append(i);
+ sp<ABuffer> buffer;
+ if (!format->findBuffer(tag.c_str(), &buffer)) {
+ break;
+ }
+ mCSDsForCurrentFormat.push(buffer);
+ }
+}
+
+void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {
CHECK(mCodec == NULL);
+ ++mBufferGeneration;
+
AString mime;
CHECK(format->findString("mime", &mime));
- sp<AMessage> notifyMsg =
- new AMessage(kWhatCodecNotify, id());
+ sp<Surface> surface = NULL;
+ if (mNativeWindow != NULL) {
+ surface = mNativeWindow->getSurfaceTextureClient();
+ }
- mCSDIndex = 0;
- for (size_t i = 0;; ++i) {
- sp<ABuffer> csd;
- if (!format->findBuffer(StringPrintf("csd-%d", i).c_str(), &csd)) {
- break;
- }
+ mComponentName = mime;
+ mComponentName.append(" decoder");
+ ALOGV("[%s] onConfigure (surface=%p)", mComponentName.c_str(), surface.get());
- mCSD.push(csd);
+ mCodec = MediaCodec::CreateByType(mCodecLooper, mime.c_str(), false /* encoder */);
+ int32_t secure = 0;
+ if (format->findInt32("secure", &secure) && secure != 0) {
+ if (mCodec != NULL) {
+ mCodec->getName(&mComponentName);
+ mComponentName.append(".secure");
+ mCodec->release();
+ ALOGI("[%s] creating", mComponentName.c_str());
+ mCodec = MediaCodec::CreateByComponentName(
+ mCodecLooper, mComponentName.c_str());
+ }
+ }
+ if (mCodec == NULL) {
+ ALOGE("Failed to create %s%s decoder",
+ (secure ? "secure " : ""), mime.c_str());
+ handleError(UNKNOWN_ERROR);
+ return;
}
+ mCodec->getName(&mComponentName);
+
+ status_t err;
if (mNativeWindow != NULL) {
- format->setObject("native-window", mNativeWindow);
+ // disconnect from surface as MediaCodec will reconnect
+ err = native_window_api_disconnect(
+ surface.get(), NATIVE_WINDOW_API_MEDIA);
+ // We treat this as a warning, as this is a preparatory step.
+ // Codec will try to connect to the surface, which is where
+ // any error signaling will occur.
+ ALOGW_IF(err != OK, "failed to disconnect from surface: %d", err);
+ }
+ err = mCodec->configure(
+ format, surface, NULL /* crypto */, 0 /* flags */);
+ if (err != OK) {
+ ALOGE("Failed to configure %s decoder (err=%d)", mComponentName.c_str(), err);
+ mCodec->release();
+ mCodec.clear();
+ handleError(err);
+ return;
+ }
+ rememberCodecSpecificData(format);
+
+ // the following should work in configured state
+ CHECK_EQ((status_t)OK, mCodec->getOutputFormat(&mOutputFormat));
+ CHECK_EQ((status_t)OK, mCodec->getInputFormat(&mInputFormat));
+
+ err = mCodec->start();
+ if (err != OK) {
+ ALOGE("Failed to start %s decoder (err=%d)", mComponentName.c_str(), err);
+ mCodec->release();
+ mCodec.clear();
+ handleError(err);
+ return;
}
- // Current video decoders do not return from OMX_FillThisBuffer
- // quickly, violating the OpenMAX specs, until that is remedied
- // we need to invest in an extra looper to free the main event
- // queue.
- bool needDedicatedLooper = !strncasecmp(mime.c_str(), "video/", 6);
+ // the following should work after start
+ CHECK_EQ((status_t)OK, mCodec->getInputBuffers(&mInputBuffers));
+ releaseAndResetMediaBuffers();
+ CHECK_EQ((status_t)OK, mCodec->getOutputBuffers(&mOutputBuffers));
+ ALOGV("[%s] got %zu input and %zu output buffers",
+ mComponentName.c_str(),
+ mInputBuffers.size(),
+ mOutputBuffers.size());
- mFormat = format;
- mCodec = new ACodec;
+ requestCodecNotification();
+ mPaused = false;
+}
- if (needDedicatedLooper && mCodecLooper == NULL) {
- mCodecLooper = new ALooper;
- mCodecLooper->setName("NuPlayerDecoder");
- mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+void NuPlayer::Decoder::releaseAndResetMediaBuffers() {
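+    // Return any MediaBuffers still held back to the source, then resize the
+    // bookkeeping vectors to match the codec's current set of input buffers.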
+ for (size_t i = 0; i < mMediaBuffers.size(); i++) {
+ if (mMediaBuffers[i] != NULL) {
+ mMediaBuffers[i]->release();
+ mMediaBuffers.editItemAt(i) = NULL;
+ }
+ }
+ mMediaBuffers.resize(mInputBuffers.size());
+ for (size_t i = 0; i < mMediaBuffers.size(); i++) {
+ mMediaBuffers.editItemAt(i) = NULL;
+ }
+ mInputBufferIsDequeued.clear();
+ mInputBufferIsDequeued.resize(mInputBuffers.size());
+ for (size_t i = 0; i < mInputBufferIsDequeued.size(); i++) {
+ mInputBufferIsDequeued.editItemAt(i) = false;
}
- (needDedicatedLooper ? mCodecLooper : looper())->registerHandler(mCodec);
+ mPendingInputMessages.clear();
+}
- mCodec->setNotificationMessage(notifyMsg);
- mCodec->initiateSetup(format);
+void NuPlayer::Decoder::requestCodecNotification() {
+ if (mCodec != NULL) {
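+        // Ask MediaCodec to post kWhatCodecNotify when buffers become available;
+        // the generation lets stale notifications be ignored after a flush or shutdown.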
+ sp<AMessage> reply = new AMessage(kWhatCodecNotify, id());
+ reply->setInt32("generation", mBufferGeneration);
+ mCodec->requestActivityNotification(reply);
+ }
}
-void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatCodecNotify:
- {
- int32_t what;
- CHECK(msg->findInt32("what", &what));
+bool NuPlayer::Decoder::isStaleReply(const sp<AMessage> &msg) {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ return generation != mBufferGeneration;
+}
- if (what == ACodec::kWhatFillThisBuffer) {
- onFillThisBuffer(msg);
- } else {
- sp<AMessage> notify = mNotify->dup();
- notify->setMessage("codec-request", msg);
- notify->post();
- }
- break;
+void NuPlayer::Decoder::init() {
+ mDecoderLooper->registerHandler(this);
+}
+
+void NuPlayer::Decoder::configure(const sp<AMessage> &format) {
+ sp<AMessage> msg = new AMessage(kWhatConfigure, id());
+ msg->setMessage("format", format);
+ msg->post();
+}
+
+void NuPlayer::Decoder::signalUpdateFormat(const sp<AMessage> &format) {
+ sp<AMessage> msg = new AMessage(kWhatUpdateFormat, id());
+ msg->setMessage("format", format);
+ msg->post();
+}
+
+status_t NuPlayer::Decoder::getInputBuffers(Vector<sp<ABuffer> > *buffers) const {
+ sp<AMessage> msg = new AMessage(kWhatGetInputBuffers, id());
+ msg->setPointer("buffers", buffers);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+void NuPlayer::Decoder::handleError(int32_t err)
+{
+ // We cannot immediately release the codec due to buffers still outstanding
+ // in the renderer. We signal to the player the error so it can shutdown/release the
+ // decoder after flushing and increment the generation to discard unnecessary messages.
+
+ ++mBufferGeneration;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatError);
+ notify->setInt32("err", err);
+ notify->post();
+}
+
+bool NuPlayer::Decoder::handleAnInputBuffer() {
+ size_t bufferIx = -1;
+ status_t res = mCodec->dequeueInputBuffer(&bufferIx);
+ ALOGV("[%s] dequeued input: %d",
+ mComponentName.c_str(), res == OK ? (int)bufferIx : res);
+ if (res != OK) {
+ if (res != -EAGAIN) {
+ ALOGE("Failed to dequeue input buffer for %s (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
}
+ return false;
+ }
- default:
- TRESPASS();
+ CHECK_LT(bufferIx, mInputBuffers.size());
+
+ if (mMediaBuffers[bufferIx] != NULL) {
+ mMediaBuffers[bufferIx]->release();
+ mMediaBuffers.editItemAt(bufferIx) = NULL;
+ }
+ mInputBufferIsDequeued.editItemAt(bufferIx) = true;
+
+ sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, id());
+ reply->setSize("buffer-ix", bufferIx);
+ reply->setInt32("generation", mBufferGeneration);
+
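+    // Codec-specific data queued up by a flush takes priority over regular input;
+    // submit it directly instead of asking NuPlayer for more data.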
+ if (!mCSDsToSubmit.isEmpty()) {
+ sp<ABuffer> buffer = mCSDsToSubmit.itemAt(0);
+ ALOGI("[%s] resubmitting CSD", mComponentName.c_str());
+ reply->setBuffer("buffer", buffer);
+ mCSDsToSubmit.removeAt(0);
+ CHECK(onInputBufferFilled(reply));
+ return true;
+ }
+
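+    // Drain input messages that arrived while no codec buffer was available;
+    // stop at the first one that still cannot be queued.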
+ while (!mPendingInputMessages.empty()) {
+ sp<AMessage> msg = *mPendingInputMessages.begin();
+ if (!onInputBufferFilled(msg)) {
break;
+ }
+ mPendingInputMessages.erase(mPendingInputMessages.begin());
}
+
+ if (!mInputBufferIsDequeued.editItemAt(bufferIx)) {
+ return true;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatFillThisBuffer);
+ notify->setBuffer("buffer", mInputBuffers[bufferIx]);
+ notify->setMessage("reply", reply);
+ notify->post();
+ return true;
}
-void NuPlayer::Decoder::onFillThisBuffer(const sp<AMessage> &msg) {
- sp<AMessage> reply;
- CHECK(msg->findMessage("reply", &reply));
+bool NuPlayer::Decoder::onInputBufferFilled(const sp<AMessage> &msg) {
+ size_t bufferIx;
+ CHECK(msg->findSize("buffer-ix", &bufferIx));
+ CHECK_LT(bufferIx, mInputBuffers.size());
+ sp<ABuffer> codecBuffer = mInputBuffers[bufferIx];
-#if 0
- sp<ABuffer> outBuffer;
- CHECK(msg->findBuffer("buffer", &outBuffer));
-#else
- sp<ABuffer> outBuffer;
-#endif
+ sp<ABuffer> buffer;
+ bool hasBuffer = msg->findBuffer("buffer", &buffer);
- if (mCSDIndex < mCSD.size()) {
- outBuffer = mCSD.editItemAt(mCSDIndex++);
- outBuffer->meta()->setInt64("timeUs", 0);
+ // handle widevine classic source - that fills an arbitrary input buffer
+ MediaBuffer *mediaBuffer = NULL;
+ if (hasBuffer) {
+ mediaBuffer = (MediaBuffer *)(buffer->getMediaBufferBase());
+ if (mediaBuffer != NULL) {
+            // likely filled a different buffer than the one we requested; adjust the buffer index
+ size_t ix;
+ for (ix = 0; ix < mInputBuffers.size(); ix++) {
+ const sp<ABuffer> &buf = mInputBuffers[ix];
+ if (buf->data() == mediaBuffer->data()) {
+ // all input buffers are dequeued on start, hence the check
+ if (!mInputBufferIsDequeued[ix]) {
+ ALOGV("[%s] received MediaBuffer for #%zu instead of #%zu",
+ mComponentName.c_str(), ix, bufferIx);
+ mediaBuffer->release();
+ return false;
+ }
- reply->setBuffer("buffer", outBuffer);
- reply->post();
- return;
+ // TRICKY: need buffer for the metadata, so instead, set
+ // codecBuffer to the same (though incorrect) buffer to
+ // avoid a memcpy into the codecBuffer
+ codecBuffer = buffer;
+ codecBuffer->setRange(
+ mediaBuffer->range_offset(),
+ mediaBuffer->range_length());
+ bufferIx = ix;
+ break;
+ }
+ }
+ CHECK(ix < mInputBuffers.size());
+ }
+ }
+
+ if (buffer == NULL /* includes !hasBuffer */) {
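+        // A reply without a buffer signals end-of-stream or a source error;
+        // default to EOS when no explicit error is attached.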
+ int32_t streamErr = ERROR_END_OF_STREAM;
+ CHECK(msg->findInt32("err", &streamErr) || !hasBuffer);
+
+ if (streamErr == OK) {
+            /* no data and no error: the source just wants us to keep holding on to this input buffer */
+ return true;
+ }
+
+ // attempt to queue EOS
+ status_t err = mCodec->queueInputBuffer(
+ bufferIx,
+ 0,
+ 0,
+ 0,
+ MediaCodec::BUFFER_FLAG_EOS);
+ if (err == OK) {
+ mInputBufferIsDequeued.editItemAt(bufferIx) = false;
+ } else if (streamErr == ERROR_END_OF_STREAM) {
+ streamErr = err;
+ // err will not be ERROR_END_OF_STREAM
+ }
+
+ if (streamErr != ERROR_END_OF_STREAM) {
+ ALOGE("Stream error for %s (err=%d), EOS %s queued",
+ mComponentName.c_str(),
+ streamErr,
+ err == OK ? "successfully" : "unsuccessfully");
+ handleError(streamErr);
+ }
+ } else {
+ int64_t timeUs = 0;
+ uint32_t flags = 0;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+ int32_t eos, csd;
+ // we do not expect SYNCFRAME for decoder
+ if (buffer->meta()->findInt32("eos", &eos) && eos) {
+ flags |= MediaCodec::BUFFER_FLAG_EOS;
+ } else if (buffer->meta()->findInt32("csd", &csd) && csd) {
+ flags |= MediaCodec::BUFFER_FLAG_CODECCONFIG;
+ }
+
+ // copy into codec buffer
+ if (buffer != codecBuffer) {
+ CHECK_LE(buffer->size(), codecBuffer->capacity());
+ codecBuffer->setRange(0, buffer->size());
+ memcpy(codecBuffer->data(), buffer->data(), buffer->size());
+ }
+
+ status_t err = mCodec->queueInputBuffer(
+ bufferIx,
+ codecBuffer->offset(),
+ codecBuffer->size(),
+ timeUs,
+ flags);
+ if (err != OK) {
+ if (mediaBuffer != NULL) {
+ mediaBuffer->release();
+ }
+ ALOGE("Failed to queue input buffer for %s (err=%d)",
+ mComponentName.c_str(), err);
+ handleError(err);
+ } else {
+ mInputBufferIsDequeued.editItemAt(bufferIx) = false;
+ if (mediaBuffer != NULL) {
+ CHECK(mMediaBuffers[bufferIx] == NULL);
+ mMediaBuffers.editItemAt(bufferIx) = mediaBuffer;
+ }
+ }
+ }
+ return true;
+}
+
+bool NuPlayer::Decoder::handleAnOutputBuffer() {
+ size_t bufferIx = -1;
+ size_t offset;
+ size_t size;
+ int64_t timeUs;
+ uint32_t flags;
+ status_t res = mCodec->dequeueOutputBuffer(
+ &bufferIx, &offset, &size, &timeUs, &flags);
+
+ if (res != OK) {
+ ALOGV("[%s] dequeued output: %d", mComponentName.c_str(), res);
+ } else {
+ ALOGV("[%s] dequeued output: %d (time=%lld flags=%" PRIu32 ")",
+ mComponentName.c_str(), (int)bufferIx, timeUs, flags);
+ }
+
+ if (res == INFO_OUTPUT_BUFFERS_CHANGED) {
+ res = mCodec->getOutputBuffers(&mOutputBuffers);
+ if (res != OK) {
+ ALOGE("Failed to get output buffers for %s after INFO event (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ return false;
+ }
+ // NuPlayer ignores this
+ return true;
+ } else if (res == INFO_FORMAT_CHANGED) {
+ sp<AMessage> format = new AMessage();
+ res = mCodec->getOutputFormat(&format);
+ if (res != OK) {
+ ALOGE("Failed to get output format for %s after INFO event (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ return false;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatOutputFormatChanged);
+ notify->setMessage("format", format);
+ notify->post();
+ return true;
+ } else if (res == INFO_DISCONTINUITY) {
+ // nothing to do
+ return true;
+ } else if (res != OK) {
+ if (res != -EAGAIN) {
+ ALOGE("Failed to dequeue output buffer for %s (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ }
+ return false;
}
+ CHECK_LT(bufferIx, mOutputBuffers.size());
+ sp<ABuffer> buffer = mOutputBuffers[bufferIx];
+ buffer->setRange(offset, size);
+ buffer->meta()->clear();
+ buffer->meta()->setInt64("timeUs", timeUs);
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ buffer->meta()->setInt32("eos", true);
+ }
+ // we do not expect CODECCONFIG or SYNCFRAME for decoder
+
+ sp<AMessage> reply = new AMessage(kWhatRenderBuffer, id());
+ reply->setSize("buffer-ix", bufferIx);
+ reply->setInt32("generation", mBufferGeneration);
+
sp<AMessage> notify = mNotify->dup();
- notify->setMessage("codec-request", msg);
+ notify->setInt32("what", kWhatDrainThisBuffer);
+ notify->setBuffer("buffer", buffer);
+ notify->setMessage("reply", reply);
notify->post();
+
+ // FIXME: This should be handled after rendering is complete,
+ // but Renderer needs it now
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ ALOGV("queueing eos [%s]", mComponentName.c_str());
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatEOS);
+ notify->setInt32("err", ERROR_END_OF_STREAM);
+ notify->post();
+ }
+ return true;
}
-void NuPlayer::Decoder::signalFlush() {
- if (mCodec != NULL) {
- mCodec->signalFlush();
+void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {
+ status_t err;
+ int32_t render;
+ size_t bufferIx;
+ CHECK(msg->findSize("buffer-ix", &bufferIx));
+ if (msg->findInt32("render", &render) && render) {
+ int64_t timestampNs;
+ CHECK(msg->findInt64("timestampNs", &timestampNs));
+ err = mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs);
+ } else {
+ err = mCodec->releaseOutputBuffer(bufferIx);
+ }
+ if (err != OK) {
+ ALOGE("failed to release output buffer for %s (err=%d)",
+ mComponentName.c_str(), err);
+ handleError(err);
}
}
-void NuPlayer::Decoder::signalResume() {
+void NuPlayer::Decoder::onFlush() {
+ status_t err = OK;
if (mCodec != NULL) {
- mCodec->signalResume();
+ err = mCodec->flush();
+ mCSDsToSubmit = mCSDsForCurrentFormat; // copy operator
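+        // re-queue the codec specific data so it is resubmitted ahead of any
+        // new input after the flush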
+ ++mBufferGeneration;
+ }
+
+ if (err != OK) {
+ ALOGE("failed to flush %s (err=%d)", mComponentName.c_str(), err);
+ handleError(err);
+        // even if the flush fails we still finish by posting kWhatFlushCompleted,
+        // and we still attempt to release the buffers below.
}
+ releaseAndResetMediaBuffers();
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatFlushCompleted);
+ notify->post();
+ mPaused = true;
}
-void NuPlayer::Decoder::initiateShutdown() {
+void NuPlayer::Decoder::onResume() {
+ mPaused = false;
+}
+
+void NuPlayer::Decoder::onShutdown() {
+ status_t err = OK;
if (mCodec != NULL) {
- mCodec->initiateShutdown();
+ err = mCodec->release();
+ mCodec = NULL;
+ ++mBufferGeneration;
+
+ if (mNativeWindow != NULL) {
+ // reconnect to surface as MediaCodec disconnected from it
+ status_t error =
+ native_window_api_connect(
+ mNativeWindow->getNativeWindow().get(),
+ NATIVE_WINDOW_API_MEDIA);
+ ALOGW_IF(error != NO_ERROR,
+ "[%s] failed to connect to native window, error=%d",
+ mComponentName.c_str(), error);
+ }
+ mComponentName = "decoder";
}
+
+ releaseAndResetMediaBuffers();
+
+ if (err != OK) {
+ ALOGE("failed to release %s (err=%d)", mComponentName.c_str(), err);
+ handleError(err);
+        // even if release fails we still finish by posting kWhatShutdownCompleted.
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatShutdownCompleted);
+ notify->post();
+ mPaused = true;
+}
+
+void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
+ ALOGV("[%s] onMessage: %s", mComponentName.c_str(), msg->debugString().c_str());
+
+ switch (msg->what()) {
+ case kWhatConfigure:
+ {
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
+ onConfigure(format);
+ break;
+ }
+
+ case kWhatUpdateFormat:
+ {
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
+ rememberCodecSpecificData(format);
+ break;
+ }
+
+ case kWhatGetInputBuffers:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ Vector<sp<ABuffer> > *dstBuffers;
+ CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
+
+ dstBuffers->clear();
+ for (size_t i = 0; i < mInputBuffers.size(); i++) {
+ dstBuffers->push(mInputBuffers[i]);
+ }
+
+ (new AMessage)->postReply(replyID);
+ break;
+ }
+
+ case kWhatCodecNotify:
+ {
+ if (!isStaleReply(msg)) {
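+                // The notification may hint how many buffers are available; with no
+                // hint, keep dequeuing until the codec reports none left. Input is
+                // only pulled while not paused.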
+ int32_t numInput, numOutput;
+
+ if (!msg->findInt32("input-buffers", &numInput)) {
+ numInput = INT32_MAX;
+ }
+
+ if (!msg->findInt32("output-buffers", &numOutput)) {
+ numOutput = INT32_MAX;
+ }
+
+ if (!mPaused) {
+ while (numInput-- > 0 && handleAnInputBuffer()) {}
+ }
+
+ while (numOutput-- > 0 && handleAnOutputBuffer()) {}
+ }
+
+ requestCodecNotification();
+ break;
+ }
+
+ case kWhatInputBufferFilled:
+ {
+ if (!isStaleReply(msg)) {
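+                // Preserve input ordering: if earlier input is still pending, or this
+                // buffer cannot be queued yet, park the message until a codec input
+                // buffer frees up.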
+ if (!mPendingInputMessages.empty()
+ || !onInputBufferFilled(msg)) {
+ mPendingInputMessages.push_back(msg);
+ }
+ }
+
+ break;
+ }
+
+ case kWhatRenderBuffer:
+ {
+ if (!isStaleReply(msg)) {
+ onRenderBuffer(msg);
+ }
+ break;
+ }
+
+ case kWhatFlush:
+ {
+ sp<AMessage> format;
+ if (msg->findMessage("new-format", &format)) {
+ rememberCodecSpecificData(format);
+ }
+ onFlush();
+ break;
+ }
+
+ case kWhatResume:
+ {
+ onResume();
+ break;
+ }
+
+ case kWhatShutdown:
+ {
+ onShutdown();
+ break;
+ }
+
+ default:
+ TRESPASS();
+ break;
+ }
+}
+
+void NuPlayer::Decoder::signalFlush(const sp<AMessage> &format) {
+ sp<AMessage> msg = new AMessage(kWhatFlush, id());
+ if (format != NULL) {
+ msg->setMessage("new-format", format);
+ }
+ msg->post();
+}
+
+void NuPlayer::Decoder::signalResume() {
+ (new AMessage(kWhatResume, id()))->post();
+}
+
+void NuPlayer::Decoder::initiateShutdown() {
+ (new AMessage(kWhatShutdown, id()))->post();
}
bool NuPlayer::Decoder::supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const {
@@ -163,14 +722,16 @@ bool NuPlayer::Decoder::supportsSeamlessAudioFormatChange(const sp<AMessage> &ta
const char * keys[] = { "channel-count", "sample-rate", "is-adts" };
for (unsigned int i = 0; i < sizeof(keys) / sizeof(keys[0]); i++) {
int32_t oldVal, newVal;
- if (!mFormat->findInt32(keys[i], &oldVal) || !targetFormat->findInt32(keys[i], &newVal)
- || oldVal != newVal) {
+ if (!mOutputFormat->findInt32(keys[i], &oldVal) ||
+ !targetFormat->findInt32(keys[i], &newVal) ||
+ oldVal != newVal) {
return false;
}
}
sp<ABuffer> oldBuf, newBuf;
- if (mFormat->findBuffer("csd-0", &oldBuf) && targetFormat->findBuffer("csd-0", &newBuf)) {
+ if (mOutputFormat->findBuffer("csd-0", &oldBuf) &&
+ targetFormat->findBuffer("csd-0", &newBuf)) {
if (oldBuf->size() != newBuf->size()) {
return false;
}
@@ -181,7 +742,7 @@ bool NuPlayer::Decoder::supportsSeamlessAudioFormatChange(const sp<AMessage> &ta
}
bool NuPlayer::Decoder::supportsSeamlessFormatChange(const sp<AMessage> &targetFormat) const {
- if (mFormat == NULL) {
+ if (mOutputFormat == NULL) {
return false;
}
@@ -190,7 +751,7 @@ bool NuPlayer::Decoder::supportsSeamlessFormatChange(const sp<AMessage> &targetF
}
AString oldMime, newMime;
- if (!mFormat->findString("mime", &oldMime)
+ if (!mOutputFormat->findString("mime", &oldMime)
|| !targetFormat->findString("mime", &newMime)
|| !(oldMime == newMime)) {
return false;
@@ -201,12 +762,343 @@ bool NuPlayer::Decoder::supportsSeamlessFormatChange(const sp<AMessage> &targetF
if (audio) {
seamless = supportsSeamlessAudioFormatChange(targetFormat);
} else {
- seamless = mCodec != NULL && mCodec->isConfiguredForAdaptivePlayback();
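+        // Video is seamless only if the codec advertised adaptive playback
+        // support in its input format.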
+ int32_t isAdaptive;
+ seamless = (mCodec != NULL &&
+ mInputFormat->findInt32("adaptive-playback", &isAdaptive) &&
+ isAdaptive);
}
ALOGV("%s seamless support for %s", seamless ? "yes" : "no", oldMime.c_str());
return seamless;
}
+struct CCData {
+ CCData(uint8_t type, uint8_t data1, uint8_t data2)
+ : mType(type), mData1(data1), mData2(data2) {
+ }
+ bool getChannel(size_t *channel) const {
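+        // CEA-608 control codes with a first byte of 0x10-0x17 address data channel 1,
+        // 0x18-0x1f data channel 2; the field (mType) selects CC1/CC2 vs CC3/CC4.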
+ if (mData1 >= 0x10 && mData1 <= 0x1f) {
+ *channel = (mData1 >= 0x18 ? 1 : 0) + (mType ? 2 : 0);
+ return true;
+ }
+ return false;
+ }
+
+ uint8_t mType;
+ uint8_t mData1;
+ uint8_t mData2;
+};
+
+static bool isNullPad(CCData *cc) {
+ return cc->mData1 < 0x10 && cc->mData2 < 0x10;
+}
+
+static void dumpBytePair(const sp<ABuffer> &ccBuf) {
+ size_t offset = 0;
+ AString out;
+
+ while (offset < ccBuf->size()) {
+ char tmp[128];
+
+ CCData *cc = (CCData *) (ccBuf->data() + offset);
+
+ if (isNullPad(cc)) {
+ // 1 null pad or XDS metadata, ignore
+ offset += sizeof(CCData);
+ continue;
+ }
+
+ if (cc->mData1 >= 0x20 && cc->mData1 <= 0x7f) {
+ // 2 basic chars
+ sprintf(tmp, "[%d]Basic: %c %c", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
+ && cc->mData2 >= 0x30 && cc->mData2 <= 0x3f) {
+ // 1 special char
+ sprintf(tmp, "[%d]Special: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x12 || cc->mData1 == 0x1A)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
+ // 1 Spanish/French char
+ sprintf(tmp, "[%d]Spanish: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x13 || cc->mData1 == 0x1B)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
+ // 1 Portuguese/German/Danish char
+ sprintf(tmp, "[%d]German: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f){
+ // Mid-Row Codes (Table 69)
+ sprintf(tmp, "[%d]Mid-row: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if (((cc->mData1 == 0x14 || cc->mData1 == 0x1c)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f)
+ ||
+ ((cc->mData1 == 0x17 || cc->mData1 == 0x1f)
+ && cc->mData2 >= 0x21 && cc->mData2 <= 0x23)){
+ // Misc Control Codes (Table 70)
+ sprintf(tmp, "[%d]Ctrl: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 & 0x70) == 0x10
+ && (cc->mData2 & 0x40) == 0x40
+ && ((cc->mData1 & 0x07) || !(cc->mData2 & 0x20)) ) {
+ // Preamble Address Codes (Table 71)
+ sprintf(tmp, "[%d]PAC: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else {
+ sprintf(tmp, "[%d]Invalid: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ }
+
+ if (out.size() > 0) {
+ out.append(", ");
+ }
+
+ out.append(tmp);
+
+ offset += sizeof(CCData);
+ }
+
+ ALOGI("%s", out.c_str());
+}
+
+NuPlayer::CCDecoder::CCDecoder(const sp<AMessage> &notify)
+ : mNotify(notify),
+ mCurrentChannel(0),
+ mSelectedTrack(-1) {
+ for (size_t i = 0; i < sizeof(mTrackIndices)/sizeof(mTrackIndices[0]); ++i) {
+ mTrackIndices[i] = -1;
+ }
+}
+
+size_t NuPlayer::CCDecoder::getTrackCount() const {
+ return mFoundChannels.size();
+}
+
+sp<AMessage> NuPlayer::CCDecoder::getTrackInfo(size_t index) const {
+ if (!isTrackValid(index)) {
+ return NULL;
+ }
+
+ sp<AMessage> format = new AMessage();
+
+ format->setInt32("type", MEDIA_TRACK_TYPE_SUBTITLE);
+ format->setString("language", "und");
+ format->setString("mime", MEDIA_MIMETYPE_TEXT_CEA_608);
+    // CC1: field 0, channel 0
+ bool isDefaultAuto = (mFoundChannels[index] == 0);
+ format->setInt32("auto", isDefaultAuto);
+ format->setInt32("default", isDefaultAuto);
+ format->setInt32("forced", 0);
+
+ return format;
+}
+
+status_t NuPlayer::CCDecoder::selectTrack(size_t index, bool select) {
+ if (!isTrackValid(index)) {
+ return BAD_VALUE;
+ }
+
+ if (select) {
+ if (mSelectedTrack == (ssize_t)index) {
+ ALOGE("track %zu already selected", index);
+ return BAD_VALUE;
+ }
+ ALOGV("selected track %zu", index);
+ mSelectedTrack = index;
+ } else {
+ if (mSelectedTrack != (ssize_t)index) {
+ ALOGE("track %zu is not selected", index);
+ return BAD_VALUE;
+ }
+ ALOGV("unselected track %zu", index);
+ mSelectedTrack = -1;
+ }
+
+ return OK;
+}
+
+bool NuPlayer::CCDecoder::isSelected() const {
+ return mSelectedTrack >= 0 && mSelectedTrack < (int32_t) getTrackCount();
+}
+
+bool NuPlayer::CCDecoder::isTrackValid(size_t index) const {
+ return index < getTrackCount();
+}
+
+int32_t NuPlayer::CCDecoder::getTrackIndex(size_t channel) const {
+ if (channel < sizeof(mTrackIndices)/sizeof(mTrackIndices[0])) {
+ return mTrackIndices[channel];
+ }
+ return -1;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) {
+ int64_t timeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+ sp<ABuffer> sei;
+ if (!accessUnit->meta()->findBuffer("sei", &sei) || sei == NULL) {
+ return false;
+ }
+
+ bool trackAdded = false;
+
+ NALBitReader br(sei->data() + 1, sei->size() - 1);
+ // sei_message()
+ while (br.atLeastNumBitsLeft(16)) { // at least 16-bit for sei_message()
+ uint32_t payload_type = 0;
+ size_t payload_size = 0;
+ uint8_t last_byte;
+
+ do {
+ last_byte = br.getBits(8);
+ payload_type += last_byte;
+ } while (last_byte == 0xFF);
+
+ do {
+ last_byte = br.getBits(8);
+ payload_size += last_byte;
+ } while (last_byte == 0xFF);
+
+ // sei_payload()
+ if (payload_type == 4) {
+ // user_data_registered_itu_t_t35()
+
+ // ATSC A/72: 6.4.2
+ uint8_t itu_t_t35_country_code = br.getBits(8);
+ uint16_t itu_t_t35_provider_code = br.getBits(16);
+ uint32_t user_identifier = br.getBits(32);
+ uint8_t user_data_type_code = br.getBits(8);
+
+ payload_size -= 1 + 2 + 4 + 1;
+
+ if (itu_t_t35_country_code == 0xB5
+ && itu_t_t35_provider_code == 0x0031
+ && user_identifier == 'GA94'
+ && user_data_type_code == 0x3) {
+ // MPEG_cc_data()
+ // ATSC A/53 Part 4: 6.2.3.1
+ br.skipBits(1); //process_em_data_flag
+ bool process_cc_data_flag = br.getBits(1);
+ br.skipBits(1); //additional_data_flag
+ size_t cc_count = br.getBits(5);
+ br.skipBits(8); // em_data;
+ payload_size -= 2;
+
+ if (process_cc_data_flag) {
+ AString out;
+
+ sp<ABuffer> ccBuf = new ABuffer(cc_count * sizeof(CCData));
+ ccBuf->setRange(0, 0);
+
+ for (size_t i = 0; i < cc_count; i++) {
+ uint8_t marker = br.getBits(5);
+ CHECK_EQ(marker, 0x1f);
+
+ bool cc_valid = br.getBits(1);
+ uint8_t cc_type = br.getBits(2);
+ // remove odd parity bit
+ uint8_t cc_data_1 = br.getBits(8) & 0x7f;
+ uint8_t cc_data_2 = br.getBits(8) & 0x7f;
+
+ if (cc_valid
+ && (cc_type == 0 || cc_type == 1)) {
+ CCData cc(cc_type, cc_data_1, cc_data_2);
+ if (!isNullPad(&cc)) {
+ size_t channel;
+ if (cc.getChannel(&channel) && getTrackIndex(channel) < 0) {
+ mTrackIndices[channel] = mFoundChannels.size();
+ mFoundChannels.push_back(channel);
+ trackAdded = true;
+ }
+ memcpy(ccBuf->data() + ccBuf->size(),
+ (void *)&cc, sizeof(cc));
+ ccBuf->setRange(0, ccBuf->size() + sizeof(CCData));
+ }
+ }
+ }
+ payload_size -= cc_count * 3;
+
+ mCCMap.add(timeUs, ccBuf);
+ break;
+ }
+ } else {
+ ALOGV("Malformed SEI payload type 4");
+ }
+ } else {
+ ALOGV("Unsupported SEI payload type %d", payload_type);
+ }
+
+ // skipping remaining bits of this payload
+ br.skipBits(payload_size * 8);
+ }
+
+ return trackAdded;
+}
+
+sp<ABuffer> NuPlayer::CCDecoder::filterCCBuf(
+ const sp<ABuffer> &ccBuf, size_t index) {
+ sp<ABuffer> filteredCCBuf = new ABuffer(ccBuf->size());
+ filteredCCBuf->setRange(0, 0);
+
+ size_t cc_count = ccBuf->size() / sizeof(CCData);
+ const CCData* cc_data = (const CCData*)ccBuf->data();
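+    // Control codes switch the current channel; keep only the byte pairs that
+    // belong to the channel backing the selected track.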
+ for (size_t i = 0; i < cc_count; ++i) {
+ size_t channel;
+ if (cc_data[i].getChannel(&channel)) {
+ mCurrentChannel = channel;
+ }
+ if (mCurrentChannel == mFoundChannels[index]) {
+ memcpy(filteredCCBuf->data() + filteredCCBuf->size(),
+ (void *)&cc_data[i], sizeof(CCData));
+ filteredCCBuf->setRange(0, filteredCCBuf->size() + sizeof(CCData));
+ }
+ }
+
+ return filteredCCBuf;
+}
+
+void NuPlayer::CCDecoder::decode(const sp<ABuffer> &accessUnit) {
+ if (extractFromSEI(accessUnit)) {
+ ALOGI("Found CEA-608 track");
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatTrackAdded);
+ msg->post();
+ }
+ // TODO: extract CC from other sources
+}
+
+void NuPlayer::CCDecoder::display(int64_t timeUs) {
+ if (!isTrackValid(mSelectedTrack)) {
+ ALOGE("Could not find current track(index=%d)", mSelectedTrack);
+ return;
+ }
+
+ ssize_t index = mCCMap.indexOfKey(timeUs);
+ if (index < 0) {
+ ALOGV("cc for timestamp %" PRId64 " not found", timeUs);
+ return;
+ }
+
+ sp<ABuffer> ccBuf = filterCCBuf(mCCMap.valueAt(index), mSelectedTrack);
+
+ if (ccBuf->size() > 0) {
+#if 0
+ dumpBytePair(ccBuf);
+#endif
+
+ ccBuf->meta()->setInt32("trackIndex", mSelectedTrack);
+ ccBuf->meta()->setInt64("timeUs", timeUs);
+ ccBuf->meta()->setInt64("durationUs", 0ll);
+
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatClosedCaptionData);
+ msg->setBuffer("buffer", ccBuf);
+ msg->post();
+ }
+
+    // remove this entry and all earlier entries
+ mCCMap.removeItemsAt(0, index + 1);
+}
+
+void NuPlayer::CCDecoder::flush() {
+ mCCMap.clear();
+}
+
} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
index 78ea74a..dba3eee 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
@@ -25,20 +25,36 @@
namespace android {
struct ABuffer;
+struct MediaCodec;
+struct MediaBuffer;
struct NuPlayer::Decoder : public AHandler {
Decoder(const sp<AMessage> &notify,
const sp<NativeWindowWrapper> &nativeWindow = NULL);
- void configure(const sp<AMessage> &format);
+ virtual void configure(const sp<AMessage> &format);
+ virtual void init();
- void signalFlush();
- void signalResume();
- void initiateShutdown();
+ status_t getInputBuffers(Vector<sp<ABuffer> > *dstBuffers) const;
+ virtual void signalFlush(const sp<AMessage> &format = NULL);
+ virtual void signalUpdateFormat(const sp<AMessage> &format);
+ virtual void signalResume();
+ virtual void initiateShutdown();
- bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
+ virtual bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
+
+ enum {
+ kWhatFillThisBuffer = 'flTB',
+ kWhatDrainThisBuffer = 'drTB',
+ kWhatOutputFormatChanged = 'fmtC',
+ kWhatFlushCompleted = 'flsC',
+ kWhatShutdownCompleted = 'shDC',
+ kWhatEOS = 'eos ',
+ kWhatError = 'err ',
+ };
protected:
+
virtual ~Decoder();
virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -46,27 +62,90 @@ protected:
private:
enum {
kWhatCodecNotify = 'cdcN',
+ kWhatConfigure = 'conf',
+ kWhatGetInputBuffers = 'gInB',
+ kWhatInputBufferFilled = 'inpF',
+ kWhatRenderBuffer = 'rndr',
+ kWhatFlush = 'flus',
+ kWhatShutdown = 'shuD',
+ kWhatUpdateFormat = 'uFmt',
};
sp<AMessage> mNotify;
sp<NativeWindowWrapper> mNativeWindow;
- sp<AMessage> mFormat;
- sp<ACodec> mCodec;
+ sp<AMessage> mInputFormat;
+ sp<AMessage> mOutputFormat;
+ sp<MediaCodec> mCodec;
sp<ALooper> mCodecLooper;
+ sp<ALooper> mDecoderLooper;
- Vector<sp<ABuffer> > mCSD;
- size_t mCSDIndex;
+ List<sp<AMessage> > mPendingInputMessages;
- sp<AMessage> makeFormat(const sp<MetaData> &meta);
+ Vector<sp<ABuffer> > mInputBuffers;
+ Vector<sp<ABuffer> > mOutputBuffers;
+ Vector<sp<ABuffer> > mCSDsForCurrentFormat;
+ Vector<sp<ABuffer> > mCSDsToSubmit;
+ Vector<bool> mInputBufferIsDequeued;
+ Vector<MediaBuffer *> mMediaBuffers;
- void onFillThisBuffer(const sp<AMessage> &msg);
+ void handleError(int32_t err);
+ bool handleAnInputBuffer();
+ bool handleAnOutputBuffer();
+
+ void releaseAndResetMediaBuffers();
+ void requestCodecNotification();
+ bool isStaleReply(const sp<AMessage> &msg);
+
+ void onConfigure(const sp<AMessage> &format);
+ void onFlush();
+ void onResume();
+ bool onInputBufferFilled(const sp<AMessage> &msg);
+ void onRenderBuffer(const sp<AMessage> &msg);
+ void onShutdown();
+
+ int32_t mBufferGeneration;
+ bool mPaused;
+ AString mComponentName;
bool supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const;
+ void rememberCodecSpecificData(const sp<AMessage> &format);
DISALLOW_EVIL_CONSTRUCTORS(Decoder);
};
+struct NuPlayer::CCDecoder : public RefBase {
+ enum {
+ kWhatClosedCaptionData,
+ kWhatTrackAdded,
+ };
+
+ CCDecoder(const sp<AMessage> &notify);
+
+ size_t getTrackCount() const;
+ sp<AMessage> getTrackInfo(size_t index) const;
+ status_t selectTrack(size_t index, bool select);
+ bool isSelected() const;
+ void decode(const sp<ABuffer> &accessUnit);
+ void display(int64_t timeUs);
+ void flush();
+
+private:
+ sp<AMessage> mNotify;
+ KeyedVector<int64_t, sp<ABuffer> > mCCMap;
+ size_t mCurrentChannel;
+ int32_t mSelectedTrack;
+ int32_t mTrackIndices[4];
+ Vector<size_t> mFoundChannels;
+
+ bool isTrackValid(size_t index) const;
+ int32_t getTrackIndex(size_t channel) const;
+ bool extractFromSEI(const sp<ABuffer> &accessUnit);
+ sp<ABuffer> filterCCBuf(const sp<ABuffer> &ccBuf, size_t index);
+
+ DISALLOW_EVIL_CONSTRUCTORS(CCDecoder);
+};
+
} // namespace android
#endif // NUPLAYER_DECODER_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
new file mode 100644
index 0000000..f7aacdd
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
@@ -0,0 +1,266 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerDecoderPassThrough"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayerDecoderPassThrough.h"
+
+#include <media/ICrypto.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+
+namespace android {
+
+static const size_t kMaxCachedBytes = 200000;
+// The buffers will contain a bit less than kAggregateBufferSizeBytes.
+// So we can start off with just enough buffers to keep the cache full.
+static const size_t kMaxPendingBuffers = 1 + (kMaxCachedBytes / NuPlayer::kAggregateBufferSizeBytes);
+
+NuPlayer::DecoderPassThrough::DecoderPassThrough(
+ const sp<AMessage> &notify)
+ : Decoder(notify),
+ mNotify(notify),
+ mBufferGeneration(0),
+ mReachedEOS(true),
+ mPendingBuffersToFill(0),
+ mPendingBuffersToDrain(0),
+ mCachedBytes(0),
+ mComponentName("pass through decoder") {
+ mDecoderLooper = new ALooper;
+ mDecoderLooper->setName("NuPlayerDecoderPassThrough");
+ mDecoderLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+}
+
+NuPlayer::DecoderPassThrough::~DecoderPassThrough() {
+}
+
+void NuPlayer::DecoderPassThrough::configure(const sp<AMessage> &format) {
+ sp<AMessage> msg = new AMessage(kWhatConfigure, id());
+ msg->setMessage("format", format);
+ msg->post();
+}
+
+void NuPlayer::DecoderPassThrough::init() {
+ mDecoderLooper->registerHandler(this);
+}
+
+void NuPlayer::DecoderPassThrough::signalFlush() {
+ (new AMessage(kWhatFlush, id()))->post();
+}
+
+void NuPlayer::DecoderPassThrough::signalResume() {
+ (new AMessage(kWhatResume, id()))->post();
+}
+
+void NuPlayer::DecoderPassThrough::initiateShutdown() {
+ (new AMessage(kWhatShutdown, id()))->post();
+}
+
+bool NuPlayer::DecoderPassThrough::supportsSeamlessFormatChange(
+ const sp<AMessage> & /* targetFormat */) const {
+ return true;
+}
+
+void NuPlayer::DecoderPassThrough::onConfigure(const sp<AMessage> &format) {
+ ALOGV("[%s] onConfigure", mComponentName.c_str());
+ mCachedBytes = 0;
+ mPendingBuffersToFill = 0;
+ mPendingBuffersToDrain = 0;
+ mReachedEOS = false;
+ ++mBufferGeneration;
+
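+    // There is no codec in pass-through mode; prime the pipeline by requesting
+    // enough buffers from the source to fill the byte cache.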
+ requestMaxBuffers();
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatOutputFormatChanged);
+ notify->setMessage("format", format);
+ notify->post();
+}
+
+bool NuPlayer::DecoderPassThrough::isStaleReply(const sp<AMessage> &msg) {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ return generation != mBufferGeneration;
+}
+
+bool NuPlayer::DecoderPassThrough::requestABuffer() {
+ if (mCachedBytes >= kMaxCachedBytes) {
+ ALOGV("[%s] mCachedBytes = %zu",
+ mComponentName.c_str(), mCachedBytes);
+ return false;
+ }
+ if (mReachedEOS) {
+ ALOGV("[%s] reached EOS", mComponentName.c_str());
+ return false;
+ }
+
+ sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, id());
+ reply->setInt32("generation", mBufferGeneration);
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatFillThisBuffer);
+ notify->setMessage("reply", reply);
+ notify->post();
+ mPendingBuffersToFill++;
+ ALOGV("requestABuffer: #ToFill = %zu, #ToDrain = %zu", mPendingBuffersToFill,
+ mPendingBuffersToDrain);
+
+ return true;
+}
+
+void NuPlayer::DecoderPassThrough::onInputBufferFilled(
+ const sp<AMessage> &msg) {
+ --mPendingBuffersToFill;
+ if (mReachedEOS) {
+ return;
+ }
+
+ sp<ABuffer> buffer;
+ msg->findBuffer("buffer", &buffer);
+ if (buffer == NULL) {
+ mReachedEOS = true;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatEOS);
+ notify->setInt32("err", ERROR_END_OF_STREAM);
+ notify->post();
+ return;
+ }
+
+ mCachedBytes += buffer->size();
+
+ sp<AMessage> reply = new AMessage(kWhatBufferConsumed, id());
+ reply->setInt32("generation", mBufferGeneration);
+ reply->setInt32("size", buffer->size());
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatDrainThisBuffer);
+ notify->setBuffer("buffer", buffer);
+ notify->setMessage("reply", reply);
+ notify->post();
+ ++mPendingBuffersToDrain;
+ ALOGV("onInputBufferFilled: #ToFill = %zu, #ToDrain = %zu, cachedBytes = %zu",
+ mPendingBuffersToFill, mPendingBuffersToDrain, mCachedBytes);
+}
+
+void NuPlayer::DecoderPassThrough::onBufferConsumed(int32_t size) {
+ --mPendingBuffersToDrain;
+ mCachedBytes -= size;
+ ALOGV("onBufferConsumed: #ToFill = %zu, #ToDrain = %zu, cachedBytes = %zu",
+ mPendingBuffersToFill, mPendingBuffersToDrain, mCachedBytes);
+ requestABuffer();
+}
+
+void NuPlayer::DecoderPassThrough::onFlush() {
+ ++mBufferGeneration;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatFlushCompleted);
+ notify->post();
+ mPendingBuffersToFill = 0;
+ mPendingBuffersToDrain = 0;
+ mCachedBytes = 0;
+ mReachedEOS = false;
+}
+
+void NuPlayer::DecoderPassThrough::requestMaxBuffers() {
+ for (size_t i = 0; i < kMaxPendingBuffers; i++) {
+ if (!requestABuffer()) {
+ break;
+ }
+ }
+}
+
+void NuPlayer::DecoderPassThrough::onShutdown() {
+ ++mBufferGeneration;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatShutdownCompleted);
+ notify->post();
+ mReachedEOS = true;
+}
+
+void NuPlayer::DecoderPassThrough::onMessageReceived(const sp<AMessage> &msg) {
+ ALOGV("[%s] onMessage: %s", mComponentName.c_str(),
+ msg->debugString().c_str());
+
+ switch (msg->what()) {
+ case kWhatConfigure:
+ {
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
+ onConfigure(format);
+ break;
+ }
+
+ case kWhatRequestABuffer:
+ {
+ if (!isStaleReply(msg)) {
+ requestABuffer();
+ }
+
+ break;
+ }
+
+ case kWhatInputBufferFilled:
+ {
+ if (!isStaleReply(msg)) {
+ onInputBufferFilled(msg);
+ }
+ break;
+ }
+
+ case kWhatBufferConsumed:
+ {
+ if (!isStaleReply(msg)) {
+ int32_t size;
+ CHECK(msg->findInt32("size", &size));
+ onBufferConsumed(size);
+ }
+ break;
+ }
+
+ case kWhatFlush:
+ {
+ onFlush();
+ break;
+ }
+
+ case kWhatResume:
+ {
+ requestMaxBuffers();
+ break;
+ }
+
+ case kWhatShutdown:
+ {
+ onShutdown();
+ break;
+ }
+
+ default:
+ TRESPASS();
+ break;
+ }
+}
+
+} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
new file mode 100644
index 0000000..fb20257
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_DECODER_PASS_THROUGH_H_
+
+#define NUPLAYER_DECODER_PASS_THROUGH_H_
+
+#include "NuPlayer.h"
+
+#include "NuPlayerDecoder.h"
+
+namespace android {
+
+struct NuPlayer::DecoderPassThrough : public Decoder {
+ DecoderPassThrough(const sp<AMessage> &notify);
+
+ virtual void configure(const sp<AMessage> &format);
+ virtual void init();
+
+ virtual void signalFlush();
+ virtual void signalResume();
+ virtual void initiateShutdown();
+
+ bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
+
+protected:
+
+ virtual ~DecoderPassThrough();
+
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+ enum {
+ kWhatRequestABuffer = 'reqB',
+ kWhatConfigure = 'conf',
+ kWhatInputBufferFilled = 'inpF',
+ kWhatBufferConsumed = 'bufC',
+ kWhatFlush = 'flus',
+ kWhatShutdown = 'shuD',
+ };
+
+ sp<AMessage> mNotify;
+ sp<ALooper> mDecoderLooper;
+
+ /** Returns true if a buffer was requested.
+ * Returns false if at EOS or cache already full.
+ */
+ bool requestABuffer();
+ bool isStaleReply(const sp<AMessage> &msg);
+
+ void onConfigure(const sp<AMessage> &format);
+ void onFlush();
+ void onInputBufferFilled(const sp<AMessage> &msg);
+ void onBufferConsumed(int32_t size);
+ void requestMaxBuffers();
+ void onShutdown();
+
+ int32_t mBufferGeneration;
+ bool mReachedEOS;
+ // TODO mPendingBuffersToFill and mPendingBuffersToDrain are only for
+ // debugging. They can be removed when the power investigation is done.
+ size_t mPendingBuffersToFill;
+ size_t mPendingBuffersToDrain;
+ size_t mCachedBytes;
+ AString mComponentName;
+
+ DISALLOW_EVIL_CONSTRUCTORS(DecoderPassThrough);
+};
+
+} // namespace android
+
+#endif // NUPLAYER_DECODER_PASS_THROUGH_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index b9651a1..ab46074 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -26,7 +26,9 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
namespace android {
@@ -37,12 +39,14 @@ NuPlayerDriver::NuPlayerDriver()
mSetSurfaceInProgress(false),
mDurationUs(-1),
mPositionUs(-1),
- mNumFramesTotal(0),
- mNumFramesDropped(0),
+ mSeekInProgress(false),
mLooper(new ALooper),
mPlayerFlags(0),
mAtEOS(false),
+ mLooping(false),
+ mAutoLoop(false),
mStartupSeekTimeUs(-1) {
+ ALOGV("NuPlayerDriver(%p)", this);
mLooper->setName("NuPlayerDriver Looper");
mLooper->start(
@@ -57,6 +61,7 @@ NuPlayerDriver::NuPlayerDriver()
}
NuPlayerDriver::~NuPlayerDriver() {
+ ALOGV("~NuPlayerDriver(%p)", this);
mLooper->stop();
}
@@ -71,7 +76,10 @@ status_t NuPlayerDriver::setUID(uid_t uid) {
}
status_t NuPlayerDriver::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers) {
+ ALOGV("setDataSource(%p) url(%s)", this, uriDebugString(url, false).c_str());
Mutex::Autolock autoLock(mLock);
if (mState != STATE_IDLE) {
@@ -80,7 +88,7 @@ status_t NuPlayerDriver::setDataSource(
mState = STATE_SET_DATASOURCE_PENDING;
- mPlayer->setDataSourceAsync(url, headers);
+ mPlayer->setDataSourceAsync(httpService, url, headers);
while (mState == STATE_SET_DATASOURCE_PENDING) {
mCondition.wait(mLock);
@@ -90,6 +98,7 @@ status_t NuPlayerDriver::setDataSource(
}
status_t NuPlayerDriver::setDataSource(int fd, int64_t offset, int64_t length) {
+ ALOGV("setDataSource(%p) file(%d)", this, fd);
Mutex::Autolock autoLock(mLock);
if (mState != STATE_IDLE) {
@@ -108,6 +117,7 @@ status_t NuPlayerDriver::setDataSource(int fd, int64_t offset, int64_t length) {
}
status_t NuPlayerDriver::setDataSource(const sp<IStreamSource> &source) {
+ ALOGV("setDataSource(%p) stream source", this);
Mutex::Autolock autoLock(mLock);
if (mState != STATE_IDLE) {
@@ -127,6 +137,7 @@ status_t NuPlayerDriver::setDataSource(const sp<IStreamSource> &source) {
status_t NuPlayerDriver::setVideoSurfaceTexture(
const sp<IGraphicBufferProducer> &bufferProducer) {
+ ALOGV("setVideoSurfaceTexture(%p)", this);
Mutex::Autolock autoLock(mLock);
if (mSetSurfaceInProgress) {
@@ -154,6 +165,7 @@ status_t NuPlayerDriver::setVideoSurfaceTexture(
}
status_t NuPlayerDriver::prepare() {
+ ALOGV("prepare(%p)", this);
Mutex::Autolock autoLock(mLock);
return prepare_l();
}
@@ -172,12 +184,23 @@ status_t NuPlayerDriver::prepare_l() {
mCondition.wait(mLock);
}
return (mState == STATE_PREPARED) ? OK : UNKNOWN_ERROR;
+ case STATE_STOPPED:
+            // this state is really just paused; handle it as a seek to the start
+ mAtEOS = false;
+ mState = STATE_STOPPED_AND_PREPARING;
+ mIsAsyncPrepare = false;
+ mPlayer->seekToAsync(0, true /* needNotify */);
+ while (mState == STATE_STOPPED_AND_PREPARING) {
+ mCondition.wait(mLock);
+ }
+ return (mState == STATE_STOPPED_AND_PREPARED) ? OK : UNKNOWN_ERROR;
default:
return INVALID_OPERATION;
};
}
status_t NuPlayerDriver::prepareAsync() {
+ ALOGV("prepareAsync(%p)", this);
Mutex::Autolock autoLock(mLock);
switch (mState) {
@@ -186,12 +209,20 @@ status_t NuPlayerDriver::prepareAsync() {
mIsAsyncPrepare = true;
mPlayer->prepareAsync();
return OK;
+ case STATE_STOPPED:
+            // this state is really just paused; handle it as a seek to the start
+ mAtEOS = false;
+ mState = STATE_STOPPED_AND_PREPARING;
+ mIsAsyncPrepare = true;
+ mPlayer->seekToAsync(0, true /* needNotify */);
+ return OK;
default:
return INVALID_OPERATION;
};
}
status_t NuPlayerDriver::start() {
+ ALOGD("start(%p)", this);
Mutex::Autolock autoLock(mLock);
switch (mState) {
@@ -214,9 +245,7 @@ status_t NuPlayerDriver::start() {
mPlayer->start();
if (mStartupSeekTimeUs >= 0) {
- if (mStartupSeekTimeUs == 0) {
- notifySeekComplete();
- } else {
+ if (mStartupSeekTimeUs > 0) {
mPlayer->seekToAsync(mStartupSeekTimeUs);
}
@@ -226,11 +255,26 @@ status_t NuPlayerDriver::start() {
}
case STATE_RUNNING:
+ {
+ if (mAtEOS) {
+ mPlayer->seekToAsync(0);
+ mAtEOS = false;
+ mPositionUs = -1;
+ }
break;
+ }
case STATE_PAUSED:
+ case STATE_STOPPED_AND_PREPARED:
{
- mPlayer->resume();
+ if (mAtEOS) {
+ mPlayer->seekToAsync(0);
+ mAtEOS = false;
+ mPlayer->resume();
+ mPositionUs = -1;
+ } else {
+ mPlayer->resume();
+ }
break;
}
@@ -244,7 +288,31 @@ status_t NuPlayerDriver::start() {
}
status_t NuPlayerDriver::stop() {
- return pause();
+ ALOGD("stop(%p)", this);
+ Mutex::Autolock autoLock(mLock);
+
+ switch (mState) {
+ case STATE_RUNNING:
+ mPlayer->pause();
+ // fall through
+
+ case STATE_PAUSED:
+ mState = STATE_STOPPED;
+ notifyListener_l(MEDIA_STOPPED);
+ break;
+
+ case STATE_PREPARED:
+ case STATE_STOPPED:
+ case STATE_STOPPED_AND_PREPARING:
+ case STATE_STOPPED_AND_PREPARED:
+ mState = STATE_STOPPED;
+ break;
+
+ default:
+ return INVALID_OPERATION;
+ }
+
+ return OK;
}
status_t NuPlayerDriver::pause() {
@@ -256,7 +324,8 @@ status_t NuPlayerDriver::pause() {
return OK;
case STATE_RUNNING:
- notifyListener(MEDIA_PAUSED);
+ mState = STATE_PAUSED;
+ notifyListener_l(MEDIA_PAUSED);
mPlayer->pause();
break;
@@ -264,8 +333,6 @@ status_t NuPlayerDriver::pause() {
return INVALID_OPERATION;
}
- mState = STATE_PAUSED;
-
return OK;
}
@@ -274,6 +341,7 @@ bool NuPlayerDriver::isPlaying() {
}
status_t NuPlayerDriver::seekTo(int msec) {
+ ALOGD("seekTo(%p) %d ms", this, msec);
Mutex::Autolock autoLock(mLock);
int64_t seekTimeUs = msec * 1000ll;
@@ -282,6 +350,10 @@ status_t NuPlayerDriver::seekTo(int msec) {
case STATE_PREPARED:
{
mStartupSeekTimeUs = seekTimeUs;
+ // pretend that the seek completed. It will actually happen when starting playback.
+ // TODO: actually perform the seek here, so the player is ready to go at the new
+ // location
+ notifySeekComplete_l();
break;
}
@@ -289,9 +361,10 @@ status_t NuPlayerDriver::seekTo(int msec) {
case STATE_PAUSED:
{
mAtEOS = false;
+ mSeekInProgress = true;
      // seeks can take a while, so we are essentially paused
- notifyListener(MEDIA_PAUSED);
- mPlayer->seekToAsync(seekTimeUs);
+ notifyListener_l(MEDIA_PAUSED);
+ mPlayer->seekToAsync(seekTimeUs, true /* needNotify */);
break;
}
@@ -299,18 +372,24 @@ status_t NuPlayerDriver::seekTo(int msec) {
return INVALID_OPERATION;
}
+ mPositionUs = seekTimeUs;
return OK;
}
status_t NuPlayerDriver::getCurrentPosition(int *msec) {
- Mutex::Autolock autoLock(mLock);
+ int64_t tempUs = 0;
+ status_t ret = mPlayer->getCurrentPosition(&tempUs);
- if (mPositionUs < 0) {
- *msec = 0;
+ Mutex::Autolock autoLock(mLock);
+    // We need to check mSeekInProgress here because mPlayer->seekToAsync is an async call, which
+    // means getCurrentPosition can be called before the seek is completed. In other words, the
+    // renderer may return a position value that differs from the seek-to position.
+ if (ret != OK || mSeekInProgress) {
+ tempUs = (mPositionUs <= 0) ? 0 : mPositionUs;
} else {
- *msec = (mPositionUs + 500ll) / 1000;
+ mPositionUs = tempUs;
}
-
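+    // convert microseconds to milliseconds, rounding to the nearest value
+    // rather than truncating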
+ *msec = (int)divRound(tempUs, (int64_t)(1000));
return OK;
}
@@ -327,6 +406,7 @@ status_t NuPlayerDriver::getDuration(int *msec) {
}
status_t NuPlayerDriver::reset() {
+ ALOGD("reset(%p)", this);
Mutex::Autolock autoLock(mLock);
switch (mState) {
@@ -341,7 +421,7 @@ status_t NuPlayerDriver::reset() {
{
CHECK(mIsAsyncPrepare);
- notifyListener(MEDIA_PREPARED);
+ notifyListener_l(MEDIA_PREPARED);
break;
}
@@ -349,7 +429,9 @@ status_t NuPlayerDriver::reset() {
break;
}
- notifyListener(MEDIA_STOPPED);
+ if (mState != STATE_STOPPED) {
+ notifyListener_l(MEDIA_STOPPED);
+ }
mState = STATE_RESET_IN_PROGRESS;
mPlayer->resetAsync();
@@ -361,12 +443,14 @@ status_t NuPlayerDriver::reset() {
mDurationUs = -1;
mPositionUs = -1;
mStartupSeekTimeUs = -1;
+ mLooping = false;
return OK;
}
-status_t NuPlayerDriver::setLooping(int /* loop */) {
- return INVALID_OPERATION;
+status_t NuPlayerDriver::setLooping(int loop) {
+ mLooping = loop != 0;
+ return OK;
}
player_type NuPlayerDriver::playerType() {
@@ -410,6 +494,12 @@ status_t NuPlayerDriver::invoke(const Parcel &request, Parcel *reply) {
return mPlayer->selectTrack(trackIndex, false /* select */);
}
+ case INVOKE_ID_GET_SELECTED_TRACK:
+ {
+ int32_t type = request.readInt32();
+ return mPlayer->getSelectedTrack(type, reply);
+ }
+
default:
{
return INVALID_OPERATION;
@@ -419,6 +509,7 @@ status_t NuPlayerDriver::invoke(const Parcel &request, Parcel *reply) {
void NuPlayerDriver::setAudioSink(const sp<AudioSink> &audioSink) {
mPlayer->setAudioSink(audioSink);
+ mAudioSink = audioSink;
}
status_t NuPlayerDriver::setParameter(
@@ -458,6 +549,7 @@ status_t NuPlayerDriver::getMetadata(
}
void NuPlayerDriver::notifyResetComplete() {
+ ALOGD("notifyResetComplete(%p)", this);
Mutex::Autolock autoLock(mLock);
CHECK_EQ(mState, STATE_RESET_IN_PROGRESS);
@@ -466,6 +558,7 @@ void NuPlayerDriver::notifyResetComplete() {
}
void NuPlayerDriver::notifySetSurfaceComplete() {
+ ALOGV("notifySetSurfaceComplete(%p)", this);
Mutex::Autolock autoLock(mLock);
CHECK(mSetSurfaceInProgress);
@@ -479,35 +572,45 @@ void NuPlayerDriver::notifyDuration(int64_t durationUs) {
mDurationUs = durationUs;
}
-void NuPlayerDriver::notifyPosition(int64_t positionUs) {
- Mutex::Autolock autoLock(mLock);
- mPositionUs = positionUs;
-}
-
void NuPlayerDriver::notifySeekComplete() {
- notifyListener(MEDIA_SEEK_COMPLETE);
-}
-
-void NuPlayerDriver::notifyFrameStats(
- int64_t numFramesTotal, int64_t numFramesDropped) {
+ ALOGV("notifySeekComplete(%p)", this);
Mutex::Autolock autoLock(mLock);
- mNumFramesTotal = numFramesTotal;
- mNumFramesDropped = numFramesDropped;
+ mSeekInProgress = false;
+ notifySeekComplete_l();
+}
+
+void NuPlayerDriver::notifySeekComplete_l() {
+ bool wasSeeking = true;
+ if (mState == STATE_STOPPED_AND_PREPARING) {
+ wasSeeking = false;
+ mState = STATE_STOPPED_AND_PREPARED;
+ mCondition.broadcast();
+ if (!mIsAsyncPrepare) {
+ // if we are preparing synchronously, no need to notify listener
+ return;
+ }
+ } else if (mState == STATE_STOPPED) {
+ // no need to notify listener
+ return;
+ }
+ notifyListener_l(wasSeeking ? MEDIA_SEEK_COMPLETE : MEDIA_PREPARED);
}
status_t NuPlayerDriver::dump(
int fd, const Vector<String16> & /* args */) const {
- Mutex::Autolock autoLock(mLock);
+ int64_t numFramesTotal;
+ int64_t numFramesDropped;
+ mPlayer->getStats(&numFramesTotal, &numFramesDropped);
FILE *out = fdopen(dup(fd), "w");
fprintf(out, " NuPlayer\n");
fprintf(out, " numFramesTotal(%" PRId64 "), numFramesDropped(%" PRId64 "), "
"percentageDropped(%.2f)\n",
- mNumFramesTotal,
- mNumFramesDropped,
- mNumFramesTotal == 0
- ? 0.0 : (double)mNumFramesDropped / mNumFramesTotal);
+ numFramesTotal,
+ numFramesDropped,
+ numFramesTotal == 0
+ ? 0.0 : (double)numFramesDropped / numFramesTotal);
fclose(out);
out = NULL;
@@ -517,11 +620,41 @@ status_t NuPlayerDriver::dump(
void NuPlayerDriver::notifyListener(
int msg, int ext1, int ext2, const Parcel *in) {
- if (msg == MEDIA_PLAYBACK_COMPLETE || msg == MEDIA_ERROR) {
- mAtEOS = true;
+ Mutex::Autolock autoLock(mLock);
+ notifyListener_l(msg, ext1, ext2, in);
+}
+
+void NuPlayerDriver::notifyListener_l(
+ int msg, int ext1, int ext2, const Parcel *in) {
+ switch (msg) {
+ case MEDIA_PLAYBACK_COMPLETE:
+ {
+ if (mState != STATE_RESET_IN_PROGRESS) {
+ if (mLooping || (mAutoLoop
+ && (mAudioSink == NULL || mAudioSink->realtime()))) {
+ mPlayer->seekToAsync(0);
+ break;
+ }
+
+ mPlayer->pause();
+ mState = STATE_PAUSED;
+ }
+ // fall through
+ }
+
+ case MEDIA_ERROR:
+ {
+ mAtEOS = true;
+ break;
+ }
+
+ default:
+ break;
}
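+    // sendEvent may re-enter the driver through the client callback, so drop
+    // the lock while calling it.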
+ mLock.unlock();
sendEvent(msg, ext1, ext2, in);
+ mLock.lock();
}
void NuPlayerDriver::notifySetDataSourceCompleted(status_t err) {
@@ -550,15 +683,24 @@ void NuPlayerDriver::notifyPrepareCompleted(status_t err) {
mAsyncResult = err;
if (err == OK) {
+ // update state before notifying client, so that if client calls back into NuPlayerDriver
+ // in response, NuPlayerDriver has the right state
+ mState = STATE_PREPARED;
if (mIsAsyncPrepare) {
- notifyListener(MEDIA_PREPARED);
+ notifyListener_l(MEDIA_PREPARED);
}
- mState = STATE_PREPARED;
} else {
+ mState = STATE_UNPREPARED;
if (mIsAsyncPrepare) {
- notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
+ notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
}
- mState = STATE_UNPREPARED;
+ }
+
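+    // A container-level auto-loop hint (kKeyAutoLoop) can make playback restart
+    // at EOS even without an explicit setLooping() call.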
+ sp<MetaData> meta = mPlayer->getFileMeta();
+ int32_t loop;
+ if (meta != NULL
+ && meta->findInt32(kKeyAutoLoop, &loop) && loop != 0) {
+ mAutoLoop = true;
}
mCondition.broadcast();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index 99f72a6..5cba7d9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -31,7 +31,9 @@ struct NuPlayerDriver : public MediaPlayerInterface {
virtual status_t setUID(uid_t uid);
virtual status_t setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
@@ -66,9 +68,8 @@ struct NuPlayerDriver : public MediaPlayerInterface {
void notifyResetComplete();
void notifySetSurfaceComplete();
void notifyDuration(int64_t durationUs);
- void notifyPosition(int64_t positionUs);
void notifySeekComplete();
- void notifyFrameStats(int64_t numFramesTotal, int64_t numFramesDropped);
+ void notifySeekComplete_l();
void notifyListener(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
void notifyFlagsChanged(uint32_t flags);
@@ -85,6 +86,9 @@ private:
STATE_RUNNING,
STATE_PAUSED,
STATE_RESET_IN_PROGRESS,
+ STATE_STOPPED, // equivalent to PAUSED
+ STATE_STOPPED_AND_PREPARING, // equivalent to PAUSED, but seeking
+ STATE_STOPPED_AND_PREPARED, // equivalent to PAUSED, but seek complete
};
mutable Mutex mLock;
@@ -100,19 +104,22 @@ private:
bool mSetSurfaceInProgress;
int64_t mDurationUs;
int64_t mPositionUs;
- int64_t mNumFramesTotal;
- int64_t mNumFramesDropped;
+ bool mSeekInProgress;
// <<<
sp<ALooper> mLooper;
sp<NuPlayer> mPlayer;
+ sp<AudioSink> mAudioSink;
uint32_t mPlayerFlags;
bool mAtEOS;
+ bool mLooping;
+ bool mAutoLoop;
int64_t mStartupSeekTimeUs;
status_t prepare_l();
+ void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
DISALLOW_EVIL_CONSTRUCTORS(NuPlayerDriver);
};
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index bf5271e..638d9bc 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -20,23 +20,44 @@
#include "NuPlayerRenderer.h"
-#include "SoftwareRenderer.h"
+#include <cutils/properties.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+#include <VideoFrameScheduler.h>
+
+#include <inttypes.h>
namespace android {
+// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
+// is closed to allow the audio DSP to power down.
+static const int64_t kOffloadPauseMaxUs = 60000000ll;
+
// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
+static bool sFrameAccurateAVsync = false;
+
+static void readProperties() {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("persist.sys.media.avsync", value, NULL)) {
+ sFrameAccurateAVsync =
+ !strcmp("1", value) || !strcasecmp("true", value);
+ }
+}
+
NuPlayer::Renderer::Renderer(
const sp<MediaPlayerBase::AudioSink> &sink,
const sp<AMessage> &notify,
uint32_t flags)
: mAudioSink(sink),
- mSoftRenderer(NULL),
mNotify(notify),
mFlags(flags),
mNumFramesWritten(0),
@@ -44,28 +65,35 @@ NuPlayer::Renderer::Renderer(
mDrainVideoQueuePending(false),
mAudioQueueGeneration(0),
mVideoQueueGeneration(0),
+ mAudioFirstAnchorTimeMediaUs(-1),
mAnchorTimeMediaUs(-1),
mAnchorTimeRealUs(-1),
- mFlushingAudio(false),
- mFlushingVideo(false),
+ mVideoLateByUs(0ll),
mHasAudio(false),
mHasVideo(false),
+ mPauseStartedTimeRealUs(-1),
+ mFlushingAudio(false),
+ mFlushingVideo(false),
mSyncQueues(false),
mPaused(false),
+ mVideoSampleReceived(false),
mVideoRenderingStarted(false),
mVideoRenderingStartGeneration(0),
mAudioRenderingStartGeneration(0),
- mLastPositionUpdateUs(-1ll),
- mVideoLateByUs(0ll) {
+ mAudioOffloadPauseTimeoutGeneration(0),
+ mAudioOffloadTornDown(false),
+ mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
+ mTotalBuffersQueued(0),
+ mLastAudioBufferDrained(0) {
+ readProperties();
}
NuPlayer::Renderer::~Renderer() {
- delete mSoftRenderer;
-}
-
-void NuPlayer::Renderer::setSoftRenderer(SoftwareRenderer *softRenderer) {
- delete mSoftRenderer;
- mSoftRenderer = softRenderer;
+ if (offloadingAudio()) {
+ mAudioSink->stop();
+ mAudioSink->flush();
+ mAudioSink->close();
+ }
}
void NuPlayer::Renderer::queueBuffer(
@@ -92,10 +120,14 @@ void NuPlayer::Renderer::flush(bool audio) {
{
Mutex::Autolock autoLock(mFlushLock);
if (audio) {
- CHECK(!mFlushingAudio);
+ if (mFlushingAudio) {
+ return;
+ }
mFlushingAudio = true;
} else {
- CHECK(!mFlushingVideo);
+ if (mFlushingVideo) {
+ return;
+ }
mFlushingVideo = true;
}
}
@@ -106,13 +138,23 @@ void NuPlayer::Renderer::flush(bool audio) {
}
void NuPlayer::Renderer::signalTimeDiscontinuity() {
+ Mutex::Autolock autoLock(mLock);
// CHECK(mAudioQueue.empty());
// CHECK(mVideoQueue.empty());
- mAnchorTimeMediaUs = -1;
- mAnchorTimeRealUs = -1;
+ setAudioFirstAnchorTime(-1);
+ setAnchorTime(-1, -1);
+ setVideoLateByUs(0);
mSyncQueues = false;
}
+void NuPlayer::Renderer::signalAudioSinkChanged() {
+ (new AMessage(kWhatAudioSinkChanged, id()))->post();
+}
+
+void NuPlayer::Renderer::signalDisableOffloadAudio() {
+ (new AMessage(kWhatDisableOffloadAudio, id()))->post();
+}
+
void NuPlayer::Renderer::pause() {
(new AMessage(kWhatPause, id()))->post();
}
@@ -121,8 +163,156 @@ void NuPlayer::Renderer::resume() {
(new AMessage(kWhatResume, id()))->post();
}
+void NuPlayer::Renderer::setVideoFrameRate(float fps) {
+ sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
+ msg->setFloat("frame-rate", fps);
+ msg->post();
+}
+
+status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
+ return getCurrentPosition(mediaUs, ALooper::GetNowUs());
+}
+
+status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs, int64_t nowUs) {
+ Mutex::Autolock autoLock(mTimeLock);
+ if (!mHasAudio && !mHasVideo) {
+ return NO_INIT;
+ }
+
+ if (mAnchorTimeMediaUs < 0) {
+ return NO_INIT;
+ }
+ int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
+
+ if (mPauseStartedTimeRealUs != -1) {
+ positionUs -= (nowUs - mPauseStartedTimeRealUs);
+ }
+
+ if (positionUs < mAudioFirstAnchorTimeMediaUs) {
+ positionUs = mAudioFirstAnchorTimeMediaUs;
+ }
+
+ *mediaUs = (positionUs <= 0) ? 0 : positionUs;
+ return OK;
+}
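getCurrentPosition() now extrapolates the playback position from the last (media, real) anchor pair instead of relying on the periodic kWhatPosition notifications that this patch removes. A worked example with made-up numbers:

    // Illustration only (numbers are made up):
    //   mAnchorTimeMediaUs      = 2'000'000   anchor at media time 2.0 s
    //   mAnchorTimeRealUs       = 10'000'000  taken at real time 10.0 s
    //   nowUs                   = 10'500'000  queried 0.5 s later
    //   mPauseStartedTimeRealUs = 10'300'000  paused for the last 0.2 s
    //
    //   positionUs  = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs   // 2'500'000
    //   positionUs -= (nowUs - mPauseStartedTimeRealUs)                  // - 200'000 -> 2'300'000
    //   positionUs  = max(positionUs, mAudioFirstAnchorTimeMediaUs)      // clamp to first audio anchor
    //   *mediaUs    = max(positionUs, 0)                                 // reports 2.3 s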
+
+void NuPlayer::Renderer::setHasMedia(bool audio) {
+ Mutex::Autolock autoLock(mTimeLock);
+ if (audio) {
+ mHasAudio = true;
+ } else {
+ mHasVideo = true;
+ }
+}
+
+void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
+ Mutex::Autolock autoLock(mTimeLock);
+ mAudioFirstAnchorTimeMediaUs = mediaUs;
+}
+
+void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
+ Mutex::Autolock autoLock(mTimeLock);
+ if (mAudioFirstAnchorTimeMediaUs == -1) {
+ mAudioFirstAnchorTimeMediaUs = mediaUs;
+ }
+}
+
+void NuPlayer::Renderer::setAnchorTime(int64_t mediaUs, int64_t realUs, bool resume) {
+ Mutex::Autolock autoLock(mTimeLock);
+ mAnchorTimeMediaUs = mediaUs;
+ mAnchorTimeRealUs = realUs;
+ if (resume) {
+ mPauseStartedTimeRealUs = -1;
+ }
+}
+
+void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
+ Mutex::Autolock autoLock(mTimeLock);
+ mVideoLateByUs = lateUs;
+}
+
+int64_t NuPlayer::Renderer::getVideoLateByUs() {
+ Mutex::Autolock autoLock(mTimeLock);
+ return mVideoLateByUs;
+}
+
+void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
+ Mutex::Autolock autoLock(mTimeLock);
+ mPauseStartedTimeRealUs = realUs;
+}
+
+bool NuPlayer::Renderer::openAudioSink(
+ const sp<AMessage> &format,
+ bool offloadOnly,
+ bool hasVideo,
+ uint32_t flags) {
+ sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id());
+ msg->setMessage("format", format);
+ msg->setInt32("offload-only", offloadOnly);
+ msg->setInt32("has-video", hasVideo);
+ msg->setInt32("flags", flags);
+
+ sp<AMessage> response;
+ msg->postAndAwaitResponse(&response);
+
+ int32_t offload;
+ CHECK(response->findInt32("offload", &offload));
+ return (offload != 0);
+}
+
+void NuPlayer::Renderer::closeAudioSink() {
+ sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id());
+
+ sp<AMessage> response;
+ msg->postAndAwaitResponse(&response);
+}
+
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
+ case kWhatOpenAudioSink:
+ {
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
+
+ int32_t offloadOnly;
+ CHECK(msg->findInt32("offload-only", &offloadOnly));
+
+ int32_t hasVideo;
+ CHECK(msg->findInt32("has-video", &hasVideo));
+
+ uint32_t flags;
+ CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+ bool offload = onOpenAudioSink(format, offloadOnly, hasVideo, flags);
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("offload", offload);
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+
+ break;
+ }
+
+ case kWhatCloseAudioSink:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ onCloseAudioSink();
+
+ sp<AMessage> response = new AMessage;
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatStopAudioSink:
+ {
+ mAudioSink->stop();
+ break;
+ }
+
case kWhatDrainAudioQueue:
{
int32_t generation;
@@ -149,7 +339,10 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
// Let's give it more data after about half that time
// has elapsed.
- postDrainAudioQueue(delayUs / 2);
+ // kWhatDrainAudioQueue is used for non-offloading mode,
+ // and mLock is used only for offloading mode. Therefore,
+ // no need to acquire mLock here.
+ postDrainAudioQueue_l(delayUs / 2);
}
break;
}
@@ -170,6 +363,19 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatPostDrainVideoQueue:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ if (generation != mVideoQueueGeneration) {
+ break;
+ }
+
+ mDrainVideoQueuePending = false;
+ postDrainVideoQueue();
+ break;
+ }
+
case kWhatQueueBuffer:
{
onQueueBuffer(msg);
@@ -194,6 +400,12 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatDisableOffloadAudio:
+ {
+ onDisableOffloadAudio();
+ break;
+ }
+
case kWhatPause:
{
onPause();
@@ -206,14 +418,41 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatSetVideoFrameRate:
+ {
+ float fps;
+ CHECK(msg->findFloat("frame-rate", &fps));
+ onSetVideoFrameRate(fps);
+ break;
+ }
+
+ case kWhatAudioOffloadTearDown:
+ {
+ onAudioOffloadTearDown(kDueToError);
+ break;
+ }
+
+ case kWhatAudioOffloadPauseTimeout:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ if (generation != mAudioOffloadPauseTimeoutGeneration) {
+ break;
+ }
+ ALOGV("Audio Offload tear down due to pause timeout.");
+ onAudioOffloadTearDown(kDueToTimeout);
+ break;
+ }
+
default:
TRESPASS();
break;
}
}
-void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
- if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
+void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
+ if (mDrainAudioQueuePending || mSyncQueues || mPaused
+ || offloadingAudio()) {
return;
}
@@ -227,10 +466,6 @@ void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
msg->post(delayUs);
}
-void NuPlayer::Renderer::signalAudioSinkChanged() {
- (new AMessage(kWhatAudioSinkChanged, id()))->post();
-}
-
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
mAudioRenderingStartGeneration = mAudioQueueGeneration;
mVideoRenderingStartGeneration = mVideoQueueGeneration;
@@ -248,6 +483,94 @@ void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
}
}
+// static
+size_t NuPlayer::Renderer::AudioSinkCallback(
+ MediaPlayerBase::AudioSink * /* audioSink */,
+ void *buffer,
+ size_t size,
+ void *cookie,
+ MediaPlayerBase::AudioSink::cb_event_t event) {
+ NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
+
+ switch (event) {
+ case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
+ {
+ return me->fillAudioBuffer(buffer, size);
+ break;
+ }
+
+ case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
+ {
+ me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
+ break;
+ }
+
+ case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
+ {
+ me->notifyAudioOffloadTearDown();
+ break;
+ }
+ }
+
+ return 0;
+}
+
+size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (!offloadingAudio() || mPaused) {
+ return 0;
+ }
+
+ bool hasEOS = false;
+
+ size_t sizeCopied = 0;
+ bool firstEntry = true;
+ while (sizeCopied < size && !mAudioQueue.empty()) {
+ QueueEntry *entry = &*mAudioQueue.begin();
+
+ if (entry->mBuffer == NULL) { // EOS
+ hasEOS = true;
+ mAudioQueue.erase(mAudioQueue.begin());
+ entry = NULL;
+ break;
+ }
+
+ if (firstEntry && entry->mOffset == 0) {
+ firstEntry = false;
+ int64_t mediaTimeUs;
+ CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+ ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
+ onNewAudioMediaTime(mediaTimeUs);
+ }
+
+ size_t copy = entry->mBuffer->size() - entry->mOffset;
+ size_t sizeRemaining = size - sizeCopied;
+ if (copy > sizeRemaining) {
+ copy = sizeRemaining;
+ }
+
+ memcpy((char *)buffer + sizeCopied,
+ entry->mBuffer->data() + entry->mOffset,
+ copy);
+
+ entry->mOffset += copy;
+ if (entry->mOffset == entry->mBuffer->size()) {
+ entry->mNotifyConsumed->post();
+ mAudioQueue.erase(mAudioQueue.begin());
+ entry = NULL;
+ }
+ sizeCopied += copy;
+ notifyIfMediaRenderingStarted();
+ }
+
+ if (hasEOS) {
+ (new AMessage(kWhatStopAudioSink, id()))->post();
+ }
+
+ return sizeCopied;
+}
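With offload enabled the AudioSink pulls data instead of being pushed: the audio framework invokes AudioSinkCallback(), CB_EVENT_FILL_BUFFER is forwarded to fillAudioBuffer(), which copies queued access units directly into the sink's buffer, updates the audio anchor, and posts kWhatStopAudioSink once EOS is dequeued. A hypothetical illustration of that pull contract from the sink's side (test-harness style, not real AudioFlinger code):

    // Hypothetical harness: request data the way an offloaded sink would.
    size_t filled = NuPlayer::Renderer::AudioSinkCallback(
            NULL /* audioSink */,
            buffer, bufferSize,
            renderer.get() /* cookie */,
            MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER);
    // A short (or zero) return simply means the queue is empty or the renderer is
    // paused; the sink calls back again the next time it needs data.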
+
bool NuPlayer::Renderer::onDrainAudioQueue() {
uint32_t numFramesPlayed;
if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
@@ -272,10 +595,15 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
QueueEntry *entry = &*mAudioQueue.begin();
+ mLastAudioBufferDrained = entry->mBufferOrdinal;
+
if (entry->mBuffer == NULL) {
// EOS
-
- notifyEOS(true /* audio */, entry->mFinalResult);
+ int64_t postEOSDelayUs = 0;
+ if (mAudioSink->needsTrailingPadding()) {
+ postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
+ }
+ notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
mAudioQueue.erase(mAudioQueue.begin());
entry = NULL;
@@ -285,26 +613,8 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
if (entry->mOffset == 0) {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
-
ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
-
- mAnchorTimeMediaUs = mediaTimeUs;
-
- uint32_t numFramesPlayed;
- CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
-
- uint32_t numFramesPendingPlayout =
- mNumFramesWritten - numFramesPlayed;
-
- int64_t realTimeOffsetUs =
- (mAudioSink->latency() / 2 /* XXX */
- + numFramesPendingPlayout
- * mAudioSink->msecsPerFrame()) * 1000ll;
-
- // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);
-
- mAnchorTimeRealUs =
- ALooper::GetNowUs() + realTimeOffsetUs;
+ onNewAudioMediaTime(mediaTimeUs);
}
size_t copy = entry->mBuffer->size() - entry->mOffset;
@@ -312,11 +622,13 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
copy = numBytesAvailableToWrite;
}
- CHECK_EQ(mAudioSink->write(
- entry->mBuffer->data() + entry->mOffset, copy),
- (ssize_t)copy);
+ ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
+ if (written < 0) {
+ // An error in AudioSink write is fatal here.
+ LOG_ALWAYS_FATAL("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+ }
- entry->mOffset += copy;
+ entry->mOffset += written;
if (entry->mOffset == entry->mBuffer->size()) {
entry->mNotifyConsumed->post();
mAudioQueue.erase(mAudioQueue.begin());
@@ -324,20 +636,67 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
entry = NULL;
}
- numBytesAvailableToWrite -= copy;
- size_t copiedFrames = copy / mAudioSink->frameSize();
+ numBytesAvailableToWrite -= written;
+ size_t copiedFrames = written / mAudioSink->frameSize();
mNumFramesWritten += copiedFrames;
notifyIfMediaRenderingStarted();
+
+ if (written != (ssize_t)copy) {
+ // A short count was received from AudioSink::write()
+ //
+ // AudioSink write should block until exactly the number of bytes are delivered.
+ // But it may return with a short count (without an error) when:
+ //
+ // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
+ // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
+
+ // (Case 1)
+ // Must be a multiple of the frame size. If it is not a multiple of a frame size, it
+ // needs to fail, as we should not carry over fractional frames between calls.
+ CHECK_EQ(copy % mAudioSink->frameSize(), 0);
+
+ // (Case 2)
+ // Return early to the caller.
+ // Beware of calling immediately again as this may busy-loop if you are not careful.
+ ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
+ break;
+ }
}
+ return !mAudioQueue.empty();
+}
- notifyPosition();
+int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
+ int64_t writtenAudioDurationUs =
+ mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
+ return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
+}
- return !mAudioQueue.empty();
+int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
+ int64_t currentPositionUs;
+ if (getCurrentPosition(&currentPositionUs, nowUs) != OK) {
+ // If we failed to get the current position, e.g. because the audio clock is
+ // not ready yet, just play out video immediately without delay.
+ return nowUs;
+ }
+ return (mediaTimeUs - currentPositionUs) + nowUs;
+}
+
+void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
+ // TRICKY: vorbis decoder generates multiple frames with the same
+ // timestamp, so only update on the first frame with a given timestamp
+ if (mediaTimeUs == mAnchorTimeMediaUs) {
+ return;
+ }
+ setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
+ int64_t nowUs = ALooper::GetNowUs();
+ setAnchorTime(mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs));
}
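onNewAudioMediaTime() anchors the clock not at "now" but at the moment the buffer carrying this timestamp will actually be heard, i.e. after everything already written to the sink has played out. A worked example with made-up numbers:

    // Illustration only, assuming 48 kHz output:
    //   mNumFramesWritten                  = 24'000 frames  -> 500'000 us written to the sink
    //   getPlayedOutAudioDurationUs(nowUs) = 200'000        -> 200 ms already played out
    //   getPendingAudioPlayoutDurationUs   = 500'000 - 200'000 = 300'000
    //
    //   setAnchorTime(mediaTimeUs, nowUs + 300'000);
    //   // i.e. this buffer's media time maps to a real time 300 ms in the future.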
void NuPlayer::Renderer::postDrainVideoQueue() {
- if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
+ if (mDrainVideoQueuePending
+ || mSyncQueues
+ || (mPaused && mVideoSampleReceived)) {
return;
}
@@ -350,36 +709,64 @@ void NuPlayer::Renderer::postDrainVideoQueue() {
sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
msg->setInt32("generation", mVideoQueueGeneration);
- int64_t delayUs;
-
if (entry.mBuffer == NULL) {
// EOS doesn't carry a timestamp.
- delayUs = 0;
- } else if (mFlags & FLAG_REAL_TIME) {
+ msg->post();
+ mDrainVideoQueuePending = true;
+ return;
+ }
+
+ int64_t delayUs;
+ int64_t nowUs = ALooper::GetNowUs();
+ int64_t realTimeUs;
+ if (mFlags & FLAG_REAL_TIME) {
int64_t mediaTimeUs;
CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
-
- delayUs = mediaTimeUs - ALooper::GetNowUs();
+ realTimeUs = mediaTimeUs;
} else {
int64_t mediaTimeUs;
CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
if (mAnchorTimeMediaUs < 0) {
- delayUs = 0;
-
- if (!mHasAudio) {
- mAnchorTimeMediaUs = mediaTimeUs;
- mAnchorTimeRealUs = ALooper::GetNowUs();
- }
+ setAnchorTime(mediaTimeUs, nowUs);
+ realTimeUs = nowUs;
} else {
- int64_t realTimeUs =
- (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;
+ realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
+ }
- delayUs = realTimeUs - ALooper::GetNowUs();
+ // Heuristics to handle the case where the media time changed without a
+ // discontinuity. If we have not yet drained an audio buffer that was
+ // received after this video buffer, repost in 10 msec; otherwise repost
+ // in 500 msec.
+ delayUs = realTimeUs - nowUs;
+ if (delayUs > 500000) {
+ int64_t postDelayUs = 500000;
+ if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) {
+ postDelayUs = 10000;
+ }
+ msg->setWhat(kWhatPostDrainVideoQueue);
+ msg->post(postDelayUs);
+ mVideoScheduler->restart();
+ ALOGI("possible video time jump of %dms, retrying in %dms",
+ (int)(delayUs / 1000), (int)(postDelayUs / 1000));
+ mDrainVideoQueuePending = true;
+ return;
}
}
- msg->post(delayUs);
+ realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
+ int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
+
+ delayUs = realTimeUs - nowUs;
+
+ ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
+ // post 2 display refreshes before rendering is due
+ // FIXME currently this increases power consumption, so unless frame-accurate
+ // AV sync is requested, post closer to required render time (at 0.63 vsyncs)
+ if (!sFrameAccurateAVsync) {
+ twoVsyncsUs >>= 4;
+ }
+ msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
mDrainVideoQueuePending = true;
}
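postDrainVideoQueue() now snaps the target render time to a vsync boundary via VideoFrameScheduler and posts the drain message ahead of that deadline; unless persist.sys.media.avsync enables frame-accurate AV sync, the lead time is shrunk by the right shift to save power. Rough numbers, assuming a 60 Hz display:

    // Illustration only, assuming a 60 Hz display (vsync period ~16'667 us):
    //   twoVsyncsUs        ~= 33'333
    //   twoVsyncsUs >>= 4  ~=  2'083   (when frame-accurate AV sync is off)
    //   msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
    //   // i.e. the drain message fires ~2 ms (or ~33 ms) before the frame is due.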
@@ -399,12 +786,11 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
mVideoQueue.erase(mVideoQueue.begin());
entry = NULL;
- mVideoLateByUs = 0ll;
-
- notifyPosition();
+ setVideoLateByUs(0);
return;
}
+ int64_t nowUs = -1;
int64_t realTimeUs;
if (mFlags & FLAG_REAL_TIME) {
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
@@ -412,37 +798,51 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
- realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
+ nowUs = ALooper::GetNowUs();
+ realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
}
- mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
- bool tooLate = (mVideoLateByUs > 40000);
+ bool tooLate = false;
+
+ if (!mPaused) {
+ if (nowUs == -1) {
+ nowUs = ALooper::GetNowUs();
+ }
+ setVideoLateByUs(nowUs - realTimeUs);
+ tooLate = (mVideoLateByUs > 40000);
- if (tooLate) {
- ALOGV("video late by %lld us (%.2f secs)",
- mVideoLateByUs, mVideoLateByUs / 1E6);
+ if (tooLate) {
+ ALOGV("video late by %lld us (%.2f secs)",
+ mVideoLateByUs, mVideoLateByUs / 1E6);
+ } else {
+ ALOGV("rendering video at media time %.2f secs",
+ (mFlags & FLAG_REAL_TIME ? realTimeUs :
+ (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
+ }
} else {
- ALOGV("rendering video at media time %.2f secs",
- (mFlags & FLAG_REAL_TIME ? realTimeUs :
- (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
- if (mSoftRenderer != NULL) {
- mSoftRenderer->render(entry->mBuffer->data(), entry->mBuffer->size(), NULL);
+ setVideoLateByUs(0);
+ if (!mVideoSampleReceived && !mHasAudio) {
+ // This ensures that the first frame after a flush won't be used as an anchor
+ // while the renderer is paused, because resume can happen at any time after a seek.
+ setAnchorTime(-1, -1);
}
}
+ entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
entry->mNotifyConsumed->setInt32("render", !tooLate);
entry->mNotifyConsumed->post();
mVideoQueue.erase(mVideoQueue.begin());
entry = NULL;
- if (!mVideoRenderingStarted) {
- mVideoRenderingStarted = true;
- notifyVideoRenderingStart();
- }
+ mVideoSampleReceived = true;
- notifyIfMediaRenderingStarted();
-
- notifyPosition();
+ if (!mPaused) {
+ if (!mVideoRenderingStarted) {
+ mVideoRenderingStarted = true;
+ notifyVideoRenderingStart();
+ }
+ notifyIfMediaRenderingStarted();
+ }
}
void NuPlayer::Renderer::notifyVideoRenderingStart() {
@@ -451,22 +851,29 @@ void NuPlayer::Renderer::notifyVideoRenderingStart() {
notify->post();
}
-void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
+void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatEOS);
notify->setInt32("audio", static_cast<int32_t>(audio));
notify->setInt32("finalResult", finalResult);
- notify->post();
+ notify->post(delayUs);
+}
+
+void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
+ (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
- if (audio) {
- mHasAudio = true;
- } else {
- mHasVideo = true;
+ setHasMedia(audio);
+
+ if (mHasVideo) {
+ if (mVideoScheduler == NULL) {
+ mVideoScheduler = new VideoFrameScheduler();
+ mVideoScheduler->init();
+ }
}
if (dropBufferWhileFlushing(audio, msg)) {
@@ -484,15 +891,18 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
entry.mNotifyConsumed = notifyConsumed;
entry.mOffset = 0;
entry.mFinalResult = OK;
+ entry.mBufferOrdinal = ++mTotalBuffersQueued;
if (audio) {
+ Mutex::Autolock autoLock(mLock);
mAudioQueue.push_back(entry);
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
} else {
mVideoQueue.push_back(entry);
postDrainVideoQueue();
}
+ Mutex::Autolock autoLock(mLock);
if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
return;
}
@@ -502,7 +912,7 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
// EOS signalled on either queue.
- syncQueuesDone();
+ syncQueuesDone_l();
return;
}
@@ -526,10 +936,10 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
return;
}
- syncQueuesDone();
+ syncQueuesDone_l();
}
-void NuPlayer::Renderer::syncQueuesDone() {
+void NuPlayer::Renderer::syncQueuesDone_l() {
if (!mSyncQueues) {
return;
}
@@ -537,7 +947,7 @@ void NuPlayer::Renderer::syncQueuesDone() {
mSyncQueues = false;
if (!mAudioQueue.empty()) {
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
}
if (!mVideoQueue.empty()) {
@@ -561,14 +971,16 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
entry.mFinalResult = finalResult;
if (audio) {
+ Mutex::Autolock autoLock(mLock);
if (mAudioQueue.empty() && mSyncQueues) {
- syncQueuesDone();
+ syncQueuesDone_l();
}
mAudioQueue.push_back(entry);
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
} else {
if (mVideoQueue.empty() && mSyncQueues) {
- syncQueuesDone();
+ Mutex::Autolock autoLock(mLock);
+ syncQueuesDone_l();
}
mVideoQueue.push_back(entry);
postDrainVideoQueue();
@@ -579,6 +991,15 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
+ {
+ Mutex::Autolock autoLock(mFlushLock);
+ if (audio) {
+ mFlushingAudio = false;
+ } else {
+ mFlushingVideo = false;
+ }
+ }
+
// If we're currently syncing the queues, i.e. dropping audio while
// aligning the first audio/video buffer times and only one of the
// two queues has data, we may starve that queue by not requesting
@@ -587,31 +1008,47 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
// corresponding discontinuity on the other queue.
// Therefore we'll stop syncing the queues if at least one of them
// is flushed.
- syncQueuesDone();
+ {
+ Mutex::Autolock autoLock(mLock);
+ syncQueuesDone_l();
+ setPauseStartedTimeRealUs(-1);
+ }
ALOGV("flushing %s", audio ? "audio" : "video");
if (audio) {
- flushQueue(&mAudioQueue);
+ {
+ Mutex::Autolock autoLock(mLock);
+ flushQueue(&mAudioQueue);
- Mutex::Autolock autoLock(mFlushLock);
- mFlushingAudio = false;
+ ++mAudioQueueGeneration;
+ prepareForMediaRenderingStart();
+
+ if (offloadingAudio()) {
+ setAudioFirstAnchorTime(-1);
+ }
+ }
mDrainAudioQueuePending = false;
- ++mAudioQueueGeneration;
- prepareForMediaRenderingStart();
+ if (offloadingAudio()) {
+ mAudioSink->pause();
+ mAudioSink->flush();
+ mAudioSink->start();
+ }
} else {
flushQueue(&mVideoQueue);
- Mutex::Autolock autoLock(mFlushLock);
- mFlushingVideo = false;
-
mDrainVideoQueuePending = false;
++mVideoQueueGeneration;
+ if (mVideoScheduler != NULL) {
+ mVideoScheduler->restart();
+ }
+
prepareForMediaRenderingStart();
}
+ mVideoSampleReceived = false;
notifyFlushComplete(audio);
}
@@ -661,6 +1098,9 @@ bool NuPlayer::Renderer::dropBufferWhileFlushing(
}
void NuPlayer::Renderer::onAudioSinkChanged() {
+ if (offloadingAudio()) {
+ return;
+ }
CHECK(!mDrainAudioQueuePending);
mNumFramesWritten = 0;
uint32_t written;
@@ -669,62 +1109,60 @@ void NuPlayer::Renderer::onAudioSinkChanged() {
}
}
-void NuPlayer::Renderer::notifyPosition() {
- if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
- return;
- }
-
- int64_t nowUs = ALooper::GetNowUs();
-
- if (mLastPositionUpdateUs >= 0
- && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
- return;
- }
- mLastPositionUpdateUs = nowUs;
-
- int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
-
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatPosition);
- notify->setInt64("positionUs", positionUs);
- notify->setInt64("videoLateByUs", mVideoLateByUs);
- notify->post();
+void NuPlayer::Renderer::onDisableOffloadAudio() {
+ Mutex::Autolock autoLock(mLock);
+ mFlags &= ~FLAG_OFFLOAD_AUDIO;
+ ++mAudioQueueGeneration;
}
void NuPlayer::Renderer::onPause() {
- CHECK(!mPaused);
+ if (mPaused) {
+ ALOGW("Renderer::onPause() called while already paused!");
+ return;
+ }
+ {
+ Mutex::Autolock autoLock(mLock);
+ ++mAudioQueueGeneration;
+ ++mVideoQueueGeneration;
+ prepareForMediaRenderingStart();
+ mPaused = true;
+ setPauseStartedTimeRealUs(ALooper::GetNowUs());
+ }
mDrainAudioQueuePending = false;
- ++mAudioQueueGeneration;
-
mDrainVideoQueuePending = false;
- ++mVideoQueueGeneration;
-
- prepareForMediaRenderingStart();
if (mHasAudio) {
mAudioSink->pause();
+ startAudioOffloadPauseTimeout();
}
ALOGV("now paused audio queue has %d entries, video has %d entries",
mAudioQueue.size(), mVideoQueue.size());
-
- mPaused = true;
}
void NuPlayer::Renderer::onResume() {
+ readProperties();
+
if (!mPaused) {
return;
}
if (mHasAudio) {
+ cancelAudioOffloadPauseTimeout();
mAudioSink->start();
}
+ Mutex::Autolock autoLock(mLock);
mPaused = false;
+ if (mPauseStartedTimeRealUs != -1) {
+ int64_t newAnchorRealUs =
+ mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
+ setAnchorTime(mAnchorTimeMediaUs, newAnchorRealUs, true /* resume */);
+ }
if (!mAudioQueue.empty()) {
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
}
if (!mVideoQueue.empty()) {
@@ -732,5 +1170,242 @@ void NuPlayer::Renderer::onResume() {
}
}
+void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
+ if (mVideoScheduler == NULL) {
+ mVideoScheduler = new VideoFrameScheduler();
+ }
+ mVideoScheduler->init(fps);
+}
+
+// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
+// as it acquires locks and may query the audio driver.
+//
+// Some calls could conceivably retrieve extrapolated data instead of
+// accessing getTimestamp() or getPosition() every time a data buffer with
+// a media time is received.
+//
+int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
+ uint32_t numFramesPlayed;
+ int64_t numFramesPlayedAt;
+ AudioTimestamp ts;
+ static const int64_t kStaleTimestamp100ms = 100000;
+
+ status_t res = mAudioSink->getTimestamp(ts);
+ if (res == OK) { // case 1: mixing audio tracks and offloaded tracks.
+ numFramesPlayed = ts.mPosition;
+ numFramesPlayedAt =
+ ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
+ const int64_t timestampAge = nowUs - numFramesPlayedAt;
+ if (timestampAge > kStaleTimestamp100ms) {
+ // This is an audio FIXME.
+ // getTimestamp returns a timestamp which may come from audio mixing threads.
+ // After pausing, the MixerThread may go idle, thus the mTime estimate may
+ // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
+ // the max latency should be about 25ms with an average around 12ms (to be verified).
+ // For safety we use 100ms.
+ ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
+ (long long)nowUs, (long long)numFramesPlayedAt);
+ numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
+ }
+ //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
+ } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
+ numFramesPlayed = 0;
+ numFramesPlayedAt = nowUs;
+ //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
+ // numFramesPlayed, (long long)numFramesPlayedAt);
+ } else { // case 3: transitory at new track or audio fast tracks.
+ res = mAudioSink->getPosition(&numFramesPlayed);
+ CHECK_EQ(res, (status_t)OK);
+ numFramesPlayedAt = nowUs;
+ numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
+ //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
+ }
+
+ // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
+ //CHECK_EQ(numFramesPlayed & (1 << 31), 0); // can't be negative until 12.4 hrs, test
+ int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame())
+ + nowUs - numFramesPlayedAt;
+ if (durationUs < 0) {
+ // Occurs when the numFramesPlayed position is very small and one of the following holds:
+ // (1) In case 1, nowUs is computed before getTimestamp() is called, and
+ // numFramesPlayedAt exceeds nowUs by more than the duration of numFramesPlayed.
+ // (2) In case 3, getPosition() is used and half the sink latency is added to
+ // numFramesPlayedAt, again exceeding nowUs by more than the duration of numFramesPlayed.
+ //
+ // Both of these are transitory conditions.
+ ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs);
+ durationUs = 0;
+ }
+ ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
+ (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
+ return durationUs;
+}
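getPlayedOutAudioDurationUs() converts the sink's frame-level progress into wall-clock microseconds, extrapolating from the timestamp's capture time to "now". A worked example with made-up numbers:

    // Illustration only, assuming 48 kHz output (msecsPerFrame ~= 0.0208 ms):
    //   ts.mPosition (numFramesPlayed) = 9'600 frames -> 9'600 * 1000 * 0.0208 ~= 200'000 us
    //   nowUs - numFramesPlayedAt      = 5'000 us      (timestamp captured 5 ms ago)
    //   durationUs                     ~= 205'000 us   of audio actually played out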
+
+void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) {
+ if (mAudioOffloadTornDown) {
+ return;
+ }
+ mAudioOffloadTornDown = true;
+
+ int64_t currentPositionUs;
+ if (getCurrentPosition(&currentPositionUs) != OK) {
+ currentPositionUs = 0;
+ }
+
+ mAudioSink->stop();
+ mAudioSink->flush();
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatAudioOffloadTearDown);
+ notify->setInt64("positionUs", currentPositionUs);
+ notify->setInt32("reason", reason);
+ notify->post();
+}
+
+void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
+ if (offloadingAudio()) {
+ sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
+ msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
+ msg->post(kOffloadPauseMaxUs);
+ }
+}
+
+void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
+ if (offloadingAudio()) {
+ ++mAudioOffloadPauseTimeoutGeneration;
+ }
+}
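startAudioOffloadPauseTimeout() and cancelAudioOffloadPauseTimeout() reuse the generation-counter idiom seen elsewhere in the renderer: a delayed AMessage carries the generation it was posted with, and "cancelling" simply bumps the counter so a stale message is ignored when it finally arrives. A minimal sketch of the idiom (hypothetical handler, assumes it is registered with an ALooper):

    // Minimal sketch of the cancelable delayed-message idiom used here.
    struct TimeoutHandler : public AHandler {
        TimeoutHandler() : mGeneration(0) {}

        void arm(int64_t delayUs) {
            sp<AMessage> msg = new AMessage(kWhatTimeout, id());
            msg->setInt32("generation", mGeneration);
            msg->post(delayUs);
        }

        void cancel() { ++mGeneration; }  // any in-flight timeout becomes stale

    protected:
        virtual void onMessageReceived(const sp<AMessage> &msg) {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mGeneration) {
                return;  // cancelled before it fired; ignore
            }
            // ... handle the timeout ...
        }

    private:
        enum { kWhatTimeout = 'tmo ' };
        int32_t mGeneration;
    };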
+
+bool NuPlayer::Renderer::onOpenAudioSink(
+ const sp<AMessage> &format,
+ bool offloadOnly,
+ bool hasVideo,
+ uint32_t flags) {
+ ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
+ offloadOnly, offloadingAudio());
+ bool audioSinkChanged = false;
+
+ int32_t numChannels;
+ CHECK(format->findInt32("channel-count", &numChannels));
+
+ int32_t channelMask;
+ if (!format->findInt32("channel-mask", &channelMask)) {
+ // signal to the AudioSink to derive the mask from count.
+ channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
+ }
+
+ int32_t sampleRate;
+ CHECK(format->findInt32("sample-rate", &sampleRate));
+
+ if (offloadingAudio()) {
+ audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+ status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+
+ if (err != OK) {
+ ALOGE("Couldn't map mime \"%s\" to a valid "
+ "audio_format", mime.c_str());
+ onDisableOffloadAudio();
+ } else {
+ ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
+ mime.c_str(), audioFormat);
+
+ int avgBitRate = -1;
+ format->findInt32("bit-rate", &avgBitRate);
+
+ int32_t aacProfile = -1;
+ if (audioFormat == AUDIO_FORMAT_AAC
+ && format->findInt32("aac-profile", &aacProfile)) {
+ // Redefine AAC format as per aac profile
+ mapAACProfileToAudioFormat(
+ audioFormat,
+ aacProfile);
+ }
+
+ audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
+ offloadInfo.duration_us = -1;
+ format->findInt64(
+ "durationUs", &offloadInfo.duration_us);
+ offloadInfo.sample_rate = sampleRate;
+ offloadInfo.channel_mask = channelMask;
+ offloadInfo.format = audioFormat;
+ offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
+ offloadInfo.bit_rate = avgBitRate;
+ offloadInfo.has_video = hasVideo;
+ offloadInfo.is_streaming = true;
+
+ if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
+ ALOGV("openAudioSink: no change in offload mode");
+ // no change from previous configuration, everything ok.
+ return offloadingAudio();
+ }
+ ALOGV("openAudioSink: try to open AudioSink in offload mode");
+ flags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+ flags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+ audioSinkChanged = true;
+ mAudioSink->close();
+ err = mAudioSink->open(
+ sampleRate,
+ numChannels,
+ (audio_channel_mask_t)channelMask,
+ audioFormat,
+ 8 /* bufferCount */,
+ &NuPlayer::Renderer::AudioSinkCallback,
+ this,
+ (audio_output_flags_t)flags,
+ &offloadInfo);
+
+ if (err == OK) {
+ // If the playback is offloaded to h/w, we pass
+ // the HAL some metadata information.
+ // We don't want to do this for PCM because it
+ // will be going through the AudioFlinger mixer
+ // before reaching the hardware.
+ // TODO
+ mCurrentOffloadInfo = offloadInfo;
+ err = mAudioSink->start();
+ ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
+ }
+ if (err != OK) {
+ // Clean up, fall back to non offload mode.
+ mAudioSink->close();
+ onDisableOffloadAudio();
+ mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+ ALOGV("openAudioSink: offload failed");
+ }
+ }
+ }
+ if (!offloadOnly && !offloadingAudio()) {
+ flags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+ ALOGV("openAudioSink: open AudioSink in NON-offload mode");
+
+ audioSinkChanged = true;
+ mAudioSink->close();
+ mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+ CHECK_EQ(mAudioSink->open(
+ sampleRate,
+ numChannels,
+ (audio_channel_mask_t)channelMask,
+ AUDIO_FORMAT_PCM_16_BIT,
+ 8 /* bufferCount */,
+ NULL,
+ NULL,
+ (audio_output_flags_t)flags),
+ (status_t)OK);
+ mAudioSink->start();
+ }
+ if (audioSinkChanged) {
+ onAudioSinkChanged();
+ }
+
+ return offloadingAudio();
+}
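onOpenAudioSink() above attempts a compressed-offload open first (when offload is in effect) and quietly falls back to a plain 16-bit PCM sink if the offload open or start fails. Condensed decision flow, as comments (a summary of the code above, not a replacement for it):

    // Condensed flow of onOpenAudioSink():
    //   if (offloadingAudio()) {
    //       map MIME -> audio_format_t; on failure, disable offload
    //       if the audio_offload_info_t is unchanged, return early (sink already open)
    //       close the sink; reopen with AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD and AudioSinkCallback
    //       if open()/start() fails: close, disable offload, reset mCurrentOffloadInfo
    //   }
    //   if (!offloadOnly && !offloadingAudio()) {
    //       close the sink; reopen as 16-bit PCM without a callback; start it
    //   }
    //   if (audioSinkChanged) onAudioSinkChanged();
    //   return offloadingAudio();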
+
+void NuPlayer::Renderer::onCloseAudioSink() {
+ mAudioSink->close();
+ mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+}
+
} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index 9124e03..b15a266 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -23,16 +23,22 @@
namespace android {
struct ABuffer;
-class SoftwareRenderer;
+struct VideoFrameScheduler;
struct NuPlayer::Renderer : public AHandler {
enum Flags {
FLAG_REAL_TIME = 1,
+ FLAG_OFFLOAD_AUDIO = 2,
};
Renderer(const sp<MediaPlayerBase::AudioSink> &sink,
const sp<AMessage> &notify,
uint32_t flags = 0);
+ static size_t AudioSinkCallback(
+ MediaPlayerBase::AudioSink *audioSink,
+ void *data, size_t size, void *me,
+ MediaPlayerBase::AudioSink::cb_event_t event);
+
void queueBuffer(
bool audio,
const sp<ABuffer> &buffer,
@@ -46,18 +52,45 @@ struct NuPlayer::Renderer : public AHandler {
void signalAudioSinkChanged();
+ void signalDisableOffloadAudio();
+
void pause();
void resume();
+ void setVideoFrameRate(float fps);
+
+ // The following setters and getters are protected by mTimeLock.
+ status_t getCurrentPosition(int64_t *mediaUs);
+ status_t getCurrentPosition(int64_t *mediaUs, int64_t nowUs);
+ void setHasMedia(bool audio);
+ void setAudioFirstAnchorTime(int64_t mediaUs);
+ void setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs);
+ void setAnchorTime(int64_t mediaUs, int64_t realUs, bool resume = false);
+ void setVideoLateByUs(int64_t lateUs);
+ int64_t getVideoLateByUs();
+ void setPauseStartedTimeRealUs(int64_t realUs);
+
+ bool openAudioSink(
+ const sp<AMessage> &format,
+ bool offloadOnly,
+ bool hasVideo,
+ uint32_t flags);
+ void closeAudioSink();
+
enum {
kWhatEOS = 'eos ',
kWhatFlushComplete = 'fluC',
kWhatPosition = 'posi',
kWhatVideoRenderingStart = 'vdrd',
kWhatMediaRenderingStart = 'mdrd',
+ kWhatAudioOffloadTearDown = 'aOTD',
+ kWhatAudioOffloadPauseTimeout = 'aOPT',
};
- void setSoftRenderer(SoftwareRenderer *softRenderer);
+ enum AudioOffloadTearDownReason {
+ kDueToError = 0,
+ kDueToTimeout,
+ };
protected:
virtual ~Renderer();
@@ -66,14 +99,20 @@ protected:
private:
enum {
- kWhatDrainAudioQueue = 'draA',
- kWhatDrainVideoQueue = 'draV',
- kWhatQueueBuffer = 'queB',
- kWhatQueueEOS = 'qEOS',
- kWhatFlush = 'flus',
- kWhatAudioSinkChanged = 'auSC',
- kWhatPause = 'paus',
- kWhatResume = 'resm',
+ kWhatDrainAudioQueue = 'draA',
+ kWhatDrainVideoQueue = 'draV',
+ kWhatPostDrainVideoQueue = 'pDVQ',
+ kWhatQueueBuffer = 'queB',
+ kWhatQueueEOS = 'qEOS',
+ kWhatFlush = 'flus',
+ kWhatAudioSinkChanged = 'auSC',
+ kWhatPause = 'paus',
+ kWhatResume = 'resm',
+ kWhatOpenAudioSink = 'opnA',
+ kWhatCloseAudioSink = 'clsA',
+ kWhatStopAudioSink = 'stpA',
+ kWhatDisableOffloadAudio = 'noOA',
+ kWhatSetVideoFrameRate = 'sVFR',
};
struct QueueEntry {
@@ -81,44 +120,69 @@ private:
sp<AMessage> mNotifyConsumed;
size_t mOffset;
status_t mFinalResult;
+ int32_t mBufferOrdinal;
};
static const int64_t kMinPositionUpdateDelayUs;
sp<MediaPlayerBase::AudioSink> mAudioSink;
- SoftwareRenderer *mSoftRenderer;
sp<AMessage> mNotify;
+ Mutex mLock;
uint32_t mFlags;
List<QueueEntry> mAudioQueue;
List<QueueEntry> mVideoQueue;
uint32_t mNumFramesWritten;
+ sp<VideoFrameScheduler> mVideoScheduler;
bool mDrainAudioQueuePending;
bool mDrainVideoQueuePending;
int32_t mAudioQueueGeneration;
int32_t mVideoQueueGeneration;
+ Mutex mTimeLock;
+ // |mTimeLock| protects the following 7 time-related member vars.
+ // Note: these members are only written on the Renderer thread, so reads on the
+ // Renderer thread need no protection; any other access must hold |mTimeLock|.
+ // TODO: move these members to a separate media clock class.
+ int64_t mAudioFirstAnchorTimeMediaUs;
int64_t mAnchorTimeMediaUs;
int64_t mAnchorTimeRealUs;
+ int64_t mVideoLateByUs;
+ bool mHasAudio;
+ bool mHasVideo;
+ int64_t mPauseStartedTimeRealUs;
Mutex mFlushLock; // protects the following 2 member vars.
bool mFlushingAudio;
bool mFlushingVideo;
- bool mHasAudio;
- bool mHasVideo;
bool mSyncQueues;
bool mPaused;
+ bool mVideoSampleReceived;
bool mVideoRenderingStarted;
int32_t mVideoRenderingStartGeneration;
int32_t mAudioRenderingStartGeneration;
int64_t mLastPositionUpdateUs;
- int64_t mVideoLateByUs;
+
+ int32_t mAudioOffloadPauseTimeoutGeneration;
+ bool mAudioOffloadTornDown;
+ audio_offload_info_t mCurrentOffloadInfo;
+
+ int32_t mTotalBuffersQueued;
+ int32_t mLastAudioBufferDrained;
+
+ size_t fillAudioBuffer(void *buffer, size_t size);
bool onDrainAudioQueue();
- void postDrainAudioQueue(int64_t delayUs = 0);
+ int64_t getPendingAudioPlayoutDurationUs(int64_t nowUs);
+ int64_t getPlayedOutAudioDurationUs(int64_t nowUs);
+ void postDrainAudioQueue_l(int64_t delayUs = 0);
+
+ void onNewAudioMediaTime(int64_t mediaTimeUs);
+ int64_t getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs);
void onDrainVideoQueue();
void postDrainVideoQueue();
@@ -130,18 +194,33 @@ private:
void onQueueEOS(const sp<AMessage> &msg);
void onFlush(const sp<AMessage> &msg);
void onAudioSinkChanged();
+ void onDisableOffloadAudio();
void onPause();
void onResume();
-
- void notifyEOS(bool audio, status_t finalResult);
+ void onSetVideoFrameRate(float fps);
+ void onAudioOffloadTearDown(AudioOffloadTearDownReason reason);
+ bool onOpenAudioSink(
+ const sp<AMessage> &format,
+ bool offloadOnly,
+ bool hasVideo,
+ uint32_t flags);
+ void onCloseAudioSink();
+
+ void notifyEOS(bool audio, status_t finalResult, int64_t delayUs = 0);
void notifyFlushComplete(bool audio);
void notifyPosition();
void notifyVideoLateBy(int64_t lateByUs);
void notifyVideoRenderingStart();
+ void notifyAudioOffloadTearDown();
void flushQueue(List<QueueEntry> *queue);
bool dropBufferWhileFlushing(bool audio, const sp<AMessage> &msg);
- void syncQueuesDone();
+ void syncQueuesDone_l();
+
+ bool offloadingAudio() const { return (mFlags & FLAG_OFFLOAD_AUDIO) != 0; }
+
+ void startAudioOffloadPauseTimeout();
+ void cancelAudioOffloadPauseTimeout();
DISALLOW_EVIL_CONSTRUCTORS(Renderer);
};
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index 11279fc..2f06c31 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -21,11 +21,15 @@
#include "NuPlayer.h"
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MetaData.h>
+#include <media/mediaplayer.h>
+#include <utils/Vector.h>
namespace android {
struct ABuffer;
struct MetaData;
+struct MediaBuffer;
struct NuPlayer::Source : public AHandler {
enum Flags {
@@ -34,16 +38,20 @@ struct NuPlayer::Source : public AHandler {
FLAG_CAN_SEEK_FORWARD = 4, // the "10 sec forward button"
FLAG_CAN_SEEK = 8, // the "seek bar"
FLAG_DYNAMIC_DURATION = 16,
+ FLAG_SECURE = 32,
};
enum {
kWhatPrepared,
kWhatFlagsChanged,
kWhatVideoSizeChanged,
+ kWhatBufferingUpdate,
kWhatBufferingStart,
kWhatBufferingEnd,
kWhatSubtitleData,
+ kWhatTimedTextData,
kWhatQueueDecoderShutdown,
+ kWhatDrmNoLicense,
};
// The provides message is used to notify the player about various
@@ -59,11 +67,16 @@ struct NuPlayer::Source : public AHandler {
virtual void pause() {}
virtual void resume() {}
+ // Explicitly disconnect the underlying data source
+ virtual void disconnect() {}
+
// Returns OK iff more data was available,
// an error or ERROR_END_OF_STREAM if not.
virtual status_t feedMoreTSData() = 0;
virtual sp<AMessage> getFormat(bool audio);
+ virtual sp<MetaData> getFormatMeta(bool /* audio */) { return NULL; }
+ virtual sp<MetaData> getFileFormatMeta() const { return NULL; }
virtual status_t dequeueAccessUnit(
bool audio, sp<ABuffer> *accessUnit) = 0;
@@ -72,7 +85,15 @@ struct NuPlayer::Source : public AHandler {
return INVALID_OPERATION;
}
- virtual status_t getTrackInfo(Parcel* /* reply */) const {
+ virtual size_t getTrackCount() const {
+ return 0;
+ }
+
+ virtual sp<AMessage> getTrackInfo(size_t /* trackIndex */) const {
+ return NULL;
+ }
+
+ virtual ssize_t getSelectedTrack(media_track_type /* type */) const {
return INVALID_OPERATION;
}
@@ -84,6 +105,10 @@ struct NuPlayer::Source : public AHandler {
return INVALID_OPERATION;
}
+ virtual status_t setBuffers(bool /* audio */, Vector<MediaBuffer *> &/* buffers */) {
+ return INVALID_OPERATION;
+ }
+
virtual bool isRealTime() const {
return false;
}
@@ -93,12 +118,10 @@ protected:
virtual void onMessageReceived(const sp<AMessage> &msg);
- virtual sp<MetaData> getFormatMeta(bool /* audio */) { return NULL; }
-
sp<AMessage> dupNotify() const { return mNotify->dup(); }
void notifyFlagsChanged(uint32_t flags);
- void notifyVideoSizeChanged(int32_t width, int32_t height);
+ void notifyVideoSizeChanged(const sp<AMessage> &format = NULL);
void notifyPrepared(status_t err = OK);
private:
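The Source interface above swaps the Parcel-based getTrackInfo() for a count/index pair of accessors plus getSelectedTrack(). A hypothetical single-track Source might implement the new surface like this (keys and values are illustrative, not mandated by the interface):

    // Hypothetical single-audio-track source implementing the new accessors.
    virtual size_t getTrackCount() const { return 1; }

    virtual sp<AMessage> getTrackInfo(size_t trackIndex) const {
        if (trackIndex != 0) {
            return NULL;
        }
        sp<AMessage> format = new AMessage;
        format->setInt32("type", MEDIA_TRACK_TYPE_AUDIO);
        format->setString("mime", "audio/mp4a-latm");
        format->setString("language", "und");
        return format;
    }

    virtual ssize_t getSelectedTrack(media_track_type type) const {
        return (type == MEDIA_TRACK_TYPE_AUDIO) ? 0 : -1;
    }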
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 18cf6d1..ffacb8f 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -24,6 +24,7 @@
#include "MyHandler.h"
#include "SDPLoader.h"
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
@@ -33,12 +34,14 @@ const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
NuPlayer::RTSPSource::RTSPSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
const KeyedVector<String8, String8> *headers,
bool uidValid,
uid_t uid,
bool isSDP)
: Source(notify),
+ mHTTPService(httpService),
mURL(url),
mUIDValid(uidValid),
mUID(uid),
@@ -67,6 +70,7 @@ NuPlayer::RTSPSource::RTSPSource(
NuPlayer::RTSPSource::~RTSPSource() {
if (mLooper != NULL) {
+ mLooper->unregisterHandler(id());
mLooper->stop();
}
}
@@ -77,14 +81,13 @@ void NuPlayer::RTSPSource::prepareAsync() {
mLooper->setName("rtsp");
mLooper->start();
- mReflector = new AHandlerReflector<RTSPSource>(this);
- mLooper->registerHandler(mReflector);
+ mLooper->registerHandler(this);
}
CHECK(mHandler == NULL);
CHECK(mSDPLoader == NULL);
- sp<AMessage> notify = new AMessage(kWhatNotify, mReflector->id());
+ sp<AMessage> notify = new AMessage(kWhatNotify, id());
CHECK_EQ(mState, (int)DISCONNECTED);
mState = CONNECTING;
@@ -92,7 +95,7 @@ void NuPlayer::RTSPSource::prepareAsync() {
if (mIsSDP) {
mSDPLoader = new SDPLoader(notify,
(mFlags & kFlagIncognito) ? SDPLoader::kFlagIncognito : 0,
- mUIDValid, mUID);
+ mHTTPService);
mSDPLoader->load(
mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders);
@@ -115,7 +118,7 @@ void NuPlayer::RTSPSource::stop() {
if (mLooper == NULL) {
return;
}
- sp<AMessage> msg = new AMessage(kWhatDisconnect, mReflector->id());
+ sp<AMessage> msg = new AMessage(kWhatDisconnect, id());
sp<AMessage> dummy;
msg->postAndAwaitResponse(&dummy);
@@ -302,7 +305,7 @@ status_t NuPlayer::RTSPSource::getDuration(int64_t *durationUs) {
}
status_t NuPlayer::RTSPSource::seekTo(int64_t seekTimeUs) {
- sp<AMessage> msg = new AMessage(kWhatPerformSeek, mReflector->id());
+ sp<AMessage> msg = new AMessage(kWhatPerformSeek, id());
msg->setInt32("generation", ++mSeekGeneration);
msg->setInt64("timeUs", seekTimeUs);
msg->post(200000ll);
@@ -353,7 +356,7 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
{
onConnected();
- notifyVideoSizeChanged(0, 0);
+ notifyVideoSizeChanged();
uint32_t flags = 0;
@@ -502,7 +505,10 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
TrackInfo *info = &mTracks.editItemAt(trackIndex);
sp<AnotherPacketSource> source = info->mSource;
if (source != NULL) {
- source->queueDiscontinuity(ATSParser::DISCONTINUITY_SEEK, NULL);
+ source->queueDiscontinuity(
+ ATSParser::DISCONTINUITY_SEEK,
+ NULL,
+ true /* discard */);
}
break;
@@ -607,7 +613,7 @@ void NuPlayer::RTSPSource::onSDPLoaded(const sp<AMessage> &msg) {
ALOGE("Unable to find url in SDP");
err = UNKNOWN_ERROR;
} else {
- sp<AMessage> notify = new AMessage(kWhatNotify, mReflector->id());
+ sp<AMessage> notify = new AMessage(kWhatNotify, id());
mHandler = new MyHandler(rtspUri.c_str(), notify, mUIDValid, mUID);
mLooper->registerHandler(mHandler);
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
index 8cf34a0..f1cae53 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -22,8 +22,6 @@
#include "ATSParser.h"
-#include <media/stagefright/foundation/AHandlerReflector.h>
-
namespace android {
struct ALooper;
@@ -34,6 +32,7 @@ struct SDPLoader;
struct NuPlayer::RTSPSource : public NuPlayer::Source {
RTSPSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
const KeyedVector<String8, String8> *headers,
bool uidValid = false,
@@ -88,6 +87,7 @@ private:
bool mNPTMappingValid;
};
+ sp<IMediaHTTPService> mHTTPService;
AString mURL;
KeyedVector<String8, String8> mExtraHeaders;
bool mUIDValid;
@@ -100,7 +100,6 @@ private:
bool mBuffering;
sp<ALooper> mLooper;
- sp<AHandlerReflector<RTSPSource> > mReflector;
sp<MyHandler> mHandler;
sp<SDPLoader> mSDPLoader;
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
index 28f0d50..2e9a29a 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -44,7 +44,7 @@ NuPlayer::StreamingSource::~StreamingSource() {
}
void NuPlayer::StreamingSource::prepareAsync() {
- notifyVideoSizeChanged(0, 0);
+ notifyVideoSizeChanged();
notifyFlagsChanged(0);
notifyPrepared();
}
diff --git a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp
deleted file mode 100644
index 2aae4dd..0000000
--- a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "MP4Source.h"
-
-#include "FragmentedMP4Parser.h"
-#include "../NuPlayerStreamListener.h"
-
-#include <media/IStreamSource.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MetaData.h>
-
-namespace android {
-
-struct StreamSource : public FragmentedMP4Parser::Source {
- StreamSource(const sp<IStreamSource> &source)
- : mListener(new NuPlayer::NuPlayerStreamListener(source, 0)),
- mPosition(0) {
- mListener->start();
- }
-
- virtual ssize_t readAt(off64_t offset, void *data, size_t size) {
- if (offset < mPosition) {
- return -EPIPE;
- }
-
- while (offset > mPosition) {
- char buffer[1024];
- off64_t skipBytes = offset - mPosition;
- if (skipBytes > sizeof(buffer)) {
- skipBytes = sizeof(buffer);
- }
-
- sp<AMessage> extra;
- ssize_t n;
- for (;;) {
- n = mListener->read(buffer, skipBytes, &extra);
-
- if (n == -EWOULDBLOCK) {
- usleep(10000);
- continue;
- }
-
- break;
- }
-
- ALOGV("skipped %ld bytes at offset %lld", n, mPosition);
-
- if (n < 0) {
- return n;
- }
-
- mPosition += n;
- }
-
- sp<AMessage> extra;
- size_t total = 0;
- while (total < size) {
- ssize_t n = mListener->read(
- (uint8_t *)data + total, size - total, &extra);
-
- if (n == -EWOULDBLOCK) {
- usleep(10000);
- continue;
- } else if (n == 0) {
- break;
- } else if (n < 0) {
- mPosition += total;
- return n;
- }
-
- total += n;
- }
-
- ALOGV("read %ld bytes at offset %lld", total, mPosition);
-
- mPosition += total;
-
- return total;
- }
-
- bool isSeekable() {
- return false;
- }
-
-private:
- sp<NuPlayer::NuPlayerStreamListener> mListener;
- off64_t mPosition;
-
- DISALLOW_EVIL_CONSTRUCTORS(StreamSource);
-};
-
-MP4Source::MP4Source(
- const sp<AMessage> &notify, const sp<IStreamSource> &source)
- : Source(notify),
- mSource(source),
- mLooper(new ALooper),
- mParser(new FragmentedMP4Parser),
- mEOS(false) {
- mLooper->registerHandler(mParser);
-}
-
-MP4Source::~MP4Source() {
-}
-
-void MP4Source::prepareAsync() {
- notifyVideoSizeChanged(0, 0);
- notifyFlagsChanged(0);
- notifyPrepared();
-}
-
-void MP4Source::start() {
- mLooper->start(false /* runOnCallingThread */);
- mParser->start(new StreamSource(mSource));
-}
-
-status_t MP4Source::feedMoreTSData() {
- return mEOS ? ERROR_END_OF_STREAM : (status_t)OK;
-}
-
-sp<AMessage> MP4Source::getFormat(bool audio) {
- return mParser->getFormat(audio);
-}
-
-status_t MP4Source::dequeueAccessUnit(
- bool audio, sp<ABuffer> *accessUnit) {
- return mParser->dequeueAccessUnit(audio, accessUnit);
-}
-
-} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h
deleted file mode 100644
index a6ef622..0000000
--- a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MP4_SOURCE_H
-#define MP4_SOURCE_H
-
-#include "NuPlayerSource.h"
-
-namespace android {
-
-struct FragmentedMP4Parser;
-
-struct MP4Source : public NuPlayer::Source {
- MP4Source(const sp<AMessage> &notify, const sp<IStreamSource> &source);
-
- virtual void prepareAsync();
- virtual void start();
-
- virtual status_t feedMoreTSData();
-
- virtual sp<AMessage> getFormat(bool audio);
-
- virtual status_t dequeueAccessUnit(
- bool audio, sp<ABuffer> *accessUnit);
-
-protected:
- virtual ~MP4Source();
-
-private:
- sp<IStreamSource> mSource;
- sp<ALooper> mLooper;
- sp<FragmentedMP4Parser> mParser;
- bool mEOS;
-
- DISALLOW_EVIL_CONSTRUCTORS(MP4Source);
-};
-
-} // namespace android
-
-#endif // MP4_SOURCE_H