author     Robert Shih <robertshih@google.com>    2015-04-17 16:42:24 +0000
committer  Android (Google) Code Review <android-gerrit@google.com>    2015-04-17 16:42:25 +0000
commit     bfb03d2486e3c5dd21120bf403d68fa36323b102 (patch)
tree       e881a136c41925b672eee4a5000919bba74fb64e /media
parent     9767a47dcf9bbe090045ab99e0290d73289c704f (diff)
parent     0852843d304006e3ab333081fddda13b07193de8 (diff)
download   frameworks_av-bfb03d2486e3c5dd21120bf403d68fa36323b102.zip
           frameworks_av-bfb03d2486e3c5dd21120bf403d68fa36323b102.tar.gz
           frameworks_av-bfb03d2486e3c5dd21120bf403d68fa36323b102.tar.bz2
Merge "stagefright: initial timed id3 support in hls"
Diffstat (limited to 'media')
-rw-r--r--  media/libmedia/mediaplayer.cpp                              3
-rw-r--r--  media/libmediaplayerservice/nuplayer/Android.mk             1
-rw-r--r--  media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp   140
-rw-r--r--  media/libmediaplayerservice/nuplayer/HTTPLiveSource.h       9
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.cpp          24
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.h             1
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerSource.h       1
-rw-r--r--  media/libstagefright/MediaDefs.cpp                          1
-rw-r--r--  media/libstagefright/httplive/LiveSession.cpp             147
-rw-r--r--  media/libstagefright/httplive/LiveSession.h                15
-rw-r--r--  media/libstagefright/httplive/PlaylistFetcher.cpp          77
-rw-r--r--  media/libstagefright/httplive/PlaylistFetcher.h             5
-rw-r--r--  media/libstagefright/mpeg2ts/ATSParser.cpp                 21
-rw-r--r--  media/libstagefright/mpeg2ts/ATSParser.h                    4
-rw-r--r--  media/libstagefright/mpeg2ts/AnotherPacketSource.cpp        8
-rw-r--r--  media/libstagefright/mpeg2ts/AnotherPacketSource.h          1
-rw-r--r--  media/libstagefright/mpeg2ts/ESQueue.cpp                   23
-rw-r--r--  media/libstagefright/mpeg2ts/ESQueue.h                      2
18 files changed, 399 insertions, 84 deletions
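
Taken together, the patches below thread a metadata-only stream through the HLS stack: ATSParser and ESQueue pass PES packets with stream_type 0x15 through as opaque access units, PlaylistFetcher feeds them into a new STREAMTYPE_METADATA packet source and raises kWhatMetadataDetected, LiveSession advertises one extra track and rebases the timestamps, HTTPLiveSource polls that source while the track is selected, and NuPlayer forwards each buffer to the application as a MEDIA_META_DATA parcel (an empty kWhatTimedMetaData notification becomes MEDIA_INFO_METADATA_UPDATE). To a client enumerating tracks, the synthetic metadata track is described by the fields set in LiveSession::getTrackInfo; a standalone sketch of just those fields (the local enum stands in for the platform's MEDIA_TRACK_TYPE_METADATA constant, and the real code packs the values into an AMessage, not a struct):

#include <cstdio>

// Sketch of the track descriptor LiveSession::getTrackInfo() builds for the
// synthetic metadata track; values are copied from the diff below.
enum TrackType { kTrackTypeMetadata /* MEDIA_TRACK_TYPE_METADATA in the platform enum */ };

struct MetadataTrackInfo {
    TrackType   type;
    const char *language;
    const char *mime;
};

int main() {
    MetadataTrackInfo info = { kTrackTypeMetadata, "und", "application/octet-stream" };
    std::printf("metadata track: lang=%s mime=%s\n", info.language, info.mime);
    return 0;
}
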
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 6eddcb6..9a276ae 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -857,6 +857,9 @@ void MediaPlayer::notify(int msg, int ext1, int ext2, const Parcel *obj)
case MEDIA_SUBTITLE_DATA:
ALOGV("Received subtitle data message");
break;
+ case MEDIA_META_DATA:
+ ALOGV("Received timed metadata message");
+ break;
default:
ALOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);
break;
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
index 6609874..fca08e2 100644
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -16,6 +16,7 @@ LOCAL_SRC_FILES:= \
StreamingSource.cpp \
LOCAL_C_INCLUDES := \
+ $(TOP)/frameworks/av/media/libstagefright \
$(TOP)/frameworks/av/media/libstagefright/httplive \
$(TOP)/frameworks/av/media/libstagefright/include \
$(TOP)/frameworks/av/media/libstagefright/mpeg2ts \
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index 0476c9b..39b8d09 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -22,7 +22,6 @@
#include "AnotherPacketSource.h"
#include "LiveDataSource.h"
-#include "LiveSession.h"
#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ABuffer.h>
@@ -30,6 +29,7 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaDefs.h>
namespace android {
@@ -44,7 +44,10 @@ NuPlayer::HTTPLiveSource::HTTPLiveSource(
mFlags(0),
mFinalResult(OK),
mOffset(0),
- mFetchSubtitleDataGeneration(0) {
+ mFetchSubtitleDataGeneration(0),
+ mFetchMetaDataGeneration(0),
+ mHasMetadata(false),
+ mMetadataSelected(false) {
if (headers) {
mExtraHeaders = *headers;
@@ -142,19 +145,49 @@ sp<AMessage> NuPlayer::HTTPLiveSource::getTrackInfo(size_t trackIndex) const {
ssize_t NuPlayer::HTTPLiveSource::getSelectedTrack(media_track_type type) const {
if (mLiveSession == NULL) {
return -1;
+ } else if (type == MEDIA_TRACK_TYPE_METADATA) {
+ // MEDIA_TRACK_TYPE_METADATA is always last track
+ // mMetadataSelected can only be true when mHasMetadata is true
+ return mMetadataSelected ? (mLiveSession->getTrackCount() - 1) : -1;
} else {
return mLiveSession->getSelectedTrack(type);
}
}
status_t NuPlayer::HTTPLiveSource::selectTrack(size_t trackIndex, bool select, int64_t /*timeUs*/) {
- status_t err = mLiveSession->selectTrack(trackIndex, select);
+ if (mLiveSession == NULL) {
+ return INVALID_OPERATION;
+ }
+
+ status_t err = INVALID_OPERATION;
+ bool postFetchMsg = false, isSub = false;
+ if (trackIndex != mLiveSession->getTrackCount() - 1) {
+ err = mLiveSession->selectTrack(trackIndex, select);
+ postFetchMsg = select;
+ isSub = true;
+ } else {
+ // metadata track
+ if (mHasMetadata) {
+ if (mMetadataSelected && !select) {
+ err = OK;
+ } else if (!mMetadataSelected && select) {
+ postFetchMsg = true;
+ err = OK;
+ } else {
+ err = BAD_VALUE; // behave as LiveSession::selectTrack
+ }
+
+ mMetadataSelected = select;
+ }
+ }
if (err == OK) {
- mFetchSubtitleDataGeneration++;
- if (select) {
- sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, this);
- msg->setInt32("generation", mFetchSubtitleDataGeneration);
+ int32_t &generation = isSub ? mFetchSubtitleDataGeneration : mFetchMetaDataGeneration;
+ generation++;
+ if (postFetchMsg) {
+ int32_t what = isSub ? kWhatFetchSubtitleData : kWhatFetchMetaData;
+ sp<AMessage> msg = new AMessage(what, this);
+ msg->setInt32("generation", generation);
msg->post();
}
}
@@ -169,6 +202,49 @@ status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) {
return mLiveSession->seekTo(seekTimeUs);
}
+void NuPlayer::HTTPLiveSource::pollForRawData(
+ const sp<AMessage> &msg, int32_t currentGeneration,
+ LiveSession::StreamType fetchType, int32_t pushWhat) {
+
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ if (generation != currentGeneration) {
+ return;
+ }
+
+ sp<ABuffer> buffer;
+ while (mLiveSession->dequeueAccessUnit(fetchType, &buffer) == OK) {
+
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", pushWhat);
+ notify->setBuffer("buffer", buffer);
+
+ int64_t timeUs, baseUs, delayUs;
+ CHECK(buffer->meta()->findInt64("baseUs", &baseUs));
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+ delayUs = baseUs + timeUs - ALooper::GetNowUs();
+
+ if (fetchType == LiveSession::STREAMTYPE_SUBTITLES) {
+ notify->post();
+ msg->post(delayUs > 0ll ? delayUs : 0ll);
+ return;
+ } else if (fetchType == LiveSession::STREAMTYPE_METADATA) {
+ if (delayUs < -1000000ll) { // 1 second
+ continue;
+ }
+ notify->post();
+ // push all currently available metadata buffers in each invocation of pollForRawData
+ // continue;
+ } else {
+ TRESPASS();
+ }
+ }
+
+ // try again in 1 second
+ msg->post(1000000ll);
+}
+
void NuPlayer::HTTPLiveSource::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatSessionNotify:
@@ -179,33 +255,24 @@ void NuPlayer::HTTPLiveSource::onMessageReceived(const sp<AMessage> &msg) {
case kWhatFetchSubtitleData:
{
- int32_t generation;
- CHECK(msg->findInt32("generation", &generation));
+ pollForRawData(
+ msg, mFetchSubtitleDataGeneration,
+ /* fetch */ LiveSession::STREAMTYPE_SUBTITLES,
+ /* push */ kWhatSubtitleData);
+
+ break;
+ }
- if (generation != mFetchSubtitleDataGeneration) {
- // stale
+ case kWhatFetchMetaData:
+ {
+ if (!mMetadataSelected) {
break;
}
- sp<ABuffer> buffer;
- if (mLiveSession->dequeueAccessUnit(
- LiveSession::STREAMTYPE_SUBTITLES, &buffer) == OK) {
- sp<AMessage> notify = dupNotify();
- notify->setInt32("what", kWhatSubtitleData);
- notify->setBuffer("buffer", buffer);
- notify->post();
-
- int64_t timeUs, baseUs, durationUs, delayUs;
- CHECK(buffer->meta()->findInt64("baseUs", &baseUs));
- CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
- CHECK(buffer->meta()->findInt64("durationUs", &durationUs));
- delayUs = baseUs + timeUs - ALooper::GetNowUs();
-
- msg->post(delayUs > 0ll ? delayUs : 0ll);
- } else {
- // try again in 1 second
- msg->post(1000000ll);
- }
+ pollForRawData(
+ msg, mFetchMetaDataGeneration,
+ /* fetch */ LiveSession::STREAMTYPE_METADATA,
+ /* push */ kWhatTimedMetaData);
break;
}
@@ -309,6 +376,19 @@ void NuPlayer::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) {
break;
}
+ case LiveSession::kWhatMetadataDetected:
+ {
+ if (!mHasMetadata) {
+ mHasMetadata = true;
+
+ sp<AMessage> notify = dupNotify();
+ // notification without buffer triggers MEDIA_INFO_METADATA_UPDATE
+ notify->setInt32("what", kWhatTimedMetaData);
+ notify->post();
+ }
+ break;
+ }
+
case LiveSession::kWhatError:
{
break;
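
NuPlayer::HTTPLiveSource::pollForRawData above generalizes the old subtitle polling loop. delayUs = baseUs + timeUs - now is how far in the future the buffer's presentation time lies: subtitles deliver one buffer and re-arm the poll at that delay, while metadata drains every queued buffer right away and drops anything already more than a second stale. A standalone sketch of that decision, separate from the Android message loop:

#include <algorithm>
#include <cstdint>

// Decision mirrored from HTTPLiveSource::pollForRawData in the diff above.
enum class MetaAction { Deliver, DropStale };

// Metadata buffers: deliver unless the buffer is more than 1 s in the past.
MetaAction classifyMetadataBuffer(int64_t baseUs, int64_t timeUs, int64_t nowUs) {
    const int64_t delayUs = baseUs + timeUs - nowUs;
    return (delayUs < -1000000ll) ? MetaAction::DropStale : MetaAction::Deliver;
}

// Subtitle buffers: deliver now, then re-post the poll no earlier than the
// buffer's presentation time (clamped to "immediately" if already due).
int64_t subtitleRepostDelayUs(int64_t baseUs, int64_t timeUs, int64_t nowUs) {
    const int64_t delayUs = baseUs + timeUs - nowUs;
    return std::max<int64_t>(delayUs, 0);
}
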
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
index bbb8981..9e0ec2f 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
@@ -21,6 +21,8 @@
#include "NuPlayer.h"
#include "NuPlayerSource.h"
+#include "LiveSession.h"
+
namespace android {
struct LiveSession;
@@ -60,6 +62,7 @@ private:
enum {
kWhatSessionNotify,
kWhatFetchSubtitleData,
+ kWhatFetchMetaData,
};
sp<IMediaHTTPService> mHTTPService;
@@ -71,8 +74,14 @@ private:
sp<ALooper> mLiveLooper;
sp<LiveSession> mLiveSession;
int32_t mFetchSubtitleDataGeneration;
+ int32_t mFetchMetaDataGeneration;
+ bool mHasMetadata;
+ bool mMetadataSelected;
void onSessionNotify(const sp<AMessage> &msg);
+ void pollForRawData(
+ const sp<AMessage> &msg, int32_t currentGeneration,
+ LiveSession::StreamType fetchType, int32_t pushWhat);
DISALLOW_EVIL_CONSTRUCTORS(HTTPLiveSource);
};
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index db73784..1bd4e57 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1859,6 +1859,17 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
break;
}
+ case Source::kWhatTimedMetaData:
+ {
+ sp<ABuffer> buffer;
+ if (!msg->findBuffer("buffer", &buffer)) {
+ notifyListener(MEDIA_INFO, MEDIA_INFO_METADATA_UPDATE, 0);
+ } else {
+ sendTimedMetaData(buffer);
+ }
+ break;
+ }
+
case Source::kWhatTimedTextData:
{
int32_t generation;
@@ -1967,6 +1978,19 @@ void NuPlayer::sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex) {
notifyListener(MEDIA_SUBTITLE_DATA, 0, 0, &in);
}
+void NuPlayer::sendTimedMetaData(const sp<ABuffer> &buffer) {
+ int64_t timeUs;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+ Parcel in;
+ in.writeInt64(timeUs);
+ in.writeInt32(buffer->size());
+ in.writeInt32(buffer->size());
+ in.write(buffer->data(), buffer->size());
+
+ notifyListener(MEDIA_META_DATA, 0, 0, &in);
+}
+
void NuPlayer::sendTimedTextData(const sp<ABuffer> &buffer) {
const void *data;
size_t size = 0;
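
NuPlayer::sendTimedMetaData above writes a small fixed parcel per metadata buffer: the presentation time in microseconds, the payload size twice (mirroring the two writeInt32 calls in the diff), then the raw bytes. A hedged sketch of the same field order using a plain byte vector rather than android::Parcel (Parcel's actual padding and endianness are not modeled):

#include <cstdint>
#include <vector>

// Byte-for-field sketch of the MEDIA_META_DATA parcel built in
// NuPlayer::sendTimedMetaData; the field order is taken from the diff above.
std::vector<uint8_t> packTimedMetaData(int64_t timeUs,
                                       const uint8_t *data, int32_t size) {
    std::vector<uint8_t> out;
    auto append = [&out](const void *p, size_t n) {
        const uint8_t *b = static_cast<const uint8_t *>(p);
        out.insert(out.end(), b, b + n);
    };
    append(&timeUs, sizeof(timeUs));          // presentation time in microseconds
    append(&size, sizeof(size));              // payload size ...
    append(&size, sizeof(size));              // ... written twice, as in the diff
    append(data, static_cast<size_t>(size));  // raw (typically ID3) payload
    return out;
}
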
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 623b0ce..c0205b6 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -249,6 +249,7 @@ private:
bool audio, bool video, const sp<AMessage> &reply);
void sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex);
+ void sendTimedMetaData(const sp<ABuffer> &buffer);
void sendTimedTextData(const sp<ABuffer> &buffer);
void writeTrackInfo(Parcel* reply, const sp<AMessage> format) const;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index d9f14a2..1b0c2df 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -53,6 +53,7 @@ struct NuPlayer::Source : public AHandler {
kWhatCacheStats,
kWhatSubtitleData,
kWhatTimedTextData,
+ kWhatTimedMetaData,
kWhatQueueDecoderShutdown,
kWhatDrmNoLicense,
kWhatInstantiateSecureDecoders,
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index c48a5ae..b0a65d2 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -62,5 +62,6 @@ const char *MEDIA_MIMETYPE_TEXT_3GPP = "text/3gpp-tt";
const char *MEDIA_MIMETYPE_TEXT_SUBRIP = "application/x-subrip";
const char *MEDIA_MIMETYPE_TEXT_VTT = "text/vtt";
const char *MEDIA_MIMETYPE_TEXT_CEA_608 = "text/cea-608";
+const char *MEDIA_MIMETYPE_DATA_METADATA = "application/octet-stream";
} // namespace android
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 4886000..203444a 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -38,6 +38,7 @@
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaHTTP.h>
+#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
@@ -135,6 +136,8 @@ const char *LiveSession::getKeyForStream(StreamType type) {
return "timeUsAudio";
case STREAMTYPE_SUBTITLES:
return "timeUsSubtitle";
+ case STREAMTYPE_METADATA:
+ return "timeUsMetadata"; // unused
default:
TRESPASS();
}
@@ -150,12 +153,30 @@ const char *LiveSession::getNameForStream(StreamType type) {
return "audio";
case STREAMTYPE_SUBTITLES:
return "subs";
+ case STREAMTYPE_METADATA:
+ return "metadata";
default:
break;
}
return "unknown";
}
+//static
+ATSParser::SourceType LiveSession::getSourceTypeForStream(StreamType type) {
+ switch (type) {
+ case STREAMTYPE_VIDEO:
+ return ATSParser::VIDEO;
+ case STREAMTYPE_AUDIO:
+ return ATSParser::AUDIO;
+ case STREAMTYPE_METADATA:
+ return ATSParser::META;
+ case STREAMTYPE_SUBTITLES:
+ default:
+ TRESPASS();
+ }
+ return ATSParser::NUM_SOURCE_TYPES; // should not reach here
+}
+
LiveSession::LiveSession(
const sp<AMessage> &notify, uint32_t flags,
const sp<IMediaHTTPService> &httpService)
@@ -187,12 +208,13 @@ LiveSession::LiveSession(
mUpSwitchMargin(kUpSwitchMarginUs),
mFirstTimeUsValid(false),
mFirstTimeUs(0),
- mLastSeekTimeUs(0) {
+ mLastSeekTimeUs(0),
+ mHasMetadata(false) {
mStreams[kAudioIndex] = StreamItem("audio");
mStreams[kVideoIndex] = StreamItem("video");
mStreams[kSubtitleIndex] = StreamItem("subtitles");
- for (size_t i = 0; i < kMaxStreams; ++i) {
+ for (size_t i = 0; i < kNumSources; ++i) {
mPacketSources.add(indexToType(i), new AnotherPacketSource(NULL /* meta */));
mPacketSources2.add(indexToType(i), new AnotherPacketSource(NULL /* meta */));
}
@@ -204,6 +226,20 @@ LiveSession::~LiveSession() {
}
}
+int64_t LiveSession::calculateMediaTimeUs(
+ int64_t firstTimeUs, int64_t timeUs, int32_t discontinuitySeq) {
+ if (timeUs >= firstTimeUs) {
+ timeUs -= firstTimeUs;
+ } else {
+ timeUs = 0;
+ }
+ timeUs += mLastSeekTimeUs;
+ if (mDiscontinuityOffsetTimesUs.indexOfKey(discontinuitySeq) >= 0) {
+ timeUs += mDiscontinuityOffsetTimesUs.valueFor(discontinuitySeq);
+ }
+ return timeUs;
+}
+
status_t LiveSession::dequeueAccessUnit(
StreamType stream, sp<ABuffer> *accessUnit) {
status_t finalResult = OK;
@@ -236,7 +272,6 @@ status_t LiveSession::dequeueAccessUnit(
status_t err = packetSource->dequeueAccessUnit(accessUnit);
- StreamItem& strm = mStreams[streamIdx];
if (err == INFO_DISCONTINUITY) {
// adaptive streaming, discontinuities in the playlist
int32_t type;
@@ -256,6 +291,7 @@ status_t LiveSession::dequeueAccessUnit(
if (stream == STREAMTYPE_AUDIO || stream == STREAMTYPE_VIDEO) {
int64_t timeUs, originalTimeUs;
int32_t discontinuitySeq = 0;
+ StreamItem& strm = mStreams[streamIdx];
CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
originalTimeUs = timeUs;
(*accessUnit)->meta()->findInt32("discontinuitySeq", &discontinuitySeq);
@@ -299,15 +335,7 @@ status_t LiveSession::dequeueAccessUnit(
}
strm.mLastDequeuedTimeUs = timeUs;
- if (timeUs >= firstTimeUs) {
- timeUs -= firstTimeUs;
- } else {
- timeUs = 0;
- }
- timeUs += mLastSeekTimeUs;
- if (mDiscontinuityOffsetTimesUs.indexOfKey(discontinuitySeq) >= 0) {
- timeUs += mDiscontinuityOffsetTimesUs.valueFor(discontinuitySeq);
- }
+ timeUs = calculateMediaTimeUs(firstTimeUs, timeUs, discontinuitySeq);
ALOGV("[%s] dequeueAccessUnit: time %lld us, original %lld us",
streamStr, (long long)timeUs, (long long)originalTimeUs);
@@ -323,6 +351,17 @@ status_t LiveSession::dequeueAccessUnit(
(*accessUnit)->meta()->setInt32(
"trackIndex", mPlaylist->getSelectedIndex());
(*accessUnit)->meta()->setInt64("baseUs", mRealTimeBaseUs);
+ } else if (stream == STREAMTYPE_METADATA) {
+ HLSTime mdTime((*accessUnit)->meta());
+ if (mDiscontinuityAbsStartTimesUs.indexOfKey(mdTime.mSeq) < 0) {
+ packetSource->requeueAccessUnit((*accessUnit));
+ return -EAGAIN;
+ } else {
+ int64_t firstTimeUs = mDiscontinuityAbsStartTimesUs.valueFor(mdTime.mSeq);
+ int64_t timeUs = calculateMediaTimeUs(firstTimeUs, mdTime.mTimeUs, mdTime.mSeq);
+ (*accessUnit)->meta()->setInt64("timeUs", timeUs);
+ (*accessUnit)->meta()->setInt64("baseUs", mRealTimeBaseUs);
+ }
}
} else {
ALOGI("[%s] encountered error %d", streamStr, err);
@@ -728,6 +767,17 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case PlaylistFetcher::kWhatMetadataDetected:
+ {
+ if (!mHasMetadata) {
+ mHasMetadata = true;
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatMetadataDetected);
+ notify->post();
+ }
+ break;
+ }
+
default:
TRESPASS();
}
@@ -788,7 +838,7 @@ int LiveSession::SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b)
// static
LiveSession::StreamType LiveSession::indexToType(int idx) {
- CHECK(idx >= 0 && idx < kMaxStreams);
+ CHECK(idx >= 0 && idx < kNumSources);
return (StreamType)(1 << idx);
}
@@ -801,6 +851,8 @@ ssize_t LiveSession::typeToIndex(int32_t type) {
return 1;
case STREAMTYPE_SUBTITLES:
return 2;
+ case STREAMTYPE_METADATA:
+ return 3;
default:
return -1;
};
@@ -1179,6 +1231,45 @@ static double uniformRand() {
}
#endif
+bool LiveSession::UriIsSameAsIndex(const AString &uri, int32_t i, bool newUri) {
+ ALOGI("[timed_id3] i %d UriIsSameAsIndex newUri %s, %s", i,
+ newUri ? "true" : "false",
+ newUri ? mStreams[i].mNewUri.c_str() : mStreams[i].mUri.c_str());
+ return i >= 0
+ && ((!newUri && uri == mStreams[i].mUri)
+ || (newUri && uri == mStreams[i].mNewUri));
+}
+
+sp<AnotherPacketSource> LiveSession::getPacketSourceForStreamIndex(
+ size_t trackIndex, bool newUri) {
+ StreamType type = indexToType(trackIndex);
+ sp<AnotherPacketSource> source = NULL;
+ if (newUri) {
+ source = mPacketSources2.valueFor(type);
+ source->clear();
+ } else {
+ source = mPacketSources.valueFor(type);
+ };
+ return source;
+}
+
+sp<AnotherPacketSource> LiveSession::getMetadataSource(
+ sp<AnotherPacketSource> sources[kNumSources], uint32_t streamMask, bool newUri) {
+ // todo: One case where the following strategy can fail is when audio and video
+ // are in separate playlists, both are transport streams, and the metadata
+ // is actually contained in the audio stream.
+ ALOGV("[timed_id3] getMetadataSourceForUri streamMask %x newUri %s",
+ streamMask, newUri ? "true" : "false");
+
+ if ((sources[kVideoIndex] != NULL) // video fetcher; or ...
+ || (!(streamMask & STREAMTYPE_VIDEO) && sources[kAudioIndex] != NULL)) {
+ // ... audio fetcher for audio only variant
+ return getPacketSourceForStreamIndex(kMetaDataIndex, newUri);
+ }
+
+ return NULL;
+}
+
bool LiveSession::resumeFetcher(
const AString &uri, uint32_t streamMask, int64_t timeUs, bool newUri) {
ssize_t index = mFetcherInfos.indexOfKey(uri);
@@ -1188,18 +1279,11 @@ bool LiveSession::resumeFetcher(
}
bool resume = false;
- sp<AnotherPacketSource> sources[kMaxStreams];
+ sp<AnotherPacketSource> sources[kNumSources];
for (size_t i = 0; i < kMaxStreams; ++i) {
- if ((streamMask & indexToType(i))
- && ((!newUri && uri == mStreams[i].mUri)
- || (newUri && uri == mStreams[i].mNewUri))) {
+ if ((streamMask & indexToType(i)) && UriIsSameAsIndex(uri, i, newUri)) {
resume = true;
- if (newUri) {
- sources[i] = mPacketSources2.valueFor(indexToType(i));
- sources[i]->clear();
- } else {
- sources[i] = mPacketSources.valueFor(indexToType(i));
- }
+ sources[i] = getPacketSourceForStreamIndex(i, newUri);
}
}
@@ -1214,6 +1298,7 @@ bool LiveSession::resumeFetcher(
sources[kAudioIndex],
sources[kVideoIndex],
sources[kSubtitleIndex],
+ getMetadataSource(sources, streamMask, newUri),
timeUs, -1, -1, seekMode);
}
@@ -1424,7 +1509,7 @@ size_t LiveSession::getTrackCount() const {
if (mPlaylist == NULL) {
return 0;
} else {
- return mPlaylist->getTrackCount();
+ return mPlaylist->getTrackCount() + (mHasMetadata ? 1 : 0);
}
}
@@ -1432,6 +1517,13 @@ sp<AMessage> LiveSession::getTrackInfo(size_t trackIndex) const {
if (mPlaylist == NULL) {
return NULL;
} else {
+ if (trackIndex == mPlaylist->getTrackCount() && mHasMetadata) {
+ sp<AMessage> format = new AMessage();
+ format->setInt32("type", MEDIA_TRACK_TYPE_METADATA);
+ format->setString("language", "und");
+ format->setString("mime", MEDIA_MIMETYPE_DATA_METADATA);
+ return format;
+ }
return mPlaylist->getTrackInfo(trackIndex);
}
}
@@ -1768,7 +1860,7 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
HLSTime startTime;
SeekMode seekMode = kSeekModeExactPosition;
- sp<AnotherPacketSource> sources[kMaxStreams];
+ sp<AnotherPacketSource> sources[kNumSources];
if (i == kSubtitleIndex || (!pickTrack && !switching)) {
startTime = latestMediaSegmentStartTime();
@@ -1797,8 +1889,8 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
}
}
- if (j != kSubtitleIndex && meta != NULL
- && !meta->findInt32("discontinuity", &type)) {
+ if ((j == kAudioIndex || j == kVideoIndex)
+ && meta != NULL && !meta->findInt32("discontinuity", &type)) {
HLSTime tmpTime(meta);
if (startTime < tmpTime) {
startTime = tmpTime;
@@ -1851,6 +1943,7 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
sources[kAudioIndex],
sources[kVideoIndex],
sources[kSubtitleIndex],
+ getMetadataSource(sources, mNewStreamMask, switching),
startTime.mTimeUs < 0 ? mLastSeekTimeUs : startTime.mTimeUs,
startTime.getSegmentTimeUs(true /* midpoint */),
startTime.mSeq,
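
LiveSession::calculateMediaTimeUs factors out the rebasing that dequeueAccessUnit previously did inline, so the new metadata path can reuse it: subtract the first timestamp recorded for the buffer's discontinuity sequence (clamping at zero), add the last seek position, then add any offset accumulated for that discontinuity. Restated standalone, with a std::map standing in for the KeyedVector:

#include <cstdint>
#include <map>

// Standalone restatement of LiveSession::calculateMediaTimeUs from the diff above.
int64_t calculateMediaTimeUs(int64_t firstTimeUs, int64_t timeUs,
                             int32_t discontinuitySeq,
                             int64_t lastSeekTimeUs,
                             const std::map<int32_t, int64_t> &discontinuityOffsetsUs) {
    // Rebase onto the first timestamp of this discontinuity sequence.
    timeUs = (timeUs >= firstTimeUs) ? timeUs - firstTimeUs : 0;
    // Media time continues from wherever the last seek landed.
    timeUs += lastSeekTimeUs;
    // Plus whatever offset earlier discontinuities contributed.
    auto it = discontinuityOffsetsUs.find(discontinuitySeq);
    if (it != discontinuityOffsetsUs.end()) {
        timeUs += it->second;
    }
    return timeUs;
}
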
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index ed74bc2..86d0498 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -23,6 +23,8 @@
#include <utils/String8.h>
+#include "mpeg2ts/ATSParser.h"
+
namespace android {
struct ABuffer;
@@ -47,12 +49,15 @@ struct LiveSession : public AHandler {
kVideoIndex = 1,
kSubtitleIndex = 2,
kMaxStreams = 3,
+ kMetaDataIndex = 3,
+ kNumSources = 4,
};
enum StreamType {
STREAMTYPE_AUDIO = 1 << kAudioIndex,
STREAMTYPE_VIDEO = 1 << kVideoIndex,
STREAMTYPE_SUBTITLES = 1 << kSubtitleIndex,
+ STREAMTYPE_METADATA = 1 << kMetaDataIndex,
};
enum SeekMode {
@@ -66,6 +71,7 @@ struct LiveSession : public AHandler {
uint32_t flags,
const sp<IMediaHTTPService> &httpService);
+ int64_t calculateMediaTimeUs(int64_t firstTimeUs, int64_t timeUs, int32_t discontinuitySeq);
status_t dequeueAccessUnit(StreamType stream, sp<ABuffer> *accessUnit);
status_t getStreamFormat(StreamType stream, sp<AMessage> *format);
@@ -92,6 +98,7 @@ struct LiveSession : public AHandler {
static const char *getKeyForStream(StreamType type);
static const char *getNameForStream(StreamType type);
+ static ATSParser::SourceType getSourceTypeForStream(StreamType type);
enum {
kWhatStreamsChanged,
@@ -101,6 +108,7 @@ struct LiveSession : public AHandler {
kWhatBufferingStart,
kWhatBufferingEnd,
kWhatBufferingUpdate,
+ kWhatMetadataDetected,
};
protected:
@@ -233,6 +241,8 @@ private:
bool mFirstTimeUsValid;
int64_t mFirstTimeUs;
int64_t mLastSeekTimeUs;
+ bool mHasMetadata;
+
KeyedVector<size_t, int64_t> mDiscontinuityAbsStartTimesUs;
KeyedVector<size_t, int64_t> mDiscontinuityOffsetTimesUs;
@@ -268,6 +278,11 @@ private:
sp<M3UParser> fetchPlaylist(
const char *url, uint8_t *curPlaylistHash, bool *unchanged);
+ bool UriIsSameAsIndex( const AString &uri, int32_t index, bool newUri);
+ sp<AnotherPacketSource> getPacketSourceForStreamIndex(size_t trackIndex, bool newUri);
+ sp<AnotherPacketSource> getMetadataSource(
+ sp<AnotherPacketSource> sources[kNumSources], uint32_t streamMask, bool newUri);
+
bool resumeFetcher(
const AString &uri, uint32_t streamMask,
int64_t timeUs = -1ll, bool newUri = false);
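
The header changes above are where the extra stream is bolted on: kMetaDataIndex = 3 and kNumSources = 4 sit outside kMaxStreams = 3, so the metadata packet source is allocated and indexed like the others without ever counting as a selectable audio/video/subtitle stream, and STREAMTYPE_METADATA is simply the next single-bit mask. A minimal sketch of the index/mask round trip:

#include <cassert>
#include <cstdint>

// Mirror of the LiveSession index/StreamType mapping from the diff above.
enum { kAudioIndex = 0, kVideoIndex = 1, kSubtitleIndex = 2,
       kMaxStreams = 3,                       // audio/video/subtitles only
       kMetaDataIndex = 3, kNumSources = 4 }; // metadata rides along as a 4th source

enum StreamType {
    STREAMTYPE_AUDIO     = 1 << kAudioIndex,
    STREAMTYPE_VIDEO     = 1 << kVideoIndex,
    STREAMTYPE_SUBTITLES = 1 << kSubtitleIndex,
    STREAMTYPE_METADATA  = 1 << kMetaDataIndex,
};

StreamType indexToType(int idx) {
    assert(idx >= 0 && idx < kNumSources);
    return static_cast<StreamType>(1 << idx);
}

int typeToIndex(int32_t type) {
    switch (type) {
        case STREAMTYPE_AUDIO:     return 0;
        case STREAMTYPE_VIDEO:     return 1;
        case STREAMTYPE_SUBTITLES: return 2;
        case STREAMTYPE_METADATA:  return 3;
        default:                   return -1;
    }
}

int main() {
    // Metadata never contributes to the a/v/subtitle stream mask, but the
    // index <-> type mapping round-trips for all four sources.
    for (int i = 0; i < kNumSources; ++i) {
        assert(typeToIndex(indexToType(i)) == i);
    }
    return 0;
}
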
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index ce79cc2..a4e523d 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -17,6 +17,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "PlaylistFetcher"
#include <utils/Log.h>
+#include <utils/misc.h>
#include "PlaylistFetcher.h"
@@ -174,7 +175,8 @@ PlaylistFetcher::PlaylistFetcher(
mFirstTimeUs(-1ll),
mVideoBuffer(new AnotherPacketSource(NULL)),
mThresholdRatio(-1.0f),
- mDownloadState(new DownloadState()) {
+ mDownloadState(new DownloadState()),
+ mHasMetadata(false) {
memset(mPlaylistHash, 0, sizeof(mPlaylistHash));
mHTTPDataSource = mSession->getHTTPDataSource();
}
@@ -470,6 +472,7 @@ void PlaylistFetcher::startAsync(
const sp<AnotherPacketSource> &audioSource,
const sp<AnotherPacketSource> &videoSource,
const sp<AnotherPacketSource> &subtitleSource,
+ const sp<AnotherPacketSource> &metadataSource,
int64_t startTimeUs,
int64_t segmentStartTimeUs,
int32_t startDiscontinuitySeq,
@@ -493,6 +496,11 @@ void PlaylistFetcher::startAsync(
streamTypeMask |= LiveSession::STREAMTYPE_SUBTITLES;
}
+ if (metadataSource != NULL) {
+ msg->setPointer("metadataSource", metadataSource.get());
+ // metadataSource does not affect streamTypeMask.
+ }
+
msg->setInt32("streamTypeMask", streamTypeMask);
msg->setInt64("startTimeUs", startTimeUs);
msg->setInt64("segmentStartTimeUs", segmentStartTimeUs);
@@ -637,6 +645,15 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {
static_cast<AnotherPacketSource *>(ptr));
}
+ void *ptr;
+ // metadataSource is not part of streamTypeMask
+ if ((streamTypeMask & (LiveSession::STREAMTYPE_AUDIO | LiveSession::STREAMTYPE_VIDEO))
+ && msg->findPointer("metadataSource", &ptr)) {
+ mPacketSources.add(
+ LiveSession::STREAMTYPE_METADATA,
+ static_cast<AnotherPacketSource *>(ptr));
+ }
+
mStreamTypeMask = streamTypeMask;
mSegmentStartTimeUs = segmentStartTimeUs;
@@ -1315,11 +1332,11 @@ void PlaylistFetcher::onDownloadNext() {
if (bufferStartsWithTsSyncByte(buffer)) {
// If we don't see a stream in the program table after fetching a full ts segment
// mark it as nonexistent.
- const size_t kNumTypes = ATSParser::NUM_SOURCE_TYPES;
- ATSParser::SourceType srcTypes[kNumTypes] =
+ ATSParser::SourceType srcTypes[] =
{ ATSParser::VIDEO, ATSParser::AUDIO };
- LiveSession::StreamType streamTypes[kNumTypes] =
+ LiveSession::StreamType streamTypes[] =
{ LiveSession::STREAMTYPE_VIDEO, LiveSession::STREAMTYPE_AUDIO };
+ const size_t kNumTypes = NELEM(srcTypes);
for (size_t i = 0; i < kNumTypes; i++) {
ATSParser::SourceType srcType = srcTypes[i];
@@ -1502,6 +1519,27 @@ const sp<ABuffer> &PlaylistFetcher::setAccessUnitProperties(
return accessUnit;
}
+bool PlaylistFetcher::isStartTimeReached(int64_t timeUs) {
+ if (!mFirstPTSValid) {
+ mFirstTimeUs = timeUs;
+ mFirstPTSValid = true;
+ }
+ bool startTimeReached = true;
+ if (mStartTimeUsRelative) {
+ FLOGV("startTimeUsRelative, timeUs (%lld) - %lld = %lld",
+ (long long)timeUs,
+ (long long)mFirstTimeUs,
+ (long long)(timeUs - mFirstTimeUs));
+ timeUs -= mFirstTimeUs;
+ if (timeUs < 0) {
+ FLOGV("clamp negative timeUs to 0");
+ timeUs = 0;
+ }
+ startTimeReached = (timeUs >= mStartTimeUs);
+ }
+ return startTimeReached;
+}
+
status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &buffer) {
if (mTSParser == NULL) {
// Use TS_TIMESTAMPS_ARE_ABSOLUTE so pts carry over between fetchers.
@@ -1548,10 +1586,9 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
ALOGE("MPEG2 Transport streams do not contain subtitles.");
return ERROR_MALFORMED;
}
+
const char *key = LiveSession::getKeyForStream(stream);
- ATSParser::SourceType type =
- (stream == LiveSession::STREAMTYPE_AUDIO) ?
- ATSParser::AUDIO : ATSParser::VIDEO;
+ ATSParser::SourceType type = LiveSession::getSourceTypeForStream(stream);
sp<AnotherPacketSource> source =
static_cast<AnotherPacketSource *>(
@@ -1637,23 +1674,7 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
}
}
if (mStartup) {
- if (!mFirstPTSValid) {
- mFirstTimeUs = timeUs;
- mFirstPTSValid = true;
- }
- bool startTimeReached = true;
- if (mStartTimeUsRelative) {
- FLOGV("startTimeUsRelative, timeUs (%lld) - %lld = %lld",
- (long long)timeUs,
- (long long)mFirstTimeUs,
- (long long)(timeUs - mFirstTimeUs));
- timeUs -= mFirstTimeUs;
- if (timeUs < 0) {
- FLOGV("clamp negative timeUs to 0");
- timeUs = 0;
- }
- startTimeReached = (timeUs >= mStartTimeUs);
- }
+ bool startTimeReached = isStartTimeReached(timeUs);
if (!startTimeReached || (isAvc && !mIDRFound)) {
// buffer up to the closest preceding IDR frame in the next segment,
@@ -1680,7 +1701,8 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
if (mStartTimeUsNotify != NULL) {
uint32_t streamMask = 0;
mStartTimeUsNotify->findInt32("streamMask", (int32_t *) &streamMask);
- if (!(streamMask & mPacketSources.keyAt(i))) {
+ if ((mStreamTypeMask & mPacketSources.keyAt(i))
+ && !(streamMask & mPacketSources.keyAt(i))) {
streamMask |= mPacketSources.keyAt(i);
mStartTimeUsNotify->setInt32("streamMask", streamMask);
FSLOGV(stream, "found start point, timeUs=%lld, streamMask becomes %x",
@@ -1721,6 +1743,11 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
FSLOGV(stream, "queueAccessUnit (saved), timeUs=%lld",
(long long)bufferTimeUs);
}
+ } else if (stream == LiveSession::STREAMTYPE_METADATA && !mHasMetadata) {
+ mHasMetadata = true;
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatMetadataDetected);
+ notify->post();
}
setAccessUnitProperties(accessUnit, source);
diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h
index f64d160..bb14a0d 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.h
+++ b/media/libstagefright/httplive/PlaylistFetcher.h
@@ -49,6 +49,7 @@ struct PlaylistFetcher : public AHandler {
kWhatPreparationFailed,
kWhatStartedAt,
kWhatStopReached,
+ kWhatMetadataDetected,
};
PlaylistFetcher(
@@ -66,6 +67,7 @@ struct PlaylistFetcher : public AHandler {
const sp<AnotherPacketSource> &audioSource,
const sp<AnotherPacketSource> &videoSource,
const sp<AnotherPacketSource> &subtitleSource,
+ const sp<AnotherPacketSource> &metadataSource,
int64_t startTimeUs = -1ll, // starting timestamps
int64_t segmentStartTimeUs = -1ll, // starting position within playlist
// startTimeUs!=segmentStartTimeUs only when playlist is live
@@ -177,6 +179,8 @@ private:
sp<DownloadState> mDownloadState;
+ bool mHasMetadata;
+
// Set first to true if decrypting the first segment of a playlist segment. When
// first is true, reset the initialization vector based on the available
// information in the manifest; otherwise, use the initialization vector as
@@ -222,6 +226,7 @@ private:
const sp<ABuffer> &accessUnit,
const sp<AnotherPacketSource> &source,
bool discard = false);
+ bool isStartTimeReached(int64_t timeUs);
status_t extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &buffer);
status_t extractAndQueueAccessUnits(
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index 14ae81c..5411821 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -132,6 +132,7 @@ struct ATSParser::Stream : public RefBase {
bool isAudio() const;
bool isVideo() const;
+ bool isMeta() const;
protected:
virtual ~Stream();
@@ -604,6 +605,11 @@ ATSParser::Stream::Stream(
ElementaryStreamQueue::AC3);
break;
+ case STREAMTYPE_METADATA:
+ mQueue = new ElementaryStreamQueue(
+ ElementaryStreamQueue::METADATA);
+ break;
+
default:
break;
}
@@ -722,6 +728,13 @@ bool ATSParser::Stream::isAudio() const {
}
}
+bool ATSParser::Stream::isMeta() const {
+ if (mStreamType == STREAMTYPE_METADATA) {
+ return true;
+ }
+ return false;
+}
+
void ATSParser::Stream::signalDiscontinuity(
DiscontinuityType type, const sp<AMessage> &extra) {
mExpectedContinuityCounter = -1;
@@ -1037,6 +1050,14 @@ sp<MediaSource> ATSParser::Stream::getSource(SourceType type) {
break;
}
+ case META:
+ {
+ if (isMeta()) {
+ return mSource;
+ }
+ break;
+ }
+
default:
break;
}
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index a1405bd..87ab1a0 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -74,7 +74,8 @@ struct ATSParser : public RefBase {
enum SourceType {
VIDEO = 0,
AUDIO = 1,
- NUM_SOURCE_TYPES = 2
+ META = 2,
+ NUM_SOURCE_TYPES = 3
};
sp<MediaSource> getSource(SourceType type);
bool hasSource(SourceType type) const;
@@ -90,6 +91,7 @@ struct ATSParser : public RefBase {
STREAMTYPE_MPEG2_AUDIO = 0x04,
STREAMTYPE_MPEG2_AUDIO_ADTS = 0x0f,
STREAMTYPE_MPEG4_VIDEO = 0x10,
+ STREAMTYPE_METADATA = 0x15,
STREAMTYPE_H264 = 0x1b,
// From ATSC A/53 Part 3:2009, 6.7.1
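
STREAMTYPE_METADATA's value, 0x15, is the ISO/IEC 13818-1 stream_type for metadata carried in PES packets, which is also how HLS carries timed ID3; such streams now get their own ElementaryStreamQueue and are reachable through the new META source type. A small lookup over the stream_type values that appear in this header, assuming the usual 13818-1 assignments:

#include <cstdint>
#include <cstdio>

// stream_type values from the ATSParser.h hunk above (ISO/IEC 13818-1 assignments).
const char *describeStreamType(uint8_t streamType) {
    switch (streamType) {
        case 0x04: return "MPEG-2 audio";
        case 0x0f: return "AAC (ADTS)";
        case 0x10: return "MPEG-4 visual";
        case 0x15: return "metadata in PES packets (timed ID3 in HLS)";
        case 0x1b: return "H.264/AVC";
        default:   return "other";
    }
}

int main() {
    std::printf("0x15 -> %s\n", describeStreamType(0x15));
    return 0;
}
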
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index a4f8739..87ec860 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -73,7 +73,7 @@ void AnotherPacketSource::setFormat(const sp<MetaData> &meta) {
} else if (!strncasecmp("video/", mime, 6)) {
mIsVideo = true;
} else {
- CHECK(!strncasecmp("text/", mime, 5));
+ CHECK(!strncasecmp("text/", mime, 5) || !strncasecmp("application/", mime, 12));
}
}
@@ -146,6 +146,12 @@ status_t AnotherPacketSource::dequeueAccessUnit(sp<ABuffer> *buffer) {
return mEOSResult;
}
+void AnotherPacketSource::requeueAccessUnit(const sp<ABuffer> &buffer) {
+ // TODO: update corresponding book keeping info.
+ Mutex::Autolock autoLock(mLock);
+ mBuffers.push_front(buffer);
+}
+
status_t AnotherPacketSource::read(
MediaBuffer **out, const ReadOptions *) {
*out = NULL;
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
index fa7dd6a..08cd92e 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
@@ -67,6 +67,7 @@ struct AnotherPacketSource : public MediaSource {
void signalEOS(status_t result);
status_t dequeueAccessUnit(sp<ABuffer> *buffer);
+ void requeueAccessUnit(const sp<ABuffer> &buffer);
bool isFinished(int64_t duration) const;
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index 55262ff..f28a1fd 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -415,6 +415,7 @@ status_t ElementaryStreamQueue::appendData(
}
case PCM_AUDIO:
+ case METADATA:
{
break;
}
@@ -499,6 +500,8 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnit() {
return dequeueAccessUnitMPEG4Video();
case PCM_AUDIO:
return dequeueAccessUnitPCMAudio();
+ case METADATA:
+ return dequeueAccessUnitMetadata();
default:
CHECK_EQ((unsigned)mMode, (unsigned)MPEG_AUDIO);
return dequeueAccessUnitMPEGAudio();
@@ -1292,5 +1295,25 @@ void ElementaryStreamQueue::signalEOS() {
}
}
+sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMetadata() {
+ size_t size = mBuffer->size();
+ if (!size) {
+ return NULL;
+ }
+
+ sp<ABuffer> accessUnit = new ABuffer(size);
+ int64_t timeUs = fetchTimestamp(size);
+ accessUnit->meta()->setInt64("timeUs", timeUs);
+
+ memcpy(accessUnit->data(), mBuffer->data(), size);
+ mBuffer->setRange(0, 0);
+
+ if (mFormat == NULL) {
+ mFormat = new MetaData;
+ mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_DATA_METADATA);
+ }
+
+ return accessUnit;
+}
} // namespace android
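
ElementaryStreamQueue::dequeueAccessUnitMetadata above does no parsing: it hands the accumulated payload up as one opaque access unit stamped with fetchTimestamp(size) and tagged application/octet-stream, leaving interpretation to the application. For HLS the payload is normally an ID3v2 tag, so a consumer of the resulting MEDIA_META_DATA parcels might start by validating the standard 10-byte ID3 header; a hedged sketch, not part of the patch:

#include <cstddef>
#include <cstdint>

// Minimal ID3v2 header check a consumer of these metadata buffers might apply.
// Returns the declared tag size (10-byte header plus syncsafe body length),
// or 0 if the buffer does not start with an ID3v2 header.
size_t id3v2TagSize(const uint8_t *data, size_t size) {
    if (size < 10 || data[0] != 'I' || data[1] != 'D' || data[2] != '3') {
        return 0;
    }
    // Bytes 6..9 hold a "syncsafe" length: 7 significant bits per byte.
    uint32_t bodySize = 0;
    for (int i = 6; i < 10; ++i) {
        if (data[i] & 0x80) {
            return 0;  // high bit must be clear in a syncsafe integer
        }
        bodySize = (bodySize << 7) | data[i];
    }
    return 10 + bodySize;
}
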
diff --git a/media/libstagefright/mpeg2ts/ESQueue.h b/media/libstagefright/mpeg2ts/ESQueue.h
index 5425367..e9f96b7 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.h
+++ b/media/libstagefright/mpeg2ts/ESQueue.h
@@ -37,6 +37,7 @@ struct ElementaryStreamQueue {
MPEG_VIDEO,
MPEG4_VIDEO,
PCM_AUDIO,
+ METADATA,
};
enum Flags {
@@ -75,6 +76,7 @@ private:
sp<ABuffer> dequeueAccessUnitMPEGVideo();
sp<ABuffer> dequeueAccessUnitMPEG4Video();
sp<ABuffer> dequeueAccessUnitPCMAudio();
+ sp<ABuffer> dequeueAccessUnitMetadata();
// consume a logical (compressed) access unit of size "size",
// returns its timestamp in us (or -1 if no time information).